Merge branch 'main' of https://github.com/Infisical/infisical into fix/suborgs-routing
.gitignore (vendored)
@@ -74,3 +74,4 @@ cli/test/infisical-merge
backend/bdd/.bdd-infisical-bootstrap-result.json
/npm/bin
__pycache__
@@ -3,6 +3,7 @@ import os

import pathlib
import typing
from copy import deepcopy

import httpx
from behave.runner import Context
@@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context):


def before_all(context: Context):
    base_vars = {
        "BASE_URL": BASE_URL,
        "PEBBLE_URL": PEBBLE_URL,
    }
    if BOOTSTRAP_INFISICAL:
        details = bootstrap_infisical(context)
        context.vars = {
            "BASE_URL": BASE_URL,
            "PEBBLE_URL": PEBBLE_URL,
        vars = base_vars | {
            "PROJECT_ID": details["project"]["id"],
            "CERT_CA_ID": details["ca"]["id"],
            "CERT_TEMPLATE_ID": details["cert_template"]["id"],
            "AUTH_TOKEN": details["auth_token"],
        }
    else:
        context.vars = {
            "BASE_URL": BASE_URL,
            "PEBBLE_URL": PEBBLE_URL,
        vars = base_vars | {
            "PROJECT_ID": PROJECT_ID,
            "CERT_CA_ID": CERT_CA_ID,
            "CERT_TEMPLATE_ID": CERT_TEMPLATE_ID,
            "AUTH_TOKEN": AUTH_TOKEN,
        }
    context._initial_vars = vars
    context.http_client = httpx.Client(base_url=BASE_URL)


def before_scenario(context: Context, scenario: typing.Any):
    context.vars = deepcopy(context._initial_vars)


def after_scenario(context: Context, scenario: typing.Any):
    if hasattr(context, "web_server"):
        context.web_server.shutdown_and_server_close()
@@ -221,7 +221,6 @@ Feature: Access Control
      | order | .authorizations[0].uri | auth_uri | {auth_uri} | |
      | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} |

  Scenario Outline: URL mismatch
    Given I have an ACME cert profile as "acme_profile"
    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"

@@ -271,3 +270,52 @@ Feature: Access Control
      | order | .authorizations[0].uri | auth_uri | {auth_uri} | https://example.com/acmes/auths/FOOBAR | URL mismatch in the protected header |
      | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD | Invalid URL in the protected header |
      | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header |

  Scenario Outline: Send KID and JWK at the same time
    Given I have an ACME cert profile as "acme_profile"
    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And I memorize acme_account.uri with jq "capture("/(?<id>[^/]+)$") | .id" as account_id
    When I create certificate signing request as csr
    Then I add names to certificate signing request csr
      """
      {
        "COMMON_NAME": "localhost"
      }
      """
    Then I create a RSA private key pair as cert_key
    And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
    And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
    And I peak and memorize the next nonce as nonce_value
    And I memorize <src_var> with jq "<jq>" as <dest_var>
    When I send a raw ACME request to "<url>"
      """
      {
        "protected": {
          "alg": "RS256",
          "nonce": "{nonce_value}",
          "url": "<url>",
          "kid": "{acme_account.uri}",
          "jwk": {
            "n": "mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q",
            "e": "AQAB",
            "kty": "RSA"
          }
        },
        "payload": {}
      }
      """
    Then the value response.status_code should be equal to 400
    And the value response with jq ".status" should be equal to 400
    And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed"
    And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header"

    Examples: Endpoints
      | src_var | jq | dest_var | url |
      | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders |
      | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order |
      | order | . | not_used | {order.uri} |
      | order | . | not_used | {order.uri}/finalize |
      | order | . | not_used | {order.uri}/certificate |
      | order | .authorizations[0].uri | auth_uri | {auth_uri} |
      | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} |
@@ -6,13 +6,32 @@ Feature: Account
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And the value acme_account.uri with jq "." should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+)

  Scenario: Create a new account with the same key pair twice
    Given I have an ACME cert profile as "acme_profile"
    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And I memorize acme_account.uri as kid
    And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2
    And the value error.__class__.__name__ should be equal to "ConflictError"
    And the value error.location should be equal to "{kid}"

  Scenario: Find an existing account
    Given I have an ACME cert profile as "acme_profile"
    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And I memorize acme_account.uri as account_uri
    And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And the value acme_account.uri should be equal to "{account_uri}"
    And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account
    And the value retrieved_account.uri should be equal to "{account_uri}"

  # Note: This is a very special case for cert-manager.
  Scenario: Create a new account with EAB then retrieve it without EAB
    Given I have an ACME cert profile as "acme_profile"
    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    And I memorize acme_account.uri as account_uri
    And I find the existing ACME account without EAB as retrieved_account
    And the value error should be absent
    And the value retrieved_account.uri should be equal to "{account_uri}"

  Scenario: Create a new account without EAB
    Given I have an ACME cert profile as "acme_profile"
@@ -9,6 +9,9 @@ Feature: Directory
      {
        "newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce",
        "newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account",
        "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order"
        "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order",
        "meta": {
          "externalAccountRequired": true
        }
      }
      """
@@ -387,6 +387,9 @@ def register_account_with_eab(
):
    acme_client = context.acme_client
    account_public_key = acme_client.net.key.public_key()
    if not only_return_existing:
        # clear the account in case we want to register twice
        acme_client.net.account = None
    if hasattr(context, "alt_eab_url"):
        eab_directory = messages.Directory.from_json(
            {"newAccount": context.alt_eab_url}
@@ -406,8 +409,14 @@ def register_account_with_eab(
        only_return_existing=only_return_existing,
    )
    try:
        context.vars[account_var] = acme_client.new_account(registration)
        if not only_return_existing:
            context.vars[account_var] = acme_client.new_account(registration)
        else:
            context.vars[account_var] = acme_client.query_registration(
                acme_client.net.account
            )
    except Exception as exp:
        logger.error(f"Failed to register: {exp}", exc_info=True)
        context.vars["error"] = exp
@@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var:
    )


@then("I find the existing ACME account without EAB as {account_var}")
def step_impl(context: Context, account_var: str):
    acme_client = context.acme_client
    # registration = messages.RegistrationResource.from_json(dict(uri=""))
    registration = acme_client.net.account
    try:
        context.vars[account_var] = acme_client.query_registration(registration)
    except Exception as exp:
        context.vars["error"] = exp


@then("I register a new ACME account with email {email} without EAB")
def step_impl(context: Context, email: str):
    acme_client = context.acme_client
@@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str):
    )


@then("the value {var_path} should be absent")
def step_impl(context: Context, var_path: str):
    try:
        value = eval_var(context, var_path)
    except Exception as exp:
        if isinstance(exp, KeyError):
            return
        raise
    assert False, (
        f"value at {var_path!r} should be absent, but we got this instead: {value!r}"
    )


@then('the value {var_path} with jq "{jq_query}" should be equal to {expected}')
def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
    value, result = apply_value_with_jq(
@@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, expected: str):


@then('the value {var_path} with jq "{jq_query}" should match pattern {regex}')
def step_impl(context: Context, var_path: str, jq_query: str, regex: str):
    actual_regex = replace_vars(regex, context.vars)
    value, result = apply_value_with_jq(
        context=context,
        var_path=var_path,
        jq_query=jq_query,
    )
    assert re.match(replace_vars(regex, context.vars), result), (
        f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}"
    assert re.match(actual_regex, result), (
        f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}"
    )
@@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields

ACC_KEY_BITS = 2048
ACC_KEY_PUBLIC_EXPONENT = 65537
NOCK_API_PREFIX = "/api/__bdd_nock__"
logger = logging.getLogger(__name__)
faker = Faker()

@@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict:
def define_nock(context: Context, definitions: list[dict]):
    jwt_token = context.vars["AUTH_TOKEN"]
    response = context.http_client.post(
        "/api/v1/bdd-nock/define",
        f"{NOCK_API_PREFIX}/define",
        headers=dict(authorization="Bearer {}".format(jwt_token)),
        json=dict(definitions=definitions),
    )

@@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]):
def restore_nock(context: Context):
    jwt_token = context.vars["AUTH_TOKEN"]
    response = context.http_client.post(
        "/api/v1/bdd-nock/restore",
        f"{NOCK_API_PREFIX}/restore",
        headers=dict(authorization="Bearer {}".format(jwt_token)),
        json=dict(),
    )

@@ -285,7 +286,7 @@ def restore_nock(context: Context):
def clean_all_nock(context: Context):
    jwt_token = context.vars["AUTH_TOKEN"]
    response = context.http_client.post(
        "/api/v1/bdd-nock/clean-all",
        f"{NOCK_API_PREFIX}/clean-all",
        headers=dict(authorization="Bearer {}".format(jwt_token)),
        json=dict(),
    )
@@ -1,6 +1,8 @@
{
  "watch": ["src"],
  "watch": [
    "src"
  ],
  "ext": ".ts,.js",
  "ignore": [],
  "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
  "exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
@@ -32,7 +32,7 @@
    "binary:clean": "rm -rf ./dist && rm -rf ./binary",
    "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
    "test": "echo \"Error: no test specified\" && exit 1",
    "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
    "dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine",
    "dev:docker": "nodemon",
    "build": "tsup --sourcemap",
    "build:frontend": "npm run build --prefix ../frontend",

@@ -266,4 +266,4 @@
    "zod": "^3.22.4",
    "zod-to-json-schema": "^3.24.5"
  }
}
}
@@ -0,0 +1,32 @@
import { Knex } from "knex";

import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";

const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
    const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
    const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");

    if (hasProfileId && hasPublicKeyThumbprint) {
      await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => {
        table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME });
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
    const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
    const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");

    await knex.schema.alterTable(TableName.PkiAcmeAccount, async () => {
      if (hasProfileId && hasPublicKeyThumbprint) {
        await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex);
      }
    });
  }
}
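With this unique constraint in place, two concurrent new-account requests using the same JWK can race past the service-level "find existing account" check, and the second insert now fails at the database layer — this is the race the later "TODO: handle unique constraint violation error" comment refers to. A minimal sketch of how that TODO could be addressed, assuming the node-postgres driver (where unique violations surface as error code 23505); the wrapper name is hypothetical:

  const createAccountOrReturnExisting = async () => {
    try {
      return await acmeAccountDAL.create({ profileId: profile.id, alg /* ... */ });
    } catch (err) {
      // 23505 = unique_violation in Postgres: another request inserted the same
      // (profileId, publicKeyThumbprint) pair first, so fall back to a lookup.
      if ((err as { code?: string }).code === "23505") {
        return acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(profileId, alg, publicKeyThumbprint);
      }
      throw err;
    }
  };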
@@ -41,17 +41,15 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { credentials, projectId, account } = await server.services.pamAccount.getSessionCredentials(
        req.params.sessionId,
        req.permission
      );
      const { credentials, projectId, account, sessionStarted } =
        await server.services.pamAccount.getSessionCredentials(req.params.sessionId, req.permission);

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId,
        event: {
          type: EventType.PAM_SESSION_START,
          type: EventType.PAM_SESSION_CREDENTIALS_GET,
          metadata: {
            sessionId: req.params.sessionId,
            accountName: account.name

@@ -59,6 +57,21 @@
        }
      });

      if (sessionStarted) {
        await server.services.auditLog.createAuditLog({
          ...req.auditLogInfo,
          orgId: req.permission.orgId,
          projectId,
          event: {
            type: EventType.PAM_SESSION_START,
            metadata: {
              sessionId: req.params.sessionId,
              accountName: account.name
            }
          }
        });
      }

      return { credentials: credentials as z.infer<typeof SessionCredentialsSchema> };
    }
  });
@@ -186,6 +186,7 @@ export enum EventType {
  CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
  UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
  GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
  GET_TOKEN_IDENTITY_TOKEN_AUTH = "get-token-identity-token-auth",

  ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
  UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",

@@ -535,6 +536,7 @@ export enum EventType {
  DASHBOARD_GET_SECRET_VALUE = "dashboard-get-secret-value",
  DASHBOARD_GET_SECRET_VERSION_VALUE = "dashboard-get-secret-version-value",

  PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get",
  PAM_SESSION_START = "pam-session-start",
  PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update",
  PAM_SESSION_END = "pam-session-end",

@@ -1029,6 +1031,15 @@ interface GetTokensIdentityTokenAuthEvent {
  };
}

interface GetTokenIdentityTokenAuthEvent {
  type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH;
  metadata: {
    identityId: string;
    identityName: string;
    tokenId: string;
  };
}

interface AddIdentityTokenAuthEvent {
  type: EventType.ADD_IDENTITY_TOKEN_AUTH;
  metadata: {

@@ -3978,6 +3989,14 @@ interface OrgRoleDeleteEvent {
  };
}

interface PamSessionCredentialsGetEvent {
  type: EventType.PAM_SESSION_CREDENTIALS_GET;
  metadata: {
    sessionId: string;
    accountName: string;
  };
}

interface PamSessionStartEvent {
  type: EventType.PAM_SESSION_START;
  metadata: {

@@ -4214,6 +4233,7 @@ export type Event =
  | CreateTokenIdentityTokenAuthEvent
  | UpdateTokenIdentityTokenAuthEvent
  | GetTokensIdentityTokenAuthEvent
  | GetTokenIdentityTokenAuthEvent
  | AddIdentityTokenAuthEvent
  | UpdateIdentityTokenAuthEvent
  | GetIdentityTokenAuthEvent

@@ -4531,6 +4551,7 @@ export type Event =
  | OrgRoleCreateEvent
  | OrgRoleUpdateEvent
  | OrgRoleDeleteEvent
  | PamSessionCredentialsGetEvent
  | PamSessionStartEvent
  | PamSessionLogsUpdateEvent
  | PamSessionEndEvent
@@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => {
};

export const setupLicenseRequestWithStore = () => {};

export const getLicenseKeyConfig = () => {
  return {
    isValid: false
  };
};
@@ -1,13 +1,56 @@
import axios, { AxiosError } from "axios";

import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";

import { TFeatureSet } from "./license-types";
import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types";

export const isOfflineLicenseKey = (licenseKey: string): boolean => {
  try {
    const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents;

    return "signature" in contents && "license" in contents;
  } catch (error) {
    return false;
  }
};

export const getLicenseKeyConfig = (
  config?: Pick<TEnvConfig, "LICENSE_KEY" | "LICENSE_KEY_OFFLINE">
): TLicenseKeyConfig => {
  const cfg = config || getConfig();

  if (!cfg) {
    return { isValid: false };
  }

  const licenseKey = cfg.LICENSE_KEY;

  if (licenseKey) {
    if (isOfflineLicenseKey(licenseKey)) {
      return { isValid: true, licenseKey, type: LicenseType.Offline };
    }

    return { isValid: true, licenseKey, type: LicenseType.Online };
  }

  const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE;

  // backwards compatibility
  if (offlineLicenseKey) {
    if (isOfflineLicenseKey(offlineLicenseKey)) {
      return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline };
    }

    return { isValid: false };
  }

  return { isValid: false };
};

export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  _id: null,
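TLicenseKeyConfig (added to license-types.ts later in this diff) is a discriminated union on isValid, so the compiler forces callers to check the flag before touching licenseKey or type. A minimal usage sketch:

  const cfg = getLicenseKeyConfig();
  if (cfg.isValid) {
    // narrowed to { isValid: true; licenseKey: string; type: LicenseType }
    logger.info(`license type: ${cfg.type}`);
  } else {
    // narrowed to { isValid: false }; accessing cfg.licenseKey here would not compile
  }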
@@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns";
import {
  InstanceType,
  LicenseType,
  TAddOrgPmtMethodDTO,
  TAddOrgTaxIdDTO,
  TCreateOrgPortalSession,

@@ -77,6 +78,7 @@ export const licenseServiceFactory = ({
  let instanceType = InstanceType.OnPrem;
  let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
  let selfHostedLicense: TOfflineLicense | null = null;
  const licenseKeyConfig = getLicenseKeyConfig(envConfig);

  const licenseServerCloudApi = setupLicenseRequestWithStore(
    envConfig.LICENSE_SERVER_URL || "",

@@ -85,10 +87,13 @@ export const licenseServiceFactory = ({
    envConfig.INTERNAL_REGION
  );

  const onlineLicenseKey =
    licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : "";

  const licenseServerOnPremApi = setupLicenseRequestWithStore(
    envConfig.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_ON_PREM_LOGIN,
    envConfig.LICENSE_KEY || "",
    onlineLicenseKey,
    envConfig.INTERNAL_REGION
  );

@@ -131,7 +136,7 @@ export const licenseServiceFactory = ({
      return;
    }

    if (envConfig.LICENSE_KEY) {
    if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) {
      const token = await licenseServerOnPremApi.refreshLicense();
      if (token) {
        await syncLicenseKeyOnPremFeatures(true);

@@ -142,10 +147,10 @@ export const licenseServiceFactory = ({
      return;
    }

    if (envConfig.LICENSE_KEY_OFFLINE) {
    if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) {
      let isValidOfflineLicense = true;
      const contents: TOfflineLicenseContents = JSON.parse(
        Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
        Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8")
      );
      const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature);

@@ -184,7 +189,7 @@ export const licenseServiceFactory = ({
  };

  const initializeBackgroundSync = async () => {
    if (envConfig.LICENSE_KEY) {
    if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) {
      logger.info("Setting up background sync process for refresh onPremFeatures");
      const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
      job.start();

@@ -136,3 +136,18 @@ export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string };
export type TOrgInvoiceDTO = TOrgPermission;

export type TOrgLicensesDTO = TOrgPermission;

export enum LicenseType {
  Offline = "offline",
  Online = "online"
}

export type TLicenseKeyConfig =
  | {
      isValid: false;
    }
  | {
      isValid: true;
      licenseKey: string;
      type: LicenseType;
    };
@@ -668,11 +668,6 @@ export const pamAccountServiceFactory = ({
      throw new BadRequestError({ message: "Session has ended or expired" });
    }

    // Verify that the session has not already had credentials fetched
    if (session.status !== PamSessionStatus.Starting) {
      throw new BadRequestError({ message: "Session has already been started" });
    }

    const account = await pamAccountDAL.findById(session.accountId);
    if (!account) throw new NotFoundError({ message: `Account with ID '${session.accountId}' not found` });

@@ -689,11 +684,16 @@ export const pamAccountServiceFactory = ({

    const decryptedResource = await decryptResource(resource, session.projectId, kmsService);

    let sessionStarted = false;

    // Mark session as started
    await pamSessionDAL.updateById(sessionId, {
      status: PamSessionStatus.Active,
      startedAt: new Date()
    });
    if (session.status === PamSessionStatus.Starting) {
      await pamSessionDAL.updateById(sessionId, {
        status: PamSessionStatus.Active,
        startedAt: new Date()
      });
      sessionStarted = true;
    }

    return {
      credentials: {

@@ -701,7 +701,8 @@ export const pamAccountServiceFactory = ({
        ...decryptedAccount.credentials
      },
      projectId: project.id,
      account
      account,
      sessionStarted
    };
  };
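The net effect is that fetching session credentials becomes idempotent: only the first call flips the session from Starting to Active and returns sessionStarted = true, which the router change above uses to emit PAM_SESSION_START exactly once, while PAM_SESSION_CREDENTIALS_GET is logged on every fetch. A sketch of the expected call sequence (caller names illustrative):

  const first = await pamAccountService.getSessionCredentials(sessionId, permission);
  // first.sessionStarted === true  -> both CREDENTIALS_GET and SESSION_START are audited
  const second = await pamAccountService.getSessionCredentials(sessionId, permission);
  // second.sessionStarted === false -> only CREDENTIALS_GET is audited; no more 400 "already started"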
@@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({
    // Notice: we are inside a transaction, and ideally we should not hold a transaction while
    // performing a long-running operation. But assuming we are not performing tons of
    // challenge validations at the same time, it should be fine.
    const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) });
    const challengeResponse = await fetch(challengeUrl, {
      // If we override the host in development mode, still provide the original host in the header
      // to help the upstream server validate the request
      headers: { Host: host },
      signal: AbortSignal.timeout(timeoutMs)
    });
    if (challengeResponse.status !== 200) {
      throw new AcmeIncorrectResponseError({
        message: `ACME challenge response is not 200: ${challengeResponse.status}`
@@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({
  newNonce: z.string(),
  newAccount: z.string(),
  newOrder: z.string(),
  revokeCert: z.string().optional()
  revokeCert: z.string().optional(),
  meta: z
    .object({
      termsOfService: z.string().optional(),
      website: z.string().optional(),
      caaIdentities: z.array(z.string()).optional(),
      externalAccountRequired: z.boolean().optional()
    })
    .optional()
});

// New Account payload schema
@@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({
    const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result;
    try {
      const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader);
      if (protectedHeader.jwk && protectedHeader.kid) {
        throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" });
      }
      const parsedUrl = (() => {
        try {
          return new URL(protectedHeader.url);

@@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({
      url,
      rawJwsPayload,
      getJWK: async (protectedHeader) => {
        // get jwk instead of kid
        if (!protectedHeader.kid) {
          throw new AcmeMalformedError({ message: "KID is required in the protected header" });
        }

@@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({
    return {
      newNonce: buildUrl(profile.id, "/new-nonce"),
      newAccount: buildUrl(profile.id, "/new-account"),
      newOrder: buildUrl(profile.id, "/new-order")
      newOrder: buildUrl(profile.id, "/new-order"),
      meta: {
        externalAccountRequired: true
      }
    };
  };
@@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({
    payload: TCreateAcmeAccountPayload;
  }): Promise<TAcmeResponse<TCreateAcmeAccountResponse>> => {
    const profile = await validateAcmeProfile(profileId);
    const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");

    const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
      profileId,
      alg,
      publicKeyThumbprint
    );
    if (onlyReturnExisting) {
      if (!existingAccount) {
        throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
      }
      return {
        status: 200,
        body: {
          status: "valid",
          contact: existingAccount.emails,
          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
        },
        headers: {
          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
        }
      };
    }

    // Note: We only check EAB for the new account request. This is a very special case for cert-manager.
    // There's a bug in their ACME client implementation: they don't keep the account KID value they have,
    // and instead rely on a '{"onlyReturnExisting": true}' new-account request to find out their KID value.
    // The problem is that this new-account request doesn't come with EAB. When the get-existing-account
    // operation fails, they just discard the error and proceed to request a new order. Since no KID is
    // provided, their ACME client sends JWK instead. As a result, we see a "KID not provided in header"
    // error at the new-order endpoint.
    //
    // To solve the problem, we loosen the EAB check a bit for the onlyReturnExisting new-account request.
    // It should be fine, as we already checked EAB when the account was created, and private key
    // ownership indicates it is the same user.
    // ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925
    if (!externalAccountBinding) {
      throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
    }
    if (existingAccount) {
      return {
        status: 200,
        body: {
          status: "valid",
          contact: existingAccount.emails,
          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
        },
        headers: {
          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
        }
      };
    }

    const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
    const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
      projectId: profile.projectId,
      projectDAL,

@@ -441,30 +498,7 @@
      });
    }

    const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
      profileId,
      alg,
      publicKeyThumbprint
    );
    if (onlyReturnExisting && !existingAccount) {
      throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
    }
    if (existingAccount) {
      // With the same public key, we found an existing account, just return it
      return {
        status: 200,
        body: {
          status: "valid",
          contact: existingAccount.emails,
          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
        },
        headers: {
          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
        }
      };
    }

    // TODO: handle unique constraint violation error, should be very very rare
    const newAccount = await acmeAccountDAL.create({
      profileId: profile.id,
      alg,
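For context, the reordering above implements the RFC 8555 §7.3.1 account-lookup flow the cert-manager note describes: a client POSTs to new-account with "onlyReturnExisting": true, and the server must either return the existing account (its URL in the Location header becomes the client's KID) or fail with urn:ietf:params:acme:error:accountDoesNotExist — and that lookup is now handled before the EAB check. A sketch of such a lookup request (values illustrative):

  {
    "protected": { "alg": "RS256", "jwk": { /* account public key */ }, "nonce": "...", "url": ".../new-account" },
    "payload": { "onlyReturnExisting": true }
  }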
@@ -584,6 +584,10 @@ export const TOKEN_AUTH = {
    offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
    limit: "The number of tokens to return."
  },
  GET_TOKEN: {
    identityId: "The ID of the machine identity to get the token for.",
    tokenId: "The ID of the token to get metadata for."
  },
  CREATE_TOKEN: {
    identityId: "The ID of the machine identity to create the token for.",
    name: "The name of the token to create."
@@ -400,7 +400,7 @@ const envSchema = z
    isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE,
    isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
    isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
    isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED,
    isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED,
    REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
      ?.split(",")
      .map((el) => {
backend/src/server/routes/bdd/bdd-nock-router.dev.ts (new file, 104 lines)
@@ -0,0 +1,104 @@
import type { Definition } from "nock";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

// When running in production, we don't want to even import nock, because it's not needed and it increases memory usage a lot.
// It once caused an outage in the production environment.
// This is why we would rather crash the app if it's not in development mode (in that case, Kubernetes should stop it from rolling out).
if (process.env.NODE_ENV === "production") {
  throw new Error("BDD Nock API can only be enabled in development or test mode");
}

export const registerBddNockRouter = async (server: FastifyZodProvider) => {
  const appCfg = getConfig();
  const importNock = async () => {
    // eslint-disable-next-line import/no-extraneous-dependencies
    const { default: nock } = await import("nock");
    return nock;
  };

  const checkIfBddNockApiEnabled = () => {
    // Note: this API is only available in development mode and only for BDD tests.
    // This endpoint should NEVER BE ENABLED IN PRODUCTION!
    if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) {
      throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
    }
  };

  server.route({
    method: "POST",
    url: "/define",
    schema: {
      body: z.object({ definitions: z.unknown().array() }),
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      checkIfBddNockApiEnabled();
      const { body } = req;
      const { definitions } = body;
      logger.info(definitions, "Defining nock");
      const processedDefinitions = definitions.map((definition: unknown) => {
        const { path, ...rest } = definition as Definition;
        return {
          ...rest,
          path:
            path !== undefined && typeof path === "string"
              ? path
              : new RegExp((path as unknown as { regex: string }).regex ?? "")
        } as Definition;
      });

      const nock = await importNock();
      nock.define(processedDefinitions);
      // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
      if (!nock.isActive()) {
        nock.activate();
      }
      return { status: "ok" };
    }
  });

  server.route({
    method: "POST",
    url: "/clean-all",
    schema: {
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async () => {
      checkIfBddNockApiEnabled();
      logger.info("Cleaning all nocks");
      const nock = await importNock();
      nock.cleanAll();
      return { status: "ok" };
    }
  });

  server.route({
    method: "POST",
    url: "/restore",
    schema: {
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async () => {
      checkIfBddNockApiEnabled();
      logger.info("Restore network requests from nock");
      const nock = await importNock();
      nock.restore();
      return { status: "ok" };
    }
  });
};
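For reference, the definitions accepted by the /define endpoint follow nock's recorded-definition shape (scope/method/path/status/response). A sketch of the kind of body the Python define_nock helper above might POST — the mocked URL is illustrative:

  {
    "definitions": [
      {
        "scope": "https://example.com",
        "method": "GET",
        "path": "/.well-known/acme-challenge/some-token",
        "status": 200,
        "response": { "ok": true }
      }
    ]
  }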
backend/src/server/routes/bdd/bdd-nock-router.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
export const registerBddNockRouter = async () => {
  // This route is only available in development or test mode.
  // The actual implementation is in the .dev.ts file; the import will be aliased to that file in development or test mode.
  // If somehow we try to enable it in production, we throw an error instead.
  throw new Error("BDD Nock should not be enabled in production");
};
@@ -1,3 +1,4 @@
import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router";
import { CronJob } from "cron";
import { Knex } from "knex";
import { monitorEventLoopDelay } from "perf_hooks";

@@ -2431,6 +2432,7 @@ export const registerRoutes = async (
    }
  }

  await kmsService.startService(hsmStatus);
  await telemetryQueue.startTelemetryCheck();
  await telemetryQueue.startAggregatedEventsJob();
  await dailyResourceCleanUp.init();

@@ -2443,7 +2445,6 @@ export const registerRoutes = async (
  await pkiSubscriberQueue.startDailyAutoRenewalJob();
  await pkiAlertV2Queue.init();
  await certificateV3Queue.init();
  await kmsService.startService(hsmStatus);
  await microsoftTeamsService.start();
  await dynamicSecretQueueService.init();
  await eventBusService.init();

@@ -2698,6 +2699,12 @@ export const registerRoutes = async (
  await server.register(registerV3Routes, { prefix: "/api/v3" });
  await server.register(registerV4Routes, { prefix: "/api/v4" });

  // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests.
  // This route should NEVER BE ENABLED IN PRODUCTION!
  if (getConfig().isBddNockApiEnabled) {
    await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" });
  }

  server.addHook("onClose", async () => {
    cronJobs.forEach((job) => job.stop());
    await telemetryService.flushAll();
@@ -9,6 +9,8 @@ import {
  SuperAdminSchema,
  UsersSchema
} from "@app/db/schemas";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { LicenseType } from "@app/ee/services/license/license-types";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";

@@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
      const config = await getServerCfg();
      const serverEnvs = getConfig();

      const licenseKeyConfig = getLicenseKeyConfig();
      const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;

      return {
        config: {
          ...config,

@@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
          isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
          kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
          paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
          isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
          isOfflineUsageReportsEnabled: hasOfflineLicense
        }
      };
    }
@@ -1,87 +0,0 @@
// import { z } from "zod";

// import { getConfig } from "@app/lib/config/env";
// import { ForbiddenRequestError } from "@app/lib/errors";
// import { logger } from "@app/lib/logger";
// import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
// import { AuthMode } from "@app/services/auth/auth-type";

// export const registerBddNockRouter = async (server: FastifyZodProvider) => {
//   const checkIfBddNockApiEnabled = () => {
//     const appCfg = getConfig();
//     // Note: Please note that this API is only available in development mode and only for BDD tests.
//     // This endpoint should NEVER BE ENABLED IN PRODUCTION!
//     if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) {
//       throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
//     }
//   };

//   server.route({
//     method: "POST",
//     url: "/define",
//     schema: {
//       body: z.object({ definitions: z.unknown().array() }),
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async (req) => {
//       checkIfBddNockApiEnabled();
//       const { body } = req;
//       const { definitions } = body;
//       logger.info(definitions, "Defining nock");
//       const processedDefinitions = definitions.map((definition: unknown) => {
//         const { path, ...rest } = definition as Definition;
//         return {
//           ...rest,
//           path:
//             path !== undefined && typeof path === "string"
//               ? path
//               : new RegExp((path as unknown as { regex: string }).regex ?? "")
//         } as Definition;
//       });

//       nock.define(processedDefinitions);
//       // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
//       if (!nock.isActive()) {
//         nock.activate();
//       }
//       return { status: "ok" };
//     }
//   });

//   server.route({
//     method: "POST",
//     url: "/clean-all",
//     schema: {
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async () => {
//       checkIfBddNockApiEnabled();
//       logger.info("Cleaning all nocks");
//       nock.cleanAll();
//       return { status: "ok" };
//     }
//   });

//   server.route({
//     method: "POST",
//     url: "/restore",
//     schema: {
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async () => {
//       checkIfBddNockApiEnabled();
//       logger.info("Restore network requests from nock");
//       nock.restore();
//       return { status: "ok" };
//     }
//   });
// };
@@ -314,7 +314,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
            accessToken: z.string(),
            expiresIn: z.coerce.number(),
            accessTokenMaxTTL: z.coerce.number(),
            tokenType: z.literal("Bearer")
            tokenType: z.literal("Bearer"),
            tokenData: IdentityAccessTokensSchema
          })
        }
      },

@@ -346,7 +347,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
        accessToken,
        tokenType: "Bearer" as const,
        expiresIn: identityTokenAuth.accessTokenTTL,
        accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL
        accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL,
        tokenData: identityAccessToken
      };
    }
  });
@@ -406,6 +408,60 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
    }
  });

  server.route({
    method: "GET",
    url: "/token-auth/identities/:identityId/tokens/:tokenId",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      tags: [ApiDocsTags.TokenAuth],
      description: "Get token for machine identity with Token Auth",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        identityId: z.string().describe(TOKEN_AUTH.GET_TOKEN.identityId),
        tokenId: z.string().describe(TOKEN_AUTH.GET_TOKEN.tokenId)
      }),
      response: {
        200: z.object({
          token: IdentityAccessTokensSchema
        })
      }
    },
    handler: async (req) => {
      const { token, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokenById({
        identityId: req.params.identityId,
        tokenId: req.params.tokenId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        isActorSuperAdmin: isSuperAdmin(req.auth)
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: identityMembershipOrg.scopeOrgId,
        event: {
          type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH,
          metadata: {
            identityId: token.identityId,
            identityName: identityMembershipOrg.identity.name,
            tokenId: token.id
          }
        }
      });

      return { token };
    }
  });

  server.route({
    method: "PATCH",
    url: "/token-auth/tokens/:tokenId",
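A quick usage sketch for the new read endpoint; the /api/v1/auth prefix is an assumption about where this router is mounted, and the IDs are placeholders:

  // Fetch a single token's metadata (the response never includes the token secret itself)
  const res = await fetch(
    `${baseUrl}/api/v1/auth/token-auth/identities/${identityId}/tokens/${tokenId}`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );
  const { token } = (await res.json()) as { token: unknown }; // shaped by IdentityAccessTokensSchema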
@@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/

import { registerAdminRouter } from "./admin-router";
import { registerAuthRoutes } from "./auth-router";
// import { registerBddNockRouter } from "./bdd-nock-router";
import { registerProjectBotRouter } from "./bot-router";
import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";

@@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {

  await server.register(registerEventRouter, { prefix: "/events" });
  await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" });

  // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests.
  // This route should NEVER BE ENABLED IN PRODUCTION!
  // if (getConfig().isBddNockApiEnabled) {
  //   await server.register(registerBddNockRouter, { prefix: "/bdd-nock" });
  // }
};
@@ -428,7 +428,13 @@ describe("CertificateProfileService", () => {
        service.createProfile({
          ...mockActor,
          projectId: "project-123",
          data: validProfileData
          data: {
            ...validProfileData,
            enrollmentType: EnrollmentType.ACME,
            acmeConfig: {},
            apiConfig: undefined,
            estConfig: undefined
          }
        })
      ).rejects.toThrowError(
        new BadRequestError({
@@ -18,7 +18,6 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
      .where(filter)
      .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
      .select(selectAllTableCols(TableName.IdentityAccessToken))
      .select(db.ref("name").withSchema(TableName.Identity))
      .select(db.ref("orgId").withSchema(TableName.Identity).as("identityScopeOrgId"))
      .first();
@@ -38,6 +38,7 @@ import {
  TAttachTokenAuthDTO,
  TCreateTokenAuthTokenDTO,
  TGetTokenAuthDTO,
  TGetTokenAuthTokenByIdDTO,
  TGetTokenAuthTokensDTO,
  TRevokeTokenAuthDTO,
  TRevokeTokenAuthTokenDTO,
@@ -618,6 +619,52 @@ export const identityTokenAuthServiceFactory = ({
    return { tokens, identityMembershipOrg };
  };

  const getTokenAuthTokenById = async ({
    tokenId,
    identityId,
    isActorSuperAdmin,
    actorId,
    actor,
    actorAuthMethod,
    actorOrgId
  }: TGetTokenAuthTokenByIdDTO) => {
    await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);

    const identityMembershipOrg = await membershipIdentityDAL.getIdentityById({
      scopeData: {
        scope: AccessScope.Organization,
        orgId: actorOrgId
      },
      identityId
    });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have Token Auth"
      });
    }
    const { permission } = await permissionService.getOrgPermission({
      scope: OrganizationActionScope.Any,
      actor,
      actorId,
      orgId: identityMembershipOrg.scopeOrgId,
      actorAuthMethod,
      actorOrgId
    });
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);

    const token = await identityAccessTokenDAL.findOne({
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
      [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH,
      [`${TableName.IdentityAccessToken}.identityId` as "identityId"]: identityId
    });

    if (!token) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });

    return { token, identityMembershipOrg };
  };

  const updateTokenAuthToken = async ({
    tokenId,
    name,

@@ -797,6 +844,7 @@ export const identityTokenAuthServiceFactory = ({
    revokeIdentityTokenAuth,
    createTokenAuthToken,
    getTokenAuthTokens,
    getTokenAuthTokenById,
    updateTokenAuthToken,
    revokeTokenAuthToken
  };
@@ -40,6 +40,12 @@ export type TGetTokenAuthTokensDTO = {
  isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TGetTokenAuthTokenByIdDTO = {
  tokenId: string;
  identityId: string;
  isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateTokenAuthTokenDTO = {
  tokenId: string;
  name?: string;
@@ -1,7 +1,8 @@
import crypto from "crypto";

import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { LicenseType } from "@app/ee/services/license/license-types";
import { BadRequestError } from "@app/lib/errors";

import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal";

@@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({
  };

  const generateUsageReportCSV = async () => {
    const cfg = getConfig();
    if (!cfg.LICENSE_KEY_OFFLINE) {
    const licenseKeyConfig = getLicenseKeyConfig();
    const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;

    if (!hasOfflineLicense) {
      throw new BadRequestError({
        message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured."
        message:
          "Offline usage reports are not enabled. Usage reports are only available for self-hosted offline instances"
      });
    }
backend/tsconfig.dev.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "paths": {
      "@app/*": ["./src/*"],
      "@bdd_routes/bdd-nock-router": ["./src/server/routes/bdd/bdd-nock-router.dev.ts"]
    }
  }
}
@@ -24,7 +24,8 @@
    "skipLibCheck": true,
    "baseUrl": ".",
    "paths": {
      "@app/*": ["./src/*"]
      "@app/*": ["./src/*"],
      "@bdd_routes/*": ["./src/server/routes/bdd/*"]
    },
    "jsx": "react-jsx"
  },
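Taken together, these path mappings implement a compile-time module swap for the dev-only nock router. The import site always reads:

  import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router";

The base tsconfig resolves @bdd_routes/* to the production stub (which throws on registration), while tsconfig.dev.json, both vitest configs, and nodemon's --tsconfig flag resolve the same specifier to bdd-nock-router.dev.ts; the tsup entry exclusion below ("!./src/**/*.dev.ts") keeps the dev implementation out of production bundles entirely.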
@@ -2,8 +2,8 @@
import path from "node:path";

import fs from "fs/promises";
import {replaceTscAliasPaths} from "tsc-alias";
import {defineConfig} from "tsup";
import { replaceTscAliasPaths } from "tsc-alias";
import { defineConfig } from "tsup";

// Instead of using tsx or tsc for building, consider using tsup.
// TSX serves as an alternative to Node.js, allowing you to build directly on the Node.js runtime.

@@ -29,7 +29,7 @@ export default defineConfig({
  external: ["../../../frontend/node_modules/next/dist/server/next-server.js"],
  outDir: "dist",
  tsconfig: "./tsconfig.json",
  entry: ["./src"],
  entry: ["./src", "!./src/**/*.dev.ts"],
  sourceMap: true,
  skipNodeModulesBundle: true,
  esbuildPlugins: [

@@ -45,22 +45,22 @@ export default defineConfig({
          const isRelativePath = args.path.startsWith(".");
          const absPath = isRelativePath
            ? path.join(args.resolveDir, args.path)
            : path.join(args.path.replace("@app", "./src"));
            : path.join(args.path.replace("@app", "./src").replace("@bdd_routes", "./src/server/routes/bdd"));

          const isFile = await fs
            .stat(`${absPath}.ts`)
            .then((el) => el.isFile)
            .catch(async (err) => {
              if (err.code === "ENOTDIR") {
                return true;
              }
            .catch(async (err) => {
              if (err.code === "ENOTDIR") {
                return true;
              }

              // If .ts file doesn't exist, try checking for .tsx file
              return fs
                .stat(`${absPath}.tsx`)
                .then((el) => el.isFile)
                .catch((err) => err.code === "ENOTDIR");
            });
              // If .ts file doesn't exist, try checking for .tsx file
              return fs
                .stat(`${absPath}.tsx`)
                .then((el) => el.isFile)
                .catch((err) => err.code === "ENOTDIR");
            });

          return {
            path: isFile ? `${args.path}.mjs` : `${args.path}/index.mjs`,
@@ -28,7 +28,8 @@ export default defineConfig({
  },
  resolve: {
    alias: {
      "@app": path.resolve(__dirname, "./src")
      "@app": path.resolve(__dirname, "./src"),
      "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
    }
  }
});
@@ -11,7 +11,8 @@ export default defineConfig({
  },
  resolve: {
    alias: {
      "@app": path.resolve(__dirname, "./src")
      "@app": path.resolve(__dirname, "./src"),
      "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
    }
  }
});
@@ -71,6 +71,7 @@ services:
    ports:
      - 4000:4000
      - 9464:9464 # for OTEL collection of Prometheus metrics
      - 9229:9229 # For debugger access
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -43,6 +43,9 @@ description: "Learn how to configure a Vercel Sync for Infisical."
- **Overwrite Destination Secrets**: Removes any secrets at the destination endpoint not present in Infisical.
- **Import Secrets (Prioritize Infisical)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Infisical over Vercel when keys conflict.
- **Import Secrets (Prioritize Vercel)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Vercel over Infisical when keys conflict.
  <Note>
    Vercel does not expose the values of [sensitive environment variables](https://vercel.com/docs/environment-variables/sensitive-environment-variables), so Infisical cannot import them during the initial sync. As a result, these secrets are created in Infisical with empty values. After the first sync, you'll need to manually re-enter their values in Infisical to ensure both platforms stay aligned.
  </Note>
- **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment.
  <Note>
    We highly recommend using a Key Schema to ensure that Infisical only manages the specific keys you intend, keeping everything else untouched.
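A minimal sketch of how such a template could be applied, assuming simple placeholder substitution (the actual sync implementation is not shown in this diff):

```ts
// Illustrative only: replace the {{secretKey}} and {{environment}}
// placeholders with concrete values to produce the synced key name.
function applyKeySchema(schema: string, secretKey: string, environment: string): string {
  return schema.replaceAll("{{secretKey}}", secretKey).replaceAll("{{environment}}", environment);
}

// With a schema of "INFISICAL_{{environment}}_{{secretKey}}", only keys carrying
// this prefix are treated as Infisical-managed, leaving other Vercel variables untouched.
console.log(applyKeySchema("INFISICAL_{{environment}}_{{secretKey}}", "DB_URL", "PROD"));
// -> "INFISICAL_PROD_DB_URL"
```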
@@ -149,4 +152,5 @@ description: "Learn how to configure a Vercel Sync for Infisical."
}
```
</Tab>

</Tabs>
@@ -118,6 +118,22 @@ var _ = await sdk.Auth().UniversalAuth().LoginAsync(
- `clientId` (string): The client ID of your Machine Identity.
- `clientSecret` (string): The client secret of your Machine Identity.

### LDAP Auth

#### Authenticating
```cs
var _ = await sdk.Auth().LdapAuth().LoginAsync(
    "IDENTITY_ID",
    "USERNAME",
    "PASSWORD"
);
```

**Parameters:**
- `identityId` (string): The ID of your Machine Identity.
- `username` (string): The LDAP username for authentication.
- `password` (string): The LDAP password for authentication.

### `Secrets()`

The `Secrets()` sub-class handles operations related to the Infisical secrets management product.
@@ -284,6 +284,114 @@ if err != nil {
}
```

#### JWT Auth

<Info>
  Please note that this authentication method requires a valid JWT token from your JWT issuer. Please [read
  more](/documentation/platform/identities/jwt-auth) about this authentication
  method.
</Info>

**Using the SDK**

```go
credential, err := client.Auth().JwtAuthLogin("MACHINE_IDENTITY_ID", "JWT_TOKEN")

if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```

#### LDAP Auth

<Info>
  Please note that this authentication method requires LDAP credentials. Please [read
  more](/documentation/platform/identities/ldap-auth/general) about this authentication
  method.
</Info>

**Using environment variables**

You can set the `INFISICAL_LDAP_AUTH_IDENTITY_ID` environment variable and pass an empty string for the identity ID:

```go
credential, err := client.Auth().LdapAuthLogin("", "LDAP_USERNAME", "LDAP_PASSWORD")

if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```

**Using the SDK directly**

```go
credential, err := client.Auth().LdapAuthLogin("MACHINE_IDENTITY_ID", "LDAP_USERNAME", "LDAP_PASSWORD")

if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```

#### OCI Auth

<Info>
  Please note that this authentication method will only work if you're running
  your application on Oracle Cloud Infrastructure. Please [read
  more](/documentation/platform/identities/oci-auth) about this authentication
  method.
</Info>

**Using environment variables**

You can set the `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable and omit the `IdentityID` field:

```go
credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{
  UserID:      "USER_OCID",
  TenancyID:   "TENANCY_OCID",
  Fingerprint: "FINGERPRINT",
  PrivateKey:  "PRIVATE_KEY",
  Region:      "REGION",
})

if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```

**Using the SDK directly**

```go
credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{
  IdentityID:  "MACHINE_IDENTITY_ID",
  UserID:      "USER_OCID",
  TenancyID:   "TENANCY_OCID",
  Fingerprint: "FINGERPRINT",
  PrivateKey:  "PRIVATE_KEY",
  Region:      "REGION",
  Passphrase:  nil, // Optional: pointer to string if your private key has a passphrase
})

if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```

**OciAuthLoginOptions fields:**

- `IdentityID` (string) - Your Infisical Machine Identity ID. Can be set via the `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable.
- `UserID` (string) - Your OCI user OCID.
- `TenancyID` (string) - Your OCI tenancy OCID.
- `Fingerprint` (string) - Your OCI API key fingerprint.
- `PrivateKey` (string) - Your OCI private key (PEM format).
- `Region` (string) - Your OCI region (e.g., `us-ashburn-1`).
- `Passphrase` (*string) - Optional: pointer to passphrase string if your private key is encrypted.

## Secrets

### List Secrets
@@ -14,14 +14,13 @@ This guide walks through how you can use these paid features on a self-hosted in
    Once purchased, you will be issued a license key.
  </Step>
  <Step title="Activate the license">
    Depending on whether or not the environment where Infisical is deployed has internet access, you may be issued a regular license or an offline license.
    Set your license key as the value of the **LICENSE_KEY** environment variable within your Infisical instance.

    <Tabs>
      <Tab title="Regular License">
        - Assign the issued license key to the `LICENSE_KEY` environment variable in your Infisical instance.

        - Your Infisical instance will need to communicate with the Infisical license server to validate the license key.
        - Your Infisical instance will need to communicate with the Infisical license server to validate the license key.
          If you want to limit outgoing connections only to the Infisical license server, you can use the following IP addresses: `13.248.249.247` and `35.71.190.59`

        <Note>
@@ -29,16 +28,18 @@ This guide walks through how you can use these paid features on a self-hosted in
        </Note>
      </Tab>
      <Tab title="Offline License">
        - Assign the issued license key to the `LICENSE_KEY_OFFLINE` environment variable in your Infisical instance.
        - Assign the issued offline license key to the `LICENSE_KEY` environment variable in your Infisical instance.

        - The system will automatically detect that it's an offline license based on the key format.

        <Note>
          How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions.
          While the LICENSE_KEY_OFFLINE environment variable continues to be supported for compatibility with existing configurations, we recommend transitioning to LICENSE_KEY for all license types going forward.
        </Note>
      </Tab>
    </Tabs>

    Once your instance starts up, the license key will be validated and you’ll be able to use the paid features.
    Once your instance starts up, the license key will be validated and you'll be able to use the paid features.
    However, when the license expires, Infisical will continue to run, but EE features will be disabled until the license is renewed or a new one is purchased.
  </Step>
</Steps>

</Steps>
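A rough sketch of the compatibility behavior the note above describes, assuming a simple precedence rule (the exact fallback logic is not shown in this diff):

```ts
// Assumed precedence: the unified LICENSE_KEY wins; the legacy
// LICENSE_KEY_OFFLINE is only consulted when LICENSE_KEY is unset.
const licenseKey = process.env.LICENSE_KEY ?? process.env.LICENSE_KEY_OFFLINE;

if (!licenseKey) {
  console.warn("No license key configured; EE features remain disabled.");
}
```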
@@ -262,24 +262,25 @@ export const eventToNameMap: { [K in EventType]: string } = {
  [EventType.UPDATE_IDENTITY_PROJECT_MEMBERSHIP]: "Update Identity Project Membership",
  [EventType.DELETE_IDENTITY_PROJECT_MEMBERSHIP]: "Delete Identity Project Membership",

  [EventType.PAM_SESSION_START]: "PAM Session Start",
  [EventType.PAM_SESSION_LOGS_UPDATE]: "PAM Session Logs Update",
  [EventType.PAM_SESSION_END]: "PAM Session End",
  [EventType.PAM_SESSION_GET]: "PAM Session Get",
  [EventType.PAM_SESSION_LIST]: "PAM Session List",
  [EventType.PAM_FOLDER_CREATE]: "PAM Folder Create",
  [EventType.PAM_FOLDER_UPDATE]: "PAM Folder Update",
  [EventType.PAM_FOLDER_DELETE]: "PAM Folder Delete",
  [EventType.PAM_ACCOUNT_LIST]: "PAM Account List",
  [EventType.PAM_ACCOUNT_ACCESS]: "PAM Account Access",
  [EventType.PAM_ACCOUNT_CREATE]: "PAM Account Create",
  [EventType.PAM_ACCOUNT_UPDATE]: "PAM Account Update",
  [EventType.PAM_ACCOUNT_DELETE]: "PAM Account Delete",
  [EventType.PAM_RESOURCE_LIST]: "PAM Resource List",
  [EventType.PAM_RESOURCE_GET]: "PAM Resource Get",
  [EventType.PAM_RESOURCE_CREATE]: "PAM Resource Create",
  [EventType.PAM_RESOURCE_UPDATE]: "PAM Resource Update",
  [EventType.PAM_RESOURCE_DELETE]: "PAM Resource Delete",
  [EventType.PAM_SESSION_CREDENTIALS_GET]: "Get PAM Session Credentials",
  [EventType.PAM_SESSION_START]: "Start PAM Session",
  [EventType.PAM_SESSION_LOGS_UPDATE]: "Update PAM Session Logs",
  [EventType.PAM_SESSION_END]: "End PAM Session",
  [EventType.PAM_SESSION_GET]: "Get PAM Session",
  [EventType.PAM_SESSION_LIST]: "List PAM Sessions",
  [EventType.PAM_FOLDER_CREATE]: "Create PAM Folder",
  [EventType.PAM_FOLDER_UPDATE]: "Update PAM Folder",
  [EventType.PAM_FOLDER_DELETE]: "Delete PAM Folder",
  [EventType.PAM_ACCOUNT_LIST]: "List PAM Accounts",
  [EventType.PAM_ACCOUNT_ACCESS]: "Access PAM Account",
  [EventType.PAM_ACCOUNT_CREATE]: "Create PAM Account",
  [EventType.PAM_ACCOUNT_UPDATE]: "Update PAM Account",
  [EventType.PAM_ACCOUNT_DELETE]: "Delete PAM Account",
  [EventType.PAM_RESOURCE_LIST]: "List PAM Resources",
  [EventType.PAM_RESOURCE_GET]: "Get PAM Resource",
  [EventType.PAM_RESOURCE_CREATE]: "Create PAM Resource",
  [EventType.PAM_RESOURCE_UPDATE]: "Update PAM Resource",
  [EventType.PAM_RESOURCE_DELETE]: "Delete PAM Resource",

  [EventType.CREATE_CERTIFICATE_PROFILE]: "Create Certificate Profile",
  [EventType.UPDATE_CERTIFICATE_PROFILE]: "Update Certificate Profile",
@@ -314,6 +315,7 @@ const sharedProjectEvents = [
export const projectToEventsMap: Partial<Record<ProjectType, EventType[]>> = {
  [ProjectType.PAM]: [
    ...sharedProjectEvents,
    EventType.PAM_SESSION_CREDENTIALS_GET,
    EventType.PAM_SESSION_START,
    EventType.PAM_SESSION_LOGS_UPDATE,
    EventType.PAM_SESSION_END,
@@ -254,6 +254,7 @@ export enum EventType {
  UPDATE_IDENTITY_PROJECT_MEMBERSHIP = "update-identity-project-membership",
  DELETE_IDENTITY_PROJECT_MEMBERSHIP = "delete-identity-project-membership",

  PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get",
  PAM_SESSION_START = "pam-session-start",
  PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update",
  PAM_SESSION_END = "pam-session-end",
@@ -840,9 +840,7 @@ export type CreateTokenIdentityTokenAuthDTO = {

export type CreateTokenIdentityTokenAuthRes = {
  accessToken: string;
  tokenType: string;
  expiresIn: number;
  accessTokenMaxTTL: number;
  tokenData: IdentityAccessToken;
};

export type UpdateTokenIdentityTokenAuthDTO = {