Merge remote-tracking branch 'origin' into pki-v3-docs

Tuan Dang
2025-11-05 16:16:47 -08:00
628 changed files with 19299 additions and 8522 deletions

.github/values.yaml (vendored, 57 lines changed)

@@ -1,57 +0,0 @@
## @section Common parameters
##
## @param nameOverride Override release name
##
nameOverride: ""
## @param fullnameOverride Override release fullname
##
fullnameOverride: ""
## @section Infisical backend parameters
## Documentation : https://infisical.com/docs/self-hosting/deployments/kubernetes
##
infisical:
autoDatabaseSchemaMigration: false
enabled: false
name: infisical
replicaCount: 3
image:
repository: infisical/staging_infisical
tag: "latest"
pullPolicy: Always
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
kubeSecretRef: "managed-secret"
ingress:
## @param ingress.enabled Enable ingress
##
enabled: true
## @param ingress.ingressClassName Ingress class name
##
ingressClassName: nginx
## @param ingress.nginx.enabled Ingress controller
##
# nginx:
# enabled: true
## @param ingress.annotations Ingress annotations
##
annotations:
cert-manager.io/cluster-issuer: "letsencrypt-prod"
hostName: "gamma.infisical.com"
tls:
- secretName: letsencrypt-prod
hosts:
- gamma.infisical.com
postgresql:
enabled: false
redis:
enabled: false


@@ -56,7 +56,7 @@ jobs:
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.151.0" \
--namespace infisical-standalone-postgres
release:


@@ -135,10 +135,10 @@ jobs:
TAG_NAME="${{ github.ref_name }}"
echo "Checking for tag: $TAG_NAME"
EXACT_MATCH=$(gh api repos/Infisical/infisical-omnibus/git/refs/tags/$TAG_NAME | jq -r 'if type == "array" then .[].ref else .ref end' | grep -x "refs/tags/$TAG_NAME")
EXACT_MATCH=$(gh api repos/Infisical/infisical-omnibus/git/refs/tags/$TAG_NAME 2>/dev/null | jq -r 'if type == "array" then .[].ref else .ref end' | grep -x "refs/tags/$TAG_NAME" || true)
if [ "$EXACT_MATCH" == "refs/tags/$TAG_NAME" ]; then
echo "Tag $TAG_NAME already exists, skipping..."
echo "Tag $TAG_NAME already exists, skipping..."
else
echo "Creating tag in Infisical/infisical-omnibus: $TAG_NAME"
LATEST_SHA=$(gh api repos/Infisical/infisical-omnibus/git/refs/heads/main --jq '.object.sha')


@@ -24,6 +24,8 @@ jobs:
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
with:
yamale_version: "6.0.0"
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway


@@ -27,6 +27,8 @@ jobs:
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
with:
yamale_version: "6.0.0"
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway


@@ -66,5 +66,5 @@ jobs:
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres --set infisical.autoBootstrap.enabled=true" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.151.0 --set infisical.autoBootstrap.enabled=true" \
--namespace infisical-standalone-postgres


@@ -158,7 +158,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.42.6 \
&& apt-get update && apt-get install -y infisical=0.43.14 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user


@@ -142,7 +142,7 @@ RUN apt-get update && apt-get install -y \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.42.6 \
&& apt-get update && apt-get install -y infisical=0.43.14 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /


@@ -55,7 +55,7 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.41.89 git
apt-get update && apt-get install -y infisical=0.43.14 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js


@@ -49,25 +49,26 @@ RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
apt-get install -y infisical=0.43.14
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
COPY dev-entrypoint.sh dev-entrypoint.sh
RUN chmod +x dev-entrypoint.sh
RUN npm install
COPY . .
ENV HOST=0.0.0.0
ENTRYPOINT ["/app/dev-entrypoint.sh"]
CMD ["npm", "run", "dev:docker"]


@@ -50,9 +50,6 @@ RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
@@ -70,13 +67,16 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
apt-get install -y infisical=0.43.14
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
COPY dev-entrypoint.sh dev-entrypoint.sh
RUN chmod +x dev-entrypoint.sh
RUN npm install
COPY . .
@@ -87,4 +87,5 @@ ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
# ENV NODE_OPTIONS=--force-fips # Note(Daniel): We can't set this on the node options because it may break for existing folks using the infisical/infisical-fips image. Instead we call crypto.setFips(true) at runtime.
ENV FIPS_ENABLED=true
ENTRYPOINT ["/app/dev-entrypoint.sh"]
CMD ["npm", "run", "dev:docker"]

backend/dev-entrypoint.sh (new executable file, 16 lines added)

@@ -0,0 +1,16 @@
#!/bin/sh
update-ca-certificates
# Initialize SoftHSM token if it doesn't exist
if [ ! -f /etc/softhsm2/tokens/auth-app.db ]; then
echo "Initializing SoftHSM token..."
mkdir -p /etc/softhsm2/tokens
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
echo "SoftHSM token initialized"
else
echo "SoftHSM token already exists, skipping initialization"
fi
exec "$@"


@@ -146,7 +146,8 @@ describe("Service token secret ops", async () => {
let folderId = "";
beforeAll(async () => {
initLogger();
await initEnvConfig(testSuperAdminDAL, logger);
await initEnvConfig(testHsmService, testKmsRootConfigDAL, testSuperAdminDAL, logger);
serviceToken = await createServiceToken(
[{ secretPath: "/**", environment: seedData1.environment.slug }],


@@ -158,7 +158,7 @@ describe("Secret V3 Router", async () => {
let folderId = "";
beforeAll(async () => {
initLogger();
await initEnvConfig(testSuperAdminDAL, logger);
await initEnvConfig(testHsmService, testKmsRootConfigDAL, testSuperAdminDAL, logger);
const projectKeyRes = await testServer.inject({
method: "GET",


@@ -6,7 +6,7 @@ import { crypto } from "@app/lib/crypto/cryptography";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
import { getDatabaseCredentials, initEnvConfig } from "@app/lib/config/env";
import { getDatabaseCredentials, getHsmConfig, initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
@@ -20,6 +20,8 @@ import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { buildRedisFromConfig } from "@app/lib/config/redis";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { bootstrapCheck } from "@app/server/boot-strap-check";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -28,6 +30,7 @@ export default {
async setup() {
const logger = initLogger();
const databaseCredentials = getDatabaseCredentials(logger);
const hsmConfig = getHsmConfig(logger);
const db = initDbConnection({
dbConnectionUri: databaseCredentials.dbConnectionUri,
@@ -35,7 +38,19 @@ export default {
});
const superAdminDAL = superAdminDALFactory(db);
const envCfg = await initEnvConfig(superAdminDAL, logger);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const hsmModule = initializeHsmModule(hsmConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig: hsmConfig
});
await hsmService.startService();
const envCfg = await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
const redis = buildRedisFromConfig(envCfg);
await redis.flushdb("SYNC");
@@ -68,16 +83,14 @@ export default {
await queue.initialize();
const hsmModule = initializeHsmModule(envCfg);
hsmModule.initialize();
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
hsmService,
kmsRootConfigDAL,
superAdminDAL,
redis,
envConfig: envCfg
@@ -92,6 +105,10 @@ export default {
// @ts-expect-error type
globalThis.testSuperAdminDAL = superAdminDAL;
// @ts-expect-error type
globalThis.testKmsRootConfigDAL = kmsRootConfigDAL;
// @ts-expect-error type
globalThis.testHsmService = hsmService;
// @ts-expect-error type
globalThis.jwtAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.ACCESS_TOKEN,

backend/package-lock.json (generated, 3072 lines changed)

File diff suppressed because it is too large.


@@ -40,10 +40,10 @@
"type:check": "node --max-old-space-size=8192 ./node_modules/.bin/tsc --noEmit",
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"test:unit": "vitest run -c vitest.unit.config.mts",
"test:e2e": "vitest run -c vitest.e2e.config.mts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.mts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.mts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
@@ -98,7 +98,7 @@
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.17.30",
"@types/node": "^20.19.0",
"@types/nodemailer": "^6.4.14",
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
@@ -130,10 +130,10 @@
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
"tsup": "^8.5.0",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vitest": "^1.2.2"
"vitest": "^3.0.6"
},
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",


@@ -1,7 +1,9 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
declare global {
@@ -16,5 +18,7 @@ declare global {
// used only for testing
const testServer: FastifyZodProvider;
const testSuperAdminDAL: TSuperAdminDALFactory;
const testKmsRootConfigDAL: TKmsRootConfigDALFactory;
const testHsmService: THsmServiceFactory;
const jwtAuthToken: string;
}


@@ -135,9 +135,23 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
ip?: string;
userAgent?: string;
orgId?: string;
orgName?: string;
userAuthInfo?: {
userId: string;
email: string;
};
projectDetails?: {
id: string;
name: string;
slug: string;
};
identityAuthInfo?: {
identityId: string;
identityName: string;
authMethod: string;
oidc?: {
claims: Record<string, string>;
};


@@ -3,13 +3,14 @@ import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
@@ -25,10 +26,12 @@ export async function up(knex: Knex): Promise<void> {
if (hasUrl) t.string("url").nullable().alter();
});
}
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -4,13 +4,14 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
@@ -30,8 +31,12 @@ export async function up(knex: Knex): Promise<void> {
}
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -4,13 +4,14 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
@@ -24,8 +25,11 @@ export async function up(knex: Knex): Promise<void> {
}
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -4,13 +4,14 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
@@ -55,9 +56,11 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =


@@ -4,13 +4,14 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
@@ -35,8 +36,11 @@ const reencryptIdentityOidcAuth = async (knex: Knex) => {
}
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -4,16 +4,18 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
const reencryptSamlConfig = async (knex: Knex, kmsService: TKmsServiceFactory) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
@@ -28,10 +30,6 @@ const reencryptSamlConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
@@ -159,7 +157,7 @@ const reencryptSamlConfig = async (knex: Knex) => {
}
};
const reencryptLdapConfig = async (knex: Knex) => {
const reencryptLdapConfig = async (knex: Knex, kmsService: TKmsServiceFactory) => {
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
@@ -194,10 +192,6 @@ const reencryptLdapConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
@@ -323,7 +317,7 @@ const reencryptLdapConfig = async (knex: Knex) => {
}
};
const reencryptOidcConfig = async (knex: Knex) => {
const reencryptOidcConfig = async (knex: Knex, kmsService: TKmsServiceFactory) => {
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
TableName.OidcConfig,
@@ -354,10 +348,6 @@ const reencryptOidcConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
@@ -462,9 +452,18 @@ const reencryptOidcConfig = async (knex: Knex) => {
};
export async function up(knex: Knex): Promise<void> {
await reencryptSamlConfig(knex);
await reencryptLdapConfig(knex);
await reencryptOidcConfig(knex);
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
await reencryptSamlConfig(knex, kmsService);
await reencryptLdapConfig(knex, kmsService);
await reencryptOidcConfig(knex, kmsService);
}
const dropSamlConfigColumns = async (knex: Knex) => {


@@ -3,12 +3,13 @@ import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
// Note(daniel): We aren't dropping tables or columns in this migrations so we can easily rollback if needed.
// In the future we need to drop the projectGatewayId on the dynamic secrets table, and drop the project_gateways table entirely.
@@ -40,8 +41,10 @@ export async function up(knex: Knex): Promise<void> {
);
initLogger();
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -2,19 +2,23 @@ import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
export async function up(knex: Knex) {
const existingSuperAdminsWithGithubConnection = await knex(TableName.SuperAdmin)
.select(selectAllTableCols(TableName.SuperAdmin))
.whereNotNull(`${TableName.SuperAdmin}.encryptedGitHubAppConnectionClientId`);
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -2,13 +2,14 @@ import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
@@ -25,8 +26,10 @@ export async function up(knex: Knex): Promise<void> {
});
if (!hasEncryptedCredentials) {
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
@@ -131,8 +134,11 @@ export async function down(knex: Knex): Promise<void> {
const hasEncryptedCredentials = await knex.schema.hasColumn(TableName.AuditLogStream, "encryptedCredentials");
if (hasEncryptedCredentials) {
const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() });
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });


@@ -0,0 +1,49 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.PamAccount, "rotationEnabled"))) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.boolean("rotationEnabled").notNullable().defaultTo(false);
});
}
if (!(await knex.schema.hasColumn(TableName.PamAccount, "rotationIntervalSeconds"))) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.integer("rotationIntervalSeconds").nullable();
});
}
if (!(await knex.schema.hasColumn(TableName.PamAccount, "lastRotatedAt"))) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.timestamp("lastRotatedAt").nullable();
});
}
if (!(await knex.schema.hasColumn(TableName.PamResource, "encryptedRotationAccountCredentials"))) {
await knex.schema.alterTable(TableName.PamResource, (t) => {
t.binary("encryptedRotationAccountCredentials").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.PamResource, "encryptedRotationAccountCredentials")) {
await knex.schema.alterTable(TableName.PamResource, (t) => {
t.dropColumn("encryptedRotationAccountCredentials");
});
}
if (await knex.schema.hasColumn(TableName.PamAccount, "rotationEnabled")) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.dropColumn("rotationEnabled");
});
}
if (await knex.schema.hasColumn(TableName.PamAccount, "rotationIntervalSeconds")) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.dropColumn("rotationIntervalSeconds");
});
}
if (await knex.schema.hasColumn(TableName.PamAccount, "lastRotatedAt")) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.dropColumn("lastRotatedAt");
});
}
}


@@ -2,7 +2,7 @@ import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { AccessScope, TableName } from "../schemas";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasParentOrgId = await knex.schema.hasColumn(TableName.Organization, "parentOrgId");
@@ -18,8 +18,6 @@ export async function up(knex: Knex): Promise<void> {
await dropConstraintIfExists(TableName.Organization, "organizations_slug_unique", knex);
t.unique(["rootOrgId", "parentOrgId", "slug"]);
});
// had to switch to raw for null not distinct
}
const hasIdentityOrgCol = await knex.schema.hasColumn(TableName.Identity, "orgId");
@@ -28,22 +26,6 @@ export async function up(knex: Knex): Promise<void> {
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
await knex.raw(
`
UPDATE ?? AS identity
SET "orgId" = membership."scopeOrgId"
FROM ?? AS membership
WHERE
membership."actorIdentityId" = identity."id"
AND membership."scope" = ?
`,
[TableName.Identity, TableName.Membership, AccessScope.Organization]
);
await knex.schema.alterTable(TableName.Identity, (t) => {
t.uuid("orgId").notNullable().alter();
});
}
}


@@ -0,0 +1,48 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { AccessScope, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.transaction(async (tx) => {
const hasIdentityOrgCol = await tx.schema.hasColumn(TableName.Identity, "orgId");
if (hasIdentityOrgCol) {
const identityMemberships = await tx(TableName.Membership)
.where({
scope: AccessScope.Organization
})
.whereNotNull("actorIdentityId")
.select("actorIdentityId", "scopeOrgId");
const identityToOrgMapping: Record<string, string> = {};
identityMemberships.forEach((el) => {
if (el.actorIdentityId) {
identityToOrgMapping[el.actorIdentityId] = el.scopeOrgId;
}
});
const batchMemberships = chunkArray(identityMemberships, 500);
for await (const membership of batchMemberships) {
const identityIds = membership.map((el) => el.actorIdentityId).filter(Boolean) as string[];
if (identityIds.length) {
const identities = await tx(TableName.Identity).whereIn("id", identityIds).select("*");
await tx(TableName.Identity)
.insert(
identities.map((el) => ({
...el,
orgId: identityToOrgMapping[el.id]
}))
)
.onConflict("id")
.merge();
}
}
}
});
}
export async function down(): Promise<void> {}
const config = { transaction: false };
export { config };


@@ -0,0 +1,51 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.PkiApiEnrollmentConfig, "autoRenewDays")) {
await knex.schema.alterTable(TableName.PkiApiEnrollmentConfig, (t) => {
t.renameColumn("autoRenewDays", "renewBeforeDays");
});
}
if (!(await knex.schema.hasColumn(TableName.Certificate, "renewBeforeDays"))) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.integer("renewBeforeDays").nullable();
t.uuid("renewedFromCertificateId").nullable();
t.uuid("renewedByCertificateId").nullable();
t.text("renewalError").nullable();
t.string("keyAlgorithm").nullable();
t.string("signatureAlgorithm").nullable();
t.foreign("renewedFromCertificateId").references("id").inTable(TableName.Certificate).onDelete("SET NULL");
t.foreign("renewedByCertificateId").references("id").inTable(TableName.Certificate).onDelete("SET NULL");
t.index("renewedFromCertificateId");
t.index("renewedByCertificateId");
t.index("renewBeforeDays");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Certificate, "renewBeforeDays")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropForeign(["renewedFromCertificateId"]);
t.dropForeign(["renewedByCertificateId"]);
t.dropIndex("renewedFromCertificateId");
t.dropIndex("renewedByCertificateId");
t.dropIndex("renewBeforeDays");
t.dropColumn("renewBeforeDays");
t.dropColumn("renewedFromCertificateId");
t.dropColumn("renewedByCertificateId");
t.dropColumn("renewalError");
t.dropColumn("keyAlgorithm");
t.dropColumn("signatureAlgorithm");
});
}
if (await knex.schema.hasColumn(TableName.PkiApiEnrollmentConfig, "renewBeforeDays")) {
await knex.schema.alterTable(TableName.PkiApiEnrollmentConfig, (t) => {
t.renameColumn("renewBeforeDays", "autoRenewDays");
});
}
}


@@ -0,0 +1,68 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
// Fix for 20250722152841_add-policies-environments-table.ts migration.
// 20250722152841_add-policies-environments-table.ts introduced a bug where you can no longer delete a project if it has any approval policy environments.
export async function up(knex: Knex): Promise<void> {
// Fix SecretApprovalPolicyEnvironment to cascade delete when environment is deleted
// note: this won't actually happen, as we prevent deletion of environments with active approval policies
// in the old migration it was ON DELETE SET NULL, which doesn't work because envId is not a nullable col
await knex.schema.alterTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
// Fix AccessApprovalPolicyEnvironment to cascade delete when environment is deleted
// note: this won't actually happen, as we prevent deletion of environments with active approval policies
// in the old migration it was ON DELETE SET NULL, which doesn't work because envId is not a nullable col
await knex.schema.alterTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
// Fix SecretApprovalPolicy to CASCADE instead of SET NULL
// in the old migration it was ON DELETE SET NULL, which doesn't work because envId is not a nullable col
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
// Fix AccessApprovalPolicy to CASCADE instead of SET NULL
// in the old migration it was ON DELETE SET NULL, which doesn't work because envId is not a nullable col
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
}
export async function down(knex: Knex): Promise<void> {
// Revert SecretApprovalPolicyEnvironment
await knex.schema.alterTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment);
});
// Revert AccessApprovalPolicyEnvironment
await knex.schema.alterTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment);
});
// Revert SecretApprovalPolicy back to SET NULL
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
// Revert AccessApprovalPolicy back to SET NULL
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
}


@@ -0,0 +1,60 @@
import { Knex } from "knex";
import { AccessScope, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasGroupsTable = await knex.schema.hasTable(TableName.Groups);
const hasMembershipTable = await knex.schema.hasTable(TableName.Membership);
const hasMembershipRoleTable = await knex.schema.hasTable(TableName.MembershipRole);
if (!hasGroupsTable || !hasMembershipTable || !hasMembershipRoleTable) {
return;
}
const groupsWithoutMembership = await knex
.select(
`${TableName.Groups}.id`,
`${TableName.Groups}.orgId`,
`${TableName.Groups}.role`,
`${TableName.Groups}.roleId`
)
.from(TableName.Groups)
.leftJoin(TableName.Membership, `${TableName.Groups}.id`, `${TableName.Membership}.actorGroupId`)
.whereNull(`${TableName.Membership}.actorGroupId`);
if (groupsWithoutMembership.length > 0) {
const membershipInserts = groupsWithoutMembership.map((group) => ({
actorGroupId: group.id,
scope: AccessScope.Organization,
scopeOrgId: group.orgId,
isActive: true
}));
const insertedMemberships = await knex(TableName.Membership).insert(membershipInserts).returning("*");
const membershipRoleInserts = insertedMemberships.map((membership, index) => {
const group = groupsWithoutMembership[index];
return {
membershipId: membership.id,
role: group.role,
customRoleId: group.roleId
};
});
await knex(TableName.MembershipRole).insert(membershipRoleInserts);
}
await knex.schema.alterTable(TableName.Membership, (t) => {
t.check(
`("actorUserId" IS NOT NULL OR "actorIdentityId" IS NOT NULL OR "actorGroupId" IS NOT NULL)`,
undefined,
"at_least_one_actor"
);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Membership, (t) => {
t.dropChecks("at_least_one_actor");
});
}


@@ -0,0 +1,27 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasOrgBlockDuplicateColumn = await knex.schema.hasColumn(
TableName.Organization,
"blockDuplicateSecretSyncDestinations"
);
if (!hasOrgBlockDuplicateColumn) {
await knex.schema.table(TableName.Organization, (table) => {
table.boolean("blockDuplicateSecretSyncDestinations").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasOrgBlockDuplicateColumn = await knex.schema.hasColumn(
TableName.Organization,
"blockDuplicateSecretSyncDestinations"
);
if (hasOrgBlockDuplicateColumn) {
await knex.schema.table(TableName.Organization, (table) => {
table.dropColumn("blockDuplicateSecretSyncDestinations");
});
}
}


@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.PamAccount, "rotationStatus"))) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.string("rotationStatus").nullable();
});
}
if (!(await knex.schema.hasColumn(TableName.PamAccount, "encryptedLastRotationMessage"))) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.binary("encryptedLastRotationMessage").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.PamAccount, "rotationStatus")) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.dropColumn("rotationStatus");
});
}
if (await knex.schema.hasColumn(TableName.PamAccount, "encryptedLastRotationMessage")) {
await knex.schema.alterTable(TableName.PamAccount, (t) => {
t.dropColumn("encryptedLastRotationMessage");
});
}
}


@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.transaction(async (tx) => {
await tx.schema.alterTable(TableName.IdentityAccessToken, (table) => {
table.dropForeign("identityId");
});
});
}
export async function down(knex: Knex): Promise<void> {
await knex.transaction(async (tx) => {
await tx.schema.alterTable(TableName.IdentityAccessToken, (table) => {
table.foreign("identityId").references("id").inTable(TableName.Identity);
});
});
}
const config = { transaction: false };
export { config };


@@ -0,0 +1,30 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes
export async function up(knex: Knex): Promise<void> {
const result = await knex.raw("SHOW statement_timeout");
const originalTimeout = result.rows[0].statement_timeout;
await knex.transaction(async (tx) => {
try {
await tx.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
const hasIdentityOrgCol = await tx.schema.hasColumn(TableName.Identity, "orgId");
if (hasIdentityOrgCol) {
await tx(TableName.Identity).whereNull("orgId").delete();
await tx.schema.alterTable(TableName.Identity, (t) => {
t.uuid("orgId").notNullable().alter();
});
}
} finally {
await tx.raw(`SET statement_timeout = '${originalTimeout}'`);
}
});
}
export async function down(): Promise<void> {}
const config = { transaction: false };
export { config };


@@ -1,7 +1,10 @@
import { z } from "zod";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { crypto } from "@app/lib/crypto/cryptography";
import { removeTrailingSlash } from "@app/lib/fn";
import { zpStr } from "@app/lib/zod";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
const envSchema = z
@@ -22,13 +25,17 @@ const envSchema = z
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
HSM_SLOT: z.coerce.number().optional().default(0),
LICENSE_SERVER_URL: zpStr(z.string().optional().default("https://portal.infisical.com")),
LICENSE_SERVER_KEY: zpStr(z.string().optional()),
LICENSE_KEY: zpStr(z.string().optional()),
LICENSE_KEY_OFFLINE: zpStr(z.string().optional()),
INTERNAL_REGION: zpStr(z.enum(["us", "eu"]).optional()),
SITE_URL: zpStr(z.string().transform((val) => (val ? removeTrailingSlash(val) : val))).optional()
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.transform((data) => ({
...data,
isHsmConfigured:
@@ -37,7 +44,27 @@ const envSchema = z
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory) => {
export const getMigrationHsmConfig = () => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
console.error("Invalid environment variables. Check the error below");
console.error(parsedEnv.error.issues);
process.exit(-1);
}
return {
isHsmConfigured: parsedEnv.data.isHsmConfigured,
HSM_PIN: parsedEnv.data.HSM_PIN,
HSM_SLOT: parsedEnv.data.HSM_SLOT,
HSM_LIB_PATH: parsedEnv.data.HSM_LIB_PATH,
HSM_KEY_LABEL: parsedEnv.data.HSM_KEY_LABEL
};
};
export const getMigrationEnvConfig = async (
superAdminDAL: TSuperAdminDALFactory,
hsmService: THsmServiceFactory,
kmsRootConfigDAL: TKmsRootConfigDALFactory
) => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
// eslint-disable-next-line no-console
@@ -53,7 +80,7 @@ export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory
let envCfg = Object.freeze(parsedEnv.data);
const fipsEnabled = await crypto.initialize(superAdminDAL, envCfg);
const fipsEnabled = await crypto.initialize(superAdminDAL, hsmService, kmsRootConfigDAL, envCfg);
// Fix for 128-bit entropy encryption key expansion issue:
// In FIPS it is not ideal to expand a 128-bit key into 256-bit. We solved this issue in the past by creating the ROOT_ENCRYPTION_KEY.


@@ -1,28 +1,23 @@
import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { initializeHsmModule, isHsmActiveAndEnabled } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { licenseDALFactory } from "@app/ee/services/license/license-dal";
import { licenseServiceFactory } from "@app/ee/services/license/license-service";
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { BadRequestError } from "@app/lib/errors";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { roleDALFactory } from "@app/services/role/role-dal";
import { serviceTokenDALFactory } from "@app/services/service-token/service-token-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@@ -33,8 +28,11 @@ type TDependencies = {
keyStore: TKeyStoreFactory;
};
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// eslint-disable-next-line no-param-reassign
type THsmServiceDependencies = {
envConfig: Pick<TMigrationEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
};
export const getMigrationHsmService = async ({ envConfig }: THsmServiceDependencies) => {
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
@@ -43,67 +41,72 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
envConfig
});
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const projectDAL = projectDALFactory(db);
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
return { kmsService };
return { hsmService };
};
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// ----- DAL dependencies -----
const orgDAL = orgDALFactory(db);
const licenseDAL = licenseDALFactory(db);
const permissionDAL = permissionDALFactory(db);
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const roleDAL = roleDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const serviceTokenDAL = serviceTokenDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
// ----- Service dependencies -----
const permissionService = permissionServiceFactory({
permissionDAL,
serviceTokenDAL,
projectDAL,
keyStore,
roleDAL,
userDAL,
identityDAL
});
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
const licenseService = licenseServiceFactory({
permissionService,
orgDAL,
licenseDAL,
keyStore,
projectDAL,
envConfig
});
// ----- HSM startup -----
const { hsmService } = await getMigrationHsmService({ envConfig });
const hsmStatus = await isHsmActiveAndEnabled({
hsmService,
kmsRootConfigDAL,
licenseService
});
// if the encryption strategy is software - user needs to provide an encryption key
// if the encryption strategy is null AND the hsm is not configured - user needs to provide an encryption key
const needsEncryptionKey =
hsmStatus.rootKmsConfigEncryptionStrategy === RootKeyEncryptionStrategy.Software ||
(hsmStatus.rootKmsConfigEncryptionStrategy === null && !hsmStatus.isHsmConfigured);
if (needsEncryptionKey) {
if (!envConfig.ROOT_ENCRYPTION_KEY && !envConfig.ENCRYPTION_KEY) {
throw new BadRequestError({
message:
"Root KMS encryption strategy is set to software. Please set the ENCRYPTION_KEY environment variable and restart your deployment.\nYou can enable HSM encryption in the Server Console."
});
}
}
// ----- KMS startup -----
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
@@ -115,27 +118,7 @@ export const getMigrationPITServices = async ({
envConfig
});
await hsmService.startService();
await kmsService.startService();
await kmsService.startService(hsmStatus);
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
return { kmsService, hsmService };
};


@@ -27,7 +27,13 @@ export const CertificatesSchema = z.object({
extendedKeyUsages: z.string().array().nullable().optional(),
projectId: z.string(),
pkiSubscriberId: z.string().uuid().nullable().optional(),
profileId: z.string().uuid().nullable().optional()
profileId: z.string().uuid().nullable().optional(),
renewBeforeDays: z.number().nullable().optional(),
renewedFromCertificateId: z.string().uuid().nullable().optional(),
renewedByCertificateId: z.string().uuid().nullable().optional(),
renewalError: z.string().nullable().optional(),
keyAlgorithm: z.string().nullable().optional(),
signatureAlgorithm: z.string().nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;


@@ -40,7 +40,8 @@ export const OrganizationsSchema = z.object({
googleSsoAuthEnforced: z.boolean().default(false),
googleSsoAuthLastUsed: z.date().nullable().optional(),
parentOrgId: z.string().uuid().nullable().optional(),
rootOrgId: z.string().uuid().nullable().optional()
rootOrgId: z.string().uuid().nullable().optional(),
blockDuplicateSecretSyncDestinations: z.boolean().default(false)
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@@ -18,7 +18,12 @@ export const PamAccountsSchema = z.object({
description: z.string().nullable().optional(),
encryptedCredentials: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
rotationEnabled: z.boolean().default(false),
rotationIntervalSeconds: z.number().nullable().optional(),
lastRotatedAt: z.date().nullable().optional(),
rotationStatus: z.string().nullable().optional(),
encryptedLastRotationMessage: zodBuffer.nullable().optional()
});
export type TPamAccounts = z.infer<typeof PamAccountsSchema>;

View File

@@ -17,7 +17,8 @@ export const PamResourcesSchema = z.object({
resourceType: z.string(),
encryptedConnectionDetails: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedRotationAccountCredentials: zodBuffer.nullable().optional()
});
export type TPamResources = z.infer<typeof PamResourcesSchema>;

View File

@@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const PkiApiEnrollmentConfigsSchema = z.object({
id: z.string().uuid(),
autoRenew: z.boolean().default(false).nullable().optional(),
autoRenewDays: z.number().nullable().optional(),
renewBeforeDays: z.number().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});

View File

@@ -1,7 +1,10 @@
import { Knex } from "knex";
import { initEnvConfig } from "@app/lib/config/env";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { getHsmConfig, initEnvConfig } from "@app/lib/config/env";
import { initLogger, logger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { AuthMethod } from "../../services/auth/auth-type";
@@ -17,7 +20,21 @@ export async function seed(knex: Knex): Promise<void> {
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await initEnvConfig(superAdminDAL, logger);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const hsmConfig = getHsmConfig(logger);
const hsmModule = initializeHsmModule(hsmConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig: hsmConfig
});
await hsmService.startService();
await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
await knex(TableName.SuperAdmin).insert([
// eslint-disable-next-line

View File

@@ -1,11 +1,14 @@
import { Knex } from "knex";
import { initEnvConfig } from "@app/lib/config/env";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { getHsmConfig, initEnvConfig } from "@app/lib/config/env";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { initLogger, logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { AuthMethod } from "@app/services/auth/auth-type";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { membershipRoleDALFactory } from "@app/services/membership/membership-role-dal";
import { membershipUserDALFactory } from "@app/services/membership-user/membership-user-dal";
import { assignWorkspaceKeysToMembers, createProjectKey } from "@app/services/project/project-fns";
@@ -192,7 +195,21 @@ export async function seed(knex: Knex): Promise<void> {
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await initEnvConfig(superAdminDAL, logger);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const hsmConfig = getHsmConfig(logger);
const hsmModule = initializeHsmModule(hsmConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig: hsmConfig
});
await hsmService.startService();
await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
const [project] = await knex(TableName.Project)
.insert({

View File

@@ -1,8 +1,11 @@
import { Knex } from "knex";
import { initEnvConfig } from "@app/lib/config/env";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { getHsmConfig, initEnvConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { initLogger, logger } from "@app/lib/logger";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { AccessScope, IdentityAuthMethod, OrgMembershipRole, ProjectMembershipRole, TableName } from "../schemas";
@@ -15,7 +18,20 @@ export async function seed(knex: Knex): Promise<void> {
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await initEnvConfig(superAdminDAL, logger);
const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
const hsmConfig = getHsmConfig(logger);
const hsmModule = initializeHsmModule(hsmConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig: hsmConfig
});
await hsmService.startService();
await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
// Inserts seed entries
await knex(TableName.Identity).insert([
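Editor's note: the same HSM bootstrap is now repeated across all three seed files above. A consolidated sketch of those steps (hypothetical helper, not in the changeset; assumes the same imports the seed files add):

// Sketch only: one place for the seed-time HSM and env-config bootstrap.
async function bootstrapSeedEnv(knex: Knex) {
  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(knex);
  const hsmConfig = getHsmConfig(logger);
  const hsmModule = initializeHsmModule(hsmConfig);
  hsmModule.initialize();
  const hsmService = hsmServiceFactory({ hsmModule: hsmModule.getModule(), envConfig: hsmConfig });
  await hsmService.startService();
  await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
  return { superAdminDAL, hsmService };
}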

View File

@@ -2,7 +2,6 @@ import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -32,8 +31,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
@@ -127,8 +126,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
}),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.projectSlug),
path: z
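Editor's sketch (not part of the diff): the change above replaces the one-day TTL cap with a ten-year cap while keeping the one-minute floor. The validator on its own, assuming the same ms helper from @app/lib/ms:

import { z } from "zod";
import { ms } from "@app/lib/ms";

// Sketch only: the TTL rule now shared by the dynamic secret routers.
const ttlSchema = z.string().superRefine((val, ctx) => {
  const valMs = ms(val);
  if (valMs < 60 * 1000)
    ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
  if (valMs > ms("10y"))
    ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
});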

View File

@@ -2,7 +2,6 @@ import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
@@ -32,8 +31,8 @@ export const registerKubernetesDynamicSecretLeaseRouter = async (server: Fastify
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),

View File

@@ -3,7 +3,6 @@ import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/providers/models";
import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
@@ -60,8 +59,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
}),
maxTTL: z
.string()
@@ -72,8 +71,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
})
.nullable(),
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
@@ -130,8 +129,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
}),
maxTTL: z
.string()
@@ -142,8 +141,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be a greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
if (valMs > ms("10y"))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than 10 years" });
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),

View File

@@ -182,7 +182,8 @@ export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
algorithm: z.string(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
kmipMetadata: z.record(z.any()).nullish()
})
}
},
@@ -384,7 +385,8 @@ export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
isActive: z.boolean(),
algorithm: z.string(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
kmipMetadata: z.record(z.any()).nullish()
})
.array()
})

View File

@@ -1,3 +1,8 @@
import {
CreateMySQLAccountSchema,
SanitizedMySQLAccountWithResourceSchema,
UpdateMySQLAccountSchema
} from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import {
CreatePostgresAccountSchema,
@@ -16,5 +21,14 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fasti
createAccountSchema: CreatePostgresAccountSchema,
updateAccountSchema: UpdatePostgresAccountSchema
});
},
[PamResource.MySQL]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.MySQL,
accountResponseSchema: SanitizedMySQLAccountWithResourceSchema,
createAccountSchema: CreateMySQLAccountSchema,
updateAccountSchema: UpdateMySQLAccountSchema
});
}
};

View File

@@ -22,11 +22,15 @@ export const registerPamResourceEndpoints = <C extends TPamAccount>({
folderId?: C["folderId"];
name: C["name"];
description?: C["description"];
rotationEnabled: C["rotationEnabled"];
rotationIntervalSeconds?: C["rotationIntervalSeconds"];
}>;
updateAccountSchema: z.ZodType<{
credentials?: C["credentials"];
name?: C["name"];
description?: C["description"];
rotationEnabled?: C["rotationEnabled"];
rotationIntervalSeconds?: C["rotationIntervalSeconds"];
}>;
accountResponseSchema: z.ZodTypeAny;
}) => {
@@ -60,7 +64,9 @@ export const registerPamResourceEndpoints = <C extends TPamAccount>({
resourceType,
folderId: req.body.folderId,
name: req.body.name,
description: req.body.description
description: req.body.description,
rotationEnabled: req.body.rotationEnabled,
rotationIntervalSeconds: req.body.rotationIntervalSeconds
}
}
});
@@ -108,7 +114,9 @@ export const registerPamResourceEndpoints = <C extends TPamAccount>({
resourceId: account.resourceId,
resourceType,
name: req.body.name,
description: req.body.description
description: req.body.description,
rotationEnabled: req.body.rotationEnabled,
rotationIntervalSeconds: req.body.rotationIntervalSeconds
}
}
});

View File

@@ -2,6 +2,7 @@ import { z } from "zod";
import { PamFoldersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import { SanitizedPostgresAccountWithResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { BadRequestError } from "@app/lib/errors";
@@ -10,8 +11,10 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
// Use z.union([...]) when more resources are added
const SanitizedAccountSchema = SanitizedPostgresAccountWithResourceSchema;
const SanitizedAccountSchema = z.union([
SanitizedPostgresAccountWithResourceSchema,
SanitizedMySQLAccountWithResourceSchema
]);
export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
server.route({

View File

@@ -1,7 +1,12 @@
import {
CreateMySQLResourceSchema,
MySQLResourceSchema,
UpdateMySQLResourceSchema
} from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import {
CreatePostgresResourceSchema,
PostgresResourceSchema,
SanitizedPostgresResourceSchema,
UpdatePostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
@@ -12,9 +17,18 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fast
registerPamResourceEndpoints({
server,
resourceType: PamResource.Postgres,
resourceResponseSchema: PostgresResourceSchema,
resourceResponseSchema: SanitizedPostgresResourceSchema,
createResourceSchema: CreatePostgresResourceSchema,
updateResourceSchema: UpdatePostgresResourceSchema
});
},
[PamResource.MySQL]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.MySQL,
resourceResponseSchema: MySQLResourceSchema,
createResourceSchema: CreateMySQLResourceSchema,
updateResourceSchema: UpdateMySQLResourceSchema
});
}
};

View File

@@ -21,11 +21,13 @@ export const registerPamResourceEndpoints = <T extends TPamResource>({
connectionDetails: T["connectionDetails"];
gatewayId: T["gatewayId"];
name: T["name"];
rotationAccountCredentials?: T["rotationAccountCredentials"];
}>;
updateResourceSchema: z.ZodType<{
connectionDetails?: T["connectionDetails"];
gatewayId?: T["gatewayId"];
name?: T["name"];
rotationAccountCredentials?: T["rotationAccountCredentials"];
}>;
resourceResponseSchema: z.ZodTypeAny;
}) => {

View File

@@ -1,18 +1,24 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
MySQLResourceListItemSchema,
SanitizedMySQLResourceSchema
} from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import {
PostgresResourceListItemSchema,
PostgresResourceSchema
SanitizedPostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
// Use z.union([...]) when more resources are added
const ResourceSchema = PostgresResourceSchema;
const SanitizedResourceSchema = z.union([SanitizedPostgresResourceSchema, SanitizedMySQLResourceSchema]);
const ResourceOptionsSchema = z.discriminatedUnion("resource", [PostgresResourceListItemSchema]);
const ResourceOptionsSchema = z.discriminatedUnion("resource", [
PostgresResourceListItemSchema,
MySQLResourceListItemSchema
]);
export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -50,7 +56,7 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
resources: ResourceSchema.array()
resources: SanitizedResourceSchema.array()
})
}
},

View File

@@ -2,14 +2,14 @@ import { z } from "zod";
import { PamSessionsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "@app/ee/services/pam-session/pam-session-schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
// Use z.union([]) once there's multiple
const SessionCredentialsSchema = PostgresSessionCredentialsSchema;
const SessionCredentialsSchema = z.union([PostgresSessionCredentialsSchema, MySQLSessionCredentialsSchema]);
export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
// Meant to be hit solely by gateway identities

View File

@@ -7,6 +7,7 @@
// All the any rules are disabled because passport typesense with fastify is really poor
import { Authenticator } from "@fastify/passport";
import { requestContext } from "@fastify/request-context";
import fastifySession from "@fastify/session";
import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
@@ -17,6 +18,7 @@ import { ApiDocsTags, SamlSso } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
@@ -102,15 +104,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
},
// eslint-disable-next-line
async (req, profile, cb) => {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const email =
profile?.email ??
// entra sends data in this format
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\
try {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const email =
profile?.email ??
// entra sends data in this format
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\
const firstName = (profile.firstName ??
// entra sends data in this format
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;
@@ -144,7 +146,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
})
.filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
const { isUserCompleted, providerAuthToken, user, organization } = await server.services.saml.samlLogin({
externalId: profile.nameID,
email: email.toLowerCase(),
firstName,
@@ -154,8 +156,32 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId,
metadata: userMetadata
});
if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
authAttemptCounter.add(1, {
"infisical.user.email": email.toLowerCase(),
"infisical.user.id": user.id,
"infisical.organization.id": organization.id,
"infisical.organization.name": organization.name,
"infisical.auth.method": AuthAttemptAuthMethod.SAML,
"infisical.auth.result": AuthAttemptAuthResult.SUCCESS,
"client.address": requestContext.get("ip"),
"user_agent.original": requestContext.get("userAgent")
});
}
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
authAttemptCounter.add(1, {
"infisical.user.email": email.toLowerCase(),
"infisical.auth.method": AuthAttemptAuthMethod.SAML,
"infisical.auth.result": AuthAttemptAuthResult.FAILURE,
"client.address": requestContext.get("ip"),
"user_agent.original": requestContext.get("userAgent")
});
}
logger.error(error);
cb(error as Error);
}

View File

@@ -340,6 +340,8 @@ export enum EventType {
ISSUE_PKI_SUBSCRIBER_CERT = "issue-pki-subscriber-cert",
SIGN_PKI_SUBSCRIBER_CERT = "sign-pki-subscriber-cert",
AUTOMATED_RENEW_SUBSCRIBER_CERT = "automated-renew-subscriber-cert",
AUTOMATED_RENEW_CERTIFICATE = "automated-renew-certificate",
AUTOMATED_RENEW_CERTIFICATE_FAILED = "automated-renew-certificate-failed",
LIST_PKI_SUBSCRIBER_CERTS = "list-pki-subscriber-certs",
GET_SUBSCRIBER_ACTIVE_CERT_BUNDLE = "get-subscriber-active-cert-bundle",
CREATE_KMS = "create-kms",
@@ -367,6 +369,9 @@ export enum EventType {
ISSUE_CERTIFICATE_FROM_PROFILE = "issue-certificate-from-profile",
SIGN_CERTIFICATE_FROM_PROFILE = "sign-certificate-from-profile",
ORDER_CERTIFICATE_FROM_PROFILE = "order-certificate-from-profile",
RENEW_CERTIFICATE = "renew-certificate",
UPDATE_CERTIFICATE_RENEWAL_CONFIG = "update-certificate-renewal-config",
DISABLE_CERTIFICATE_RENEWAL_CONFIG = "disable-certificate-renewal-config",
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
@@ -527,6 +532,8 @@ export enum EventType {
PAM_ACCOUNT_CREATE = "pam-account-create",
PAM_ACCOUNT_UPDATE = "pam-account-update",
PAM_ACCOUNT_DELETE = "pam-account-delete",
PAM_ACCOUNT_CREDENTIAL_ROTATION = "pam-account-credential-rotation",
PAM_ACCOUNT_CREDENTIAL_ROTATION_FAILED = "pam-account-credential-rotation-failed",
PAM_RESOURCE_LIST = "pam-resource-list",
PAM_RESOURCE_GET = "pam-resource-get",
PAM_RESOURCE_CREATE = "pam-resource-create",
@@ -2456,6 +2463,29 @@ interface AutomatedRenewPkiSubscriberCert {
};
}
interface AutomatedRenewCertificate {
type: EventType.AUTOMATED_RENEW_CERTIFICATE;
metadata: {
certificateId: string;
commonName: string;
profileId: string;
renewBeforeDays: string;
profileName: string;
};
}
interface AutomatedRenewCertificateFailed {
type: EventType.AUTOMATED_RENEW_CERTIFICATE_FAILED;
metadata: {
certificateId: string;
commonName: string;
profileId: string;
renewBeforeDays: string;
profileName: string;
error: string;
};
}
interface SignPkiSubscriberCert {
type: EventType.SIGN_PKI_SUBSCRIBER_CERT;
metadata: {
@@ -2718,6 +2748,16 @@ interface OrderCertificateFromProfile {
};
}
interface RenewCertificate {
type: EventType.RENEW_CERTIFICATE;
metadata: {
originalCertificateId: string;
newCertificateId: string;
profileName: string;
commonName: string;
};
}
interface AttemptCreateSlackIntegration {
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION;
metadata: {
@@ -3915,6 +3955,8 @@ interface PamAccountCreateEvent {
folderId?: string | null;
name: string;
description?: string | null;
rotationEnabled: boolean;
rotationIntervalSeconds?: number | null;
};
}
@@ -3926,6 +3968,8 @@ interface PamAccountUpdateEvent {
resourceType: string;
name?: string;
description?: string | null;
rotationEnabled?: boolean;
rotationIntervalSeconds?: number | null;
};
}
@@ -3939,6 +3983,27 @@ interface PamAccountDeleteEvent {
};
}
interface PamAccountCredentialRotationEvent {
type: EventType.PAM_ACCOUNT_CREDENTIAL_ROTATION;
metadata: {
accountName: string;
accountId: string;
resourceId: string;
resourceType: string;
};
}
interface PamAccountCredentialRotationFailedEvent {
type: EventType.PAM_ACCOUNT_CREDENTIAL_ROTATION_FAILED;
metadata: {
accountName: string;
accountId: string;
resourceId: string;
resourceType: string;
errorMessage: string;
};
}
interface PamResourceListEvent {
type: EventType.PAM_RESOURCE_LIST;
metadata: {
@@ -3982,6 +4047,23 @@ interface PamResourceDeleteEvent {
};
}
interface UpdateCertificateRenewalConfigEvent {
type: EventType.UPDATE_CERTIFICATE_RENEWAL_CONFIG;
metadata: {
certificateId: string;
renewBeforeDays: string;
commonName: string;
};
}
interface DisableCertificateRenewalConfigEvent {
type: EventType.DISABLE_CERTIFICATE_RENEWAL_CONFIG;
metadata: {
certificateId: string;
commonName: string;
};
}
export type Event =
| CreateSubOrganizationEvent
| UpdateSubOrganizationEvent
@@ -4189,6 +4271,7 @@ export type Event =
| IssueCertificateFromProfile
| SignCertificateFromProfile
| OrderCertificateFromProfile
| RenewCertificate
| GetAzureAdCsTemplatesEvent
| AttemptCreateSlackIntegration
| AttemptReinstallSlackIntegration
@@ -4340,8 +4423,14 @@ export type Event =
| PamAccountCreateEvent
| PamAccountUpdateEvent
| PamAccountDeleteEvent
| PamAccountCredentialRotationEvent
| PamAccountCredentialRotationFailedEvent
| PamResourceListEvent
| PamResourceGetEvent
| PamResourceCreateEvent
| PamResourceUpdateEvent
| PamResourceDeleteEvent;
| PamResourceDeleteEvent
| UpdateCertificateRenewalConfigEvent
| DisableCertificateRenewalConfigEvent
| AutomatedRenewCertificate
| AutomatedRenewCertificateFailed;
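Editor's sketch (not part of the diff): how a renewal job might record the new AUTOMATED_RENEW_CERTIFICATE event, following the createAuditLog shape used elsewhere in this changeset; cert and profile are placeholder variables:

// Sketch only: platform actor, metadata matching the AutomatedRenewCertificate interface above.
await auditLogService.createAuditLog({
  projectId: cert.projectId,
  actor: { type: ActorType.PLATFORM, metadata: {} },
  event: {
    type: EventType.AUTOMATED_RENEW_CERTIFICATE,
    metadata: {
      certificateId: cert.id,
      commonName: cert.commonName,
      profileId: profile.id,
      profileName: profile.name,
      renewBeforeDays: String(cert.renewBeforeDays)
    }
  }
});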

View File

@@ -112,7 +112,7 @@ export const dynamicSecretServiceFactory = ({
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
throw new BadRequestError({ message: "Provided dynamic secret already exists under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
@@ -265,7 +265,7 @@ export const dynamicSecretServiceFactory = ({
if (newName) {
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id });
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
throw new BadRequestError({ message: "Provided dynamic secret already exists under the folder" });
}
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({

View File

@@ -1,8 +1,14 @@
import * as pkcs11js from "pkcs11js";
import { TEnvConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { KMS_ROOT_CONFIG_UUID } from "@app/services/kms/kms-fns";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { THsmServiceFactory } from "./hsm-service";
import { HsmModule } from "./hsm-types";
export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
@@ -25,10 +31,9 @@ export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured
logger.info("PKCS#11 module initialized");
} catch (error) {
logger.error(error, "Failed to initialize PKCS#11 module");
if ((error as { message?: string })?.message === "CKR_CRYPTOKI_ALREADY_INITIALIZED") {
logger.info("Skipping HSM initialization because it's already initialized.");
isInitialized = true;
} else {
logger.error(error, "Failed to initialize PKCS#11 module");
throw error;
@@ -60,3 +65,36 @@ export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured
getModule
};
};
export const isHsmActiveAndEnabled = async ({
hsmService,
kmsRootConfigDAL,
licenseService
}: {
hsmService: Pick<THsmServiceFactory, "isActive">;
kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById">;
licenseService?: Pick<TLicenseServiceFactory, "onPremFeatures">;
}) => {
const isHsmConfigured = await hsmService.isActive();
// null if the root kms config does not exist
let rootKmsConfigEncryptionStrategy: RootKeyEncryptionStrategy | null = null;
const rootKmsConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID).catch(() => null);
rootKmsConfigEncryptionStrategy = (rootKmsConfig?.encryptionStrategy || null) as RootKeyEncryptionStrategy | null;
if (
rootKmsConfigEncryptionStrategy === RootKeyEncryptionStrategy.HSM &&
licenseService &&
!licenseService.onPremFeatures.hsm
) {
throw new BadRequestError({
message: "Your license does not include HSM integration. Please upgrade to the Enterprise plan to use HSM."
});
}
return {
rootKmsConfigEncryptionStrategy,
isHsmConfigured
};
};

View File

@@ -25,6 +25,8 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
const AES_KEY_SIZE = 256;
const HMAC_KEY_SIZE = 256;
let pkcs11TestPassed = false;
const $withSession = async <T>(callbackWithSession: SessionCallback<T>): Promise<T> => {
const RETRY_INTERVAL = 200; // 200ms between attempts
const MAX_TIMEOUT = 90_000; // 90 seconds maximum total time
@@ -363,7 +365,9 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
return false;
}
let pkcs11TestPassed = false;
if (pkcs11TestPassed) {
return true;
}
try {
pkcs11TestPassed = await $withSession($testPkcs11Module);
@@ -371,7 +375,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
logger.error(err, "HSM: Error testing PKCS#11 module");
}
return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
return pkcs11TestPassed;
};
const startService = async () => {
@@ -460,10 +464,23 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
}
};
const randomBytes = async (length: number) => {
if (!pkcs11 || !isInitialized) {
throw new Error("PKCS#11 module is not initialized");
}
const randomData = await $withSession((sessionHandle) =>
pkcs11.C_GenerateRandom(sessionHandle, Buffer.alloc(length))
);
return randomData;
};
return {
encrypt,
startService,
isActive,
decrypt
decrypt,
randomBytes
};
};
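Editor's sketch (not part of the diff): using the new randomBytes helper once the service has started; hsmService is assumed to come from hsmServiceFactory:

// Sketch only: 32 bytes of HSM-generated randomness, e.g. for a key seed.
await hsmService.startService();
const seed: Buffer = await hsmService.randomBytes(32);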

View File

@@ -1,5 +1,7 @@
import pkcs11js from "pkcs11js";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
export type HsmModule = {
pkcs11: pkcs11js.PKCS11;
isInitialized: boolean;
@@ -9,3 +11,8 @@ export enum HsmKeyType {
AES = "AES",
HMAC = "hmac"
}
export type THsmStatus = {
rootKmsConfigEncryptionStrategy: RootKeyEncryptionStrategy | null;
isHsmConfigured: boolean;
};

View File

@@ -341,7 +341,8 @@ export const kmipOperationServiceFactory = ({
algorithm: completeKeyDetails.internalKms.encryptionAlgorithm,
isActive: !key.isDisabled,
createdAt: key.createdAt,
updatedAt: key.updatedAt
updatedAt: key.updatedAt,
kmipMetadata: key.kmipMetadata as Record<string, unknown>
};
};

View File

@@ -11,7 +11,7 @@ import { Knex } from "knex";
import { OrganizationActionScope } from "@app/db/schemas";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { TEnvConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -40,11 +40,16 @@ import {
TOrgPlanDTO,
TOrgPlansTableDTO,
TOrgPmtMethodsDTO,
TPlanBillingInfo,
TStartOrgTrialDTO,
TUpdateOrgBillingDetailsDTO
} from "./license-types";
type TLicenseServiceFactoryDep = {
envConfig: Pick<
TEnvConfig,
"LICENSE_SERVER_URL" | "LICENSE_SERVER_KEY" | "LICENSE_KEY" | "LICENSE_KEY_OFFLINE" | "INTERNAL_REGION" | "SITE_URL"
>;
orgDAL: Pick<TOrgDALFactory, "findRootOrgDetails" | "countAllOrgMembers" | "findById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseDAL: TLicenseDALFactory;
@@ -65,26 +70,26 @@ export const licenseServiceFactory = ({
permissionService,
licenseDAL,
keyStore,
projectDAL
projectDAL,
envConfig
}: TLicenseServiceFactoryDep) => {
let isValidLicense = false;
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenseRequestWithStore(
appCfg.LICENSE_SERVER_URL || "",
envConfig.LICENSE_SERVER_URL || "",
LICENSE_SERVER_CLOUD_LOGIN,
appCfg.LICENSE_SERVER_KEY || "",
appCfg.INTERNAL_REGION
envConfig.LICENSE_SERVER_KEY || "",
envConfig.INTERNAL_REGION
);
const licenseServerOnPremApi = setupLicenseRequestWithStore(
appCfg.LICENSE_SERVER_URL || "",
envConfig.LICENSE_SERVER_URL || "",
LICENSE_SERVER_ON_PREM_LOGIN,
appCfg.LICENSE_KEY || "",
appCfg.INTERNAL_REGION
envConfig.LICENSE_KEY || "",
envConfig.INTERNAL_REGION
);
const syncLicenseKeyOnPremFeatures = async (shouldThrow: boolean = false) => {
@@ -118,7 +123,7 @@ export const licenseServiceFactory = ({
const init = async () => {
try {
if (appCfg.LICENSE_SERVER_KEY) {
if (envConfig.LICENSE_SERVER_KEY) {
const token = await licenseServerCloudApi.refreshLicense();
if (token) instanceType = InstanceType.Cloud;
logger.info(`Instance type: ${InstanceType.Cloud}`);
@@ -126,7 +131,7 @@ export const licenseServiceFactory = ({
return;
}
if (appCfg.LICENSE_KEY) {
if (envConfig.LICENSE_KEY) {
const token = await licenseServerOnPremApi.refreshLicense();
if (token) {
await syncLicenseKeyOnPremFeatures(true);
@@ -137,10 +142,10 @@ export const licenseServiceFactory = ({
return;
}
if (appCfg.LICENSE_KEY_OFFLINE) {
if (envConfig.LICENSE_KEY_OFFLINE) {
let isValidOfflineLicense = true;
const contents: TOfflineLicenseContents = JSON.parse(
Buffer.from(appCfg.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
);
const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature);
@@ -179,7 +184,7 @@ export const licenseServiceFactory = ({
};
const initializeBackgroundSync = async () => {
if (appCfg.LICENSE_KEY) {
if (envConfig.LICENSE_KEY) {
logger.info("Setting up background sync process for refresh onPremFeatures");
const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
job.start();
@@ -212,9 +217,8 @@ export const licenseServiceFactory = ({
const membersUsed = await licenseDAL.countOfOrgMembers(rootOrgId);
currentPlan.membersUsed = membersUsed;
const identityUsed = await licenseDAL.countOrgUsersAndIdentities(rootOrgId);
currentPlan.identitiesUsed = identityUsed;
if (currentPlan.identityLimit && currentPlan.identityLimit !== identityUsed) {
if (currentPlan?.identitiesUsed && currentPlan.identitiesUsed !== identityUsed) {
try {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
quantity: membersUsed,
@@ -227,6 +231,7 @@ export const licenseServiceFactory = ({
);
}
}
currentPlan.identitiesUsed = identityUsed;
await keyStore.setItemWithExpiry(
FEATURE_CACHE_KEY(org.id),
@@ -440,8 +445,8 @@ export const licenseServiceFactory = ({
} = await licenseServerCloudApi.request.post(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods`,
{
success_url: `${appCfg.SITE_URL}/organization/billing`,
cancel_url: `${appCfg.SITE_URL}/organization/billing`
success_url: `${envConfig.SITE_URL}/organization/billing`,
cancel_url: `${envConfig.SITE_URL}/organization/billing`
}
);
@@ -454,13 +459,28 @@ export const licenseServiceFactory = ({
} = await licenseServerCloudApi.request.post(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/billing-portal`,
{
return_url: `${appCfg.SITE_URL}/organization/billing`
return_url: `${envConfig.SITE_URL}/organization/billing`
}
);
return { url };
};
const getUsageMetrics = async (orgId: string) => {
const [orgMembersUsed, identityUsed, projectCount] = await Promise.all([
orgDAL.countAllOrgMembers(orgId),
licenseDAL.countOfOrgIdentities(orgId),
projectDAL.countOfOrgProjects(orgId)
]);
return {
orgMembersUsed,
identityUsed,
projectCount,
totalIdentities: identityUsed + orgMembersUsed
};
};
const getOrgBillingInfo = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
const { permission } = await permissionService.getOrgPermission({
actorId,
@@ -479,10 +499,16 @@ export const licenseServiceFactory = ({
});
}
if (instanceType === InstanceType.Cloud) {
const { data } = await licenseServerCloudApi.request.get(
const { data } = await licenseServerCloudApi.request.get<TPlanBillingInfo>(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
const { identityUsed, orgMembersUsed } = await getUsageMetrics(orgId);
return {
...data,
users: orgMembersUsed,
identities: identityUsed
};
}
return {
@@ -491,7 +517,9 @@ export const licenseServiceFactory = ({
interval: "month",
intervalCount: 1,
amount: 0,
quantity: 1
quantity: 1,
users: 0,
identities: 0
};
};
@@ -535,21 +563,6 @@ export const licenseServiceFactory = ({
throw new Error(`Unsupported instance type for server-based plan table: ${instanceType}`);
};
const getUsageMetrics = async (orgId: string) => {
const [orgMembersUsed, identityUsed, projectCount] = await Promise.all([
orgDAL.countAllOrgMembers(orgId),
licenseDAL.countOfOrgIdentities(orgId),
projectDAL.countOfOrgProjects(orgId)
]);
return {
orgMembersUsed,
identityUsed,
projectCount,
totalIdentities: identityUsed + orgMembersUsed
};
};
// returns org current plan feature table
const getOrgPlanTable = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
const { permission } = await permissionService.getOrgPermission({

View File

@@ -22,6 +22,15 @@ export type TOfflineLicense = {
features: TFeatureSet;
};
export type TPlanBillingInfo = {
currentPeriodStart: number;
currentPeriodEnd: number;
interval: "month" | "year";
intervalCount: number;
amount: number;
quantity: number;
};
export type TFeatureSet = {
_id: null;
slug: string | null;
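Editor's sketch (not part of the diff): with the license-service change above, the cloud billing payload is TPlanBillingInfo plus live usage counts; the type name below is assumed for illustration:

// Sketch only: shape returned by getOrgBillingInfo for cloud organizations.
type TOrgBillingInfoResponse = TPlanBillingInfo & {
  users: number;
  identities: number;
};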

View File

@@ -1,5 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
import { AccessScope, OrganizationActionScope, OrgMembershipStatus, TableName, TUsers } from "@app/db/schemas";
@@ -15,6 +16,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType, AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
@@ -471,7 +473,7 @@ export const oidcConfigServiceFactory = ({
});
}
return { isUserCompleted, providerAuthToken };
return { isUserCompleted, providerAuthToken, user };
};
const updateOidcCfg = async ({
@@ -754,10 +756,35 @@ export const oidcConfigServiceFactory = ({
callbackPort,
manageGroupMemberships: oidcCfg.manageGroupMemberships
})
.then(({ isUserCompleted, providerAuthToken }) => {
.then(({ isUserCompleted, providerAuthToken, user }) => {
if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
authAttemptCounter.add(1, {
"infisical.user.email": claims?.email?.toLowerCase(),
"infisical.user.id": user.id,
"infisical.organization.id": org.id,
"infisical.organization.name": org.name,
"infisical.auth.method": AuthAttemptAuthMethod.OIDC,
"infisical.auth.result": AuthAttemptAuthResult.SUCCESS,
"client.address": requestContext.get("ip"),
"user_agent.original": requestContext.get("userAgent")
});
}
cb(null, { isUserCompleted, providerAuthToken });
})
.catch((error) => {
if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
authAttemptCounter.add(1, {
"infisical.user.email": claims?.email?.toLowerCase(),
"infisical.organization.id": org.id,
"infisical.organization.name": org.name,
"infisical.auth.method": AuthAttemptAuthMethod.OIDC,
"infisical.auth.result": AuthAttemptAuthResult.FAILURE,
"client.address": requestContext.get("ip"),
"user_agent.original": requestContext.get("userAgent")
});
}
cb(error);
});
}

View File

@@ -18,7 +18,8 @@ export const pamAccountDALFactory = (db: TDbClient) => {
.select(
// resource
db.ref("name").withSchema(TableName.PamResource).as("resourceName"),
db.ref("resourceType").withSchema(TableName.PamResource)
db.ref("resourceType").withSchema(TableName.PamResource),
db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource)
);
if (filter) {
@@ -28,16 +29,35 @@ export const pamAccountDALFactory = (db: TDbClient) => {
const accounts = await query;
return accounts.map(({ resourceId, resourceName, resourceType, ...account }) => ({
...account,
resourceId,
resource: {
id: resourceId,
name: resourceName,
resourceType
}
}));
return accounts.map(
({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({
...account,
resourceId,
resource: {
id: resourceId,
name: resourceName,
resourceType,
encryptedRotationAccountCredentials
}
})
);
};
return { ...orm, findWithResourceDetails };
const findAccountsDueForRotation = async (tx?: Knex) => {
const dbClient = tx || db.replicaNode();
const accounts = await dbClient(TableName.PamAccount)
.innerJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`)
.whereNotNull(`${TableName.PamResource}.encryptedRotationAccountCredentials`)
.whereNotNull(`${TableName.PamAccount}.rotationIntervalSeconds`)
.where(`${TableName.PamAccount}.rotationEnabled`, true)
.whereRaw(
`COALESCE("${TableName.PamAccount}"."lastRotatedAt", "${TableName.PamAccount}"."createdAt") + "${TableName.PamAccount}"."rotationIntervalSeconds" * interval '1 second' < NOW()`
)
.select(selectAllTableCols(TableName.PamAccount));
return accounts;
};
return { ...orm, findWithResourceDetails, findAccountsDueForRotation };
};
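Editor's sketch (not part of the diff): the SQL predicate in findAccountsDueForRotation restated in TypeScript; the DAL query additionally requires the parent resource to have rotation credentials configured:

// Sketch only: an account is due when rotation is enabled, an interval is set,
// and the last rotation (or creation) is older than that interval.
const isDueForRotation = (account: {
  rotationEnabled: boolean;
  rotationIntervalSeconds: number | null;
  lastRotatedAt: Date | null;
  createdAt: Date;
}): boolean => {
  if (!account.rotationEnabled || account.rotationIntervalSeconds === null) return false;
  const anchor = account.lastRotatedAt ?? account.createdAt;
  return anchor.getTime() + account.rotationIntervalSeconds * 1000 < Date.now();
};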

View File

@@ -45,17 +45,47 @@ export const decryptAccountCredentials = async ({
return JSON.parse(decryptedPlainTextBlob.toString()) as TPamAccountCredentials;
};
export const decryptAccount = async <T extends { encryptedCredentials: Buffer }>(
export const decryptAccountMessage = async ({
projectId,
encryptedMessage,
kmsService
}: {
projectId: string;
encryptedMessage: Buffer;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedPlainTextBlob = decryptor({
cipherTextBlob: encryptedMessage
});
return decryptedPlainTextBlob.toString();
};
export const decryptAccount = async <
T extends { encryptedCredentials: Buffer; encryptedLastRotationMessage?: Buffer | null }
>(
account: T,
projectId: string,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
): Promise<T & { credentials: TPamAccountCredentials }> => {
): Promise<T & { credentials: TPamAccountCredentials; lastRotationMessage: string | null }> => {
return {
...account,
credentials: await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
projectId,
kmsService
})
} as T & { credentials: TPamAccountCredentials };
}),
lastRotationMessage: account.encryptedLastRotationMessage
? await decryptAccountMessage({
encryptedMessage: account.encryptedLastRotationMessage,
projectId,
kmsService
})
: null
};
};
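Editor's sketch (not part of the diff): consuming the extended decryptAccount result; account, kmsService, and logger are assumed to be in scope:

// Sketch only: lastRotationMessage is null unless a failure message was recorded.
const decrypted = await decryptAccount(account, account.projectId, kmsService);
if (decrypted.lastRotationMessage) {
  logger.warn({ accountId: account.id }, `Last rotation failed: ${decrypted.lastRotationMessage}`);
}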

View File

@@ -11,12 +11,15 @@ import {
} from "@app/ee/services/permission/project-permission";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import { TLicenseServiceFactory } from "../license/license-service";
import { TPamFolderDALFactory } from "../pam-folder/pam-folder-dal";
@@ -45,10 +48,12 @@ type TPamAccountServiceFactoryDep = {
"getPAMConnectionDetails" | "getPlatformConnectionDetailsByGatewayId"
>;
userDAL: TUserDALFactory;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
};
export type TPamAccountServiceFactory = ReturnType<typeof pamAccountServiceFactory>;
const ROTATION_CONCURRENCY_LIMIT = 10;
export const pamAccountServiceFactory = ({
pamResourceDAL,
pamSessionDAL,
@@ -59,10 +64,19 @@ export const pamAccountServiceFactory = ({
permissionService,
licenseService,
kmsService,
gatewayV2Service
gatewayV2Service,
auditLogService
}: TPamAccountServiceFactoryDep) => {
const create = async (
{ credentials, resourceId, name, description, folderId }: TCreateAccountDTO,
{
credentials,
resourceId,
name,
description,
folderId,
rotationEnabled,
rotationIntervalSeconds
}: TCreateAccountDTO,
actor: OrgServiceActor
) => {
const orgLicensePlan = await licenseService.getPlan(actor.orgId);
@@ -72,6 +86,12 @@ export const pamAccountServiceFactory = ({
});
}
if (rotationEnabled && (rotationIntervalSeconds === undefined || rotationIntervalSeconds === null)) {
throw new BadRequestError({
message: "Rotation interval must be defined when rotation is enabled."
});
}
const resource = await pamResourceDAL.findById(resourceId);
if (!resource) throw new NotFoundError({ message: `Resource with ID '${resourceId}' not found` });
@@ -84,6 +104,10 @@ export const pamAccountServiceFactory = ({
actionProjectType: ActionProjectType.PAM
});
if (!resource.encryptedRotationAccountCredentials && rotationEnabled) {
throw new NotFoundError({ message: "Rotation credentials are not configured for this account's resource" });
}
const accountPath = await getFullPamFolderPath({
pamFolderDAL,
folderId,
@@ -126,12 +150,19 @@ export const pamAccountServiceFactory = ({
encryptedCredentials,
name,
description,
folderId
folderId,
rotationEnabled,
rotationIntervalSeconds
});
return {
...(await decryptAccount(account, resource.projectId, kmsService)),
resource: { id: resource.id, name: resource.name, resourceType: resource.resourceType }
resource: {
id: resource.id,
name: resource.name,
resourceType: resource.resourceType,
rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials
}
};
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
@@ -145,7 +176,7 @@ export const pamAccountServiceFactory = ({
};
const updateById = async (
{ accountId, credentials, description, name }: TUpdateAccountDTO,
{ accountId, credentials, description, name, rotationEnabled, rotationIntervalSeconds }: TUpdateAccountDTO,
actor: OrgServiceActor
) => {
const orgLicensePlan = await licenseService.getPlan(actor.orgId);
@@ -195,6 +226,17 @@ export const pamAccountServiceFactory = ({
updateDoc.description = description;
}
if (rotationEnabled !== undefined) {
if (!resource.encryptedRotationAccountCredentials && rotationEnabled) {
throw new NotFoundError({ message: "Rotation credentials are not configured for this account's resource" });
}
updateDoc.rotationEnabled = rotationEnabled;
}
if (rotationIntervalSeconds !== undefined) {
updateDoc.rotationIntervalSeconds = rotationIntervalSeconds;
}
if (credentials !== undefined) {
const connectionDetails = await decryptResourceConnectionDetails({
projectId: account.projectId,
@@ -211,7 +253,7 @@ export const pamAccountServiceFactory = ({
// Logic to prevent overwriting unedited censored values
const finalCredentials = { ...credentials };
if (credentials.password === "******") {
if (credentials.password === "__INFISICAL_UNCHANGED__") {
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
projectId: account.projectId,
@@ -239,7 +281,12 @@ export const pamAccountServiceFactory = ({
return {
...(await decryptAccount(updatedAccount, account.projectId, kmsService)),
resource: { id: resource.id, name: resource.name, resourceType: resource.resourceType }
resource: {
id: resource.id,
name: resource.name,
resourceType: resource.resourceType,
rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials
}
};
};
@@ -278,7 +325,12 @@ export const pamAccountServiceFactory = ({
return {
...(await decryptAccount(deletedAccount, account.projectId, kmsService)),
resource: { id: resource.id, name: resource.name, resourceType: resource.resourceType }
resource: {
id: resource.id,
name: resource.name,
resourceType: resource.resourceType,
rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials
}
};
};
@@ -300,8 +352,9 @@ export const pamAccountServiceFactory = ({
const decryptedAndPermittedAccounts: Array<
TPamAccounts & {
resource: Pick<TPamResources, "id" | "name" | "resourceType">;
resource: Pick<TPamResources, "id" | "name" | "resourceType"> & { rotationCredentialsConfigured: boolean };
credentials: TPamAccountCredentials;
lastRotationMessage: string | null;
}
> = [];
@@ -325,12 +378,14 @@ export const pamAccountServiceFactory = ({
) {
// Decrypt the account only if the user has permission to read it
const decryptedAccount = await decryptAccount(account, account.projectId, kmsService);
decryptedAndPermittedAccounts.push({
...decryptedAccount,
resource: {
id: account.resource.id,
name: account.resource.name,
resourceType: account.resource.resourceType
resourceType: account.resource.resourceType,
rotationCredentialsConfigured: !!account.resource.encryptedRotationAccountCredentials
}
});
}
@@ -517,12 +572,131 @@ export const pamAccountServiceFactory = ({
};
};
const rotateAllDueAccounts = async () => {
const accounts = await pamAccountDAL.findAccountsDueForRotation();
for (let i = 0; i < accounts.length; i += ROTATION_CONCURRENCY_LIMIT) {
const batch = accounts.slice(i, i + ROTATION_CONCURRENCY_LIMIT);
const rotationPromises = batch.map(async (account) => {
let logResourceType = "unknown";
try {
await pamAccountDAL.transaction(async (tx) => {
const resource = await pamResourceDAL.findById(account.resourceId, tx);
if (!resource || !resource.encryptedRotationAccountCredentials) return;
logResourceType = resource.resourceType;
const { connectionDetails, rotationAccountCredentials, gatewayId, resourceType } = await decryptResource(
resource,
account.projectId,
kmsService
);
if (!rotationAccountCredentials) return;
const accountCredentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
projectId: account.projectId,
kmsService
});
const factory = PAM_RESOURCE_FACTORY_MAP[resourceType as PamResource](
resourceType as PamResource,
connectionDetails,
gatewayId,
gatewayV2Service
);
const newCredentials = await factory.rotateAccountCredentials(
rotationAccountCredentials,
accountCredentials
);
const encryptedCredentials = await encryptAccountCredentials({
credentials: newCredentials,
projectId: account.projectId,
kmsService
});
await pamAccountDAL.updateById(
account.id,
{
encryptedCredentials,
lastRotatedAt: new Date(),
rotationStatus: "success",
encryptedLastRotationMessage: null
},
tx
);
await auditLogService.createAuditLog({
projectId: account.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.PAM_ACCOUNT_CREDENTIAL_ROTATION,
metadata: {
accountId: account.id,
accountName: account.name,
resourceId: resource.id,
resourceType: logResourceType
}
}
});
});
} catch (error) {
logger.error(error, `Failed to rotate credentials for account [accountId=${account.id}]`);
const errorMessage = error instanceof Error ? error.message : "An unknown error occurred";
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: account.projectId
});
const { cipherTextBlob: encryptedMessage } = encryptor({
plainText: Buffer.from(errorMessage)
});
await pamAccountDAL.updateById(account.id, {
rotationStatus: "failed",
encryptedLastRotationMessage: encryptedMessage
});
await auditLogService.createAuditLog({
projectId: account.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.PAM_ACCOUNT_CREDENTIAL_ROTATION_FAILED,
metadata: {
accountId: account.id,
accountName: account.name,
resourceId: account.resourceId,
resourceType: logResourceType,
errorMessage
}
}
});
}
});
// eslint-disable-next-line no-await-in-loop
await Promise.all(rotationPromises);
}
};
return {
create,
updateById,
deleteById,
list,
access,
getSessionCredentials
getSessionCredentials,
rotateAllDueAccounts
};
};
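Editor's sketch (not part of the diff): the batching pattern used by rotateAllDueAccounts, pulled out for illustration — items are processed in fixed-size chunks so at most ROTATION_CONCURRENCY_LIMIT rotations run at once:

// Sketch only: generic chunked execution of async handlers.
const processInBatches = async <T>(items: T[], limit: number, handler: (item: T) => Promise<void>) => {
  for (let i = 0; i < items.length; i += limit) {
    // eslint-disable-next-line no-await-in-loop
    await Promise.all(items.slice(i, i + limit).map(handler));
  }
};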

View File

@@ -1,7 +1,10 @@
import { TPamAccount } from "../pam-resource/pam-resource-types";
// DTOs
export type TCreateAccountDTO = Pick<TPamAccount, "name" | "description" | "credentials" | "folderId" | "resourceId">;
export type TCreateAccountDTO = Pick<
TPamAccount,
"name" | "description" | "credentials" | "folderId" | "resourceId" | "rotationEnabled" | "rotationIntervalSeconds"
>;
export type TUpdateAccountDTO = Partial<Omit<TCreateAccountDTO, "folderId" | "resourceId">> & {
accountId: string;

View File

@@ -0,0 +1,8 @@
import { MySQLResourceListItemSchema } from "./mysql-resource-schemas";
export const getMySQLResourceListItem = () => {
return {
name: MySQLResourceListItemSchema.shape.name.value,
resource: MySQLResourceListItemSchema.shape.resource.value
};
};

View File

@@ -0,0 +1,76 @@
import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
} from "../pam-resource-schemas";
import {
BaseSqlAccountCredentialsSchema,
BaseSqlResourceConnectionDetailsSchema
} from "../shared/sql/sql-resource-schemas";
// Resources
export const MySQLResourceConnectionDetailsSchema = BaseSqlResourceConnectionDetailsSchema.extend({
// For MySQL, the database is often not provided when making the connection
database: z.string().trim()
});
export const MySQLAccountCredentialsSchema = BaseSqlAccountCredentialsSchema;
const BaseMySQLResourceSchema = BasePamResourceSchema.extend({ resourceType: z.literal(PamResource.MySQL) });
export const MySQLResourceSchema = BaseMySQLResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema,
rotationAccountCredentials: MySQLAccountCredentialsSchema.nullable().optional()
});
export const SanitizedMySQLResourceSchema = BaseMySQLResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema,
rotationAccountCredentials: MySQLAccountCredentialsSchema.pick({
username: true
})
.nullable()
.optional()
});
export const MySQLResourceListItemSchema = z.object({
name: z.literal("MySQL"),
resource: z.literal(PamResource.MySQL)
});
export const CreateMySQLResourceSchema = BaseCreatePamResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema,
rotationAccountCredentials: MySQLAccountCredentialsSchema.nullable().optional()
});
export const UpdateMySQLResourceSchema = BaseUpdatePamResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: MySQLAccountCredentialsSchema.nullable().optional()
});
// Accounts
export const MySQLAccountSchema = BasePamAccountSchema.extend({
credentials: MySQLAccountCredentialsSchema
});
export const CreateMySQLAccountSchema = BaseCreatePamAccountSchema.extend({
credentials: MySQLAccountCredentialsSchema
});
export const UpdateMySQLAccountSchema = BaseUpdatePamAccountSchema.extend({
credentials: MySQLAccountCredentialsSchema.optional()
});
export const SanitizedMySQLAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
credentials: MySQLAccountCredentialsSchema.pick({
username: true
})
});
// Sessions
export const MySQLSessionCredentialsSchema = MySQLResourceConnectionDetailsSchema.and(MySQLAccountCredentialsSchema);

View File

@@ -0,0 +1,16 @@
import { z } from "zod";
import {
MySQLAccountCredentialsSchema,
MySQLAccountSchema,
MySQLResourceConnectionDetailsSchema,
MySQLResourceSchema
} from "./mysql-resource-schemas";
// Resources
export type TMySQLResource = z.infer<typeof MySQLResourceSchema>;
export type TMySQLResourceConnectionDetails = z.infer<typeof MySQLResourceConnectionDetailsSchema>;
// Accounts
export type TMySQLAccount = z.infer<typeof MySQLAccountSchema>;
export type TMySQLAccountCredentials = z.infer<typeof MySQLAccountCredentialsSchema>;

View File

@@ -1,3 +1,4 @@
export enum PamResource {
Postgres = "postgres"
Postgres = "postgres",
MySQL = "mysql"
}

View File

@@ -5,5 +5,6 @@ import { sqlResourceFactory } from "./shared/sql/sql-resource-factory";
type TPamResourceFactoryImplementation = TPamResourceFactory<TPamResourceConnectionDetails, TPamAccountCredentials>;
export const PAM_RESOURCE_FACTORY_MAP: Record<PamResource, TPamResourceFactoryImplementation> = {
[PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation
[PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation,
[PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation
};
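For context, both Postgres and MySQL now resolve to the shared SQL factory; the map is consumed at call sites like the resource service later in this diff. A short sketch of that resolution, reusing names that appear in this changeset:

// Mirrors the call sites later in this diff (not new behavior):
const factory = PAM_RESOURCE_FACTORY_MAP[resource.resourceType as PamResource](
  resource.resourceType as PamResource,
  decryptedConnectionDetails,
  resource.gatewayId,
  gatewayV2Service
);
const validatedConnectionDetails = await factory.validateConnection();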

View File

@@ -2,11 +2,13 @@ import { TPamResources } from "@app/db/schemas";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { decryptAccountCredentials } from "../pam-account/pam-account-fns";
import { getMySQLResourceListItem } from "./mysql/mysql-resource-fns";
import { TPamResource, TPamResourceConnectionDetails } from "./pam-resource-types";
import { getPostgresResourceListItem } from "./postgres/postgres-resource-fns";
export const listResourceOptions = () => {
return [getPostgresResourceListItem()].sort((a, b) => a.name.localeCompare(b.name));
return [getPostgresResourceListItem(), getMySQLResourceListItem()].sort((a, b) => a.name.localeCompare(b.name));
};
// Resource
@@ -63,6 +65,13 @@ export const decryptResource = async (
encryptedConnectionDetails: resource.encryptedConnectionDetails,
projectId,
kmsService
})
}),
rotationAccountCredentials: resource.encryptedRotationAccountCredentials
? await decryptAccountCredentials({
encryptedCredentials: resource.encryptedRotationAccountCredentials,
projectId,
kmsService
})
: null
} as TPamResource;
};

View File

@@ -6,6 +6,7 @@ import { slugSchema } from "@app/server/lib/schemas";
// Resources
export const BasePamResourceSchema = PamResourcesSchema.omit({
encryptedConnectionDetails: true,
encryptedRotationAccountCredentials: true,
resourceType: true
});
@@ -30,17 +31,25 @@ export const BasePamAccountSchemaWithResource = BasePamAccountSchema.extend({
id: true,
name: true,
resourceType: true
})
}).extend({
rotationCredentialsConfigured: z.boolean()
}),
lastRotationMessage: z.string().nullable().optional(),
rotationStatus: z.string().nullable().optional()
});
export const BaseCreatePamAccountSchema = z.object({
resourceId: z.string().uuid(),
folderId: z.string().uuid().optional(),
name: slugSchema({ field: "name" }),
description: z.string().max(512).nullable().optional()
description: z.string().max(512).nullable().optional(),
rotationEnabled: z.boolean(),
rotationIntervalSeconds: z.number().min(3600).nullable().optional()
});
export const BaseUpdatePamAccountSchema = z.object({
name: slugSchema({ field: "name" }).optional(),
description: z.string().max(512).nullable().optional()
description: z.string().max(512).nullable().optional(),
rotationEnabled: z.boolean().optional(),
rotationIntervalSeconds: z.number().min(3600).nullable().optional()
});
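Assuming only the base schemas shown here, a create-account payload with the new rotation fields might look as follows; all values are made up, and a rotationIntervalSeconds below 3600 (one hour) is rejected by the .min(3600) constraint:

// Illustrative payload accepted by BaseCreatePamAccountSchema (values are made up):
const createAccountInput = BaseCreatePamAccountSchema.parse({
  resourceId: "1b4d9c2e-0f3a-4b7e-9c1d-2a6f8e5b7c3d",
  name: "readonly-reporting",
  description: "Read-only reporting account",
  rotationEnabled: true,
  rotationIntervalSeconds: 86400 // rotate daily; anything below 3600 fails validation
});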

View File

@@ -10,10 +10,16 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import { TLicenseServiceFactory } from "../license/license-service";
import { decryptAccountCredentials, encryptAccountCredentials } from "../pam-account/pam-account-fns";
import { TPamResourceDALFactory } from "./pam-resource-dal";
import { PamResource } from "./pam-resource-enums";
import { PAM_RESOURCE_FACTORY_MAP } from "./pam-resource-factory";
import { decryptResource, encryptResourceConnectionDetails, listResourceOptions } from "./pam-resource-fns";
import {
decryptResource,
decryptResourceConnectionDetails,
encryptResourceConnectionDetails,
listResourceOptions
} from "./pam-resource-fns";
import { TCreateResourceDTO, TUpdateResourceDTO } from "./pam-resource-types";
type TPamResourceServiceFactoryDep = {
@@ -61,7 +67,7 @@ export const pamResourceServiceFactory = ({
};
const create = async (
{ resourceType, connectionDetails, gatewayId, name, projectId }: TCreateResourceDTO,
{ resourceType, connectionDetails, gatewayId, name, projectId, rotationAccountCredentials }: TCreateResourceDTO,
actor: OrgServiceActor
) => {
const orgLicensePlan = await licenseService.getPlan(actor.orgId);
@@ -88,26 +94,42 @@ export const pamResourceServiceFactory = ({
gatewayId,
gatewayV2Service
);
const validatedConnectionDetails = await factory.validateConnection();
const validatedConnectionDetails = await factory.validateConnection();
const encryptedConnectionDetails = await encryptResourceConnectionDetails({
connectionDetails: validatedConnectionDetails,
projectId,
kmsService
});
let encryptedRotationAccountCredentials: Buffer | null = null;
if (rotationAccountCredentials) {
const validatedRotationAccountCredentials = await factory.validateAccountCredentials(rotationAccountCredentials);
encryptedRotationAccountCredentials = await encryptAccountCredentials({
credentials: validatedRotationAccountCredentials,
projectId,
kmsService
});
}
const resource = await pamResourceDAL.create({
resourceType,
encryptedConnectionDetails,
gatewayId,
name,
projectId
projectId,
encryptedRotationAccountCredentials
});
return decryptResource(resource, projectId, kmsService);
};
const updateById = async ({ connectionDetails, resourceId, name }: TUpdateResourceDTO, actor: OrgServiceActor) => {
const updateById = async (
{ connectionDetails, resourceId, name, rotationAccountCredentials }: TUpdateResourceDTO,
actor: OrgServiceActor
) => {
const orgLicensePlan = await licenseService.getPlan(actor.orgId);
if (!orgLicensePlan.pam) {
throw new BadRequestError({
@@ -151,6 +173,60 @@ export const pamResourceServiceFactory = ({
updateDoc.encryptedConnectionDetails = encryptedConnectionDetails;
}
if (rotationAccountCredentials !== undefined) {
updateDoc.encryptedRotationAccountCredentials = null;
if (rotationAccountCredentials) {
const decryptedConnectionDetails =
connectionDetails ??
(await decryptResourceConnectionDetails({
encryptedConnectionDetails: resource.encryptedConnectionDetails,
projectId: resource.projectId,
kmsService
}));
const factory = PAM_RESOURCE_FACTORY_MAP[resource.resourceType as PamResource](
resource.resourceType as PamResource,
decryptedConnectionDetails,
resource.gatewayId,
gatewayV2Service
);
// Logic to prevent overwriting unedited censored values
const finalCredentials = { ...rotationAccountCredentials };
if (
resource.encryptedRotationAccountCredentials &&
rotationAccountCredentials.password === "__INFISICAL_UNCHANGED__"
) {
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: resource.encryptedRotationAccountCredentials,
projectId: resource.projectId,
kmsService
});
finalCredentials.password = decryptedCredentials.password;
}
try {
const validatedRotationAccountCredentials = await factory.validateAccountCredentials(finalCredentials);
updateDoc.encryptedRotationAccountCredentials = await encryptAccountCredentials({
credentials: validatedRotationAccountCredentials,
projectId: resource.projectId,
kmsService
});
} catch (err) {
if (err instanceof BadRequestError) {
throw new BadRequestError({
message: `Rotation Account Error: ${err.message}`
});
}
throw err;
}
}
}
// If nothing was updated, return the fetched resource
if (Object.keys(updateDoc).length === 0) {
return decryptResource(resource, resource.projectId, kmsService);

View File

@@ -1,4 +1,10 @@
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import {
TMySQLAccount,
TMySQLAccountCredentials,
TMySQLResource,
TMySQLResourceConnectionDetails
} from "./mysql/mysql-resource-types";
import { PamResource } from "./pam-resource-enums";
import {
TPostgresAccount,
@@ -8,17 +14,18 @@ import {
} from "./postgres/postgres-resource-types";
// Resource types
export type TPamResource = TPostgresResource;
export type TPamResourceConnectionDetails = TPostgresResourceConnectionDetails;
export type TPamResource = TPostgresResource | TMySQLResource;
export type TPamResourceConnectionDetails = TPostgresResourceConnectionDetails | TMySQLResourceConnectionDetails;
// Account types
export type TPamAccount = TPostgresAccount;
export type TPamAccountCredentials = TPostgresAccountCredentials;
export type TPamAccount = TPostgresAccount | TMySQLAccount;
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials;
// Resource DTOs
export type TCreateResourceDTO = Pick<
TPamResource,
"name" | "connectionDetails" | "resourceType" | "gatewayId" | "projectId"
"name" | "connectionDetails" | "resourceType" | "gatewayId" | "projectId" | "rotationAccountCredentials"
>;
export type TUpdateResourceDTO = Partial<Omit<TCreateResourceDTO, "resourceType" | "projectId">> & {
@@ -30,6 +37,10 @@ export type TPamResourceFactoryValidateConnection<T extends TPamResourceConnecti
export type TPamResourceFactoryValidateAccountCredentials<C extends TPamAccountCredentials> = (
credentials: C
) => Promise<C>;
export type TPamResourceFactoryRotateAccountCredentials<C extends TPamAccountCredentials> = (
rotationAccountCredentials: C,
currentCredentials: C
) => Promise<C>;
export type TPamResourceFactory<T extends TPamResourceConnectionDetails, C extends TPamAccountCredentials> = (
resourceType: PamResource,
@@ -39,4 +50,5 @@ export type TPamResourceFactory<T extends TPamResourceConnectionDetails, C exten
) => {
validateConnection: TPamResourceFactoryValidateConnection<T>;
validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<C>;
rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<C>;
};

View File

@@ -15,13 +15,24 @@ import {
BaseSqlResourceConnectionDetailsSchema
} from "../shared/sql/sql-resource-schemas";
// Resources
export const PostgresResourceConnectionDetailsSchema = BaseSqlResourceConnectionDetailsSchema;
export const PostgresAccountCredentialsSchema = BaseSqlAccountCredentialsSchema;
// Resources
const BasePostgresResourceSchema = BasePamResourceSchema.extend({ resourceType: z.literal(PamResource.Postgres) });
export const PostgresResourceSchema = BasePostgresResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema
connectionDetails: PostgresResourceConnectionDetailsSchema,
rotationAccountCredentials: PostgresAccountCredentialsSchema.nullable().optional()
});
export const SanitizedPostgresResourceSchema = BasePostgresResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema,
rotationAccountCredentials: PostgresAccountCredentialsSchema.pick({
username: true
})
.nullable()
.optional()
});
export const PostgresResourceListItemSchema = z.object({
@@ -30,16 +41,16 @@ export const PostgresResourceListItemSchema = z.object({
});
export const CreatePostgresResourceSchema = BaseCreatePamResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema
connectionDetails: PostgresResourceConnectionDetailsSchema,
rotationAccountCredentials: PostgresAccountCredentialsSchema.nullable().optional()
});
export const UpdatePostgresResourceSchema = BaseUpdatePamResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema.optional()
connectionDetails: PostgresResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: PostgresAccountCredentialsSchema.nullable().optional()
});
// Accounts
export const PostgresAccountCredentialsSchema = BaseSqlAccountCredentialsSchema;
export const PostgresAccountSchema = BasePamAccountSchema.extend({
credentials: PostgresAccountCredentialsSchema
});

View File

@@ -1,4 +1,6 @@
import knex, { Knex } from "knex";
import knex from "knex";
import mysql, { Connection } from "mysql2/promise";
import * as pg from "pg";
import tls, { PeerCertificate } from "tls";
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
@@ -6,39 +8,176 @@ import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol } from "@app/lib/gateway";
import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { PamResource } from "../../pam-resource-enums";
import { TPamResourceFactory, TPamResourceFactoryValidateAccountCredentials } from "../../pam-resource-types";
import {
TPamResourceFactory,
TPamResourceFactoryRotateAccountCredentials,
TPamResourceFactoryValidateAccountCredentials
} from "../../pam-resource-types";
import { TSqlAccountCredentials, TSqlResourceConnectionDetails } from "./sql-resource-types";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const TEST_CONNECTION_USERNAME = "infisical-gateway-connection-test";
const TEST_CONNECTION_PASSWORD = "infisical-gateway-connection-test-password";
const SIMPLE_QUERY = "select 1";
const SQL_CONNECTION_CLIENT_MAP = {
[PamResource.Postgres]: "pg"
};
export interface SqlResourceConnection {
/**
* Check whether the connection is good.
*
* @param connectOnly when true, we only want to know whether making the connection is possible;
* authentication failures are ignored
* @returns Promise that resolves when the connection is good, and rejects with an error otherwise
*/
validate: (connectOnly: boolean) => Promise<void>;
const getConnectionConfig = (
resourceType: PamResource,
{ host, sslEnabled, sslRejectUnauthorized, sslCertificate }: TSqlResourceConnectionDetails
) => {
switch (resourceType) {
/**
* Rotate password and return the new credentials.
*
* @param currentCredentials the current credentials to rotate
* @param newPassword the new password to set for the account
*
* @returns Promise that resolves with the new credentials
*/
rotateCredentials: (
currentCredentials: TSqlAccountCredentials,
newPassword: string
) => Promise<TSqlAccountCredentials>;
/**
* Close the connection.
*
* @returns Promise for closing the connection
*/
close: () => Promise<void>;
}
const makeSqlConnection = (
proxyPort: number,
config: {
connectionDetails: TSqlResourceConnectionDetails;
resourceType: PamResource;
username?: string;
password?: string;
}
): SqlResourceConnection => {
const { connectionDetails, resourceType, username, password } = config;
const { host, sslEnabled, sslRejectUnauthorized, sslCertificate } = connectionDetails;
const actualUsername = username ?? TEST_CONNECTION_USERNAME; // Use provided username or fallback
const actualPassword = password ?? TEST_CONNECTION_PASSWORD; // Use provided password or fallback
switch (config.resourceType) {
case PamResource.Postgres: {
const client = knex({
client: "pg",
connection: {
host: "localhost",
port: proxyPort,
user: actualUsername,
password: actualPassword,
database: connectionDetails.database,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
ssl: sslEnabled
? {
rejectUnauthorized: sslRejectUnauthorized,
ca: sslCertificate,
servername: host,
// When using proxy, we need to bypass hostname validation since we connect to localhost
// but validate the certificate against the actual hostname
checkServerIdentity: (hostname: string, cert: PeerCertificate) => {
return tls.checkServerIdentity(host, cert);
}
}
: false
}
});
return {
ssl: sslEnabled
? {
rejectUnauthorized: sslRejectUnauthorized,
ca: sslCertificate,
servername: host,
// When using proxy, we need to bypass hostname validation since we connect to localhost
// but validate the certificate against the actual hostname
checkServerIdentity: (hostname: string, cert: PeerCertificate) => {
return tls.checkServerIdentity(host, cert);
validate: async (connectOnly) => {
try {
await client.raw(SIMPLE_QUERY);
} catch (error) {
if (error instanceof pg.DatabaseError) {
// Hacky way to know if we successfully hit the database.
// TODO: there are potentially two approaches to solve this properly:
// 1. change the workflow to add the account first, then the resource
// 2. modify the relay to add a new endpoint that reports whether the target host is healthy
//    (i.e. able to complete an auth handshake, regardless of whether auth passes)
if (
connectOnly &&
(error.message === `password authentication failed for user "${TEST_CONNECTION_USERNAME}"` ||
error.message.includes("no pg_hba.conf entry for host"))
) {
return;
}
}
: false
throw new BadRequestError({
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
});
}
},
rotateCredentials: async (currentCredentials, newPassword) => {
// Note: the generated random password is not realistically going to enable SQL injection.
// The reason we are not using parameter binding is that "ALTER USER" is DDL, where
// parameter binding is not supported. But in case this code ever gets copied around
// and repurposed, do some naive escaping regardless.
await client.raw(`ALTER USER :username: WITH PASSWORD '${newPassword.replace(/'/g, "''")}'`, {
username: currentCredentials.username
});
return { username: currentCredentials.username, password: newPassword };
},
close: () => client.destroy()
};
}
case PamResource.MySQL: {
return {
validate: async (connectOnly) => {
let client: Connection | null = null;
try {
// Notice: we are not using Knex for MySQL because we don't need any of Knex's features here,
// and mysql2 doesn't allow passing in a custom SSL verification function.
// ref: https://github.com/sidorares/node-mysql2/blob/2543272a2ada8d8a07f74582549d7dd3fe948e2d/lib/base/connection.js#L358-L362
// Working around that with Knex's pool afterCreate hook runs into a bug:
// ref: https://github.com/knex/knex/issues/5352
// Since we are only checking the connection, Knex causes more trouble than it is worth,
// so it is much simpler to create a raw connection with the driver library directly.
client = await mysql.createConnection({
host: "localhost",
port: proxyPort,
user: actualUsername, // Use provided username or fallback
password: actualPassword, // Use provided password or fallback
database: connectionDetails.database,
ssl: sslEnabled
? {
rejectUnauthorized: sslRejectUnauthorized,
ca: sslCertificate
}
: undefined
});
await client.query(SIMPLE_QUERY);
} catch (error) {
if (connectOnly) {
// Hacky way to know if we successfully hit the database.
if (
error instanceof Error &&
error.message.startsWith(`Access denied for user '${TEST_CONNECTION_USERNAME}'@`)
) {
return;
}
}
// TODO: handle other errors and throw standardized errors with user-friendly messages
throw error;
} finally {
await client?.end();
}
},
rotateCredentials: async () => {
// TODO: password rotation for MySQL is not supported yet
throw new BadRequestError({
message: "Unsupported operation"
});
},
close: async () => {}
};
}
default:
@@ -57,10 +196,9 @@ export const executeWithGateway = async <T>(
password?: string;
},
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
operation: (client: Knex) => Promise<T>
operation: (connection: SqlResourceConnection) => Promise<T>
): Promise<T> => {
const { connectionDetails, resourceType, gatewayId, username, password } = config;
const { connectionDetails, gatewayId } = config;
const [targetHost] = await verifyHostInputValidity(connectionDetails.host, true);
const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({
gatewayId,
@@ -74,22 +212,11 @@ export const executeWithGateway = async <T>(
return withGatewayV2Proxy(
async (proxyPort) => {
const client = knex({
client: SQL_CONNECTION_CLIENT_MAP[resourceType],
connection: {
database: connectionDetails.database,
port: proxyPort,
host: "localhost",
user: username ?? TEST_CONNECTION_USERNAME, // Use provided username or fallback
password: password ?? TEST_CONNECTION_PASSWORD, // Use provided password or fallback
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
...getConnectionConfig(resourceType, connectionDetails)
}
});
const connection = makeSqlConnection(proxyPort, config);
try {
return await operation(client);
return await operation(connection);
} finally {
await client.destroy();
await connection.close();
}
},
{
@@ -110,25 +237,14 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
const validateConnection = async () => {
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (client) => {
await client.raw("Select 1");
await client.validate(true);
});
return connectionDetails;
} catch (error) {
// Hacky way to know if we successfully hit the database
if (error instanceof BadRequestError) {
if (error.message === `password authentication failed for user "${TEST_CONNECTION_USERNAME}"`) {
return connectionDetails;
}
if (error.message.includes("no pg_hba.conf entry for host")) {
return connectionDetails;
}
if (error.message === "Connection terminated unexpectedly") {
throw new BadRequestError({
message: "Connection terminated unexpectedly. Verify that host and port are correct"
});
}
if (error instanceof BadRequestError && error.message === "Connection terminated unexpectedly") {
throw new BadRequestError({
message: "Connection terminated unexpectedly. Verify that host and port are correct"
});
}
throw new BadRequestError({
@@ -151,11 +267,12 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
},
gatewayV2Service,
async (client) => {
await client.raw("Select 1");
await client.validate(false);
}
);
return credentials;
} catch (error) {
// TODO: extract this logic into each SQL connection
if (error instanceof BadRequestError) {
if (error.message === `password authentication failed for user "${credentials.username}"`) {
throw new BadRequestError({
@@ -176,8 +293,55 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
}
};
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<TSqlAccountCredentials> = async (
rotationAccountCredentials,
currentCredentials
) => {
const newPassword = alphaNumericNanoId(32);
try {
return await executeWithGateway(
{
connectionDetails,
gatewayId,
resourceType,
username: rotationAccountCredentials.username,
password: rotationAccountCredentials.password
},
gatewayV2Service,
(client) => client.rotateCredentials(currentCredentials, newPassword)
);
} catch (error) {
if (error instanceof BadRequestError) {
if (error.message === `password authentication failed for user "${rotationAccountCredentials.username}"`) {
throw new BadRequestError({
message: "Management credentials invalid: Username or password incorrect"
});
}
if (error.message.includes("permission denied")) {
throw new BadRequestError({
message: `Management credentials lack permission to rotate password for user "${currentCredentials.username}"`
});
}
if (error.message === "Connection terminated unexpectedly") {
throw new BadRequestError({
message: "Connection terminated unexpectedly. Verify that host and port are correct"
});
}
}
const sanitizedErrorMessage = ((error as Error).message || String(error)).replaceAll(newPassword, "REDACTED");
throw new BadRequestError({
message: `Unable to rotate account credentials for ${resourceType}: ${sanitizedErrorMessage}`
});
}
};
return {
validateConnection,
validateAccountCredentials
validateAccountCredentials,
rotateAccountCredentials
};
};

View File

@@ -16,6 +16,6 @@ export const BaseSqlResourceConnectionDetailsSchema = z.object({
// Accounts
export const BaseSqlAccountCredentialsSchema = z.object({
username: z.string().trim().min(1),
password: z.string().trim().min(1)
username: z.string().trim().min(1).max(63),
password: z.string().trim().min(1).max(256)
});
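The tightened length limits can be exercised directly. The 63-character username cap lines up with the common SQL identifier limit (e.g. Postgres's 63-byte NAMEDATALEN limit), though that rationale is inferred rather than stated in this diff:

// Quick illustration of the new constraints:
BaseSqlAccountCredentialsSchema.safeParse({ username: "app_user", password: "s3cret" }).success; // true
BaseSqlAccountCredentialsSchema.safeParse({ username: "x".repeat(64), password: "s3cret" }).success; // false, username max is 63
BaseSqlAccountCredentialsSchema.safeParse({ username: "app_user", password: "p".repeat(257) }).success; // false, password max is 256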

View File

@@ -1,7 +1,9 @@
import { TMySQLAccountCredentials, TMySQLResourceConnectionDetails } from "../../mysql/mysql-resource-types";
import {
TPostgresAccountCredentials,
TPostgresResourceConnectionDetails
} from "../../postgres/postgres-resource-types";
export type TSqlResourceConnectionDetails = TPostgresResourceConnectionDetails;
export type TSqlAccountCredentials = TPostgresAccountCredentials;
export type TSqlResourceConnectionDetails = TPostgresResourceConnectionDetails | TMySQLResourceConnectionDetails;
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
export type TSqlAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials;

View File

@@ -337,6 +337,12 @@ export const permissionServiceFactory = ({
throw new NotFoundError({ message: `Project with ${projectId} not found` });
}
requestContext.set("projectDetails", {
id: projectDetails.id,
name: projectDetails.name,
slug: projectDetails.slug
});
if (projectDetails.orgId !== actorOrgId) {
throw new ForbiddenRequestError({ name: "You are not logged into this organization" });
}

View File

@@ -84,7 +84,7 @@ type TSamlConfigServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findById" | "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find">;
membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find" | "create">;
};
export const samlConfigServiceFactory = ({
@@ -183,6 +183,22 @@ export const samlConfigServiceFactory = ({
transaction
);
orgGroupsMap.set(groupName, newGroup);
const orgMembership = await membershipGroupDAL.create(
{
actorGroupId: newGroup.id,
scope: AccessScope.Organization,
scopeOrgId: orgId
},
transaction
);
await membershipRoleDAL.create(
{
membershipId: orgMembership.id,
role: OrgMembershipRole.NoAccess,
customRoleId: null
},
transaction
);
}
}
@@ -753,7 +769,7 @@ export const samlConfigServiceFactory = ({
});
}
return { isUserCompleted, providerAuthToken };
return { isUserCompleted, providerAuthToken, user, organization };
};
return {

View File

@@ -1,4 +1,4 @@
import { TSamlConfigs } from "@app/db/schemas";
import { TOrganizations, TSamlConfigs, TUsers } from "@app/db/schemas";
import { TOrgPermission } from "@app/lib/types";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
@@ -78,5 +78,7 @@ export type TSamlConfigServiceFactory = {
samlLogin: (arg: TSamlLoginDTO) => Promise<{
isUserCompleted: boolean;
providerAuthToken: string;
user: TUsers;
organization: TOrganizations;
}>;
};

View File

@@ -1517,7 +1517,7 @@ export const secretApprovalRequestServiceFactory = ({
}))
);
if (secrets.length)
throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` });
throw new BadRequestError({ message: `Secret already exists: ${secrets.map((el) => el.key).join(",")}` });
commits.push(
...createdSecrets.map((createdSecret) => ({

View File

@@ -2348,6 +2348,9 @@ export const AppConnections = {
RAILWAY: {
apiToken: "The API token used to authenticate with Railway."
},
NORTHFLANK: {
apiToken: "The API token used to authenticate with Northflank."
},
CHECKLY: {
apiKey: "The API key used to authenticate with Checkly."
},
@@ -2620,6 +2623,12 @@ export const SecretSyncs = {
siteName: "The name of the Netlify site to sync secrets to.",
siteId: "The ID of the Netlify site to sync secrets to.",
context: "The Netlify context to sync secrets to."
},
NORTHFLANK: {
projectId: "The ID of the Northflank project to sync secrets to.",
projectName: "The name of the Northflank project to sync secrets to.",
secretGroupId: "The ID of the Northflank secret group to sync secrets to.",
secretGroupName: "The name of the Northflank secret group to sync secrets to."
}
}
};

View File

@@ -1,7 +1,9 @@
import { z } from "zod";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { crypto } from "@app/lib/crypto/cryptography";
import { QueueWorkerProfile } from "@app/lib/types";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { BadRequestError } from "../errors";
@@ -363,11 +365,6 @@ const envSchema = z
/* INTERNAL ----------------------------------------------------------------------------- */
INTERNAL_REGION: zpStr(z.enum(["us", "eu"]).optional())
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.refine(
(data) => Boolean(data.REDIS_URL) || Boolean(data.REDIS_SENTINEL_HOSTS) || Boolean(data.REDIS_CLUSTER_HOSTS),
"Either REDIS_URL, REDIS_SENTINEL_HOSTS or REDIS_CLUSTER_HOSTS must be defined."
@@ -453,7 +450,12 @@ export const getConfig = () => envCfg;
export const getOriginalConfig = () => originalEnvConfig;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = async (superAdminDAL?: TSuperAdminDALFactory, logger?: CustomLogger) => {
export const initEnvConfig = async (
hsmService: THsmServiceFactory,
kmsRootConfigDAL: TKmsRootConfigDALFactory,
superAdminDAL?: TSuperAdminDALFactory,
logger?: CustomLogger
) => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
(logger ?? console).error("Invalid environment variables. Check the error below");
@@ -469,7 +471,7 @@ export const initEnvConfig = async (superAdminDAL?: TSuperAdminDALFactory, logge
}
if (superAdminDAL) {
const fipsEnabled = await crypto.initialize(superAdminDAL);
const fipsEnabled = await crypto.initialize(superAdminDAL, hsmService, kmsRootConfigDAL);
if (fipsEnabled) {
const newEnvCfg = {
@@ -532,6 +534,22 @@ export const getDatabaseCredentials = (logger?: CustomLogger) => {
};
};
export const getHsmConfig = (logger?: CustomLogger) => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
(logger ?? console).error("Invalid environment variables. Check the error below");
(logger ?? console).error(parsedEnv.error.issues);
process.exit(-1);
}
return {
isHsmConfigured: parsedEnv.data.isHsmConfigured,
HSM_PIN: parsedEnv.data.HSM_PIN,
HSM_SLOT: parsedEnv.data.HSM_SLOT,
HSM_LIB_PATH: parsedEnv.data.HSM_LIB_PATH,
HSM_KEY_LABEL: parsedEnv.data.HSM_KEY_LABEL
};
};
// A list of environment variables that can be overwritten
export const overwriteSchema: {
[key: string]: {

View File

@@ -9,7 +9,11 @@ import nacl from "tweetnacl";
import naclUtils from "tweetnacl-util";
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { isHsmActiveAndEnabled } from "@app/ee/services/hsm/hsm-fns";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { ADMIN_CONFIG_DB_UUID } from "@app/services/super-admin/super-admin-service";
@@ -106,49 +110,73 @@ const cryptographyFactory = () => {
}
};
const $setFipsModeEnabled = (enabled: boolean, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
const $setFipsModeEnabled = async (
enabled: boolean,
hsmService: THsmServiceFactory,
kmsRootConfigDAL: TKmsRootConfigDALFactory,
envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">
) => {
// If FIPS is enabled, we need to validate that the ENCRYPTION_KEY is in a base64 format, and is a 256-bit key.
if (enabled) {
crypto.setFips(true);
const appCfg = envCfg || getConfig();
if (appCfg.ENCRYPTION_KEY) {
// we need to validate that the ENCRYPTION_KEY is a base64 encoded 256-bit key
const hsmStatus = await isHsmActiveAndEnabled({
hsmService,
kmsRootConfigDAL
});
// note(daniel): for some reason this resolves as true for some hex-encoded strings.
if (!isBase64(appCfg.ENCRYPTION_KEY)) {
// if the encryption strategy is software - user needs to provide an encryption key
// if the encryption strategy is null AND the hsm is not configured - user needs to provide an encryption key
const needsEncryptionKey =
hsmStatus.rootKmsConfigEncryptionStrategy === RootKeyEncryptionStrategy.Software ||
(hsmStatus.rootKmsConfigEncryptionStrategy === null && !hsmStatus.isHsmConfigured);
// only perform encryption key validation if it's actually required.
if (needsEncryptionKey) {
if (appCfg.ENCRYPTION_KEY) {
// we need to validate that the ENCRYPTION_KEY is a base64 encoded 256-bit key
// note(daniel): for some reason this resolves as true for some hex-encoded strings.
if (!isBase64(appCfg.ENCRYPTION_KEY)) {
throw new CryptographyError({
message:
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not a base64 encoded 256-bit key.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
});
}
if (bytesToBits(Buffer.from(appCfg.ENCRYPTION_KEY, "base64").length) !== 256) {
throw new CryptographyError({
message:
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not a 256-bit key.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
});
}
} else {
throw new CryptographyError({
message:
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not a base64 encoded 256-bit key.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not set.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
});
}
if (bytesToBits(Buffer.from(appCfg.ENCRYPTION_KEY, "base64").length) !== 256) {
throw new CryptographyError({
message:
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not a 256-bit key.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
});
}
} else {
throw new CryptographyError({
message:
"FIPS mode is enabled, but the ENCRYPTION_KEY environment variable is not set.\nYou can generate a 256-bit key using the following command: `openssl rand -base64 32`"
});
}
}
$fipsEnabled = enabled;
$isInitialized = true;
};
const initialize = async (superAdminDAL: TSuperAdminDALFactory, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
const initialize = async (
superAdminDAL: TSuperAdminDALFactory,
hsmService: THsmServiceFactory,
kmsRootConfigDAL: TKmsRootConfigDALFactory,
envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">
) => {
if ($isInitialized) {
return isFipsModeEnabled();
}
if (process.env.FIPS_ENABLED !== "true") {
logger.info("Cryptography module initialized in normal operation mode.");
$setFipsModeEnabled(false, envCfg);
await $setFipsModeEnabled(false, hsmService, kmsRootConfigDAL, envCfg);
return false;
}
@@ -158,11 +186,11 @@ const cryptographyFactory = () => {
if (serverCfg) {
if (serverCfg.fipsEnabled) {
logger.info("[FIPS]: Instance is configured for FIPS mode of operation. Continuing startup with FIPS enabled.");
$setFipsModeEnabled(true, envCfg);
await $setFipsModeEnabled(true, hsmService, kmsRootConfigDAL, envCfg);
return true;
}
logger.info("[FIPS]: Instance age predates FIPS mode inception date. Continuing without FIPS.");
$setFipsModeEnabled(false, envCfg);
await $setFipsModeEnabled(false, hsmService, kmsRootConfigDAL, envCfg);
return false;
}
@@ -171,7 +199,7 @@ const cryptographyFactory = () => {
// TODO(daniel): check if it's an enterprise deployment
// if there is no server cfg, and FIPS_MODE is `true`, it's a fresh FIPS deployment. We need to set fipsEnabled to true.
$setFipsModeEnabled(true, envCfg);
await $setFipsModeEnabled(true, hsmService, kmsRootConfigDAL, envCfg);
return true;
};
@@ -258,6 +286,13 @@ const cryptographyFactory = () => {
const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
const encryptionKey = appCfg.ENCRYPTION_KEY;
// Sanity check
if (!rootEncryptionKey && !encryptionKey) {
throw new CryptographyError({
message: "Tried to encrypt with instance root encryption key, but no root encryption key is set."
});
}
if (rootEncryptionKey) {
const { iv, tag, ciphertext } = encrypt({
plaintext: data,
@@ -303,6 +338,14 @@ const cryptographyFactory = () => {
// the OR fallback is used in migrations
const rootEncryptionKey = appCfg?.ROOT_ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
const encryptionKey = appCfg?.ENCRYPTION_KEY || process.env.ENCRYPTION_KEY;
// Sanity check
if (!rootEncryptionKey && !encryptionKey) {
throw new CryptographyError({
message: "Tried to decrypt with instance root encryption key, but no root encryption key is set."
});
}
if (rootEncryptionKey && keyEncoding === SecretKeyEncoding.BASE64) {
const data = symmetric().decrypt({
key: rootEncryptionKey,
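In the FIPS path above, ENCRYPTION_KEY must be a base64-encoded 256-bit key, and only when the root KMS strategy is software encryption (or unset with no HSM configured). A small sketch of producing and checking such a key, equivalent to the openssl rand -base64 32 command referenced in the error messages:

import { randomBytes } from "crypto";

// Equivalent of `openssl rand -base64 32`: 32 random bytes => 256 bits, base64-encoded.
const encryptionKey = randomBytes(32).toString("base64");

// The same shape of check the FIPS path performs: base64-decodable and exactly 256 bits long.
const keyBits = Buffer.from(encryptionKey, "base64").length * 8;
console.log(keyBits === 256); // true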

View File

@@ -7,6 +7,7 @@ import https from "https";
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { splitPemChain } from "@app/services/certificate/certificate-fns";
import { getConfig } from "../config/env";
import { BadRequestError } from "../errors";
import { GatewayProxyProtocol } from "../gateway/types";
import { logger } from "../logger";
@@ -80,6 +81,8 @@ const createGatewayConnection = async (
gateway: { clientCertificate: string; clientPrivateKey: string; serverCertificateChain: string },
protocol: GatewayProxyProtocol
): Promise<net.Socket> => {
const appCfg = getConfig();
const protocolToAlpn = {
[GatewayProxyProtocol.Http]: "infisical-http-proxy",
[GatewayProxyProtocol.Tcp]: "infisical-tcp-proxy",
@@ -94,7 +97,8 @@ const createGatewayConnection = async (
minVersion: "TLSv1.2",
maxVersion: "TLSv1.3",
rejectUnauthorized: true,
ALPNProtocols: [protocolToAlpn[protocol]]
ALPNProtocols: [protocolToAlpn[protocol]],
checkServerIdentity: appCfg.isDevelopmentMode ? () => undefined : tls.checkServerIdentity
};
return new Promise((resolve, reject) => {

View File

@@ -0,0 +1,100 @@
import { requestContext } from "@fastify/request-context";
import opentelemetry from "@opentelemetry/api";
import { getConfig } from "../config/env";
const infisicalMeter = opentelemetry.metrics.getMeter("Infisical");
export enum AuthAttemptAuthMethod {
EMAIL = "email",
SAML = "saml",
OIDC = "oidc",
GOOGLE = "google",
GITHUB = "github",
GITLAB = "gitlab",
TOKEN_AUTH = "token-auth",
UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
TLS_CERT_AUTH = "tls-cert-auth",
OCI_AUTH = "oci-auth",
OIDC_AUTH = "oidc-auth",
JWT_AUTH = "jwt-auth",
LDAP_AUTH = "ldap-auth"
}
export enum AuthAttemptAuthResult {
SUCCESS = "success",
FAILURE = "failure"
}
export const authAttemptCounter = infisicalMeter.createCounter("infisical.auth.attempt.count", {
description: "Authentication attempts (both successful and failed)",
unit: "{attempt}"
});
export const secretReadCounter = infisicalMeter.createCounter("infisical.secret.read.count", {
description: "Number of secret read operations",
unit: "{operation}"
});
export const recordSecretReadMetric = (params: { environment: string; secretPath: string; name?: string }) => {
const appCfg = getConfig();
if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
const attributes: Record<string, string> = {
"infisical.environment": params.environment,
"infisical.secret.path": params.secretPath,
...(params.name ? { "infisical.secret.name": params.name } : {})
};
const orgId = requestContext.get("orgId");
if (orgId) {
attributes["infisical.organization.id"] = orgId;
}
const orgName = requestContext.get("orgName");
if (orgName) {
attributes["infisical.organization.name"] = orgName;
}
const projectDetails = requestContext.get("projectDetails");
if (projectDetails?.id) {
attributes["infisical.project.id"] = projectDetails.id;
}
if (projectDetails?.name) {
attributes["infisical.project.name"] = projectDetails.name;
}
const userAuthInfo = requestContext.get("userAuthInfo");
if (userAuthInfo?.userId) {
attributes["infisical.user.id"] = userAuthInfo.userId;
}
if (userAuthInfo?.email) {
attributes["infisical.user.email"] = userAuthInfo.email;
}
const identityAuthInfo = requestContext.get("identityAuthInfo");
if (identityAuthInfo?.identityId) {
attributes["infisical.identity.id"] = identityAuthInfo.identityId;
}
if (identityAuthInfo?.identityName) {
attributes["infisical.identity.name"] = identityAuthInfo.identityName;
}
const userAgent = requestContext.get("userAgent");
if (userAgent) {
attributes["user_agent.original"] = userAgent;
}
const ip = requestContext.get("ip");
if (ip) {
attributes["client.address"] = ip;
}
secretReadCounter.add(1, attributes);
}
};
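recordSecretReadMetric only emits when OTEL_TELEMETRY_COLLECTION_ENABLED is set, and enriches the counter with whatever org, project, user, identity, user-agent, and IP details the request context carries. A hypothetical call site (the actual hook-in point is not shown in this part of the diff):

// Hypothetical usage from a secret-read code path; arguments are illustrative:
recordSecretReadMetric({
  environment: "prod",
  secretPath: "/backend",
  name: "DATABASE_URL"
});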

View File

@@ -9,14 +9,16 @@ import { keyValueStoreDALFactory } from "@app/keystore/key-value-store-dal";
import { runMigrations } from "./auto-start-migrations";
import { initAuditLogDbConnection, initDbConnection } from "./db";
import { hsmServiceFactory } from "./ee/services/hsm/hsm-service";
import { keyStoreFactory } from "./keystore/keystore";
import { formatSmtpConfig, getDatabaseCredentials, initEnvConfig } from "./lib/config/env";
import { formatSmtpConfig, getDatabaseCredentials, getHsmConfig, initEnvConfig } from "./lib/config/env";
import { buildRedisFromConfig } from "./lib/config/redis";
import { removeTemporaryBaseDirectory } from "./lib/files";
import { initLogger } from "./lib/logger";
import { queueServiceFactory } from "./queue";
import { main } from "./server/app";
import { bootstrapCheck } from "./server/boot-strap-check";
import { kmsRootConfigDALFactory } from "./services/kms/kms-root-config-dal";
import { smtpServiceFactory } from "./services/smtp/smtp-service";
import { superAdminDALFactory } from "./services/super-admin/super-admin-dal";
@@ -26,6 +28,18 @@ const run = async () => {
const logger = initLogger();
await removeTemporaryBaseDirectory();
const hsmConfig = getHsmConfig(logger);
const hsmModule = initializeHsmModule(hsmConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig: hsmConfig
});
await hsmService.startService();
const databaseCredentials = getDatabaseCredentials(logger);
const db = initDbConnection({
@@ -35,7 +49,8 @@ const run = async () => {
});
const superAdminDAL = superAdminDALFactory(db);
const envConfig = await initEnvConfig(superAdminDAL, logger);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const envConfig = await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, logger);
const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
@@ -59,14 +74,12 @@ const run = async () => {
const keyStore = keyStoreFactory(envConfig, keyValueStoreDAL);
const redis = buildRedisFromConfig(envConfig);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({
db,
auditLogDb,
superAdminDAL,
hsmModule: hsmModule.getModule(),
kmsRootConfigDAL,
hsmService,
smtp,
logger,
queue,

View File

@@ -77,7 +77,9 @@ export enum QueueName {
DailyReminders = "daily-reminders",
SecretReminderMigration = "secret-reminder-migration",
UserNotification = "user-notification",
HealthAlert = "health-alert"
HealthAlert = "health-alert",
CertificateV3AutoRenewal = "certificate-v3-auto-renewal",
PamAccountRotation = "pam-account-rotation"
}
export enum QueueJobs {
@@ -126,7 +128,9 @@ export enum QueueJobs {
DailyReminders = "daily-reminders",
SecretReminderMigration = "secret-reminder-migration",
UserNotification = "user-notification-job",
HealthAlert = "health-alert"
HealthAlert = "health-alert",
CertificateV3DailyAutoRenewal = "certificate-v3-daily-auto-renewal",
PamAccountRotation = "pam-account-rotation"
}
export type TQueueJobTypes = {
@@ -357,6 +361,14 @@ export type TQueueJobTypes = {
name: QueueJobs.HealthAlert;
payload: undefined;
};
[QueueName.CertificateV3AutoRenewal]: {
name: QueueJobs.CertificateV3DailyAutoRenewal;
payload: undefined;
};
[QueueName.PamAccountRotation]: {
name: QueueJobs.PamAccountRotation;
payload: undefined;
};
};
const SECRET_SCANNING_JOBS = [

View File

@@ -15,12 +15,13 @@ import fastify from "fastify";
import { Cluster, Redis } from "ioredis";
import { Knex } from "knex";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig, IS_PACKAGED, TEnvConfig } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
import { TKmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { TSmtpService } from "@app/services/smtp/smtp-service";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
@@ -42,16 +43,16 @@ type TMain = {
logger?: CustomLogger;
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis | Cluster;
envConfig: TEnvConfig;
superAdminDAL: TSuperAdminDALFactory;
hsmService: THsmServiceFactory;
kmsRootConfigDAL: TKmsRootConfigDALFactory;
};
// Run the server!
export const main = async ({
db,
hsmModule,
auditLogDb,
smtp,
logger,
@@ -59,7 +60,9 @@ export const main = async ({
keyStore,
redis,
envConfig,
superAdminDAL
superAdminDAL,
hsmService,
kmsRootConfigDAL
}: TMain) => {
const appCfg = getConfig();
@@ -138,7 +141,9 @@ export const main = async ({
await server.register(fastifyRequestContext, {
defaultStoreValues: (req) => ({
reqId: req.id,
log: req.log.child({ reqId: req.id })
log: req.log.child({ reqId: req.id }),
ip: req.realIp,
userAgent: req.headers["user-agent"]
})
});
@@ -148,9 +153,10 @@ export const main = async ({
db,
auditLogDb,
keyStore,
hsmModule,
hsmService,
envConfig,
superAdminDAL
superAdminDAL,
kmsRootConfigDAL
});
await server.register(registerServeUI, {

View File

@@ -43,6 +43,6 @@ export const GenericResourceNameSchema = z
export const BaseSecretNameSchema = z.string().trim().min(1);
export const SecretNameSchema = BaseSecretNameSchema.refine(
(el) => !el.includes(":"),
"Secret name cannot contain colon."
).refine((el) => !el.includes("/"), "Secret name cannot contain forward slash.");
(el) => !el.includes(":") && !el.includes("/"),
"Secret name cannot contain colon or forward slash."
);
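The merged refine keeps the same rejections as the two chained refines it replaces, just with one combined error message. For example:

// Behavior of the combined refine (illustrative):
SecretNameSchema.safeParse("DATABASE_URL").success; // true
SecretNameSchema.safeParse("db:password").success;  // false - contains a colon
SecretNameSchema.safeParse("db/password").success;  // false - contains a forward slash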

View File

@@ -1,12 +1,26 @@
import { requestContext } from "@fastify/request-context";
import opentelemetry from "@opentelemetry/api";
import fp from "fastify-plugin";
export const apiMetrics = fp(async (fastify) => {
const apiMeter = opentelemetry.metrics.getMeter("API");
const latencyHistogram = apiMeter.createHistogram("API_latency", {
unit: "ms"
});
const apiMeter = opentelemetry.metrics.getMeter("API");
const latencyHistogram = apiMeter.createHistogram("API_latency", {
unit: "ms"
});
const infisicalMeter = opentelemetry.metrics.getMeter("Infisical");
const requestCounter = infisicalMeter.createCounter("infisical.http.server.request.count", {
description: "Total number of API requests to Infisical (covers both human users and machine identities)",
unit: "{request}"
});
const requestDurationHistogram = infisicalMeter.createHistogram("infisical.http.server.request.duration", {
description: "API request latency",
unit: "s"
});
export const apiMetrics = fp(async (fastify) => {
fastify.addHook("onResponse", async (request, reply) => {
const { method } = request;
const route = request.routerPath;
@@ -17,5 +31,67 @@ export const apiMetrics = fp(async (fastify) => {
method,
statusCode
});
const orgId = requestContext.get("orgId");
const orgName = requestContext.get("orgName");
const userAuthInfo = requestContext.get("userAuthInfo");
const identityAuthInfo = requestContext.get("identityAuthInfo");
const projectDetails = requestContext.get("projectDetails");
const userAgent = requestContext.get("userAgent");
const ip = requestContext.get("ip");
const attributes: Record<string, string | number> = {
"http.request.method": method,
"http.route": route,
"http.response.status_code": statusCode
};
if (orgId) {
attributes["infisical.organization.id"] = orgId;
}
if (orgName) {
attributes["infisical.organization.name"] = orgName;
}
if (userAuthInfo) {
if (userAuthInfo.userId) {
attributes["infisical.user.id"] = userAuthInfo.userId;
}
if (userAuthInfo.email) {
attributes["infisical.user.email"] = userAuthInfo.email;
}
}
if (identityAuthInfo) {
if (identityAuthInfo.identityId) {
attributes["infisical.identity.id"] = identityAuthInfo.identityId;
}
if (identityAuthInfo.identityName) {
attributes["infisical.identity.name"] = identityAuthInfo.identityName;
}
if (identityAuthInfo.authMethod) {
attributes["infisical.auth.method"] = identityAuthInfo.authMethod;
}
}
if (projectDetails) {
if (projectDetails.id) {
attributes["infisical.project.id"] = projectDetails.id;
}
if (projectDetails.name) {
attributes["infisical.project.name"] = projectDetails.name;
}
}
if (userAgent) {
attributes["user_agent.original"] = userAgent;
}
if (ip) {
attributes["client.address"] = ip;
}
requestCounter.add(1, attributes);
requestDurationHistogram.record(reply.elapsedTime / 1000, attributes);
});
});
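Note that the meter, counter, and histogram are now created once at module load instead of inside the plugin callback, so repeated registrations reuse the same instruments. The plugin is presumably registered on the Fastify server like the other plugins in this codebase; a minimal sketch, with the import path assumed:

// Minimal sketch; the real registration site is not shown in this diff.
import fastify from "fastify";
// import { apiMetrics } from "./api-metrics"; // assumed path

const server = fastify();
await server.register(apiMetrics);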
