Merge branch 'main' into misc/add-checks-for-helm-verification

Maidul Islam
2025-06-12 22:22:17 -04:00
committed by GitHub
525 changed files with 25316 additions and 2399 deletions

View File

@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
pull_request:
types: [opened, synchronize]
jobs:
check-non-re2-regex:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get diff of backend/*
run: |
git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
- name: Scan backend diff for non-RE2 regex
run: |
# Extract only added lines (excluding file headers)
grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
if [ ! -s added_lines.txt ]; then
echo "✅ No added lines in backend/ to check for regex usage."
exit 0
fi
regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
# Find all added lines that contain regex patterns
# -P: the pattern relies on PCRE constructs ((?:...), lookahead) that grep -E does not support
if grep -P "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
# Filter out lines that contain 'new RE2' (allowing for whitespace variations)
if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
echo ""
echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
echo "Please replace with 'new RE2(...)' for RE2 compatibility."
echo ""
echo "Offending lines:"
cat actual_violations.txt
exit 1
else
echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
fi
else
echo "✅ No regex patterns found in added/modified backend lines."
fi
- name: Cleanup temporary files
if: always()
run: |
rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
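For illustration, here is a hypothetical set of backend lines (not part of this commit) and how the two-stage grep treats them:

// Hypothetical examples only.
import RE2 from "re2";

const input = "hello   world";
const slug = input.replace(/\s+/g, "-"); // flagged: raw regex literal
const matcher = new RegExp("^v\\d+", "i"); // flagged: RegExp constructor
const safe = new RE2("^v\\d+", "i"); // passes: a string pattern has no /.../ literal,
// and lines that do match the detector but also contain `new RE2(` are excused by the second grep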

View File

@@ -44,3 +44,4 @@ cli/detect/config/gitleaks.toml:gcp-api-key:582
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
}
};
const bigIntegerColumns: Record<string, string[]> = {
"folder_commits": ["commitId"]
};
const main = async () => {
const tables = (
await db("information_schema.tables")
@@ -108,6 +113,9 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (bigIntegerColumns[tableName]?.includes(columnName)) {
ztype = "z.coerce.bigint()";
}
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
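The reason for the override: node-postgres returns Postgres bigint (int8) columns as strings by default, so the generated schema needs z.coerce.bigint() to turn them back into native BigInt values. A minimal sketch:

import { z } from "zod";

const commitId = z.coerce.bigint();

commitId.parse("9007199254740993"); // => 9007199254740993n, beyond Number.MAX_SAFE_INTEGER
commitId.parse(42); // => 42n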

View File

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -59,10 +60,12 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
@@ -119,6 +122,10 @@ declare module "@fastify/request-context" {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -212,6 +219,7 @@ declare module "fastify" {
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOciAuth: TIdentityOciAuthServiceFactory;
@@ -272,6 +280,8 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
folderCommit: TFolderCommitServiceFactory;
pit: TPitServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;

View File

@@ -80,6 +80,24 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate,
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate,
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate,
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate,
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate,
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
@@ -107,6 +125,9 @@ import {
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate,
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate,
@@ -768,6 +789,11 @@ declare module "knex/types/tables" {
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
@@ -1122,6 +1148,36 @@ declare module "knex/types/tables" {
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate
>;
[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate
>;
[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate
>;
[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate
>;
[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate
>;
[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,

View File

@@ -0,0 +1,166 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (!hasFolderCommitTable) {
await knex.schema.createTable(TableName.FolderCommit, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.bigIncrements("commitId");
t.jsonb("actorMetadata").notNullable();
t.string("actorType").notNullable();
t.string("message");
t.uuid("folderId").notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderId");
t.index("envId");
});
}
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
if (!hasFolderCommitChangesTable) {
await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.string("changeType").notNullable();
t.boolean("isUpdate").notNullable().defaultTo(false);
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (!hasFolderCheckpointTable) {
await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
if (!hasFolderCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCheckpointId").notNullable();
t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCheckpointId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
if (!hasFolderTreeCheckpointTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
if (!hasFolderTreeCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderTreeCheckpointId").notNullable();
t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderTreeCheckpointId");
t.index("folderId");
t.index("folderCommitId");
});
}
if (!hasFolderCommitTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommit);
}
if (!hasFolderCommitChangesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
}
if (!hasFolderCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
}
if (!hasFolderCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
}
if (!hasFolderTreeCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
}
if (!hasFolderTreeCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
}
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (hasFolderTreeCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
}
if (hasFolderCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
}
if (hasFolderTreeCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
}
if (hasFolderCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
}
if (hasFolderCommitChangesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
}
if (hasFolderCommitTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommit);
await knex.schema.dropTableIfExists(TableName.FolderCommit);
}
}
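As a usage sketch of the tables created above (names and values hypothetical; assumes the folder and environment rows already exist):

import { Knex } from "knex";
import { TableName } from "../schemas";

// Record a commit for a folder, then checkpoint it. The commitId
// (bigIncrements) is assigned by the database on insert.
async function recordCommit(knex: Knex, folderId: string, envId: string) {
  const [commit] = await knex(TableName.FolderCommit)
    .insert({ actorMetadata: {}, actorType: "platform", message: "init", folderId, envId })
    .returning("*");

  // Deleting the commit later cascades to this row (ON DELETE CASCADE).
  await knex(TableName.FolderCheckpoint).insert({ folderCommitId: commit.id });
}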

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.string("description").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { SecretType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
logger.info("Starting secret version fix migration");
// Get all shared secret IDs first to optimize versions query
const secretIds = await knex(TableName.SecretV2)
.where("type", SecretType.Shared)
.select("id")
.then((rows) => rows.map((row) => row.id));
logger.info(`Found ${secretIds.length} shared secrets to process`);
if (secretIds.length === 0) {
logger.info("No shared secrets found");
return;
}
const secretIdChunks = chunkArray(secretIds, 5000);
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
const currentSecretIds = secretIdChunks[chunkIndex];
logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);
// Get secrets and versions for current chunk
const [sharedSecrets, allVersions] = await Promise.all([
knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
]);
const versionsBySecretId = new Map<string, number[]>();
allVersions.forEach((v) => {
const versions = versionsBySecretId.get(v.secretId);
if (versions) {
versions.push(v.version);
} else {
versionsBySecretId.set(v.secretId, [v.version]);
}
});
const versionsToAdd = [];
const secretsToUpdate = [];
// Process each shared secret
for (const secret of sharedSecrets) {
const existingVersions = versionsBySecretId.get(secret.id) || [];
if (existingVersions.length === 0) {
// No versions exist - add current version
versionsToAdd.push({
secretId: secret.id,
version: secret.version,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
} else {
const latestVersion = Math.max(...existingVersions);
if (latestVersion !== secret.version) {
// Latest version doesn't match - create new version and update secret
const nextVersion = latestVersion + 1;
versionsToAdd.push({
secretId: secret.id,
version: nextVersion,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
secretsToUpdate.push({
id: secret.id,
newVersion: nextVersion
});
}
}
}
logger.info(
`Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
);
// Batch insert new versions
if (versionsToAdd.length > 0) {
const insertBatches = chunkArray(versionsToAdd, 9000);
for (let i = 0; i < insertBatches.length; i += 1) {
await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
}
}
if (secretsToUpdate.length > 0) {
const updateBatches = chunkArray(secretsToUpdate, 1000);
for (const updateBatch of updateBatches) {
const ids = updateBatch.map((u) => u.id);
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
await knex.raw(
`
UPDATE ${TableName.SecretV2}
SET version = CASE id ${versionCases} END,
"updatedAt" = NOW()
WHERE id IN (${ids.map(() => "?").join(",")})
`,
ids
);
}
}
}
logger.info("Secret version fix migration completed");
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret version fix migration");
// Note: Rolling back this migration would be complex and potentially destructive
// as it would require tracking which version entries were added
}
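Concretely, for a two-element batch the assembled statement looks roughly like this (ids hypothetical; assumes TableName.SecretV2 resolves to secrets_v2):

const updateBatch = [
  { id: "0f9d-aaaa", newVersion: 3 },
  { id: "7c2a-bbbb", newVersion: 8 }
];
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
// versionCases === "WHEN '0f9d-aaaa' THEN 3 WHEN '7c2a-bbbb' THEN 8"
//
// Resulting SQL, with the two ids bound to the ? placeholders:
//   UPDATE secrets_v2
//   SET version = CASE id WHEN '0f9d-aaaa' THEN 3 WHEN '7c2a-bbbb' THEN 8 END,
//       "updatedAt" = NOW()
//   WHERE id IN (?,?)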

View File

@@ -0,0 +1,345 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";
import {
ProjectType,
SecretType,
TableName,
TFolderCheckpoints,
TFolderCommits,
TFolderTreeCheckpoints,
TSecretFolders
} from "../schemas";
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
// Create a map for quick lookup of children by parent ID
const childrenMap = new Map<string, TSecretFolders[]>();
// Set of all folder IDs
const allFolderIds = new Set<string>();
// Build the set of all folder IDs
folders.forEach((folder) => {
if (folder.id) {
allFolderIds.add(folder.id);
}
});
// Group folders by their parentId
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
// Find root folders - those with no parentId or with a parentId that doesn't exist
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
// Process each level of the hierarchy
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result.reverse();
};
const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
const secrets = await knex(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.folderId`, folderIds)
.where(`${TableName.SecretV2}.type`, SecretType.Shared)
.join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
})
.select(selectAllTableCols(TableName.SecretV2))
.select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));
const secretsMap: Record<string, string[]> = {};
secrets.forEach((secret) => {
if (!secretsMap[secret.folderId]) {
secretsMap[secret.folderId] = [];
}
secretsMap[secret.folderId].push(secret.secretVersionId);
});
return secretsMap;
};
const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
const folders = await knex(TableName.SecretFolder)
.whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
})
.select(selectAllTableCols(TableName.SecretFolder))
.select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));
const foldersMap: Record<string, string[]> = {};
folders.forEach((folder) => {
if (!folder.parentId) {
return;
}
if (!foldersMap[folder.parentId]) {
foldersMap[folder.parentId] = [];
}
foldersMap[folder.parentId].push(folder.folderVersionId);
});
return foldersMap;
};
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing folder commits");
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// Get Projects to Initialize
const projects = await knex(TableName.Project)
.where(`${TableName.Project}.version`, 3)
.where(`${TableName.Project}.type`, ProjectType.SecretManager)
.select(selectAllTableCols(TableName.Project));
logger.info(`Found ${projects.length} projects to initialize`);
// Process Projects in batches of 100
const batches = chunkArray(projects, 100);
let i = 0;
for (const batch of batches) {
i += 1;
logger.info(`Processing project batch ${i} of ${batches.length}`);
let foldersCommitsList = [];
const rootFoldersMap: Record<string, string> = {};
const envRootFoldersMap: Record<string, string> = {};
// Get All Folders for the Project
// eslint-disable-next-line no-await-in-loop
const folders = await knex(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereIn(
`${TableName.Environment}.projectId`,
batch.map((project) => project.id)
)
.where(`${TableName.SecretFolder}.isReserved`, false)
.select(selectAllTableCols(TableName.SecretFolder));
logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);
// Sort folders by hierarchy (deepest folders first, since the BFS result is reversed)
const sortedFolders = sortFoldersByHierarchy(folders);
// eslint-disable-next-line no-await-in-loop
const folderSecretsMap = await getSecretsByFolderIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// eslint-disable-next-line no-await-in-loop
const folderFoldersMap = await getFoldersByParentIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// Get folder commit changes
for (const folder of sortedFolders) {
const subFolderVersionIds = folderFoldersMap[folder.id];
const secretVersionIds = folderSecretsMap[folder.id];
const changes = [];
if (subFolderVersionIds) {
changes.push(
...subFolderVersionIds.map((folderVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId: undefined,
folderVersionId,
isUpdate: false
}))
);
}
if (secretVersionIds) {
changes.push(
...secretVersionIds.map((secretVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId,
folderVersionId: undefined,
isUpdate: false
}))
);
}
if (changes.length > 0) {
const folderCommit = {
commit: {
actorMetadata: {},
actorType: ActorType.PLATFORM,
message: "Initialized folder",
folderId: folder.id,
envId: folder.envId
},
changes
};
foldersCommitsList.push(folderCommit);
if (!folder.parentId) {
rootFoldersMap[folder.id] = folder.envId;
envRootFoldersMap[folder.envId] = folder.id;
}
}
}
logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);
const filteredBrokenProjectFolders: string[] = [];
foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
if (!envRootFoldersMap[folderCommit.commit.envId]) {
filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
return false;
}
return true;
});
logger.info(
`Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
);
// Insert New Commits in batches of 9000
const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
const commitBatches = chunkArray(newCommits, 9000);
let j = 0;
for (const commitBatch of commitBatches) {
j += 1;
logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
// Create folder commit
// eslint-disable-next-line no-await-in-loop
const newCommitsInserted = (await knex
.batchInsert(TableName.FolderCommit, commitBatch)
.returning("*")) as TFolderCommits[];
logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);
const newCommitsMap: Record<string, string> = {};
const newCommitsMapInverted: Record<string, string> = {};
const newCheckpointsMap: Record<string, string> = {};
newCommitsInserted.forEach((commit) => {
newCommitsMap[commit.folderId] = commit.id;
newCommitsMapInverted[commit.id] = commit.folderId;
});
// Create folder checkpoints
// eslint-disable-next-line no-await-in-loop
const newCheckpoints = (await knex
.batchInsert(
TableName.FolderCheckpoint,
Object.values(newCommitsMap).map((commitId) => ({
folderCommitId: commitId
}))
)
.returning("*")) as TFolderCheckpoints[];
logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);
newCheckpoints.forEach((checkpoint) => {
newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
});
// Create folder commit changes
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCommitChanges,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCommitId: newCommitsMap[change.folderId],
changeType: change.changeType,
secretVersionId: change.secretVersionId,
folderVersionId: change.folderVersionId,
isUpdate: false
}))
);
logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);
// Create folder checkpoint resources
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCheckpointResources,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCheckpointId: newCheckpointsMap[change.folderId],
folderVersionId: change.folderVersionId,
secretVersionId: change.secretVersionId
}))
);
logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);
// Create Folder Tree Checkpoint
// eslint-disable-next-line no-await-in-loop
const newTreeCheckpoints = (await knex
.batchInsert(
TableName.FolderTreeCheckpoint,
Object.keys(rootFoldersMap).map((folderId) => ({
folderCommitId: newCommitsMap[folderId]
}))
)
.returning("*")) as TFolderTreeCheckpoints[];
logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);
const newTreeCheckpointsMap: Record<string, string> = {};
newTreeCheckpoints.forEach((checkpoint) => {
newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
});
// Create Folder Tree Checkpoint Resources
// eslint-disable-next-line no-await-in-loop
await knex
.batchInsert(
TableName.FolderTreeCheckpointResources,
newCommitsInserted.map((folderCommit) => ({
folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
}))
)
.returning("*");
logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
}
}
}
logger.info("Folder commits initialized");
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// delete all existing entries
await knex(TableName.FolderCommit).del();
}
}
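Note the ordering sortFoldersByHierarchy produces: the breadth-first pass collects roots first, and the final reverse() returns the deepest folders first, so child folders are processed before their parents. A hypothetical three-level tree:

const folders = [
  { id: "root", parentId: null },
  { id: "child", parentId: "root" },
  { id: "grandchild", parentId: "child" }
] as unknown as TSecretFolders[];

// BFS collects [root, child, grandchild]; reverse() yields
// [grandchild, child, root]: children ahead of their parents.
console.log(sortFoldersByHierarchy(folders).map((f) => f.id));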

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (!hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("tokenReviewMode").notNullable().defaultTo("api");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropColumn("tokenReviewMode");
});
}
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (!hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.dropColumn("showSnapshotsLegacy");
});
}
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (!hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.jsonb("config");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.dropColumn("config");
});
}
}

View File

@@ -0,0 +1,45 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
// find all rows where kubernetesHost is null
const rows = await knex(TableName.IdentityKubernetesAuth)
.whereNull("kubernetesHost")
.select(selectAllTableCols(TableName.IdentityKubernetesAuth));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ kubernetesHost: "" });
}
}
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").notNullable().alter();
});
}
}

View File

@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAliCloudAuth))) {
await knex.schema.createTable(TableName.IdentityAliCloudAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("allowedArns").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAliCloudAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}

View File

@@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
return { kmsService };
};
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
};

View File

@@ -16,7 +16,8 @@ export const DynamicSecretLeasesSchema = z.object({
statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
config: z.unknown().nullable().optional()
});
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderCheckpointId: z.string().uuid(),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
changeType: z.string(),
isUpdate: z.boolean().default(false),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
id: z.string().uuid(),
commitId: z.coerce.bigint(),
actorMetadata: z.unknown(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderTreeCheckpointId: z.string().uuid(),
folderId: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
z.input<typeof FolderTreeCheckpointResourcesSchema>,
TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAlicloudAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedArns: z.string()
});
export type TIdentityAlicloudAuths = z.infer<typeof IdentityAlicloudAuthsSchema>;
export type TIdentityAlicloudAuthsInsert = Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAlicloudAuthsUpdate = Partial<Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>>;

View File

@@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
kubernetesHost: z.string().nullable().optional(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
@@ -31,7 +31,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
tokenReviewMode: z.string().default("api")
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

View File

@@ -24,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@@ -33,6 +39,7 @@ export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";

View File

@@ -80,6 +80,7 @@ export enum TableName {
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAliCloudAuth = "identity_alicloud_auths",
IdentityAwsAuth = "identity_aws_auths",
IdentityOciAuth = "identity_oci_auths",
IdentityOidcAuth = "identity_oidc_auths",
@@ -160,6 +161,12 @@ export enum TableName {
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs",
FolderCommit = "folder_commits",
FolderCommitChanges = "folder_commit_changes",
FolderCheckpoint = "folder_checkpoints",
FolderCheckpointResources = "folder_checkpoint_resources",
FolderTreeCheckpoint = "folder_tree_checkpoints",
FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
@@ -167,7 +174,7 @@ export enum TableName {
SecretScanningConfig = "secret_scanning_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";
export const UserDeviceSchema = z
.object({
@@ -241,6 +248,7 @@ export enum IdentityAuthMethod {
UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OCI_AUTH = "oci-auth",

View File

@@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
type: z.string(),
enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true)
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
folderId: z.string().uuid()
folderId: z.string().uuid(),
description: z.string().nullable().optional()
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@@ -0,0 +1,17 @@
import {
CreateOracleDBConnectionSchema,
SanitizedOracleDBConnectionSchema,
UpdateOracleDBConnectionSchema
} from "@app/ee/services/app-connections/oracledb";
import { registerAppConnectionEndpoints } from "@app/server/routes/v1/app-connection-routers/app-connection-endpoints";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const registerOracleDBConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.OracleDB,
server,
sanitizedResponseSchema: SanitizedOracleDBConnectionSchema,
createSchema: CreateOracleDBConnectionSchema,
updateSchema: UpdateOracleDBConnectionSchema
});
};

View File

@@ -36,7 +36,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.path)
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z.any().optional()
}),
response: {
200: z.object({

View File

@@ -0,0 +1,67 @@
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerKubernetesDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.DynamicSecrets],
body: z.object({
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
ttl: z
.string()
.optional()
.describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z
.object({
namespace: z.string().min(1).optional().describe(DYNAMIC_SECRET_LEASES.KUBERNETES.CREATE.config.namespace)
})
.optional()
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema,
dynamicSecret: SanitizedDynamicSecretSchema,
data: z.unknown()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});
return { lease, data, dynamicSecret };
}
});
};

View File

@@ -23,7 +23,10 @@ const validateUsernameTemplateCharacters = characterValidator([
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
@@ -33,7 +36,7 @@ const userTemplateSchema = z
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp"].includes(val)
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
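With identity.name added to the allowed expressions, a template such as the following (hypothetical) now validates; the newly permitted single quotes, spaces, and pipes also pass the character check:

// Hypothetical username templates for a dynamic secret provider
const usernameTemplate = "{{identity.name}}-{{randomUsername}}";
const withSpaces = "{{identity.name}} | {{unixTimestamp}}"; // spaces and pipes now allowed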

View File

@@ -6,6 +6,7 @@ import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
@@ -18,6 +19,7 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@@ -53,6 +55,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/workspace" }
);
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
await server.register(registerPITRouter, { prefix: "/pit" });
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
await server.register(registerSecretApprovalRequestRouter, {
prefix: "/secret-approval-requests"
@@ -69,6 +72,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
async (dynamicSecretRouter) => {
await dynamicSecretRouter.register(registerDynamicSecretRouter);
await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
await dynamicSecretRouter.register(registerKubernetesDynamicSecretLeaseRouter, { prefix: "/leases/kubernetes" });
},
{ prefix: "/dynamic-secrets" }
);
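Given the registration above, a client could create a Kubernetes lease with a request along these lines (the URL prefix is an assumption; adjust to however the EE v1 routes are mounted in your deployment):

// Hypothetical client call; `token` is an Infisical auth token.
const res = await fetch("https://app.infisical.com/api/v1/dynamic-secrets/leases/kubernetes", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
  body: JSON.stringify({
    dynamicSecretName: "k8s-sa-token",
    projectSlug: "my-project",
    environmentSlug: "dev",
    path: "/",
    ttl: "1h", // must be between 1 minute and 1 day
    config: { namespace: "team-a" } // the new per-lease namespace override
  })
});
const { lease, dynamicSecret, data } = await res.json();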

View File

@@ -0,0 +1,416 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
const commitHistoryItemSchema = z.object({
id: z.string(),
folderId: z.string(),
actorType: z.string(),
actorMetadata: z.unknown().optional(),
message: z.string().optional().nullable(),
commitId: z.string(),
createdAt: z.string().or(z.date()),
envId: z.string()
});
const folderStateSchema = z.array(
z.object({
type: z.string(),
id: z.string(),
versionId: z.string(),
secretKey: z.string().optional(),
secretVersion: z.number().optional(),
folderName: z.string().optional(),
folderVersion: z.number().optional()
})
);
export const registerPITRouter = async (server: FastifyZodProvider) => {
// Get commit count for a folder
server.route({
method: "GET",
url: "/commits/count",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.object({
count: z.number(),
folderId: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsCount({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.count.toString()
}
}
});
return result;
}
});
// Get all commits for a folder
server.route({
method: "GET",
url: "/commits",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(20),
search: z.string().trim().optional(),
sort: z.enum(["asc", "desc"]).default("desc")
}),
response: {
200: z.object({
commits: commitHistoryItemSchema.array(),
total: z.number(),
hasMore: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsForFolder({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path,
offset: req.query.offset,
limit: req.query.limit,
search: req.query.search,
sort: req.query.sort
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMITS,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.commits.length.toString(),
offset: req.query.offset.toString(),
limit: req.query.limit.toString(),
search: req.query.search,
sort: req.query.sort
}
}
});
return result;
}
});
// Get commit changes for a specific commit
server.route({
method: "GET",
url: "/commits/:commitId/changes",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
projectId: z.string().trim()
}),
response: {
200: commitChangesResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
metadata: {
commitId: req.params.commitId,
changesCount: (result.changes.changes?.length || 0).toString()
}
}
});
return result;
}
});
// Retrieve rollback changes for a commit
server.route({
method: "GET",
url: "/commits/:commitId/compare",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
environment: z.string().trim(),
deepRollback: booleanSchema.default(false),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.array(
z.object({
folderId: z.string(),
folderName: z.string(),
folderPath: z.string().optional(),
changes: z.array(resourceChangeSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.compareCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId,
folderId: req.query.folderId,
environment: req.query.environment,
deepRollback: req.query.deepRollback,
secretPath: req.query.secretPath
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_COMPARE_FOLDER_STATES,
metadata: {
targetCommitId: req.params.commitId,
folderId: req.query.folderId,
deepRollback: req.query.deepRollback,
diffsCount: result.length.toString(),
environment: req.query.environment,
folderPath: req.query.secretPath
}
}
});
return result;
}
});
// Rollback to a previous commit
server.route({
method: "POST",
url: "/commits/:commitId/rollback",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
folderId: z.string().trim(),
deepRollback: z.boolean().default(false),
message: z.string().max(256).trim().optional(),
environment: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
secretChangesCount: z.number().optional(),
folderChangesCount: z.number().optional(),
totalChanges: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.rollbackToCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message,
environment: req.body.environment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_ROLLBACK_COMMIT,
metadata: {
targetCommitId: req.params.commitId,
environment: req.body.environment,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message || "Rollback to previous commit",
totalChanges: result.totalChanges?.toString() || "0"
}
}
});
return result;
}
});
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};
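A compact client sketch of the new PIT surface, assuming a JWT bearer token and that this router is reachable under an /api/v1/pit mount (host, mount point, and IDs below are placeholders; only the /pit prefix is visible in this diff):

// Hypothetical end-to-end walk of the PIT routes; baseUrl, token and projectId are assumptions.
const baseUrl = "https://app.infisical.com/api/v1/pit";
const projectId = "proj_123";
const headers = { Authorization: `Bearer ${process.env.INFISICAL_JWT}` };

// 1. Count the commits recorded for a folder.
const countRes = await fetch(`${baseUrl}/commits/count?projectId=${projectId}&environment=dev&path=/`, { headers });
const { folderId } = await countRes.json();

// 2. Page through the folder's history, newest first.
const commitsRes = await fetch(
  `${baseUrl}/commits?projectId=${projectId}&environment=dev&path=/&offset=0&limit=20&sort=desc`,
  { headers }
);
const { commits } = await commitsRes.json();

// 3. Roll the folder back to the most recent commit in the page.
await fetch(`${baseUrl}/commits/${commits[0].id}/rollback`, {
  method: "POST",
  headers: { ...headers, "Content-Type": "application/json" },
  body: JSON.stringify({ projectId, folderId, environment: "dev", deepRollback: false, message: "rollback via PIT" })
});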

View File

@@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
hide: true,
deprecated: true,
tags: [ApiDocsTags.Projects],
description: "Roll back project secrets to those captured in a secret snapshot version.",
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
throw new Error(
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
);
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@@ -6,6 +6,7 @@ import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-r
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
export * from "./secret-rotation-v2-router";
@@ -17,6 +18,7 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter,
[SecretRotation.MySqlCredentials]: registerMySqlCredentialsRotationRouter,
[SecretRotation.OracleDBCredentials]: registerOracleDBCredentialsRotationRouter,
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,

View File

@@ -0,0 +1,19 @@
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
UpdateOracleDBCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerOracleDBCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.OracleDBCredentials,
server,
responseSchema: OracleDBCredentialsRotationSchema,
createSchema: CreateOracleDBCredentialsRotationSchema,
updateSchema: UpdateOracleDBCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@@ -7,6 +7,7 @@ import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs";
@@ -18,6 +19,7 @@ const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema,
MySqlCredentialsRotationListItemSchema,
OracleDBCredentialsRotationListItemSchema,
Auth0ClientSecretRotationListItemSchema,
AzureClientSecretRotationListItemSchema,
AwsIamUserSecretRotationListItemSchema,

View File

@@ -187,6 +187,56 @@ export const registerSecretScanningV2Router = async (server: FastifyZodProvider)
}
});
server.route({
method: "PATCH",
url: "/findings",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update one or more Secret Scanning Findings in a batch.",
body: z
.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId),
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
})
.array()
.max(500),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { body, permission } = req;
const updatedFindingPromises = body.map(async (findingUpdatePayload) => {
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
findingUpdatePayload,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: findingUpdatePayload
}
});
return finding;
});
const findings = await Promise.all(updatedFindingPromises);
return { findings };
}
});
server.route({
method: "GET",
url: "/configs",
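For reference, a body that satisfies the new batch schema could look like the following (the IDs are placeholders and the enum member name is assumed from SecretScanningFindingStatus rather than shown in this hunk); up to 500 items are accepted per request:

// Hypothetical PATCH /findings payload; findingId values and the enum member are placeholders.
const body = [
  { findingId: "fnd_123", status: SecretScanningFindingStatus.Resolved, remarks: "key rotated" },
  { findingId: "fnd_456", remarks: null } // clears remarks, leaves status untouched
];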

View File

@@ -0,0 +1,4 @@
export * from "./oracledb-connection-enums";
export * from "./oracledb-connection-fns";
export * from "./oracledb-connection-schemas";
export * from "./oracledb-connection-types";

View File

@@ -0,0 +1,3 @@
export enum OracleDBConnectionMethod {
UsernameAndPassword = "username-and-password"
}

View File

@@ -0,0 +1,12 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const getOracleDBConnectionListItem = () => {
return {
name: "OracleDB" as const,
app: AppConnection.OracleDB as const,
methods: Object.values(OracleDBConnectionMethod) as [OracleDBConnectionMethod.UsernameAndPassword],
supportsPlatformManagement: true as const
};
};

View File

@@ -0,0 +1,64 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BaseSqlUsernameAndPasswordConnectionSchema } from "@app/services/app-connection/shared/sql";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const OracleDBConnectionCredentialsSchema = BaseSqlUsernameAndPasswordConnectionSchema;
const BaseOracleDBConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.OracleDB) });
export const OracleDBConnectionSchema = BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema
});
export const SanitizedOracleDBConnectionSchema = z.discriminatedUnion("method", [
BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema.pick({
host: true,
database: true,
port: true,
username: true,
sslEnabled: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateOracleDBConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(OracleDBConnectionMethod.UsernameAndPassword)
.describe(AppConnections.CREATE(AppConnection.OracleDB).method),
credentials: OracleDBConnectionCredentialsSchema.describe(AppConnections.CREATE(AppConnection.OracleDB).credentials)
})
]);
export const CreateOracleDBConnectionSchema = ValidateOracleDBConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true })
);
export const UpdateOracleDBConnectionSchema = z
.object({
credentials: OracleDBConnectionCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.OracleDB).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true }));
export const OracleDBConnectionListItemSchema = z.object({
name: z.literal("OracleDB"),
app: z.literal(AppConnection.OracleDB),
methods: z.nativeEnum(OracleDBConnectionMethod).array(),
supportsPlatformManagement: z.literal(true)
});
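A sketch of how the composed create schema is exercised; the credential values are placeholders, and the generic fields (here just name) are assumed from GenericCreateAppConnectionFieldsSchema:

// Hypothetical payload accepted by CreateOracleDBConnectionSchema; all values are placeholders.
const connection = CreateOracleDBConnectionSchema.parse({
  name: "oracle-prod",
  method: OracleDBConnectionMethod.UsernameAndPassword,
  credentials: {
    host: "db.internal.example.com",
    port: 1521,
    database: "ORCLPDB1",
    username: "infisical_svc",
    password: "********",
    sslEnabled: true,
    sslRejectUnauthorized: true
  }
});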

View File

@@ -0,0 +1,17 @@
import z from "zod";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOracleDBConnectionSchema,
OracleDBConnectionSchema,
ValidateOracleDBConnectionCredentialsSchema
} from "./oracledb-connection-schemas";
export type TOracleDBConnection = z.infer<typeof OracleDBConnectionSchema>;
export type TOracleDBConnectionInput = z.infer<typeof CreateOracleDBConnectionSchema> & {
app: AppConnection.OracleDB;
};
export type TValidateOracleDBConnectionCredentialsSchema = typeof ValidateOracleDBConnectionCredentialsSchema;

View File

@@ -44,6 +44,7 @@ import {
TSecretSyncRaw,
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
import { KmipPermission } from "../kmip/kmip-enum";
@@ -169,6 +170,12 @@ export enum EventType {
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_ALICLOUD_AUTH = "login-identity-alicloud-auth",
ADD_IDENTITY_ALICLOUD_AUTH = "add-identity-alicloud-auth",
UPDATE_IDENTITY_ALICLOUD_AUTH = "update-identity-alicloud-auth",
REVOKE_IDENTITY_ALICLOUD_AUTH = "revoke-identity-alicloud-auth",
GET_IDENTITY_ALICLOUD_AUTH = "get-identity-alicloud-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
@@ -206,6 +213,7 @@ export enum EventType {
CREATE_WEBHOOK = "create-webhook",
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
WEBHOOK_TRIGGERED = "webhook-triggered",
GET_SECRET_IMPORTS = "get-secret-imports",
GET_SECRET_IMPORT = "get-secret-import",
CREATE_SECRET_IMPORT = "create-secret-import",
@@ -393,6 +401,13 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
GET_PROJECT_PIT_COMMITS = "get-project-pit-commits",
GET_PROJECT_PIT_COMMIT_CHANGES = "get-project-pit-commit-changes",
GET_PROJECT_PIT_COMMIT_COUNT = "get-project-pit-commit-count",
PIT_ROLLBACK_COMMIT = "pit-rollback-commit",
PIT_REVERT_COMMIT = "pit-revert-commit",
PIT_GET_FOLDER_STATE = "pit-get-folder-state",
PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
@@ -1051,6 +1066,53 @@ interface GetIdentityAwsAuthEvent {
};
}
interface LoginIdentityAliCloudAuthEvent {
type: EventType.LOGIN_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
identityAliCloudAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityAliCloudAuthEvent {
type: EventType.ADD_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
allowedArns: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityAliCloudAuthEvent {
type: EventType.REVOKE_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAliCloudAuthEvent {
type: EventType.UPDATE_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
allowedArns: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityAliCloudAuthEvent {
type: EventType.GET_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityOciAuthEvent {
type: EventType.LOGIN_IDENTITY_OCI_AUTH;
metadata: {
@@ -1440,6 +1502,14 @@ interface DeleteWebhookEvent {
};
}
export interface WebhookTriggeredEvent {
type: EventType.WEBHOOK_TRIGGERED;
metadata: {
webhookId: string;
status: string;
} & TWebhookPayloads;
}
interface GetSecretImportsEvent {
type: EventType.GET_SECRET_IMPORTS;
metadata: {
@@ -2979,6 +3049,78 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
interface GetProjectPitCommitsEvent {
type: EventType.GET_PROJECT_PIT_COMMITS;
metadata: {
commitCount: string;
environment: string;
path: string;
offset: string;
limit: string;
search?: string;
sort: string;
};
}
interface GetProjectPitCommitChangesEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES;
metadata: {
changesCount: string;
commitId: string;
};
}
interface GetProjectPitCommitCountEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT;
metadata: {
environment: string;
path: string;
commitCount: string;
};
}
interface PitRollbackCommitEvent {
type: EventType.PIT_ROLLBACK_COMMIT;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
message: string;
totalChanges: string;
environment: string;
};
}
interface PitRevertCommitEvent {
type: EventType.PIT_REVERT_COMMIT;
metadata: {
commitId: string;
revertCommitId?: string;
changesReverted?: string;
};
}
interface PitGetFolderStateEvent {
type: EventType.PIT_GET_FOLDER_STATE;
metadata: {
commitId: string;
folderId: string;
resourceCount: string;
};
}
interface PitCompareFolderStatesEvent {
type: EventType.PIT_COMPARE_FOLDER_STATES;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
diffsCount: string;
environment: string;
folderPath: string;
};
}
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
@@ -3183,6 +3325,11 @@ export type Event =
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| DeleteIdentityAwsAuthEvent
| LoginIdentityAliCloudAuthEvent
| AddIdentityAliCloudAuthEvent
| UpdateIdentityAliCloudAuthEvent
| GetIdentityAliCloudAuthEvent
| DeleteIdentityAliCloudAuthEvent
| LoginIdentityOciAuthEvent
| AddIdentityOciAuthEvent
| UpdateIdentityOciAuthEvent
@@ -3221,6 +3368,7 @@ export type Event =
| CreateWebhookEvent
| UpdateWebhookStatusEvent
| DeleteWebhookEvent
| WebhookTriggeredEvent
| GetSecretImportsEvent
| GetSecretImportEvent
| CreateSecretImportEvent
@@ -3397,6 +3545,13 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| GetProjectPitCommitsEvent
| GetProjectPitCommitChangesEvent
| PitRollbackCommitEvent
| GetProjectPitCommitCountEvent
| PitRevertCommitEvent
| PitCompareFolderStatesEvent
| PitGetFolderStateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent

View File

@@ -10,6 +10,7 @@ import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
import { TDynamicSecretLeaseDALFactory } from "./dynamic-secret-lease-dal";
import { TDynamicSecretLeaseConfig } from "./dynamic-secret-lease-types";
type TDynamicSecretLeaseQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
@@ -99,7 +100,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
projectId: folder.projectId
});
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
@@ -132,8 +135,15 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
selectedProvider.revoke(decryptedStoredInput, externalEntityId)
dynamicSecretLeases.map(({ externalEntityId, config }) =>
selectedProvider.revoke(
decryptedStoredInput,
externalEntityId,
{
projectId: folder.projectId
},
config as TDynamicSecretLeaseConfig
)
)
);
}

View File

@@ -1,4 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@@ -25,6 +29,7 @@ import {
TCreateDynamicSecretLeaseDTO,
TDeleteDynamicSecretLeaseDTO,
TDetailsDynamicSecretLeaseDTO,
TDynamicSecretLeaseConfig,
TListDynamicSecretLeasesDTO,
TRenewDynamicSecretLeaseDTO
} from "./dynamic-secret-lease-types";
@@ -39,6 +44,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userDAL: Pick<TUserDALFactory, "findById">;
identityDAL: TIdentityDALFactory;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -52,8 +59,16 @@ export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
}: TDynamicSecretLeaseServiceFactoryDep) => {
const extractEmailUsername = (email: string) => {
const regex = new RE2(/^([^@]+)/);
const match = email.match(regex);
return match ? match[1] : email;
};
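// Illustrative behavior of the helper above (inputs are examples):
//   extractEmailUsername("jane.doe@example.com") === "jane.doe"
//   extractEmailUsername("no-at-sign") === "no-at-sign" // strings without "@" come back unchanged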
const create = async ({
environmentSlug,
path,
@@ -63,7 +78,8 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
actorOrgId,
actorAuthMethod,
ttl
ttl,
config
}: TCreateDynamicSecretLeaseDTO) => {
const appCfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
@@ -132,10 +148,25 @@ export const dynamicSecretLeaseServiceFactory = ({
let result;
try {
const identity: { name: string } = { name: "" };
if (actor === ActorType.USER) {
const user = await userDAL.findById(actorId);
if (user) {
identity.name = extractEmailUsername(user.username);
}
} else if (actor === ActorType.Machine) {
const machineIdentity = await identityDAL.findById(actorId);
if (machineIdentity) {
identity.name = machineIdentity.name;
}
}
result = await selectedProvider.create({
inputs: decryptedStoredInput,
expireAt: expireAt.getTime(),
usernameTemplate: dynamicSecretCfg.usernameTemplate
usernameTemplate: dynamicSecretCfg.usernameTemplate,
identity,
metadata: { projectId },
config
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
@@ -149,8 +180,10 @@ export const dynamicSecretLeaseServiceFactory = ({
expireAt,
version: 1,
dynamicSecretId: dynamicSecretCfg.id,
externalEntityId: entityId
externalEntityId: entityId,
config
});
await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, Number(expireAt) - Number(new Date()));
return { lease: dynamicSecretLease, dynamicSecret: dynamicSecretCfg, data };
};
@@ -231,13 +264,17 @@ export const dynamicSecretLeaseServiceFactory = ({
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
const maxExpiryDate = new Date(dynamicSecretLease.createdAt.getTime() + ms(maxTTL));
if (expireAt > maxExpiryDate) throw new BadRequestError({ message: "TTL cannot be larger than max ttl" });
if (expireAt > maxExpiryDate)
throw new BadRequestError({
message: "The requested renewal would exceed the maximum allowed lease duration. Please choose a shorter TTL"
});
}
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
expireAt.getTime()
expireAt.getTime(),
{ projectId }
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@@ -313,7 +350,12 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
.revoke(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
{ projectId },
dynamicSecretLease.config as TDynamicSecretLeaseConfig
)
.catch(async (err) => {
// only propagate this error if forced is false
if (!isForced) return { error: err as Error };

View File

@@ -10,6 +10,7 @@ export type TCreateDynamicSecretLeaseDTO = {
environmentSlug: string;
ttl?: string;
projectSlug: string;
config?: TDynamicSecretLeaseConfig;
} & Omit<TProjectPermission, "projectId">;
export type TDetailsDynamicSecretLeaseDTO = {
@@ -41,3 +42,9 @@ export type TRenewDynamicSecretLeaseDTO = {
ttl?: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TDynamicSecretKubernetesLeaseConfig = {
namespace?: string;
};
export type TDynamicSecretLeaseConfig = TDynamicSecretKubernetesLeaseConfig;
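Putting the new field to use, a lease-creation DTO for the Kubernetes provider could carry a namespace override like this (the literal below is illustrative):

// Hypothetical fragment of a TCreateDynamicSecretLeaseDTO using the new config field.
const leaseRequest: Pick<TCreateDynamicSecretLeaseDTO, "ttl" | "config"> = {
  ttl: "1h",
  config: { namespace: "staging" } // TDynamicSecretKubernetesLeaseConfig
};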

View File

@@ -116,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
throw new BadRequestError({ message: "Provided dynamic secret already exists under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@@ -146,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@@ -272,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
let selectedGatewayId: string | null = null;
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@@ -301,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@@ -472,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
projectId
})) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};
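Taken together, these call sites imply that every provider operation now receives project metadata; a condensed view of the resulting shape, inferred from the calls above rather than copied from models.ts:

// Inferred (simplified) provider interface after this change; see models.ts for the authoritative type.
type TProviderMetadata = { projectId: string };
type TInferredProviderFns = {
  validateProviderInputs: (inputs: unknown, metadata: TProviderMetadata) => Promise<unknown>;
  validateConnection: (inputs: unknown, metadata: TProviderMetadata) => Promise<boolean>;
  create: (args: {
    inputs: unknown;
    expireAt: number;
    usernameTemplate?: string | null;
    identity?: { name: string };
    metadata: TProviderMetadata;
    config?: TDynamicSecretLeaseConfig;
  }) => Promise<{ entityId: string; data: unknown }>;
  revoke: (
    inputs: unknown,
    entityId: string,
    metadata: TProviderMetadata,
    config?: TDynamicSecretLeaseConfig
  ) => Promise<{ entityId: string }>;
  renew: (inputs: unknown, entityId: string, expireAt: number, metadata: TProviderMetadata) => Promise<{ entityId: string }>;
};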

View File

@@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const CreateElastiCacheUserSchema = z.object({
UserId: z.string().trim().min(1),
@@ -132,14 +133,14 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
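// Illustrative: with usernameTemplate "{{identity.name}}-{{randomUsername}}" and an identity
// named "ci-runner", compileUsernameTemplate (from ./templateUtils) would yield something like
// "ci-runner-inf-AbC123..."; the random suffix varies per lease.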
@@ -174,14 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
if (!(await validateConnection(providerInputs))) {
throw new BadRequestError({ message: "Failed to establish connection" });
}
const leaseUsername = generateUsername(usernameTemplate);
const leaseUsername = generateUsername(usernameTemplate, identity);
const leasePassword = generatePassword();
const leaseExpiration = new Date(expireAt).toISOString();

View File

@@ -16,21 +16,25 @@ import {
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
import handlebars from "handlebars";
import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -40,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
const appCfg = getConfig();
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
const stsClient = new STSClient({
region: providerInputs.region,
credentials:
appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
? {
accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
}
: undefined // fall back to the SDK's default credential chain when hosting on AWS
});
const command = new AssumeRoleCommand({
RoleArn: providerInputs.roleArn,
RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
DurationSeconds: 900, // 15 mins
ExternalId: projectId
});
const assumeRes = await stsClient.send(command);
if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: assumeRes.Credentials?.AccessKeyId,
secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
sessionToken: assumeRes.Credentials?.SessionToken
}
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -52,21 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
};
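// For AssumeRole to succeed, the target role's trust policy must trust the Infisical
// credentials and pin sts:ExternalId to the project, roughly as below (illustrative
// policy; the account ID and user name are placeholders):
// {
//   "Effect": "Allow",
//   "Principal": { "AWS": "arn:aws:iam::<infisical-account-id>:user/<infisical-user>" },
//   "Action": "sts:AssumeRole",
//   "Condition": { "StringEquals": { "sts:ExternalId": "<projectId>" } }
// }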
const validateConnection = async (inputs: unknown) => {
const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
const client = await $getClient(providerInputs, projectId);
const isConnected = await client
.send(new GetUserCommand({}))
.then(() => true)
.catch((err) => {
const message = (err as Error)?.message;
if (
providerInputs.method === AwsIamAuthType.AssumeRole &&
// GetUser with AssumeRole credentials fails asking for an explicit username; receiving that error still proves the credentials have access in AWS
message.includes("Must specify userName when calling with non-User credentials")
) {
return true;
}
throw err;
});
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => {
const { inputs, usernameTemplate, metadata, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
@@ -76,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
@@ -125,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = entityId;

View File

@@ -8,19 +8,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ASCII letter
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -75,12 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();

View File

@@ -1,5 +1,4 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ASCII letter
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -71,12 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await connection.security.putUser({

View File

@@ -0,0 +1,105 @@
import { gaxios, Impersonated, JWT } from "google-auth-library";
import { GetAccessTokenResponse } from "google-auth-library/build/src/auth/oauth2client";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretGcpIamSchema, TDynamicProviderFns } from "./models";
export const GcpIamProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretGcpIamSchema.parseAsync(inputs);
return providerInputs;
};
const $getToken = async (serviceAccountEmail: string, ttl: number): Promise<string> => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL) {
throw new InternalServerError({
message: "Environment variable has not been configured: INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL"
});
}
const credJson = JSON.parse(appCfg.INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL) as {
client_email: string;
private_key: string;
};
const sourceClient = new JWT({
email: credJson.client_email,
key: credJson.private_key,
scopes: ["https://www.googleapis.com/auth/cloud-platform"]
});
const impersonatedCredentials = new Impersonated({
sourceClient,
targetPrincipal: serviceAccountEmail,
lifetime: ttl,
delegates: [],
targetScopes: ["https://www.googleapis.com/auth/iam", "https://www.googleapis.com/auth/cloud-platform"]
});
let tokenResponse: GetAccessTokenResponse | undefined;
try {
tokenResponse = await impersonatedCredentials.getAccessToken();
} catch (error) {
let message = "Unable to validate connection";
if (error instanceof gaxios.GaxiosError) {
message = error.message;
}
throw new BadRequestError({
message
});
}
if (!tokenResponse || !tokenResponse.token) {
throw new BadRequestError({
message: "Unable to validate connection"
});
}
return tokenResponse.token;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
await $getToken(providerInputs.serviceAccountEmail, 10);
return true;
};
const create = async (data: { inputs: unknown; expireAt: number }) => {
const { inputs, expireAt } = data;
const providerInputs = await validateProviderInputs(inputs);
const now = Math.floor(Date.now() / 1000);
const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
const entityId = alphaNumericNanoId(32);
return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
};
const revoke = async (_inputs: unknown, entityId: string) => {
// There's no way to revoke GCP IAM access tokens
return { entityId };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
// To renew a token, it must be re-created
const data = await create({ inputs, expireAt });
return { ...data, entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
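A short sketch of consuming this provider directly; the lease wiring normally goes through the generic dynamic-secret flow, and the service account email below is a placeholder:

// Hypothetical direct use of the GCP IAM provider; inputs are placeholders.
const provider = GcpIamProvider();
const expireAt = Date.now() + 15 * 60 * 1000; // 15-minute lease
const lease = await provider.create({
  inputs: { serviceAccountEmail: "app-sa@my-project.iam.gserviceaccount.com" },
  expireAt
});
// lease.data.TOKEN holds a short-lived access token for the impersonated service
// account; revoke() is a no-op because such tokens cannot be revoked, and renew()
// simply mints a fresh token for the same entityId.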

View File

@@ -6,6 +6,7 @@ import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { GcpIamProvider } from "./gcp-iam";
import { KubernetesProvider } from "./kubernetes";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
@@ -42,5 +43,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider(),
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService }),
[DynamicSecretProviders.GcpIam]: GcpIamProvider()
});

View File

@@ -1,24 +1,46 @@
import axios from "axios";
import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import https from "https";
import { InternalServerError } from "@app/lib/errors";
import { withGatewayProxy } from "@app/lib/gateway";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { TDynamicSecretKubernetesLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
import {
DynamicSecretKubernetesSchema,
KubernetesAuthMethod,
KubernetesCredentialType,
KubernetesRoleType,
TDynamicProviderFns
} from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
// This value is just a placeholder. When using the gateway auth method, the URL is irrelevant.
const GATEWAY_AUTH_DEFAULT_URL = "https://kubernetes.default.svc.cluster.local";
type TKubernetesProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
});
};
export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
if (!providerInputs.gatewayId) {
if (!providerInputs.gatewayId && providerInputs.url) {
await blockLocalAndPrivateIpAddresses(providerInputs.url);
}
@@ -30,19 +52,27 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
gatewayId: string;
targetHost: string;
targetPort: number;
caCert?: string;
reviewTokenThroughGateway: boolean;
enableSsl: boolean;
},
gatewayCallback: (host: string, port: number) => Promise<T>
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port) => {
async (port, httpsAgent) => {
// Needs to be the https protocol when hitting the Kubernetes API server directly, or it fails with "Client sent an HTTP request to an HTTPS server"; plain http is only used when the request is reviewed through the gateway
const res = await gatewayCallback("https://localhost", port);
const res = await gatewayCallback(
inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
port,
httpsAgent
);
return res;
},
{
protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@@ -53,7 +83,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
},
// we always pass this because it's needed for both the TCP and HTTP protocols
httpsAgent: new https.Agent({
ca: inputs.caCert,
rejectUnauthorized: inputs.enableSsl
})
}
);
@@ -63,7 +98,189 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountGetCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername();
const roleBindingName = `${serviceAccountName}-role-binding`;
const namespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());
// Test each namespace sequentially instead of in parallel to simplify cleanup
for await (const namespace of namespaces) {
try {
// 1. Create a test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 2. Create a test role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 3. Request a token for the test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: 600, // 10 minutes
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 4. Cleanup: delete role binding and service account
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
}
await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
} catch (error) {
const cleanupInfo = `You may need to manually clean up the following resources in namespace "${namespace}": Service Account - ${serviceAccountName}, ${providerInputs.roleType === KubernetesRoleType.Role ? "Role" : "Cluster Role"} Binding - ${roleBindingName}.`;
let mainErrorMessage = "Unknown error";
if (error instanceof AxiosError) {
mainErrorMessage = (error.response?.data as { message: string })?.message ?? "Unknown error";
} else if (error instanceof Error) {
mainErrorMessage = error.message;
}
throw new Error(`${mainErrorMessage}. ${cleanupInfo}`);
}
}
};
const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
await axios.get(
@@ -71,36 +288,63 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
};
const url = new URL(providerInputs.url);
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
const url = new URL(rawUrl);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
try {
if (providerInputs.gatewayId) {
const k8sHost = url.hostname;
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort
},
serviceAccountGetCallback
);
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
}
} else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
await serviceAccountStaticCallback(k8sHost, k8sPort);
} else {
const k8sHost = `${url.protocol}//${url.hostname}`;
await serviceAccountGetCallback(k8sHost, k8sPort);
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return true;
@@ -116,10 +360,153 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
const create = async ({
inputs,
expireAt,
usernameTemplate,
config
}: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
config?: TDynamicSecretKubernetesLeaseConfig;
}) => {
const providerInputs = await validateProviderInputs(inputs);
const tokenRequestCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername(usernameTemplate);
const roleBindingName = `${serviceAccountName}-role-binding`;
const allowedNamespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());
if (config?.namespace && !allowedNamespaces?.includes(config?.namespace)) {
throw new BadRequestError({
message: `Namespace ${config?.namespace} is not allowed. Allowed namespaces: ${allowedNamespaces?.join(", ")}`
});
}
const namespace = config?.namespace || allowedNamespaces[0];
if (!namespace) {
throw new BadRequestError({
message: "No namespace provided"
});
}
// 1. Create the service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 2. Create the role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 3. Request a token for the service account
const res = await axios.post<TKubernetesTokenRequest>(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
return { ...res.data, serviceAccountName };
};
const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
if (config?.namespace && config.namespace !== providerInputs.namespace) {
throw new BadRequestError({
message: `Namespace ${config?.namespace} is not allowed. Allowed namespace: ${providerInputs.namespace}.`
});
}
const baseUrl = port ? `${host}:${port}` : host;
const res = await axios.post<TKubernetesTokenRequest>(
@@ -133,39 +520,71 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
return res.data;
return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
};
const url = new URL(providerInputs.url);
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
const url = new URL(rawUrl);
const k8sHost = `${url.protocol}//${url.hostname}`;
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
try {
const tokenData = providerInputs.gatewayId
? await $gatewayProxyWrapper(
let tokenData;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
} else {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
tokenRequestCallback
)
: await tokenRequestCallback(k8sHost, k8sPort);
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
}
} else {
tokenData =
providerInputs.credentialType === KubernetesCredentialType.Static
? await tokenRequestStaticCallback(k8sHost, k8sPort)
: await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return {
entityId: providerInputs.serviceAccountName,
entityId: tokenData.serviceAccountName,
data: { TOKEN: tokenData.status.token }
};
} catch (error) {
@@ -180,7 +599,122 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const revoke = async (_inputs: unknown, entityId: string) => {
const revoke = async (
inputs: unknown,
entityId: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_metadata: { projectId: string },
config?: TDynamicSecretKubernetesLeaseConfig
) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const roleBindingName = `${entityId}-role-binding`;
const namespace = config?.namespace ?? providerInputs.namespace.split(",")[0].trim();
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
}
// Delete the service account
await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${entityId}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
};
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway
? GATEWAY_AUTH_DEFAULT_URL
: providerInputs.url || "";
const url = new URL(rawUrl);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
serviceAccountDynamicCallback
);
}
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
}
return { entityId };
};
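A minimal end-to-end sketch of the dynamic lifecycle implemented above, assuming a schema-valid input object (the concrete values below are illustrative, not taken from the source):
const provider = KubernetesProvider({ gatewayService });
const inputs = {
  credentialType: KubernetesCredentialType.Dynamic,
  authMethod: KubernetesAuthMethod.Api,
  url: "https://k8s.example.com",
  clusterToken: "<cluster-token>",
  namespace: "default,staging", // comma-separated allow-list
  roleType: KubernetesRoleType.Role,
  role: "secret-reader",
  audiences: [],
  sslEnabled: false
};
// create() provisions a service account, binds the role, and returns a scoped token
const lease = await provider.create({
  inputs,
  expireAt: Date.now() + 10 * 60 * 1000, // 10-minute lease
  usernameTemplate: null,
  config: { namespace: "default" }
});
// lease.data.TOKEN carries the short-lived service account token
await provider.revoke(inputs, lease.entityId, { projectId: "<project-id>" }, { namespace: "default" });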

View File

@@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -22,13 +23,13 @@ const encodePassword = (password?: string) => {
return base64Password;
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // random fallback used when no username template is provided
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -196,8 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return dnArray;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
@@ -224,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
});
}
} else {
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

View File

@@ -1,5 +1,10 @@
import RE2 from "re2";
import { z } from "zod";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { TDynamicSecretLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";
export type PasswordRequirements = {
length: number;
required: {
@@ -20,6 +25,11 @@ export enum SqlProviders {
Vertica = "vertica"
}
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
User = "user",
ApiKey = "api-key"
@@ -31,7 +41,18 @@ export enum LdapCredentialType {
}
export enum KubernetesCredentialType {
Static = "static"
Static = "static",
Dynamic = "dynamic"
}
export enum KubernetesRoleType {
ClusterRole = "cluster-role",
Role = "role"
}
export enum KubernetesAuthMethod {
Gateway = "gateway",
Api = "api"
}
export enum TotpConfigType {
@@ -168,16 +189,38 @@ export const DynamicSecretSapAseSchema = z.object({
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
});
export const DynamicSecretAwsIamSchema = z.preprocess(
(val) => {
if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
// eslint-disable-next-line no-param-reassign
(val as { method: string }).method = AwsIamAuthType.AccessKey;
}
return val;
},
z.discriminatedUnion("method", [
z.object({
method: z.literal(AwsIamAuthType.AccessKey),
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
}),
z.object({
method: z.literal(AwsIamAuthType.AssumeRole),
roleArn: z.string().trim().min(1, "Role ARN required"),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
})
])
);
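// The preprocess above back-fills `method` for legacy payloads, so both of these
// parse (sketch; values are illustrative):
// DynamicSecretAwsIamSchema.parse({ accessKey: "AKIA...", secretAccessKey: "...", region: "us-east-1" });
// // -> coerced to { method: AwsIamAuthType.AccessKey, ... }
// DynamicSecretAwsIamSchema.parse({ method: AwsIamAuthType.AssumeRole, roleArn: "arn:aws:iam::123456789012:role/demo", region: "us-east-1" });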
export const DynamicSecretMongoAtlasSchema = z.object({
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@@ -282,17 +325,89 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretKubernetesSchema = z.object({
url: z.string().url().trim().min(1),
gatewayId: z.string().nullable().optional(),
sslEnabled: z.boolean().default(true),
clusterToken: z.string().trim().min(1),
ca: z.string().optional(),
serviceAccountName: z.string().trim().min(1),
credentialType: z.literal(KubernetesCredentialType.Static),
namespace: z.string().trim().min(1),
audiences: z.array(z.string().trim().min(1))
});
export const DynamicSecretKubernetesSchema = z
.discriminatedUnion("credentialType", [
z.object({
url: z
.string()
.optional()
.refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
}),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Static),
serviceAccountName: z.string().trim().min(1),
namespace: z
.string()
.trim()
.min(1)
.refine((val) => !val.includes(","), "Namespace must be a single value, not a comma-separated list")
.refine(
(val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val),
"Invalid namespace format"
),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
}),
z.object({
url: z
.string()
.url()
.optional()
.refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
}),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Dynamic),
namespace: z
.string()
.trim()
.min(1)
.refine((val) => {
const namespaces = val.split(",").map((ns) => ns.trim());
return (
namespaces.length > 0 &&
namespaces.every((ns) => ns.length > 0) &&
namespaces.every((ns) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(ns))
);
}, "Must be a valid comma-separated list of namespace values"),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
roleType: z.nativeEnum(KubernetesRoleType),
role: z.string().trim().min(1),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
})
])
.superRefine((data, ctx) => {
if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Gateway, a gateway must be selected"
});
}
if (data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) {
if (!data.clusterToken) {
ctx.addIssue({
path: ["clusterToken"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Token, a cluster token must be provided"
});
}
if (!data.url) {
ctx.addIssue({
path: ["url"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Token, a cluster URL must be provided"
});
}
}
});
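// Sketch of the refinement above: with the default "api" auth method, omitting the
// cluster URL or token yields issues on both fields, e.g.:
// const parsed = DynamicSecretKubernetesSchema.safeParse({
//   credentialType: KubernetesCredentialType.Static,
//   serviceAccountName: "app-sa",
//   namespace: "default",
//   audiences: []
// });
// // parsed.success === false; issue paths include ["clusterToken"] and ["url"]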
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
@@ -355,6 +470,10 @@ export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
})
]);
export const DynamicSecretGcpIamSchema = z.object({
serviceAccountEmail: z.string().email().trim().min(1, "Service account email required").max(128)
});
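// e.g. (sketch): DynamicSecretGcpIamSchema.parse({ serviceAccountEmail: "app-sa@my-project.iam.gserviceaccount.com" });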
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
@@ -372,7 +491,8 @@ export enum DynamicSecretProviders {
Totp = "totp",
SapAse = "sap-ase",
Kubernetes = "kubernetes",
Vertica = "vertica"
Vertica = "vertica",
GcpIam = "gcp-iam"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -392,7 +512,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema }),
z.object({ type: z.literal(DynamicSecretProviders.GcpIam), inputs: DynamicSecretGcpIamSchema })
]);
export type TDynamicProviderFns = {
@@ -400,9 +521,24 @@ export type TDynamicProviderFns = {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
config?: TDynamicSecretLeaseConfig;
}) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown) => Promise<boolean>;
validateProviderInputs: (inputs: object) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
revoke: (
inputs: unknown,
entityId: string,
metadata: { projectId: string },
config?: TDynamicSecretLeaseConfig
) => Promise<{ entityId: string }>;
renew: (
inputs: unknown,
entityId: string,
expireAt: number,
metadata: { projectId: string }
) => Promise<{ entityId: string }>;
};
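A skeletal provider conforming to the widened contract above; the ExampleProvider name and no-op bodies are illustrative assumptions, not part of the source:
const ExampleProvider = (): TDynamicProviderFns => ({
  validateProviderInputs: async (inputs: object, _metadata: { projectId: string }) => inputs,
  validateConnection: async (_inputs: unknown, _metadata: { projectId: string }) => true,
  create: async ({ expireAt }) => ({ entityId: `example-${expireAt}`, data: { TOKEN: "<short-lived>" } }),
  revoke: async (_inputs, entityId, _metadata, _config?: TDynamicSecretLeaseConfig) => ({ entityId }),
  renew: async (_inputs, entityId, _expireAt, _metadata) => ({ entityId })
});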

View File

@@ -1,5 +1,4 @@
import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -64,12 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
await client({

View File

@@ -1,4 +1,3 @@
import handlebars from "handlebars";
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -60,12 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const db = client.db(providerInputs.database);

View File

@@ -1,5 +1,4 @@
import axios, { Axios } from "axios";
import handlebars from "handlebars";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -9,19 +8,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // random fallback used when no username template is provided
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -117,12 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await createRabbitMqUser({

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // random fallback used when no username template is provided
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -121,12 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return pingResponse;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend "inf_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -87,11 +88,11 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const client = await $getClient(providerInputs);

View File

@@ -15,19 +15,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // random fallback used when no username template is provided
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -97,11 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return testResult;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
// destroying the client requires a callback, so we pass a no-op below
const noop = () => {};
@@ -17,13 +18,13 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ascii letter, so we prepend "infisical_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -88,13 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return isValidConnection;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
try {

View File

@@ -3,13 +3,14 @@ import handlebars from "handlebars";
import knex from "knex";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -104,9 +105,8 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
}
};
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
let randomUsername = "";
// For Oracle, the client assumes everything is upper case when identifiers are not quoted
if (provider === SqlProviders.Oracle) {
randomUsername = alphaNumericNanoId(32).toUpperCase();
@@ -114,10 +114,13 @@ const generateUsername = (provider: SqlProviders, usernameTemplate?: string | nu
randomUsername = alphaNumericNanoId(32);
}
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity,
options: {
toUpperCase: provider === SqlProviders.Oracle
}
});
};
@@ -185,6 +188,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
@@ -220,11 +224,16 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client, usernameTemplate);
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {

View File

@@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
export const compileUsernameTemplate = ({
usernameTemplate,
randomUsername,
identity,
unixTimestamp,
options
}: {
usernameTemplate: string;
randomUsername: string;
identity?: { name: string };
unixTimestamp?: number;
options?: {
toUpperCase?: boolean;
};
}): string => {
// Create isolated handlebars instance
const hbs = handlebars.create();
// Register random helper on local instance
hbs.registerHelper("random", function (length: number) {
if (typeof length !== "number" || length <= 0 || length > 100) {
return "";
}
return alphaNumericNanoId(length);
});
// Register replace helper on local instance
hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
try {
const re2Pattern = new RE2(searchValue, "g");
// Replace all occurrences
return re2Pattern.replace(textStr, replaceValue);
} catch (error) {
logger.error(error, "RE2 pattern failed, using original template");
return textStr;
}
});
// Register truncate helper on local instance
hbs.registerHelper("truncate", function (text: string, length: number) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
if (typeof length !== "number" || length <= 0) return textStr;
return textStr.substring(0, length);
});
// Compile template with context using local instance
const context = {
randomUsername,
unixTimestamp: unixTimestamp || Math.floor(Date.now() / 1000), // unix timestamp in seconds
identity: {
name: identity?.name
}
};
const result = hbs.compile(usernameTemplate)(context);
if (options?.toUpperCase) {
return result.toUpperCase();
}
return result;
};
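A usage sketch of the helper above; the template string and identity name are illustrative:
const username = compileUsernameTemplate({
  usernameTemplate: "{{truncate identity.name 8}}-{{random 6}}",
  randomUsername: alphaNumericNanoId(32), // fallback context value
  identity: { name: "ci-runner" },
  options: { toUpperCase: false }
});
// -> e.g. "ci-runne-x1Y2z3": {{truncate}} substrings to 8 chars, {{random}} emits a
// fresh alphanumeric nanoid, and {{replace}} would run through RE2 as registered above.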

View File

@@ -4,7 +4,7 @@ import knex, { Knex } from "knex";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@@ -196,6 +196,7 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,

View File

@@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
export type TAddUserToGroupDTO = {
id: string;
username: string;

View File

@@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
return licenses;
};
const invalidateGetPlan = async (orgId: string) => {
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
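// Sketch: after a subscription change, callers can drop the cached plan so the next
// read refetches, e.g. `await licenseService.invalidateGetPlan(orgId)` before `getPlan(orgId)`.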
return {
generateOrgCustomerId,
removeOrgCustomer,
@@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
getOrgPlan,

View File

@@ -4,6 +4,7 @@ import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
ProjectPermissionDynamicSecretActions,
ProjectPermissionGroupActions,
ProjectPermissionIdentityActions,
@@ -90,6 +91,11 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.Certificates
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can(
[
ProjectPermissionSshHostActions.Edit,
@@ -292,6 +298,11 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretImports
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
@@ -479,6 +490,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
can(
[

View File

@@ -17,6 +17,11 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionCommitsActions {
Read = "read",
PerformRollback = "perform-rollback"
}
export enum ProjectPermissionCertificateActions {
Read = "read",
Create = "create",
@@ -172,6 +177,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Commits = "commits",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates",
@@ -325,6 +331,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
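// With the Commits subject wired into the union above, a CASL check reads (sketch):
// permission.can(ProjectPermissionCommitsActions.PerformRollback, ProjectPermissionSub.Commits);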
@@ -376,7 +383,8 @@ const DynamicSecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
@@ -404,6 +412,23 @@ const DynamicSecretConditionV2Schema = z
})
.partial();
const SecretImportConditionSchema = z
.object({
environment: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
const SecretConditionV2Schema = z
.object({
environment: z.union([
@@ -658,6 +683,12 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Commits).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCommitsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)
@@ -741,7 +772,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV1Schema.describe(
conditions: SecretImportConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),

View File

@@ -0,0 +1,485 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import {
isFolderCommitChange,
isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
type TPitServiceFactoryDep = {
folderCommitService: TFolderCommitServiceFactory;
secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
};
export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;
export const pitServiceFactory = ({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
}: TPitServiceFactoryDep) => {
const getCommitsCount = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
}) => {
const result = await folderCommitService.getCommitsCount({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
});
return result;
};
const getCommitsForFolder = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
offset: number;
limit: number;
search?: string;
sort: "asc" | "desc";
}) => {
const result = await folderCommitService.getCommitsForFolder({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
});
return {
commits: result.commits.map((commit) => ({
...commit,
commitId: commit.commitId.toString()
})),
total: result.total,
hasMore: result.hasMore
};
};
const getCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const changes = await folderCommitService.getCommitChanges({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
});
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [changes.folderId]);
for (const change of changes.changes) {
if (isSecretCommitChange(change)) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
(Number.parseInt(change.secretVersion, 10) - 1).toString(),
actorId,
actor,
actorOrgId,
actorAuthMethod,
changes.envId,
projectId,
folderWithPath?.path || ""
);
} else if (isFolderCommitChange(change)) {
change.versions = await folderService.getFolderVersions(
change,
(Number.parseInt(change.folderVersion, 10) - 1).toString(),
change.folderChangeId
);
}
}
return {
changes: {
...changes,
commitId: changes.commitId.toString()
}
};
};
const compareCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
environment,
deepRollback,
secretPath
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
environment: string;
deepRollback: boolean;
secretPath: string;
}) => {
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
let diffs;
if (deepRollback) {
diffs = await folderCommitService.deepCompareFolder({
targetCommitId: targetCommit.id,
envId: env.id,
projectId
});
} else {
const folderData = await folderService.getFolderById({
actor,
actorId,
actorOrgId,
actorAuthMethod,
id: folderId
});
diffs = [
{
folderId: folderData.id,
folderName: folderData.name,
folderPath: secretPath,
changes: await folderCommitService.compareFolderStates({
targetCommitId: commitId,
currentCommitId: latestCommit.id
})
}
];
}
for (const diff of diffs) {
for (const change of diff.changes) {
// Use discriminated union type checking
if (change.type === ResourceType.SECRET) {
// TypeScript now knows this is a SecretChange
if (change.secretKey && change.secretVersion && change.secretId) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
change.fromVersion || "1",
actorId,
actor,
actorOrgId,
actorAuthMethod,
env.id,
projectId,
diff.folderPath || ""
);
}
} else if (change.type === ResourceType.FOLDER) {
// TypeScript now knows this is a FolderChange
if (change.folderVersion) {
change.versions = await folderService.getFolderVersions(change, change.fromVersion || "1", change.id);
}
}
}
}
return diffs;
};
const rollbackToCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
deepRollback,
message,
environment
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
deepRollback: boolean;
message?: string;
environment: string;
}) => {
const { permission: userPermission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(userPermission).throwUnlessCan(
ProjectPermissionCommitsActions.PerformRollback,
ProjectPermissionSub.Commits
);
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
logger.info(`PIT - Attempting to rollback folder ${folderId} from commit ${latestCommit.id} to commit ${commitId}`);
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!targetCommit || targetCommit.folderId !== folderId || targetCommit.envId !== env.id) {
throw new NotFoundError({ message: "Target commit not found" });
}
if (!latestCommit || latestCommit.envId !== env.id) {
throw new NotFoundError({ message: "Latest commit not found" });
}
if (deepRollback) {
await folderCommitService.deepRollbackFolder(commitId, env.id, actorId, actor, projectId, message);
return { success: true };
}
const diff = await folderCommitService.compareFolderStates({
currentCommitId: latestCommit.id,
targetCommitId: commitId
});
const response = await folderCommitService.applyFolderStateDifferences({
differences: diff,
actorInfo: {
actorType: actor,
actorId,
message: message || "Rollback to previous commit"
},
folderId,
projectId,
reconstructNewFolders: deepRollback
});
return {
success: true,
secretChangesCount: response.secretChangesCount,
folderChangesCount: response.folderChangesCount,
totalChanges: response.totalChanges
};
};
const revertCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const response = await folderCommitService.revertCommitChanges({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
return response;
};
const getFolderStateAtCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const commit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!commit) {
throw new NotFoundError({ message: `Commit with ID ${commitId} not found` });
}
const response = await folderCommitService.reconstructFolderState(commitId);
return response.map((item) => {
if (item.type === ResourceType.SECRET) {
return {
...item,
secretVersion: Number(item.secretVersion)
};
}
if (item.type === ResourceType.FOLDER) {
return {
...item,
folderVersion: Number(item.folderVersion)
};
}
return item;
});
};
return {
getCommitsCount,
getCommitsForFolder,
getCommitChanges,
compareCommitChanges,
rollbackToCommit,
revertCommit,
getFolderStateAtCommit
};
};
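
A quick orientation sketch (not part of the diff): a route handler would drive the rollback flow above roughly like this, where `pitService` and the request values are hypothetical placeholders. Note that a deep rollback short-circuits and returns only `{ success: true }`, while a shallow rollback diffs the latest commit against the target and applies the differences in place.

const result = await pitService.rollbackToCommit({
  actor: ActorType.USER,
  actorId: req.permission.id, // hypothetical request shape
  actorOrgId: req.permission.orgId,
  actorAuthMethod: req.permission.authMethod,
  projectId: req.params.projectId,
  commitId: req.params.commitId,
  folderId: req.body.folderId,
  deepRollback: false,
  message: "Rollback via API",
  environment: req.body.environment
});
// => { success: true, secretChangesCount, folderChangesCount, totalChanges }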

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-nested-ternary */
import { ForbiddenError, subject } from "@casl/ability";
import {
@@ -20,6 +21,7 @@ import { EnforcementLevel } from "@app/lib/types";
import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification";
import { TriggerFeature } from "@app/lib/workflow-integrations/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
@@ -130,6 +132,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "sendNotification">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -161,7 +164,8 @@ export const secretApprovalRequestServiceFactory = ({
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -243,7 +247,7 @@ export const secretApprovalRequestServiceFactory = ({
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
const { policy } = secretApprovalRequest;
const { hasRole } = await permissionService.getProjectPermission({
const { hasRole, permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
@@ -259,6 +263,12 @@ export const secretApprovalRequestServiceFactory = ({
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const hasSecretReadAccess = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
const hiddenSecretValue = "******";
let secrets;
if (shouldUseSecretV2Bridge) {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
@@ -275,9 +285,9 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValue:
// eslint-disable-next-line no-nested-ternary
el.secret && el.secret.isRotatedSecret
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secret && el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
@@ -290,9 +300,11 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secret.key,
id: el.secret.id,
version: el.secret.version,
secretValue: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: "",
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: "",
secretComment: el.secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedComment }).toString()
: ""
@@ -303,9 +315,11 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secretVersion.key,
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValue: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: "",
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: "",
secretComment: el.secretVersion.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
: "",
@@ -597,6 +611,10 @@ export const secretApprovalRequestServiceFactory = ({
? await fnSecretV2BridgeBulkInsert({
tx,
folderId,
actor: {
actorId,
type: actor
},
orgId: actorOrgId,
inputSecrets: secretCreationCommits.map((el) => ({
tagIds: el?.tags.map(({ id }) => id),
@@ -619,13 +637,18 @@ export const secretApprovalRequestServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretV2BridgeBulkUpdate({
folderId,
orgId: actorOrgId,
actor: {
actorId,
type: actor
},
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
@@ -659,7 +682,8 @@ export const secretApprovalRequestServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
})
: [];
const deletedSecret = secretDeletionCommits.length
@@ -667,10 +691,13 @@ export const secretApprovalRequestServiceFactory = ({
projectId,
folderId,
tx,
actorId: "",
actorId,
actorType: actor,
secretDAL: secretV2BridgeDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared }))
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })),
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL
})
: [];
const updatedSecretApproval = await secretApprovalRequestDAL.updateById(
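
The nested ternaries above encode one masking rule in three places; restated as a standalone helper for clarity (illustrative only, not part of the diff):

const resolveSecretValue = (
  hasSecretReadAccess: boolean,
  isRotatedSecret: boolean,
  encryptedValue: Buffer | null | undefined,
  decrypt: (args: { cipherTextBlob: Buffer }) => Buffer
): string | undefined => {
  if (!hasSecretReadAccess) return "******"; // mirrors hiddenSecretValue
  if (isRotatedSecret) return undefined; // rotated secret values are never echoed back
  return encryptedValue ? decrypt({ cipherTextBlob: encryptedValue }).toString() : "";
};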

View File

@@ -10,6 +10,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -87,6 +88,7 @@ type TSecretReplicationServiceFactoryDep = {
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
@@ -132,6 +134,7 @@ export const secretReplicationServiceFactory = ({
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
kmsService,
folderCommitService,
resourceMetadataDAL
}: TSecretReplicationServiceFactoryDep) => {
const $getReplicatedSecrets = (
@@ -419,7 +422,7 @@ export const secretReplicationServiceFactory = ({
return {
op: operation,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
secretMetadata: JSON.stringify(doc.secretMetadata),
key: doc.key,
encryptedValue: doc.encryptedValue,
@@ -446,11 +449,12 @@ export const secretReplicationServiceFactory = ({
tx,
secretTagDAL,
resourceMetadataDAL,
folderCommitService,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
@@ -466,6 +470,7 @@ export const secretReplicationServiceFactory = ({
orgId,
folderId: destinationReplicationFolderId,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretDAL: secretV2BridgeDAL,
tx,
resourceMetadataDAL,
@@ -479,7 +484,7 @@ export const secretReplicationServiceFactory = ({
},
data: {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,
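
The repeated `doc.metadata ? JSON.stringify(doc.metadata) : []` above is the usual node-postgres precaution: a plain JS array bound to a json/jsonb column is serialized as a Postgres array rather than JSON, so the value is stringified explicitly (the `[]` fallback for missing metadata is kept as in the diff). A one-line sketch of the rule:

const normalizeMetadata = (metadata?: unknown): string | unknown[] =>
  metadata ? JSON.stringify(metadata) : [];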

View File

@@ -101,10 +101,56 @@ export const azureClientSecretRotationFactory: TRotationFactory<
}
};
/**
* Checks if a credential with the given keyId exists.
*/
const credentialExists = async (keyId: string): Promise<boolean> => {
const accessToken = await getAzureConnectionAccessToken(connection.id, appConnectionDAL, kmsService);
const endpoint = `${GRAPH_API_BASE}/applications/${objectId}/passwordCredentials`;
try {
const { data } = await request.get<{ value: Array<{ keyId: string }> }>(endpoint, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Content-Type": "application/json"
}
});
return data.value?.some((credential) => credential.keyId === keyId) || false;
} catch (error: unknown) {
if (error instanceof AxiosError) {
let message;
if (
error.response?.data &&
typeof error.response.data === "object" &&
"error" in error.response.data &&
typeof (error.response.data as AzureErrorResponse).error.message === "string"
) {
message = (error.response.data as AzureErrorResponse).error.message;
}
throw new BadRequestError({
message: `Failed to check credential existence for app ${objectId}: ${
message || error.message || "Unknown error"
}`
});
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
};
/**
* Revokes a client secret from the Azure app using its keyId.
* First checks if the credential exists before attempting revocation.
*/
const revokeCredential = async (keyId: string) => {
// Check if credential exists before attempting revocation
const exists = await credentialExists(keyId);
if (!exists) {
return; // Credential doesn't exist, nothing to revoke
}
const accessToken = await getAzureConnectionAccessToken(connection.id, appConnectionDAL, kmsService);
const endpoint = `${GRAPH_API_BASE}/applications/${objectId}/removePassword`;
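
The hunk ends here, but for context: Microsoft Graph revokes a password credential via POST /applications/{objectId}/removePassword with the keyId in the request body, so the continuation presumably looks like the following sketch (assuming the same axios-style request helper used in credentialExists):

await request.post(
  endpoint,
  { keyId }, // keyId of the credential to remove
  {
    headers: {
      Authorization: `Bearer ${accessToken}`,
      "Content-Type": "application/json"
    }
  }
);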

View File

@@ -0,0 +1,3 @@
export * from "./oracledb-credentials-rotation-constants";
export * from "./oracledb-credentials-rotation-schemas";
export * from "./oracledb-credentials-rotation-types";

View File

@@ -0,0 +1,20 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { TSecretRotationV2ListItem } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = {
name: "OracleDB Credentials",
type: SecretRotation.OracleDBCredentials,
connection: AppConnection.OracleDB,
template: {
createUserStatement: `-- create user
CREATE USER INFISICAL_USER IDENTIFIED BY "temporary_password";
-- grant all privileges
GRANT ALL PRIVILEGES TO INFISICAL_USER;`,
secretsMapping: {
username: "ORACLEDB_USERNAME",
password: "ORACLEDB_PASSWORD"
}
}
};

View File

@@ -0,0 +1,41 @@
import { z } from "zod";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
BaseCreateSecretRotationSchema,
BaseSecretRotationSchema,
BaseUpdateSecretRotationSchema
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
import {
SqlCredentialsRotationParametersSchema,
SqlCredentialsRotationSecretsMappingSchema,
SqlCredentialsRotationTemplateSchema
} from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const OracleDBCredentialsRotationSchema = BaseSecretRotationSchema(SecretRotation.OracleDBCredentials).extend({
type: z.literal(SecretRotation.OracleDBCredentials),
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const CreateOracleDBCredentialsRotationSchema = BaseCreateSecretRotationSchema(
SecretRotation.OracleDBCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const UpdateOracleDBCredentialsRotationSchema = BaseUpdateSecretRotationSchema(
SecretRotation.OracleDBCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema.optional(),
secretsMapping: SqlCredentialsRotationSecretsMappingSchema.optional()
});
export const OracleDBCredentialsRotationListItemSchema = z.object({
name: z.literal("OracleDB Credentials"),
connection: z.literal(AppConnection.OracleDB),
type: z.literal(SecretRotation.OracleDBCredentials),
template: SqlCredentialsRotationTemplateSchema
});
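
Since these are ordinary zod objects, a request body can be validated with safeParse before it reaches the rotation service; a minimal sketch (requestBody is a hypothetical unknown value, and the base fields contributed by BaseCreateSecretRotationSchema are project-specific):

const result = CreateOracleDBCredentialsRotationSchema.safeParse(requestBody);
if (!result.success) {
  throw new Error(result.error.issues.map((issue) => issue.message).join(", "));
}
const rotationInput = result.data; // typed as the OracleDB create-rotation input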

View File

@@ -0,0 +1,18 @@
import { z } from "zod";
import { TOracleDBConnection } from "../../app-connections/oracledb";
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationListItemSchema,
OracleDBCredentialsRotationSchema
} from "./oracledb-credentials-rotation-schemas";
export type TOracleDBCredentialsRotation = z.infer<typeof OracleDBCredentialsRotationSchema>;
export type TOracleDBCredentialsRotationInput = z.infer<typeof CreateOracleDBCredentialsRotationSchema>;
export type TOracleDBCredentialsRotationListItem = z.infer<typeof OracleDBCredentialsRotationListItemSchema>;
export type TOracleDBCredentialsRotationWithConnection = TOracleDBCredentialsRotation & {
connection: TOracleDBConnection;
};

View File

@@ -2,6 +2,7 @@ export enum SecretRotation {
PostgresCredentials = "postgres-credentials",
MsSqlCredentials = "mssql-credentials",
MySqlCredentials = "mysql-credentials",
OracleDBCredentials = "oracledb-credentials",
Auth0ClientSecret = "auth0-client-secret",
AzureClientSecret = "azure-client-secret",
AwsIamUserSecret = "aws-iam-user-secret",

View File

@@ -10,6 +10,7 @@ import { AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./azure-client-secret"
import { LDAP_PASSWORD_ROTATION_LIST_OPTION, TLdapPasswordRotation } from "./ldap-password";
import { MSSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mssql-credentials";
import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
import { TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
@@ -25,6 +26,7 @@ const SECRET_ROTATION_LIST_OPTIONS: Record<SecretRotation, TSecretRotationV2List
[SecretRotation.PostgresCredentials]: POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.MsSqlCredentials]: MSSQL_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.MySqlCredentials]: MYSQL_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.OracleDBCredentials]: ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.Auth0ClientSecret]: AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION,
[SecretRotation.AzureClientSecret]: AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION,
[SecretRotation.AwsIamUserSecret]: AWS_IAM_USER_SECRET_ROTATION_LIST_OPTION,

View File

@@ -5,6 +5,7 @@ export const SECRET_ROTATION_NAME_MAP: Record<SecretRotation, string> = {
[SecretRotation.PostgresCredentials]: "PostgreSQL Credentials",
[SecretRotation.MsSqlCredentials]: "Microsoft SQL Server Credentials",
[SecretRotation.MySqlCredentials]: "MySQL Credentials",
[SecretRotation.OracleDBCredentials]: "OracleDB Credentials",
[SecretRotation.Auth0ClientSecret]: "Auth0 Client Secret",
[SecretRotation.AzureClientSecret]: "Azure Client Secret",
[SecretRotation.AwsIamUserSecret]: "AWS IAM User Secret",
@@ -15,6 +16,7 @@ export const SECRET_ROTATION_CONNECTION_MAP: Record<SecretRotation, AppConnectio
[SecretRotation.PostgresCredentials]: AppConnection.Postgres,
[SecretRotation.MsSqlCredentials]: AppConnection.MsSql,
[SecretRotation.MySqlCredentials]: AppConnection.MySql,
[SecretRotation.OracleDBCredentials]: AppConnection.OracleDB,
[SecretRotation.Auth0ClientSecret]: AppConnection.Auth0,
[SecretRotation.AzureClientSecret]: AppConnection.AzureClientSecrets,
[SecretRotation.AwsIamUserSecret]: AppConnection.AWS,

View File

@@ -63,6 +63,7 @@ import { TAppConnectionDALFactory } from "@app/services/app-connection/app-conne
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -98,7 +99,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
TSecretV2BridgeDALFactory,
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
@@ -106,6 +107,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
queueService: Pick<TQueueServiceFactory, "queuePg">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
@@ -121,6 +123,7 @@ const SECRET_ROTATION_FACTORY_MAP: Record<SecretRotation, TRotationFactoryImplem
[SecretRotation.PostgresCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.MsSqlCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.MySqlCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.OracleDBCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.Auth0ClientSecret]: auth0ClientSecretRotationFactory as TRotationFactoryImplementation,
[SecretRotation.AzureClientSecret]: azureClientSecretRotationFactory as TRotationFactoryImplementation,
[SecretRotation.AwsIamUserSecret]: awsIamUserSecretRotationFactory as TRotationFactoryImplementation,
@@ -145,6 +148,7 @@ export const secretRotationV2ServiceFactory = ({
snapshotService,
keyStore,
queueService,
folderCommitService,
appConnectionDAL
}: TSecretRotationV2ServiceFactoryDep) => {
const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
@@ -538,7 +542,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
await secretRotationV2DAL.insertSecretMappings(
@@ -674,7 +683,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
secretsMappingUpdated = true;
@@ -792,6 +806,9 @@ export const secretRotationV2ServiceFactory = ({
projectId,
folderId,
actorId: actor.id, // not actually used since rotated secrets are shared
actorType: actor.type,
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL,
tx
});
}
@@ -935,6 +952,10 @@ export const secretRotationV2ServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService,
actor: {
type: ActorType.PLATFORM
},
secretTagDAL,
resourceMetadataDAL
});
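
A side note on the dependency style threaded through these hunks: every factory declares its dependencies as Pick<SomeFactory, "method"> so it receives only the methods it actually calls, which keeps the wiring explicit and lets tests satisfy the contract with one-method stubs. An illustrative sketch (type and function names are made up):

type TCommitService = {
  createCommit: (payload: unknown, tx?: unknown) => Promise<unknown>;
  listCommits: () => Promise<unknown[]>;
};

type TDeps = { folderCommitService: Pick<TCommitService, "createCommit"> };

const makeRotationService = ({ folderCommitService }: TDeps) => ({
  rotate: async () => folderCommitService.createCommit({ message: "rotated" })
});

// In a test, a minimal stub satisfies the narrowed type:
const service = makeRotationService({
  folderCommitService: { createCommit: async () => ({}) }
});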

View File

@@ -45,6 +45,12 @@ import {
TMySqlCredentialsRotationListItem,
TMySqlCredentialsRotationWithConnection
} from "./mysql-credentials";
import {
TOracleDBCredentialsRotation,
TOracleDBCredentialsRotationInput,
TOracleDBCredentialsRotationListItem,
TOracleDBCredentialsRotationWithConnection
} from "./oracledb-credentials";
import {
TPostgresCredentialsRotation,
TPostgresCredentialsRotationInput,
@@ -58,6 +64,7 @@ export type TSecretRotationV2 =
| TPostgresCredentialsRotation
| TMsSqlCredentialsRotation
| TMySqlCredentialsRotation
| TOracleDBCredentialsRotation
| TAuth0ClientSecretRotation
| TAzureClientSecretRotation
| TLdapPasswordRotation
@@ -67,6 +74,7 @@ export type TSecretRotationV2WithConnection =
| TPostgresCredentialsRotationWithConnection
| TMsSqlCredentialsRotationWithConnection
| TMySqlCredentialsRotationWithConnection
| TOracleDBCredentialsRotationWithConnection
| TAuth0ClientSecretRotationWithConnection
| TAzureClientSecretRotationWithConnection
| TLdapPasswordRotationWithConnection
@@ -83,6 +91,7 @@ export type TSecretRotationV2Input =
| TPostgresCredentialsRotationInput
| TMsSqlCredentialsRotationInput
| TMySqlCredentialsRotationInput
| TOracleDBCredentialsRotationInput
| TAuth0ClientSecretRotationInput
| TAzureClientSecretRotationInput
| TLdapPasswordRotationInput
@@ -92,6 +101,7 @@ export type TSecretRotationV2ListItem =
| TPostgresCredentialsRotationListItem
| TMsSqlCredentialsRotationListItem
| TMySqlCredentialsRotationListItem
| TOracleDBCredentialsRotationListItem
| TAuth0ClientSecretRotationListItem
| TAzureClientSecretRotationListItem
| TLdapPasswordRotationListItem

View File

@@ -1,18 +1,19 @@
import { z } from "zod";
import { Auth0ClientSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/auth0-client-secret";
import { AwsIamUserSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/aws-iam-user-secret";
import { AzureClientSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
import { LdapPasswordRotationSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { AwsIamUserSecretRotationSchema } from "./aws-iam-user-secret";
export const SecretRotationV2Schema = z.discriminatedUnion("type", [
PostgresCredentialsRotationSchema,
MsSqlCredentialsRotationSchema,
MySqlCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
Auth0ClientSecretRotationSchema,
AzureClientSecretRotationSchema,
LdapPasswordRotationSchema,

View File

@@ -2,14 +2,15 @@ import { z } from "zod";
import { TMsSqlCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { TMySqlCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { TOracleDBCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { TPostgresCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "./sql-credentials-rotation-schemas";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-schemas";
export type TSqlCredentialsRotationWithConnection =
| TPostgresCredentialsRotationWithConnection
| TMsSqlCredentialsRotationWithConnection
| TMySqlCredentialsRotationWithConnection;
| TMySqlCredentialsRotationWithConnection
| TOracleDBCredentialsRotationWithConnection;
export type TSqlCredentialsRotationGeneratedCredentials = z.infer<
typeof SqlCredentialsRotationGeneratedCredentialsSchema

View File

@@ -178,6 +178,13 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str
};
}
if (db === TDbProviderClients.OracleDB) {
return {
query: `ALTER USER ?? IDENTIFIED BY "${variables.password}"`,
variables: [variables.username]
};
}
// add more based on client
return {
query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`,
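
For context on the placeholder syntax (a sketch, not from the diff): knex treats `??` in a raw query as an identifier binding, so the username is safely quoted as an identifier, while the password is string-interpolated because most databases do not accept bind parameters in DDL statements such as ALTER USER.

import knex from "knex";

const db = knex({ client: "oracledb", connection: {} }); // connection details elided
const { query, variables } = {
  query: `ALTER USER ?? IDENTIFIED BY "s3cret"`,
  variables: ["INFISICAL_USER"]
};
await db.raw(query, variables);
// executes: ALTER USER "INFISICAL_USER" IDENTIFIED BY "s3cret"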

View File

@@ -14,6 +14,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -53,6 +54,7 @@ type TSecretRotationQueueFactoryDep = {
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
// These errors should stop the repeatable job and prompt the user to reconfigure the rotation
@@ -77,6 +79,7 @@ export const secretRotationQueueFactory = ({
telemetryService,
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
folderCommitService,
kmsService
}: TSecretRotationQueueFactoryDep) => {
const addToQueue = async (rotationId: string, interval: number) => {
@@ -330,7 +333,7 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await secretVersionV2BridgeDAL.insertMany(
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
@@ -338,6 +341,22 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await folderCommitService.createCommit(
{
actor: {
type: ActorType.PLATFORM
},
message: "Changed by Secret rotation",
folderId: secretVersions[0].folderId,
changes: secretVersions.map((sv) => ({
type: CommitType.ADD,
isUpdate: true,
secretVersionId: sv.id
}))
},
tx
);
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);

View File

@@ -10,7 +10,8 @@ export enum TDbProviderClients {
// mysql and maria db
MySql = "mysql",
MsSqlServer = "mssql"
MsSqlServer = "mssql",
OracleDB = "oracledb"
}
export enum TAwsProviderSystems {

View File

@@ -8,6 +8,7 @@ import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -51,8 +52,8 @@ type TSecretSnapshotServiceFactoryDep = {
snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory;
snapshotFolderDAL: TSnapshotFolderDALFactory;
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionByFolderId">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId" | "findOne">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany" | "findOne">;
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "delete" | "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "saveTagsToSecretV2">;
@@ -63,6 +64,7 @@ type TSecretSnapshotServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretSnapshotServiceFactory = ReturnType<typeof secretSnapshotServiceFactory>;
@@ -84,7 +86,8 @@ export const secretSnapshotServiceFactory = ({
snapshotSecretV2BridgeDAL,
secretVersionV2TagBridgeDAL,
kmsService,
projectBotService
projectBotService,
folderCommitService
}: TSecretSnapshotServiceFactoryDep) => {
const projectSecretSnapshotCount = async ({
environment,
@@ -403,6 +406,18 @@ export const secretSnapshotServiceFactory = ({
.filter((el) => el.isRotatedSecret)
.map((el) => el.secretId);
const deletedSecretsChanges = new Map(); // secretId -> version info
const deletedFoldersChanges = new Map(); // folderId -> version info
const addedSecretsChanges = new Map(); // secretId -> version info
const addedFoldersChanges = new Map(); // folderId -> version info
const commitChanges: {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
}[] = [];
// this will remove all secrets in the current folder except rotated secrets, which we ignore
const deletedTopLevelSecs = await secretV2BridgeDAL.delete(
{
@@ -424,7 +439,35 @@ export const secretSnapshotServiceFactory = ({
},
tx
);
await Promise.all(
deletedTopLevelSecs.map(async (sec) => {
const version = await secretVersionV2BridgeDAL.findOne({ secretId: sec.id, version: sec.version }, tx);
deletedSecretsChanges.set(sec.id, {
id: sec.id,
version: sec.version,
// Store the version ID if a matching secret version record exists
versionId: version?.id
});
})
);
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
const deletedFoldersData = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
await Promise.all(
deletedFoldersData.map(async (folder) => {
const version = await folderVersionDAL.findOne({ folderId: folder.id, version: folder.version }, tx);
deletedFoldersChanges.set(folder.id, {
id: folder.id,
version: folder.version,
// Store the version ID if available
versionId: version?.id
});
})
);
// the deletion above also removed all child secrets and folders:
// due to SQL foreign keys and the linked-list connection, removing a folder removes everything below it too
const deletedFolders = deletedFoldersData; // reuse the rows already deleted above; a second identical delete would return nothing
@@ -489,14 +532,21 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);
// Track added folders
folderVersions.forEach((fv) => {
addedFoldersChanges.set(fv.folderId, fv);
});
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
@@ -511,6 +561,11 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
secretVersions.forEach((sv) => {
addedSecretsChanges.set(sv.secretId, sv);
});
await secretVersionV2TagBridgeDAL.insertMany(
secretVersions.flatMap(({ secretId, id }) =>
secretVerTagToBeInsert?.[secretId]?.length
@@ -522,6 +577,70 @@ export const secretSnapshotServiceFactory = ({
),
tx
);
// Compute commit changes
// Handle secrets
deletedSecretsChanges.forEach((deletedInfo, secretId) => {
const addedSecret = addedSecretsChanges.get(secretId);
if (addedSecret) {
// Secret was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedSecret.id) {
commitChanges.push({
type: CommitType.ADD, // In the commit system, updates are tracked as "add" with isUpdate=true
secretVersionId: addedSecret.id,
isUpdate: true
});
}
// Remove from addedSecrets since we've handled it
addedSecretsChanges.delete(secretId);
} else if (deletedInfo.versionId) {
// Secret was only deleted
commitChanges.push({
type: CommitType.DELETE,
secretVersionId: deletedInfo.versionId
});
}
});
// Add remaining new secrets (not updates)
addedSecretsChanges.forEach((addedSecret) => {
commitChanges.push({
type: CommitType.ADD,
secretVersionId: addedSecret.id
});
});
// Handle folders
deletedFoldersChanges.forEach((deletedInfo, folderId) => {
const addedFolder = addedFoldersChanges.get(folderId);
if (addedFolder) {
// Folder was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedFolder.id) {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id,
isUpdate: true
});
}
// Remove from addedFolders since we've handled it
addedFoldersChanges.delete(folderId);
} else if (deletedInfo.versionId) {
// Folder was only deleted
commitChanges.push({
type: CommitType.DELETE,
folderVersionId: deletedInfo.versionId,
folderId: deletedInfo.id
});
}
});
// Add remaining new folders (not updates)
addedFoldersChanges.forEach((addedFolder) => {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id
});
});
const newSnapshot = await snapshotDAL.create(
{
folderId: snapshot.folderId,
@@ -550,6 +669,22 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
if (commitChanges.length > 0) {
await folderCommitService.createCommit(
{
actor: {
type: actorType,
metadata: {
id: userActorId || identityActorId
}
},
message: "Rollback to snapshot",
folderId: snapshot.folderId,
changes: commitChanges
},
tx
);
}
return { ...newSnapshot, snapshotSecrets, snapshotFolders };
});
@@ -609,11 +744,12 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecret(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);
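
The change computation in this file reduces to a small pure diff over the deleted/added maps; restated for clarity (illustrative, using the same rules as the code above):

type TDeletedInfo = { id: string; versionId?: string };

const computeCommitChanges = (
  deleted: Map<string, TDeletedInfo>,
  added: Map<string, { id: string }>
) => {
  const changes: { type: "add" | "delete"; versionId: string; isUpdate?: boolean }[] = [];
  const remainingAdds = new Map(added);
  deleted.forEach((deletedInfo, resourceId) => {
    const readded = remainingAdds.get(resourceId);
    if (readded) {
      // deleted and re-created => an update, tracked as "add" with isUpdate=true
      if (deletedInfo.versionId !== readded.id) {
        changes.push({ type: "add", versionId: readded.id, isUpdate: true });
      }
      remainingAdds.delete(resourceId);
    } else if (deletedInfo.versionId) {
      changes.push({ type: "delete", versionId: deletedInfo.versionId });
    }
  });
  remainingAdds.forEach((readded) => changes.push({ type: "add", versionId: readded.id }));
  return changes;
};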

View File

@@ -117,6 +117,7 @@ export const OCIVaultSyncFns = {
syncSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection,
environment,
destinationConfig: { compartmentOcid, vaultOcid, keyOcid }
} = secretSync;
@@ -213,7 +214,7 @@ export const OCIVaultSyncFns = {
// Update and delete secrets
for await (const [key, variable] of Object.entries(variables)) {
// eslint-disable-next-line no-continue
if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
if (!matchesSchema(key, environment?.slug || "", secretSync.syncOptions.keySchema)) continue;
// Only update / delete active secrets
if (variable.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {
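
The extra environment argument feeds the key-schema check; assuming key schemas are templates with {{secretKey}} and {{environment}} placeholders (the real matchesSchema lives elsewhere in the codebase), the check behaves roughly like this sketch:

const matchesSchemaSketch = (key: string, environmentSlug: string, keySchema?: string) => {
  if (!keySchema) return true; // no schema configured => every key is in scope
  const escaped = keySchema.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = escaped
    .replace(/\\\{\\\{secretKey\\\}\\\}/g, ".*")
    .replace(/\\\{\\\{environment\\\}\\\}/g, environmentSlug);
  return new RegExp(`^${pattern}$`).test(key);
};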

View File

@@ -10,7 +10,8 @@ export const PgSqlLock = {
KmsRootKeyInit: 2025,
OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`),
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`)
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`)
} as const;
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
@@ -26,6 +27,7 @@ export const KeyStorePrefixes = {
KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
FolderTreeCheckpoint: (envId: string) => `folder-tree-checkpoint-${envId}`,
WaitUntilReadyProjectEnvironmentOperation: (projectId: string) =>
`wait-until-ready-project-environments-operation-${projectId}`,
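
These lock helpers hash a stable string into an advisory-lock key; a typical call site looks like the sketch below (db and orgId are placeholders; pg_advisory_xact_lock is standard Postgres and is released automatically when the transaction ends):

await db.transaction(async (tx) => {
  await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.CreateProject(orgId)]);
  // concurrent project creation for the same org now serializes here
  // ... create the project ...
});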

View File

@@ -21,6 +21,7 @@ export enum ApiDocsTags {
TokenAuth = "Token Auth",
UniversalAuth = "Universal Auth",
GcpAuth = "GCP Auth",
AliCloudAuth = "Alibaba Cloud Auth",
AwsAuth = "AWS Auth",
OciAuth = "OCI Auth",
AzureAuth = "Azure Auth",
@@ -89,6 +90,7 @@ export const GROUPS = {
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by.",
projectId: "The ID of the project the group belongs to.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
@@ -242,6 +244,43 @@ export const LDAP_AUTH = {
}
} as const;
export const ALICLOUD_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
Action: "The Alibaba Cloud API action. For STS GetCallerIdentity, this should be 'GetCallerIdentity'.",
Format: "The response format. For STS GetCallerIdentity, this should be 'JSON'.",
Version: "The API version. This should be in 'YYYY-MM-DD' format (e.g., '2015-04-01').",
AccessKeyId: "The AccessKey ID of the RAM user or STS token.",
SignatureMethod: "The signature algorithm. For STS GetCallerIdentity, this should be 'HMAC-SHA1'.",
Timestamp: "The timestamp of the request in UTC, formatted as 'YYYY-MM-DDTHH:mm:ssZ'.",
SignatureVersion: "The signature version. For STS GetCallerIdentity, this should be '1.0'.",
SignatureNonce: "A unique random string to prevent replay attacks.",
Signature: "The signature string calculated based on the request parameters and AccessKey Secret."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedArns: "The comma-separated list of trusted ARNs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedArns: "The comma-separated list of trusted ARNs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
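
For reference, the Signature described above follows Alibaba Cloud's RPC-style signing: sort and percent-encode all request parameters, build the string-to-sign from the HTTP method and the encoded query, then HMAC-SHA1 it with the AccessKey Secret plus a trailing "&". A client-side sketch:

import crypto from "node:crypto";

const percentEncode = (value: string) =>
  encodeURIComponent(value).replace(/\+/g, "%20").replace(/\*/g, "%2A").replace(/%7E/g, "~");

const signAliCloudRequest = (
  params: Record<string, string>, // all request parameters except Signature
  accessKeySecret: string,
  httpMethod = "POST"
) => {
  const canonicalizedQuery = Object.keys(params)
    .sort()
    .map((key) => `${percentEncode(key)}=${percentEncode(params[key])}`)
    .join("&");
  const stringToSign = `${httpMethod}&${percentEncode("/")}&${percentEncode(canonicalizedQuery)}`;
  return crypto.createHmac("sha1", `${accessKeySecret}&`).update(stringToSign).digest("base64");
};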
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
@@ -400,6 +439,8 @@ export const KUBERNETES_AUTH = {
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
tokenReviewMode:
"The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -417,6 +458,8 @@ export const KUBERNETES_AUTH = {
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
tokenReviewMode:
"The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -621,7 +664,8 @@ export const PROJECTS = {
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)",
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project."
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -1107,6 +1151,14 @@ export const DYNAMIC_SECRET_LEASES = {
leaseId: "The ID of the dynamic secret lease.",
isForced:
"A boolean flag to delete the the dynamic secret from Infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
},
KUBERNETES: {
CREATE: {
config: {
namespace:
"The Kubernetes namespace to create the lease in. If not specified, the first namespace defined in the configuration will be used."
}
}
}
} as const;
export const SECRET_TAGS = {
@@ -2156,6 +2208,11 @@ export const AppConnections = {
code: "The OAuth code to use to connect with Azure Client Secrets.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
tenantId: "The Tenant ID to use to connect with Azure DevOps.",
orgName: "The Organization name to use to connect with Azure DevOps."
},
OCI: {
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
tenancyOcid: "The OCID (Oracle Cloud Identifier) of the tenancy in Oracle Cloud Infrastructure.",
@@ -2270,9 +2327,14 @@ export const SecretSyncs = {
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
label: "An optional label to assign to secrets created in Azure App Configuration."
},
AZURE_DEVOPS: {
devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
devopsProjectName: "The name of the Azure DevOps project to sync secrets to."
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to."
projectId: "The ID of the Google project secrets should be synced to.",
locationId: 'The ID of the Google project location secrets should be synced to (e.g. "us-west4").'
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."

View File

@@ -213,6 +213,12 @@ const envSchema = z
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
),
DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */
@@ -255,6 +261,10 @@ const envSchema = z
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),
// PIT
PIT_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("2")),
PIT_TREE_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("30")),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(
z

View File

@@ -0,0 +1,428 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";
import https from "https";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
import {
GatewayProxyProtocol,
IGatewayProxyOptions,
IGatewayProxyServer,
TGatewayTlsOptions,
TPingGatewayAndVerifyDTO
} from "./types";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TGatewayTlsOptions,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.verify(caCertificate.publicKey);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formattedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formattedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
const pingWriter = stream.writable.getWriter();
await pingWriter.write(Buffer.from("PING\n"));
pingWriter.releaseLock();
// Read PONG response
const reader = stream.readable.getReader();
const { value, done } = await reader.read();
if (done) {
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
reader.releaseLock();
return;
} catch (err) {
lastError = err as Error;
if (attempt < maxRetries) {
await new Promise((resolve) => {
setTimeout(resolve, DEFAULT_RETRY_DELAY);
});
}
} finally {
await quicClient.destroy();
}
}
logger.error(lastError);
throw new BadRequestError({
message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
});
};
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions,
relayHost,
relayPort,
identityId,
orgId,
protocol = GatewayProxyProtocol.Tcp,
httpsAgent
}: {
targetHost?: string;
targetPort?: number;
relayPort: number;
relayHost: string;
tlsOptions: TGatewayTlsOptions;
identityId: string;
orgId: string;
protocol?: GatewayProxyProtocol;
httpsAgent?: https.Agent;
}): Promise<IGatewayProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg: string[] = []; // start empty so getProxyError() does not emit a leading comma
return new Promise((resolve, reject) => {
const server = net.createServer();
let streamClosed = false;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientConn) => {
try {
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
const stream = quicClient.connection.newStream("bidi");
const forwardWriter = stream.writable.getWriter();
let command: string;
if (protocol === GatewayProxyProtocol.Http) {
if (!targetHost && !targetPort) {
command = `FORWARD-HTTP`;
logger.debug(`Using HTTP proxy mode, no target URL provided [command=${command.trim()}]`);
} else {
if (!targetHost || targetPort === undefined) {
throw new BadRequestError({
message: `Target host and port are required for HTTP proxy mode with custom target`
});
}
const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
command = `FORWARD-HTTP ${targetUrl}`;
logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);
// extract ca certificate from httpsAgent if present
if (httpsAgent && targetHost.startsWith("https://")) {
const agentOptions = httpsAgent.options;
if (agentOptions && agentOptions.ca) {
const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
const caB64 = Buffer.from(caCert as string).toString("base64");
command += ` ca=${caB64}`;
const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
command += ` verify=${rejectUnauthorized}`;
logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);
}
}
}
command += "\n";
} else if (protocol === GatewayProxyProtocol.Tcp) {
if (!targetHost || !targetPort) {
throw new BadRequestError({
message: `Target host and port are required for TCP proxy mode`
});
}
// For TCP mode, send FORWARD-TCP with host:port
command = `FORWARD-TCP ${targetHost}:${targetPort}\n`;
logger.debug(`Using TCP proxy mode: ${command.trim()}`);
} else {
throw new BadRequestError({
message: `Invalid protocol: ${protocol as string}`
});
}
await forwardWriter.write(Buffer.from(command));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", () => {
if (!streamClosed) {
try {
writer.close().catch((err) => {
logger.debug(err, "Error closing writer (already closed)");
});
} catch (error) {
logger.debug(error, "Error in writer close");
}
}
});
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
if (!streamClosed) {
streamClosed = true;
stream.destroy().catch((err) => {
logger.debug(err, "Stream already destroyed during close event");
});
}
});
const cleanup = async () => {
try {
clientConn?.destroy();
} catch (err) {
logger.debug(err, "Error destroying client connection");
}
if (!streamClosed) {
streamClosed = true;
try {
await stream.destroy();
} catch (err) {
logger.debug(err, "Error destroying stream (might be already closed)");
}
}
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
reject(err);
}
});
server.on("error", (err) => {
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
server.close();
reject(new Error("Failed to get server port"));
return;
}
logger.info(`Gateway proxy started on port ${address.port} (${protocol} mode)`);
resolve({
server,
port: address.port,
cleanup: async () => {
try {
server.close();
} catch (err) {
logger.debug(err, "Error closing server");
}
try {
await quicClient?.destroy();
} catch (err) {
logger.debug(err, "Error destroying QUIC client");
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
};
export const withGatewayProxy = async <T>(
callback: (port: number, httpsAgent?: https.Agent) => Promise<T>,
options: IGatewayProxyOptions
): Promise<T> => {
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId, protocol, httpsAgent } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
relayHost,
tlsOptions,
identityId,
orgId,
protocol,
httpsAgent
});
try {
// Execute the callback with the allocated port
return await callback(port, httpsAgent);
} catch (err) {
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
let errorMessage = proxyErrorMessage || (err as Error)?.message;
if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
errorMessage = (err.response?.data as { message: string }).message;
}
throw new BadRequestError({ message: errorMessage });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
}
};
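
A usage sketch for withGatewayProxy (all values are placeholders): the helper opens a local TCP listener, relays it over QUIC to the gateway, hands the allocated local port to the callback, and tears the proxy down afterwards regardless of the outcome.

const rows = await withGatewayProxy(
  async (port) => {
    // talk to 127.0.0.1:<port> as if it were the private target behind the gateway
    return queryDatabase({ host: "127.0.0.1", port }); // queryDatabase is hypothetical
  },
  {
    relayHost,
    relayPort,
    targetHost: "10.0.0.12", // private host reachable only from the gateway
    targetPort: 5432,
    tlsOptions,
    identityId,
    orgId,
    protocol: GatewayProxyProtocol.Tcp
  }
);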

View File

@@ -1,392 +1,2 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
type TTlsOption = { ca: string; cert: string; key: string };
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TTlsOption,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.verify(caCertificate.publicKey);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formattedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formattedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
maxRetries?: number;
identityId: string;
orgId: string;
};
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
const pingWriter = stream.writable.getWriter();
await pingWriter.write(Buffer.from("PING\n"));
pingWriter.releaseLock();
// Read PONG response
const reader = stream.readable.getReader();
const { value, done } = await reader.read();
if (done) {
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new Error(`Failed to ping. Unexpected response: ${response}`);
}
reader.releaseLock();
// Destroy the client only on success or after the final attempt; a
// per-attempt finally block would tear down the connection that the
// next retry still needs.
await quicClient.destroy();
return;
} catch (err) {
lastError = err as Error;
if (attempt < maxRetries) {
await new Promise((resolve) => {
setTimeout(resolve, DEFAULT_RETRY_DELAY);
});
}
}
}
await quicClient.destroy();
logger.error(lastError);
throw new BadRequestError({
message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
});
};
interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions,
relayHost,
relayPort,
identityId,
orgId
}: {
targetHost: string;
targetPort: number;
relayPort: number;
relayHost: string;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}): Promise<TProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
let streamClosed = false;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientConn) => {
try {
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
const stream = quicClient.connection.newStream("bidi");
// Send FORWARD-TCP command
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", () => {
if (!streamClosed) {
try {
writer.close().catch((err) => {
logger.debug(err, "Error closing writer (already closed)");
});
} catch (error) {
logger.debug(error, "Error in writer close");
}
}
});
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
if (!streamClosed) {
streamClosed = true;
stream.destroy().catch((err) => {
logger.debug(err, "Stream already destroyed during close event");
});
}
});
const cleanup = async () => {
try {
clientConn?.destroy();
} catch (err) {
logger.debug(err, "Error destroying client connection");
}
if (!streamClosed) {
streamClosed = true;
try {
await stream.destroy();
} catch (err) {
logger.debug(err, "Error destroying stream (might be already closed)");
}
}
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
reject(err);
}
});
server.on("error", (err) => {
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
server.close();
reject(new Error("Failed to get server port"));
return;
}
logger.info("Gateway proxy started");
resolve({
server,
port: address.port,
cleanup: async () => {
try {
server.close();
} catch (err) {
logger.debug(err, "Error closing server");
}
try {
await quicClient?.destroy();
} catch (err) {
logger.debug(err, "Error destroying QUIC client");
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
};
interface ProxyOptions {
targetHost: string;
targetPort: number;
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}
export const withGatewayProxy = async <T>(
callback: (port: number) => Promise<T>,
options: ProxyOptions
): Promise<T> => {
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
relayHost,
tlsOptions,
identityId,
orgId
});
try {
// Execute the callback with the allocated port
return await callback(port);
} catch (err) {
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
let errorMessage = proxyErrorMessage || (err as Error)?.message;
if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
errorMessage = (err.response?.data as { message: string }).message;
}
throw new BadRequestError({ message: errorMessage });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
}
};
export { pingGatewayAndVerify, withGatewayProxy } from "./gateway";
export { GatewayHttpProxyActions, GatewayProxyProtocol } from "./types";

View File

@@ -0,0 +1,43 @@
import net from "node:net";
import https from "https";
export type TGatewayTlsOptions = { ca: string; cert: string; key: string };
export enum GatewayProxyProtocol {
Http = "http",
Tcp = "tcp"
}
export enum GatewayHttpProxyActions {
InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token",
UseGatewayK8sServiceAccount = "use-k8s-sa"
}
export interface IGatewayProxyOptions {
targetHost?: string;
targetPort?: number;
relayHost: string;
relayPort: number;
tlsOptions: TGatewayTlsOptions;
identityId: string;
orgId: string;
protocol: GatewayProxyProtocol;
httpsAgent?: https.Agent;
}
export type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: TGatewayTlsOptions;
maxRetries?: number;
identityId: string;
orgId: string;
};
export interface IGatewayProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
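As a consumption sketch, pingGatewayAndVerify takes the TPingGatewayAndVerifyDTO defined above; every value here is illustrative and the import paths are assumed.

import { pingGatewayAndVerify } from "@app/lib/gateway"; // path assumed
import type { TPingGatewayAndVerifyDTO } from "@app/lib/gateway/types"; // path assumed

const dto: TPingGatewayAndVerifyDTO = {
  relayHost: "relay.example.com",
  relayPort: 8443,
  tlsOptions: { ca: "<pem ca>", cert: "<pem cert>", key: "<pem key>" },
  identityId: "<identity-id>",
  orgId: "<org-id>",
  maxRetries: 5 // optional; the implementation defaults to 3
};
await pingGatewayAndVerify(dto);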

View File

@@ -7,13 +7,24 @@ type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
const isValidExpression = (expression: string, dto: SanitizationArg): boolean => {
// Allow helper functions (replace, truncate, random)
const allowedHelpers = ["replace", "truncate", "random"];
if (allowedHelpers.includes(expression)) {
return true;
}
// Check regular allowed expressions
return dto?.allowedExpressions?.(expression) || false;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
@@ -26,7 +37,7 @@ export const isValidHandleBarTemplate = (template: string, dto: SanitizationArg)
if (el.type === "ContentStatement") return true;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return true;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return true;
}
return false;
});
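A minimal sketch of the sanitizer's behavior after this change, assuming validateHandlebarTemplate is in scope; the template strings are illustrative.

// Passes: the statement's path is "truncate", which is now an allowlisted
// helper even though allowedExpressions does not cover it. Note that the
// walk shown above checks only the path, not the helper's params.
validateHandlebarTemplate("example-template", "{{truncate host 10}}", {
  allowedExpressions: (expr) => expr === "host"
});

// Throws BadRequestError: "lookup" is neither an allowed helper nor an
// allowed expression.
validateHandlebarTemplate("example-template", "{{lookup secrets 'key'}}", {
  allowedExpressions: (expr) => expr === "host"
});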

View File

@@ -60,6 +60,7 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
FolderTreeCheckpoint = "folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}
@@ -94,6 +95,7 @@ export enum QueueJobs {
SecretRotationV2QueueRotations = "secret-rotation-v2-queue-rotations",
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
CreateFolderTreeCheckpoint = "create-folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
@@ -209,6 +211,12 @@ export type TQueueJobTypes = {
name: QueueJobs.ProjectV3Migration;
payload: { projectId: string };
};
[QueueName.FolderTreeCheckpoint]: {
name: QueueJobs.CreateFolderTreeCheckpoint;
payload: {
envId: string;
};
};
[QueueName.ImportSecretsFromExternalSource]: {
name: QueueJobs.ImportSecretsFromExternalSource;
payload: {

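A type-level sketch of what the new queue mapping enforces; the identifiers are illustrative, the import path is assumed, and the enqueue API itself is outside this hunk.

import { QueueJobs, QueueName, TQueueJobTypes } from "@app/queue"; // path assumed

// Any producer enqueueing into the new queue must supply exactly this shape.
const checkpointJob: TQueueJobTypes[QueueName.FolderTreeCheckpoint] = {
  name: QueueJobs.CreateFolderTreeCheckpoint,
  payload: { envId: "env_abc123" }
};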
Some files were not shown because too many files have changed in this diff.