Merge branch 'main' of https://github.com/Infisical/infisical into feat/octopus-deploy-secret-sync
@@ -58,4 +58,5 @@ docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139
docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62
docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:246
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:248
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:248
docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:142
2 Makefile
@@ -14,7 +14,7 @@ up-dev-metrics:
	docker compose -f docker-compose.dev.yml --profile metrics up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build
	docker compose -f docker-compose.prod.yml up --build

down:
	docker compose -f docker-compose.dev.yml down
@@ -192,3 +192,28 @@ Feature: Challenge
    And the value response with jq ".status" should be equal to 400
    And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:badCSR"
    And the value response with jq ".detail" should be equal to "Invalid CSR: Common name + SANs mismatch with order identifiers"

  Scenario: Get certificate without passing challenge when skip DNS ownership verification is enabled
    Given I create an ACME profile with config as "acme_profile"
      """
      {
        "skipDnsOwnershipVerification": true
      }
      """
    When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
    When I create certificate signing request as csr
    Then I add names to certificate signing request csr
      """
      {
        "COMMON_NAME": "localhost"
      }
      """
    And I create a RSA private key pair as cert_key
    And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
    And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
    And the value order.body with jq ".status" should be equal to "ready"
    And I poll and finalize the ACME order order as finalized_order
    And the value finalized_order.body with jq ".status" should be equal to "valid"
    And I parse the full-chain certificate from order finalized_order as cert
    And the value cert with jq ".subject.common_name" should be equal to "localhost"
@@ -266,6 +266,46 @@ def step_impl(context: Context, ca_id: str, template_id: str, profile_var: str):
    )


@given(
    'I create an ACME profile with config as "{profile_var}"'
)
def step_impl(context: Context, profile_var: str):
    profile_slug = faker.slug()
    jwt_token = context.vars["AUTH_TOKEN"]
    acme_config = replace_vars(json.loads(context.text), context.vars)
    response = context.http_client.post(
        "/api/v1/cert-manager/certificate-profiles",
        headers=dict(authorization="Bearer {}".format(jwt_token)),
        json={
            "projectId": context.vars["PROJECT_ID"],
            "slug": profile_slug,
            "description": "ACME Profile created by BDD test",
            "enrollmentType": "acme",
            "caId": context.vars["CERT_CA_ID"],
            "certificateTemplateId": context.vars["CERT_TEMPLATE_ID"],
            "acmeConfig": acme_config,
        },
    )
    response.raise_for_status()
    resp_json = response.json()
    profile_id = resp_json["certificateProfile"]["id"]
    kid = profile_id

    response = context.http_client.get(
        f"/api/v1/cert-manager/certificate-profiles/{profile_id}/acme/eab-secret/reveal",
        headers=dict(authorization="Bearer {}".format(jwt_token)),
    )
    response.raise_for_status()
    resp_json = response.json()
    secret = resp_json["eabSecret"]

    context.vars[profile_var] = AcmeProfile(
        profile_id,
        eab_kid=kid,
        eab_secret=secret,
    )


@given('I have an ACME cert profile with external ACME CA as "{profile_var}"')
def step_impl(context: Context, profile_var: str):
    profile_id = context.vars.get("PROFILE_ID")
8 backend/src/@types/knex.d.ts vendored
@@ -170,6 +170,9 @@ import {
  TIdentityGcpAuths,
  TIdentityGcpAuthsInsert,
  TIdentityGcpAuthsUpdate,
  TIdentityGroupMembership,
  TIdentityGroupMembershipInsert,
  TIdentityGroupMembershipUpdate,
  TIdentityJwtAuths,
  TIdentityJwtAuthsInsert,
  TIdentityJwtAuthsUpdate,
@@ -857,6 +860,11 @@ declare module "knex/types/tables" {
      TUserGroupMembershipInsert,
      TUserGroupMembershipUpdate
    >;
    [TableName.IdentityGroupMembership]: KnexOriginal.CompositeTableType<
      TIdentityGroupMembership,
      TIdentityGroupMembershipInsert,
      TIdentityGroupMembershipUpdate
    >;
    [TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
      TGroupProjectMemberships,
      TGroupProjectMembershipsInsert,
@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityGroupMembership))) {
    await knex.schema.createTable(TableName.IdentityGroupMembership, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("identityId").notNullable();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.uuid("groupId").notNullable();
      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.unique(["identityId", "groupId"]);
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityGroupMembership);
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.IdentityGroupMembership)) {
    await knex.schema.dropTable(TableName.IdentityGroupMembership);
    await dropOnUpdateTrigger(knex, TableName.IdentityGroupMembership);
  }
}
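A minimal Knex sketch of how the new join table could be read once this migration has run. The table name comes from the `TableName` enum extended later in this diff; the `db` handle and the helper name are assumptions for illustration only.

```typescript
import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical helper: list the group IDs an identity belongs to, reading the
// unique (identityId, groupId) pairs created by the migration above.
export const listGroupIdsForIdentity = async (db: Knex, identityId: string): Promise<string[]> => {
  const rows = await db(TableName.IdentityGroupMembership).where({ identityId }).select("groupId");
  return rows.map((row) => row.groupId as string);
};
```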
@@ -0,0 +1,38 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { dropConstraintIfExists } from "./utils/dropConstraintIfExists";

const FOREIGN_KEY_CONSTRAINT_NAME = "certificate_requests_acme_order_id_fkey";
const INDEX_NAME = "certificate_requests_acme_order_id_idx";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateRequests)) {
    const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");

    if (!hasAcmeOrderId) {
      await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
        t.uuid("acmeOrderId").nullable();
        t.foreign("acmeOrderId", FOREIGN_KEY_CONSTRAINT_NAME)
          .references("id")
          .inTable(TableName.PkiAcmeOrder)
          .onDelete("SET NULL");
        t.index("acmeOrderId", INDEX_NAME);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateRequests)) {
    const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");

    if (hasAcmeOrderId) {
      await dropConstraintIfExists(TableName.CertificateRequests, FOREIGN_KEY_CONSTRAINT_NAME, knex);
      await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
        t.dropIndex("acmeOrderId", INDEX_NAME);
        t.dropColumn("acmeOrderId");
      });
    }
  }
}
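A small sketch of the lookup this column enables, assuming the `TableName` values used in the migration above; the `db` handle and the function name are illustrative, not part of the change.

```typescript
import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical lookup: certificate requests created to fulfil a given ACME order.
// Because the foreign key is ON DELETE SET NULL, a request survives if its order is removed.
export const findCertificateRequestsByAcmeOrder = async (db: Knex, acmeOrderId: string) =>
  db(TableName.CertificateRequests).where({ acmeOrderId }).select("*");
```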
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
    if (!(await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification"))) {
      await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
        t.boolean("skipDnsOwnershipVerification").defaultTo(false).notNullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
    if (await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification")) {
      await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
        t.dropColumn("skipDnsOwnershipVerification");
      });
    }
  }
}
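The feature scenario earlier in this diff expects an ACME order to reach `ready` without any challenge being passed when this flag is enabled. Below is a sketch of how such a flag could gate DNS validation; it is illustrative only — the actual challenge-validation service is not part of this diff, and `validateDnsOwnership` is a hypothetical helper.

```typescript
// Illustrative only: the config shape mirrors PkiAcmeEnrollmentConfigsSchema later in this diff.
type TAcmeEnrollmentConfig = { skipDnsOwnershipVerification: boolean };

const ensureDnsOwnership = async (
  config: TAcmeEnrollmentConfig,
  validateDnsOwnership: (identifier: string) => Promise<void>, // hypothetical validator
  identifier: string
): Promise<void> => {
  // When the profile opts out of DNS ownership verification, the identifier is
  // accepted as-is and the order can move straight to "ready".
  if (config.skipDnsOwnershipVerification) return;
  await validateDnsOwnership(identifier);
};
```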
@@ -26,7 +26,8 @@ export const CertificateRequestsSchema = z.object({
  keyAlgorithm: z.string().nullable().optional(),
  signatureAlgorithm: z.string().nullable().optional(),
  errorMessage: z.string().nullable().optional(),
  metadata: z.string().nullable().optional()
  metadata: z.string().nullable().optional(),
  acmeOrderId: z.string().uuid().nullable().optional()
});

export type TCertificateRequests = z.infer<typeof CertificateRequestsSchema>;
22 backend/src/db/schemas/identity-group-membership.ts Normal file
@@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityGroupMembershipSchema = z.object({
  id: z.string().uuid(),
  identityId: z.string().uuid(),
  groupId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityGroupMembership = z.infer<typeof IdentityGroupMembershipSchema>;
export type TIdentityGroupMembershipInsert = Omit<z.input<typeof IdentityGroupMembershipSchema>, TImmutableDBKeys>;
export type TIdentityGroupMembershipUpdate = Partial<
  Omit<z.input<typeof IdentityGroupMembershipSchema>, TImmutableDBKeys>
>;
@@ -55,6 +55,7 @@ export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-group-membership";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
@@ -42,6 +42,7 @@ export enum TableName {
  GroupProjectMembershipRole = "group_project_membership_roles",
  ExternalGroupOrgRoleMapping = "external_group_org_role_mappings",
  UserGroupMembership = "user_group_membership",
  IdentityGroupMembership = "identity_group_membership",
  UserAliases = "user_aliases",
  UserEncryptionKey = "user_encryption_keys",
  AuthTokens = "auth_tokens",
@@ -13,7 +13,8 @@ export const PkiAcmeEnrollmentConfigsSchema = z.object({
  id: z.string().uuid(),
  encryptedEabSecret: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  skipDnsOwnershipVerification: z.boolean().default(false)
});

export type TPkiAcmeEnrollmentConfigs = z.infer<typeof PkiAcmeEnrollmentConfigsSchema>;
@@ -11,6 +11,7 @@ import {
} from "@app/ee/services/external-kms/providers/model";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { deterministicStringify } from "@app/lib/fn/object";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -88,9 +89,11 @@ export const registerExternalKmsEndpoints = <
        ...rest
      } = externalKms;

      const credentialsToHash = deterministicStringify(configuration.credential);

      const credentialsHash = crypto.nativeCrypto
        .createHash("sha256")
        .update(externalKmsData.encryptedProviderInputs)
        .update(Buffer.from(credentialsToHash))
        .digest("hex");
      return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };
    }
@@ -153,9 +156,12 @@ export const registerExternalKmsEndpoints = <
        external: { providerInput: externalKmsConfiguration, ...externalKmsData },
        ...rest
      } = externalKms;

      const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);

      const credentialsHash = crypto.nativeCrypto
        .createHash("sha256")
        .update(externalKmsData.encryptedProviderInputs)
        .update(Buffer.from(credentialsToHash))
        .digest("hex");
      return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
    }
@@ -222,9 +228,12 @@ export const registerExternalKmsEndpoints = <
        external: { providerInput: externalKmsConfiguration, ...externalKmsData },
        ...rest
      } = externalKms;

      const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);

      const credentialsHash = crypto.nativeCrypto
        .createHash("sha256")
        .update(externalKmsData.encryptedProviderInputs)
        .update(Buffer.from(credentialsToHash))
        .digest("hex");
      return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
    }
@@ -277,9 +286,12 @@ export const registerExternalKmsEndpoints = <
        external: { providerInput: configuration, ...externalKmsData },
        ...rest
      } = externalKms;

      const credentialsToHash = deterministicStringify(configuration.credential);

      const credentialsHash = crypto.nativeCrypto
        .createHash("sha256")
        .update(externalKmsData.encryptedProviderInputs)
        .update(Buffer.from(credentialsToHash))
        .digest("hex");

      return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };
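The same hashing pattern is repeated in all four handlers above. A condensed sketch of the idea, using only the `crypto` wrapper and `deterministicStringify` helper imported in this file; the function name `computeCredentialsHash` is introduced here purely for illustration.

```typescript
import { crypto } from "@app/lib/crypto/cryptography";
import { deterministicStringify } from "@app/lib/fn/object";

// Hash the encrypted provider inputs together with a key-order-independent
// serialization of the credential, so clients can detect credential drift
// without ever receiving the credential itself.
const computeCredentialsHash = (encryptedProviderInputs: Buffer, credential: unknown): string =>
  crypto.nativeCrypto
    .createHash("sha256")
    .update(encryptedProviderInputs)
    .update(Buffer.from(deterministicStringify(credential)))
    .digest("hex");
```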
@@ -1,18 +1,27 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GroupsSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import { GroupsSchema, IdentitiesSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import {
|
||||
EFilterReturnedProjects,
|
||||
EFilterReturnedUsers,
|
||||
EGroupProjectsOrderBy
|
||||
FilterMemberType,
|
||||
FilterReturnedMachineIdentities,
|
||||
FilterReturnedProjects,
|
||||
FilterReturnedUsers,
|
||||
GroupMembersOrderBy,
|
||||
GroupProjectsOrderBy
|
||||
} from "@app/ee/services/group/group-types";
|
||||
import { ApiDocsTags, GROUPS } from "@app/lib/api-docs";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const GroupIdentityResponseSchema = IdentitiesSchema.pick({
|
||||
id: true,
|
||||
name: true
|
||||
});
|
||||
|
||||
export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
@@ -190,8 +199,15 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_USERS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
|
||||
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -202,12 +218,10 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
lastName: true,
|
||||
id: true
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
)
|
||||
.extend({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
@@ -227,6 +241,134 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/machine-identities",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_MACHINE_IDENTITIES.id)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_MACHINE_IDENTITIES.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_MACHINE_IDENTITIES.limit),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MACHINE_IDENTITIES.search),
|
||||
filter: z
|
||||
.nativeEnum(FilterReturnedMachineIdentities)
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MACHINE_IDENTITIES.filterMachineIdentities)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
machineIdentities: GroupIdentityResponseSchema.extend({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
}).array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { machineIdentities, totalCount } = await server.services.group.listGroupMachineIdentities({
|
||||
id: req.params.id,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { machineIdentities, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/members",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_MEMBERS.id)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_MEMBERS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_MEMBERS.limit),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.search),
|
||||
orderBy: z
|
||||
.nativeEnum(GroupMembersOrderBy)
|
||||
.default(GroupMembersOrderBy.Name)
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.orderBy),
|
||||
orderDirection: z.nativeEnum(OrderByDirection).optional().describe(GROUPS.LIST_MEMBERS.orderDirection),
|
||||
memberTypeFilter: z
|
||||
.union([z.nativeEnum(FilterMemberType), z.array(z.nativeEnum(FilterMemberType))])
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.memberTypeFilter)
|
||||
.transform((val) => {
|
||||
if (!val) return undefined;
|
||||
return Array.isArray(val) ? val : [val];
|
||||
})
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
members: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({
|
||||
id: z.string(),
|
||||
joinedGroupAt: z.date().nullable(),
|
||||
type: z.literal("user"),
|
||||
user: UsersSchema.pick({ id: true, firstName: true, lastName: true, email: true, username: true })
|
||||
}),
|
||||
z.object({
|
||||
id: z.string(),
|
||||
joinedGroupAt: z.date().nullable(),
|
||||
type: z.literal("machineIdentity"),
|
||||
machineIdentity: GroupIdentityResponseSchema
|
||||
})
|
||||
])
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { members, totalCount } = await server.services.group.listGroupMembers({
|
||||
id: req.params.id,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { members, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/projects",
|
||||
@@ -243,11 +385,18 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_PROJECTS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_PROJECTS.limit),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_PROJECTS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_PROJECTS.search),
|
||||
filter: z.nativeEnum(FilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
|
||||
orderBy: z
|
||||
.nativeEnum(EGroupProjectsOrderBy)
|
||||
.default(EGroupProjectsOrderBy.Name)
|
||||
.nativeEnum(GroupProjectsOrderBy)
|
||||
.default(GroupProjectsOrderBy.Name)
|
||||
.describe(GROUPS.LIST_PROJECTS.orderBy),
|
||||
orderDirection: z
|
||||
.nativeEnum(OrderByDirection)
|
||||
@@ -263,11 +412,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
description: true,
|
||||
type: true
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
)
|
||||
.extend({
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
@@ -325,6 +472,40 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:id/machine-identities/:machineIdentityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.ADD_MACHINE_IDENTITY.id),
|
||||
machineIdentityId: z.string().trim().describe(GROUPS.ADD_MACHINE_IDENTITY.machineIdentityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const machineIdentity = await server.services.group.addMachineIdentityToGroup({
|
||||
id: req.params.id,
|
||||
identityId: req.params.machineIdentityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return machineIdentity;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/users/:username",
|
||||
@@ -362,4 +543,38 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
return user;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/machine-identities/:machineIdentityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE_MACHINE_IDENTITY.id),
|
||||
machineIdentityId: z.string().trim().describe(GROUPS.DELETE_MACHINE_IDENTITY.machineIdentityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const machineIdentity = await server.services.group.removeMachineIdentityFromGroup({
|
||||
id: req.params.id,
|
||||
identityId: req.params.machineIdentityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return machineIdentity;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
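A short usage sketch of the new group membership endpoints registered above, written against the routes and response schemas shown in this hunk. The base URL, token, and the `/api/v1/groups` mount path are assumptions for illustration.

```typescript
// Assumed placeholders; the /api/v1/groups mount path is an assumption, not shown in this diff.
const BASE_URL = "https://app.infisical.com";
const TOKEN = "<access token>";

const demoGroupMembershipCalls = async (groupId: string, identityId: string) => {
  const headers = { authorization: `Bearer ${TOKEN}` };

  // Add a machine identity to a group: POST /:id/machine-identities/:machineIdentityId
  await fetch(`${BASE_URL}/api/v1/groups/${groupId}/machine-identities/${identityId}`, {
    method: "POST",
    headers
  });

  // List mixed members and narrow the discriminated union returned by GET /:id/members
  const res = await fetch(`${BASE_URL}/api/v1/groups/${groupId}/members?limit=10`, { headers });
  const { members } = (await res.json()) as {
    members: Array<
      | { id: string; type: "user"; user: { username: string } }
      | { id: string; type: "machineIdentity"; machineIdentity: { name: string } }
    >;
  };

  for (const member of members) {
    // The `type` literal decides which payload is present.
    const label = member.type === "user" ? member.user.username : member.machineIdentity.name;
    console.log(member.type, label);
  }
};
```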
@@ -3,6 +3,11 @@ import {
|
||||
SanitizedAwsIamAccountWithResourceSchema,
|
||||
UpdateAwsIamAccountSchema
|
||||
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
|
||||
import {
|
||||
CreateKubernetesAccountSchema,
|
||||
SanitizedKubernetesAccountWithResourceSchema,
|
||||
UpdateKubernetesAccountSchema
|
||||
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
|
||||
import {
|
||||
CreateMySQLAccountSchema,
|
||||
SanitizedMySQLAccountWithResourceSchema,
|
||||
@@ -50,6 +55,15 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fasti
|
||||
updateAccountSchema: UpdateSSHAccountSchema
|
||||
});
|
||||
},
|
||||
[PamResource.Kubernetes]: async (server: FastifyZodProvider) => {
|
||||
registerPamResourceEndpoints({
|
||||
server,
|
||||
resourceType: PamResource.Kubernetes,
|
||||
accountResponseSchema: SanitizedKubernetesAccountWithResourceSchema,
|
||||
createAccountSchema: CreateKubernetesAccountSchema,
|
||||
updateAccountSchema: UpdateKubernetesAccountSchema
|
||||
});
|
||||
},
|
||||
[PamResource.AwsIam]: async (server: FastifyZodProvider) => {
|
||||
registerPamResourceEndpoints({
|
||||
server,
|
||||
|
||||
@@ -4,6 +4,7 @@ import { PamFoldersSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums";
|
||||
import { SanitizedAwsIamAccountWithResourceSchema } from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
|
||||
import { SanitizedKubernetesAccountWithResourceSchema } from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
|
||||
import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
|
||||
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
|
||||
import { GatewayAccessResponseSchema } from "@app/ee/services/pam-resource/pam-resource-schemas";
|
||||
@@ -21,10 +22,17 @@ const SanitizedAccountSchema = z.union([
|
||||
SanitizedSSHAccountWithResourceSchema, // ORDER MATTERS
|
||||
SanitizedPostgresAccountWithResourceSchema,
|
||||
SanitizedMySQLAccountWithResourceSchema,
|
||||
SanitizedKubernetesAccountWithResourceSchema,
|
||||
SanitizedAwsIamAccountWithResourceSchema
|
||||
]);
|
||||
|
||||
type TSanitizedAccount = z.infer<typeof SanitizedAccountSchema>;
|
||||
const ListPamAccountsResponseSchema = z.object({
|
||||
accounts: SanitizedAccountSchema.array(),
|
||||
folders: PamFoldersSchema.array(),
|
||||
totalCount: z.number().default(0),
|
||||
folderId: z.string().optional(),
|
||||
folderPaths: z.record(z.string(), z.string())
|
||||
});
|
||||
|
||||
export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@@ -55,13 +63,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
accounts: SanitizedAccountSchema.array(),
|
||||
folders: PamFoldersSchema.array(),
|
||||
totalCount: z.number().default(0),
|
||||
folderId: z.string().optional(),
|
||||
folderPaths: z.record(z.string(), z.string())
|
||||
})
|
||||
200: ListPamAccountsResponseSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
@@ -98,7 +100,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
return { accounts: accounts as TSanitizedAccount[], folders, totalCount, folderId, folderPaths };
|
||||
return { accounts, folders, totalCount, folderId, folderPaths } as z.infer<typeof ListPamAccountsResponseSchema>;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -135,6 +137,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
|
||||
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Postgres) }),
|
||||
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.MySQL) }),
|
||||
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.SSH) }),
|
||||
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Kubernetes) }),
|
||||
// AWS IAM (no gateway, returns console URL)
|
||||
z.object({
|
||||
sessionId: z.string(),
|
||||
|
||||
@@ -3,6 +3,11 @@ import {
|
||||
SanitizedAwsIamResourceSchema,
|
||||
UpdateAwsIamResourceSchema
|
||||
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
|
||||
import {
|
||||
CreateKubernetesResourceSchema,
|
||||
SanitizedKubernetesResourceSchema,
|
||||
UpdateKubernetesResourceSchema
|
||||
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
|
||||
import {
|
||||
CreateMySQLResourceSchema,
|
||||
MySQLResourceSchema,
|
||||
@@ -50,6 +55,15 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fast
|
||||
updateResourceSchema: UpdateSSHResourceSchema
|
||||
});
|
||||
},
|
||||
[PamResource.Kubernetes]: async (server: FastifyZodProvider) => {
|
||||
registerPamResourceEndpoints({
|
||||
server,
|
||||
resourceType: PamResource.Kubernetes,
|
||||
resourceResponseSchema: SanitizedKubernetesResourceSchema,
|
||||
createResourceSchema: CreateKubernetesResourceSchema,
|
||||
updateResourceSchema: UpdateKubernetesResourceSchema
|
||||
});
|
||||
},
|
||||
[PamResource.AwsIam]: async (server: FastifyZodProvider) => {
|
||||
registerPamResourceEndpoints({
|
||||
server,
|
||||
|
||||
@@ -5,6 +5,10 @@ import {
|
||||
AwsIamResourceListItemSchema,
|
||||
SanitizedAwsIamResourceSchema
|
||||
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
|
||||
import {
|
||||
KubernetesResourceListItemSchema,
|
||||
SanitizedKubernetesResourceSchema
|
||||
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
|
||||
import {
|
||||
MySQLResourceListItemSchema,
|
||||
SanitizedMySQLResourceSchema
|
||||
@@ -27,6 +31,7 @@ const SanitizedResourceSchema = z.union([
|
||||
SanitizedPostgresResourceSchema,
|
||||
SanitizedMySQLResourceSchema,
|
||||
SanitizedSSHResourceSchema,
|
||||
SanitizedKubernetesResourceSchema,
|
||||
SanitizedAwsIamResourceSchema
|
||||
]);
|
||||
|
||||
@@ -34,6 +39,7 @@ const ResourceOptionsSchema = z.discriminatedUnion("resource", [
|
||||
PostgresResourceListItemSchema,
|
||||
MySQLResourceListItemSchema,
|
||||
SSHResourceListItemSchema,
|
||||
KubernetesResourceListItemSchema,
|
||||
AwsIamResourceListItemSchema
|
||||
]);
|
||||
|
||||
|
||||
@@ -2,10 +2,12 @@ import { z } from "zod";
|
||||
|
||||
import { PamSessionsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { KubernetesSessionCredentialsSchema } from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
|
||||
import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
|
||||
import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
|
||||
import { SSHSessionCredentialsSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
|
||||
import {
|
||||
HttpEventSchema,
|
||||
PamSessionCommandLogSchema,
|
||||
SanitizedSessionSchema,
|
||||
TerminalEventSchema
|
||||
@@ -17,7 +19,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
|
||||
const SessionCredentialsSchema = z.union([
|
||||
SSHSessionCredentialsSchema,
|
||||
PostgresSessionCredentialsSchema,
|
||||
MySQLSessionCredentialsSchema
|
||||
MySQLSessionCredentialsSchema,
|
||||
KubernetesSessionCredentialsSchema
|
||||
]);
|
||||
|
||||
export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
|
||||
@@ -89,7 +92,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
|
||||
sessionId: z.string().uuid()
|
||||
}),
|
||||
body: z.object({
|
||||
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
|
||||
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema, HttpEventSchema]))
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
||||
@@ -142,6 +142,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
data: {
|
||||
...req.body,
|
||||
...req.body.type,
|
||||
name: req.body.slug,
|
||||
permissions: req.body.permissions
|
||||
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
|
||||
@@ -56,7 +56,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
TAccessApprovalRequestReviewerDALFactory,
|
||||
"create" | "find" | "findOne" | "transaction" | "delete"
|
||||
>;
|
||||
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
|
||||
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleUsers">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
@@ -182,7 +182,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
await Promise.all(
|
||||
approverGroupIds.map((groupApproverId) =>
|
||||
groupDAL
|
||||
.findAllGroupPossibleMembers({
|
||||
.findAllGroupPossibleUsers({
|
||||
orgId: actorOrgId,
|
||||
groupId: groupApproverId
|
||||
})
|
||||
|
||||
@@ -49,6 +49,7 @@ import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
|
||||
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { AcmeChallengeType, AcmeIdentifierType } from "../pki-acme/pki-acme-schemas";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
@@ -78,7 +79,9 @@ export type TCreateAuditLogDTO = {
|
||||
| ScimClientActor
|
||||
| PlatformActor
|
||||
| UnknownUserActor
|
||||
| KmipClientActor;
|
||||
| KmipClientActor
|
||||
| AcmeProfileActor
|
||||
| AcmeAccountActor;
|
||||
orgId?: string;
|
||||
projectId?: string;
|
||||
} & BaseAuthData;
|
||||
@@ -574,7 +577,18 @@ export enum EventType {
|
||||
APPROVAL_REQUEST_CANCEL = "approval-request-cancel",
|
||||
APPROVAL_REQUEST_GRANT_LIST = "approval-request-grant-list",
|
||||
APPROVAL_REQUEST_GRANT_GET = "approval-request-grant-get",
|
||||
APPROVAL_REQUEST_GRANT_REVOKE = "approval-request-grant-revoke"
|
||||
APPROVAL_REQUEST_GRANT_REVOKE = "approval-request-grant-revoke",
|
||||
|
||||
// PKI ACME
|
||||
CREATE_ACME_ACCOUNT = "create-acme-account",
|
||||
RETRIEVE_ACME_ACCOUNT = "retrieve-acme-account",
|
||||
CREATE_ACME_ORDER = "create-acme-order",
|
||||
FINALIZE_ACME_ORDER = "finalize-acme-order",
|
||||
DOWNLOAD_ACME_CERTIFICATE = "download-acme-certificate",
|
||||
RESPOND_TO_ACME_CHALLENGE = "respond-to-acme-challenge",
|
||||
PASS_ACME_CHALLENGE = "pass-acme-challenge",
|
||||
ATTEMPT_ACME_CHALLENGE = "attempt-acme-challenge",
|
||||
FAIL_ACME_CHALLENGE = "fail-acme-challenge"
|
||||
}
|
||||
|
||||
export const filterableSecretEvents: EventType[] = [
|
||||
@@ -615,6 +629,15 @@ interface KmipClientActorMetadata {
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface AcmeProfileActorMetadata {
|
||||
profileId: string;
|
||||
}
|
||||
|
||||
interface AcmeAccountActorMetadata {
|
||||
profileId: string;
|
||||
accountId: string;
|
||||
}
|
||||
|
||||
interface UnknownUserActorMetadata {}
|
||||
|
||||
export interface UserActor {
|
||||
@@ -652,7 +675,25 @@ export interface ScimClientActor {
|
||||
metadata: ScimClientActorMetadata;
|
||||
}
|
||||
|
||||
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;
|
||||
export interface AcmeProfileActor {
|
||||
type: ActorType.ACME_PROFILE;
|
||||
metadata: AcmeProfileActorMetadata;
|
||||
}
|
||||
|
||||
export interface AcmeAccountActor {
|
||||
type: ActorType.ACME_ACCOUNT;
|
||||
metadata: AcmeAccountActorMetadata;
|
||||
}
|
||||
|
||||
export type Actor =
|
||||
| UserActor
|
||||
| ServiceActor
|
||||
| IdentityActor
|
||||
| ScimClientActor
|
||||
| PlatformActor
|
||||
| KmipClientActor
|
||||
| AcmeProfileActor
|
||||
| AcmeAccountActor;
|
||||
|
||||
interface GetSecretsEvent {
|
||||
type: EventType.GET_SECRETS;
|
||||
@@ -4368,6 +4409,84 @@ interface ApprovalRequestGrantRevokeEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateAcmeAccountEvent {
|
||||
type: EventType.CREATE_ACME_ACCOUNT;
|
||||
metadata: {
|
||||
accountId: string;
|
||||
publicKeyThumbprint: string;
|
||||
emails?: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface RetrieveAcmeAccountEvent {
|
||||
type: EventType.RETRIEVE_ACME_ACCOUNT;
|
||||
metadata: {
|
||||
accountId: string;
|
||||
publicKeyThumbprint: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateAcmeOrderEvent {
|
||||
type: EventType.CREATE_ACME_ORDER;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
identifiers: Array<{
|
||||
type: AcmeIdentifierType;
|
||||
value: string;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
interface FinalizeAcmeOrderEvent {
|
||||
type: EventType.FINALIZE_ACME_ORDER;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
csr: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DownloadAcmeCertificateEvent {
|
||||
type: EventType.DOWNLOAD_ACME_CERTIFICATE;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RespondToAcmeChallengeEvent {
|
||||
type: EventType.RESPOND_TO_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
};
|
||||
}
|
||||
interface PassedAcmeChallengeEvent {
|
||||
type: EventType.PASS_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptAcmeChallengeEvent {
|
||||
type: EventType.ATTEMPT_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
retryCount: number;
|
||||
errorMessage: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface FailAcmeChallengeEvent {
|
||||
type: EventType.FAIL_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
retryCount: number;
|
||||
errorMessage: string;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| CreateSubOrganizationEvent
|
||||
| UpdateSubOrganizationEvent
|
||||
@@ -4768,4 +4887,13 @@ export type Event =
|
||||
| ApprovalRequestCancelEvent
|
||||
| ApprovalRequestGrantListEvent
|
||||
| ApprovalRequestGrantGetEvent
|
||||
| ApprovalRequestGrantRevokeEvent;
|
||||
| ApprovalRequestGrantRevokeEvent
|
||||
| CreateAcmeAccountEvent
|
||||
| RetrieveAcmeAccountEvent
|
||||
| CreateAcmeOrderEvent
|
||||
| FinalizeAcmeOrderEvent
|
||||
| DownloadAcmeCertificateEvent
|
||||
| RespondToAcmeChallengeEvent
|
||||
| PassedAcmeChallengeEvent
|
||||
| AttemptAcmeChallengeEvent
|
||||
| FailAcmeChallengeEvent;
|
||||
|
||||
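For reference, a value conforming to one of the new ACME event shapes added above — a type-level illustration only, using the `FinalizeAcmeOrderEvent` interface and the `EventType` enum from this file; the order ID and CSR body are placeholders.

```typescript
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";

// Sample value matching the FinalizeAcmeOrderEvent shape added above.
const sampleAcmeAuditEvent: Event = {
  type: EventType.FINALIZE_ACME_ORDER,
  metadata: {
    orderId: "00000000-0000-0000-0000-000000000000",
    csr: "-----BEGIN CERTIFICATE REQUEST-----\n...\n-----END CERTIFICATE REQUEST-----"
  }
};

export { sampleAcmeAuditEvent };
```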
@@ -380,6 +380,7 @@ export const externalKmsServiceFactory = ({
|
||||
|
||||
const findById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id: kmsId }: TGetExternalKmsByIdDTO) => {
|
||||
const kmsDoc = await kmsDAL.findById(kmsId);
|
||||
if (!kmsDoc) throw new NotFoundError({ message: `Could not find KMS with ID '${kmsId}'` });
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
|
||||
@@ -6,7 +6,14 @@ import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
|
||||
import { EFilterReturnedProjects, EFilterReturnedUsers, EGroupProjectsOrderBy } from "./group-types";
|
||||
import {
|
||||
FilterMemberType,
|
||||
FilterReturnedMachineIdentities,
|
||||
FilterReturnedProjects,
|
||||
FilterReturnedUsers,
|
||||
GroupMembersOrderBy,
|
||||
GroupProjectsOrderBy
|
||||
} from "./group-types";
|
||||
|
||||
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
|
||||
|
||||
@@ -70,7 +77,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
// special query
|
||||
const findAllGroupPossibleMembers = async ({
|
||||
const findAllGroupPossibleUsers = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
@@ -85,7 +92,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
limit?: number;
|
||||
username?: string;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedUsers;
|
||||
filter?: FilterReturnedUsers;
|
||||
}) => {
|
||||
try {
|
||||
const query = db
|
||||
@@ -127,11 +134,11 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case EFilterReturnedUsers.EXISTING_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is not", null);
|
||||
case FilterReturnedUsers.EXISTING_MEMBERS:
|
||||
void query.whereNotNull(`${TableName.UserGroupMembership}.createdAt`);
|
||||
break;
|
||||
case EFilterReturnedUsers.NON_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is", null);
|
||||
case FilterReturnedUsers.NON_MEMBERS:
|
||||
void query.whereNull(`${TableName.UserGroupMembership}.createdAt`);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
@@ -155,7 +162,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
username: memberUsername,
|
||||
firstName,
|
||||
lastName,
|
||||
isPartOfGroup: !!memberGroupId,
|
||||
isPartOfGroup: Boolean(memberGroupId),
|
||||
joinedGroupAt
|
||||
})
|
||||
),
|
||||
@@ -167,6 +174,256 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupPossibleMachineIdentities = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
limit,
|
||||
search,
|
||||
filter
|
||||
}: {
|
||||
orgId: string;
|
||||
groupId: string;
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
filter?: FilterReturnedMachineIdentities;
|
||||
}) => {
|
||||
try {
|
||||
const query = db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Organization)
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.whereNull(`${TableName.Identity}.projectId`)
|
||||
.join(TableName.Identity, `${TableName.Membership}.actorIdentityId`, `${TableName.Identity}.id`)
|
||||
.leftJoin(TableName.IdentityGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.IdentityGroupMembership}.identityId`, "=", `${TableName.Identity}.id`).andOn(
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.Membership),
|
||||
db.ref("groupId").withSchema(TableName.IdentityGroupMembership),
|
||||
db.ref("createdAt").withSchema(TableName.IdentityGroupMembership).as("joinedGroupAt"),
|
||||
db.ref("name").withSchema(TableName.Identity),
|
||||
db.ref("id").withSchema(TableName.Identity).as("identityId"),
|
||||
db.raw(`count(*) OVER() as total_count`)
|
||||
)
|
||||
.offset(offset)
|
||||
.orderBy("name", "asc");
|
||||
|
||||
if (limit) {
|
||||
void query.limit(limit);
|
||||
}
|
||||
|
||||
if (search) {
|
||||
void query.andWhereRaw(`LOWER("${TableName.Identity}"."name") ilike ?`, `%${search}%`);
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case FilterReturnedMachineIdentities.ASSIGNED_MACHINE_IDENTITIES:
|
||||
void query.whereNotNull(`${TableName.IdentityGroupMembership}.createdAt`);
|
||||
break;
|
||||
case FilterReturnedMachineIdentities.NON_ASSIGNED_MACHINE_IDENTITIES:
|
||||
void query.whereNull(`${TableName.IdentityGroupMembership}.createdAt`);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
const machineIdentities = await query;
|
||||
|
||||
return {
|
||||
machineIdentities: machineIdentities.map(({ name, identityId, joinedGroupAt, groupId: identityGroupId }) => ({
|
||||
id: identityId,
|
||||
name,
|
||||
isPartOfGroup: Boolean(identityGroupId),
|
||||
joinedGroupAt
|
||||
})),
|
||||
// @ts-expect-error col select is raw and not strongly typed
|
||||
totalCount: Number(machineIdentities?.[0]?.total_count ?? 0)
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find all group identities" });
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupPossibleMembers = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
limit,
|
||||
search,
|
||||
orderBy = GroupMembersOrderBy.Name,
|
||||
orderDirection = OrderByDirection.ASC,
|
||||
memberTypeFilter
|
||||
}: {
|
||||
orgId: string;
|
||||
groupId: string;
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
orderBy?: GroupMembersOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
memberTypeFilter?: FilterMemberType[];
|
||||
}) => {
|
||||
try {
|
||||
const includeUsers =
|
||||
!memberTypeFilter || memberTypeFilter.length === 0 || memberTypeFilter.includes(FilterMemberType.USERS);
|
||||
const includeMachineIdentities =
|
||||
!memberTypeFilter ||
|
||||
memberTypeFilter.length === 0 ||
|
||||
memberTypeFilter.includes(FilterMemberType.MACHINE_IDENTITIES);
|
||||
|
||||
const query = db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Organization)
|
||||
.leftJoin(TableName.Users, `${TableName.Membership}.actorUserId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.Identity, `${TableName.Membership}.actorIdentityId`, `${TableName.Identity}.id`)
|
||||
.leftJoin(TableName.UserGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.IdentityGroupMembership}.identityId`, "=", `${TableName.Identity}.id`).andOn(
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.where((innerQb) => {
|
||||
void innerQb
|
||||
.whereNotNull(`${TableName.Membership}.actorUserId`)
|
||||
.whereNotNull(`${TableName.UserGroupMembership}.createdAt`)
|
||||
.where(`${TableName.Users}.isGhost`, false);
|
||||
})
|
||||
.orWhere((innerQb) => {
|
||||
void innerQb
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.whereNotNull(`${TableName.IdentityGroupMembership}.createdAt`)
|
||||
.whereNull(`${TableName.Identity}.projectId`);
|
||||
});
|
||||
})
|
||||
.select(
|
||||
db.raw(
|
||||
`CASE WHEN "${TableName.Membership}"."actorUserId" IS NOT NULL THEN "${TableName.UserGroupMembership}"."createdAt" ELSE "${TableName.IdentityGroupMembership}"."createdAt" END as "joinedGroupAt"`
|
||||
),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
db.raw(`"${TableName.Users}"."id"::text as "userId"`),
|
||||
db.raw(`"${TableName.Identity}"."id"::text as "identityId"`),
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.raw(
|
||||
`CASE WHEN "${TableName.Membership}"."actorUserId" IS NOT NULL THEN 'user' ELSE 'machineIdentity' END as "member_type"`
|
||||
),
|
||||
db.raw(`count(*) OVER() as total_count`)
|
||||
);
|
||||
|
||||
void query.andWhere((qb) => {
|
||||
if (includeUsers) {
|
||||
void qb.whereNotNull(`${TableName.Membership}.actorUserId`);
|
||||
}
|
||||
|
||||
if (includeMachineIdentities) {
|
||||
void qb[includeUsers ? "orWhere" : "where"]((innerQb) => {
|
||||
void innerQb.whereNotNull(`${TableName.Membership}.actorIdentityId`);
|
||||
});
|
||||
}
|
||||
|
||||
if (!includeUsers && !includeMachineIdentities) {
|
||||
void qb.whereRaw("FALSE");
|
||||
}
|
||||
});
|
||||
|
||||
if (search) {
|
||||
void query.andWhere((qb) => {
|
||||
void qb
|
||||
.whereRaw(
|
||||
`CONCAT_WS(' ', "${TableName.Users}"."firstName", "${TableName.Users}"."lastName", lower("${TableName.Users}"."username")) ilike ?`,
|
||||
[`%${search}%`]
|
||||
)
|
||||
.orWhereRaw(`LOWER("${TableName.Identity}"."name") ilike ?`, [`%${search}%`]);
|
||||
});
|
||||
}
|
||||
|
||||
if (orderBy === GroupMembersOrderBy.Name) {
|
||||
const orderDirectionClause = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
|
||||
|
||||
// This order by clause is used to sort the members by name.
|
||||
// It first checks if the full name (first name and last name) is not empty, then the username, then the email, then the identity name. If all of these are empty, it returns null.
|
||||
void query.orderByRaw(
|
||||
`LOWER(COALESCE(NULLIF(TRIM(CONCAT_WS(' ', "${TableName.Users}"."firstName", "${TableName.Users}"."lastName")), ''), "${TableName.Users}"."username", "${TableName.Users}"."email", "${TableName.Identity}"."name")) ${orderDirectionClause}`
|
||||
);
|
||||
}
|
||||
|
||||
if (offset) {
|
||||
void query.offset(offset);
|
||||
}
|
||||
if (limit) {
|
||||
void query.limit(limit);
|
||||
}
|
||||
|
||||
const results = (await query) as unknown as {
|
||||
email: string;
|
||||
username: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
userId: string;
|
||||
identityId: string;
|
||||
identityName: string;
|
||||
member_type: "user" | "machineIdentity";
|
||||
joinedGroupAt: Date;
|
||||
total_count: string;
|
||||
}[];
|
||||
|
||||
const members = results.map(
|
||||
({ email, username, firstName, lastName, userId, identityId, identityName, member_type, joinedGroupAt }) => {
|
||||
if (member_type === "user") {
|
||||
return {
|
||||
id: userId,
|
||||
joinedGroupAt,
|
||||
type: "user" as const,
|
||||
user: {
|
||||
id: userId,
|
||||
email,
|
||||
username,
|
||||
firstName,
|
||||
lastName
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
id: identityId,
|
||||
joinedGroupAt,
|
||||
type: "machineIdentity" as const,
|
||||
machineIdentity: {
|
||||
id: identityId,
|
||||
name: identityName
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
members,
|
||||
totalCount: Number(results?.[0]?.total_count ?? 0)
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find all group possible members" });
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupProjects = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
@@ -182,8 +439,8 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedProjects;
|
||||
orderBy?: EGroupProjectsOrderBy;
|
||||
filter?: FilterReturnedProjects;
|
||||
orderBy?: GroupProjectsOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
}) => {
|
||||
try {
|
||||
@@ -225,10 +482,10 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case EFilterReturnedProjects.ASSIGNED_PROJECTS:
|
||||
case FilterReturnedProjects.ASSIGNED_PROJECTS:
|
||||
void query.whereNotNull(`${TableName.Membership}.id`);
|
||||
break;
|
||||
case EFilterReturnedProjects.UNASSIGNED_PROJECTS:
|
||||
case FilterReturnedProjects.UNASSIGNED_PROJECTS:
|
||||
void query.whereNull(`${TableName.Membership}.id`);
|
||||
break;
|
||||
default:
|
||||
@@ -313,6 +570,8 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
...groupOrm,
|
||||
findGroups,
|
||||
findByOrgId,
|
||||
findAllGroupPossibleUsers,
|
||||
findAllGroupPossibleMachineIdentities,
|
||||
findAllGroupPossibleMembers,
|
||||
findAllGroupProjects,
|
||||
findGroupsByProjectId,
|
||||
|
||||
@@ -5,9 +5,11 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, ScimRequestError } from "@app/lib/errors";
|
||||
|
||||
import {
|
||||
TAddIdentitiesToGroup,
|
||||
TAddUsersToGroup,
|
||||
TAddUsersToGroupByUserIds,
|
||||
TConvertPendingGroupAdditionsToGroupMemberships,
|
||||
TRemoveIdentitiesFromGroup,
|
||||
TRemoveUsersFromGroupByUserIds
|
||||
} from "./group-types";
|
||||
|
||||
@@ -285,6 +287,70 @@ export const addUsersToGroupByUserIds = async ({
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Add identities with identity ids [identityIds] to group [group].
|
||||
* @param {group} group - group to add identity(s) to
|
||||
* @param {string[]} identityIds - id(s) of organization scoped identity(s) to add to group
|
||||
* @returns {Promise<{ id: string }[]>} - id(s) of added identity(s)
|
||||
*/
|
||||
export const addIdentitiesToGroup = async ({
|
||||
group,
|
||||
identityIds,
|
||||
identityDAL,
|
||||
identityGroupMembershipDAL,
|
||||
membershipDAL
|
||||
}: TAddIdentitiesToGroup) => {
|
||||
const identityIdsSet = new Set(identityIds);
|
||||
const identityIdsArray = Array.from(identityIdsSet);
|
||||
|
||||
// ensure all identities exist and belong to the org via org scoped membership
|
||||
const foundIdentitiesMemberships = await membershipDAL.find({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
$in: {
|
||||
actorIdentityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const existingIdentityOrgMembershipsIdentityIdsSet = new Set(
|
||||
foundIdentitiesMemberships.map((u) => u.actorIdentityId as string)
|
||||
);
|
||||
|
||||
identityIdsArray.forEach((identityId) => {
|
||||
if (!existingIdentityOrgMembershipsIdentityIdsSet.has(identityId)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: `Identity with id ${identityId} is not part of the organization`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// check if identity group membership already exists
|
||||
const existingIdentityGroupMemberships = await identityGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
if (existingIdentityGroupMemberships.length) {
|
||||
throw new BadRequestError({
|
||||
message: `${identityIdsArray.length > 1 ? `Identities are` : `Identity is`} already part of the group ${group.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
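// perform the membership insert inside a transaction and return the ids of the added identities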
return identityDAL.transaction(async (tx) => {
|
||||
await identityGroupMembershipDAL.insertMany(
|
||||
foundIdentitiesMemberships.map((membership) => ({
|
||||
identityId: membership.actorIdentityId as string,
|
||||
groupId: group.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
return identityIdsArray.map((identityId) => ({ id: identityId }));
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Remove users with user ids [userIds] from group [group].
|
||||
* - Users may be part of the group (non-pending + pending);
|
||||
@@ -421,6 +487,75 @@ export const removeUsersFromGroupByUserIds = async ({
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Remove identities with identity ids [identityIds] from group [group].
|
||||
* @param {group} group - group to remove identity(s) from
|
||||
* @param {string[]} identityIds - id(s) of identity(s) to remove from group
|
||||
* @returns {Promise<{ id: string }[]>} - id(s) of removed identity(s)
|
||||
*/
|
||||
export const removeIdentitiesFromGroup = async ({
|
||||
group,
|
||||
identityIds,
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
}: TRemoveIdentitiesFromGroup) => {
|
||||
const identityIdsSet = new Set(identityIds);
|
||||
const identityIdsArray = Array.from(identityIdsSet);
|
||||
|
||||
// ensure all identities exist and belong to the org via org scoped membership
|
||||
const foundIdentitiesMemberships = await membershipDAL.find({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
$in: {
|
||||
actorIdentityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const foundIdentitiesMembershipsIdentityIdsSet = new Set(
|
||||
foundIdentitiesMemberships.map((u) => u.actorIdentityId as string)
|
||||
);
|
||||
|
||||
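// every requested identity must have an org-scoped membership; otherwise the removal is rejected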
if (foundIdentitiesMembershipsIdentityIdsSet.size !== identityIdsArray.length) {
|
||||
throw new NotFoundError({
|
||||
message: `Machine identities not found`
|
||||
});
|
||||
}
|
||||
|
||||
// fetch existing identity group memberships to confirm each identity is actually part of the group
|
||||
const existingIdentityGroupMemberships = await identityGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const existingIdentityGroupMembershipsIdentityIdsSet = new Set(
|
||||
existingIdentityGroupMemberships.map((u) => u.identityId)
|
||||
);
|
||||
|
||||
identityIdsArray.forEach((identityId) => {
|
||||
if (!existingIdentityGroupMembershipsIdentityIdsSet.has(identityId)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: `Machine identities are not part of the group ${group.slug}`
|
||||
});
|
||||
}
|
||||
});
|
||||
return identityDAL.transaction(async (tx) => {
|
||||
await identityGroupMembershipDAL.delete(
|
||||
{
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return identityIdsArray.map((identityId) => ({ id: identityId }));
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert pending group additions for users with ids [userIds] to group memberships.
|
||||
* @param {string[]} userIds - id(s) of user(s) to try to convert pending group additions to group memberships
|
||||
|
||||
@@ -5,6 +5,8 @@ import { AccessScope, OrganizationActionScope, OrgMembershipRole, TRoles } from
|
||||
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
|
||||
import { TMembershipDALFactory } from "@app/services/membership/membership-dal";
|
||||
import { TMembershipRoleDALFactory } from "@app/services/membership/membership-role-dal";
|
||||
import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
@@ -18,33 +20,48 @@ import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/
|
||||
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { TGroupDALFactory } from "./group-dal";
|
||||
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
|
||||
import {
|
||||
addIdentitiesToGroup,
|
||||
addUsersToGroupByUserIds,
|
||||
removeIdentitiesFromGroup,
|
||||
removeUsersFromGroupByUserIds
|
||||
} from "./group-fns";
|
||||
import {
|
||||
TAddMachineIdentityToGroupDTO,
|
||||
TAddUserToGroupDTO,
|
||||
TCreateGroupDTO,
|
||||
TDeleteGroupDTO,
|
||||
TGetGroupByIdDTO,
|
||||
TListGroupMachineIdentitiesDTO,
|
||||
TListGroupMembersDTO,
|
||||
TListGroupProjectsDTO,
|
||||
TListGroupUsersDTO,
|
||||
TRemoveMachineIdentityFromGroupDTO,
|
||||
TRemoveUserFromGroupDTO,
|
||||
TUpdateGroupDTO
|
||||
} from "./group-types";
|
||||
import { TIdentityGroupMembershipDALFactory } from "./identity-group-membership-dal";
|
||||
import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal";
|
||||
|
||||
type TGroupServiceFactoryDep = {
|
||||
userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findUserByUsername">;
|
||||
identityDAL: Pick<TIdentityDALFactory, "findOne" | "find" | "transaction">;
|
||||
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "delete" | "insertMany">;
|
||||
groupDAL: Pick<
|
||||
TGroupDALFactory,
|
||||
| "create"
|
||||
| "findOne"
|
||||
| "update"
|
||||
| "delete"
|
||||
| "findAllGroupPossibleUsers"
|
||||
| "findAllGroupPossibleMachineIdentities"
|
||||
| "findAllGroupPossibleMembers"
|
||||
| "findById"
|
||||
| "transaction"
|
||||
| "findAllGroupProjects"
|
||||
>;
|
||||
membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find" | "findOne" | "create">;
|
||||
membershipDAL: Pick<TMembershipDALFactory, "find" | "findOne">;
|
||||
membershipRoleDAL: Pick<TMembershipRoleDALFactory, "create" | "delete">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findMembership" | "countAllOrgMembers" | "findById">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
@@ -65,6 +82,9 @@ type TGroupServiceFactoryDep = {
|
||||
export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;
|
||||
|
||||
export const groupServiceFactory = ({
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL,
|
||||
userDAL,
|
||||
groupDAL,
|
||||
orgDAL,
|
||||
@@ -362,7 +382,7 @@ export const groupServiceFactory = ({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleUsers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
@@ -375,6 +395,100 @@ export const groupServiceFactory = ({
|
||||
return { users: members, totalCount };
|
||||
};
|
||||
|
||||
const listGroupMachineIdentities = async ({
|
||||
id,
|
||||
offset,
|
||||
limit,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
search,
|
||||
filter
|
||||
}: TListGroupMachineIdentitiesDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { machineIdentities, totalCount } = await groupDAL.findAllGroupPossibleMachineIdentities({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
filter
|
||||
});
|
||||
|
||||
return { machineIdentities, totalCount };
|
||||
};
|
||||
|
||||
const listGroupMembers = async ({
|
||||
id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
memberTypeFilter,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TListGroupMembersDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
memberTypeFilter
|
||||
});
|
||||
|
||||
return { members, totalCount };
|
||||
};
|
||||
|
||||
const listGroupProjects = async ({
|
||||
id,
|
||||
offset,
|
||||
@@ -504,6 +618,81 @@ export const groupServiceFactory = ({
|
||||
return users[0];
|
||||
};
|
||||
|
||||
const addMachineIdentityToGroup = async ({
|
||||
id,
|
||||
identityId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TAddMachineIdentityToGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
// check if a group with the given ID exists in the organization
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([group.role], actorOrgId);
|
||||
const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId);
|
||||
|
||||
// ensure the actor's privileges are broader than or equal to those of the group's role
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddIdentities,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
rolePermissionDetails.permission
|
||||
);
|
||||
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to add identity to more privileged group",
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddIdentities,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityMembership = await membershipDAL.findOne({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
actorIdentityId: identityId
|
||||
});
|
||||
|
||||
if (!identityMembership) {
|
||||
throw new NotFoundError({ message: `Identity with id ${identityId} is not part of the organization` });
|
||||
}
|
||||
|
||||
const identities = await addIdentitiesToGroup({
|
||||
group,
|
||||
identityIds: [identityId],
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
});
|
||||
|
||||
return identities[0];
|
||||
};
|
||||
|
||||
const removeUserFromGroup = async ({
|
||||
id,
|
||||
username,
|
||||
@@ -587,14 +776,91 @@ export const groupServiceFactory = ({
|
||||
return users[0];
|
||||
};
|
||||
|
||||
const removeMachineIdentityFromGroup = async ({
|
||||
id,
|
||||
identityId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TRemoveMachineIdentityFromGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([group.role], actorOrgId);
|
||||
const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId);
|
||||
|
||||
// ensure the actor's privileges are broader than or equal to those of the group's role
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveIdentities,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
rolePermissionDetails.permission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to remove identity from more privileged group",
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveIdentities,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityMembership = await membershipDAL.findOne({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
actorIdentityId: identityId
|
||||
});
|
||||
|
||||
if (!identityMembership) {
|
||||
throw new NotFoundError({ message: `Identity with id ${identityId} is not part of the organization` });
|
||||
}
|
||||
|
||||
const identities = await removeIdentitiesFromGroup({
|
||||
group,
|
||||
identityIds: [identityId],
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
});
|
||||
|
||||
return identities[0];
|
||||
};
|
||||
|
||||
return {
|
||||
createGroup,
|
||||
updateGroup,
|
||||
deleteGroup,
|
||||
listGroupUsers,
|
||||
listGroupMachineIdentities,
|
||||
listGroupMembers,
|
||||
listGroupProjects,
|
||||
addUserToGroup,
|
||||
addMachineIdentityToGroup,
|
||||
removeUserFromGroup,
|
||||
removeMachineIdentityFromGroup,
|
||||
getGroupById
|
||||
};
|
||||
};
|
||||
|
||||
@@ -3,6 +3,8 @@ import { Knex } from "knex";
|
||||
import { TGroups } from "@app/db/schemas";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { OrderByDirection, TGenericPermission } from "@app/lib/types";
|
||||
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
|
||||
import { TMembershipDALFactory } from "@app/services/membership/membership-dal";
|
||||
import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
@@ -10,6 +12,8 @@ import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TIdentityGroupMembershipDALFactory } from "./identity-group-membership-dal";
|
||||
|
||||
export type TCreateGroupDTO = {
|
||||
name: string;
|
||||
slug?: string;
|
||||
@@ -39,7 +43,25 @@ export type TListGroupUsersDTO = {
|
||||
limit: number;
|
||||
username?: string;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedUsers;
|
||||
filter?: FilterReturnedUsers;
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TListGroupMachineIdentitiesDTO = {
|
||||
id: string;
|
||||
offset: number;
|
||||
limit: number;
|
||||
search?: string;
|
||||
filter?: FilterReturnedMachineIdentities;
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TListGroupMembersDTO = {
|
||||
id: string;
|
||||
offset: number;
|
||||
limit: number;
|
||||
search?: string;
|
||||
orderBy?: GroupMembersOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
memberTypeFilter?: FilterMemberType[];
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TListGroupProjectsDTO = {
|
||||
@@ -47,8 +69,8 @@ export type TListGroupProjectsDTO = {
|
||||
offset: number;
|
||||
limit: number;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedProjects;
|
||||
orderBy?: EGroupProjectsOrderBy;
|
||||
filter?: FilterReturnedProjects;
|
||||
orderBy?: GroupProjectsOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
} & TGenericPermission;
|
||||
|
||||
@@ -61,11 +83,21 @@ export type TAddUserToGroupDTO = {
|
||||
username: string;
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TAddMachineIdentityToGroupDTO = {
|
||||
id: string;
|
||||
identityId: string;
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TRemoveUserFromGroupDTO = {
|
||||
id: string;
|
||||
username: string;
|
||||
} & TGenericPermission;
|
||||
|
||||
export type TRemoveMachineIdentityFromGroupDTO = {
|
||||
id: string;
|
||||
identityId: string;
|
||||
} & TGenericPermission;
|
||||
|
||||
// group fns types
|
||||
|
||||
export type TAddUsersToGroup = {
|
||||
@@ -93,6 +125,14 @@ export type TAddUsersToGroupByUserIds = {
|
||||
tx?: Knex;
|
||||
};
|
||||
|
||||
export type TAddIdentitiesToGroup = {
|
||||
group: TGroups;
|
||||
identityIds: string[];
|
||||
identityDAL: Pick<TIdentityDALFactory, "transaction">;
|
||||
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "insertMany">;
|
||||
membershipDAL: Pick<TMembershipDALFactory, "find">;
|
||||
};
|
||||
|
||||
export type TRemoveUsersFromGroupByUserIds = {
|
||||
group: TGroups;
|
||||
userIds: string[];
|
||||
@@ -103,6 +143,14 @@ export type TRemoveUsersFromGroupByUserIds = {
|
||||
tx?: Knex;
|
||||
};
|
||||
|
||||
export type TRemoveIdentitiesFromGroup = {
|
||||
group: TGroups;
|
||||
identityIds: string[];
|
||||
identityDAL: Pick<TIdentityDALFactory, "find" | "transaction">;
|
||||
membershipDAL: Pick<TMembershipDALFactory, "find">;
|
||||
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "delete">;
|
||||
};
|
||||
|
||||
export type TConvertPendingGroupAdditionsToGroupMemberships = {
|
||||
userIds: string[];
|
||||
userDAL: Pick<TUserDALFactory, "findUserEncKeyByUserIdsBatch" | "transaction" | "find" | "findById">;
|
||||
@@ -117,16 +165,30 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
|
||||
tx?: Knex;
|
||||
};
|
||||
|
||||
export enum EFilterReturnedUsers {
|
||||
export enum FilterReturnedUsers {
|
||||
EXISTING_MEMBERS = "existingMembers",
|
||||
NON_MEMBERS = "nonMembers"
|
||||
}
|
||||
|
||||
export enum EFilterReturnedProjects {
|
||||
export enum FilterReturnedMachineIdentities {
|
||||
ASSIGNED_MACHINE_IDENTITIES = "assignedMachineIdentities",
|
||||
NON_ASSIGNED_MACHINE_IDENTITIES = "nonAssignedMachineIdentities"
|
||||
}
|
||||
|
||||
export enum FilterReturnedProjects {
|
||||
ASSIGNED_PROJECTS = "assignedProjects",
|
||||
UNASSIGNED_PROJECTS = "unassignedProjects"
|
||||
}
|
||||
|
||||
export enum EGroupProjectsOrderBy {
|
||||
export enum GroupProjectsOrderBy {
|
||||
Name = "name"
|
||||
}
|
||||
|
||||
export enum GroupMembersOrderBy {
|
||||
Name = "name"
|
||||
}
|
||||
|
||||
export enum FilterMemberType {
|
||||
USERS = "users",
|
||||
MACHINE_IDENTITIES = "machineIdentities"
|
||||
}
|
||||
|
||||
@@ -0,0 +1,13 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TIdentityGroupMembershipDALFactory = ReturnType<typeof identityGroupMembershipDALFactory>;

export const identityGroupMembershipDALFactory = (db: TDbClient) => {
  const identityGroupMembershipOrm = ormify(db, TableName.IdentityGroupMembership);

  return {
    ...identityGroupMembershipOrm
  };
};
@@ -689,13 +689,30 @@ export const pamAccountServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Gateway ID is required for this resource type" });
|
||||
}
|
||||
|
||||
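// Kubernetes resources store a URL rather than an explicit host/port pair, so derive host and port
// from the URL here; the other resource types already carry host and port in their connection details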
const { host, port } =
|
||||
resourceType !== PamResource.Kubernetes
|
||||
? connectionDetails
|
||||
: (() => {
|
||||
const url = new URL(connectionDetails.url);
|
||||
let portNumber: number | undefined;
|
||||
if (url.port) {
|
||||
portNumber = Number(url.port);
|
||||
} else {
|
||||
portNumber = url.protocol === "https:" ? 443 : 80;
|
||||
}
|
||||
return {
|
||||
host: url.hostname,
|
||||
port: portNumber
|
||||
};
|
||||
})();
|
||||
|
||||
const gatewayConnectionDetails = await gatewayV2Service.getPAMConnectionDetails({
|
||||
gatewayId,
|
||||
duration,
|
||||
sessionId: session.id,
|
||||
resourceType: resource.resourceType as PamResource,
|
||||
host: (connectionDetails as TSqlResourceConnectionDetails).host,
|
||||
port: (connectionDetails as TSqlResourceConnectionDetails).port,
|
||||
host,
|
||||
port,
|
||||
actorMetadata: {
|
||||
id: actor.id,
|
||||
type: actor.type,
|
||||
@@ -746,6 +763,13 @@ export const pamAccountServiceFactory = ({
|
||||
};
|
||||
}
|
||||
break;
|
||||
case PamResource.Kubernetes:
|
||||
metadata = {
|
||||
resourceName: resource.name,
|
||||
accountName: account.name,
|
||||
accountPath
|
||||
};
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -71,23 +71,29 @@ export const pamFolderDALFactory = (db: TDbClient) => {
|
||||
const findByPath = async (projectId: string, path: string, tx?: Knex) => {
|
||||
try {
|
||||
const dbInstance = tx || db.replicaNode();
|
||||
|
||||
const folders = await dbInstance(TableName.PamFolder)
|
||||
.where(`${TableName.PamFolder}.projectId`, projectId)
|
||||
.select(selectAllTableCols(TableName.PamFolder));
|
||||
|
||||
const pathSegments = path.split("/").filter(Boolean);
|
||||
if (pathSegments.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const foldersByParentId = new Map<string | null, typeof folders>();
|
||||
for (const folder of folders) {
|
||||
const children = foldersByParentId.get(folder.parentId ?? null) ?? [];
|
||||
children.push(folder);
|
||||
foldersByParentId.set(folder.parentId ?? null, children);
|
||||
}
|
||||
|
||||
let parentId: string | null = null;
|
||||
let currentFolder: Awaited<ReturnType<typeof orm.findOne>> | undefined;
|
||||
let currentFolder: (typeof folders)[0] | undefined;
|
||||
|
||||
for await (const segment of pathSegments) {
|
||||
const query = dbInstance(TableName.PamFolder)
|
||||
.where(`${TableName.PamFolder}.projectId`, projectId)
|
||||
.where(`${TableName.PamFolder}.name`, segment);
|
||||
|
||||
if (parentId) {
|
||||
void query.where(`${TableName.PamFolder}.parentId`, parentId);
|
||||
} else {
|
||||
void query.whereNull(`${TableName.PamFolder}.parentId`);
|
||||
}
|
||||
|
||||
currentFolder = await query.first();
|
||||
for (const segment of pathSegments) {
|
||||
const childFolders: typeof folders = foldersByParentId.get(parentId) || [];
|
||||
currentFolder = childFolders.find((folder) => folder.name === segment);
|
||||
|
||||
if (!currentFolder) {
|
||||
return undefined;
|
||||
|
||||
@@ -0,0 +1,3 @@
export enum KubernetesAuthMethod {
  ServiceAccountToken = "service-account-token"
}
@@ -0,0 +1,225 @@
|
||||
import axios, { AxiosError } from "axios";
|
||||
import https from "https";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { GatewayProxyProtocol } from "@app/lib/gateway/types";
|
||||
import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
|
||||
import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service";
|
||||
import { PamResource } from "../pam-resource-enums";
|
||||
import {
|
||||
TPamResourceFactory,
|
||||
TPamResourceFactoryRotateAccountCredentials,
|
||||
TPamResourceFactoryValidateAccountCredentials
|
||||
} from "../pam-resource-types";
|
||||
import { KubernetesAuthMethod } from "./kubernetes-resource-enums";
|
||||
import { TKubernetesAccountCredentials, TKubernetesResourceConnectionDetails } from "./kubernetes-resource-types";
|
||||
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
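// Resolves the target host/port from the resource URL, then runs `operation` against a local TCP proxy
// tunnelled through the gateway (presumably so the Kubernetes API server never has to be directly
// reachable from the backend).
// Illustrative usage only; names mirror the surrounding code:
//   await executeWithGateway({ connectionDetails, resourceType, gatewayId }, gatewayV2Service,
//     async (baseUrl, httpsAgent) => axios.get(`${baseUrl}/version`, { httpsAgent }));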
export const executeWithGateway = async <T>(
|
||||
config: {
|
||||
connectionDetails: TKubernetesResourceConnectionDetails;
|
||||
resourceType: PamResource;
|
||||
gatewayId: string;
|
||||
},
|
||||
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
|
||||
operation: (baseUrl: string, httpsAgent: https.Agent) => Promise<T>
|
||||
): Promise<T> => {
|
||||
const { connectionDetails, gatewayId } = config;
|
||||
const url = new URL(connectionDetails.url);
|
||||
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
|
||||
|
||||
let targetPort: number;
|
||||
if (url.port) {
|
||||
targetPort = Number(url.port);
|
||||
} else if (url.protocol === "https:") {
|
||||
targetPort = 443;
|
||||
} else {
|
||||
targetPort = 80;
|
||||
}
|
||||
|
||||
const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({
|
||||
gatewayId,
|
||||
targetHost,
|
||||
targetPort
|
||||
});
|
||||
if (!platformConnectionDetails) {
|
||||
throw new BadRequestError({ message: "Unable to connect to gateway, no platform connection details found" });
|
||||
}
|
||||
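// the request is sent to localhost through the proxy, so pin TLS verification (SNI and CA) to the original hostname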
const httpsAgent = new https.Agent({
|
||||
ca: connectionDetails.sslCertificate,
|
||||
rejectUnauthorized: connectionDetails.sslRejectUnauthorized,
|
||||
servername: targetHost
|
||||
});
|
||||
return withGatewayV2Proxy(
|
||||
async (proxyPort) => {
|
||||
const protocol = url.protocol === "https:" ? "https" : "http";
|
||||
const baseUrl = `${protocol}://localhost:${proxyPort}`;
|
||||
return operation(baseUrl, httpsAgent);
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Tcp,
|
||||
relayHost: platformConnectionDetails.relayHost,
|
||||
gateway: platformConnectionDetails.gateway,
|
||||
relay: platformConnectionDetails.relay,
|
||||
httpsAgent
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export const kubernetesResourceFactory: TPamResourceFactory<
|
||||
TKubernetesResourceConnectionDetails,
|
||||
TKubernetesAccountCredentials
|
||||
> = (resourceType, connectionDetails, gatewayId, gatewayV2Service) => {
|
||||
const validateConnection = async () => {
|
||||
if (!gatewayId) {
|
||||
throw new BadRequestError({ message: "Gateway ID is required" });
|
||||
}
|
||||
try {
|
||||
await executeWithGateway(
|
||||
{ connectionDetails, gatewayId, resourceType },
|
||||
gatewayV2Service,
|
||||
async (baseUrl, httpsAgent) => {
|
||||
// Validate connection by checking API server version
|
||||
try {
|
||||
await axios.get(`${baseUrl}/version`, {
|
||||
...(httpsAgent ? { httpsAgent } : {}),
|
||||
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
|
||||
timeout: EXTERNAL_REQUEST_TIMEOUT
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof AxiosError) {
|
||||
// If we get a 401/403, it means we reached the API server but need auth - that's fine for connection validation
|
||||
if (error.response?.status === 401 || error.response?.status === 403) {
|
||||
logger.info(
|
||||
{ status: error.response.status },
|
||||
"[Kubernetes Resource Factory] Kubernetes connection validation succeeded (auth required)"
|
||||
);
|
||||
return connectionDetails;
|
||||
}
|
||||
throw new BadRequestError({
|
||||
message: `Unable to connect to Kubernetes API server: ${error.response?.statusText || error.message}`
|
||||
});
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
logger.info("[Kubernetes Resource Factory] Kubernetes connection validation succeeded");
|
||||
return connectionDetails;
|
||||
}
|
||||
);
|
||||
return connectionDetails;
|
||||
} catch (error) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<
|
||||
TKubernetesAccountCredentials
|
||||
> = async (credentials) => {
|
||||
if (!gatewayId) {
|
||||
throw new BadRequestError({ message: "Gateway ID is required" });
|
||||
}
|
||||
try {
|
||||
await executeWithGateway(
|
||||
{ connectionDetails, gatewayId, resourceType },
|
||||
gatewayV2Service,
|
||||
async (baseUrl, httpsAgent) => {
|
||||
const { authMethod } = credentials;
|
||||
if (authMethod === KubernetesAuthMethod.ServiceAccountToken) {
|
||||
// Validate service account token using SelfSubjectReview API (whoami)
|
||||
// This endpoint doesn't require any special permissions from the service account
|
||||
try {
|
||||
await axios.post(
|
||||
`${baseUrl}/apis/authentication.k8s.io/v1/selfsubjectreviews`,
|
||||
{
|
||||
apiVersion: "authentication.k8s.io/v1",
|
||||
kind: "SelfSubjectReview"
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${credentials.serviceAccountToken}`
|
||||
},
|
||||
...(httpsAgent ? { httpsAgent } : {}),
|
||||
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
|
||||
timeout: EXTERNAL_REQUEST_TIMEOUT
|
||||
}
|
||||
);
|
||||
|
||||
logger.info("[Kubernetes Resource Factory] Kubernetes service account token authentication successful");
|
||||
} catch (error) {
|
||||
if (error instanceof AxiosError) {
|
||||
if (error.response?.status === 401 || error.response?.status === 403) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Account credentials invalid. Service account token is not valid or does not have required permissions."
|
||||
});
|
||||
}
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate account credentials: ${error.response?.statusText || error.message}`
|
||||
});
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unsupported Kubernetes auth method: ${authMethod as string}`
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
return credentials;
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<
|
||||
TKubernetesAccountCredentials
|
||||
> = async () => {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to rotate account credentials for ${resourceType}: not implemented`
|
||||
});
|
||||
};
|
||||
|
||||
const handleOverwritePreventionForCensoredValues = async (
|
||||
updatedAccountCredentials: TKubernetesAccountCredentials,
|
||||
currentCredentials: TKubernetesAccountCredentials
|
||||
) => {
|
||||
if (updatedAccountCredentials.authMethod !== currentCredentials.authMethod) {
|
||||
return updatedAccountCredentials;
|
||||
}
|
||||
|
||||
if (
|
||||
updatedAccountCredentials.authMethod === KubernetesAuthMethod.ServiceAccountToken &&
|
||||
currentCredentials.authMethod === KubernetesAuthMethod.ServiceAccountToken
|
||||
) {
|
||||
if (updatedAccountCredentials.serviceAccountToken === "__INFISICAL_UNCHANGED__") {
|
||||
return {
|
||||
...updatedAccountCredentials,
|
||||
serviceAccountToken: currentCredentials.serviceAccountToken
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return updatedAccountCredentials;
|
||||
};
|
||||
|
||||
return {
|
||||
validateConnection,
|
||||
validateAccountCredentials,
|
||||
rotateAccountCredentials,
|
||||
handleOverwritePreventionForCensoredValues
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,8 @@
import { KubernetesResourceListItemSchema } from "./kubernetes-resource-schemas";

export const getKubernetesResourceListItem = () => {
  return {
    name: KubernetesResourceListItemSchema.shape.name.value,
    resource: KubernetesResourceListItemSchema.shape.resource.value
  };
};
@@ -0,0 +1,94 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { PamResource } from "../pam-resource-enums";
|
||||
import {
|
||||
BaseCreateGatewayPamResourceSchema,
|
||||
BaseCreatePamAccountSchema,
|
||||
BasePamAccountSchema,
|
||||
BasePamAccountSchemaWithResource,
|
||||
BasePamResourceSchema,
|
||||
BaseUpdateGatewayPamResourceSchema,
|
||||
BaseUpdatePamAccountSchema
|
||||
} from "../pam-resource-schemas";
|
||||
import { KubernetesAuthMethod } from "./kubernetes-resource-enums";
|
||||
|
||||
export const BaseKubernetesResourceSchema = BasePamResourceSchema.extend({
|
||||
resourceType: z.literal(PamResource.Kubernetes)
|
||||
});
|
||||
|
||||
export const KubernetesResourceListItemSchema = z.object({
|
||||
name: z.literal("Kubernetes"),
|
||||
resource: z.literal(PamResource.Kubernetes)
|
||||
});
|
||||
|
||||
export const KubernetesResourceConnectionDetailsSchema = z.object({
|
||||
url: z.string().url().trim().max(500),
|
||||
sslRejectUnauthorized: z.boolean(),
|
||||
sslCertificate: z
|
||||
.string()
|
||||
.trim()
|
||||
.transform((value) => value || undefined)
|
||||
.optional()
|
||||
});
|
||||
|
||||
export const KubernetesServiceAccountTokenCredentialsSchema = z.object({
|
||||
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken),
|
||||
serviceAccountToken: z.string().trim().max(10000)
|
||||
});
|
||||
|
||||
export const KubernetesAccountCredentialsSchema = z.discriminatedUnion("authMethod", [
|
||||
KubernetesServiceAccountTokenCredentialsSchema
|
||||
]);
|
||||
|
||||
export const KubernetesResourceSchema = BaseKubernetesResourceSchema.extend({
|
||||
connectionDetails: KubernetesResourceConnectionDetailsSchema,
|
||||
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
|
||||
});
|
||||
|
||||
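// sanitized variant strips the service account token from rotation credentials, exposing only the auth method to clients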
export const SanitizedKubernetesResourceSchema = BaseKubernetesResourceSchema.extend({
|
||||
connectionDetails: KubernetesResourceConnectionDetailsSchema,
|
||||
rotationAccountCredentials: z
|
||||
.discriminatedUnion("authMethod", [
|
||||
z.object({
|
||||
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken)
|
||||
})
|
||||
])
|
||||
.nullable()
|
||||
.optional()
|
||||
});
|
||||
|
||||
export const CreateKubernetesResourceSchema = BaseCreateGatewayPamResourceSchema.extend({
|
||||
connectionDetails: KubernetesResourceConnectionDetailsSchema,
|
||||
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
|
||||
});
|
||||
|
||||
export const UpdateKubernetesResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({
|
||||
connectionDetails: KubernetesResourceConnectionDetailsSchema.optional(),
|
||||
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
|
||||
});
|
||||
|
||||
// Accounts
|
||||
export const KubernetesAccountSchema = BasePamAccountSchema.extend({
|
||||
credentials: KubernetesAccountCredentialsSchema
|
||||
});
|
||||
|
||||
export const CreateKubernetesAccountSchema = BaseCreatePamAccountSchema.extend({
|
||||
credentials: KubernetesAccountCredentialsSchema
|
||||
});
|
||||
|
||||
export const UpdateKubernetesAccountSchema = BaseUpdatePamAccountSchema.extend({
|
||||
credentials: KubernetesAccountCredentialsSchema.optional()
|
||||
});
|
||||
|
||||
export const SanitizedKubernetesAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
|
||||
credentials: z.discriminatedUnion("authMethod", [
|
||||
z.object({
|
||||
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken)
|
||||
})
|
||||
])
|
||||
});
|
||||
|
||||
// Sessions
|
||||
export const KubernetesSessionCredentialsSchema = KubernetesResourceConnectionDetailsSchema.and(
|
||||
KubernetesAccountCredentialsSchema
|
||||
);
|
||||
@@ -0,0 +1,16 @@
import { z } from "zod";

import {
  KubernetesAccountCredentialsSchema,
  KubernetesAccountSchema,
  KubernetesResourceConnectionDetailsSchema,
  KubernetesResourceSchema
} from "./kubernetes-resource-schemas";

// Resources
export type TKubernetesResource = z.infer<typeof KubernetesResourceSchema>;
export type TKubernetesResourceConnectionDetails = z.infer<typeof KubernetesResourceConnectionDetailsSchema>;

// Accounts
export type TKubernetesAccount = z.infer<typeof KubernetesAccountSchema>;
export type TKubernetesAccountCredentials = z.infer<typeof KubernetesAccountCredentialsSchema>;
@@ -2,6 +2,7 @@ export enum PamResource {
  Postgres = "postgres",
  MySQL = "mysql",
  SSH = "ssh",
  Kubernetes = "kubernetes",
  AwsIam = "aws-iam"
}
@@ -1,4 +1,5 @@
import { awsIamResourceFactory } from "./aws-iam/aws-iam-resource-factory";
import { kubernetesResourceFactory } from "./kubernetes/kubernetes-resource-factory";
import { PamResource } from "./pam-resource-enums";
import { TPamAccountCredentials, TPamResourceConnectionDetails, TPamResourceFactory } from "./pam-resource-types";
import { sqlResourceFactory } from "./shared/sql/sql-resource-factory";
@@ -10,5 +11,6 @@ export const PAM_RESOURCE_FACTORY_MAP: Record<PamResource, TPamResourceFactoryIm
  [PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation,
  [PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation,
  [PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation,
  [PamResource.Kubernetes]: kubernetesResourceFactory as TPamResourceFactoryImplementation,
  [PamResource.AwsIam]: awsIamResourceFactory as TPamResourceFactoryImplementation
};
@@ -4,14 +4,18 @@ import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { decryptAccountCredentials } from "../pam-account/pam-account-fns";
|
||||
import { getAwsIamResourceListItem } from "./aws-iam/aws-iam-resource-fns";
|
||||
import { getKubernetesResourceListItem } from "./kubernetes/kubernetes-resource-fns";
|
||||
import { getMySQLResourceListItem } from "./mysql/mysql-resource-fns";
|
||||
import { TPamResource, TPamResourceConnectionDetails } from "./pam-resource-types";
|
||||
import { getPostgresResourceListItem } from "./postgres/postgres-resource-fns";
|
||||
|
||||
export const listResourceOptions = () => {
|
||||
return [getPostgresResourceListItem(), getMySQLResourceListItem(), getAwsIamResourceListItem()].sort((a, b) =>
|
||||
a.name.localeCompare(b.name)
|
||||
);
|
||||
return [
|
||||
getPostgresResourceListItem(),
|
||||
getMySQLResourceListItem(),
|
||||
getAwsIamResourceListItem(),
|
||||
getKubernetesResourceListItem()
|
||||
].sort((a, b) => a.name.localeCompare(b.name));
|
||||
};
|
||||
|
||||
// Resource
|
||||
|
||||
@@ -7,6 +7,12 @@ import {
|
||||
TAwsIamResource,
|
||||
TAwsIamResourceConnectionDetails
|
||||
} from "./aws-iam/aws-iam-resource-types";
|
||||
import {
|
||||
TKubernetesAccount,
|
||||
TKubernetesAccountCredentials,
|
||||
TKubernetesResource,
|
||||
TKubernetesResourceConnectionDetails
|
||||
} from "./kubernetes/kubernetes-resource-types";
|
||||
import {
|
||||
TMySQLAccount,
|
||||
TMySQLAccountCredentials,
|
||||
@@ -28,21 +34,23 @@ import {
|
||||
} from "./ssh/ssh-resource-types";
|
||||
|
||||
// Resource types
|
||||
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource | TAwsIamResource;
|
||||
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource | TAwsIamResource | TKubernetesResource;
|
||||
export type TPamResourceConnectionDetails =
|
||||
| TPostgresResourceConnectionDetails
|
||||
| TMySQLResourceConnectionDetails
|
||||
| TSSHResourceConnectionDetails
|
||||
| TKubernetesResourceConnectionDetails
|
||||
| TAwsIamResourceConnectionDetails;
|
||||
|
||||
// Account types
|
||||
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount | TAwsIamAccount;
|
||||
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount | TAwsIamAccount | TKubernetesAccount;
|
||||
|
||||
export type TPamAccountCredentials =
|
||||
| TPostgresAccountCredentials
|
||||
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
|
||||
| TMySQLAccountCredentials
|
||||
| TSSHAccountCredentials
|
||||
| TKubernetesAccountCredentials
|
||||
| TAwsIamAccountCredentials;
|
||||
|
||||
// Resource DTOs
|
||||
|
||||
@@ -11,6 +11,8 @@ export const PamSessionCommandLogSchema = z.object({
|
||||
// SSH Terminal Event schemas
|
||||
export const TerminalEventTypeSchema = z.enum(["input", "output", "resize", "error"]);
|
||||
|
||||
export const HttpEventTypeSchema = z.enum(["request", "response"]);
|
||||
|
||||
export const TerminalEventSchema = z.object({
|
||||
timestamp: z.coerce.date(),
|
||||
eventType: TerminalEventTypeSchema,
|
||||
@@ -18,8 +20,29 @@ export const TerminalEventSchema = z.object({
|
||||
elapsedTime: z.number() // Seconds since session start (for replay)
|
||||
});
|
||||
|
||||
export const HttpBaseEventSchema = z.object({
|
||||
timestamp: z.coerce.date(),
|
||||
requestId: z.string(),
|
||||
eventType: HttpEventTypeSchema,
|
||||
headers: z.record(z.string(), z.array(z.string())),
|
||||
body: z.string().optional()
|
||||
});
|
||||
|
||||
export const HttpRequestEventSchema = HttpBaseEventSchema.extend({
|
||||
eventType: z.literal(HttpEventTypeSchema.Values.request),
|
||||
method: z.string(),
|
||||
url: z.string()
|
||||
});
|
||||
|
||||
export const HttpResponseEventSchema = HttpBaseEventSchema.extend({
|
||||
eventType: z.literal(HttpEventTypeSchema.Values.response),
|
||||
status: z.string()
|
||||
});
|
||||
|
||||
export const HttpEventSchema = z.discriminatedUnion("eventType", [HttpRequestEventSchema, HttpResponseEventSchema]);
|
||||
|
||||
export const SanitizedSessionSchema = PamSessionsSchema.omit({
|
||||
encryptedLogsBlob: true
|
||||
}).extend({
|
||||
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
|
||||
logs: z.array(z.union([PamSessionCommandLogSchema, HttpEventSchema, TerminalEventSchema]))
|
||||
});
|
||||
|
||||
@@ -1,13 +1,19 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { PamSessionCommandLogSchema, SanitizedSessionSchema, TerminalEventSchema } from "./pam-session-schemas";
|
||||
import {
|
||||
HttpEventSchema,
|
||||
PamSessionCommandLogSchema,
|
||||
SanitizedSessionSchema,
|
||||
TerminalEventSchema
|
||||
} from "./pam-session-schemas";
|
||||
|
||||
export type TPamSessionCommandLog = z.infer<typeof PamSessionCommandLogSchema>;
|
||||
export type TTerminalEvent = z.infer<typeof TerminalEventSchema>;
|
||||
export type THttpEvent = z.infer<typeof HttpEventSchema>;
|
||||
export type TPamSanitizedSession = z.infer<typeof SanitizedSessionSchema>;
|
||||
|
||||
// DTOs
|
||||
export type TUpdateSessionLogsDTO = {
|
||||
sessionId: string;
|
||||
logs: (TPamSessionCommandLog | TTerminalEvent)[];
|
||||
logs: (TPamSessionCommandLog | TTerminalEvent | THttpEvent)[];
|
||||
};
|
||||
|
||||
@@ -88,8 +88,10 @@ export enum OrgPermissionGroupActions {
|
||||
Edit = "edit",
|
||||
Delete = "delete",
|
||||
GrantPrivileges = "grant-privileges",
|
||||
AddIdentities = "add-identities",
|
||||
AddMembers = "add-members",
|
||||
RemoveMembers = "remove-members"
|
||||
RemoveMembers = "remove-members",
|
||||
RemoveIdentities = "remove-identities"
|
||||
}
|
||||
|
||||
export enum OrgPermissionBillingActions {
|
||||
@@ -381,8 +383,10 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.AddIdentities, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.RemoveIdentities, OrgPermissionSubjects.Groups);
|
||||
|
||||
can(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionBillingActions.ManageBilling, OrgPermissionSubjects.Billing);
|
||||
|
||||
@@ -178,6 +178,16 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
.where(`${TableName.UserGroupMembership}.userId`, actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
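// groups in the org that this identity is a member of; memberships granted through these groups are picked up below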
const identityGroupSubquery = (tx || db)(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, scopeData.orgId)
|
||||
.where(`${TableName.IdentityGroupMembership}.identityId`, actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const docs = await (tx || db)
|
||||
.replicaNode()(TableName.Membership)
|
||||
.join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`)
|
||||
@@ -214,7 +224,9 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
.where(`${TableName.Membership}.actorUserId`, actorId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, userGroupSubquery);
|
||||
} else if (actorType === ActorType.IDENTITY) {
|
||||
void qb.where(`${TableName.Membership}.actorIdentityId`, actorId);
|
||||
void qb
|
||||
.where(`${TableName.Membership}.actorIdentityId`, actorId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
|
||||
}
|
||||
})
|
||||
.where((qb) => {
|
||||
@@ -653,6 +665,15 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
orgId: string
|
||||
) => {
|
||||
try {
|
||||
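// all groups in the org (left-joined to identity group memberships); membership rows held by these groups are
// included below alongside direct identity memberships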
const identityGroupSubquery = db(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, orgId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`)
|
||||
@@ -668,7 +689,11 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
void queryBuilder.on(`${TableName.Membership}.actorIdentityId`, `${TableName.IdentityMetadata}.identityId`);
|
||||
})
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
|
||||
})
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Project)
|
||||
.where(`${TableName.Membership}.scopeProjectId`, projectId)
|
||||
.select(selectAllTableCols(TableName.MembershipRole))
|
||||
|
||||
@@ -122,6 +122,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
const result = await (tx || db)(TableName.PkiAcmeChallenge)
|
||||
.join(TableName.PkiAcmeAuth, `${TableName.PkiAcmeChallenge}.authId`, `${TableName.PkiAcmeAuth}.id`)
|
||||
.join(TableName.PkiAcmeAccount, `${TableName.PkiAcmeAuth}.accountId`, `${TableName.PkiAcmeAccount}.id`)
|
||||
.join(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.PkiAcmeAccount}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
)
|
||||
.select(
|
||||
selectAllTableCols(TableName.PkiAcmeChallenge),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeAuth).as("authId"),
|
||||
@@ -131,7 +136,9 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
db.ref("identifierValue").withSchema(TableName.PkiAcmeAuth).as("authIdentifierValue"),
|
||||
db.ref("expiresAt").withSchema(TableName.PkiAcmeAuth).as("authExpiresAt"),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeAccount).as("accountId"),
|
||||
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint")
|
||||
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint"),
|
||||
db.ref("profileId").withSchema(TableName.PkiAcmeAccount).as("profileId"),
|
||||
db.ref("projectId").withSchema(TableName.PkiCertificateProfile).as("projectId")
|
||||
)
|
||||
// For all challenges, acquire update lock on the auth to avoid race conditions
|
||||
.forUpdate(TableName.PkiAcmeAuth)
|
||||
@@ -149,6 +156,8 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
authExpiresAt,
|
||||
accountId,
|
||||
accountPublicKeyThumbprint,
|
||||
profileId,
|
||||
projectId,
|
||||
...challenge
|
||||
} = result;
|
||||
return {
|
||||
@@ -161,7 +170,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
expiresAt: authExpiresAt,
|
||||
account: {
|
||||
id: accountId,
|
||||
publicKeyThumbprint: accountPublicKeyThumbprint
|
||||
publicKeyThumbprint: accountPublicKeyThumbprint,
|
||||
project: {
|
||||
id: projectId
|
||||
},
|
||||
profileId
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,7 +5,9 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { isPrivateIp } from "@app/lib/ip/ipRange";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
|
||||
import { TPkiAcmeChallengeDALFactory } from "./pki-acme-challenge-dal";
|
||||
import {
|
||||
AcmeConnectionError,
|
||||
@@ -25,10 +27,12 @@ type TPkiAcmeChallengeServiceFactoryDep = {
|
||||
| "markAsInvalidCascadeById"
|
||||
| "updateById"
|
||||
>;
|
||||
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
|
||||
};
|
||||
|
||||
export const pkiAcmeChallengeServiceFactory = ({
|
||||
acmeChallengeDAL
|
||||
acmeChallengeDAL,
|
||||
auditLogService
|
||||
}: TPkiAcmeChallengeServiceFactoryDep): TPkiAcmeChallengeServiceFactory => {
|
||||
const appCfg = getConfig();
|
||||
const markChallengeAsReady = async (challengeId: string): Promise<TPkiAcmeChallenges> => {
|
||||
@@ -113,7 +117,25 @@ export const pkiAcmeChallengeServiceFactory = ({
|
||||
}
|
||||
logger.info({ challengeId }, "ACME challenge response is correct, marking challenge as valid");
|
||||
await acmeChallengeDAL.markAsValidCascadeById(challengeId);
|
||||
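// record the successful challenge validation in the project's audit log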
await auditLogService.createAuditLog({
|
||||
projectId: challenge.auth.account.project.id,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId: challenge.auth.account.profileId,
|
||||
accountId: challenge.auth.account.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.PASS_ACME_CHALLENGE,
|
||||
metadata: {
|
||||
challengeId,
|
||||
type: challenge.type as AcmeChallengeType
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (exp) {
|
||||
let finalAttempt = false;
|
||||
if (retryCount >= 2) {
|
||||
logger.error(
|
||||
exp,
|
||||
@@ -121,35 +143,59 @@ export const pkiAcmeChallengeServiceFactory = ({
|
||||
);
|
||||
// This was the last attempt to validate the challenge response; mark the challenge as invalid
|
||||
await acmeChallengeDAL.markAsInvalidCascadeById(challengeId);
|
||||
finalAttempt = true;
|
||||
}
|
||||
// Properly type and inspect the error
|
||||
if (axios.isAxiosError(exp)) {
|
||||
const axiosError = exp as AxiosError;
|
||||
const errorCode = axiosError.code;
|
||||
const errorMessage = axiosError.message;
|
||||
try {
|
||||
// Properly type and inspect the error
|
||||
if (axios.isAxiosError(exp)) {
|
||||
const axiosError = exp as AxiosError;
|
||||
const errorCode = axiosError.code;
|
||||
const errorMessage = axiosError.message;
|
||||
|
||||
if (errorCode === "ECONNREFUSED" || errorMessage.includes("ECONNREFUSED")) {
|
||||
throw new AcmeConnectionError({ message: "Connection refused" });
|
||||
if (errorCode === "ECONNREFUSED" || errorMessage.includes("ECONNREFUSED")) {
|
||||
throw new AcmeConnectionError({ message: "Connection refused" });
|
||||
}
|
||||
if (errorCode === "ENOTFOUND" || errorMessage.includes("ENOTFOUND")) {
|
||||
throw new AcmeDnsFailureError({ message: "Hostname could not be resolved (DNS failure)" });
|
||||
}
|
||||
if (errorCode === "ECONNRESET" || errorMessage.includes("ECONNRESET")) {
|
||||
throw new AcmeConnectionError({ message: "Connection reset by peer" });
|
||||
}
|
||||
if (errorCode === "ECONNABORTED" || errorMessage.includes("timeout")) {
|
||||
logger.error(exp, "Connection timed out while validating ACME challenge response");
|
||||
throw new AcmeConnectionError({ message: "Connection timed out" });
|
||||
}
|
||||
logger.error(exp, "Unknown error validating ACME challenge response");
|
||||
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
|
||||
}
|
||||
if (errorCode === "ENOTFOUND" || errorMessage.includes("ENOTFOUND")) {
|
||||
throw new AcmeDnsFailureError({ message: "Hostname could not be resolved (DNS failure)" });
|
||||
}
|
||||
if (errorCode === "ECONNRESET" || errorMessage.includes("ECONNRESET")) {
|
||||
throw new AcmeConnectionError({ message: "Connection reset by peer" });
|
||||
}
|
||||
if (errorCode === "ECONNABORTED" || errorMessage.includes("timeout")) {
|
||||
logger.error(exp, "Connection timed out while validating ACME challenge response");
|
||||
throw new AcmeConnectionError({ message: "Connection timed out" });
|
||||
if (exp instanceof Error) {
|
||||
logger.error(exp, "Error validating ACME challenge response");
|
||||
throw exp;
|
||||
}
|
||||
logger.error(exp, "Unknown error validating ACME challenge response");
|
||||
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
|
||||
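// audit the attempt; on the final retry this is logged as a failure, then the original error is rethrown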
} catch (outerExp) {
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: challenge.auth.account.project.id,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId: challenge.auth.account.profileId,
|
||||
accountId: challenge.auth.account.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: finalAttempt ? EventType.FAIL_ACME_CHALLENGE : EventType.ATTEMPT_ACME_CHALLENGE,
|
||||
metadata: {
|
||||
challengeId,
|
||||
type: challenge.type as AcmeChallengeType,
|
||||
retryCount,
|
||||
errorMessage: exp instanceof Error ? exp.message : "Unknown error"
|
||||
}
|
||||
}
|
||||
});
|
||||
throw outerExp;
|
||||
}
|
||||
if (exp instanceof Error) {
|
||||
logger.error(exp, "Error validating ACME challenge response");
|
||||
throw exp;
|
||||
}
|
||||
logger.error(exp, "Unknown error validating ACME challenge response");
|
||||
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
|
||||
}
|
||||
};

@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";

export type TPkiAcmeOrderDALFactory = ReturnType<typeof pkiAcmeOrderDALFactory>;

@@ -19,6 +20,43 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
}
};

const findWithCertificateRequestForSync = async (id: string, tx?: Knex) => {
try {
const order = await (tx || db)(TableName.PkiAcmeOrder)
.leftJoin(
TableName.CertificateRequests,
`${TableName.PkiAcmeOrder}.id`,
`${TableName.CertificateRequests}.acmeOrderId`
)
.select(
selectAllTableCols(TableName.PkiAcmeOrder),
db.ref("id").withSchema(TableName.CertificateRequests).as("certificateRequestId"),
db.ref("status").withSchema(TableName.CertificateRequests).as("certificateRequestStatus"),
db.ref("certificateId").withSchema(TableName.CertificateRequests).as("certificateId")
)
.forUpdate(TableName.PkiAcmeOrder)
.where(`${TableName.PkiAcmeOrder}.id`, id)
.first();
if (!order) {
return null;
}
const { certificateRequestId, certificateRequestStatus, certificateId, ...details } = order;
return {
...details,
certificateRequest:
certificateRequestId && certificateRequestStatus
? {
id: certificateRequestId,
status: certificateRequestStatus as CertificateRequestStatus,
certificateId
}
: undefined
};
} catch (error) {
throw new DatabaseError({ error, name: "Find PKI ACME order by id with certificate request" });
}
};
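// Note on the locking above (descriptive, assuming Postgres SELECT ... FOR UPDATE semantics via
// Knex): .forUpdate(TableName.PkiAcmeOrder) locks only the matching order row, so concurrent
// syncs of the same order serialize while unrelated orders stay unblocked, and the joined
// certificate request columns are read in the same statement to avoid a second round trip.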

const findByAccountAndOrderIdWithAuthorizations = async (accountId: string, orderId: string, tx?: Knex) => {
try {
const rows = await (tx || db)(TableName.PkiAcmeOrder)
@@ -72,6 +110,7 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
return {
...pkiAcmeOrderOrm,
findByIdForFinalization,
findWithCertificateRequestForSync,
findByAccountAndOrderIdWithAuthorizations,
listByAccountId
};

@@ -6,8 +6,8 @@ export enum AcmeIdentifierType {

export enum AcmeOrderStatus {
Pending = "pending",
Processing = "processing",
Ready = "ready",
Processing = "processing",
Valid = "valid",
Invalid = "invalid"
}

@@ -7,8 +7,10 @@ import {
importJWK,
JWSHeaderParameters
} from "jose";
import { Knex } from "knex";
import { z, ZodError } from "zod";

import { TPkiAcmeOrders } from "@app/db/schemas";
import { TPkiAcmeAccounts } from "@app/db/schemas/pki-acme-accounts";
import { TPkiAcmeAuths } from "@app/db/schemas/pki-acme-auths";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
@@ -17,20 +19,15 @@ import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { logger } from "@app/lib/logger";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { ActorType } from "@app/services/auth/auth-type";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
import {
CertExtendedKeyUsage,
CertKeyUsage,
CertSubjectAlternativeNameType
} from "@app/services/certificate/certificate-types";
import { orderCertificate } from "@app/services/certificate-authority/acme/acme-certificate-authority-fns";
import { CertSubjectAlternativeNameType } from "@app/services/certificate/certificate-types";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { TExternalCertificateAuthorityDALFactory } from "@app/services/certificate-authority/external-certificate-authority-dal";
import {
TCertificateIssuanceQueueFactory,
TIssueCertificateFromProfileJobData
} from "@app/services/certificate-authority/certificate-issuance-queue";
import {
extractAlgorithmsFromCSR,
extractCertificateRequestFromCSR
@@ -40,6 +37,8 @@ import {
EnrollmentType,
TCertificateProfileWithConfigs
} from "@app/services/certificate-profile/certificate-profile-types";
import { TCertificateRequestServiceFactory } from "@app/services/certificate-request/certificate-request-service";
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";
import { TCertificateTemplateV2DALFactory } from "@app/services/certificate-template-v2/certificate-template-v2-dal";
import { TCertificateTemplateV2ServiceFactory } from "@app/services/certificate-template-v2/certificate-template-v2-service";
import { TCertificateV3ServiceFactory } from "@app/services/certificate-v3/certificate-v3-service";
@@ -47,6 +46,7 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";

import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TPkiAcmeAccountDALFactory } from "./pki-acme-account-dal";
import { TPkiAcmeAuthDALFactory } from "./pki-acme-auth-dal";
@@ -99,13 +99,9 @@ import {

type TPkiAcmeServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction" | "findById">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">;
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction" | "updateById">;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa">;
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "update">;
certificateProfileDAL: Pick<TCertificateProfileDALFactory, "findByIdWithOwnerOrgId" | "findByIdWithConfigs">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "findOne" | "create">;
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "findOne" | "create">;
certificateTemplateV2DAL: Pick<TCertificateTemplateV2DALFactory, "findById">;
acmeAccountDAL: Pick<
TPkiAcmeAccountDALFactory,
@@ -113,11 +109,13 @@ type TPkiAcmeServiceFactoryDep = {
>;
acmeOrderDAL: Pick<
TPkiAcmeOrderDALFactory,
| "findById"
| "create"
| "transaction"
| "updateById"
| "findByAccountAndOrderIdWithAuthorizations"
| "findByIdForFinalization"
| "findWithCertificateRequestForSync"
| "listByAccountId"
>;
acmeAuthDAL: Pick<TPkiAcmeAuthDALFactory, "create" | "findByAccountIdAndAuthIdWithChallenges">;
@@ -134,19 +132,18 @@ type TPkiAcmeServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
certificateV3Service: Pick<TCertificateV3ServiceFactory, "signCertificateFromProfile">;
certificateTemplateV2Service: Pick<TCertificateTemplateV2ServiceFactory, "validateCertificateRequest">;
certificateRequestService: Pick<TCertificateRequestServiceFactory, "createCertificateRequest">;
certificateIssuanceQueue: Pick<TCertificateIssuanceQueueFactory, "queueCertificateIssuance">;
acmeChallengeService: Pick<TPkiAcmeChallengeServiceFactory, "markChallengeAsReady">;
pkiAcmeQueueService: Pick<TPkiAcmeQueueServiceFactory, "queueChallengeValidation">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
};

export const pkiAcmeServiceFactory = ({
projectDAL,
appConnectionDAL,
certificateDAL,
certificateAuthorityDAL,
externalCertificateAuthorityDAL,
certificateProfileDAL,
certificateBodyDAL,
certificateSecretDAL,
certificateTemplateV2DAL,
acmeAccountDAL,
acmeOrderDAL,
@@ -158,8 +155,11 @@ export const pkiAcmeServiceFactory = ({
licenseService,
certificateV3Service,
certificateTemplateV2Service,
certificateRequestService,
certificateIssuanceQueue,
acmeChallengeService,
pkiAcmeQueueService
pkiAcmeQueueService,
auditLogService
}: TPkiAcmeServiceFactoryDep): TPkiAcmeServiceFactory => {
const validateAcmeProfile = async (profileId: string): Promise<TCertificateProfileWithConfigs> => {
const profile = await certificateProfileDAL.findByIdWithConfigs(profileId);
@@ -364,6 +364,52 @@ export const pkiAcmeServiceFactory = ({
};
};

const checkAndSyncAcmeOrderStatus = async ({ orderId }: { orderId: string }): Promise<TPkiAcmeOrders> => {
const order = await acmeOrderDAL.findById(orderId);
if (!order) {
throw new NotFoundError({ message: "ACME order not found" });
}
if (order.status !== AcmeOrderStatus.Processing) {
// We only care about processing orders, as they are the ones that have async certificate requests
return order;
}
return acmeOrderDAL.transaction(async (tx) => {
// Lock the order for syncing with async cert request
const orderWithCertificateRequest = await acmeOrderDAL.findWithCertificateRequestForSync(orderId, tx);
if (!orderWithCertificateRequest) {
throw new NotFoundError({ message: "ACME order not found" });
}
// Check the status again after we have acquired the lock, as things may have changed since we last checked
if (
orderWithCertificateRequest.status !== AcmeOrderStatus.Processing ||
!orderWithCertificateRequest.certificateRequest
) {
return orderWithCertificateRequest;
}
let newStatus: AcmeOrderStatus | undefined;
let newCertificateId: string | undefined;
switch (orderWithCertificateRequest.certificateRequest.status) {
case CertificateRequestStatus.PENDING:
break;
case CertificateRequestStatus.ISSUED:
newStatus = AcmeOrderStatus.Valid;
newCertificateId = orderWithCertificateRequest.certificateRequest.certificateId ?? undefined;
break;
case CertificateRequestStatus.FAILED:
newStatus = AcmeOrderStatus.Invalid;
break;
default:
throw new AcmeServerInternalError({
message: `Invalid certificate request status: ${orderWithCertificateRequest.certificateRequest.status as string}`
});
}
if (newStatus) {
return acmeOrderDAL.updateById(orderId, { status: newStatus, certificateId: newCertificateId }, tx);
}
return orderWithCertificateRequest;
});
};
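// A minimal usage sketch (illustrative only; the real call sites are the order/certificate
// handlers further below): the sync runs lazily whenever a client polls the order, e.g.
//   const order = await checkAndSyncAcmeOrderStatus({ orderId });
//   if (order.status === AcmeOrderStatus.Valid && order.certificateId) {
//     // the async certificate request has been issued; the certificate can now be served
//   }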

const getAcmeDirectory = async (profileId: string): Promise<TGetAcmeDirectoryResponse> => {
const profile = await validateAcmeProfile(profileId);
return {
@@ -446,6 +492,23 @@ export const pkiAcmeServiceFactory = ({
throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
}
if (existingAccount) {
await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_PROFILE,
metadata: {
profileId: profile.id
}
},
event: {
type: EventType.RETRIEVE_ACME_ACCOUNT,
metadata: {
accountId: existingAccount.id,
publicKeyThumbprint
}
}
});

return {
status: 200,
body: {
@@ -518,7 +581,25 @@ export const pkiAcmeServiceFactory = ({
publicKeyThumbprint,
emails: contact ?? []
});
// TODO: create audit log here

await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_PROFILE,
metadata: {
profileId: profile.id
}
},
event: {
type: EventType.CREATE_ACME_ACCOUNT,
metadata: {
accountId: newAccount.id,
publicKeyThumbprint: newAccount.publicKeyThumbprint,
emails: newAccount.emails
}
}
});

return {
status: 201,
body: {
@@ -567,6 +648,8 @@ export const pkiAcmeServiceFactory = ({
accountId: string;
payload: TCreateAcmeOrderPayload;
}): Promise<TAcmeResponse<TAcmeOrderResource>> => {
const profile = await validateAcmeProfile(profileId);
const skipDnsOwnershipVerification = profile.acmeConfig?.skipDnsOwnershipVerification ?? false;
// TODO: check and see if we have existing orders for this account that meet the criteria
// if we do, return the existing order
// TODO: check the identifiers and see if they are even allowed for this profile.
@@ -592,7 +675,7 @@ export const pkiAcmeServiceFactory = ({
const createdOrder = await acmeOrderDAL.create(
{
accountId: account.id,
status: AcmeOrderStatus.Pending,
status: skipDnsOwnershipVerification ? AcmeOrderStatus.Ready : AcmeOrderStatus.Pending,
notBefore: payload.notBefore ? new Date(payload.notBefore) : undefined,
notAfter: payload.notAfter ? new Date(payload.notAfter) : undefined,
// TODO: read config from the profile to get the expiration time instead
@@ -611,7 +694,7 @@ export const pkiAcmeServiceFactory = ({
const auth = await acmeAuthDAL.create(
{
accountId: account.id,
status: AcmeAuthStatus.Pending,
status: skipDnsOwnershipVerification ? AcmeAuthStatus.Valid : AcmeAuthStatus.Pending,
identifierType: identifier.type,
identifierValue: identifier.value,
// RFC 8555 suggests a token with at least 128 bits of entropy
@@ -623,15 +706,17 @@ export const pkiAcmeServiceFactory = ({
},
tx
);
// TODO: support other challenge types here. Currently only HTTP-01 is supported.
await acmeChallengeDAL.create(
{
authId: auth.id,
status: AcmeChallengeStatus.Pending,
type: AcmeChallengeType.HTTP_01
},
tx
);
if (!skipDnsOwnershipVerification) {
// TODO: support other challenge types here. Currently only HTTP-01 is supported.
await acmeChallengeDAL.create(
{
authId: auth.id,
status: AcmeChallengeStatus.Pending,
type: AcmeChallengeType.HTTP_01
},
tx
);
}
return auth;
})
);
@@ -643,7 +728,26 @@ export const pkiAcmeServiceFactory = ({
})),
tx
);
// TODO: create audit log here
await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId: account.profileId,
accountId: account.id
}
},
event: {
type: EventType.CREATE_ACME_ORDER,
metadata: {
orderId: createdOrder.id,
identifiers: authorizations.map((auth) => ({
type: auth.identifierType as AcmeIdentifierType,
value: auth.identifierValue
}))
}
}
});
return { ...createdOrder, authorizations, account };
});
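// Behavioral summary of the branch above (descriptive note): when skipDnsOwnershipVerification
// is enabled, the order is created as "ready" and each authorization as "valid", and no HTTP-01
// challenge rows are written, so a client can proceed straight to finalization without proving
// DNS ownership.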

@@ -673,9 +777,12 @@ export const pkiAcmeServiceFactory = ({
if (!order) {
throw new NotFoundError({ message: "ACME order not found" });
}
// Sync the order first in case there is a certificate request that needs to be processed
await checkAndSyncAcmeOrderStatus({ orderId });
const updatedOrder = (await acmeOrderDAL.findByAccountAndOrderIdWithAuthorizations(accountId, orderId))!;
return {
status: 200,
body: buildAcmeOrderResource({ profileId, order }),
body: buildAcmeOrderResource({ profileId, order: updatedOrder }),
headers: {
Location: buildUrl(profileId, `/orders/${orderId}`),
Link: `<${buildUrl(profileId, "/directory")}>;rel="index"`
@@ -683,6 +790,129 @@ export const pkiAcmeServiceFactory = ({
};
};

const processCertificateIssuanceForOrder = async ({
caType,
accountId,
actorOrgId,
profileId,
orderId,
csr,
finalizingOrder,
certificateRequest,
profile,
ca,
tx
}: {
caType: CaType;
accountId: string;
actorOrgId: string;
profileId: string;
orderId: string;
csr: string;
finalizingOrder: {
notBefore?: Date | null;
notAfter?: Date | null;
};
certificateRequest: ReturnType<typeof extractCertificateRequestFromCSR>;
profile: TCertificateProfileWithConfigs;
ca: Awaited<ReturnType<typeof certificateAuthorityDAL.findByIdWithAssociatedCa>>;
tx?: Knex;
}): Promise<{ certificateId?: string; certIssuanceJobData?: TIssueCertificateFromProfileJobData }> => {
if (caType === CaType.INTERNAL) {
const result = await certificateV3Service.signCertificateFromProfile({
actor: ActorType.ACME_ACCOUNT,
actorId: accountId,
actorAuthMethod: null,
actorOrgId,
profileId,
csr,
notBefore: finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : undefined,
notAfter: finalizingOrder.notAfter ? new Date(finalizingOrder.notAfter) : undefined,
validity: !finalizingOrder.notAfter
? {
// 47 days, the default TTL that comes with Let's Encrypt
// TODO: read config from the profile to get the expiration time instead
ttl: `${47}d`
}
: // ttl is not used if notAfter is provided
({ ttl: "0d" } as const),
enrollmentType: EnrollmentType.ACME
});
return {
certificateId: result.certificateId
};
}

const { keyAlgorithm: extractedKeyAlgorithm, signatureAlgorithm: extractedSignatureAlgorithm } =
extractAlgorithmsFromCSR(csr);
const updatedCertificateRequest = {
...certificateRequest,
keyAlgorithm: extractedKeyAlgorithm,
signatureAlgorithm: extractedSignatureAlgorithm,
validity: finalizingOrder.notAfter
? (() => {
const notBefore = finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : new Date();
const notAfter = new Date(finalizingOrder.notAfter);
const diffMs = notAfter.getTime() - notBefore.getTime();
const diffDays = Math.round(diffMs / (1000 * 60 * 60 * 24));
return { ttl: `${diffDays}d` };
})()
: certificateRequest.validity
};
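// Worked example (illustrative): notBefore = 2024-01-01T00:00:00Z and notAfter =
// 2024-02-17T00:00:00Z give diffMs / 86,400,000 = 47, so validity becomes { ttl: "47d" };
// when notAfter is absent, the CSR-derived validity is kept as-is.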

const template = await certificateTemplateV2DAL.findById(profile.certificateTemplateId);
if (!template) {
throw new NotFoundError({ message: "Certificate template not found" });
}
const validationResult = await certificateTemplateV2Service.validateCertificateRequest(
template.id,
updatedCertificateRequest
);
if (!validationResult.isValid) {
throw new AcmeBadCSRError({ message: `Invalid CSR: ${validationResult.errors.join(", ")}` });
}

const certRequest = await certificateRequestService.createCertificateRequest({
actor: ActorType.ACME_ACCOUNT,
actorId: accountId,
actorAuthMethod: null,
actorOrgId,
projectId: profile.projectId,
caId: ca.id,
profileId: profile.id,
commonName: updatedCertificateRequest.commonName ?? "",
keyUsages: updatedCertificateRequest.keyUsages?.map((usage) => usage.toString()) ?? [],
extendedKeyUsages: updatedCertificateRequest.extendedKeyUsages?.map((usage) => usage.toString()) ?? [],
keyAlgorithm: updatedCertificateRequest.keyAlgorithm || "",
signatureAlgorithm: updatedCertificateRequest.signatureAlgorithm || "",
altNames: updatedCertificateRequest.subjectAlternativeNames?.map((san) => san.value).join(","),
notBefore: updatedCertificateRequest.notBefore,
notAfter: updatedCertificateRequest.notAfter,
status: CertificateRequestStatus.PENDING,
acmeOrderId: orderId,
csr,
tx
});
const csrObj = new x509.Pkcs10CertificateRequest(csr);
const csrPem = csrObj.toString("pem");
return {
certIssuanceJobData: {
certificateId: orderId,
profileId: profile.id,
caId: profile.caId || "",
ttl: updatedCertificateRequest.validity?.ttl || "1y",
signatureAlgorithm: updatedCertificateRequest.signatureAlgorithm || "",
keyAlgorithm: updatedCertificateRequest.keyAlgorithm || "",
commonName: updatedCertificateRequest.commonName || "",
altNames: updatedCertificateRequest.subjectAlternativeNames?.map((san) => san.value) || [],
keyUsages: updatedCertificateRequest.keyUsages?.map((usage) => usage.toString()) ?? [],
extendedKeyUsages: updatedCertificateRequest.extendedKeyUsages?.map((usage) => usage.toString()) ?? [],
certificateRequestId: certRequest.id,
csr: csrPem
}
};
};
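// Descriptive summary of the helper above: for an internal CA the certificate is signed
// synchronously and only a certificateId is returned; for an external (ACME) CA a pending
// certificate request row is created and certIssuanceJobData is returned instead, so the
// caller can queue issuance once its transaction has committed.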

const finalizeAcmeOrder = async ({
profileId,
accountId,
@@ -707,7 +937,11 @@ export const pkiAcmeServiceFactory = ({
throw new NotFoundError({ message: "ACME order not found" });
}
if (order.status === AcmeOrderStatus.Ready) {
const { order: updatedOrder, error } = await acmeOrderDAL.transaction(async (tx) => {
const {
order: updatedOrder,
error,
certIssuanceJobData
} = await acmeOrderDAL.transaction(async (tx) => {
const finalizingOrder = (await acmeOrderDAL.findByIdForFinalization(orderId, tx))!;
// TODO: ideally, this should be done with onRequest: verifyAuth([AuthMode.ACME_JWS_SIGNATURE]) instead?
const { ownerOrgId: actorOrgId } = (await certificateProfileDAL.findByIdWithOwnerOrgId(profileId, tx))!;
@@ -754,94 +988,31 @@ export const pkiAcmeServiceFactory = ({
}
const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
let errorToReturn: Error | undefined;
let certIssuanceJobDataToReturn: TIssueCertificateFromProfileJobData | undefined;
try {
const { certificateId } = await (async () => {
if (caType === CaType.INTERNAL) {
const result = await certificateV3Service.signCertificateFromProfile({
actor: ActorType.ACME_ACCOUNT,
actorId: accountId,
actorAuthMethod: null,
actorOrgId,
profileId,
csr,
notBefore: finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : undefined,
notAfter: finalizingOrder.notAfter ? new Date(finalizingOrder.notAfter) : undefined,
validity: !finalizingOrder.notAfter
? {
// 47 days, the default TTL that comes with Let's Encrypt
// TODO: read config from the profile to get the expiration time instead
ttl: `${47}d`
}
: // ttl is not used if notAfter is provided
({ ttl: "0d" } as const),
enrollmentType: EnrollmentType.ACME
});
return { certificateId: result.certificateId };
}
const { certificateAuthority } = (await certificateProfileDAL.findByIdWithConfigs(profileId, tx))!;
const csrObj = new x509.Pkcs10CertificateRequest(csr);
const csrPem = csrObj.toString("pem");

const { keyAlgorithm: extractedKeyAlgorithm, signatureAlgorithm: extractedSignatureAlgorithm } =
extractAlgorithmsFromCSR(csr);

certificateRequest.keyAlgorithm = extractedKeyAlgorithm;
certificateRequest.signatureAlgorithm = extractedSignatureAlgorithm;
if (finalizingOrder.notAfter) {
const notBefore = finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : new Date();
const notAfter = new Date(finalizingOrder.notAfter);
const diffMs = notAfter.getTime() - notBefore.getTime();
const diffDays = Math.round(diffMs / (1000 * 60 * 60 * 24));
certificateRequest.validity = { ttl: `${diffDays}d` };
}

const template = await certificateTemplateV2DAL.findById(profile.certificateTemplateId);
if (!template) {
throw new NotFoundError({ message: "Certificate template not found" });
}
const validationResult = await certificateTemplateV2Service.validateCertificateRequest(
template.id,
certificateRequest
);
if (!validationResult.isValid) {
throw new AcmeBadCSRError({ message: `Invalid CSR: ${validationResult.errors.join(", ")}` });
}
// TODO: this is pretty slow, and we are holding the transaction open for a long time,
// we should queue the certificate issuance to a background job instead
const cert = await orderCertificate(
{
caId: certificateAuthority!.id,
// It is possible that the CSR does not have a common name, in which case we use an empty string
// (more likely than not for a CSR from a modern ACME client like certbot, cert-manager, etc.)
commonName: certificateRequest.commonName ?? "",
altNames: certificateRequest.subjectAlternativeNames?.map((san) => san.value),
csr: Buffer.from(csrPem),
// TODO: not 100% sure what these columns are for, but let's put the values for common website SSL certs for now
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT, CertKeyUsage.KEY_AGREEMENT],
extendedKeyUsages: [CertExtendedKeyUsage.SERVER_AUTH]
},
{
appConnectionDAL,
certificateAuthorityDAL,
externalCertificateAuthorityDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
kmsService,
projectDAL
}
);
return { certificateId: cert.id };
})();
const result = await processCertificateIssuanceForOrder({
caType,
accountId,
actorOrgId,
profileId,
orderId,
csr,
finalizingOrder,
certificateRequest,
profile,
ca,
tx
});
await acmeOrderDAL.updateById(
orderId,
{
status: AcmeOrderStatus.Valid,
status: result.certificateId ? AcmeOrderStatus.Valid : AcmeOrderStatus.Processing,
csr,
certificateId
certificateId: result.certificateId
},
tx
);
certIssuanceJobDataToReturn = result.certIssuanceJobData;
} catch (exp) {
await acmeOrderDAL.updateById(
orderId,
@@ -859,18 +1030,43 @@ export const pkiAcmeServiceFactory = ({
} else if (exp instanceof AcmeError) {
errorToReturn = exp;
} else {
errorToReturn = new AcmeServerInternalError({ message: "Failed to sign certificate with internal error" });
errorToReturn = new AcmeServerInternalError({
message: "Failed to sign certificate with internal error"
});
}
}
return {
order: (await acmeOrderDAL.findByAccountAndOrderIdWithAuthorizations(accountId, orderId, tx))!,
error: errorToReturn
error: errorToReturn,
certIssuanceJobData: certIssuanceJobDataToReturn
};
});
if (error) {
throw error;
}
if (certIssuanceJobData) {
// TODO: ideally, this should be done inside the transaction, but the pg-boss queue doesn't seem to support external
// transactions. We need to commit the transaction before queuing the job, otherwise the job will fail (not found error).
await certificateIssuanceQueue.queueCertificateIssuance(certIssuanceJobData);
}
order = updatedOrder;
await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId,
accountId
}
},
event: {
type: EventType.FINALIZE_ACME_ORDER,
metadata: {
orderId: updatedOrder.id,
csr: updatedOrder.csr!
}
}
});
} else if (order.status !== AcmeOrderStatus.Valid) {
throw new AcmeOrderNotReadyError({ message: "ACME order is not ready" });
}

@@ -898,14 +1094,16 @@ export const pkiAcmeServiceFactory = ({
if (!order) {
throw new NotFoundError({ message: "ACME order not found" });
}
if (order.status !== AcmeOrderStatus.Valid) {
// Sync the order first in case there is a certificate request that needs to be processed
const syncedOrder = await checkAndSyncAcmeOrderStatus({ orderId });
if (syncedOrder.status !== AcmeOrderStatus.Valid) {
throw new AcmeOrderNotReadyError({ message: "ACME order is not valid" });
}
if (!order.certificateId) {
if (!syncedOrder.certificateId) {
throw new NotFoundError({ message: "The certificate for this ACME order no longer exists" });
}

const certBody = await certificateBodyDAL.findOne({ certId: order.certificateId });
const certBody = await certificateBodyDAL.findOne({ certId: syncedOrder.certificateId });
const certificateManagerKeyId = await getProjectKmsCertificateKeyId({
projectId: profile.projectId,
projectDAL,
@@ -926,6 +1124,24 @@ export const pkiAcmeServiceFactory = ({

const certLeaf = certObj.toString("pem").trim().replace("\n", "\r\n");
const certChain = certificateChain.trim().replace("\n", "\r\n");

await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId,
accountId
}
},
event: {
type: EventType.DOWNLOAD_ACME_CERTIFICATE,
metadata: {
orderId
}
}
});

return {
status: 200,
body:
@@ -1008,6 +1224,7 @@ export const pkiAcmeServiceFactory = ({
authzId: string;
challengeId: string;
}): Promise<TAcmeResponse<TRespondToAcmeChallengeResponse>> => {
const profile = await validateAcmeProfile(profileId);
const result = await acmeChallengeDAL.findByAccountAuthAndChallengeId(accountId, authzId, challengeId);
if (!result) {
throw new NotFoundError({ message: "ACME challenge not found" });
@@ -1015,6 +1232,23 @@ export const pkiAcmeServiceFactory = ({
await acmeChallengeService.markChallengeAsReady(challengeId);
await pkiAcmeQueueService.queueChallengeValidation(challengeId);
const challenge = (await acmeChallengeDAL.findByIdForChallengeValidation(challengeId))!;
await auditLogService.createAuditLog({
projectId: profile.projectId,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId,
accountId
}
},
event: {
type: EventType.RESPOND_TO_ACME_CHALLENGE,
metadata: {
challengeId,
type: challenge.type as AcmeChallengeType
}
}
});
return {
status: 200,
body: {

@@ -72,7 +72,7 @@ type TScimServiceFactoryDep = {
TGroupDALFactory,
| "create"
| "findOne"
| "findAllGroupPossibleMembers"
| "findAllGroupPossibleUsers"
| "delete"
| "findGroups"
| "transaction"
@@ -952,7 +952,7 @@ export const scimServiceFactory = ({
}

const users = await groupDAL
.findAllGroupPossibleMembers({
.findAllGroupPossibleUsers({
orgId: group.orgId,
groupId: group.id
})

@@ -214,7 +214,10 @@ export const secretRotationV2DALFactory = (
tx?: Knex
) => {
try {
const extendedQuery = baseSecretRotationV2Query({ filter, db, tx, options })
const { limit, offset = 0, sort, ...queryOptions } = options || {};
const baseOptions = { ...queryOptions };

const subquery = baseSecretRotationV2Query({ filter, db, tx, options: baseOptions })
.join(
TableName.SecretRotationV2SecretMapping,
`${TableName.SecretRotationV2SecretMapping}.rotationId`,
@@ -233,6 +236,7 @@ export const secretRotationV2DALFactory = (
)
.leftJoin(TableName.ResourceMetadata, `${TableName.SecretV2}.id`, `${TableName.ResourceMetadata}.secretId`)
.select(
selectAllTableCols(TableName.SecretRotationV2),
db.ref("id").withSchema(TableName.SecretV2).as("secretId"),
db.ref("key").withSchema(TableName.SecretV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretV2).as("secretVersion"),
@@ -252,18 +256,31 @@ export const secretRotationV2DALFactory = (
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("id").withSchema(TableName.ResourceMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.ResourceMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue")
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue"),
db.raw(`DENSE_RANK() OVER (ORDER BY ${TableName.SecretRotationV2}."createdAt" DESC) as rank`)
);

if (search) {
void extendedQuery.where((query) => {
void query
void subquery.where((qb) => {
void qb
.whereILike(`${TableName.SecretV2}.key`, `%${search}%`)
.orWhereILike(`${TableName.SecretRotationV2}.name`, `%${search}%`);
});
}

const secretRotations = await extendedQuery;
let secretRotations: Awaited<typeof subquery>;
if (limit !== undefined) {
const rankOffset = offset + 1;
const queryWithLimit = (tx || db)
.with("inner", subquery)
.select("*")
.from("inner")
.where("inner.rank", ">=", rankOffset)
.andWhere("inner.rank", "<", rankOffset + limit);
secretRotations = (await queryWithLimit) as unknown as Awaited<typeof subquery>;
} else {
secretRotations = await subquery;
}
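// Pagination note (descriptive): DENSE_RANK() ranks whole rotations by "createdAt" rather than
// the joined secret/tag/metadata rows, so the rank window [offset + 1, offset + limit] returns
// complete rotations even though each rotation fans out into multiple joined rows.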

if (!secretRotations.length) return [];

@@ -106,6 +106,25 @@ export const GROUPS = {
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
LIST_MACHINE_IDENTITIES: {
id: "The ID of the group to list identities for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th identity.",
limit: "The number of identities to return.",
search: "The text string that machine identity name will be filtered by.",
filterMachineIdentities:
"Whether to filter the list of returned identities. 'assignedMachineIdentities' will only return identities assigned to the group, 'nonAssignedMachineIdentities' will only return identities not assigned to the group, undefined will return all identities in the organization."
},
LIST_MEMBERS: {
id: "The ID of the group to list members for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th member.",
limit: "The number of members to return.",
search:
"The text string that member email(in case of users) or name(in case of machine identities) will be filtered by.",
orderBy: "The column to order members by.",
orderDirection: "The direction to order members in.",
memberTypeFilter:
"Filter members by type. Can be a single value ('users' or 'machineIdentities') or an array of values. If not specified, both users and machine identities will be returned."
},
LIST_PROJECTS: {
id: "The ID of the group to list projects for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th project.",
@@ -120,12 +139,20 @@ export const GROUPS = {
id: "The ID of the group to add the user to.",
username: "The username of the user to add to the group."
},
ADD_MACHINE_IDENTITY: {
id: "The ID of the group to add the machine identity to.",
machineIdentityId: "The ID of the machine identity to add to the group."
},
GET_BY_ID: {
id: "The ID of the group to fetch."
},
DELETE_USER: {
id: "The ID of the group to remove the user from.",
username: "The username of the user to remove from the group."
},
DELETE_MACHINE_IDENTITY: {
id: "The ID of the group to remove the machine identity from.",
machineIdentityId: "The ID of the machine identity to remove from the group."
}
} as const;

@@ -103,3 +103,34 @@ export const deepEqualSkipFields = (obj1: unknown, obj2: unknown, skipFields: st

return deepEqual(filtered1, filtered2);
};

export const deterministicStringify = (value: unknown): string => {
if (value === null || value === undefined) {
return JSON.stringify(value);
}

if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
return JSON.stringify(value);
}

if (Array.isArray(value)) {
const items = value.map((item) => deterministicStringify(item));
return `[${items.join(",")}]`;
}

if (typeof value === "object") {
const sortedKeys = Object.keys(value).sort();
const sortedObj: Record<string, unknown> = {};
for (const key of sortedKeys) {
const val = (value as Record<string, unknown>)[key];
if (typeof val === "object" && val !== null) {
sortedObj[key] = JSON.parse(deterministicStringify(val));
} else {
sortedObj[key] = val;
}
}
return JSON.stringify(sortedObj);
}

return JSON.stringify(value);
};
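// Example (illustrative only): key order no longer affects the output, which makes the result
// safe to compare or hash for change detection:
//   deterministicStringify({ b: 1, a: { d: 2, c: 3 } }) === '{"a":{"c":3,"d":2},"b":1}'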

@@ -46,6 +46,7 @@ import { githubOrgSyncDALFactory } from "@app/ee/services/github-org-sync/github
import { githubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
import { groupDALFactory } from "@app/ee/services/group/group-dal";
import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { identityGroupMembershipDALFactory } from "@app/ee/services/group/identity-group-membership-dal";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { isHsmActiveAndEnabled } from "@app/ee/services/hsm/hsm-fns";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
@@ -470,6 +471,7 @@ export const registerRoutes = async (
const identityMetadataDAL = identityMetadataDALFactory(db);
const identityAccessTokenDAL = identityAccessTokenDALFactory(db);
const identityOrgMembershipDAL = identityOrgDALFactory(db);
const identityGroupMembershipDAL = identityGroupMembershipDALFactory(db);
const identityProjectDAL = identityProjectDALFactory(db);
const identityAuthTemplateDAL = identityAuthTemplateDALFactory(db);

@@ -754,6 +756,9 @@ export const registerRoutes = async (
membershipGroupDAL
});
const groupService = groupServiceFactory({
identityDAL,
membershipDAL,
identityGroupMembershipDAL,
userDAL,
groupDAL,
orgDAL,
@@ -2303,7 +2308,8 @@ export const registerRoutes = async (
});

const acmeChallengeService = pkiAcmeChallengeServiceFactory({
acmeChallengeDAL
acmeChallengeDAL,
auditLogService
});

const pkiAcmeQueueService = await pkiAcmeQueueServiceFactory({
@@ -2313,13 +2319,9 @@ export const registerRoutes = async (

const pkiAcmeService = pkiAcmeServiceFactory({
projectDAL,
appConnectionDAL,
certificateDAL,
certificateAuthorityDAL,
externalCertificateAuthorityDAL,
certificateProfileDAL,
certificateBodyDAL,
certificateSecretDAL,
certificateTemplateV2DAL,
acmeAccountDAL,
acmeOrderDAL,
@@ -2331,8 +2333,11 @@ export const registerRoutes = async (
licenseService,
certificateV3Service,
certificateTemplateV2Service,
certificateRequestService,
certificateIssuanceQueue,
acmeChallengeService,
pkiAcmeQueueService
pkiAcmeQueueService,
auditLogService
});

const pkiSubscriberService = pkiSubscriberServiceFactory({

@@ -47,7 +47,11 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
renewBeforeDays: z.number().min(1).max(30).optional()
})
.optional(),
acmeConfig: z.object({}).optional(),
acmeConfig: z
.object({
skipDnsOwnershipVerification: z.boolean().optional()
})
.optional(),
externalConfigs: ExternalConfigUnionSchema
})
.refine(
@@ -245,7 +249,8 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
acmeConfig: z
.object({
id: z.string(),
directoryUrl: z.string()
directoryUrl: z.string(),
skipDnsOwnershipVerification: z.boolean().optional()
})
.optional(),
externalConfigs: ExternalConfigUnionSchema
@@ -434,6 +439,11 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
renewBeforeDays: z.number().min(1).max(30).optional()
})
.optional(),
acmeConfig: z
.object({
skipDnsOwnershipVerification: z.boolean().optional()
})
.optional(),
externalConfigs: ExternalConfigUnionSchema
})
.refine(

@@ -316,13 +316,11 @@ export const registerCertificateRouter = async (server: FastifyZodProvider) => {
params: z.object({
requestId: z.string().uuid()
}),
query: z.object({
projectId: z.string().uuid()
}),
response: {
200: z.object({
status: z.nativeEnum(CertificateRequestStatus),
certificate: z.string().nullable(),
certificateId: z.string().nullable(),
privateKey: z.string().nullable(),
serialNumber: z.string().nullable(),
errorMessage: z.string().nullable(),
@@ -333,18 +331,17 @@ export const registerCertificateRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.certificateRequest.getCertificateFromRequest({
const { certificateRequest, projectId } = await server.services.certificateRequest.getCertificateFromRequest({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: (req.query as { projectId: string }).projectId,
certificateRequestId: req.params.requestId
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: (req.query as { projectId: string }).projectId,
projectId,
event: {
type: EventType.GET_CERTIFICATE_REQUEST,
metadata: {
@@ -352,7 +349,7 @@ export const registerCertificateRouter = async (server: FastifyZodProvider) => {
}
}
});
return data;
return certificateRequest;
}
});

@@ -624,7 +624,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SanitizedTagSchema.array().optional()
tags: SanitizedTagSchema.array().optional(),
reminder: RemindersSchema.extend({
recipients: z.string().array()
}).nullable()
})
.nullable()
.array()
@@ -743,6 +746,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
ReturnType<typeof server.services.secretRotationV2.getDashboardSecretRotations>
>[number]["secrets"][number] & {
isEmpty: boolean;
reminder: Awaited<ReturnType<typeof server.services.reminder.getRemindersForDashboard>>[string] | null;
}
> | null)[];
})[]
@@ -847,27 +851,38 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
);

if (remainingLimit > 0 && totalSecretRotationCount > adjustedOffset) {
secretRotations = (
await server.services.secretRotationV2.getDashboardSecretRotations(
{
projectId,
search,
orderBy,
orderDirection,
environments: [environment],
secretPath,
limit: remainingLimit,
offset: adjustedOffset
},
req.permission
)
).map((rotation) => ({
const rawSecretRotations = await server.services.secretRotationV2.getDashboardSecretRotations(
{
projectId,
search,
orderBy,
orderDirection,
environments: [environment],
secretPath,
limit: remainingLimit,
offset: adjustedOffset
},
req.permission
);

const allRotationSecretIds = rawSecretRotations
.flatMap((rotation) => rotation.secrets)
.filter((secret) => Boolean(secret))
.map((secret) => secret.id);

const rotationReminders =
allRotationSecretIds.length > 0
? await server.services.reminder.getRemindersForDashboard(allRotationSecretIds)
: {};

secretRotations = rawSecretRotations.map((rotation) => ({
...rotation,
secrets: rotation.secrets.map((secret) =>
secret
? {
...secret,
isEmpty: !secret.secretValue
isEmpty: !secret.secretValue,
reminder: rotationReminders[secret.id] ?? null
}
: secret
)
@@ -948,7 +963,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
search,
tagSlugs: tags,
includeTagsInSearch: true,
includeMetadataInSearch: true
includeMetadataInSearch: true,
excludeRotatedSecrets: includeSecretRotations
});

if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
@@ -970,7 +986,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
offset: adjustedOffset,
tagSlugs: tags,
includeTagsInSearch: true,
includeMetadataInSearch: true
includeMetadataInSearch: true,
excludeRotatedSecrets: includeSecretRotations
})
).secrets;

@@ -9,7 +9,7 @@ import {
TemporaryPermissionMode,
UsersSchema
} from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { FilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { isUuidV4 } from "@app/lib/validator";
@@ -355,9 +355,10 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
rateLimit: readLimit
},
schema: {
hide: false,
hide: true,
deprecated: true,
tags: [ApiDocsTags.ProjectGroups],
description: "Return project group users",
description: "Return project group users (Deprecated: Use /api/v1/groups/{id}/users instead)",
params: z.object({
projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
@@ -367,7 +368,7 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({

@@ -9,7 +9,7 @@ import {
TemporaryPermissionMode,
UsersSchema
} from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { FilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { isUuidV4 } from "@app/lib/validator";
@@ -367,7 +367,7 @@ export const registerDeprecatedGroupProjectRouter = async (server: FastifyZodPro
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({

@@ -41,6 +41,7 @@ export enum ActorType { // would extend to AWS, Azure, ...
IDENTITY = "identity",
Machine = "machine",
SCIM_CLIENT = "scimClient",
ACME_PROFILE = "acmeProfile",
ACME_ACCOUNT = "acmeAccount",
UNKNOWN_USER = "unknownUser"
}

@@ -168,7 +168,11 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
db.ref("autoRenew").withSchema(TableName.PkiApiEnrollmentConfig).as("apiConfigAutoRenew"),
db.ref("renewBeforeDays").withSchema(TableName.PkiApiEnrollmentConfig).as("apiConfigRenewBeforeDays"),
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigId"),
db.ref("encryptedEabSecret").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigEncryptedEabSecret")
db.ref("encryptedEabSecret").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigEncryptedEabSecret"),
db
.ref("skipDnsOwnershipVerification")
.withSchema(TableName.PkiAcmeEnrollmentConfig)
.as("acmeConfigSkipDnsOwnershipVerification")
)
.where(`${TableName.PkiCertificateProfile}.id`, id)
.first();
@@ -198,7 +202,8 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
const acmeConfig = result.acmeConfigId
? ({
id: result.acmeConfigId,
encryptedEabSecret: result.acmeConfigEncryptedEabSecret
encryptedEabSecret: result.acmeConfigEncryptedEabSecret,
skipDnsOwnershipVerification: result.acmeConfigSkipDnsOwnershipVerification ?? false
} as TCertificateProfileWithConfigs["acmeConfig"])
: undefined;

@@ -356,7 +361,11 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.PkiApiEnrollmentConfig).as("apiId"),
db.ref("autoRenew").withSchema(TableName.PkiApiEnrollmentConfig).as("apiAutoRenew"),
db.ref("renewBeforeDays").withSchema(TableName.PkiApiEnrollmentConfig).as("apiRenewBeforeDays"),
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeId")
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeId"),
db
.ref("skipDnsOwnershipVerification")
.withSchema(TableName.PkiAcmeEnrollmentConfig)
.as("acmeSkipDnsOwnershipVerification")
);

if (processedRules) {
@@ -393,7 +402,8 @@ export const certificateProfileDALFactory = (db: TDbClient) => {

const acmeConfig = result.acmeId
? {
id: result.acmeId as string
id: result.acmeId as string,
skipDnsOwnershipVerification: !!result.acmeSkipDnsOwnershipVerification
}
: undefined;

@@ -30,7 +30,11 @@ export const createCertificateProfileSchema = z
renewBeforeDays: z.number().min(1).max(30).optional()
})
.optional(),
acmeConfig: z.object({}).optional()
acmeConfig: z
.object({
skipDnsOwnershipVerification: z.boolean().optional()
})
.optional()
})
.refine(
(data) => {
@@ -155,6 +159,11 @@ export const updateCertificateProfileSchema = z
autoRenew: z.boolean().default(false),
renewBeforeDays: z.number().min(1).max(30).optional()
})
.optional(),
acmeConfig: z
.object({
skipDnsOwnershipVerification: z.boolean().optional()
})
.optional()
})
.refine(

@@ -403,7 +403,13 @@ export const certificateProfileServiceFactory = ({
apiConfigId = apiConfig.id;
} else if (data.enrollmentType === EnrollmentType.ACME && data.acmeConfig) {
const { encryptedEabSecret } = await generateAndEncryptAcmeEabSecret(projectId, kmsService, projectDAL);
const acmeConfig = await acmeEnrollmentConfigDAL.create({ encryptedEabSecret }, tx);
const acmeConfig = await acmeEnrollmentConfigDAL.create(
{
skipDnsOwnershipVerification: data.acmeConfig.skipDnsOwnershipVerification ?? false,
encryptedEabSecret
},
tx
);
acmeConfigId = acmeConfig.id;
}

@@ -505,7 +511,7 @@ export const certificateProfileServiceFactory = ({
const updatedData =
finalIssuerType === IssuerType.SELF_SIGNED && existingProfile.caId ? { ...data, caId: null } : data;

const { estConfig, apiConfig, ...profileUpdateData } = updatedData;
const { estConfig, apiConfig, acmeConfig, ...profileUpdateData } = updatedData;

const updatedProfile = await certificateProfileDAL.transaction(async (tx) => {
if (estConfig && existingProfile.estConfigId) {
@@ -547,6 +553,16 @@ export const certificateProfileServiceFactory = ({
);
}

if (acmeConfig && existingProfile.acmeConfigId) {
await acmeEnrollmentConfigDAL.updateById(
existingProfile.acmeConfigId,
{
skipDnsOwnershipVerification: acmeConfig.skipDnsOwnershipVerification ?? false
},
tx
);
}

const profileResult = await certificateProfileDAL.updateById(profileId, profileUpdateData, tx);
return profileResult;
});

@@ -46,7 +46,9 @@ export type TCertificateProfileUpdate = Omit<
autoRenew?: boolean;
renewBeforeDays?: number;
};
acmeConfig?: unknown;
acmeConfig?: {
skipDnsOwnershipVerification?: boolean;
};
};

export type TCertificateProfileWithConfigs = TCertificateProfile & {
@@ -83,6 +85,7 @@ export type TCertificateProfileWithConfigs = TCertificateProfile & {
id: string;
directoryUrl: string;
encryptedEabSecret?: Buffer;
skipDnsOwnershipVerification?: boolean;
};
};

@@ -258,7 +258,7 @@ describe("CertificateRequestService", () => {
(mockCertificateService.getCertBody as any).mockResolvedValue(mockCertBody);
(mockCertificateService.getCertPrivateKey as any).mockResolvedValue(mockPrivateKey);

const result = await service.getCertificateFromRequest(mockGetData);
const { certificateRequest, projectId } = await service.getCertificateFromRequest(mockGetData);

expect(mockCertificateRequestDAL.findByIdWithCertificate).toHaveBeenCalledWith(
"550e8400-e29b-41d4-a716-446655440005"
@@ -277,8 +277,9 @@ describe("CertificateRequestService", () => {
actorAuthMethod: AuthMethod.EMAIL,
actorOrgId: "550e8400-e29b-41d4-a716-446655440002"
});
expect(result).toEqual({
expect(certificateRequest).toEqual({
status: CertificateRequestStatus.ISSUED,
certificateId: "550e8400-e29b-41d4-a716-446655440006",
certificate: "-----BEGIN CERTIFICATE-----\nMOCK_CERT_PEM\n-----END CERTIFICATE-----",
privateKey: "-----BEGIN PRIVATE KEY-----\nMOCK_KEY_PEM\n-----END PRIVATE KEY-----",
serialNumber: "123456",
@@ -286,6 +287,7 @@ describe("CertificateRequestService", () => {
createdAt: mockRequestWithCert.createdAt,
updatedAt: mockRequestWithCert.updatedAt
});
expect(projectId).toEqual("550e8400-e29b-41d4-a716-446655440003");
});

it("should get certificate from request successfully when no certificate is attached", async () => {
@@ -310,10 +312,11 @@ describe("CertificateRequestService", () => {
(mockPermissionService.getProjectPermission as any).mockResolvedValue(mockPermission);
(mockCertificateRequestDAL.findByIdWithCertificate as any).mockResolvedValue(mockRequestWithoutCert);

const result = await service.getCertificateFromRequest(mockGetData);
const { certificateRequest, projectId } = await service.getCertificateFromRequest(mockGetData);

expect(result).toEqual({
expect(certificateRequest).toEqual({
status: CertificateRequestStatus.PENDING,
certificateId: null,
certificate: null,
privateKey: null,
serialNumber: null,
@@ -321,6 +324,7 @@ describe("CertificateRequestService", () => {
createdAt: mockRequestWithoutCert.createdAt,
updatedAt: mockRequestWithoutCert.updatedAt
});
expect(projectId).toEqual("550e8400-e29b-41d4-a716-446655440003");
});

it("should get certificate from request successfully when user lacks private key permission", async () => {
@@ -354,7 +358,7 @@ describe("CertificateRequestService", () => {
(mockCertificateRequestDAL.findByIdWithCertificate as any).mockResolvedValue(mockRequestWithCert);
(mockCertificateService.getCertBody as any).mockResolvedValue(mockCertBody);

const result = await service.getCertificateFromRequest(mockGetData);
const { certificateRequest, projectId } = await service.getCertificateFromRequest(mockGetData);

expect(mockCertificateRequestDAL.findByIdWithCertificate).toHaveBeenCalledWith(
"550e8400-e29b-41d4-a716-446655440005"
@@ -367,8 +371,9 @@ describe("CertificateRequestService", () => {
actorOrgId: "550e8400-e29b-41d4-a716-446655440002"
});
expect(mockCertificateService.getCertPrivateKey).not.toHaveBeenCalled();
expect(result).toEqual({
expect(certificateRequest).toEqual({
status: CertificateRequestStatus.ISSUED,
certificateId: "550e8400-e29b-41d4-a716-446655440008",
certificate: "-----BEGIN CERTIFICATE-----\nMOCK_CERT_PEM\n-----END CERTIFICATE-----",
privateKey: null,
serialNumber: "123456",
@@ -376,6 +381,7 @@ describe("CertificateRequestService", () => {
createdAt: mockRequestWithCert.createdAt,
updatedAt: mockRequestWithCert.updatedAt
});
expect(projectId).toEqual("550e8400-e29b-41d4-a716-446655440003");
});

it("should get certificate from request successfully when user has private key permission but key retrieval fails", async () => {
@@ -414,7 +420,7 @@ describe("CertificateRequestService", () => {
(mockCertificateService.getCertBody as any).mockResolvedValue(mockCertBody);
|
||||
(mockCertificateService.getCertPrivateKey as any).mockRejectedValue(new Error("Private key not found"));
|
||||
|
||||
const result = await service.getCertificateFromRequest(mockGetData);
|
||||
const { certificateRequest, projectId } = await service.getCertificateFromRequest(mockGetData);
|
||||
|
||||
expect(mockCertificateRequestDAL.findByIdWithCertificate).toHaveBeenCalledWith(
|
||||
"550e8400-e29b-41d4-a716-446655440005"
|
||||
@@ -433,8 +439,9 @@ describe("CertificateRequestService", () => {
|
||||
actorAuthMethod: AuthMethod.EMAIL,
|
||||
actorOrgId: "550e8400-e29b-41d4-a716-446655440002"
|
||||
});
|
||||
expect(result).toEqual({
|
||||
expect(certificateRequest).toEqual({
|
||||
status: CertificateRequestStatus.ISSUED,
|
||||
certificateId: "550e8400-e29b-41d4-a716-446655440009",
|
||||
certificate: "-----BEGIN CERTIFICATE-----\nMOCK_CERT_PEM\n-----END CERTIFICATE-----",
|
||||
privateKey: null,
|
||||
serialNumber: "123456",
|
||||
@@ -442,6 +449,7 @@ describe("CertificateRequestService", () => {
|
||||
createdAt: mockRequestWithCert.createdAt,
|
||||
updatedAt: mockRequestWithCert.updatedAt
|
||||
});
|
||||
expect(projectId).toEqual("550e8400-e29b-41d4-a716-446655440003");
|
||||
});
|
||||
|
||||
it("should get certificate from request with error message when failed", async () => {
|
||||
@@ -466,17 +474,19 @@ describe("CertificateRequestService", () => {
|
||||
(mockPermissionService.getProjectPermission as any).mockResolvedValue(mockPermission);
|
||||
(mockCertificateRequestDAL.findByIdWithCertificate as any).mockResolvedValue(mockFailedRequest);
|
||||
|
||||
const result = await service.getCertificateFromRequest(mockGetData);
|
||||
const { certificateRequest, projectId } = await service.getCertificateFromRequest(mockGetData);
|
||||
|
||||
expect(result).toEqual({
|
||||
expect(certificateRequest).toEqual({
|
||||
status: CertificateRequestStatus.FAILED,
|
||||
certificate: null,
|
||||
certificateId: null,
|
||||
privateKey: null,
|
||||
serialNumber: null,
|
||||
errorMessage: "Certificate issuance failed",
|
||||
createdAt: mockFailedRequest.createdAt,
|
||||
updatedAt: mockFailedRequest.updatedAt
|
||||
});
|
||||
expect(projectId).toEqual("550e8400-e29b-41d4-a716-446655440003");
|
||||
});
|
||||
|
||||
it("should throw NotFoundError when certificate request does not exist", async () => {
|
||||
|
||||
@@ -91,6 +91,7 @@ export const certificateRequestServiceFactory = ({
|
||||
permissionService
|
||||
}: TCertificateRequestServiceFactoryDep) => {
|
||||
const createCertificateRequest = async ({
|
||||
acmeOrderId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
@@ -123,6 +124,7 @@ export const certificateRequestServiceFactory = ({
|
||||
{
|
||||
status,
|
||||
projectId,
|
||||
acmeOrderId,
|
||||
...validatedData
|
||||
},
|
||||
tx
|
||||
@@ -170,13 +172,17 @@ export const certificateRequestServiceFactory = ({
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
projectId,
|
||||
certificateRequestId
|
||||
}: TGetCertificateFromRequestDTO) => {
|
||||
const certificateRequest = await certificateRequestDAL.findByIdWithCertificate(certificateRequestId);
|
||||
if (!certificateRequest) {
|
||||
throw new NotFoundError({ message: "Certificate request not found" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
projectId: certificateRequest.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.CertificateManager
|
||||
@@ -187,25 +193,20 @@ export const certificateRequestServiceFactory = ({
|
||||
ProjectPermissionSub.Certificates
|
||||
);
|
||||
|
||||
const certificateRequest = await certificateRequestDAL.findByIdWithCertificate(certificateRequestId);
|
||||
if (!certificateRequest) {
|
||||
throw new NotFoundError({ message: "Certificate request not found" });
|
||||
}
|
||||
|
||||
if (certificateRequest.projectId !== projectId) {
|
||||
throw new NotFoundError({ message: "Certificate request not found" });
|
||||
}
|
||||
|
||||
// If no certificate is attached, return basic info
|
||||
if (!certificateRequest.certificate) {
|
||||
return {
|
||||
status: certificateRequest.status as CertificateRequestStatus,
|
||||
certificate: null,
|
||||
privateKey: null,
|
||||
serialNumber: null,
|
||||
errorMessage: certificateRequest.errorMessage || null,
|
||||
createdAt: certificateRequest.createdAt,
|
||||
updatedAt: certificateRequest.updatedAt
|
||||
certificateRequest: {
|
||||
status: certificateRequest.status as CertificateRequestStatus,
|
||||
certificate: null,
|
||||
certificateId: null,
|
||||
privateKey: null,
|
||||
serialNumber: null,
|
||||
errorMessage: certificateRequest.errorMessage || null,
|
||||
createdAt: certificateRequest.createdAt,
|
||||
updatedAt: certificateRequest.updatedAt
|
||||
},
|
||||
projectId: certificateRequest.projectId
|
||||
};
|
||||
}
|
||||
|
||||
@@ -240,13 +241,17 @@ export const certificateRequestServiceFactory = ({
|
||||
}
|
||||
|
||||
return {
|
||||
status: certificateRequest.status as CertificateRequestStatus,
|
||||
certificate: certBody.certificate,
|
||||
privateKey,
|
||||
serialNumber: certificateRequest.certificate.serialNumber,
|
||||
errorMessage: certificateRequest.errorMessage || null,
|
||||
createdAt: certificateRequest.createdAt,
|
||||
updatedAt: certificateRequest.updatedAt
|
||||
certificateRequest: {
|
||||
status: certificateRequest.status as CertificateRequestStatus,
|
||||
certificate: certBody.certificate,
|
||||
certificateId: certificateRequest.certificate.id,
|
||||
privateKey,
|
||||
serialNumber: certificateRequest.certificate.serialNumber,
|
||||
errorMessage: certificateRequest.errorMessage || null,
|
||||
createdAt: certificateRequest.createdAt,
|
||||
updatedAt: certificateRequest.updatedAt
|
||||
},
|
||||
projectId: certificateRequest.projectId
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
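The hunks above change the method's contract: instead of a flat certificate payload keyed off a caller-supplied `projectId`, it now returns the certificate request alongside the `projectId` derived from the request row itself. A small sketch of the new result shape as a caller sees it (field types are simplified from the diff; this is not the actual DTO definition):

```typescript
// Simplified restatement of the new return shape from getCertificateFromRequest.
type TGetCertificateFromRequestResult = {
  certificateRequest: {
    status: string;
    certificateId: string | null;
    certificate: string | null;
    privateKey: string | null;
    serialNumber: string | null;
    errorMessage: string | null;
    createdAt: Date;
    updatedAt: Date;
  };
  // Now derived from the certificate request row, so callers no longer pass it in.
  projectId: string;
};

// Callers destructure both pieces, e.g. to attach the project to an audit log entry.
const describeResult = ({ certificateRequest, projectId }: TGetCertificateFromRequestResult) =>
  `request in project ${projectId} is ${certificateRequest.status}`;
```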
@@ -21,13 +21,14 @@ export type TCreateCertificateRequestDTO = TProjectPermission & {
|
||||
metadata?: string;
|
||||
status: CertificateRequestStatus;
|
||||
certificateId?: string;
|
||||
acmeOrderId?: string;
|
||||
};
|
||||
|
||||
export type TGetCertificateRequestDTO = TProjectPermission & {
|
||||
certificateRequestId: string;
|
||||
};
|
||||
|
||||
export type TGetCertificateFromRequestDTO = TProjectPermission & {
|
||||
export type TGetCertificateFromRequestDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
certificateRequestId: string;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,61 +1,13 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";

import { TAcmeEnrollmentConfigInsert, TAcmeEnrollmentConfigUpdate } from "./enrollment-config-types";

export type TAcmeEnrollmentConfigDALFactory = ReturnType<typeof acmeEnrollmentConfigDALFactory>;

export const acmeEnrollmentConfigDALFactory = (db: TDbClient) => {
const acmeEnrollmentConfigOrm = ormify(db, TableName.PkiAcmeEnrollmentConfig);

const create = async (data: TAcmeEnrollmentConfigInsert, tx?: Knex) => {
try {
const result = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).insert(data).returning("*");
const [acmeConfig] = result;

if (!acmeConfig) {
throw new Error("Failed to create ACME enrollment config");
}

return acmeConfig;
} catch (error) {
throw new DatabaseError({ error, name: "Create ACME enrollment config" });
}
};

const updateById = async (id: string, data: TAcmeEnrollmentConfigUpdate, tx?: Knex) => {
try {
const result = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).where({ id }).update(data).returning("*");
const [acmeConfig] = result;

if (!acmeConfig) {
return null;
}

return acmeConfig;
} catch (error) {
throw new DatabaseError({ error, name: "Update ACME enrollment config" });
}
};

const findById = async (id: string, tx?: Knex) => {
try {
const acmeConfig = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).where({ id }).first();

return acmeConfig || null;
} catch (error) {
throw new DatabaseError({ error, name: "Find ACME enrollment config by id" });
}
};

return {
...acmeEnrollmentConfigOrm,
create,
updateById,
findById
...acmeEnrollmentConfigOrm
};
};

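The hand-written `create`, `updateById`, and `findById` above could be deleted because `ormify` already supplies equivalents with the same call shapes used elsewhere in this diff (`create(data, tx)` and `updateById(id, data, tx)`). A rough sketch of that calling convention with the DAL stubbed out, since `ormify` itself is internal to the codebase:

```typescript
// Stand-in types: the real DAL is whatever ormify(db, TableName.PkiAcmeEnrollmentConfig) returns.
type TAcmeEnrollmentConfig = {
  id: string;
  skipDnsOwnershipVerification: boolean;
  encryptedEabSecret: Buffer;
};

interface TAcmeEnrollmentConfigDALLike {
  create(data: Partial<TAcmeEnrollmentConfig>, tx?: unknown): Promise<TAcmeEnrollmentConfig>;
  updateById(id: string, data: Partial<TAcmeEnrollmentConfig>, tx?: unknown): Promise<TAcmeEnrollmentConfig>;
}

// Mirrors the service-layer usage shown earlier in this diff: create the config on
// ACME enrollment, or flip the flag in place when the profile is updated.
const upsertAcmeConfig = async (
  dal: TAcmeEnrollmentConfigDALLike,
  existingId: string | null,
  skipDnsOwnershipVerification: boolean,
  encryptedEabSecret: Buffer,
  tx?: unknown
) => {
  if (existingId) {
    return dal.updateById(existingId, { skipDnsOwnershipVerification }, tx);
  }
  return dal.create({ skipDnsOwnershipVerification, encryptedEabSecret }, tx);
};
```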
@@ -37,4 +37,6 @@ export interface TApiConfigData {
renewBeforeDays?: number;
}

export interface TAcmeConfigData {}
export interface TAcmeConfigData {
skipDnsOwnershipVerification?: boolean;
}

@@ -10,7 +10,7 @@ import { TGroupDALFactory } from "../../ee/services/group/group-dal";
import { TProjectDALFactory } from "../project/project-dal";

type TGroupProjectServiceFactoryDep = {
groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleMembers">;
groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleUsers">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "findProjectGhostUser" | "findById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRoles">;
};
@@ -51,7 +51,7 @@ export const groupProjectServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);

const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
const { members, totalCount } = await groupDAL.findAllGroupPossibleUsers({
orgId: project.orgId,
groupId: id,
offset,

@@ -32,7 +32,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
const removeExpiredTokens = async (tx?: Knex) => {
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);

const BATCH_SIZE = 10000;
const BATCH_SIZE = 5000;
const MAX_RETRY_ON_FAILURE = 3;
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
const MAX_TTL = 315_360_000; // Maximum TTL value in seconds (10 years)
@@ -101,7 +101,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
setTimeout(resolve, 500); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;

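Both constants changed above trade cleanup throughput for less sustained load on the database: smaller batches and a longer pause between them. A generic, self-contained sketch of the batched-delete-with-retry pattern this function follows (the actual delete is stubbed out; only the constants echo the diff):

```typescript
// Batched cleanup loop in the spirit of removeExpiredTokens: delete in fixed-size
// batches, retry a failed batch a few times, and pause briefly between batches so the
// database can keep serving foreground traffic. deleteExpiredBatch is a stand-in.
const BATCH_SIZE = 5000;
const MAX_RETRY_ON_FAILURE = 3;
const PAUSE_MS = 500;

const sleep = (ms: number) => new Promise<void>((resolve) => { setTimeout(resolve, ms); });

const runBatchedCleanup = async (deleteExpiredBatch: (limit: number) => Promise<number>) => {
  let totalDeleted = 0;
  let retriesLeft = MAX_RETRY_ON_FAILURE;

  for (;;) {
    try {
      const deleted = await deleteExpiredBatch(BATCH_SIZE);
      totalDeleted += deleted;
      if (deleted < BATCH_SIZE) break; // nothing (or not enough) left to delete
      retriesLeft = MAX_RETRY_ON_FAILURE; // a successful batch resets the retry budget
    } catch (err) {
      if (retriesLeft === 0) throw err;
      retriesLeft -= 1;
    } finally {
      await sleep(PAUSE_MS); // time to breathe for the db, matching the change above
    }
  }

  return totalDeleted;
};
```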
@@ -25,12 +25,27 @@ export const projectDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findIdentityProjects = async (identityId: string, orgId: string, projectType?: ProjectType) => {
|
||||
try {
|
||||
const identityGroupSubquery = db
|
||||
.replicaNode()(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, orgId)
|
||||
.where(`${TableName.IdentityGroupMembership}.identityId`, identityId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const workspaces = await db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Project)
|
||||
.where(`${TableName.Membership}.actorIdentityId`, identityId)
|
||||
.join(TableName.Project, `${TableName.Membership}.scopeProjectId`, `${TableName.Project}.id`)
|
||||
.where(`${TableName.Project}.orgId`, orgId)
|
||||
.andWhere((qb) => {
|
||||
void qb
|
||||
.where(`${TableName.Membership}.actorIdentityId`, identityId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
|
||||
})
|
||||
.andWhere((qb) => {
|
||||
if (projectType) {
|
||||
void qb.where(`${TableName.Project}.type`, projectType);
|
||||
@@ -347,11 +362,25 @@ export const projectDALFactory = (db: TDbClient) => {
|
||||
.where(`${TableName.Groups}.orgId`, dto.orgId)
|
||||
.where(`${TableName.UserGroupMembership}.userId`, dto.actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const identityGroupMembershipSubquery = db
|
||||
.replicaNode()(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, dto.orgId)
|
||||
.where(`${TableName.IdentityGroupMembership}.identityId`, dto.actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const membershipSubQuery = db(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Project)
|
||||
.where((qb) => {
|
||||
if (dto.actor === ActorType.IDENTITY) {
|
||||
void qb.where(`${TableName.Membership}.actorIdentityId`, dto.actorId);
|
||||
void qb
|
||||
.where(`${TableName.Membership}.actorIdentityId`, dto.actorId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupMembershipSubquery);
|
||||
} else {
|
||||
void qb
|
||||
.where(`${TableName.Membership}.actorUserId`, dto.actorId)
|
||||
|
||||
@@ -1986,7 +1986,7 @@ export const projectServiceFactory = ({
if (project.type === ProjectType.SecretManager) {
projectTypeUrl = "secret-management";
} else if (project.type === ProjectType.CertificateManager) {
projectTypeUrl = "cert-management";
projectTypeUrl = "cert-manager";
}

const callbackPath = `/organizations/${project.orgId}/projects/${projectTypeUrl}/${project.id}/access-management?selectedTab=members&requesterEmail=${userDetails.email}`;

@@ -416,6 +416,7 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
|
||||
tagSlugs?: string[];
|
||||
includeTagsInSearch?: boolean;
|
||||
includeMetadataInSearch?: boolean;
|
||||
excludeRotatedSecrets?: boolean;
|
||||
}
|
||||
) => {
|
||||
try {
|
||||
@@ -481,6 +482,10 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
|
||||
);
|
||||
}
|
||||
|
||||
if (filters?.excludeRotatedSecrets) {
|
||||
void query.whereNull(`${TableName.SecretRotationV2SecretMapping}.secretId`);
|
||||
}
|
||||
|
||||
const secrets = await query;
|
||||
|
||||
// @ts-expect-error not inferred by knex
|
||||
@@ -594,6 +599,11 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
|
||||
void bd.whereIn(`${TableName.SecretTag}.slug`, slugs);
|
||||
}
|
||||
})
|
||||
.where((bd) => {
|
||||
if (filters?.excludeRotatedSecrets) {
|
||||
void bd.whereNull(`${TableName.SecretRotationV2SecretMapping}.secretId`);
|
||||
}
|
||||
})
|
||||
.orderBy(
|
||||
filters?.orderBy === SecretsOrderBy.Name ? "key" : "id",
|
||||
filters?.orderDirection ?? OrderByDirection.ASC
|
||||
|
||||
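Both hunks above rely on the same trick: the rotation mapping table is already left-joined into the secrets query, so "exclude rotated secrets" reduces to a `whereNull` on the mapping's `secretId`. A standalone Knex-flavored sketch of that filter (table and column names here are placeholders rather than the real schema):

```typescript
import { Knex } from "knex";

// Placeholder table names; the real ones come from TableName in the codebase.
const SECRETS = "secrets_v2";
const ROTATION_MAPPING = "secret_rotation_v2_secret_mapping";

// Applies the optional filter the same way the diff does: rows with no mapping entry
// (i.e. secrets not managed by a rotation) survive the whereNull check.
const applyRotatedSecretFilter = (query: Knex.QueryBuilder, excludeRotatedSecrets?: boolean) => {
  if (excludeRotatedSecrets) {
    void query.whereNull(`${ROTATION_MAPPING}.secretId`);
  }
  return query;
};

// Usage, assuming the caller already left-joined the mapping table:
// const rows = await applyRotatedSecretFilter(
//   db(SECRETS).leftJoin(ROTATION_MAPPING, `${ROTATION_MAPPING}.secretId`, `${SECRETS}.id`),
//   filters?.excludeRotatedSecrets
// );
```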
@@ -483,8 +483,8 @@ export const secretV2BridgeServiceFactory = ({
|
||||
});
|
||||
if (!sharedSecretToModify)
|
||||
throw new NotFoundError({ message: `Secret with name ${inputSecret.secretName} not found` });
|
||||
if (sharedSecretToModify.isRotatedSecret && (inputSecret.newSecretName || inputSecret.secretValue))
|
||||
throw new BadRequestError({ message: "Cannot update rotated secret name or value" });
|
||||
if (sharedSecretToModify.isRotatedSecret && inputSecret.newSecretName)
|
||||
throw new BadRequestError({ message: "Cannot update rotated secret name" });
|
||||
secretId = sharedSecretToModify.id;
|
||||
secret = sharedSecretToModify;
|
||||
}
|
||||
@@ -888,6 +888,7 @@ export const secretV2BridgeServiceFactory = ({
|
||||
| "tagSlugs"
|
||||
| "environment"
|
||||
| "search"
|
||||
| "excludeRotatedSecrets"
|
||||
>) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
@@ -1934,8 +1935,14 @@ export const secretV2BridgeServiceFactory = ({
|
||||
if (el.isRotatedSecret) {
|
||||
const input = secretsToUpdateGroupByPath[secretPath].find((i) => i.secretKey === el.key);
|
||||
|
||||
if (input && (input.newSecretName || input.secretValue))
|
||||
throw new BadRequestError({ message: `Cannot update rotated secret name or value: ${el.key}` });
|
||||
if (input) {
|
||||
if (input.newSecretName) {
|
||||
delete input.newSecretName;
|
||||
}
|
||||
if (input.secretValue !== undefined) {
|
||||
delete input.secretValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -2061,8 +2068,11 @@ export const secretV2BridgeServiceFactory = ({
|
||||
commitChanges,
|
||||
inputSecrets: secretsToUpdate.map((el) => {
|
||||
const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
|
||||
const shouldUpdateValue = !originalSecret.isRotatedSecret && typeof el.secretValue !== "undefined";
|
||||
const shouldUpdateName = !originalSecret.isRotatedSecret && el.newSecretName;
|
||||
|
||||
const encryptedValue =
|
||||
typeof el.secretValue !== "undefined"
|
||||
shouldUpdateValue && el.secretValue !== undefined
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
|
||||
references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
|
||||
@@ -2077,7 +2087,7 @@ export const secretV2BridgeServiceFactory = ({
|
||||
(value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
|
||||
),
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.newSecretName || el.secretKey,
|
||||
key: shouldUpdateName ? el.newSecretName : el.secretKey,
|
||||
tags: el.tagIds,
|
||||
secretMetadata: el.secretMetadata,
|
||||
...encryptedValue
|
||||
|
||||
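The bulk-update hunk above funnels every rotated secret through two guards: `shouldUpdateValue` and `shouldUpdateName` both require `!originalSecret.isRotatedSecret`, so the rotation integration stays the single source of truth for both the key and the value. A self-contained restatement of that rule (type names are simplified stand-ins, and the real service encrypts the value with the project KMS key before storing it):

```typescript
type ExistingSecret = { key: string; isRotatedSecret: boolean };
type UpdateInput = { secretKey: string; newSecretName?: string; secretValue?: string };

// Rotated secrets ignore caller-supplied renames and values; plain secrets apply both.
const buildUpdatePatch = (original: ExistingSecret, el: UpdateInput) => {
  const shouldUpdateValue = !original.isRotatedSecret && typeof el.secretValue !== "undefined";
  const shouldUpdateName = !original.isRotatedSecret && Boolean(el.newSecretName);

  return {
    key: shouldUpdateName && el.newSecretName ? el.newSecretName : el.secretKey,
    ...(shouldUpdateValue ? { value: el.secretValue } : {})
  };
};

// A rotated secret keeps its key and managed value no matter what the update asks for.
buildUpdatePatch(
  { key: "DB_PASSWORD", isRotatedSecret: true },
  { secretKey: "DB_PASSWORD", newSecretName: "DB_PASS", secretValue: "hunter2" }
); // => { key: "DB_PASSWORD" }
```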
@@ -50,6 +50,7 @@ export type TGetSecretsDTO = {
|
||||
limit?: number;
|
||||
search?: string;
|
||||
keys?: string[];
|
||||
excludeRotatedSecrets?: boolean;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetSecretsMissingReadValuePermissionDTO = Omit<
|
||||
@@ -362,6 +363,7 @@ export type TFindSecretsByFolderIdsFilter = {
|
||||
includeTagsInSearch?: boolean;
|
||||
includeMetadataInSearch?: boolean;
|
||||
keys?: string[];
|
||||
excludeRotatedSecrets?: boolean;
|
||||
};
|
||||
|
||||
export type TGetSecretsRawByFolderMappingsDTO = {
|
||||
|
||||
@@ -200,6 +200,11 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SecretVersionV2}.userActorId`)
|
||||
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.SecretVersionV2}.identityActorId`)
|
||||
.leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.identityId`,
|
||||
`${TableName.Identity}.id`
|
||||
)
|
||||
.leftJoin(TableName.Membership, (qb) => {
|
||||
void qb
|
||||
.on(`${TableName.Membership}.scope`, db.raw("?", [AccessScope.Project]))
|
||||
@@ -208,7 +213,8 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
void sqb
|
||||
.on(`${TableName.Membership}.actorUserId`, `${TableName.SecretVersionV2}.userActorId`)
|
||||
.orOn(`${TableName.Membership}.actorIdentityId`, `${TableName.SecretVersionV2}.identityActorId`)
|
||||
.orOn(`${TableName.Membership}.actorGroupId`, `${TableName.UserGroupMembership}.groupId`);
|
||||
.orOn(`${TableName.Membership}.actorGroupId`, `${TableName.UserGroupMembership}.groupId`)
|
||||
.orOn(`${TableName.Membership}.actorGroupId`, `${TableName.IdentityGroupMembership}.groupId`);
|
||||
});
|
||||
})
|
||||
.leftJoin(TableName.SecretV2, `${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
|
||||
|
||||
@@ -1154,6 +1154,7 @@ export const secretServiceFactory = ({
|
||||
| "search"
|
||||
| "includeTagsInSearch"
|
||||
| "includeMetadataInSearch"
|
||||
| "excludeRotatedSecrets"
|
||||
>) => {
|
||||
const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
|
||||
|
||||
|
||||
@@ -214,6 +214,7 @@ export type TGetSecretsRawDTO = {
|
||||
keys?: string[];
|
||||
includeTagsInSearch?: boolean;
|
||||
includeMetadataInSearch?: boolean;
|
||||
excludeRotatedSecrets?: boolean;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetSecretAccessListDTO = {
|
||||
|
||||
@@ -61,9 +61,7 @@ export const PkiExpirationAlertTemplate = ({
</Section>

<Section className="text-center mt-[32px] mb-[16px]">
<BaseButton href={`${siteUrl}/projects/cert-management/${projectId}/policies`}>
View Certificate Alerts
</BaseButton>
<BaseButton href={`${siteUrl}/projects/cert-manager/${projectId}/policies`}>View Certificate Alerts</BaseButton>
</Section>
</BaseEmailWrapper>
);

@@ -1,4 +1,6 @@
|
||||
import {
|
||||
AcmeAccountActor,
|
||||
AcmeProfileActor,
|
||||
IdentityActor,
|
||||
KmipClientActor,
|
||||
PlatformActor,
|
||||
@@ -60,6 +62,8 @@ export type TSecretModifiedEvent = {
|
||||
| ScimClientActor
|
||||
| PlatformActor
|
||||
| UnknownUserActor
|
||||
| AcmeAccountActor
|
||||
| AcmeProfileActor
|
||||
| KmipClientActor;
|
||||
};
|
||||
};
|
||||
|
||||
@@ -9,7 +9,7 @@ services:
condition: service_healthy
redis:
condition: service_started
image: infisical/infisical:latest-postgres
image: infisical/infisical:latest # PIN THIS TO A SPECIFIC TAG
pull_policy: always
env_file: .env
ports:

@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get Certificate Request"
|
||||
openapi: "GET /api/v1/cert-manager/certificates/certificate-requests/{requestId}"
|
||||
---
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Issue Certificate"
|
||||
openapi: "POST /api/v1/cert-manager/certificates"
|
||||
---
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Add Machine Identity to Group"
|
||||
openapi: "POST /api/v1/groups/{id}/machine-identities/{machineIdentityId}"
|
||||
---
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "List Group Machine Identities"
|
||||
openapi: "GET /api/v1/groups/{id}/machine-identities"
|
||||
---
|
||||
@@ -0,0 +1,5 @@
|
||||
---
|
||||
title: "List Group Members"
|
||||
openapi: "GET /api/v1/groups/{id}/members"
|
||||
---
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
---
|
||||
title: "List Group Projects"
|
||||
openapi: "GET /api/v1/groups/{id}/projects"
|
||||
---
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Remove Machine Identity from Group"
|
||||
openapi: "DELETE /api/v1/groups/{id}/machine-identities/{machineIdentityId}"
|
||||
---
|
||||
203
docs/docs.json
203
docs/docs.json
@@ -664,7 +664,16 @@
|
||||
"group": "Concepts",
|
||||
"pages": [
|
||||
"documentation/platform/pki/concepts/certificate-mgmt",
|
||||
"documentation/platform/pki/concepts/certificate-lifecycle"
|
||||
"documentation/platform/pki/concepts/certificate-lifecycle",
|
||||
"documentation/platform/pki/concepts/certificate-components"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Guides",
|
||||
"pages": [
|
||||
"documentation/platform/pki/guides/request-cert-agent",
|
||||
"documentation/platform/pki/guides/request-cert-api",
|
||||
"documentation/platform/pki/guides/request-cert-acme"
|
||||
]
|
||||
}
|
||||
]
|
||||
@@ -704,6 +713,7 @@
|
||||
{
|
||||
"group": "Infrastructure Integrations",
|
||||
"pages": [
|
||||
"integrations/platforms/certificate-agent",
|
||||
"documentation/platform/pki/k8s-cert-manager",
|
||||
"documentation/platform/pki/integration-guides/gloo-mesh",
|
||||
"documentation/platform/pki/integration-guides/windows-server-acme",
|
||||
@@ -878,7 +888,12 @@
|
||||
"api-reference/endpoints/groups/get-by-id",
|
||||
"api-reference/endpoints/groups/add-group-user",
|
||||
"api-reference/endpoints/groups/remove-group-user",
|
||||
"api-reference/endpoints/groups/list-group-users"
|
||||
"api-reference/endpoints/groups/list-group-users",
|
||||
"api-reference/endpoints/groups/add-group-machine-identity",
|
||||
"api-reference/endpoints/groups/remove-group-machine-identity",
|
||||
"api-reference/endpoints/groups/list-group-machine-identities",
|
||||
"api-reference/endpoints/groups/list-group-projects",
|
||||
"api-reference/endpoints/groups/list-group-members"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -2508,7 +2523,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Infisical PKI",
|
||||
"group": "Certificate Management",
|
||||
"pages": [
|
||||
{
|
||||
"group": "Certificate Authorities",
|
||||
@@ -2547,6 +2562,8 @@
|
||||
"pages": [
|
||||
"api-reference/endpoints/certificates/list",
|
||||
"api-reference/endpoints/certificates/read",
|
||||
"api-reference/endpoints/certificates/certificate-request",
|
||||
"api-reference/endpoints/certificates/create-certificate",
|
||||
"api-reference/endpoints/certificates/renew",
|
||||
"api-reference/endpoints/certificates/update-config",
|
||||
"api-reference/endpoints/certificates/revoke",
|
||||
@@ -3096,6 +3113,186 @@
|
||||
{
|
||||
"source": "/documentation/platform/pki/est",
|
||||
"destination": "/documentation/platform/pki/enrollment-methods/est"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/create-auth",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/create",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/delete-auth-by-id",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/delete-auth",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/delete",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/find-auth",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/list-auth",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/list-project-integrations",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/endpoints/integrations/update",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/overview/examples/integration",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/documentation/platform/integrations",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cicd/circleci",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cicd/codefresh",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cicd/octopus-deploy",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cicd/rundeck",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cicd/travisci",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/aws-parameter-store",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/aws-secret-manager",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/azure-app-configuration",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/azure-devops",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/azure-key-vault",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/checkly",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/cloud-66",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/cloudflare-pages",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/cloudflare-workers",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/databricks",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/digital-ocean-app-platform",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/flyio",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/gcp-secret-manager",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/hashicorp-vault",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/hasura-cloud",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/heroku",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/laravel-forge",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/netlify",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/northflank",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/qovery",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/railway",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/render",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/supabase",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/terraform-cloud",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/vercel",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/windmill",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/overview",
|
||||
"destination": "/integrations/secret-syncs"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/aws-amplify",
|
||||
"destination": "/integrations/cicd/aws-amplify"
|
||||
},
|
||||
{
|
||||
"source": "/integrations/cloud/teamcity",
|
||||
"destination": "/integrations/secret-syncs/teamcity"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,29 +1,29 @@
|
||||
---
|
||||
title: "User Groups"
|
||||
description: "Manage user groups in Infisical."
|
||||
title: "Groups"
|
||||
description: "Manage groups containing users and machine identities in Infisical."
|
||||
---
|
||||
|
||||
<Info>
|
||||
User Groups is a paid feature.
|
||||
|
||||
If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
|
||||
then you should contact team@infisical.com to purchase an enterprise license to use it.
|
||||
Groups is a paid feature. If you're using Infisical Cloud, then it is
|
||||
available under the **Enterprise Tier**. If you're self-hosting Infisical,
|
||||
then you should contact team@infisical.com to purchase an enterprise license
|
||||
to use it.
|
||||
</Info>
|
||||
|
||||
## Concept
|
||||
|
||||
A (user) group is a collection of users that you can create in an Infisical organization to more efficiently manage permissions and access control for multiple users together. For example, you can have a group called `Developers` with the `Developer` role containing all the developers in your organization.
|
||||
A group is a collection of identities (users and/or machine identities) that you can create in an Infisical organization to more efficiently manage permissions and access control for multiple identities together. For example, you can have a group called `Developers` with the `Developer` role containing all the developers in your organization, or a group called `CI/CD Identities` containing all the machine identities used in your CI/CD pipelines.
|
||||
|
||||
User groups have the following properties:
|
||||
Groups have the following properties:
|
||||
|
||||
- If a group is added to a project under specific role(s), all users in the group will be provisioned access to the project with the role(s). Conversely, if a group is removed from a project, all users in the group will lose access to the project.
|
||||
- If a user is added to a group, they will inherit the access control properties of the group including access to project(s) under the role(s) assigned to the group. Conversely, if a user is removed from a group, they will lose access to project(s) that the group has access to.
|
||||
- If a user was previously added to a project under a role and is later added to a group that has access to the same project under a different role, then the user will now have access to the project under the composite permissions of the two roles. If the group is subsequently removed from the project, the user will not lose access to the project as they were previously added to the project separately.
|
||||
- A user can be part of multiple groups. If a user is part of multiple groups, they will inherit the composite permissions of all the groups that they are part of.
|
||||
- If a group is added to a project under specific role(s), all identities in the group will be provisioned access to the project with the role(s). Conversely, if a group is removed from a project, all identities in the group will lose access to the project.
|
||||
- If an identity is added to a group, they will inherit the access control properties of the group including access to project(s) under the role(s) assigned to the group. Conversely, if an identity is removed from a group, they will lose access to project(s) that the group has access to.
|
||||
- If an identity was previously added to a project under a role and is later added to a group that has access to the same project under a different role, then the identity will now have access to the project under the composite permissions of the two roles. If the group is subsequently removed from the project, the identity will not lose access to the project as they were previously added to the project separately.
|
||||
- An identity can be part of multiple groups. If an identity is part of multiple groups, they will inherit the composite permissions of all the groups that they are part of.
|
||||
|
||||
## Workflow
|
||||
|
||||
In the following steps, we explore how to create and use user groups to provision user access to projects in Infisical.
|
||||
In the following steps, we explore how to create and use groups to provision access to projects in Infisical. Groups can contain both users and machine identities, and the workflow is the same for both types of identities.
|
||||
|
||||
<Steps>
|
||||
<Step title="Creating a group">
|
||||
@@ -32,36 +32,38 @@ In the following steps, we explore how to create and use user groups to provisio
|
||||

|
||||
|
||||
When creating a group, you specify an organization level [role](/documentation/platform/access-controls/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
Now input a few details for your new group. Here’s some guidance for each field:
|
||||
- Name (required): A friendly name for the group like `Engineering`.
|
||||
- Slug (required): A unique identifier for the group like `engineering`.
|
||||
- Role (required): A role from the Organization Roles tab for the group to assume. The organization role assigned will determine what organization level resources this group can have access to.
|
||||
|
||||
</Step>
|
||||
<Step title="Adding users to the group">
|
||||
Next, you'll want to assign users to the group. To do this, press on the users icon on the group and start assigning users to the group.
|
||||
<Step title="Adding identities to the group">
|
||||
Next, you'll want to assign identities (users and/or machine identities) to the group. To do this, click on the group row to open the group details page and click on the **+** button.
|
||||
|
||||

|
||||

|
||||
|
||||
In this example, we're assigning **Alan Turing** and **Ada Lovelace** to the group **Engineering**.
|
||||
In this example, we're assigning **Alan Turing** and **Ada Lovelace** (users) to the group **Engineering**. You can similarly add machine identities to the group by selecting them from the **Machine Identities** tab in the modal.
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Adding the group to a project">
|
||||
To enable the group to access project-level resources such as secrets within a specific project, you should add it to that project.
|
||||
|
||||
To do this, head over to the project you want to add the group to and go to Project Settings > Access Control > Groups and press **Add group**.
|
||||
|
||||
To do this, head over to the project you want to add the group to and go to Project Settings > Access Control > Groups and press **Add Group to Project**.
|
||||
|
||||

|
||||
|
||||
|
||||
Next, select the group you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this group can have access to.
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
That's it!
|
||||
|
||||
The users of the group now have access to the project under the role you assigned to the group.
|
||||
|
||||
All identities of the group now have access to the project under the role you assigned to the group.
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
</Steps>
|
||||
|
||||
258
docs/documentation/platform/pam/resources/aws-iam.mdx
Normal file
258
docs/documentation/platform/pam/resources/aws-iam.mdx
Normal file
@@ -0,0 +1,258 @@
|
||||
---
|
||||
title: "AWS IAM"
|
||||
sidebarTitle: "AWS IAM"
|
||||
description: "Learn how to configure AWS Management Console access through Infisical PAM for secure, audited, and just-in-time access to AWS."
|
||||
---
|
||||
|
||||
Infisical PAM supports secure, just-in-time access to the **AWS Management Console** through federated sign-in. This allows your team to access AWS without sharing long-lived credentials, while maintaining a complete audit trail of who accessed what and when.
|
||||
|
||||
## How It Works
|
||||
|
||||
Unlike database or SSH resources that require a Gateway for network connectivity, AWS Console access works differently. Infisical uses AWS STS (Security Token Service) to assume roles on your behalf and generates temporary federated sign-in URLs.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant User
|
||||
participant Infisical
|
||||
participant Resource Role as Resource Role<br/>(Your AWS Account)
|
||||
participant Target Role as Target Role<br/>(Your AWS Account)
|
||||
participant Console as AWS Console
|
||||
|
||||
User->>Infisical: Request AWS Console access
|
||||
Infisical->>Resource Role: AssumeRole (with ExternalId)
|
||||
Resource Role-->>Infisical: Temporary credentials
|
||||
Infisical->>Target Role: AssumeRole (role chaining)
|
||||
Target Role-->>Infisical: Session credentials
|
||||
Infisical->>Console: Generate federation URL
|
||||
Console-->>Infisical: Signed console URL
|
||||
Infisical-->>User: Return console URL
|
||||
User->>Console: Open AWS Console (federated)
|
||||
```
|
||||
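A rough sketch of the role-chaining and console-federation flow pictured above, using the AWS SDK for JavaScript v3. The ARNs, region, and external ID are placeholders, and the sign-in endpoint is AWS's documented federation API rather than anything Infisical-specific; treat this as an illustration of the flow, not Infisical's implementation:

```typescript
import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts";

const REGION = "us-east-1"; // placeholder
const RESOURCE_ROLE_ARN = "arn:aws:iam::111111111111:role/InfisicalResourceRole"; // placeholder
const TARGET_ROLE_ARN = "arn:aws:iam::111111111111:role/pam-readonly"; // placeholder
const EXTERNAL_ID = "<your-infisical-project-id>"; // placeholder

const getConsoleUrl = async (userEmail: string) => {
  // Step 1: assume the Resource Role, satisfying the External ID condition in its trust policy.
  const sts = new STSClient({ region: REGION });
  const bridge = await sts.send(
    new AssumeRoleCommand({ RoleArn: RESOURCE_ROLE_ARN, RoleSessionName: "infisical-pam", ExternalId: EXTERNAL_ID })
  );

  // Step 2: role chaining - use the bridge credentials to assume the Target Role.
  // AWS caps chained sessions at 1 hour regardless of the role's own maximum.
  const chainedSts = new STSClient({
    region: REGION,
    credentials: {
      accessKeyId: bridge.Credentials!.AccessKeyId!,
      secretAccessKey: bridge.Credentials!.SecretAccessKey!,
      sessionToken: bridge.Credentials!.SessionToken!
    }
  });
  const target = await chainedSts.send(
    new AssumeRoleCommand({ RoleArn: TARGET_ROLE_ARN, RoleSessionName: userEmail, DurationSeconds: 3600 })
  );

  // Step 3: exchange the session credentials for a sign-in token, then build the console URL.
  const session = JSON.stringify({
    sessionId: target.Credentials!.AccessKeyId,
    sessionKey: target.Credentials!.SecretAccessKey,
    sessionToken: target.Credentials!.SessionToken
  });
  const tokenRes = await fetch(
    `https://signin.aws.amazon.com/federation?Action=getSigninToken&Session=${encodeURIComponent(session)}`
  );
  const { SigninToken } = (await tokenRes.json()) as { SigninToken: string };

  return (
    `https://signin.aws.amazon.com/federation?Action=login` +
    `&Issuer=${encodeURIComponent("https://app.infisical.com")}` +
    `&Destination=${encodeURIComponent("https://console.aws.amazon.com/")}` +
    `&SigninToken=${SigninToken}`
  );
};
```

The chained session in step 2 is what caps the duration at one hour, which is why the account-level session duration described later cannot exceed `1h`.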
|
||||
### Key Concepts
|
||||
|
||||
1. **Resource Role**: An IAM role in your AWS account that trusts Infisical. This is the "bridge" role that Infisical assumes first.
|
||||
|
||||
2. **Target Role**: The IAM role that end users will actually use in the AWS Console. The Resource Role assumes this role on behalf of the user.
|
||||
|
||||
3. **Role Chaining**: Infisical uses AWS role chaining - it first assumes the Resource Role, then uses those credentials to assume the Target Role. This provides an additional layer of security and audit capability.
|
||||
|
||||
4. **External ID**: A unique identifier (your Infisical Project ID) used in the trust policy to prevent [confused deputy attacks](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
|
||||
|
||||
## Session Behavior
|
||||
|
||||
### Session Duration
|
||||
|
||||
The session duration is set when creating the account and applies to all access requests. You can specify the duration using human-readable formats like `15m`, `30m`, or `1h`. Due to AWS role chaining limitations:
|
||||
|
||||
- **Minimum**: 15 minutes (`15m`)
|
||||
- **Maximum**: 1 hour (`1h`)
|
||||
|
||||
### Session Tracking
|
||||
|
||||
Infisical tracks:
|
||||
- When the session was created
|
||||
- Who accessed which role
|
||||
- When the session expires
|
||||
|
||||
<Info>
|
||||
**Important**: AWS Console sessions cannot be terminated early. Once a federated URL is generated, the session remains valid until the configured duration expires. However, you can [revoke active sessions](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_revoke-sessions.html) by modifying the role's trust policy.
|
||||
</Info>
|
||||
|
||||
### CloudTrail Integration
|
||||
|
||||
All actions performed in the AWS Console are logged in [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail). The session is identified by the `RoleSessionName`, which includes the user's email address for attribution:
|
||||
|
||||
```
|
||||
arn:aws:sts::123456789012:assumed-role/pam-readonly/user@example.com
|
||||
```
|
||||
|
||||
This allows you to correlate Infisical PAM sessions with CloudTrail logs for complete audit visibility.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before configuring AWS Console access in Infisical PAM, you need to set up two IAM roles in your AWS account:
|
||||
|
||||
1. **Resource Role** - Trusted by Infisical, can assume target roles
|
||||
2. **Target Role(s)** - The actual roles users will use in the console
|
||||
|
||||
<Info>
|
||||
**No Gateway Required**: Unlike database or SSH resources, AWS Console access does not require an Infisical Gateway. Infisical communicates directly with AWS APIs.
|
||||
</Info>
|
||||
|
||||
## Create the PAM Resource
|
||||
|
||||
The PAM Resource represents the connection between Infisical and your AWS account. It contains the Resource Role that Infisical will assume.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create the Resource Role Permissions Policy">
|
||||
First, create an IAM policy that allows the Resource Role to assume your target roles. For simplicity, you can use a wildcard to allow assuming any role in your account:
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [{
|
||||
"Effect": "Allow",
|
||||
"Action": "sts:AssumeRole",
|
||||
"Resource": "arn:aws:iam::<YOUR_ACCOUNT_ID>:role/*"
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
|
||||
<Note>
|
||||
**For more granular control**: If you want to restrict which roles the Resource Role can assume, replace the wildcard (`/*`) with a more specific pattern. For example:
|
||||
- `arn:aws:iam::<YOUR_ACCOUNT_ID>:role/pam-*` to only allow roles with the `pam-` prefix
|
||||
- `arn:aws:iam::<YOUR_ACCOUNT_ID>:role/infisical-*` to only allow roles with the `infisical-` prefix
|
||||
|
||||
This allows you to limit the blast radius of the Resource Role's permissions.
|
||||
</Note>
|
||||
</Step>
|
||||
|
||||
<Step title="Create the Resource Role with Trust Policy">
|
||||
Create an IAM role (e.g., `InfisicalResourceRole`) with:
|
||||
- The permissions policy from the previous step attached
|
||||
- The following trust policy:
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [{
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": "arn:aws:iam::<INFISICAL_AWS_ACCOUNT_ID>:root"
|
||||
},
|
||||
"Action": "sts:AssumeRole",
|
||||
"Condition": {
|
||||
"StringEquals": {
|
||||
"sts:ExternalId": "<YOUR_INFISICAL_PROJECT_ID>"
|
||||
}
|
||||
}
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
<Warning>
|
||||
**Security Best Practice**: Always use the External ID condition. This prevents confused deputy attacks where another Infisical customer could potentially trick Infisical into assuming your role.
|
||||
</Warning>
|
||||
|
||||
**Infisical AWS Account IDs:**
|
||||
| Region | Account ID |
|
||||
|--------|------------|
|
||||
| US | `381492033652` |
|
||||
| EU | `345594589636` |
|
||||
|
||||
<Note>
|
||||
**For Dedicated Instances**: Your AWS account ID differs from the ones listed above. Please contact Infisical support to obtain your dedicated AWS account ID.
|
||||
</Note>
|
||||
|
||||
<Note>
|
||||
**For Self-Hosted Instances**: Use the AWS account ID where your Infisical instance is deployed. This is the account that hosts your Infisical infrastructure and will be assuming the Resource Role.
|
||||
</Note>
|
||||
</Step>
|
||||
|
||||
<Step title="Create the Resource in Infisical">
|
||||
1. Navigate to your PAM project and go to the **Resources** tab
|
||||
2. Click **Add Resource** and select **AWS IAM**
|
||||
3. Enter a name for the resource (e.g., `production-aws`)
|
||||
4. Enter the **Resource Role ARN** - the ARN of the role you created in the previous step
|
||||
|
||||

|
||||
|
||||
Clicking **Create Resource** will validate that Infisical can assume the Resource Role. If the connection fails, verify:
|
||||
- The trust policy has the correct Infisical AWS account ID
|
||||
- The External ID matches your project ID
|
||||
- The role ARN is correct
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Create PAM Accounts
|
||||
|
||||
A PAM Account represents a specific Target Role that users can request access to. You can create multiple accounts per resource, each pointing to a different target role with different permission levels.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create the Target Role Trust Policy">
|
||||
Each target role needs a trust policy that allows your Resource Role to assume it:
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [{
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": "arn:aws:iam::<YOUR_ACCOUNT_ID>:role/InfisicalResourceRole"
|
||||
},
|
||||
"Action": "sts:AssumeRole",
|
||||
"Condition": {
|
||||
"StringEquals": {
|
||||
"sts:ExternalId": "<YOUR_INFISICAL_PROJECT_ID>"
|
||||
}
|
||||
}
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Create the Account in Infisical">
|
||||
1. Navigate to the **Accounts** tab in your PAM project
|
||||
2. Click **Add Account** and select the AWS IAM resource you created
|
||||
3. Fill in the account details:
|
||||
|
||||

|
||||
|
||||
<ParamField path="Name" type="string" required>
|
||||
A friendly name for this account (e.g., `readonly`, `admin`, `developer`)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Description" type="string">
|
||||
Optional description of what this account is used for
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Target Role ARN" type="string" required>
|
||||
The ARN of the IAM role users will assume (e.g., `arn:aws:iam::123456789012:role/pam-readonly`)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default Session Duration" type="string" required>
|
||||
Session duration using human-readable format (e.g., `15m`, `30m`, `1h`). Minimum 15 minutes, maximum 1 hour.
|
||||
|
||||
<Warning>
|
||||
Due to AWS role chaining limitations, the maximum session duration is **1 hour**, regardless of the target role's configured maximum session duration.
|
||||
</Warning>
|
||||
</ParamField>
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Access the AWS Console
|
||||
|
||||
Once your resource and accounts are configured, users can request access through Infisical:
|
||||
|
||||

|
||||
|
||||
<Steps>
|
||||
<Step title="Navigate to Accounts">
|
||||
Go to the **Accounts** tab in your PAM project.
|
||||
</Step>
|
||||
|
||||
<Step title="Find the Account">
|
||||
Find the AWS Console account you want to access.
|
||||
</Step>
|
||||
|
||||
<Step title="Request Access">
|
||||
Click the **Access** button.
|
||||
|
||||
Infisical will:
|
||||
1. Assume the Resource Role using your project's External ID
|
||||
2. Assume the Target Role using role chaining
|
||||
3. Generate a federated sign-in URL
|
||||
4. Open the AWS Console in a new browser tab
|
||||
|
||||
The user will be signed into the AWS Console with the permissions of the Target Role.
|
||||
</Step>
|
||||
</Steps>
|
||||
224
docs/documentation/platform/pam/resources/kubernetes.mdx
Normal file
224
docs/documentation/platform/pam/resources/kubernetes.mdx
Normal file
@@ -0,0 +1,224 @@
|
||||
---
|
||||
title: "Kubernetes"
|
||||
sidebarTitle: "Kubernetes"
|
||||
description: "Learn how to configure Kubernetes cluster access through Infisical PAM for secure, audited, and just-in-time access to your Kubernetes clusters."
|
||||
---
|
||||
|
||||
Infisical PAM supports secure, just-in-time access to Kubernetes clusters through service account token authentication. This allows your team to access Kubernetes clusters without sharing long-lived credentials, while maintaining a complete audit trail of who accessed what and when.
|
||||
|
||||
## How It Works
|
||||
|
||||
Kubernetes access in Infisical PAM uses an Infisical Gateway to securely proxy connections to your Kubernetes API server. When a user requests access, Infisical generates a temporary kubeconfig that routes traffic through the Gateway, enabling secure access without exposing your cluster directly.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant User
|
||||
participant CLI as Infisical CLI
|
||||
participant Infisical
|
||||
participant Gateway as Infisical Gateway
|
||||
participant K8s as Kubernetes API Server
|
||||
|
||||
User->>CLI: Request Kubernetes access
|
||||
CLI->>Infisical: Authenticate & request session
|
||||
Infisical-->>CLI: Session credentials & Gateway info
|
||||
CLI->>CLI: Start local proxy
|
||||
CLI->>Gateway: Establish secure tunnel
|
||||
User->>CLI: kubectl commands
|
||||
CLI->>Gateway: Proxy kubectl requests
|
||||
Gateway->>K8s: Forward with SA token
|
||||
K8s-->>Gateway: Response
|
||||
Gateway-->>CLI: Return response
|
||||
CLI-->>User: kubectl output
|
||||
```
|
||||
|
||||
### Key Concepts
|
||||
|
||||
1. **Gateway**: An Infisical Gateway deployed in your network that can reach the Kubernetes API server. The Gateway handles secure communication between users and your cluster.
|
||||
|
||||
2. **Service Account Token**: A Kubernetes service account token that grants access to the cluster. This token is stored securely in Infisical and used by the Gateway to authenticate with the Kubernetes API.
|
||||
|
||||
3. **Local Proxy**: The Infisical CLI starts a local proxy on your machine that intercepts kubectl commands and routes them securely through the Gateway to your cluster.
|
||||
|
||||
4. **Session Tracking**: All access sessions are logged, including when the session was created, who accessed the cluster, session duration, and when it ended.
|
||||
|
||||
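To make the "forward with SA token" step in the flow above concrete, here is a minimal sketch (not Infisical's implementation) of what the Gateway-side forwarding amounts to: the stored service-account token is sent as a Bearer credential to the Kubernetes API server. The URL and namespace are placeholders, and the SSL-verification caveat from the resource configuration below applies here too:

```typescript
// Placeholder values; the real ones come from the PAM resource and account configuration.
const K8S_API_URL = "https://kubernetes.example.com:6443";
const SA_TOKEN = process.env.K8S_SA_TOKEN ?? "";

// A single authenticated call against the Kubernetes API: list pods in a namespace.
const listPods = async (namespace: string) => {
  const res = await fetch(`${K8S_API_URL}/api/v1/namespaces/${namespace}/pods`, {
    headers: { Authorization: `Bearer ${SA_TOKEN}` }
  });
  if (!res.ok) throw new Error(`Kubernetes API returned ${res.status}`);
  return res.json() as Promise<{ items: unknown[] }>;
};

listPods("kube-system").then((pods) => console.log(`found ${pods.items.length} pods`));
```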
### Session Tracking
|
||||
|
||||
Infisical tracks:
|
||||
- When the session was created
|
||||
- Who accessed which cluster
|
||||
- Session duration
|
||||
- All kubectl commands executed during the session
|
||||
- When the session ended
|
||||
|
||||
<Info>
|
||||
**Session Logs**: After ending a session (by stopping the proxy), you can view detailed session logs in the Sessions page, including all commands executed during the session.
|
||||
</Info>
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before configuring Kubernetes access in Infisical PAM, you need:
|
||||
|
||||
1. **Infisical Gateway** - A Gateway deployed in your network with access to the Kubernetes API server
|
||||
2. **Service Account** - A Kubernetes service account with appropriate RBAC permissions
|
||||
3. **Infisical CLI** - The Infisical CLI installed on user machines
|
||||
|
||||
<Warning>
|
||||
**Gateway Required**: Unlike AWS Console access, Kubernetes access requires an Infisical Gateway to be deployed and registered with your Infisical instance. The Gateway must have network connectivity to your Kubernetes API server.
|
||||
</Warning>
|
||||
|
||||
## Create the PAM Resource
|
||||
|
||||
The PAM Resource represents the connection between Infisical and your Kubernetes cluster.
|
||||
|
||||
<Steps>
|
||||
<Step title="Ensure Gateway is Running">
|
||||
Before creating the resource, ensure you have an Infisical Gateway running and registered with your Infisical instance. The Gateway must have network access to your Kubernetes API server.
|
||||
</Step>
|
||||
|
||||
<Step title="Create the Resource in Infisical">
|
||||
1. Navigate to your PAM project and go to the **Resources** tab
|
||||
2. Click **Add Resource** and select **Kubernetes**
|
||||
3. Enter a name for the resource (e.g., `production-k8s`, `staging-cluster`)
|
||||
4. Enter the **Kubernetes API Server URL** - the URL to your Kubernetes API endpoint (e.g.`https://kubernetes.example.com:6443`)
|
||||
5. Select the **Gateway** that has access to this cluster
|
||||
6. Configure SSL verification options if needed
|
||||
|
||||
<Note>
|
||||
**SSL Verification**: You may need to disable SSL verification if your Kubernetes API server uses a self-signed certificate or if the certificate's hostname doesn't match the URL you're using to access it.
|
||||
</Note>
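
If you're unsure whether verification will pass, you can inspect the certificate your API server actually presents before creating the resource. A minimal sketch, assuming a placeholder hostname of `kubernetes.example.com:6443` and OpenSSL 1.1.1+ for the `-ext` flag:

```bash
# Print the subject, issuer, validity window, and SANs of the API server certificate
# (hostname and port are placeholders - substitute your own)
openssl s_client -connect kubernetes.example.com:6443 \
  -servername kubernetes.example.com </dev/null 2>/dev/null \
  | openssl x509 -noout -subject -issuer -dates -ext subjectAltName
```

If the SANs don't include the hostname you plan to use, or the issuer is a private CA your machines don't trust, you'll likely need to adjust the SSL verification options.
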
</Step>
</Steps>

## Create a Service Account

Infisical PAM currently supports service account token authentication for Kubernetes. You'll need to create a service account with appropriate permissions in your cluster.

<Steps>
<Step title="Create the Service Account YAML">
Create a file named `sa.yaml` with the following content:

```yaml sa.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
  name: infisical-pam-sa
  namespace: kube-system
---
# Bind the ServiceAccount to the desired ClusterRole
# This example uses cluster-admin - adjust based on your needs
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: infisical-pam-binding
subjects:
  - kind: ServiceAccount
    name: infisical-pam-sa
    namespace: kube-system
roleRef:
  kind: ClusterRole
  name: cluster-admin # Change this to a more restrictive role as needed
  apiGroup: rbac.authorization.k8s.io
---
# Create a static, non-expiring token for the ServiceAccount
apiVersion: v1
kind: Secret
metadata:
  name: infisical-pam-sa-token
  namespace: kube-system
  annotations:
    kubernetes.io/service-account.name: infisical-pam-sa
type: kubernetes.io/service-account-token
```

<Warning>
**Security Best Practice**: The example above uses `cluster-admin` for simplicity. In production environments, you should create custom ClusterRoles or Roles with the minimum permissions required for each use case.
</Warning>
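
For instance, a read-only role is often enough for troubleshooting access. The sketch below is illustrative only - the role name and resource list are placeholders to adapt to your own needs, not part of the Infisical setup itself:

```bash
# Illustrative: bind the PAM service account to a custom read-only ClusterRole
# instead of cluster-admin (names and resources are placeholders)
kubectl apply -f - <<'EOF'
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: infisical-pam-readonly
rules:
  - apiGroups: ["", "apps", "batch"]
    resources: ["pods", "pods/log", "services", "namespaces", "deployments", "jobs"]
    verbs: ["get", "list", "watch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: infisical-pam-readonly-binding
subjects:
  - kind: ServiceAccount
    name: infisical-pam-sa
    namespace: kube-system
roleRef:
  kind: ClusterRole
  name: infisical-pam-readonly
  apiGroup: rbac.authorization.k8s.io
EOF
```
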
</Step>

<Step title="Apply the Service Account">
Apply the configuration to your cluster:

```bash
kubectl apply -f sa.yaml
```

This creates:
- A ServiceAccount named `infisical-pam-sa` in the `kube-system` namespace
- A ClusterRoleBinding that grants the service account its permissions
- A Secret containing a static, non-expiring token for the service account
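
You can confirm the objects exist before moving on (the names below match the `sa.yaml` above):

```bash
# Quick sanity check that the ServiceAccount and token Secret were created
kubectl -n kube-system get serviceaccount infisical-pam-sa
kubectl -n kube-system get secret infisical-pam-sa-token
```
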
</Step>

<Step title="Retrieve the Service Account Token">
Get the service account token that you'll use when creating the PAM account:

```bash
kubectl -n kube-system get secret infisical-pam-sa-token -o jsonpath='{.data.token}' | base64 -d
```

Copy this token - you'll need it in the next step.
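
Optionally, you can verify that the token authenticates before pasting it into Infisical. A minimal sketch, assuming the placeholder API server URL from earlier (drop `--insecure-skip-tls-verify` if the cluster CA is trusted locally):

```bash
# Verify the token can list namespaces against the API server directly
TOKEN=$(kubectl -n kube-system get secret infisical-pam-sa-token -o jsonpath='{.data.token}' | base64 -d)
kubectl --server=https://kubernetes.example.com:6443 \
  --token="$TOKEN" \
  --insecure-skip-tls-verify \
  get namespaces
```
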
</Step>
</Steps>

## Create PAM Accounts

Once you have created the PAM resource, you'll need to configure one or more PAM accounts for it.
A PAM Account represents a specific service account that users can request access to. You can create multiple accounts per resource, each with different permission levels.

<Steps>
<Step title="Navigate to Accounts">
Go to the **Accounts** tab in your PAM project.
</Step>

<Step title="Add New Account">
Click **Add Account** and select the Kubernetes resource you created.
</Step>

<Step title="Fill in Account Details">
Fill in the account details and paste the service account token you retrieved earlier.
</Step>
</Steps>

## Access Kubernetes Cluster

Once your resource and accounts are configured, users can request access through the Infisical CLI:

<Steps>
<Step title="Get the Access Command">
1. Navigate to the **Accounts** tab in your PAM project
2. Find the Kubernetes account you want to access
3. Click the **Access** button
4. Copy the provided CLI command
</Step>

<Step title="Run the Access Command">
Run the copied command in your terminal.

The CLI will:
1. Authenticate with Infisical
2. Establish a secure connection through the Gateway
3. Start a local proxy on your machine
4. Configure kubectl to use the proxy
</Step>

<Step title="Use kubectl">
Once the proxy is running, you can use `kubectl` commands as normal:

```bash
kubectl get pods
kubectl get namespaces
kubectl describe deployment my-app
```

All commands are routed securely through the Infisical Gateway to your cluster.
</Step>

<Step title="End the Session">
When you're done, stop the proxy by pressing `Ctrl+C` in the terminal where it's running. This will:
- Close the secure tunnel
- End the session
- Log the session details to Infisical

You can view session logs in the **Sessions** page of your PAM project.
</Step>
</Steps>
@@ -1,401 +0,0 @@
---
title: "Certificates"
sidebarTitle: "Certificates"
description: "Learn how to issue X.509 certificates with Infisical."
---

## Concept

Assuming that you've created a Private CA hierarchy with a root CA and an intermediate CA, you can now issue/revoke X.509 certificates using the intermediate CA.

<div align="center">

```mermaid
graph TD
    A[Root CA]
    A --> B[Intermediate CA]
    A --> C[Intermediate CA]
    B --> D[Leaf Certificate]
    C --> E[Leaf Certificate]
```

</div>

## Workflow

The typical workflow for managing certificates consists of the following steps:

1. Issuing a certificate under an intermediate CA with details like name and validity period. As part of certificate issuance, you can either issue a certificate directly from a CA or do it via a certificate template.
2. Managing certificate lifecycle events such as certificate renewal and revocation. As part of the certificate revocation flow, you can also query for a Certificate Revocation List [CRL](https://en.wikipedia.org/wiki/Certificate_revocation_list), a time-stamped, signed data structure issued by a CA containing a list of revoked certificates to check if a certificate has been revoked.

<Note>
Note that this workflow can be executed via the Infisical UI or programmatically via the API.
</Note>

## Guide to Issuing Certificates

In the following steps, we explore how to issue an X.509 certificate under a CA.

<Tabs>
<Tab title="Infisical UI">

<Steps>
<Step title="Creating a certificate template">
A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting such that any certificate issued under the template is automatically added to the collection.

With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like `.*.acme.com` or perhaps that the max TTL cannot be more than 1 year.

Head to your Project > Certificate Authorities > Your Issuing CA and create a certificate template.



Here's some guidance on each field:

- Template Name: A name for the certificate template.
- Issuing CA: The Certificate Authority (CA) that will issue certificates based on this template.
- Certificate Collection (Optional): The certificate collection that certificates should be added to when issued under the template.
- Common Name (CN): A regular expression used to validate the common name in certificate requests.
- Alternative Names (SANs): A regular expression used to validate subject alternative names in certificate requests.
- TTL: The maximum Time-to-Live (TTL) for certificates issued using this template.
- Key Usage: The key usage constraint or default value for certificates issued using this template.
- Extended Key Usage: The extended key usage constraint or default value for certificates issued using this template.
</Step>
<Step title="Creating a certificate">
To create a certificate, head to your Project > Internal PKI > Certificates and press **Issue** under the Certificates section.



Here, set the **Certificate Template** to the template from step 1 and fill out the rest of the details for the certificate to be issued.



Here's some guidance on each field:

- Friendly Name: A friendly name for the certificate; this is only for display and defaults to the common name of the certificate if left empty.
- Common Name (CN): The common name for the certificate like `service.acme.com`.
- Alternative Names (SANs): A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be hostnames or email addresses like `app1.acme.com, app2.acme.com`.
- TTL: The lifetime of the certificate in seconds.
- Key Usage: The key usage extension of the certificate.
- Extended Key Usage: The extended key usage extension of the certificate.

<Note>
Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.

That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
</Note>

</Step>
<Step title="Copying the certificate details">
Once you have created the certificate in the previous step, you'll be presented with the certificate details including the **Certificate Body**, **Certificate Chain**, and **Private Key**.



<Note>
Make sure to download and store the **Private Key** in a secure location as it will only be displayed once at the time of certificate issuance.
The **Certificate Body** and **Certificate Chain** will remain accessible and can be copied at any time.
</Note>
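
If you want to double-check the issued certificate's fields after downloading it (assuming you saved the **Certificate Body** locally as `cert.pem`, a placeholder file name), you can decode it with OpenSSL:

```bash
# Print the decoded certificate: subject, SANs, validity period, key usages, etc.
openssl x509 -in cert.pem -noout -text
```
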
</Step>
</Steps>
</Tab>
<Tab title="API">

<Steps>
<Step title="Creating a certificate template">
A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting such that any certificate issued under the template is automatically added to the collection.

With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like `.*.acme.com` or perhaps that the max TTL cannot be more than 1 year.

To create a certificate template, make an API request to the [Create Certificate Template](/api-reference/endpoints/certificate-templates-v2/create) API endpoint, specifying the issuing CA.

### Sample request

```bash Request
curl --request POST \
  --url https://us.infisical.com/api/v2/certificate-templates \
  --header 'Content-Type: application/json' \
  --data '{
    "projectId": "<string>",
    "name": "<string>",
    "description": "<string>",
    "subject": [
      {
        "type": "common_name",
        "allowed": [
          "*.infisical.com"
        ]
      }
    ],
    "sans": [
      {
        "type": "dns_name",
        "allowed": [
          "*.sample.com"
        ]
      }
    ],
    "keyUsages": {
      "allowed": [
        "digital_signature"
      ]
    },
    "extendedKeyUsages": {
      "allowed": [
        "client_auth"
      ]
    },
    "algorithms": {
      "signature": [
        "SHA256-RSA"
      ],
      "keyAlgorithm": [
        "RSA-2048"
      ]
    },
    "validity": {
      "max": "365d"
    }
  }'
```

### Sample response

```bash Response
{
  "certificateTemplate": {
    "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "name": "<string>",
    "description": "<string>",
    "subject": [
      {
        "type": "common_name",
        "allowed": [
          "*.infisical.com"
        ]
      }
    ],
    "sans": [
      {
        "type": "dns_name",
        "allowed": [
          "*.sample.com"
        ]
      }
    ],
    "keyUsages": {
      "allowed": [
        "digital_signature"
      ]
    },
    "extendedKeyUsages": {
      "allowed": [
        "client_auth"
      ]
    },
    "algorithms": {
      "signature": [
        "SHA256-RSA"
      ],
      "keyAlgorithm": [
        "RSA-2048"
      ]
    },
    "validity": {
      "max": "365d"
    },
    "createdAt": "2023-11-07T05:31:56Z",
    "updatedAt": "2023-11-07T05:31:56Z"
  }
}
```

</Step>
<Step title="Creating a certificate">
To create a certificate under the certificate template, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/issue-certificate) API endpoint, specifying the issuing CA.

### Sample request

```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates/issue-certificate' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "profileId": "<profile-id>",
    "commonName": "service.acme.com",
    "ttl": "1y",
    "signatureAlgorithm": "RSA-SHA256",
    "keyAlgorithm": "RSA_2048"
  }'
```

### Sample response

```bash Response
{
  certificate: "...",
  certificateChain: "...",
  issuingCaCertificate: "...",
  privateKey: "...",
  serialNumber: "..."
}
```

<Note>
Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.

That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
</Note>

<Note>
Make sure to store the `privateKey` as it is only returned once here at the time of certificate issuance. The `certificate` and `certificateChain` will remain accessible and can be retrieved at any time.
</Note>
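
If you capture the JSON response to a file (say `response.json`, a placeholder name), one convenient way to split it into PEM files is with `jq`:

```bash
# Split the issuance response into PEM files (response.json is a placeholder)
jq -r '.certificate'      response.json > cert.pem
jq -r '.certificateChain' response.json > chain.pem
jq -r '.privateKey'       response.json > key.pem
chmod 600 key.pem
```
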

If you have an external private key, you can also create a certificate by making an API request containing a PEM-encoded CSR (Certificate Signing Request) to the [Sign Certificate](/api-reference/endpoints/certificates/sign-certificate) API endpoint, specifying the issuing CA.
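
For example, you can generate a private key and CSR locally with OpenSSL before calling the endpoint (file names and the subject below are placeholders):

```bash
# Generate a 2048-bit RSA private key and a CSR for the desired common name
openssl req -new -newkey rsa:2048 -nodes \
  -keyout service.key \
  -out service.csr \
  -subj "/CN=service.acme.com"
```
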

### Sample request

```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates/sign-certificate' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "certificateTemplateId": "<certificate-template-id>",
    "csr": "...",
    "ttl": "1y"
  }'
```

### Sample response

```bash Response
{
  certificate: "...",
  certificateChain: "...",
  issuingCaCertificate: "...",
  privateKey: "...",
  serialNumber: "..."
}
```

</Step>
</Steps>
</Tab>
</Tabs>

## Guide to Revoking Certificates

In the following steps, we explore how to revoke an X.509 certificate under a CA and obtain a Certificate Revocation List (CRL) for a CA.

<Tabs>
<Tab title="Infisical UI">
<Steps>
<Step title="Revoking a Certificate">
Assuming that you've issued a certificate under a CA, you can revoke it by selecting the **Revoke Certificate** option for it and specifying the reason for revocation.




</Step>
<Step title="Obtaining a CRL">
In order to check the revocation status of a certificate, you can check it against the CRL of a CA by heading to its Issuing CA and downloading the CRL.



To verify a certificate against the downloaded CRL with OpenSSL, you can use the following command:

```bash
openssl verify -crl_check -CAfile chain.pem -CRLfile crl.pem cert.pem
```

Note that you can also obtain the CRL from the certificate itself by referencing the CRL distribution point extension on the certificate.
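
To see which CRL distribution point a certificate advertises (using OpenSSL 1.1.1 or newer for the `-ext` flag), you can run:

```bash
# Show the CRL distribution point(s) embedded in the certificate
openssl x509 -in cert.pem -noout -ext crlDistributionPoints
```
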

To check a certificate against the CRL distribution point specified within it with OpenSSL, you can use the following command:

```bash
openssl verify -verbose -crl_check -crl_download -CAfile chain.pem cert.pem
```

</Step>
</Steps>
</Tab>
<Tab title="API">
<Steps>
<Step title="Revoking a certificate">
Assuming that you've issued a certificate under a CA, you can revoke it by making an API request to the [Revoke Certificate](/api-reference/endpoints/certificates/revoke) API endpoint, specifying the serial number of the certificate and the reason for revocation.

### Sample request

```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates/<cert-id>/revoke' \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "revocationReason": "UNSPECIFIED"
  }'
```

### Sample response

```bash Response
{
  message: "Successfully revoked certificate",
  serialNumber: "...",
  revokedAt: "..."
}
```
</Step>
<Step title="Obtaining a CRL">
In order to check the revocation status of a certificate, you can check it against the CRL of the issuing CA.
To obtain the CRLs of the CA, make an API request to the [List CRLs](/api-reference/endpoints/certificate-authorities/crl) API endpoint.

### Sample request

```bash Request
curl --location --request GET 'https://app.infisical.com/api/v1/cert-manager/ca/internal/<ca-id>/crls' \
  --header 'Authorization: Bearer <access-token>'
```

### Sample response

```bash Response
[
  {
    id: "...",
    crl: "..."
  },
  ...
]
```

To verify a certificate against the CRL with OpenSSL, you can use the following command:

```bash
openssl verify -crl_check -CAfile chain.pem -CRLfile crl.pem cert.pem
```
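
To inspect a returned CRL itself (assuming the `crl` field is PEM-encoded and saved to `crl.pem`; if it is base64-encoded DER, decode it first), you can print its contents, including the revoked serial numbers:

```bash
# Decode the CRL and list the revoked certificate serial numbers
openssl crl -in crl.pem -noout -text
```
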
</Step>
</Steps>

</Tab>
</Tabs>

## FAQ

<AccordionGroup>
<Accordion title="What is the workflow for renewing a certificate?">
To renew a certificate, you have to issue a new certificate from the same CA with the same common name as the old certificate. The original certificate will continue to be valid through its original TTL unless explicitly revoked.
</Accordion>
</AccordionGroup>
@@ -29,13 +29,13 @@ Refer to the documentation for each [enrollment method](/documentation/platform/

## Guide to Renewing Certificates

To [renew a certificate](/documentation/platform/pki/concepts/certificate-lifecycle#renewal), you can either request a new certificate from a certificate profile or have the platform
automatically request a new one for you. Whether you pursue a client-driven or server-driven approach is totally dependent on the enrollment method configured on your certificate
automatically request a new one for you to be delivered downstream to a target destination. Whether you pursue a client-driven or server-driven approach is totally dependent on the enrollment method configured on your certificate
profile as well as your infrastructure use-case.

### Client-Driven Certificate Renewal

Client-driven certificate renewal is when renewal is initiated client-side by the end-entity consuming the certificate.
This is the most common approach to certificate renewal and is suitable for most use-cases.
More specifically, the client (e.g. [Infisical Agent](/integrations/platforms/certificate-agent), [ACME client](https://letsencrypt.org/docs/client-options/), etc.) monitors the certificate and makes a request for Infisical to issue a new certificate back to it when the existing certificate is nearing expiration. This is the most common approach to certificate renewal and is suitable for most use-cases.

### Server-Driven Certificate Renewal

@@ -0,0 +1,30 @@
---
title: "Certificate Components"
description: "Learn the main components for managing certificates with Infisical."
---

## Core Components

The following resources define how certificates are issued, shaped, and governed in Infisical:

- [Certificate Authority (CA)](/documentation/platform/pki/ca/overview): The trusted entity that issues X.509 certificates. This can be an [Internal CA](/documentation/platform/pki/ca/private-ca) or an [External CA](/documentation/platform/pki/ca/external-ca) in Infisical.
The former represents a fully managed CA hierarchy within Infisical, while the latter represents an external CA (e.g. [DigiCert](/documentation/platform/pki/ca/digicert), [Let's Encrypt](/documentation/platform/pki/ca/lets-encrypt), [Microsoft AD CS](/documentation/platform/pki/ca/azure-adcs), etc.) that can be integrated with Infisical.

- [Certificate Template](/documentation/platform/pki/certificates/templates): A policy structure specifying permitted attributes for requested certificates. This includes constraints around subject naming conventions, SAN fields, key usages, and extended key usages.

- [Certificate Profile](/documentation/platform/pki/certificates/profiles): A configuration set specifying how leaf certificates should be issued for a group of end-entities including the issuing CA, a certificate template, and the enrollment method (e.g. [ACME](/documentation/platform/pki/enrollment-methods/acme), [EST](/documentation/platform/pki/enrollment-methods/est), [API](/documentation/platform/pki/enrollment-methods/api), etc.) used to enroll certificates.

- [Certificate](/documentation/platform/pki/certificates/certificates): The actual X.509 certificate issued for a profile. Once created, it is tracked in Infisical’s certificate inventory for management, renewal, and lifecycle operations.

## Access Control

Access control defines who (or what) can manage certificate resources and who can issue certificates within a project. Without clear boundaries, [certificate authorities](/documentation/platform/pki/ca/overview) and issuance workflows can be misconfigured or misused.

To manage access to certificates, you assign role-based permissions at the project level. These permissions determine which certificate authorities, certificate templates, certificate profiles, and other related resources a user or machine identity can act on. For example, you may want to:

- Have specific team(s) manage your internal CA hierarchy or external CA integration configuration and have separate team(s) configure certificate profiles for requested certificates.
- Limit which teams can manage policies defined on certificate templates.
- Have specific end-entities (e.g. servers, devices, users) request certificates from specific certificate profiles.

This model follows the [principle of least privilege](https://en.wikipedia.org/wiki/Principle_of_least_privilege) so that each user or machine identity can manage or issue only the certificate resources it is responsible for and nothing more.
@@ -9,7 +9,7 @@ A (digital) _certificate_ is a file that is tied to a cryptographic key pair and

For example, when you visit a website over HTTPS, your browser checks the TLS certificate deployed on the web server or load balancer to make sure it’s really the site it claims to be. If the certificate is valid, your browser establishes an encrypted connection with the server.

Certificates contain information about the subject (who it identifies), the public key, and a digital signature from the CA that issued the certificate. They also include additional fields such as key usages, validity periods, and extensions that define how and where the certificate can be used. When a certificate expires, the service presenting it is no longer trusted, and clients won't be able to establish a secure connection to the service.
Certificates contain information about the subject (who it identifies), the public key, and a digital signature from the Certificate Authority (CA) that issued the certificate. They also include additional fields such as key usages, validity periods, and extensions that define how and where the certificate can be used. When a certificate expires, the service presenting it is no longer trusted, and clients won't be able to establish a secure connection to the service.

## What is Certificate Management?

@@ -6,7 +6,9 @@ sidebarTitle: "ACME"

## Concept

The ACME enrollment method allows Infisical to act as an ACME server. It lets you request and manage certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) using the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment).
This method is suitable for web servers, load balancers, and other general-purpose servers that can run an [ACME client](https://letsencrypt.org/docs/client-options/) for automated certificate management.

This method is suitable for web servers, load balancers, and other general-purpose servers that can run an [ACME client](https://letsencrypt.org/docs/client-options/) for automated certificate management;
it can also be used with [cert-manager](https://cert-manager.io/) to issue and renew certificates for Kubernetes workloads through the [ACME issuer type](https://cert-manager.io/docs/configuration/acme/).

Infisical's ACME enrollment method is based on [RFC 8555](https://datatracker.ietf.org/doc/html/rfc8555/).

@@ -26,6 +28,17 @@ In the following steps, we explore how to issue a X.509 certificate using the AC



<Note>

By default, when the ACME client requests a certificate against the certificate profile for a particular domain, Infisical will verify domain ownership using the [HTTP-01 challenge](https://letsencrypt.org/docs/challenge-types/#http-01-challenge) method prior to issuing a certificate back to the client.

If you want Infisical to skip domain ownership validation entirely, you can enable the **Skip DNS Ownership Validation** checkbox.

Note that skipping domain ownership validation for the ACME enrollment method is **not the same** as skipping validation for an [External ACME CA integration](/documentation/platform/pki/ca/acme-ca).

When using the ACME enrollment method, the domain ownership check occurring between the ACME client and Infisical can be skipped. In contrast, External ACME CA integrations always require domain ownership validation, as Infisical must complete a DNS-01 challenge with the upstream ACME-compatible CA.
</Note>

</Step>
<Step title="Obtain the ACME configuration">
Once you've created the certificate profile, you can obtain its ACME configuration details by clicking the **Reveal ACME EAB** option on the profile.

@@ -100,32 +100,34 @@ Here, select the certificate profile from step 1 that will be used to issue the
</Step>
<Step title="Issue a certificate">

To issue a certificate against the certificate profile, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/issue-certificate) API endpoint.
To issue a certificate against the certificate profile, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/create-certificate) API endpoint.

### Sample request

```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates/issue-certificate' \
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates' \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "profileId": "<certificate-profile-id>",
    "commonName": "service.acme.com",
    "ttl": "1y",
    "signatureAlgorithm": "RSA-SHA256",
    "keyAlgorithm": "RSA_2048",
    "keyUsages": ["digital_signature", "key_encipherment"],
    "extendedKeyUsages": ["server_auth"],
    "altNames": [
      {
        "type": "DNS",
        "value": "service.acme.com"
      },
      {
        "type": "DNS",
        "value": "www.service.acme.com"
      }
    ]
    "attributes": {
      "commonName": "service.acme.com",
      "ttl": "1y",
      "signatureAlgorithm": "RSA-SHA256",
      "keyAlgorithm": "RSA_2048",
      "keyUsages": ["digital_signature", "key_encipherment"],
      "extendedKeyUsages": ["server_auth"],
      "altNames": [
        {
          "type": "DNS",
          "value": "service.acme.com"
        },
        {
          "type": "DNS",
          "value": "www.service.acme.com"
        }
      ]
    }
  }'
```

@@ -133,31 +135,36 @@ Here, select the certificate profile from step 1 that will be used to issue the

```bash Response
{
  "certificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "certificateChain": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "issuingCaCertificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "privateKey": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC...\n-----END PRIVATE KEY-----",
  "serialNumber": "123456789012345678",
  "certificateId": "880h3456-e29b-41d4-a716-446655440003"
  "certificate": {
    "certificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "certificateChain": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "issuingCaCertificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "privateKey": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC...\n-----END PRIVATE KEY-----",
    "serialNumber": "123456789012345678",
    "certificateId": "880h3456-e29b-41d4-a716-446655440003"
  },
  "certificateRequestId": "..."
}
```

<Note>
Make sure to store the `privateKey` as it is only returned once here at the time of certificate issuance. The `certificate` and `certificateChain` will remain accessible and can be retrieved at any time.
Note: If the certificate is available to be issued immediately, the `certificate` field in the response will contain the certificate data. If issuance is delayed (for example, due to pending approval or additional processing), the `certificate` field will be `null` and you can use the `certificateRequestId` to poll for status or retrieve the certificate when it is ready using the [Get Certificate Request](/api-reference/endpoints/certificates/certificate-request) API endpoint.
</Note>

If you have an external private key, you can also issue a certificate by making an API request containing a pem-encoded CSR (Certificate Signing Request) to the [Sign Certificate](/api-reference/endpoints/certificates/sign-certificate) API endpoint.
If you have an external private key, you can also issue a certificate by making an API request containing a pem-encoded CSR (Certificate Signing Request) to the same [Issue Certificate](/api-reference/endpoints/certificates/create-certificate) API endpoint.

### Sample request

```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates/sign-certificate' \
curl --location --request POST 'https://app.infisical.com/api/v1/cert-manager/certificates' \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "profileId": "<certificate-profile-id>",
    "csr": "-----BEGIN CERTIFICATE REQUEST-----\nMIICvDCCAaQCAQAwdzELMAkGA1UEBhMCVVMxDTALBgNVBAgMBE9oaW8...\n-----END CERTIFICATE REQUEST-----",
    "ttl": "1y"
    "attributes": {
      "ttl": "1y"
    }
  }'
```

@@ -165,11 +172,14 @@ Here, select the certificate profile from step 1 that will be used to issue the

```bash Response
{
  "certificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "certificateChain": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "issuingCaCertificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
  "serialNumber": "123456789012345679",
  "certificateId": "990i4567-e29b-41d4-a716-446655440004"
  "certificate": {
    "certificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "certificateChain": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "issuingCaCertificate": "-----BEGIN CERTIFICATE-----\nMIIEpDCCAowCCQD...\n-----END CERTIFICATE-----",
    "serialNumber": "123456789012345679",
    "certificateId": "990i4567-e29b-41d4-a716-446655440004"
  },
  "certificateRequestId": "..."
}
```

108 docs/documentation/platform/pki/guides/request-cert-acme.mdx Normal file
@@ -0,0 +1,108 @@
---
title: "Obtain a Certificate via ACME"
---

import RequestCertSetup from "/snippets/documentation/platform/pki/guides/request-cert-setup.mdx";

The [ACME enrollment method](/documentation/platform/pki/enrollment-methods/acme) lets any [ACME client](https://letsencrypt.org/docs/client-options/) obtain TLS certificates from Infisical using the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment).
This includes ACME clients like [Certbot](https://certbot.eff.org/), [cert-manager](https://cert-manager.io/) in Kubernetes using the [ACME issuer type](https://cert-manager.io/docs/configuration/acme/), and more.

Infisical currently supports the [HTTP-01 challenge type](https://letsencrypt.org/docs/challenge-types/#http-01-challenge) for domain validation as part of the ACME enrollment method.

## Diagram

The following sequence diagram illustrates the certificate enrollment workflow for requesting a certificate via ACME from Infisical.

```mermaid
sequenceDiagram
    autonumber
    participant ACME as ACME Client
    participant Infis as Infisical ACME Server
    participant Authz as HTTP-01 Challenge<br/>Validation Endpoint
    participant CA as CA<br/>(Internal or External)

    Note over ACME: ACME Client discovers<br/>Infisical ACME Directory URL

    ACME->>Infis: GET /directory
    Infis-->>ACME: Directory + nonce + endpoints

    ACME->>Infis: HEAD /new-nonce
    Infis-->>ACME: Return nonce in Replay-Nonce header

    ACME->>Infis: POST /new-account<br/>(contact, ToS agreed)
    Infis-->>ACME: Return account object

    Note over ACME,Infis: Requesting a certificate

    ACME->>Infis: POST /new-order<br/>(identifiers: DNS names)
    Infis-->>ACME: Return order<br/>with authorization URLs

    loop For each authorization (one per DNS name)
        ACME->>Infis: POST /authorizations/:authzId
        Infis-->>ACME: Return HTTP-01 challenge<br/>(URL + token + keyAuth)

        Note over ACME: Client must prove control<br/>over the domain via HTTP

        ACME->>Authz: Provision challenge response<br/>at<br/>/.well-known/acme-challenge/<token>

        ACME->>Infis: POST /authorizations/:authzId/challenges/:challengeId<br/>(trigger validation)

        Infis->>Authz: HTTP GET /.well-known/acme-challenge/<token>
        Authz-->>Infis: Return keyAuth

        Infis-->>ACME: Authorization = valid
    end

    Note over Infis: All authorizations valid → ready to finalize

    ACME->>ACME: Generate keypair locally<br/>and create CSR
    ACME->>Infis: POST /orders/:orderId/finalize<br/>(CSR)

    Infis->>CA: Request certificate issuance<br/>(CSR)
    CA-->>Infis: Signed certificate (+ chain)

    Infis-->>ACME: Return order with certificate URL<br/>(status: valid)

    ACME->>Infis: POST /orders/:orderId/certificate
    Infis-->>ACME: Return certificate<br/>and certificate chain
```

## Guide

In the following steps, we explore an end-to-end workflow for obtaining a certificate via ACME with Infisical.

<Steps>
<RequestCertSetup />
<Step title="Create a certificate profile">
Next, follow the guide [here](/documentation/platform/pki/certificates/profiles#guide-to-creating-a-certificate-profile) to create a [certificate profile](/documentation/platform/pki/certificates/profiles)
that will be referenced when requesting a certificate.

The certificate profile specifies which certificate template and issuing CA should be used to validate an incoming certificate request and issue a certificate;
it also specifies the [enrollment method](/documentation/platform/pki/enrollment-methods/overview) for how certificates can be requested against this profile
to begin with.

You should specify the certificate template from Step 2, the issuing CA from Step 1, and the **ACME** option in the **Enrollment Method** dropdown when creating the certificate profile.

</Step>
<Step title="Request a certificate">
Finally, follow the guide [here](/documentation/platform/pki/enrollment-methods/acme#guide-to-certificate-enrollment-via-acme) to request a certificate against the certificate profile
using an [ACME client](https://letsencrypt.org/docs/client-options/).

The ACME client will connect to Infisical's ACME server at the **ACME Directory URL** and authenticate using the **EAB Key Identifier (KID)** and **EAB Secret** credentials as part of the ACME protocol.

The typical ACME workflow looks like this:

- The ACME client creates (or reuses) an ACME account with Infisical using EAB credentials.
- The ACME client creates an order for one or more DNS names.
- For each DNS name, the ACME client receives an `HTTP-01` challenge and provisions the corresponding token response at `/.well-known/acme-challenge/<token>`.
- Once all authorizations are valid, the ACME client finalizes the order by sending a CSR to Infisical.
- Infisical issues the certificate from the issuing CA on the certificate profile and returns it (plus the chain) back to the ACME client.

ACME clients typically handle renewal by tracking certificate expiration and completing the lifecycle once again to request a new certificate.
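
For reference, a hedged Certbot invocation might look like the following sketch - the directory URL, EAB values, domain, and email are placeholders you would copy from your certificate profile and environment:

```bash
# Request a certificate from the profile's ACME directory with Certbot
# (all <...> values are placeholders taken from your certificate profile)
certbot certonly --standalone \
  --server "<ACME Directory URL>" \
  --eab-kid "<EAB Key Identifier>" \
  --eab-hmac-key "<EAB Secret>" \
  -d service.acme.com \
  --email admin@acme.com \
  --agree-tos
```

Certbot's `--standalone` mode answers the HTTP-01 challenge on port 80 itself; use `--webroot` instead if an existing web server should serve the challenge files.
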

<Note>
We recommend reading more about the ACME protocol [here](https://letsencrypt.org/how-it-works/).
</Note>

</Step>
</Steps>