diff --git a/.env.example b/.env.example index bdb3e536d0..67110d69af 100644 --- a/.env.example +++ b/.env.example @@ -63,3 +63,7 @@ CLIENT_SECRET_GITHUB_LOGIN= CLIENT_ID_GITLAB_LOGIN= CLIENT_SECRET_GITLAB_LOGIN= + +CAPTCHA_SECRET= + +NEXT_PUBLIC_CAPTCHA_SITE_KEY= diff --git a/.github/workflows/check-api-for-breaking-changes.yml b/.github/workflows/check-api-for-breaking-changes.yml index dadd6c8605..914f23fc2c 100644 --- a/.github/workflows/check-api-for-breaking-changes.yml +++ b/.github/workflows/check-api-for-breaking-changes.yml @@ -47,7 +47,7 @@ jobs: - name: Wait for container to be stable and check logs run: | SECONDS=0 - r HEALTHY=0 + HEALTHY=0 while [ $SECONDS -lt 60 ]; do if docker ps | grep infisical-api | grep -q healthy; then echo "Container is healthy." diff --git a/.github/workflows/release_build_infisical_cli.yml b/.github/workflows/release_build_infisical_cli.yml index e4a5945e04..02c3492376 100644 --- a/.github/workflows/release_build_infisical_cli.yml +++ b/.github/workflows/release_build_infisical_cli.yml @@ -22,6 +22,9 @@ jobs: CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }} CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }} CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }} + CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }} + CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }} + CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }} goreleaser: runs-on: ubuntu-20.04 @@ -56,7 +59,7 @@ jobs: - uses: goreleaser/goreleaser-action@v4 with: distribution: goreleaser-pro - version: latest + version: v1.26.2-pro args: release --clean env: GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }} diff --git a/.github/workflows/run-cli-tests.yml b/.github/workflows/run-cli-tests.yml index e814f9143f..f8e9d77978 100644 --- a/.github/workflows/run-cli-tests.yml +++ b/.github/workflows/run-cli-tests.yml @@ -20,7 +20,12 @@ on: required: true CLI_TESTS_ENV_SLUG: required: 
true - + CLI_TESTS_USER_EMAIL: + required: true + CLI_TESTS_USER_PASSWORD: + required: true + CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: + required: true jobs: test: defaults: @@ -43,5 +48,8 @@ jobs: CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }} CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }} CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }} + CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }} + CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }} + INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }} run: go test -v -count=1 ./test diff --git a/Dockerfile.standalone-infisical b/Dockerfile.standalone-infisical index 0fb2a6671c..8ffe7e3dea 100644 --- a/Dockerfile.standalone-infisical +++ b/Dockerfile.standalone-infisical @@ -1,6 +1,7 @@ ARG POSTHOG_HOST=https://app.posthog.com ARG POSTHOG_API_KEY=posthog-api-key ARG INTERCOM_ID=intercom-id +ARG CAPTCHA_SITE_KEY=captcha-site-key FROM node:20-alpine AS base @@ -34,7 +35,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY ARG INTERCOM_ID ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID ARG INFISICAL_PLATFORM_VERSION -ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION +ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY # Build RUN npm run build @@ -110,6 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \ ARG INTERCOM_ID=intercom-id ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \ BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \ + BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY WORKDIR / diff --git a/README.md b/README.md index 80f754c029..a5ed5c6213 100644 --- a/README.md +++ b/README.md @@ -85,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed Linux/macOS: ```console -git clone 
https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up +git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up ``` Windows Command Prompt: ```console -git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up +git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up ``` Create an account at `http://localhost:80` diff --git a/backend/src/db/migrations/20240610181521_add-consecutive-failed-password-attempts-user.ts b/backend/src/db/migrations/20240610181521_add-consecutive-failed-password-attempts-user.ts new file mode 100644 index 0000000000..66fa031821 --- /dev/null +++ b/backend/src/db/migrations/20240610181521_add-consecutive-failed-password-attempts-user.ts @@ -0,0 +1,29 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn( + TableName.Users, + "consecutiveFailedPasswordAttempts" + ); + + await knex.schema.alterTable(TableName.Users, (tb) => { + if (!hasConsecutiveFailedPasswordAttempts) { + tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn( + TableName.Users, + "consecutiveFailedPasswordAttempts" + ); + + await knex.schema.alterTable(TableName.Users, (tb) => { + if (hasConsecutiveFailedPasswordAttempts) { + tb.dropColumn("consecutiveFailedPasswordAttempts"); + } + }); +} diff --git a/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts b/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts new file mode 
100644 index 0000000000..e37c24e2c8 --- /dev/null +++ b/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit"); + await knex.schema.alterTable(TableName.Project, (tb) => { + if (!hasPitVersionLimitColumn) { + tb.integer("pitVersionLimit").notNullable().defaultTo(10); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit"); + await knex.schema.alterTable(TableName.Project, (tb) => { + if (hasPitVersionLimitColumn) { + tb.dropColumn("pitVersionLimit"); + } + }); +} diff --git a/backend/src/db/migrations/20240607032218_certificate-mgmt.ts b/backend/src/db/migrations/20240612201737_certificate-mgmt.ts similarity index 100% rename from backend/src/db/migrations/20240607032218_certificate-mgmt.ts rename to backend/src/db/migrations/20240612201737_certificate-mgmt.ts diff --git a/backend/src/db/schemas/projects.ts b/backend/src/db/schemas/projects.ts index ea85e28f72..91035ab8e0 100644 --- a/backend/src/db/schemas/projects.ts +++ b/backend/src/db/schemas/projects.ts @@ -17,7 +17,8 @@ export const ProjectsSchema = z.object({ updatedAt: z.date(), version: z.number().default(1), upgradeStatus: z.string().nullable().optional(), - kmsCertificateKeyId: z.string().uuid().nullable().optional() + kmsCertificateKeyId: z.string().uuid().nullable().optional(), + pitVersionLimit: z.number().default(10) }); export type TProjects = z.infer; diff --git a/backend/src/db/schemas/users.ts b/backend/src/db/schemas/users.ts index 9e0b9a3b51..5134f3ee60 100644 --- a/backend/src/db/schemas/users.ts +++ b/backend/src/db/schemas/users.ts @@ -25,7 +25,8 @@ export const UsersSchema = z.object({ isEmailVerified: 
z.boolean().default(false).nullable().optional(), consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(), isLocked: z.boolean().default(false).nullable().optional(), - temporaryLockDateEnd: z.date().nullable().optional() + temporaryLockDateEnd: z.date().nullable().optional(), + consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional() }); export type TUsers = z.infer; diff --git a/backend/src/ee/routes/v1/scim-router.ts b/backend/src/ee/routes/v1/scim-router.ts index 8965c28f3b..0a45486ef1 100644 --- a/backend/src/ee/routes/v1/scim-router.ts +++ b/backend/src/ee/routes/v1/scim-router.ts @@ -362,6 +362,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { const groups = await req.server.services.scim.listScimGroups({ orgId: req.permission.orgId, startIndex: req.query.startIndex, + filter: req.query.filter, limit: req.query.count }); diff --git a/backend/src/ee/routes/v1/secret-approval-policy-router.ts b/backend/src/ee/routes/v1/secret-approval-policy-router.ts index f6a9556258..b09b58e261 100644 --- a/backend/src/ee/routes/v1/secret-approval-policy-router.ts +++ b/backend/src/ee/routes/v1/secret-approval-policy-router.ts @@ -1,6 +1,7 @@ import { nanoid } from "nanoid"; import { z } from "zod"; +import { removeTrailingSlash } from "@app/lib/fn"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { sapPubSchema } from "@app/server/routes/sanitizedSchemas"; @@ -19,7 +20,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi workspaceId: z.string(), name: z.string().optional(), environment: z.string(), - secretPath: z.string().optional().nullable(), + secretPath: z + .string() + .optional() + .nullable() + .transform((val) => (val ? 
removeTrailingSlash(val) : val)), approvers: z.string().array().min(1), approvals: z.number().min(1).default(1) }) @@ -63,7 +68,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi name: z.string().optional(), approvers: z.string().array().min(1), approvals: z.number().min(1).default(1), - secretPath: z.string().optional().nullable() + secretPath: z + .string() + .optional() + .nullable() + .transform((val) => (val ? removeTrailingSlash(val) : val)) }) .refine((data) => data.approvals <= data.approvers.length, { path: ["approvals"], @@ -157,7 +166,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi querystring: z.object({ workspaceId: z.string().trim(), environment: z.string().trim(), - secretPath: z.string().trim() + secretPath: z.string().trim().transform(removeTrailingSlash) }), response: { 200: z.object({ diff --git a/backend/src/ee/services/ldap-config/ldap-config-service.ts b/backend/src/ee/services/ldap-config/ldap-config-service.ts index 6773c9486f..dd49bd0aeb 100644 --- a/backend/src/ee/services/ldap-config/ldap-config-service.ts +++ b/backend/src/ee/services/ldap-config/ldap-config-service.ts @@ -77,7 +77,7 @@ type TLdapConfigServiceFactoryDep = { >; userAliasDAL: Pick; permissionService: Pick; - licenseService: Pick; + licenseService: Pick; }; export type TLdapConfigServiceFactory = ReturnType; @@ -510,6 +510,7 @@ export const ldapConfigServiceFactory = ({ return newUserAlias; }); } + await licenseService.updateSubscriptionOrgMemberCount(organization.id); const user = await userDAL.transaction(async (tx) => { const newUser = await userDAL.findOne({ id: userAlias.userId }, tx); diff --git a/backend/src/ee/services/saml-config/saml-config-service.ts b/backend/src/ee/services/saml-config/saml-config-service.ts index 7dfd211e13..5d7b7ec3b9 100644 --- a/backend/src/ee/services/saml-config/saml-config-service.ts +++ b/backend/src/ee/services/saml-config/saml-config-service.ts @@ -50,7 +50,7 @@ 
type TSamlConfigServiceFactoryDep = { orgMembershipDAL: Pick; orgBotDAL: Pick; permissionService: Pick; - licenseService: Pick; + licenseService: Pick; tokenService: Pick; smtpService: Pick; }; @@ -449,6 +449,7 @@ export const samlConfigServiceFactory = ({ return newUser; }); } + await licenseService.updateSubscriptionOrgMemberCount(organization.id); const isUserCompleted = Boolean(user.isAccepted); const providerAuthToken = jwt.sign( diff --git a/backend/src/ee/services/scim/scim-fns.ts b/backend/src/ee/services/scim/scim-fns.ts index ec54a4d1fc..08b6521851 100644 --- a/backend/src/ee/services/scim/scim-fns.ts +++ b/backend/src/ee/services/scim/scim-fns.ts @@ -18,6 +18,20 @@ export const buildScimUserList = ({ }; }; +export const parseScimFilter = (filterToParse: string | undefined) => { + if (!filterToParse) return {}; + const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim()); + + let attributeName = parsedName; + if (parsedName === "userName") { + attributeName = "email"; + } else if (parsedName === "displayName") { + attributeName = "name"; + } + + return { [attributeName]: parsedValue.replace(/"/g, "") }; +}; + export const buildScimUser = ({ orgMembershipId, username, diff --git a/backend/src/ee/services/scim/scim-service.ts b/backend/src/ee/services/scim/scim-service.ts index 9a084c6d71..dc175f15b9 100644 --- a/backend/src/ee/services/scim/scim-service.ts +++ b/backend/src/ee/services/scim/scim-service.ts @@ -30,7 +30,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types"; import { TLicenseServiceFactory } from "../license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPermissionServiceFactory } from "../permission/permission-service"; -import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns"; +import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from 
"./scim-fns"; import { TCreateScimGroupDTO, TCreateScimTokenDTO, @@ -184,18 +184,6 @@ export const scimServiceFactory = ({ status: 403 }); - const parseFilter = (filterToParse: string | undefined) => { - if (!filterToParse) return {}; - const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim()); - - let attributeName = parsedName; - if (parsedName === "userName") { - attributeName = "email"; - } - - return { [attributeName]: parsedValue.replace(/"/g, "") }; - }; - const findOpts = { ...(startIndex && { offset: startIndex - 1 }), ...(limit && { limit }) @@ -204,7 +192,7 @@ export const scimServiceFactory = ({ const users = await orgDAL.findMembership( { [`${TableName.OrgMembership}.orgId` as "id"]: orgId, - ...parseFilter(filter) + ...parseScimFilter(filter) }, findOpts ); @@ -391,7 +379,7 @@ export const scimServiceFactory = ({ ); } } - + await licenseService.updateSubscriptionOrgMemberCount(org.id); return { user, orgMembership }; }); @@ -557,7 +545,7 @@ export const scimServiceFactory = ({ return {}; // intentionally return empty object upon success }; - const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => { + const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => { const plan = await licenseService.getPlan(orgId); if (!plan.groups) throw new BadRequestError({ @@ -580,7 +568,8 @@ export const scimServiceFactory = ({ const groups = await groupDAL.findGroups( { - orgId + orgId, + ...(filter && parseScimFilter(filter)) }, { offset: startIndex - 1, diff --git a/backend/src/ee/services/scim/scim-types.ts b/backend/src/ee/services/scim/scim-types.ts index 46ab90b8f0..cffc804070 100644 --- a/backend/src/ee/services/scim/scim-types.ts +++ b/backend/src/ee/services/scim/scim-types.ts @@ -66,6 +66,7 @@ export type TDeleteScimUserDTO = { export type TListScimGroupsDTO = { startIndex: number; + filter?: string; limit: number; orgId: string; }; diff --git 
a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts index 8ddadb9bf6..f99384de61 100644 --- a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts +++ b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts @@ -4,6 +4,7 @@ import picomatch from "picomatch"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError } from "@app/lib/errors"; +import { removeTrailingSlash } from "@app/lib/fn"; import { containsGlobPatterns } from "@app/lib/picomatch"; import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; @@ -207,7 +208,8 @@ export const secretApprovalPolicyServiceFactory = ({ return sapPolicies; }; - const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => { + const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => { + const secretPath = removeTrailingSlash(path); const env = await projectEnvDAL.findOne({ slug: environment, projectId }); if (!env) throw new BadRequestError({ message: "Environment not found" }); diff --git a/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts b/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts index bd87505776..3e11429694 100644 --- a/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts +++ b/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts @@ -81,8 +81,7 @@ export const secretSnapshotServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) throw new BadRequestError({ 
message: "Folder not found" }); - const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id); - return count; + return snapshotDAL.countOfSnapshotsByFolderId(folder.id); }; const listSnapshots = async ({ diff --git a/backend/src/ee/services/secret-snapshot/snapshot-dal.ts b/backend/src/ee/services/secret-snapshot/snapshot-dal.ts index cdd5a999b8..92c6b611dd 100644 --- a/backend/src/ee/services/secret-snapshot/snapshot-dal.ts +++ b/backend/src/ee/services/secret-snapshot/snapshot-dal.ts @@ -1,3 +1,4 @@ +/* eslint-disable no-await-in-loop */ import { Knex } from "knex"; import { TDbClient } from "@app/db"; @@ -11,6 +12,7 @@ import { } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; export type TSnapshotDALFactory = ReturnType; @@ -325,12 +327,152 @@ export const snapshotDALFactory = (db: TDbClient) => { } }; + /** + * Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder. + * + * This function operates in three main steps: + * 1. Pruning snapshots from root/non-versioned folders. + * 2. Pruning snapshots from versioned folders. + * 3. Removing orphaned snapshots that do not belong to any existing folder or folder version. + * + * The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant, + * to manage the large datasets without overwhelming the DB. + * + * Steps: + * - Fetch a batch of folder IDs. + * - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date. + * - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`). + * - Repeat the process for versioned folders. + * - Finally, delete orphaned snapshots that do not have an associated folder. 
+ */ + const pruneExcessSnapshots = async () => { + const PRUNE_FOLDER_BATCH_SIZE = 10000; + + try { + let uuidOffset = "00000000-0000-0000-0000-000000000000"; + // cleanup snapshots from root/non-versioned folders + // eslint-disable-next-line no-constant-condition, no-unreachable-loop + while (true) { + const folderBatch = await db(TableName.SecretFolder) + .where("id", ">", uuidOffset) + .where("isReserved", false) + .orderBy("id", "asc") + .limit(PRUNE_FOLDER_BATCH_SIZE) + .select("id"); + + const batchEntries = folderBatch.map((folder) => folder.id); + + if (folderBatch.length) { + try { + logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`); + await db(TableName.Snapshot) + .with("snapshot_cte", (qb) => { + void qb + .from(TableName.Snapshot) + .whereIn(`${TableName.Snapshot}.folderId`, batchEntries) + .select( + "folderId", + `${TableName.Snapshot}.id as id`, + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`) + .whereNull(`${TableName.SecretFolder}.parentId`) + .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (err) { + logger.error( + `Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${ + batchEntries[batchEntries.length - 1] + }` + ); + } finally { + uuidOffset = batchEntries[batchEntries.length - 1]; + } + } else { + break; + } + } + + // cleanup snapshots from versioned folders + uuidOffset = "00000000-0000-0000-0000-000000000000"; + // eslint-disable-next-line 
no-constant-condition + while (true) { + const folderBatch = await db(TableName.SecretFolderVersion) + .select("folderId") + .distinct("folderId") + .where("folderId", ">", uuidOffset) + .orderBy("folderId", "asc") + .limit(PRUNE_FOLDER_BATCH_SIZE); + + const batchEntries = folderBatch.map((folder) => folder.folderId); + + if (folderBatch.length) { + try { + logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`); + await db(TableName.Snapshot) + .with("snapshot_cte", (qb) => { + void qb + .from(TableName.Snapshot) + .whereIn(`${TableName.Snapshot}.folderId`, batchEntries) + .select( + "folderId", + `${TableName.Snapshot}.id as id`, + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num` + ) + ); + }) + .join( + TableName.SecretFolderVersion, + `${TableName.SecretFolderVersion}.folderId`, + `${TableName.Snapshot}.folderId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`) + .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (err) { + logger.error( + `Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${ + batchEntries[batchEntries.length - 1] + }` + ); + } finally { + uuidOffset = batchEntries[batchEntries.length - 1]; + } + } else { + break; + } + } + + // cleanup orphaned snapshots (those that don't belong to an existing folder and folder version) + await db(TableName.Snapshot) + .whereNotIn("folderId", (qb) => { + void qb + .select("folderId") + .from(TableName.SecretFolderVersion) + .union((qb1) => void qb1.select("id").from(TableName.SecretFolder)); + }) + .delete(); + } catch (error) { + throw new DatabaseError({ error, name: "SnapshotPrune" 
}); + } + }; + return { ...secretSnapshotOrm, findById, findLatestSnapshotByFolderId, findRecursivelySnapshots, countOfSnapshotsByFolderId, - findSecretSnapshotDataById + findSecretSnapshotDataById, + pruneExcessSnapshots }; }; diff --git a/backend/src/lib/api-docs/constants.ts b/backend/src/lib/api-docs/constants.ts index da82016f13..1637b266a8 100644 --- a/backend/src/lib/api-docs/constants.ts +++ b/backend/src/lib/api-docs/constants.ts @@ -386,6 +386,8 @@ export const SECRET_IMPORTS = { environment: "The slug of the environment to import into.", path: "The path to import into.", workspaceId: "The ID of the project you are working in.", + isReplication: + "When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.", import: { environment: "The slug of the environment to import from.", path: "The path to import from." @@ -674,7 +676,10 @@ export const INTEGRATION = { secretGCPLabel: "The label for GCP secrets.", secretAWSTag: "The tags for AWS secrets.", kmsKeyId: "The ID of the encryption key from AWS KMS.", - shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store." 
+ shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.", + shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.", + shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.", + shouldEnableDelete: "The flag to enable deletion of secrets" } }, UPDATE: { diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts index 2caae9ec5d..80a2111fcb 100644 --- a/backend/src/lib/config/env.ts +++ b/backend/src/lib/config/env.ts @@ -39,7 +39,9 @@ const envSchema = z HTTPS_ENABLED: zodStrBool, // smtp options SMTP_HOST: zpStr(z.string().optional()), - SMTP_SECURE: zodStrBool, + SMTP_IGNORE_TLS: zodStrBool.default("false"), + SMTP_REQUIRE_TLS: zodStrBool.default("true"), + SMTP_TLS_REJECT_UNAUTHORIZED: zodStrBool.default("true"), SMTP_PORT: z.coerce.number().default(587), SMTP_USERNAME: zpStr(z.string().optional()), SMTP_PASSWORD: zpStr(z.string().optional()), @@ -120,7 +122,8 @@ const envSchema = z .transform((val) => val === "true") .optional(), INFISICAL_CLOUD: zodStrBool.default("false"), - MAINTENANCE_MODE: zodStrBool.default("false") + MAINTENANCE_MODE: zodStrBool.default("false"), + CAPTCHA_SECRET: zpStr(z.string().optional()) }) .transform((data) => ({ ...data, @@ -152,13 +155,20 @@ export const initEnvConfig = (logger: Logger) => { return envCfg; }; -export const formatSmtpConfig = () => ({ - host: envCfg.SMTP_HOST, - port: envCfg.SMTP_PORT, - auth: - envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD - ? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD } - : undefined, - secure: envCfg.SMTP_SECURE, - from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>` -}); +export const formatSmtpConfig = () => { + return { + host: envCfg.SMTP_HOST, + port: envCfg.SMTP_PORT, + auth: + envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD + ? 
{ user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD } + : undefined, + secure: envCfg.SMTP_PORT === 465, + from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`, + ignoreTLS: envCfg.SMTP_IGNORE_TLS, + requireTLS: envCfg.SMTP_REQUIRE_TLS, + tls: { + rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED + } + }; +}; diff --git a/backend/src/server/boot-strap-check.ts b/backend/src/server/boot-strap-check.ts index 381e575efa..ceaef59e7f 100644 --- a/backend/src/server/boot-strap-check.ts +++ b/backend/src/server/boot-strap-check.ts @@ -5,7 +5,6 @@ import { createTransport } from "nodemailer"; import { formatSmtpConfig, getConfig } from "@app/lib/config/env"; import { logger } from "@app/lib/logger"; -import { getTlsOption } from "@app/services/smtp/smtp-service"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; type BootstrapOpt = { @@ -44,7 +43,7 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => { console.info("Testing smtp connection"); const smtpCfg = formatSmtpConfig(); - await createTransport({ ...smtpCfg, ...getTlsOption(smtpCfg.host, smtpCfg.secure) }) + await createTransport(smtpCfg) .verify() .then(async () => { console.info("SMTP successfully connected"); diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index 30fce02726..d9f4bf029d 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -878,6 +878,9 @@ export const registerRoutes = async ( const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({ auditLogDAL, queueService, + secretVersionDAL, + secretFolderVersionDAL: folderVersionDAL, + snapshotDAL, identityAccessTokenDAL, secretSharingDAL }); diff --git a/backend/src/server/routes/v1/integration-router.ts b/backend/src/server/routes/v1/integration-router.ts index bdb58aa8b3..97a7f4d7a2 100644 --- a/backend/src/server/routes/v1/integration-router.ts +++ b/backend/src/server/routes/v1/integration-router.ts @@ 
-8,7 +8,7 @@ import { writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; -import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list"; +import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema"; import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types"; export const registerIntegrationRouter = async (server: FastifyZodProvider) => { @@ -46,36 +46,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { path: z.string().trim().optional().describe(INTEGRATION.CREATE.path), region: z.string().trim().optional().describe(INTEGRATION.CREATE.region), scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope), - metadata: z - .object({ - secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix), - secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix), - initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir), - mappingBehavior: z - .nativeEnum(IntegrationMappingBehavior) - .optional() - .describe(INTEGRATION.CREATE.metadata.mappingBehavior), - shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy), - secretGCPLabel: z - .object({ - labelName: z.string(), - labelValue: z.string() - }) - .optional() - .describe(INTEGRATION.CREATE.metadata.secretGCPLabel), - secretAWSTag: z - .array( - z.object({ - key: z.string(), - value: z.string() - }) - ) - .optional() - .describe(INTEGRATION.CREATE.metadata.secretAWSTag), - kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId), - shouldDisableDelete: 
z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete) - }) - .default({}) + metadata: IntegrationMetadataSchema.default({}) }), response: { 200: z.object({ @@ -161,33 +132,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment), owner: z.string().trim().describe(INTEGRATION.UPDATE.owner), environment: z.string().trim().describe(INTEGRATION.UPDATE.environment), - metadata: z - .object({ - secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix), - secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix), - initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir), - mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior), - shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy), - secretGCPLabel: z - .object({ - labelName: z.string(), - labelValue: z.string() - }) - .optional() - .describe(INTEGRATION.CREATE.metadata.secretGCPLabel), - secretAWSTag: z - .array( - z.object({ - key: z.string(), - value: z.string() - }) - ) - .optional() - .describe(INTEGRATION.CREATE.metadata.secretAWSTag), - kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId), - shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete) - }) - .optional() + metadata: IntegrationMetadataSchema.optional() }), response: { 200: z.object({ diff --git a/backend/src/server/routes/v1/project-router.ts b/backend/src/server/routes/v1/project-router.ts index 1cf655a973..0984b66f61 100644 --- a/backend/src/server/routes/v1/project-router.ts +++ b/backend/src/server/routes/v1/project-router.ts @@ -334,6 +334,44 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: 
"PUT", + url: "/:workspaceSlug/version-limit", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceSlug: z.string().trim() + }), + body: z.object({ + pitVersionLimit: z.number().min(1).max(100) + }), + response: { + 200: z.object({ + message: z.string(), + workspace: ProjectsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const workspace = await server.services.project.updateVersionLimit({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + pitVersionLimit: req.body.pitVersionLimit, + workspaceSlug: req.params.workspaceSlug + }); + + return { + message: "Successfully changed workspace version limit", + workspace + }; + } + }); + server.route({ method: "GET", url: "/:workspaceId/integrations", diff --git a/backend/src/server/routes/v1/secret-import-router.ts b/backend/src/server/routes/v1/secret-import-router.ts index 50311273c4..ca604e7382 100644 --- a/backend/src/server/routes/v1/secret-import-router.ts +++ b/backend/src/server/routes/v1/secret-import-router.ts @@ -30,7 +30,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) => environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment), path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path) }), - isReplication: z.boolean().default(false) + isReplication: z.boolean().default(false).describe(SECRET_IMPORTS.CREATE.isReplication) }), response: { 200: z.object({ diff --git a/backend/src/server/routes/v3/login-router.ts b/backend/src/server/routes/v3/login-router.ts index 900ad56d27..4c7df5612b 100644 --- a/backend/src/server/routes/v3/login-router.ts +++ b/backend/src/server/routes/v3/login-router.ts @@ -80,7 +80,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { body: z.object({ email: z.string().trim(), providerAuthToken: 
z.string().trim().optional(), - clientProof: z.string().trim() + clientProof: z.string().trim(), + captchaToken: z.string().trim().optional() }), response: { 200: z.discriminatedUnion("mfaEnabled", [ @@ -106,6 +107,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { const appCfg = getConfig(); const data = await server.services.login.loginExchangeClientProof({ + captchaToken: req.body.captchaToken, email: req.body.email, ip: req.realIp, userAgent, diff --git a/backend/src/services/auth/auth-login-service.ts b/backend/src/services/auth/auth-login-service.ts index cbf43b2454..a136508e7f 100644 --- a/backend/src/services/auth/auth-login-service.ts +++ b/backend/src/services/auth/auth-login-service.ts @@ -3,6 +3,7 @@ import jwt from "jsonwebtoken"; import { TUsers, UserDeviceSchema } from "@app/db/schemas"; import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns"; import { getConfig } from "@app/lib/config/env"; +import { request } from "@app/lib/config/request"; import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto"; import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; @@ -176,12 +177,16 @@ export const authLoginServiceFactory = ({ clientProof, ip, userAgent, - providerAuthToken + providerAuthToken, + captchaToken }: TLoginClientProofDTO) => { + const appCfg = getConfig(); + const userEnc = await userDAL.findUserEncKeyByUsername({ username: email }); if (!userEnc) throw new Error("Failed to find user"); + const user = await userDAL.findById(userEnc.userId); const cfg = getConfig(); let authMethod = AuthMethod.EMAIL; @@ -196,6 +201,31 @@ export const authLoginServiceFactory = ({ } } + if ( + user.consecutiveFailedPasswordAttempts && + user.consecutiveFailedPasswordAttempts >= 10 && + Boolean(appCfg.CAPTCHA_SECRET) + ) { + if (!captchaToken) { + throw new BadRequestError({ + name: "Captcha 
Required", + message: "Accomplish the required captcha by logging in via Web" + }); + } + + // validate captcha token + const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", { + response: captchaToken, + secret: appCfg.CAPTCHA_SECRET + }); + + if (!response.data.success) { + throw new BadRequestError({ + name: "Invalid Captcha" + }); + } + } + if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?"); const isValidClientProof = await srpCheckClientProof( userEnc.salt, @@ -204,15 +234,31 @@ export const authLoginServiceFactory = ({ userEnc.clientPublicKey, clientProof ); - if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?"); + + if (!isValidClientProof) { + await userDAL.update( + { id: userEnc.userId }, + { + $incr: { + consecutiveFailedPasswordAttempts: 1 + } + } + ); + + throw new Error("Failed to authenticate. Try again?"); + } await userDAL.updateUserEncryptionByUserId(userEnc.userId, { serverPrivateKey: null, clientPublicKey: null }); + + await userDAL.updateById(userEnc.userId, { + consecutiveFailedPasswordAttempts: 0 + }); + // send multi factor auth token if they it enabled if (userEnc.isMfaEnabled && userEnc.email) { - const user = await userDAL.findById(userEnc.userId); enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); const mfaToken = jwt.sign( diff --git a/backend/src/services/auth/auth-login-type.ts b/backend/src/services/auth/auth-login-type.ts index 37b90f548b..4f73ec9961 100644 --- a/backend/src/services/auth/auth-login-type.ts +++ b/backend/src/services/auth/auth-login-type.ts @@ -12,6 +12,7 @@ export type TLoginClientProofDTO = { providerAuthToken?: string; ip: string; userAgent: string; + captchaToken?: string; }; export type TVerifyMfaTokenDTO = { diff --git a/backend/src/services/auth/auth-signup-service.ts b/backend/src/services/auth/auth-signup-service.ts index be7f5777db..528cb44fab 100644 
--- a/backend/src/services/auth/auth-signup-service.ts +++ b/backend/src/services/auth/auth-signup-service.ts @@ -231,7 +231,7 @@ export const authSignupServiceFactory = ({ const accessToken = jwt.sign( { - authMethod: AuthMethod.EMAIL, + authMethod: authMethod || AuthMethod.EMAIL, authTokenType: AuthTokenType.ACCESS_TOKEN, userId: updateduser.info.id, tokenVersionId: tokenSession.id, @@ -244,7 +244,7 @@ export const authSignupServiceFactory = ({ const refreshToken = jwt.sign( { - authMethod: AuthMethod.EMAIL, + authMethod: authMethod || AuthMethod.EMAIL, authTokenType: AuthTokenType.REFRESH_TOKEN, userId: updateduser.info.id, tokenVersionId: tokenSession.id, diff --git a/backend/src/services/integration-auth/integration-sync-secret.ts b/backend/src/services/integration-auth/integration-sync-secret.ts index 0ae0a0275d..6351b4d824 100644 --- a/backend/src/services/integration-auth/integration-sync-secret.ts +++ b/backend/src/services/integration-auth/integration-sync-secret.ts @@ -31,6 +31,7 @@ import { logger } from "@app/lib/logger"; import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types"; import { TIntegrationDALFactory } from "../integration/integration-dal"; +import { IntegrationMetadataSchema } from "../integration/integration-schema"; import { IntegrationInitialSyncBehavior, IntegrationMappingBehavior, @@ -1363,38 +1364,41 @@ const syncSecretsGitHub = async ({ } } - for await (const encryptedSecret of encryptedSecrets) { - if ( - !(encryptedSecret.name in secrets) && - !(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) && - !(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix)) - ) { - switch (integration.scope) { - case GithubScope.Org: { - await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", { - org: integration.owner as string, - secret_name: encryptedSecret.name - }); - break; - } - case GithubScope.Env: { - await 
octokit.request( - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - { - repository_id: Number(integration.appId), - environment_name: integration.targetEnvironmentId as string, + const metadata = IntegrationMetadataSchema.parse(integration.metadata); + if (metadata.shouldEnableDelete) { + for await (const encryptedSecret of encryptedSecrets) { + if ( + !(encryptedSecret.name in secrets) && + !(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) && + !(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix)) + ) { + switch (integration.scope) { + case GithubScope.Org: { + await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", { + org: integration.owner as string, secret_name: encryptedSecret.name - } - ); - break; - } - default: { - await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", { - owner: integration.owner as string, - repo: integration.app as string, - secret_name: encryptedSecret.name - }); - break; + }); + break; + } + case GithubScope.Env: { + await octokit.request( + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + { + repository_id: Number(integration.appId), + environment_name: integration.targetEnvironmentId as string, + secret_name: encryptedSecret.name + } + ); + break; + } + default: { + await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", { + owner: integration.owner as string, + repo: integration.app as string, + secret_name: encryptedSecret.name + }); + break; + } } } } @@ -1917,13 +1921,13 @@ const syncSecretsGitLab = async ({ return allEnvVariables; }; + const metadata = IntegrationMetadataSchema.parse(integration.metadata); const allEnvVariables = await getAllEnvVariables(integration?.appId as string, accessToken); const getSecretsRes: GitLabSecret[] = allEnvVariables .filter((secret: GitLabSecret) => 
secret.environment_scope === integration.targetEnvironment) .filter((gitLabSecret) => { let isValid = true; - const metadata = z.record(z.any()).parse(integration.metadata); if (metadata.secretPrefix && !gitLabSecret.key.startsWith(metadata.secretPrefix)) { isValid = false; } @@ -1943,8 +1947,8 @@ const syncSecretsGitLab = async ({ { key, value: secrets[key].value, - protected: false, - masked: false, + protected: Boolean(metadata.shouldProtectSecrets), + masked: Boolean(metadata.shouldMaskSecrets), raw: false, environment_scope: integration.targetEnvironment }, @@ -1961,7 +1965,9 @@ const syncSecretsGitLab = async ({ `${gitLabApiUrl}/v4/projects/${integration?.appId}/variables/${existingSecret.key}?filter[environment_scope]=${integration.targetEnvironment}`, { ...existingSecret, - value: secrets[existingSecret.key].value + value: secrets[existingSecret.key].value, + protected: Boolean(metadata.shouldProtectSecrets), + masked: Boolean(metadata.shouldMaskSecrets) }, { headers: { diff --git a/backend/src/services/integration/integration-schema.ts b/backend/src/services/integration/integration-schema.ts new file mode 100644 index 0000000000..1ea01e56a8 --- /dev/null +++ b/backend/src/services/integration/integration-schema.ts @@ -0,0 +1,37 @@ +import { z } from "zod"; + +import { INTEGRATION } from "@app/lib/api-docs"; + +import { IntegrationMappingBehavior } from "../integration-auth/integration-list"; + +export const IntegrationMetadataSchema = z.object({ + secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix), + secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix), + initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir), + mappingBehavior: z + .nativeEnum(IntegrationMappingBehavior) + .optional() + .describe(INTEGRATION.CREATE.metadata.mappingBehavior), + shouldAutoRedeploy: 
z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy), + secretGCPLabel: z + .object({ + labelName: z.string(), + labelValue: z.string() + }) + .optional() + .describe(INTEGRATION.CREATE.metadata.secretGCPLabel), + secretAWSTag: z + .array( + z.object({ + key: z.string(), + value: z.string() + }) + ) + .optional() + .describe(INTEGRATION.CREATE.metadata.secretAWSTag), + kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId), + shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete), + shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete), + shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets), + shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets) +}); diff --git a/backend/src/services/integration/integration-types.ts b/backend/src/services/integration/integration-types.ts index 9c75cad2d5..abbccbe90b 100644 --- a/backend/src/services/integration/integration-types.ts +++ b/backend/src/services/integration/integration-types.ts @@ -29,6 +29,9 @@ export type TCreateIntegrationDTO = { }[]; kmsKeyId?: string; shouldDisableDelete?: boolean; + shouldMaskSecrets?: boolean; + shouldProtectSecrets?: boolean; + shouldEnableDelete?: boolean; }; } & Omit; @@ -54,6 +57,7 @@ export type TUpdateIntegrationDTO = { }[]; kmsKeyId?: string; shouldDisableDelete?: boolean; + shouldEnableDelete?: boolean; }; } & Omit; diff --git a/backend/src/services/org/org-service.ts b/backend/src/services/org/org-service.ts index 60ddc52306..68d2b8cda2 100644 --- a/backend/src/services/org/org-service.ts +++ b/backend/src/services/org/org-service.ts @@ -336,6 +336,7 @@ export const orgServiceFactory = ({ return org; }); + await licenseService.updateSubscriptionOrgMemberCount(organization.id); return organization; }; diff --git 
a/backend/src/services/project/project-service.ts b/backend/src/services/project/project-service.ts index aeff701264..1cf2c2f71a 100644 --- a/backend/src/services/project/project-service.ts +++ b/backend/src/services/project/project-service.ts @@ -42,6 +42,7 @@ import { TToggleProjectAutoCapitalizationDTO, TUpdateProjectDTO, TUpdateProjectNameDTO, + TUpdateProjectVersionLimitDTO, TUpgradeProjectDTO } from "./project-types"; @@ -141,7 +142,8 @@ export const projectServiceFactory = ({ name: workspaceName, orgId: organization.id, slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`), - version: ProjectVersion.V2 + version: ProjectVersion.V2, + pitVersionLimit: 10 }, tx ); @@ -414,6 +416,35 @@ export const projectServiceFactory = ({ return updatedProject; }; + const updateVersionLimit = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + pitVersionLimit, + workspaceSlug + }: TUpdateProjectVersionLimitDTO) => { + const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId); + if (!project) { + throw new BadRequestError({ + message: "Project not found" + }); + } + + const { hasRole } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + if (!hasRole(ProjectMembershipRole.Admin)) + throw new BadRequestError({ message: "Only admins are allowed to take this action" }); + + return projectDAL.updateById(project.id, { pitVersionLimit }); + }; + const updateName = async ({ projectId, actor, @@ -577,6 +608,7 @@ export const projectServiceFactory = ({ updateName, upgradeProject, listProjectCas, - listProjectCertificates + listProjectCertificates, + updateVersionLimit }; }; diff --git a/backend/src/services/project/project-types.ts b/backend/src/services/project/project-types.ts index e04d3663ff..39e0477483 100644 --- a/backend/src/services/project/project-types.ts +++ b/backend/src/services/project/project-types.ts @@ -44,6 +44,11 @@ export type 
TToggleProjectAutoCapitalizationDTO = { autoCapitalization: boolean; } & TProjectPermission; +export type TUpdateProjectVersionLimitDTO = { + pitVersionLimit: number; + workspaceSlug: string; +} & Omit; + export type TUpdateProjectNameDTO = { name: string; } & TProjectPermission; diff --git a/backend/src/services/resource-cleanup/resource-cleanup-queue.ts b/backend/src/services/resource-cleanup/resource-cleanup-queue.ts index afae2677f7..2e01e35494 100644 --- a/backend/src/services/resource-cleanup/resource-cleanup-queue.ts +++ b/backend/src/services/resource-cleanup/resource-cleanup-queue.ts @@ -1,13 +1,19 @@ import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal"; +import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; +import { TSecretVersionDALFactory } from "../secret/secret-version-dal"; +import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal"; import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal"; type TDailyResourceCleanUpQueueServiceFactoryDep = { auditLogDAL: Pick; identityAccessTokenDAL: Pick; + secretVersionDAL: Pick; + secretFolderVersionDAL: Pick; + snapshotDAL: Pick; secretSharingDAL: Pick; queueService: TQueueServiceFactory; }; @@ -17,6 +23,9 @@ export type TDailyResourceCleanUpQueueServiceFactory = ReturnType { @@ -25,6 +34,9 @@ export const dailyResourceCleanUpQueueServiceFactory = ({ await auditLogDAL.pruneAuditLog(); await identityAccessTokenDAL.removeExpiredTokens(); await secretSharingDAL.pruneExpiredSharedSecrets(); + await snapshotDAL.pruneExcessSnapshots(); + await secretVersionDAL.pruneExcessVersions(); + await secretFolderVersionDAL.pruneExcessVersions(); logger.info(`${QueueName.DailyResourceCleanUp}: queue task 
completed`); }); diff --git a/backend/src/services/secret-folder/secret-folder-version-dal.ts b/backend/src/services/secret-folder/secret-folder-version-dal.ts index 73b536b48e..fb68ce8015 100644 --- a/backend/src/services/secret-folder/secret-folder-version-dal.ts +++ b/backend/src/services/secret-folder/secret-folder-version-dal.ts @@ -62,5 +62,32 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => { } }; - return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId }; + const pruneExcessVersions = async () => { + try { + await db(TableName.SecretFolderVersion) + .with("folder_cte", (qb) => { + void qb + .from(TableName.SecretFolderVersion) + .select( + "id", + "folderId", + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretFolderVersion}."folderId" ORDER BY ${TableName.SecretFolderVersion}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`) + .whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (error) { + throw new DatabaseError({ + error, + name: "Secret Folder Version Prune" + }); + } + }; + + return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions }; }; diff --git a/backend/src/services/secret/secret-fns.ts b/backend/src/services/secret/secret-fns.ts index 3cd6c4e6eb..6758f48157 100644 --- a/backend/src/services/secret/secret-fns.ts +++ b/backend/src/services/secret/secret-fns.ts @@ -309,7 +309,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD }; const expandSecrets = async ( - secrets: Record + secrets: Record ) => { const expandedSec: Record = {}; const interpolatedSec: Record = {}; @@ -329,8 +329,8 @@ export const 
interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD // should not do multi line encoding if user has set it to skip // eslint-disable-next-line secrets[key].value = secrets[key].skipMultilineEncoding - ? expandedSec[key] - : formatMultiValueEnv(expandedSec[key]); + ? formatMultiValueEnv(expandedSec[key]) + : expandedSec[key]; // eslint-disable-next-line continue; } @@ -347,7 +347,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD ); // eslint-disable-next-line - secrets[key].value = secrets[key].skipMultilineEncoding ? expandedVal : formatMultiValueEnv(expandedVal); + secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal; } return secrets; @@ -395,7 +395,8 @@ export const decryptSecretRaw = ( type: secret.type, _id: secret.id, id: secret.id, - user: secret.userId + user: secret.userId, + skipMultilineEncoding: secret.skipMultilineEncoding }; }; diff --git a/backend/src/services/secret/secret-queue.ts b/backend/src/services/secret/secret-queue.ts index d40a18e5ef..42e13b4456 100644 --- a/backend/src/services/secret/secret-queue.ts +++ b/backend/src/services/secret/secret-queue.ts @@ -1,4 +1,6 @@ /* eslint-disable no-await-in-loop */ +import { AxiosError } from "axios"; + import { getConfig } from "@app/lib/config/env"; import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; import { daysToMillisecond, secondsToMillis } from "@app/lib/dates"; @@ -67,7 +69,10 @@ const MAX_SYNC_SECRET_DEPTH = 5; export const uniqueSecretQueueKey = (environment: string, secretPath: string) => `secret-queue-dedupe-${environment}-${secretPath}`; -type TIntegrationSecret = Record; +type TIntegrationSecret = Record< + string, + { value: string; comment?: string; skipMultilineEncoding?: boolean | null | undefined } +>; export const secretQueueFactory = ({ queueService, integrationDAL, @@ -567,11 +572,14 @@ export const secretQueueFactory = ({ isSynced: true }); } catch (err: 
unknown) { - logger.info("Secret integration sync error:", err); + logger.info("Secret integration sync error: %o", err); + const message = + err instanceof AxiosError ? JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message; + await integrationDAL.updateById(integration.id, { lastSyncJobId: job.id, lastUsed: new Date(), - syncMessage: (err as Error)?.message, + syncMessage: message, isSynced: false }); } diff --git a/backend/src/services/secret/secret-service.ts b/backend/src/services/secret/secret-service.ts index 5688f7f152..d6682a2536 100644 --- a/backend/src/services/secret/secret-service.ts +++ b/backend/src/services/secret/secret-service.ts @@ -952,15 +952,49 @@ export const secretServiceFactory = ({ }); const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey)); - const decryptedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => ({ - ...el, - secrets: importedSecrets.map((sec) => + const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => { + const decryptedImportSecrets = importedSecrets.map((sec) => decryptSecretRaw( { ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath }, botKey ) - ) - })); + ); + + // secret-override to handle duplicate keys from different import levels + // this prioritizes secret values from direct imports + const importedKeys = new Set(); + const importedEntries = decryptedImportSecrets.reduce( + ( + accum: { + secretKey: string; + secretPath: string; + workspace: string; + environment: string; + secretValue: string; + secretComment: string; + version: number; + type: string; + _id: string; + id: string; + user: string | null | undefined; + skipMultilineEncoding: boolean | null | undefined; + }[], + sec + ) => { + if (!importedKeys.has(sec.secretKey)) { + importedKeys.add(sec.secretKey); + return [...accum, sec]; + } + return accum; + }, + [] + ); + + return { + ...el, + secrets: importedEntries + }; + }); if 
(expandSecretReferences) { const expandSecrets = interpolateSecrets({ @@ -971,10 +1005,24 @@ export const secretServiceFactory = ({ }); const batchSecretsExpand = async ( - secretBatch: { secretKey: string; secretValue: string; secretComment?: string; secretPath: string }[] + secretBatch: { + secretKey: string; + secretValue: string; + secretComment?: string; + secretPath: string; + skipMultilineEncoding: boolean | null | undefined; + }[] ) => { // Group secrets by secretPath - const secretsByPath: Record = {}; + const secretsByPath: Record< + string, + { + secretKey: string; + secretValue: string; + secretComment?: string; + skipMultilineEncoding: boolean | null | undefined; + }[] + > = {}; secretBatch.forEach((secret) => { if (!secretsByPath[secret.secretPath]) { @@ -990,11 +1038,15 @@ export const secretServiceFactory = ({ continue; } - const secretRecord: Record = {}; + const secretRecord: Record< + string, + { value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined } + > = {}; secretsByPath[secPath].forEach((decryptedSecret) => { secretRecord[decryptedSecret.secretKey] = { value: decryptedSecret.secretValue, - comment: decryptedSecret.secretComment + comment: decryptedSecret.secretComment, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding }; }); @@ -1011,12 +1063,12 @@ export const secretServiceFactory = ({ await batchSecretsExpand(decryptedSecrets); // expand imports by batch - await Promise.all(decryptedImports.map((decryptedImport) => batchSecretsExpand(decryptedImport.secrets))); + await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets))); } return { secrets: decryptedSecrets, - imports: decryptedImports + imports: processedImports }; }; diff --git a/backend/src/services/secret/secret-version-dal.ts b/backend/src/services/secret/secret-version-dal.ts index 203406e301..4d641bb8d2 100644 --- a/backend/src/services/secret/secret-version-dal.ts +++ 
b/backend/src/services/secret/secret-version-dal.ts @@ -111,8 +111,37 @@ export const secretVersionDALFactory = (db: TDbClient) => { } }; + const pruneExcessVersions = async () => { + try { + await db(TableName.SecretVersion) + .with("version_cte", (qb) => { + void qb + .from(TableName.SecretVersion) + .select( + "id", + "folderId", + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersion}."secretId" ORDER BY ${TableName.SecretVersion}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersion}.folderId`) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("version_cte", "version_cte.id", `${TableName.SecretVersion}.id`) + .whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (error) { + throw new DatabaseError({ + error, + name: "Secret Version Prune" + }); + } + }; + return { ...secretVersionOrm, + pruneExcessVersions, findLatestVersionMany, bulkUpdate, findLatestVersionByFolderId, diff --git a/backend/src/services/smtp/smtp-service.ts b/backend/src/services/smtp/smtp-service.ts index 7d6b98b313..1fb89c5537 100644 --- a/backend/src/services/smtp/smtp-service.ts +++ b/backend/src/services/smtp/smtp-service.ts @@ -41,21 +41,8 @@ export enum SmtpHost { Office365 = "smtp.office365.com" } -export const getTlsOption = (host?: SmtpHost | string, secure?: boolean) => { - if (!secure) return { secure: false }; - if (!host) return { secure: true }; - - if ((host as SmtpHost) === SmtpHost.Sendgrid) { - return { secure: true, port: 465 }; // more details here https://nodemailer.com/smtp/ - } - if (host.includes("amazonaws.com")) { - return { tls: { ciphers: "TLSv1.2" } }; - } - return { requireTLS: true, tls: { ciphers: "TLSv1.2" } }; -}; - export const smtpServiceFactory = (cfg: TSmtpConfig) => { - 
const smtp = createTransport({ ...cfg, ...getTlsOption(cfg.host, cfg.secure) }); + const smtp = createTransport(cfg); const isSmtpOn = Boolean(cfg.host); const sendMail = async ({ substitutions, recipients, template, subjectLine }: TSmtpSendMail) => { diff --git a/backend/src/services/user/user-service.ts b/backend/src/services/user/user-service.ts index a82259db6f..693078fbdb 100644 --- a/backend/src/services/user/user-service.ts +++ b/backend/src/services/user/user-service.ts @@ -21,6 +21,7 @@ type TUserServiceFactoryDep = { | "findOneUserAction" | "createUserAction" | "findUserEncKeyByUserId" + | "delete" >; userAliasDAL: Pick; orgMembershipDAL: Pick; @@ -85,7 +86,7 @@ export const userServiceFactory = ({ tx ); - // check if there are users with the same email. + // check if there are verified users with the same email. const users = await userDAL.find( { email, @@ -134,6 +135,15 @@ export const userServiceFactory = ({ ); } } else { + await userDAL.delete( + { + email, + isAccepted: false, + isEmailVerified: false + }, + tx + ); + // update current user's username to [email] await userDAL.updateById( user.id, diff --git a/cli/.gitignore b/cli/.gitignore index 5fa3e39c55..8eb54d72b9 100644 --- a/cli/.gitignore +++ b/cli/.gitignore @@ -1,3 +1,4 @@ .infisical.json dist/ agent-config.test.yaml +.test.env \ No newline at end of file diff --git a/cli/go.mod b/cli/go.mod index 833745effc..6a1da8c6d8 100644 --- a/cli/go.mod +++ b/cli/go.mod @@ -3,7 +3,9 @@ module github.com/Infisical/infisical-merge go 1.21 require ( + github.com/bradleyjkemp/cupaloy/v2 v2.8.0 github.com/charmbracelet/lipgloss v0.5.0 + github.com/creack/pty v1.1.21 github.com/denisbrodbeck/machineid v1.0.1 github.com/fatih/semgroup v1.2.0 github.com/gitleaks/go-gitdiff v0.8.0 @@ -29,7 +31,6 @@ require ( require ( github.com/alessio/shellescape v1.4.1 // indirect github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect - github.com/bradleyjkemp/cupaloy/v2 v2.8.0 // indirect 
github.com/chzyer/readline v1.5.1 // indirect github.com/danieljoos/wincred v1.2.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect diff --git a/cli/go.sum b/cli/go.sum index 3535791366..ff3030a9cc 100644 --- a/cli/go.sum +++ b/cli/go.sum @@ -74,6 +74,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0= +github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/cli/packages/util/common.go b/cli/packages/util/common.go index 2b57383eff..55907da9de 100644 --- a/cli/packages/util/common.go +++ b/cli/packages/util/common.go @@ -4,6 +4,8 @@ import ( "fmt" "net/http" "os" + + "github.com/Infisical/infisical-merge/packages/config" ) func GetHomeDir() (string, error) { @@ -21,7 +23,7 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro return nil } -func CheckIsConnectedToInternet() (ok bool) { - _, err := http.Get("http://clients3.google.com/generate_204") +func ValidateInfisicalAPIConnection() (ok bool) { + _, err := http.Get(fmt.Sprintf("%v/status", config.INFISICAL_URL)) return err == nil } diff --git a/cli/packages/util/secrets.go b/cli/packages/util/secrets.go index 27f0636a90..02c2704bd6 100644 --- a/cli/packages/util/secrets.go +++ b/cli/packages/util/secrets.go @@ -307,32 +307,33 @@ func 
FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag } func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) { - isConnected := CheckIsConnectedToInternet() var secretsToReturn []models.SingleEnvironmentVariable // var serviceTokenDetails api.GetServiceTokenDetailsResponse var errorToReturn error if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" { - if isConnected { - log.Debug().Msg("GetAllEnvironmentVariables: Connected to internet, checking logged in creds") - - if projectConfigFilePath == "" { - RequireLocalWorkspaceFile() - } else { - ValidateWorkspaceFile(projectConfigFilePath) - } - - RequireLogin() + if projectConfigFilePath == "" { + RequireLocalWorkspaceFile() + } else { + ValidateWorkspaceFile(projectConfigFilePath) } + RequireLogin() + log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details") loggedInUserDetails, err := GetCurrentLoggedInUserDetails() + isConnected := ValidateInfisicalAPIConnection() + + if isConnected { + log.Debug().Msg("GetAllEnvironmentVariables: Connected to Infisical instance, checking logged in creds") + } + if err != nil { return nil, err } - if loggedInUserDetails.LoginExpired { + if isConnected && loggedInUserDetails.LoginExpired { PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") } @@ -364,12 +365,12 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32] if errorToReturn == nil { - WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey, secretsToReturn) + WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey, secretsToReturn) } // only attempt to serve cached secrets if no internet 
connection and if at least one secret cached if !isConnected { - backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey) + backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey) if len(backedSecrets) > 0 { PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug") secretsToReturn = backedSecrets @@ -634,8 +635,9 @@ func GetPlainTextSecrets(key []byte, encryptedSecrets []api.EncryptedSecretV3) ( return plainTextSecrets, nil } -func WriteBackupSecrets(workspace string, environment string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error { - fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment) +func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error { + formattedPath := strings.ReplaceAll(secretsPath, "/", "-") + fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath) secrets_backup_folder_name := "secrets-backup" _, fullConfigFileDirPath, err := GetFullConfigFilePath() @@ -672,8 +674,9 @@ func WriteBackupSecrets(workspace string, environment string, encryptionKey []by return nil } -func ReadBackupSecrets(workspace string, environment string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) { - fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment) +func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) { + formattedPath := strings.ReplaceAll(secretsPath, "/", "-") + fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath) secrets_backup_folder_name := "secrets-backup" _, fullConfigFileDirPath, err := GetFullConfigFilePath() diff --git 
a/cli/scripts/export_test_env.sh b/cli/scripts/export_test_env.sh new file mode 100644 index 0000000000..0b242281d9 --- /dev/null +++ b/cli/scripts/export_test_env.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +TEST_ENV_FILE=".test.env" + +# Check if the .env file exists +if [ ! -f "$TEST_ENV_FILE" ]; then + echo "$TEST_ENV_FILE does not exist." + exit 1 +fi + +# Export the variables +while IFS= read -r line +do + # Skip empty lines and lines starting with # + if [[ -z "$line" || "$line" =~ ^\# ]]; then + continue + fi + # Read the key-value pair + IFS='=' read -r key value <<< "$line" + eval export $key=\$value +done < "$TEST_ENV_FILE" + +echo "Test environment variables set." diff --git a/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll b/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll new file mode 100644 index 0000000000..260607e976 --- /dev/null +++ b/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll @@ -0,0 +1,7 @@ +┌───────────────┬──────────────┬─────────────┐ +│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │ +├───────────────┼──────────────┼─────────────┤ +│ TEST-SECRET-1 │ test-value-1 │ shared │ +│ TEST-SECRET-2 │ test-value-2 │ shared │ +│ TEST-SECRET-3 │ test-value-3 │ shared │ +└───────────────┴──────────────┴─────────────┘ diff --git a/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection b/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection new file mode 100644 index 0000000000..c48627f73f --- /dev/null +++ b/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection @@ -0,0 +1,8 @@ +Warning: Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. 
For more info, run with --debug +┌───────────────┬──────────────┬─────────────┐ +│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │ +├───────────────┼──────────────┼─────────────┤ +│ TEST-SECRET-1 │ test-value-1 │ shared │ +│ TEST-SECRET-2 │ test-value-2 │ shared │ +│ TEST-SECRET-3 │ test-value-3 │ shared │ +└───────────────┴──────────────┴─────────────┘ diff --git a/cli/test/export_test.go b/cli/test/export_test.go index 9a936871dc..c44bf20af0 100644 --- a/cli/test/export_test.go +++ b/cli/test/export_test.go @@ -8,7 +8,6 @@ import ( func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent") @@ -24,8 +23,6 @@ func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) { } func TestServiceToken_ExportSecretsWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent") if err != nil { @@ -41,8 +38,6 @@ func TestServiceToken_ExportSecretsWithImports(t *testing.T) { func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false") if err != nil { @@ -57,8 +52,6 @@ func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) { } func TestServiceToken_ExportSecretsWithoutImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false") if err != nil { diff --git a/cli/test/helper.go b/cli/test/helper.go index 995367c4b1..819f4c4c9d 
100644 --- a/cli/test/helper.go +++ b/cli/test/helper.go @@ -2,10 +2,10 @@ package tests import ( "fmt" + "log" "os" "os/exec" "strings" - "testing" ) const ( @@ -23,6 +23,8 @@ type Credentials struct { ServiceToken string ProjectID string EnvSlug string + UserEmail string + UserPassword string } var creds = Credentials{ @@ -32,18 +34,21 @@ var creds = Credentials{ ServiceToken: os.Getenv("CLI_TESTS_SERVICE_TOKEN"), ProjectID: os.Getenv("CLI_TESTS_PROJECT_ID"), EnvSlug: os.Getenv("CLI_TESTS_ENV_SLUG"), + UserEmail: os.Getenv("CLI_TESTS_USER_EMAIL"), + UserPassword: os.Getenv("CLI_TESTS_USER_PASSWORD"), } func ExecuteCliCommand(command string, args ...string) (string, error) { cmd := exec.Command(command, args...) output, err := cmd.CombinedOutput() if err != nil { + fmt.Println(fmt.Sprint(err) + ": " + string(output)) return strings.TrimSpace(string(output)), err } return strings.TrimSpace(string(output)), nil } -func SetupCli(t *testing.T) { +func SetupCli() { if creds.ClientID == "" || creds.ClientSecret == "" || creds.ServiceToken == "" || creds.ProjectID == "" || creds.EnvSlug == "" { panic("Missing required environment variables") @@ -57,7 +62,7 @@ func SetupCli(t *testing.T) { if !alreadyBuilt { if err := exec.Command("go", "build", "../.").Run(); err != nil { - t.Fatal(err) + log.Fatal(err) } } diff --git a/cli/test/login_test.go b/cli/test/login_test.go index 0f45914132..71273a3ec7 100644 --- a/cli/test/login_test.go +++ b/cli/test/login_test.go @@ -1,14 +1,124 @@ package tests import ( + "log" + "os/exec" + "strings" "testing" + "github.com/creack/pty" "github.com/stretchr/testify/assert" ) -func MachineIdentityLoginCmd(t *testing.T) { - SetupCli(t) +func UserInitCmd() { + c := exec.Command(FORMATTED_CLI_NAME, "init") + ptmx, err := pty.Start(c) + if err != nil { + log.Fatalf("error running CLI command: %v", err) + } + defer func() { _ = ptmx.Close() }() + stepChan := make(chan int, 10) + + go func() { + buf := make([]byte, 1024) + step := -1 + for { + n, 
err := ptmx.Read(buf) + if n > 0 { + terminalOut := string(buf) + if strings.Contains(terminalOut, "Which Infisical organization would you like to select a project from?") && step < 0 { + step += 1 + stepChan <- step + } else if strings.Contains(terminalOut, "Which of your Infisical projects would you like to connect this project to?") && step < 1 { + step += 1; + stepChan <- step + } + } + if err != nil { + close(stepChan) + return + } + } + }() + + for i := range stepChan { + switch i { + case 0: + ptmx.Write([]byte("\n")) + case 1: + ptmx.Write([]byte("\n")) + } + } +} + +func UserLoginCmd() { + // set vault to file because CI has no keyring + vaultCmd := exec.Command(FORMATTED_CLI_NAME, "vault", "set", "file") + _, err := vaultCmd.Output() + if err != nil { + log.Fatalf("error setting vault: %v", err) + } + + // Start programmatic interaction with CLI + c := exec.Command(FORMATTED_CLI_NAME, "login", "--interactive") + ptmx, err := pty.Start(c) + if err != nil { + log.Fatalf("error running CLI command: %v", err) + } + defer func() { _ = ptmx.Close() }() + + stepChan := make(chan int, 10) + + go func() { + buf := make([]byte, 1024) + step := -1 + for { + n, err := ptmx.Read(buf) + if n > 0 { + terminalOut := string(buf) + if strings.Contains(terminalOut, "Infisical Cloud") && step < 0 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Email") && step < 1 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Password") && step < 2 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Infisical organization") && step < 3 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Enter passphrase") && step < 4 { + step += 1; + stepChan <- step + } + } + if err != nil { + close(stepChan) + return + } + } + }() + + for i := range stepChan { + switch i { + case 0: + ptmx.Write([]byte("\n")) + case 1: + ptmx.Write([]byte(creds.UserEmail)) + ptmx.Write([]byte("\n")) + case 2: + 
ptmx.Write([]byte(creds.UserPassword)) + ptmx.Write([]byte("\n")) + case 3: + ptmx.Write([]byte("\n")) + } + } + +} + +func MachineIdentityLoginCmd(t *testing.T) { if creds.UAAccessToken != "" { return } diff --git a/cli/test/main_test.go b/cli/test/main_test.go new file mode 100644 index 0000000000..e14893aec0 --- /dev/null +++ b/cli/test/main_test.go @@ -0,0 +1,23 @@ +package tests + +import ( + "fmt" + "os" + "testing" +) + +func TestMain(m *testing.M) { + // Setup + fmt.Println("Setting up CLI...") + SetupCli() + fmt.Println("Performing user login...") + UserLoginCmd() + fmt.Println("Performing infisical init...") + UserInitCmd() + + // Run the tests + code := m.Run() + + // Exit + os.Exit(code) +} diff --git a/cli/test/run_test.go b/cli/test/run_test.go index 808f4f14ff..d2c6021cc2 100644 --- a/cli/test/run_test.go +++ b/cli/test/run_test.go @@ -8,8 +8,6 @@ import ( ) func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world") if err != nil { @@ -25,8 +23,6 @@ func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) { } } func TestServiceToken_RunCmdWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world") if err != nil { @@ -44,8 +40,6 @@ func TestServiceToken_RunCmdWithImports(t *testing.T) { func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world") if err != nil { @@ -63,8 +57,6 @@ func 
TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) { func TestUniversalAuth_RunCmdWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world") if err != nil { @@ -83,8 +75,6 @@ func TestUniversalAuth_RunCmdWithImports(t *testing.T) { func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world") if err != nil { @@ -101,8 +91,6 @@ func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) { } func TestServiceToken_RunCmdWithoutImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world") if err != nil { diff --git a/cli/test/secrets_by_name_test.go b/cli/test/secrets_by_name_test.go index 440324e1ac..26a8314bb4 100644 --- a/cli/test/secrets_by_name_test.go +++ b/cli/test/secrets_by_name_test.go @@ -7,8 +7,6 @@ import ( ) func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -23,8 +21,6 @@ func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) { } func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", 
"FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -39,8 +35,6 @@ func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) { } func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -56,8 +50,6 @@ func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) { func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -73,8 +65,6 @@ func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) { func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -90,8 +80,6 @@ func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) { func TestUniversalAuth_GetSecretsByNameWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { diff --git 
a/cli/test/secrets_test.go b/cli/test/secrets_test.go index 453666406d..f5d5a7b1f3 100644 --- a/cli/test/secrets_test.go +++ b/cli/test/secrets_test.go @@ -3,12 +3,12 @@ package tests import ( "testing" + "github.com/Infisical/infisical-merge/packages/util" "github.com/bradleyjkemp/cupaloy/v2" ) -func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { - SetupCli(t) +func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -23,8 +23,6 @@ func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { } func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") if err != nil { @@ -39,7 +37,6 @@ func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing. 
} func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") @@ -56,7 +53,6 @@ func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { } func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") @@ -73,7 +69,6 @@ func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing } func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, _ := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", "invalid-env", "--recursive", "--silent") @@ -85,3 +80,45 @@ func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) { } } + +func TestUserAuth_SecretsGetAll(t *testing.T) { + output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") + if err != nil { + t.Fatalf("error running CLI command: %v", err) + } + + // Use cupaloy to snapshot test the output + err = cupaloy.Snapshot(output) + if err != nil { + t.Fatalf("snapshot failed: %v", err) + } + + // explicitly called here because it should happen directly after successful secretsGetAll + testUserAuth_SecretsGetAllWithoutConnection(t) +} + +func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) { + originalConfigFile, err := util.GetConfigFile() + if err != nil { + t.Fatalf("error getting config file") + } + newConfigFile := originalConfigFile + + // set it to a URL 
that will always be unreachable + newConfigFile.LoggedInUserDomain = "http://localhost:4999" + util.WriteConfigFile(&newConfigFile) + + // restore config file + defer util.WriteConfigFile(&originalConfigFile) + + output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") + if err != nil { + t.Fatalf("error running CLI command: %v", err) + } + + // Use cupaloy to snapshot test the output + err = cupaloy.Snapshot(output) + if err != nil { + t.Fatalf("snapshot failed: %v", err) + } +} \ No newline at end of file diff --git a/docs/mint.json b/docs/mint.json index 4dcc3aaa55..d9914f3736 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -392,6 +392,7 @@ "pages": [ "sdks/languages/node", "sdks/languages/python", + "sdks/languages/go", "sdks/languages/java", "sdks/languages/csharp" ] diff --git a/docs/sdks/languages/go.mdx b/docs/sdks/languages/go.mdx new file mode 100644 index 0000000000..a8affa1541 --- /dev/null +++ b/docs/sdks/languages/go.mdx @@ -0,0 +1,438 @@ +--- +title: "Infisical Go SDK" +sidebarTitle: "Go" +icon: "golang" +--- + + + +If you're working with Go, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application.
+ +- [Package](https://pkg.go.dev/github.com/infisical/go-sdk) +- [Github Repository](https://github.com/infisical/go-sdk) + +## Basic Usage + +```go +package main + +import ( + "fmt" + "os" + + infisical "github.com/infisical/go-sdk" +) + +func main() { + + client, err := infisical.NewInfisicalClient(infisical.Config{ + SiteUrl: "https://app.infisical.com", // Optional, default is https://app.infisical.com + }) + + if err != nil { + fmt.Printf("Error: %v", err) + os.Exit(1) + } + + _, err = client.Auth().UniversalAuthLogin("YOUR_CLIENT_ID", "YOUR_CLIENT_SECRET") + + if err != nil { + fmt.Printf("Authentication failed: %v", err) + os.Exit(1) + } + + apiKeySecret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{ + SecretKey: "API_KEY", + Environment: "dev", + ProjectID: "YOUR_PROJECT_ID", + SecretPath: "/", + }) + + if err != nil { + fmt.Printf("Error: %v", err) + os.Exit(1) + } + + fmt.Printf("API Key Secret: %v", apiKeySecret) + +} +``` + +This example demonstrates how to use the Infisical Go SDK in a simple Go application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `YOUR_PROJECT_ID` project. + + + We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting it as an environment variable would be best. + + +# Installation + +```console +$ go get github.com/infisical/go-sdk +``` +# Configuration + +Import the SDK and create a client instance. + +```go +client, err := infisical.NewInfisicalClient(infisical.Config{ + SiteUrl: "https://app.infisical.com", // Optional, default is https://app.infisical.com + }) + +if err != nil { + fmt.Printf("Error: %v", err) + os.Exit(1) +} +``` + +### ClientSettings methods + + + + + The URL of the Infisical API. Default is `https://app.infisical.com`. + + + + Optionally set the user agent that will be used for HTTP requests. _(Not recommended)_ + + + + + +### Authentication + +The SDK supports a variety of authentication methods.
The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate. + +#### Universal Auth + +**Using environment variables** + +Call `.Auth().UniversalAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID. +- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret. + +**Using the SDK directly** +```go +_, err := client.Auth().UniversalAuthLogin("CLIENT_ID", "CLIENT_SECRET") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### GCP ID Token Auth + + Please note that this authentication method will only work if you're running your application on Google Cloud Platform. + Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method. + + +**Using environment variables** + +Call `.Auth().GcpIdTokenAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```go +_, err := client.Auth().GcpIdTokenAuthLogin("YOUR_MACHINE_IDENTITY_ID") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### GCP IAM Auth + +**Using environment variables** + +Call `.Auth().GcpIamAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. +- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file. + +**Using the SDK directly** +```go +_, err = client.Auth().GcpIamAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_KEY_FILE_PATH") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### AWS IAM Auth + + Please note that this authentication method will only work if you're running your application on AWS. 
+ Please [read more](/documentation/platform/identities/aws-auth) about this authentication method. + + +**Using environment variables** + +Call `.Auth().AwsIamAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```go +_, err = client.Auth().AwsIamAuthLogin("MACHINE_IDENTITY_ID") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + + +#### Azure Auth + + Please note that this authentication method will only work if you're running your application on Azure. + Please [read more](/documentation/platform/identities/azure-auth) about this authentication method. + + +**Using environment variables** + +Call `.Auth().AzureAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```go +_, err = client.Auth().AzureAuthLogin("MACHINE_IDENTITY_ID") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### Kubernetes Auth + + Please note that this authentication method will only work if you're running your application on Kubernetes. + Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method. + + +**Using environment variables** + +Call `.Auth().KubernetesAuthLogin()` with empty arguments to use the following environment variables: + +- `INFISICAL_KUBERNETES_IDENTITY_ID` - Your Infisical Machine Identity ID. +- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`. 
+ +**Using the SDK directly** +```go +// Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if empty value is passed +_, err = client.Auth().KubernetesAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_TOKEN_PATH") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +## Working with Secrets + +### client.Secrets().List(options) + +```go +secrets, err := client.Secrets().List(infisical.ListSecretsOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + SecretPath: "/foo/bar", + AttachToProcessEnv: false, +}) +``` + +Retrieve all secrets within the Infisical project and environment that client is connected to + +#### Parameters + + + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + + The project ID where the secret lives in. + + + + The path from where secrets should be fetched from. + + + + Whether or not to set the fetched secrets to the process environment. If true, you can access the secrets like so `System.getenv("SECRET_NAME")`. + + + + Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + + + + Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching. + + + + Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference) + + + + + +### client.Secrets().Retrieve(options) + +```go +secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{ + SecretKey: "API_KEY", + ProjectID: "PROJECT_ID", + Environment: "dev", +}) +``` + +Retrieve a secret from Infisical. + +By default, `Secrets().Retrieve()` fetches and returns a shared secret. + +#### Parameters + + + + + The key of the secret to retrieve. + + + The project ID where the secret lives in. 
+ + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be fetched from. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + +### client.Secrets().Create(options) + +```go +secret, err := client.Secrets().Create(infisical.CreateSecretOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + + SecretKey: "NEW_SECRET_KEY", + SecretValue: "NEW_SECRET_VALUE", + SecretComment: "This is a new secret", +}) +``` + +Create a new secret in Infisical. + +#### Parameters + + + + + The key of the secret to create. + + + The value of the secret. + + + A comment for the secret. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be created. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + +### client.Secrets().Update(options) + +```go +secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + SecretKey: "NEW_SECRET_KEY", + NewSecretValue: "NEW_SECRET_VALUE", + NewSkipMultilineEncoding: false, +}) +``` + +Update an existing secret in Infisical. + +#### Parameters + + + + + The key of the secret to update. + + + The new value of the secret. + + + Whether or not to skip multiline encoding for the new secret value. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be updated. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". 
+ + + + +### client.Secrets().Delete(options) + +```go +secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + SecretKey: "SECRET_KEY", +}) +``` + +Delete a secret in Infisical. + +#### Parameters + + + + + The key of the secret to update. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be deleted. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + \ No newline at end of file diff --git a/docs/self-hosting/configuration/envars.mdx b/docs/self-hosting/configuration/envars.mdx index 33e6c697cb..b225a3ace6 100644 --- a/docs/self-hosting/configuration/envars.mdx +++ b/docs/self-hosting/configuration/envars.mdx @@ -48,44 +48,44 @@ The platform utilizes Postgres to persist all of its data and Redis for caching Without email configuration, Infisical's core functions like sign-up/login and secret operations work, but this disables multi-factor authentication, email invites for projects, alerts for suspicious logins, and all other email-dependent features. - - Hostname to connect to for establishing SMTP connections - - -{" "} - - - Credential to connect to host (e.g. team@infisical.com) + + Hostname to connect to for establishing SMTP connections -{" "} - - - Credential to connect to host - - -{" "} - Port to connect to for establishing SMTP connections -{" "} - - - If true, use TLS when connecting to host. If false, TLS will be used if - STARTTLS is supported + + Credential to connect to host (e.g. team@infisical.com) -{" "} + + Credential to connect to host + Email address to be used for sending emails - - Name label to be used in From field (e.g. Team) - + + Name label to be used in From field (e.g. 
Team) + + + + If this is `true` and `SMTP_PORT` is not 465 then TLS is not used even if the + server supports STARTTLS extension. + + + + If this is `true` and `SMTP_PORT` is not 465 then Infisical tries to use + STARTTLS even if the server does not advertise support for it. If the + connection can not be encrypted then message is not sent. + + + + If this is `true`, Infisical will validate the server's SSL/TLS certificate and reject the connection if the certificate is invalid or not trusted. If set to `false`, the client will accept the server's certificate regardless of its validity, which can be useful in development or testing environments but is not recommended for production use. + @@ -105,7 +105,6 @@ SMTP_HOST=smtp.sendgrid.net SMTP_USERNAME=apikey SMTP_PASSWORD=SG.rqFsfjxYPiqE1lqZTgD_lz7x8IVLx # your SendGrid API Key from step above SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails SMTP_FROM_NAME=Infisical ``` @@ -128,7 +127,6 @@ SMTP_HOST=smtp.mailgun.org # obtained from credentials page SMTP_USERNAME=postmaster@example.mailgun.org # obtained from credentials page SMTP_PASSWORD=password # obtained from credentials page SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails SMTP_FROM_NAME=Infisical ``` @@ -159,7 +157,6 @@ SMTP_FROM_NAME=Infisical SMTP_USERNAME=xxx # your SMTP username SMTP_PASSWORD=xxx # your SMTP password SMTP_PORT=465 - SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails SMTP_FROM_NAME=Infisical ``` @@ -187,7 +184,6 @@ SMTP_HOST=smtp.socketlabs.com SMTP_USERNAME=username # obtained from your credentials SMTP_PASSWORD=password # obtained from your credentials SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails SMTP_FROM_NAME=Infisical ``` @@ -229,7 +225,6 @@ SMTP_HOST=smtp.resend.com 
SMTP_USERNAME=resend SMTP_PASSWORD=YOUR_API_KEY SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails SMTP_FROM_NAME=Infisical ``` @@ -253,7 +248,6 @@ SMTP_HOST=smtp.gmail.com SMTP_USERNAME=hey@gmail.com # your email SMTP_PASSWORD=password # your password SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@gmail.com SMTP_FROM_NAME=Infisical ``` @@ -277,7 +271,6 @@ SMTP_HOST=smtp.office365.com SMTP_USERNAME=username@yourdomain.com # your username SMTP_PASSWORD=password # your password SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=username@yourdomain.com SMTP_FROM_NAME=Infisical ``` @@ -294,7 +287,6 @@ SMTP_HOST=smtp.zoho.com SMTP_USERNAME=username # your email SMTP_PASSWORD=password # your password SMTP_PORT=587 -SMTP_SECURE=true SMTP_FROM_ADDRESS=hey@example.com # your personal Zoho email or domain-based email linked to Zoho Mail SMTP_FROM_NAME=Infisical ``` @@ -320,7 +312,8 @@ To login into Infisical with OAuth providers such as Google, configure the assoc - When set, all visits to the Infisical login page will automatically redirect users of your Infisical instance to the SAML identity provider associated with the specified organization slug. +When set, all visits to the Infisical login page will automatically redirect users of your Infisical instance to the SAML identity provider associated with the specified organization slug. + diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 9090fc603b..5251dc4064 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -2,6 +2,7 @@ ARG POSTHOG_HOST=https://app.posthog.com ARG POSTHOG_API_KEY=posthog-api-key ARG INTERCOM_ID=intercom-id ARG NEXT_INFISICAL_PLATFORM_VERSION=next-infisical-platform-version +ARG CAPTCHA_SITE_KEY=captcha-site-key FROM node:16-alpine AS deps # Install dependencies only when needed. 
Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. @@ -31,6 +32,8 @@ ARG POSTHOG_API_KEY ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY ARG INTERCOM_ID ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY # Build RUN npm run build @@ -57,7 +60,9 @@ ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \ BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG ARG NEXT_INFISICAL_PLATFORM_VERSION ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION=$NEXT_INFISICAL_PLATFORM_VERSION - +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \ + BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY COPY --chown=nextjs:nodejs --chmod=555 scripts ./scripts COPY --from=builder /app/public ./public RUN chown nextjs:nodejs ./public/data diff --git a/frontend/next.config.js b/frontend/next.config.js index 9d894694fe..5e48e70da7 100644 --- a/frontend/next.config.js +++ b/frontend/next.config.js @@ -1,13 +1,12 @@ - const path = require("path"); const ContentSecurityPolicy = ` default-src 'self'; - script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com 'unsafe-inline' 'unsafe-eval'; - style-src 'self' https://rsms.me 'unsafe-inline'; + script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com https://hcaptcha.com https://*.hcaptcha.com 'unsafe-inline' 'unsafe-eval'; + style-src 'self' https://rsms.me 'unsafe-inline' https://hcaptcha.com https://*.hcaptcha.com; child-src https://api.stripe.com; - frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/; - connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token 
https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:*; + frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/ https://hcaptcha.com https://*.hcaptcha.com; + connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:* https://hcaptcha.com https://*.hcaptcha.com; img-src 'self' https://static.intercomassets.com https://js.intercomcdn.com https://downloads.intercomcdn.com https://*.stripe.com https://i.ytimg.com/ data:; media-src https://js.intercomcdn.com; font-src 'self' https://fonts.intercomcdn.com/ https://maxcdn.bootstrapcdn.com https://rsms.me https://fonts.gstatic.com; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index c33c9dc360..489df0ea18 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -4,7 +4,6 @@ "requires": true, "packages": { "": { - "name": "frontend", "dependencies": { "@casl/ability": "^6.5.0", "@casl/react": "^3.1.0", @@ -19,6 +18,7 @@ "@fortawesome/free-regular-svg-icons": "^6.1.1", "@fortawesome/free-solid-svg-icons": "^6.1.2", "@fortawesome/react-fontawesome": "^0.2.0", + "@hcaptcha/react-hcaptcha": "^1.10.1", "@headlessui/react": "^1.7.7", "@hookform/resolvers": "^2.9.10", "@octokit/rest": "^19.0.7", @@ -3200,6 +3200,24 @@ "react": ">=16.3" } }, + "node_modules/@hcaptcha/loader": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@hcaptcha/loader/-/loader-1.2.4.tgz", + "integrity": "sha512-3MNrIy/nWBfyVVvMPBKdKrX7BeadgiimW0AL/a/8TohNtJqxoySKgTJEXOQvYwlHemQpUzFrIsK74ody7JiMYw==" + }, + "node_modules/@hcaptcha/react-hcaptcha": { + "version": "1.10.1", + "resolved": 
"https://registry.npmjs.org/@hcaptcha/react-hcaptcha/-/react-hcaptcha-1.10.1.tgz", + "integrity": "sha512-P0en4gEZAecah7Pt3WIaJO2gFlaLZKkI0+Tfdg8fNqsDxqT9VytZWSkH4WAkiPRULK1QcGgUZK+J56MXYmPifw==", + "dependencies": { + "@babel/runtime": "^7.17.9", + "@hcaptcha/loader": "^1.2.1" + }, + "peerDependencies": { + "react": ">= 16.3.0", + "react-dom": ">= 16.3.0" + } + }, "node_modules/@headlessui/react": { "version": "1.7.18", "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.18.tgz", diff --git a/frontend/package.json b/frontend/package.json index e01ef945e6..a4acb57382 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -26,6 +26,7 @@ "@fortawesome/free-regular-svg-icons": "^6.1.1", "@fortawesome/free-solid-svg-icons": "^6.1.2", "@fortawesome/react-fontawesome": "^0.2.0", + "@hcaptcha/react-hcaptcha": "^1.10.1", "@headlessui/react": "^1.7.7", "@hookform/resolvers": "^2.9.10", "@octokit/rest": "^19.0.7", diff --git a/frontend/scripts/initialize-standalone-build.sh b/frontend/scripts/initialize-standalone-build.sh index d9138bb77e..644877d8f8 100755 --- a/frontend/scripts/initialize-standalone-build.sh +++ b/frontend/scripts/initialize-standalone-build.sh @@ -4,6 +4,8 @@ scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID" +scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY" + if [ "$TELEMETRY_ENABLED" != "false" ]; then echo "Telemetry is enabled" scripts/set-standalone-build-telemetry.sh true diff --git a/frontend/scripts/start.sh b/frontend/scripts/start.sh index 1488ad328c..7dda6c95b1 100644 --- a/frontend/scripts/start.sh +++ b/frontend/scripts/start.sh @@ -6,6 +6,8 @@ scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTER scripts/replace-variable.sh "$BAKED_NEXT_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG" 
+scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY" + if [ "$TELEMETRY_ENABLED" != "false" ]; then echo "Telemetry is enabled" scripts/set-telemetry.sh true diff --git a/frontend/src/components/utilities/attemptCliLogin.ts b/frontend/src/components/utilities/attemptCliLogin.ts index e95f5bf885..8f7c4bb9f5 100644 --- a/frontend/src/components/utilities/attemptCliLogin.ts +++ b/frontend/src/components/utilities/attemptCliLogin.ts @@ -30,11 +30,13 @@ export interface IsCliLoginSuccessful { const attemptLogin = async ({ email, password, - providerAuthToken + providerAuthToken, + captchaToken }: { email: string; password: string; providerAuthToken?: string; + captchaToken?: string; }): Promise => { const telemetry = new Telemetry().getInstance(); return new Promise((resolve, reject) => { @@ -70,7 +72,8 @@ const attemptLogin = async ({ } = await login2({ email, clientProof, - providerAuthToken + providerAuthToken, + captchaToken }); if (mfaEnabled) { // case: MFA is enabled diff --git a/frontend/src/components/utilities/attemptLogin.ts b/frontend/src/components/utilities/attemptLogin.ts index 195cf9b9a2..b909b1ba7c 100644 --- a/frontend/src/components/utilities/attemptLogin.ts +++ b/frontend/src/components/utilities/attemptLogin.ts @@ -22,11 +22,13 @@ interface IsLoginSuccessful { const attemptLogin = async ({ email, password, - providerAuthToken + providerAuthToken, + captchaToken }: { email: string; password: string; providerAuthToken?: string; + captchaToken?: string; }): Promise => { const telemetry = new Telemetry().getInstance(); // eslint-disable-next-line new-cap @@ -58,6 +60,7 @@ const attemptLogin = async ({ iv, tag } = await login2({ + captchaToken, email, clientProof, providerAuthToken diff --git a/frontend/src/components/utilities/config/index.ts b/frontend/src/components/utilities/config/index.ts index 10d4856c06..9b3bf37f1a 100644 --- a/frontend/src/components/utilities/config/index.ts +++ 
b/frontend/src/components/utilities/config/index.ts @@ -2,5 +2,6 @@ const ENV = process.env.NEXT_PUBLIC_ENV! || "development"; // investigate const POSTHOG_API_KEY = process.env.NEXT_PUBLIC_POSTHOG_API_KEY!; const POSTHOG_HOST = process.env.NEXT_PUBLIC_POSTHOG_HOST! || "https://app.posthog.com"; const INTERCOMid = process.env.NEXT_PUBLIC_INTERCOMid!; +const CAPTCHA_SITE_KEY = process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY!; -export { ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST }; +export { CAPTCHA_SITE_KEY, ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST }; diff --git a/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx b/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx index 9dfb5ff623..b453456d79 100644 --- a/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx +++ b/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx @@ -78,7 +78,8 @@ export const SecretPathInput = ({ const validPaths = inputValue.split("/"); validPaths.pop(); - const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}/`; + // removed trailing slash + const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}`; onChange?.(newValue); setInputValue(newValue); setSecretPath(newValue); diff --git a/frontend/src/helpers/project.ts b/frontend/src/helpers/project.ts index ff8e700a4e..6c4e91ffc5 100644 --- a/frontend/src/helpers/project.ts +++ b/frontend/src/helpers/project.ts @@ -93,27 +93,29 @@ const initProjectHelper = async ({ projectName }: { projectName: string }) => { }); try { - secrets?.forEach((secret) => { - createSecret({ - workspaceId: project.id, - environment: secret.environment, - type: secret.type, - secretKey: secret.secretName, - secretKeyCiphertext: secret.secretKeyCiphertext, - secretKeyIV: secret.secretKeyIV, - secretKeyTag: secret.secretKeyTag, - secretValueCiphertext: secret.secretValueCiphertext, - secretValueIV: secret.secretValueIV, - secretValueTag: secret.secretValueTag, - secretCommentCiphertext: 
secret.secretCommentCiphertext, - secretCommentIV: secret.secretCommentIV, - secretCommentTag: secret.secretCommentTag, - secretPath: "/", - metadata: { - source: "signup" - } - }); - }); + await Promise.allSettled( + (secrets || []).map((secret) => + createSecret({ + workspaceId: project.id, + environment: secret.environment, + type: secret.type, + secretKey: secret.secretName, + secretKeyCiphertext: secret.secretKeyCiphertext, + secretKeyIV: secret.secretKeyIV, + secretKeyTag: secret.secretKeyTag, + secretValueCiphertext: secret.secretValueCiphertext, + secretValueIV: secret.secretValueIV, + secretValueTag: secret.secretValueTag, + secretCommentCiphertext: secret.secretCommentCiphertext, + secretCommentIV: secret.secretCommentIV, + secretCommentTag: secret.secretCommentTag, + secretPath: "/", + metadata: { + source: "signup" + } + }) + ) + ); } catch (err) { console.error("Failed to upload secrets", err); } diff --git a/frontend/src/hooks/api/auth/types.ts b/frontend/src/hooks/api/auth/types.ts index 41c324bffe..ce1b18bc83 100644 --- a/frontend/src/hooks/api/auth/types.ts +++ b/frontend/src/hooks/api/auth/types.ts @@ -30,6 +30,7 @@ export type Login1DTO = { }; export type Login2DTO = { + captchaToken?: string; email: string; clientProof: string; providerAuthToken?: string; diff --git a/frontend/src/hooks/api/integrations/queries.tsx b/frontend/src/hooks/api/integrations/queries.tsx index 3aa8f3ed1d..81d0f00cae 100644 --- a/frontend/src/hooks/api/integrations/queries.tsx +++ b/frontend/src/hooks/api/integrations/queries.tsx @@ -73,6 +73,9 @@ export const useCreateIntegration = () => { }[]; kmsKeyId?: string; shouldDisableDelete?: boolean; + shouldMaskSecrets?: boolean; + shouldProtectSecrets?: boolean; + shouldEnableDelete?: boolean; }; }) => { const { diff --git a/frontend/src/hooks/api/secretImports/queries.tsx b/frontend/src/hooks/api/secretImports/queries.tsx index 2879c859bf..701a205435 100644 --- a/frontend/src/hooks/api/secretImports/queries.tsx +++ 
b/frontend/src/hooks/api/secretImports/queries.tsx @@ -264,13 +264,12 @@ export const useGetImportedSecretsAllEnvs = ({ }); const isImportedSecretPresentInEnv = useCallback( - (secPath: string, envSlug: string, secretName: string) => { + (envSlug: string, secretName: string) => { const selectedEnvIndex = environments.indexOf(envSlug); if (selectedEnvIndex !== -1) { - const isPresent = secretImports?.[selectedEnvIndex]?.data?.find( - ({ secretPath, secrets }) => - secretPath === secPath && secrets.some((s) => s.key === secretName) + const isPresent = secretImports?.[selectedEnvIndex]?.data?.find(({ secrets }) => + secrets.some((s) => s.key === secretName) ); return Boolean(isPresent); diff --git a/frontend/src/hooks/api/workspace/queries.tsx b/frontend/src/hooks/api/workspace/queries.tsx index 6b92ae5a1e..d3d30e4201 100644 --- a/frontend/src/hooks/api/workspace/queries.tsx +++ b/frontend/src/hooks/api/workspace/queries.tsx @@ -23,6 +23,7 @@ import { TUpdateWorkspaceIdentityRoleDTO, TUpdateWorkspaceUserRoleDTO, UpdateEnvironmentDTO, + UpdatePitVersionLimitDTO, Workspace } from "./types"; @@ -258,6 +259,21 @@ export const useToggleAutoCapitalization = () => { }); }; +export const useUpdateWorkspaceVersionLimit = () => { + const queryClient = useQueryClient(); + + return useMutation<{}, {}, UpdatePitVersionLimitDTO>({ + mutationFn: ({ projectSlug, pitVersionLimit }) => { + return apiRequest.put(`/api/v1/workspace/${projectSlug}/version-limit`, { + pitVersionLimit + }); + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + } + }); +}; + export const useDeleteWorkspace = () => { const queryClient = useQueryClient(); diff --git a/frontend/src/hooks/api/workspace/types.ts b/frontend/src/hooks/api/workspace/types.ts index 8be9beed0d..8c28d09389 100644 --- a/frontend/src/hooks/api/workspace/types.ts +++ b/frontend/src/hooks/api/workspace/types.ts @@ -16,6 +16,7 @@ export type Workspace = { upgradeStatus: string | null; 
autoCapitalization: boolean; environments: WorkspaceEnv[]; + pitVersionLimit: number; slug: string; }; @@ -48,6 +49,7 @@ export type CreateWorkspaceDTO = { }; export type RenameWorkspaceDTO = { workspaceID: string; newWorkspaceName: string }; +export type UpdatePitVersionLimitDTO = { projectSlug: string; pitVersionLimit: number }; export type ToggleAutoCapitalizationDTO = { workspaceID: string; state: boolean }; export type DeleteWorkspaceDTO = { workspaceID: string }; @@ -128,4 +130,4 @@ export type TUpdateWorkspaceGroupRoleDTO = { temporaryAccessStartTime: string; } )[]; -}; \ No newline at end of file +}; diff --git a/frontend/src/pages/integrations/github/create.tsx b/frontend/src/pages/integrations/github/create.tsx index e42a7e9eb4..f92a98943a 100644 --- a/frontend/src/pages/integrations/github/create.tsx +++ b/frontend/src/pages/integrations/github/create.tsx @@ -33,6 +33,7 @@ import { Input, Select, SelectItem, + Switch, Tab, TabList, TabPanel, @@ -59,7 +60,7 @@ const schema = yup.object({ selectedSourceEnvironment: yup.string().trim().required("Project Environment is required"), secretPath: yup.string().trim().required("Secrets Path is required"), secretSuffix: yup.string().trim().optional(), - + shouldEnableDelete: yup.boolean().optional(), scope: yup.mixed().oneOf(targetEnv.slice()).required(), repoIds: yup.mixed().when("scope", { @@ -98,7 +99,6 @@ type FormData = yup.InferType; export default function GitHubCreateIntegrationPage() { const router = useRouter(); const { mutateAsync } = useCreateIntegration(); - const integrationAuthId = (queryString.parse(router.asPath.split("?")[1]).integrationAuthId as string) ?? 
""; @@ -120,7 +120,8 @@ export default function GitHubCreateIntegrationPage() { defaultValues: { secretPath: "/", scope: "github-repo", - repoIds: [] + repoIds: [], + shouldEnableDelete: false } }); @@ -177,7 +178,8 @@ export default function GitHubCreateIntegrationPage() { app: targetApp.name, // repo name owner: targetApp.owner, // repo owner metadata: { - secretSuffix: data.secretSuffix + secretSuffix: data.secretSuffix, + shouldEnableDelete: data.shouldEnableDelete } }); }) @@ -194,7 +196,8 @@ export default function GitHubCreateIntegrationPage() { scope: data.scope, owner: integrationAuthOrgs?.find((e) => e.orgId === data.orgId)?.name, metadata: { - secretSuffix: data.secretSuffix + secretSuffix: data.secretSuffix, + shouldEnableDelete: data.shouldEnableDelete } }); break; @@ -211,7 +214,8 @@ export default function GitHubCreateIntegrationPage() { owner: repoOwner, targetEnvironmentId: data.envId, metadata: { - secretSuffix: data.secretSuffix + secretSuffix: data.secretSuffix, + shouldEnableDelete: data.shouldEnableDelete } }); break; @@ -546,6 +550,21 @@ export default function GitHubCreateIntegrationPage() { animate={{ opacity: 1, translateX: 0 }} exit={{ opacity: 0, translateX: 30 }} > +
+ ( + onChange(isChecked)} + isChecked={value} + > + Delete secrets in Github that are not in Infisical + + )} + /> +
; @@ -138,7 +141,9 @@ export default function GitLabCreateIntegrationPage() { targetAppId, targetEnvironment, secretPrefix, - secretSuffix + secretSuffix, + shouldMaskSecrets, + shouldProtectSecrets }: FormData) => { try { setIsLoading(true); @@ -156,7 +161,9 @@ export default function GitLabCreateIntegrationPage() { secretPath, metadata: { secretPrefix, - secretSuffix + secretSuffix, + shouldMaskSecrets, + shouldProtectSecrets } }); @@ -390,6 +397,36 @@ export default function GitLabCreateIntegrationPage() { exit={{ opacity: 0, translateX: 30 }} className="pb-[14.25rem]" > +
+ ( + onChange(isChecked)} + isChecked={value} + > +
Mark Infisical secrets in Gitlab as 'Masked' secrets
+
+ )} + /> +
+
+ ( + onChange(isChecked)} + isChecked={value} + > + Mark Infisical secrets in Gitlab as 'Protected' secrets + + )} + /> +
(null); const { data: serverDetails } = useFetchServerStatus(); useEffect(() => { @@ -56,7 +61,8 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }: // attemptCliLogin const isCliLoginSuccessful = await attemptCliLogin({ email: email.toLowerCase(), - password + password, + captchaToken }); if (isCliLoginSuccessful && isCliLoginSuccessful.success) { @@ -78,7 +84,8 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }: } else { const isLoginSuccessful = await attemptLogin({ email: email.toLowerCase(), - password + password, + captchaToken }); if (isLoginSuccessful && isLoginSuccessful.success) { @@ -112,6 +119,12 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }: return; } + if (err.response.data.error === "Captcha Required") { + setShouldShowCaptcha(true); + setIsLoading(false); + return; + } + setLoginError(true); createNotification({ text: "Login unsuccessful. Double-check your credentials and try again.", @@ -119,6 +132,11 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }: }); } + if (captchaRef.current) { + captchaRef.current.resetCaptcha(); + } + + setCaptchaToken(""); setIsLoading(false); }; @@ -240,8 +258,19 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }: className="select:-webkit-autofill:focus h-10" /> + {shouldShowCaptcha && ( +
+ setCaptchaToken(token)} + ref={captchaRef} + /> +
+ )}
+ {shouldShowCaptcha && ( +
+ setCaptchaToken(token)} + ref={captchaRef} + /> +
+ )}
+ +
+ ); +}; diff --git a/frontend/src/views/Settings/ProjectSettingsPage/components/PointInTimeVersionLimitSection/index.tsx b/frontend/src/views/Settings/ProjectSettingsPage/components/PointInTimeVersionLimitSection/index.tsx new file mode 100644 index 0000000000..242b8c79a1 --- /dev/null +++ b/frontend/src/views/Settings/ProjectSettingsPage/components/PointInTimeVersionLimitSection/index.tsx @@ -0,0 +1 @@ +export { PointInTimeVersionLimitSection } from "./PointInTimeVersionLimitSection"; diff --git a/frontend/src/views/Settings/ProjectSettingsPage/components/ProjectGeneralTab/ProjectGeneralTab.tsx b/frontend/src/views/Settings/ProjectSettingsPage/components/ProjectGeneralTab/ProjectGeneralTab.tsx index 7d7c30fb0a..511dff93e1 100644 --- a/frontend/src/views/Settings/ProjectSettingsPage/components/ProjectGeneralTab/ProjectGeneralTab.tsx +++ b/frontend/src/views/Settings/ProjectSettingsPage/components/ProjectGeneralTab/ProjectGeneralTab.tsx @@ -3,6 +3,7 @@ import { BackfillSecretReferenceSecretion } from "../BackfillSecretReferenceSect import { DeleteProjectSection } from "../DeleteProjectSection"; import { E2EESection } from "../E2EESection"; import { EnvironmentSection } from "../EnvironmentSection"; +import { PointInTimeVersionLimitSection } from "../PointInTimeVersionLimitSection"; import { ProjectNameChangeSection } from "../ProjectNameChangeSection"; import { SecretTagsSection } from "../SecretTagsSection"; @@ -14,6 +15,7 @@ export const ProjectGeneralTab = () => { + diff --git a/k8-operator/controllers/infisicalsecret_helper.go b/k8-operator/controllers/infisicalsecret_helper.go index c14f724ebc..f0b510a3cf 100644 --- a/k8-operator/controllers/infisicalsecret_helper.go +++ b/k8-operator/controllers/infisicalsecret_helper.go @@ -232,7 +232,6 @@ func (r *InfisicalSecretReconciler) UpdateInfisicalManagedKubeSecret(ctx context } managedKubeSecret.Data = plainProcessedSecrets - managedKubeSecret.ObjectMeta.Annotations = map[string]string{} 
managedKubeSecret.ObjectMeta.Annotations[SECRET_VERSION_ANNOTATION] = ETag err := r.Client.Update(ctx, &managedKubeSecret)