diff --git a/backend/package-lock.json b/backend/package-lock.json index 524a6017b8..0a4a2db17e 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -71,6 +71,7 @@ "@types/node": "^18.11.3", "@types/nodemailer": "^6.4.6", "@types/passport": "^1.0.12", + "@types/picomatch": "^2.3.0", "@types/supertest": "^2.0.12", "@types/swagger-jsdoc": "^6.0.1", "@types/swagger-ui-express": "^4.1.3", @@ -2200,6 +2201,26 @@ } } }, + "node_modules/@jest/reporters/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@jest/schemas": { "version": "29.4.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.4.3.tgz", @@ -3241,6 +3262,12 @@ "@types/express": "*" } }, + "node_modules/@types/picomatch": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-O397rnSS9iQI4OirieAtsDqvCj4+3eY1J+EPdNTKuHuRWIfUoGyzX294o8C4KJYaLqgSrd2o60c5EqCU8Zv02g==", + "dev": true + }, "node_modules/@types/prettier": { "version": "2.7.2", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.2.tgz", @@ -5680,25 +5707,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - 
"once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -6481,6 +6489,26 @@ } } }, + "node_modules/jest-config/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-diff": { "version": "29.5.0", "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.5.0.tgz", @@ -6776,6 +6804,26 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-runtime/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-snapshot": { "version": "29.5.0", "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.5.0.tgz", @@ -11071,6 +11119,25 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": 
"sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/ripemd160": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", @@ -11669,6 +11736,25 @@ "node": ">=0.4.0" } }, + "node_modules/swagger-autogen/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/swagger-ui-dist": { "version": "4.19.0", "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.19.0.tgz", @@ -11723,6 +11809,26 @@ "node": ">=8" } }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/text-hex": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", @@ -14154,6 +14260,22 @@ "string-length": "^4.0.1", "strip-ansi": "^6.0.0", 
"v8-to-istanbul": "^9.0.1" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } }, "@jest/schemas": { @@ -14988,6 +15110,12 @@ "@types/express": "*" } }, + "@types/picomatch": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-O397rnSS9iQI4OirieAtsDqvCj4+3eY1J+EPdNTKuHuRWIfUoGyzX294o8C4KJYaLqgSrd2o60c5EqCU8Zv02g==", + "dev": true + }, "@types/prettier": { "version": "2.7.2", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.2.tgz", @@ -16808,19 +16936,6 @@ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true }, - "glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, "glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -17371,6 +17486,22 @@ "pretty-format": "^29.5.0", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": 
"2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } }, "jest-diff": { @@ -17606,6 +17737,22 @@ "jest-util": "^29.5.0", "slash": "^3.0.0", "strip-bom": "^4.0.0" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } }, "jest-snapshot": { @@ -20674,6 +20821,21 @@ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "requires": { "glob": "^7.1.3" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } }, "ripemd160": { @@ -21129,6 +21291,19 @@ "version": "7.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } } } }, @@ -21174,6 +21349,22 @@ "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", "minimatch": "^3.0.4" + }, + "dependencies": { + "glob": { + 
"version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } }, "text-hex": { diff --git a/backend/package.json b/backend/package.json index 448ccfac2b..0c0f0642df 100644 --- a/backend/package.json +++ b/backend/package.json @@ -89,6 +89,7 @@ "@types/node": "^18.11.3", "@types/nodemailer": "^6.4.6", "@types/passport": "^1.0.12", + "@types/picomatch": "^2.3.0", "@types/supertest": "^2.0.12", "@types/swagger-jsdoc": "^6.0.1", "@types/swagger-ui-express": "^4.1.3", diff --git a/backend/src/controllers/v2/secretsController.ts b/backend/src/controllers/v2/secretsController.ts index a760a511cd..b93846e70c 100644 --- a/backend/src/controllers/v2/secretsController.ts +++ b/backend/src/controllers/v2/secretsController.ts @@ -9,7 +9,7 @@ import { ACTION_UPDATE_SECRETS, ALGORITHM_AES_256_GCM, ENCODING_SCHEME_UTF8, - SECRET_PERSONAL, + SECRET_PERSONAL } from "../../variables"; import { BadRequestError, UnauthorizedRequestError } from "../../utils/errors"; import { EventService } from "../../services"; @@ -21,7 +21,7 @@ import { PERMISSION_WRITE_SECRETS } from "../../variables"; import { userHasNoAbility, userHasWorkspaceAccess, - userHasWriteOnlyAbility, + userHasWriteOnlyAbility } from "../../ee/helpers/checkMembershipPermissions"; import Tag from "../../models/tag"; import _ from "lodash"; @@ -30,8 +30,9 @@ import Folder from "../../models/folder"; import { getFolderByPath, getFolderIdFromServiceToken, - searchByFolderId, + searchByFolderId } from "../../services/FolderService"; +import { isValidScope } from "../../helpers/secrets"; /** * Peform a batch of any specified CUD secret operations @@ -47,7 +48,7 @@ export const batchSecrets = async (req: 
Request, res: Response) => { workspaceId, environment, requests, - secretPath, + secretPath }: { workspaceId: string; environment: string; @@ -63,7 +64,7 @@ export const batchSecrets = async (req: Request, res: Response) => { // get secret blind index salt const salt = await SecretService.getSecretBlindIndexSalt({ - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); const folders = await Folder.findOne({ workspace: workspaceId, environment }); @@ -73,22 +74,17 @@ export const batchSecrets = async (req: Request, res: Response) => { } if (req.authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = req.authData.authPayload; + const isValidScopeAccess = isValidScope(req.authData.authPayload, environment, secretPath); + // in service token when not giving secretpath folderid must be root // this is to avoid giving folderid when service tokens are used - if ( - (!secretPath && folderId !== "root") || - (secretPath && secretPath !== serviceTkScopedSecretPath) - ) { + if ((!secretPath && folderId !== "root") || (secretPath && !isValidScopeAccess)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } + if (secretPath) { - folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); } for await (const request of requests) { @@ -97,12 +93,10 @@ export const batchSecrets = async (req: Request, res: Response) => { let secretBlindIndex = ""; switch (request.method) { case "POST": - secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt( - { - secretName: request.secret.secretName, - salt, - } - ); + secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt({ + secretName: request.secret.secretName, + salt + }); createSecrets.push({ ...request.secret, @@ -113,16 +107,14 @@ export const batchSecrets = async (req: 
Request, res: Response) => { folder: folderId, secretBlindIndex, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); break; case "PATCH": - secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt( - { - secretName: request.secret.secretName, - salt, - } - ); + secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt({ + secretName: request.secret.secretName, + salt + }); updateSecrets.push({ ...request.secret, @@ -130,7 +122,7 @@ export const batchSecrets = async (req: Request, res: Response) => { secretBlindIndex, folder: folderId, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); break; case "DELETE": @@ -150,9 +142,9 @@ export const batchSecrets = async (req: Request, res: Response) => { ...n._doc, _id: new Types.ObjectId(), secret: n._id, - isDeleted: false, + isDeleted: false }; - }), + }) }); const addAction = (await EELogService.createAction({ @@ -161,7 +153,7 @@ export const batchSecrets = async (req: Request, res: Response) => { serviceAccountId: req.serviceAccount?._id, serviceTokenDataId: req.serviceTokenData?._id, workspaceId: new Types.ObjectId(workspaceId), - secretIds: createdSecrets.map((n) => n._id), + secretIds: createdSecrets.map((n) => n._id) })) as IAction; actions.push(addAction); @@ -175,8 +167,8 @@ export const batchSecrets = async (req: Request, res: Response) => { workspaceId, folderId, channel, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } } @@ -195,7 +187,7 @@ export const batchSecrets = async (req: Request, res: Response) => { listedSecretsObj = req.secrets.reduce( (obj: any, secret: ISecret) => ({ ...obj, - [secret._id.toString()]: secret, + [secret._id.toString()]: secret }), {} ); @@ -204,16 +196,16 @@ export const batchSecrets = async (req: Request, res: Response) => { updateOne: { filter: { _id: new Types.ObjectId(u._id), - 
workspace: new Types.ObjectId(workspaceId), + workspace: new Types.ObjectId(workspaceId) }, update: { $inc: { - version: 1, + version: 1 }, ...u, - _id: new Types.ObjectId(u._id), - }, - }, + _id: new Types.ObjectId(u._id) + } + } })); await Secret.bulkWrite(updateOperations); @@ -240,25 +232,25 @@ export const batchSecrets = async (req: Request, res: Response) => { algorithm: ALGORITHM_AES_256_GCM, keyEncoding: ENCODING_SCHEME_UTF8, tags: u.tags, - folder: u.folder, + folder: u.folder }) ); await EESecretService.addSecretVersions({ - secretVersions, + secretVersions }); updatedSecrets = await Secret.find({ _id: { - $in: updateSecrets.map((u) => new Types.ObjectId(u._id)), - }, + $in: updateSecrets.map((u) => new Types.ObjectId(u._id)) + } }); const updateAction = (await EELogService.createAction({ name: ACTION_UPDATE_SECRETS, userId: req.user._id, workspaceId: new Types.ObjectId(workspaceId), - secretIds: updatedSecrets.map((u) => u._id), + secretIds: updatedSecrets.map((u) => u._id) })) as IAction; actions.push(updateAction); @@ -272,8 +264,8 @@ export const batchSecrets = async (req: Request, res: Response) => { workspaceId, folderId, channel, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } } @@ -282,19 +274,19 @@ export const batchSecrets = async (req: Request, res: Response) => { if (deleteSecrets.length > 0) { await Secret.deleteMany({ _id: { - $in: deleteSecrets, - }, + $in: deleteSecrets + } }); await EESecretService.markDeletedSecretVersions({ - secretIds: deleteSecrets, + secretIds: deleteSecrets }); const deleteAction = (await EELogService.createAction({ name: ACTION_DELETE_SECRETS, userId: req.user._id, workspaceId: new Types.ObjectId(workspaceId), - secretIds: deleteSecrets, + secretIds: deleteSecrets })) as IAction; actions.push(deleteAction); @@ -307,8 +299,8 @@ export const batchSecrets = async (req: Request, res: Response) => { environment, workspaceId, channel: channel, - userAgent: 
req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } } @@ -320,22 +312,22 @@ export const batchSecrets = async (req: Request, res: Response) => { workspaceId: new Types.ObjectId(workspaceId), actions, channel, - ipAddress: req.realIP, + ipAddress: req.realIP }); } // // trigger event - push secrets await EventService.handleEvent({ event: eventPushSecrets({ - workspaceId: new Types.ObjectId(workspaceId), - }), + workspaceId: new Types.ObjectId(workspaceId) + }) }); // (EE) take a secret snapshot await EESecretService.takeSecretSnapshot({ workspaceId: new Types.ObjectId(workspaceId), environment, - folderId, + folderId }); const resObj: { [key: string]: ISecret[] | string[] } = {}; @@ -418,7 +410,7 @@ export const createSecrets = async (req: Request, res: Response) => { const { workspaceId, environment, - secretPath, + secretPath }: { workspaceId: string; environment: string; @@ -435,8 +427,7 @@ export const createSecrets = async (req: Request, res: Response) => { ); if (!hasAccess) { throw UnauthorizedRequestError({ - message: - "You do not have the necessary permission(s) perform this action", + message: "You do not have the necessary permission(s) perform this action" }); } } @@ -449,28 +440,27 @@ export const createSecrets = async (req: Request, res: Response) => { // case: create 1 secret listOfSecretsToCreate = [req.body.secrets]; } + if (req.authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = req.authData.authPayload; + const isValidScopeAccess = isValidScope( + req.authData.authPayload, + environment, + secretPath || "/" + ); + // in service token when not giving secretpath folderid must be root // this is to avoid giving folderid when service tokens are used - if ( - (!secretPath && folderId !== "root") || - (secretPath && secretPath !== serviceTkScopedSecretPath) - ) { + if ((!secretPath && folderId !== "root") || (secretPath && !isValidScopeAccess)) { throw 
UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } if (secretPath) { - folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); } // get secret blind index salt const salt = await SecretService.getSecretBlindIndexSalt({ - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); type secretsToCreateType = { @@ -502,15 +492,14 @@ export const createSecrets = async (req: Request, res: Response) => { secretCommentCiphertext, secretCommentIV, secretCommentTag, - tags, + tags }: secretsToCreateType) => { let secretBlindIndex; if (secretName) { - secretBlindIndex = - await SecretService.generateSecretBlindIndexWithSalt({ - secretName, - salt, - }); + secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt({ + secretName, + salt + }); } return { @@ -532,22 +521,22 @@ export const createSecrets = async (req: Request, res: Response) => { secretCommentTag, algorithm: ALGORITHM_AES_256_GCM, keyEncoding: ENCODING_SCHEME_UTF8, - tags, + tags }; } ) ); - const newlyCreatedSecrets: ISecret[] = ( - await Secret.insertMany(secretsToInsert) - ).map((insertedSecret) => insertedSecret.toObject()); + const newlyCreatedSecrets: ISecret[] = (await Secret.insertMany(secretsToInsert)).map( + (insertedSecret) => insertedSecret.toObject() + ); setTimeout(async () => { // trigger event - push secrets await EventService.handleEvent({ event: eventPushSecrets({ - workspaceId: new Types.ObjectId(workspaceId), - }), + workspaceId: new Types.ObjectId(workspaceId) + }) }); }, 5000); @@ -567,7 +556,7 @@ export const createSecrets = async (req: Request, res: Response) => { secretKeyTag, secretValueCiphertext, secretValueIV, - secretValueTag, + secretValueTag }) => new SecretVersion({ secret: _id, @@ -586,9 +575,9 @@ export const createSecrets = async (req: Request, res: Response) => { secretValueTag, 
folder: folderId, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }) - ), + ) }); const addAction = await EELogService.createAction({ @@ -597,7 +586,7 @@ export const createSecrets = async (req: Request, res: Response) => { serviceAccountId: req.serviceAccount?._id, serviceTokenDataId: req.serviceTokenData?._id, workspaceId: new Types.ObjectId(workspaceId), - secretIds: newlyCreatedSecrets.map((n) => n._id), + secretIds: newlyCreatedSecrets.map((n) => n._id) }); // (EE) create (audit) log @@ -609,14 +598,14 @@ export const createSecrets = async (req: Request, res: Response) => { workspaceId: new Types.ObjectId(workspaceId), actions: [addAction], channel, - ipAddress: req.realIP, + ipAddress: req.realIP })); // (EE) take a secret snapshot await EESecretService.takeSecretSnapshot({ workspaceId: new Types.ObjectId(workspaceId), environment, - folderId, + folderId }); const postHogClient = await TelemetryService.getPostHogClient(); @@ -624,7 +613,7 @@ export const createSecrets = async (req: Request, res: Response) => { postHogClient.capture({ event: "secrets added", distinctId: await TelemetryService.getDistinctId({ - authData: req.authData, + authData: req.authData }), properties: { numberOfSecrets: listOfSecretsToCreate.length, @@ -632,13 +621,13 @@ export const createSecrets = async (req: Request, res: Response) => { workspaceId, channel: channel, folderId, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } return res.status(200).send({ - secrets: newlyCreatedSecrets, + secrets: newlyCreatedSecrets }); }; @@ -696,10 +685,7 @@ export const getSecrets = async (req: Request, res: Response) => { const environment = req.query.environment as string; const folders = await Folder.findOne({ workspace: workspaceId, environment }); - if ( - (!folders && folderId && folderId !== "root") || - (!folders && secretPath) - ) { + if ((!folders && folderId && folderId !== "root") 
|| (!folders && secretPath)) { res.send({ secrets: [] }); return; } @@ -712,13 +698,15 @@ export const getSecrets = async (req: Request, res: Response) => { } if (req.authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = req.authData.authPayload; + const isValidScopeAccess = isValidScope( + req.authData.authPayload, + environment, + (secretPath as string) || "/" + ); + // in service token when not giving secretpath folderid must be root // this is to avoid giving folderid when service tokens are used - if ( - (!secretPath && folderId !== "root") || - (secretPath && secretPath !== serviceTkScopedSecretPath) - ) { + if ((!secretPath && folderId !== "root") || (secretPath && !isValidScopeAccess)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } @@ -738,8 +726,7 @@ export const getSecrets = async (req: Request, res: Response) => { // query tags table to get all tags ids for the tag names for the given workspace let tagIds = []; - const tagNamesList = - typeof tagSlugs === "string" && tagSlugs !== "" ? tagSlugs.split(",") : []; + const tagNamesList = typeof tagSlugs === "string" && tagSlugs !== "" ? 
tagSlugs.split(",") : []; if (tagNamesList != undefined && tagNamesList.length != 0) { const workspaceFromDB = await Tag.find({ workspace: workspaceId }); tagIds = _.map(tagNamesList, (tagName: string) => { @@ -762,8 +749,7 @@ export const getSecrets = async (req: Request, res: Response) => { ); if (hasNoAccess) { throw UnauthorizedRequestError({ - message: - "You do not have the necessary permission(s) perform this action", + message: "You do not have the necessary permission(s) perform this action" }); } @@ -773,8 +759,8 @@ export const getSecrets = async (req: Request, res: Response) => { folder: folderId, $or: [ { user: req.user._id }, // personal secrets for this user - { user: { $exists: false } }, // shared secrets from workspace - ], + { user: { $exists: false } } // shared secrets from workspace + ] }; if (tagIds.length > 0) { @@ -801,8 +787,8 @@ export const getSecrets = async (req: Request, res: Response) => { environment, $or: [ { user: userId }, // personal secrets for this user - { user: { $exists: false } }, // shared secrets from workspace - ], + { user: { $exists: false } } // shared secrets from workspace + ] }; if (tagIds.length > 0) { @@ -820,7 +806,7 @@ export const getSecrets = async (req: Request, res: Response) => { workspace: workspaceId, environment, folder: folderId, - user: { $exists: false }, // shared secrets only from workspace + user: { $exists: false } // shared secrets only from workspace }; if (tagIds.length > 0) { @@ -838,7 +824,7 @@ export const getSecrets = async (req: Request, res: Response) => { serviceAccountId: req.serviceAccount?._id, serviceTokenDataId: req.serviceTokenData?._id, workspaceId: new Types.ObjectId(workspaceId as string), - secretIds: secrets.map((n: any) => n._id), + secretIds: secrets.map((n: any) => n._id) }); readAction && @@ -849,7 +835,7 @@ export const getSecrets = async (req: Request, res: Response) => { workspaceId: new Types.ObjectId(workspaceId as string), actions: [readAction], channel, - 
ipAddress: req.realIP, + ipAddress: req.realIP })); const postHogClient = await TelemetryService.getPostHogClient(); @@ -857,7 +843,7 @@ export const getSecrets = async (req: Request, res: Response) => { postHogClient.capture({ event: "secrets pulled", distinctId: await TelemetryService.getDistinctId({ - authData: req.authData, + authData: req.authData }), properties: { numberOfSecrets: secrets.length, @@ -865,13 +851,13 @@ export const getSecrets = async (req: Request, res: Response) => { workspaceId, channel, folderId, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } return res.status(200).send({ - secrets, + secrets }); }; @@ -925,9 +911,7 @@ export const updateSecrets = async (req: Request, res: Response) => { } } */ - const channel = req.headers?.["user-agent"]?.toLowerCase().includes("mozilla") - ? "web" - : "cli"; + const channel = req.headers?.["user-agent"]?.toLowerCase().includes("mozilla") ? "web" : "cli"; interface PatchSecret { id: string; @@ -943,51 +927,47 @@ export const updateSecrets = async (req: Request, res: Response) => { tags: string[]; } - const updateOperationsToPerform = req.body.secrets.map( - (secret: PatchSecret) => { - const { - secretKeyCiphertext, - secretKeyIV, - secretKeyTag, - secretValueCiphertext, - secretValueIV, - secretValueTag, - secretCommentCiphertext, - secretCommentIV, - secretCommentTag, - tags, - } = secret; + const updateOperationsToPerform = req.body.secrets.map((secret: PatchSecret) => { + const { + secretKeyCiphertext, + secretKeyIV, + secretKeyTag, + secretValueCiphertext, + secretValueIV, + secretValueTag, + secretCommentCiphertext, + secretCommentIV, + secretCommentTag, + tags + } = secret; - return { - updateOne: { - filter: { _id: new Types.ObjectId(secret.id) }, - update: { - $inc: { - version: 1, - }, - secretKeyCiphertext, - secretKeyIV, - secretKeyTag, - secretValueCiphertext, - secretValueIV, - secretValueTag, - algorithm: ALGORITHM_AES_256_GCM, - 
keyEncoding: ENCODING_SCHEME_UTF8, - tags, - ...(secretCommentCiphertext !== undefined && - secretCommentIV && - secretCommentTag - ? { - secretCommentCiphertext, - secretCommentIV, - secretCommentTag, - } - : {}), + return { + updateOne: { + filter: { _id: new Types.ObjectId(secret.id) }, + update: { + $inc: { + version: 1 }, - }, - }; - } - ); + secretKeyCiphertext, + secretKeyIV, + secretKeyTag, + secretValueCiphertext, + secretValueIV, + secretValueTag, + algorithm: ALGORITHM_AES_256_GCM, + keyEncoding: ENCODING_SCHEME_UTF8, + tags, + ...(secretCommentCiphertext !== undefined && secretCommentIV && secretCommentTag + ? { + secretCommentCiphertext, + secretCommentIV, + secretCommentTag + } + : {}) + } + } + }; + }); await Secret.bulkWrite(updateOperationsToPerform); @@ -1009,7 +989,7 @@ export const updateSecrets = async (req: Request, res: Response) => { secretCommentCiphertext, secretCommentIV, secretCommentTag, - tags, + tags } = secretModificationsBySecretId[secret._id.toString()]; return { @@ -1018,9 +998,7 @@ export const updateSecrets = async (req: Request, res: Response) => { workspace: secret.workspace, type: secret.type, environment: secret.environment, - secretKeyCiphertext: secretKeyCiphertext - ? secretKeyCiphertext - : secret.secretKeyCiphertext, + secretKeyCiphertext: secretKeyCiphertext ? secretKeyCiphertext : secret.secretKeyCiphertext, secretKeyIV: secretKeyIV ? secretKeyIV : secret.secretKeyIV, secretKeyTag: secretKeyTag ? secretKeyTag : secret.secretKeyTag, secretValueCiphertext: secretValueCiphertext @@ -1031,17 +1009,13 @@ export const updateSecrets = async (req: Request, res: Response) => { secretCommentCiphertext: secretCommentCiphertext ? secretCommentCiphertext : secret.secretCommentCiphertext, - secretCommentIV: secretCommentIV - ? secretCommentIV - : secret.secretCommentIV, - secretCommentTag: secretCommentTag - ? secretCommentTag - : secret.secretCommentTag, + secretCommentIV: secretCommentIV ? 
secretCommentIV : secret.secretCommentIV, + secretCommentTag: secretCommentTag ? secretCommentTag : secret.secretCommentTag, tags: tags ? tags : secret.tags, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }; - }), + }) }; await EESecretService.addSecretVersions(secretVersions); @@ -1062,8 +1036,8 @@ export const updateSecrets = async (req: Request, res: Response) => { setTimeout(async () => { await EventService.handleEvent({ event: eventPushSecrets({ - workspaceId: new Types.ObjectId(key), - }), + workspaceId: new Types.ObjectId(key) + }) }); }, 10000); @@ -1073,7 +1047,7 @@ export const updateSecrets = async (req: Request, res: Response) => { serviceAccountId: req.serviceAccount?._id, serviceTokenDataId: req.serviceTokenData?._id, workspaceId: new Types.ObjectId(key), - secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id), + secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id) }); // (EE) create (audit) log @@ -1085,7 +1059,7 @@ export const updateSecrets = async (req: Request, res: Response) => { workspaceId: new Types.ObjectId(key), actions: [updateAction], channel, - ipAddress: req.realIP, + ipAddress: req.realIP })); // (EE) take a secret snapshot @@ -1101,15 +1075,15 @@ export const updateSecrets = async (req: Request, res: Response) => { postHogClient.capture({ event: "secrets modified", distinctId: await TelemetryService.getDistinctId({ - authData: req.authData, + authData: req.authData }), properties: { numberOfSecrets: workspaceSecretObj[key].length, environment: workspaceSecretObj[key][0].environment, workspaceId: key, channel: channel, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } }); @@ -1117,9 +1091,9 @@ export const updateSecrets = async (req: Request, res: Response) => { return res.status(200).send({ secrets: await Secret.find({ _id: { - $in: req.secrets.map((secret: ISecret) => secret._id), - }, - }), 
+ $in: req.secrets.map((secret: ISecret) => secret._id) + } + }) }); }; @@ -1179,12 +1153,12 @@ export const deleteSecrets = async (req: Request, res: Response) => { await Secret.deleteMany({ _id: { - $in: toDelete, - }, + $in: toDelete + } }); await EESecretService.markDeletedSecretVersions({ - secretIds: toDelete, + secretIds: toDelete }); // group secrets into workspaces so deleted secrets can @@ -1202,8 +1176,8 @@ export const deleteSecrets = async (req: Request, res: Response) => { // trigger event - push secrets await EventService.handleEvent({ event: eventPushSecrets({ - workspaceId: new Types.ObjectId(key), - }), + workspaceId: new Types.ObjectId(key) + }) }); const deleteAction = await EELogService.createAction({ name: ACTION_DELETE_SECRETS, @@ -1211,7 +1185,7 @@ export const deleteSecrets = async (req: Request, res: Response) => { serviceAccountId: req.serviceAccount?._id, serviceTokenDataId: req.serviceTokenData?._id, workspaceId: new Types.ObjectId(key), - secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id), + secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id) }); // (EE) create (audit) log @@ -1223,7 +1197,7 @@ export const deleteSecrets = async (req: Request, res: Response) => { workspaceId: new Types.ObjectId(key), actions: [deleteAction], channel, - ipAddress: req.realIP, + ipAddress: req.realIP })); // (EE) take a secret snapshot @@ -1237,20 +1211,20 @@ export const deleteSecrets = async (req: Request, res: Response) => { postHogClient.capture({ event: "secrets deleted", distinctId: await TelemetryService.getDistinctId({ - authData: req.authData, + authData: req.authData }), properties: { numberOfSecrets: workspaceSecretObj[key].length, environment: workspaceSecretObj[key][0].environment, workspaceId: key, channel: channel, - userAgent: req.headers?.["user-agent"], - }, + userAgent: req.headers?.["user-agent"] + } }); } }); return res.status(200).send({ - secrets: req.secrets, + secrets: req.secrets }); }; 
diff --git a/backend/src/controllers/v2/serviceTokenDataController.ts b/backend/src/controllers/v2/serviceTokenDataController.ts index 304311da78..d81fb97d69 100644 --- a/backend/src/controllers/v2/serviceTokenDataController.ts +++ b/backend/src/controllers/v2/serviceTokenDataController.ts @@ -2,10 +2,7 @@ import { Request, Response } from "express"; import crypto from "crypto"; import bcrypt from "bcrypt"; import { ServiceAccount, ServiceTokenData, User } from "../../models"; -import { - AUTH_MODE_JWT, - AUTH_MODE_SERVICE_ACCOUNT, -} from "../../variables"; +import { AUTH_MODE_JWT, AUTH_MODE_SERVICE_ACCOUNT } from "../../variables"; import { getSaltRounds } from "../../config"; import { BadRequestError } from "../../utils/errors"; import Folder from "../../models/folder"; @@ -46,14 +43,13 @@ export const getServiceTokenData = async (req: Request, res: Response) => { if (!(req.authData.authPayload instanceof ServiceTokenData)) throw BadRequestError({ - message: "Failed accepted client validation for service token data", + message: "Failed accepted client validation for service token data" }); - const serviceTokenData = await ServiceTokenData.findById( - req.authData.authPayload._id - ) + const serviceTokenData = await ServiceTokenData.findById(req.authData.authPayload._id) .select("+encryptedKey +iv +tag") - .populate("user").lean(); + .populate("user") + .lean(); return res.status(200).json(serviceTokenData); }; @@ -68,29 +64,7 @@ export const getServiceTokenData = async (req: Request, res: Response) => { export const createServiceTokenData = async (req: Request, res: Response) => { let serviceTokenData; - const { - name, - workspaceId, - environment, - encryptedKey, - iv, - tag, - expiresIn, - secretPath, - permissions, - } = req.body; - - const folders = await Folder.findOne({ - workspace: workspaceId, - environment, - }); - - if (folders) { - const folder = getFolderByPath(folders.nodes, secretPath); - if (folder == undefined) { - throw BadRequestError({ 
message: "Path for service token does not exist" }) - } - } + const { name, workspaceId, encryptedKey, iv, tag, expiresIn, permissions, scopes } = req.body; const secret = crypto.randomBytes(16).toString("hex"); const secretHash = await bcrypt.hash(secret, await getSaltRounds()); @@ -103,10 +77,7 @@ export const createServiceTokenData = async (req: Request, res: Response) => { let user, serviceAccount; - if ( - req.authData.authMode === AUTH_MODE_JWT && - req.authData.authPayload instanceof User - ) { + if (req.authData.authMode === AUTH_MODE_JWT && req.authData.authPayload instanceof User) { user = req.authData.authPayload._id; } @@ -120,17 +91,16 @@ export const createServiceTokenData = async (req: Request, res: Response) => { serviceTokenData = await new ServiceTokenData({ name, workspace: workspaceId, - environment, user, serviceAccount, + scopes, lastUsed: new Date(), expiresAt, secretHash, encryptedKey, iv, tag, - secretPath, - permissions, + permissions }).save(); // return service token data without sensitive data @@ -142,7 +112,7 @@ export const createServiceTokenData = async (req: Request, res: Response) => { return res.status(200).send({ serviceToken, - serviceTokenData, + serviceTokenData }); }; @@ -155,11 +125,9 @@ export const createServiceTokenData = async (req: Request, res: Response) => { export const deleteServiceTokenData = async (req: Request, res: Response) => { const { serviceTokenDataId } = req.params; - const serviceTokenData = await ServiceTokenData.findByIdAndDelete( - serviceTokenDataId - ); + const serviceTokenData = await ServiceTokenData.findByIdAndDelete(serviceTokenDataId); return res.status(200).send({ - serviceTokenData, + serviceTokenData }); }; diff --git a/backend/src/helpers/secrets.ts b/backend/src/helpers/secrets.ts index 51731bb6a8..1a68930bc6 100644 --- a/backend/src/helpers/secrets.ts +++ b/backend/src/helpers/secrets.ts @@ -4,13 +4,14 @@ import { DeleteSecretParams, GetSecretParams, GetSecretsParams, - UpdateSecretParams, 
+ UpdateSecretParams } from "../interfaces/services/SecretService"; import { ISecret, + IServiceTokenData, Secret, SecretBlindIndexData, - ServiceTokenData, + ServiceTokenData } from "../models"; import { SecretVersion } from "../ee/models"; import { @@ -18,7 +19,7 @@ import { InternalServerError, SecretBlindIndexDataNotFoundError, SecretNotFoundError, - UnauthorizedRequestError, + UnauthorizedRequestError } from "../utils/errors"; import { ACTION_ADD_SECRETS, @@ -29,51 +30,57 @@ import { ENCODING_SCHEME_BASE64, ENCODING_SCHEME_UTF8, SECRET_PERSONAL, - SECRET_SHARED, + SECRET_SHARED } from "../variables"; import crypto from "crypto"; import * as argon2 from "argon2"; import { decryptSymmetric128BitHexKeyUTF8, - encryptSymmetric128BitHexKeyUTF8, + encryptSymmetric128BitHexKeyUTF8 } from "../utils/crypto"; import { TelemetryService } from "../services"; import { client, getEncryptionKey, getRootEncryptionKey } from "../config"; import { EELogService, EESecretService } from "../ee/services"; -import { - getAuthDataPayloadIdObj, - getAuthDataPayloadUserObj, -} from "../utils/auth"; +import { getAuthDataPayloadIdObj, getAuthDataPayloadUserObj } from "../utils/auth"; import { getFolderIdFromServiceToken } from "../services/FolderService"; +import picomatch from "picomatch"; + +export const isValidScope = ( + authPayload: IServiceTokenData, + environment: string, + secretPath: string +) => { + const { scopes: tkScopes } = authPayload; + const validScope = tkScopes.find( + (scope) => + picomatch.isMatch(secretPath, scope.secretPath, { strictSlashes: false }) && + scope.environment === environment + ); + + return Boolean(validScope); +}; /** * Returns an object containing secret [secret] but with its value, key, comment decrypted. 
- * + * * Precondition: the workspace for secret [secret] must have E2EE disabled * @param {ISecret} secret - secret to repackage to raw * @param {String} key - symmetric key to use to decrypt secret - * @returns + * @returns */ -export const repackageSecretToRaw = ({ - secret, - key, -}: { - secret: ISecret; - key: string; -}) => { - +export const repackageSecretToRaw = ({ secret, key }: { secret: ISecret; key: string }) => { const secretKey = decryptSymmetric128BitHexKeyUTF8({ ciphertext: secret.secretKeyCiphertext, iv: secret.secretKeyIV, tag: secret.secretKeyTag, - key, + key }); const secretValue = decryptSymmetric128BitHexKeyUTF8({ ciphertext: secret.secretValueCiphertext, iv: secret.secretValueIV, tag: secret.secretValueTag, - key, + key }); let secretComment = ""; @@ -83,11 +90,11 @@ export const repackageSecretToRaw = ({ ciphertext: secret.secretCommentCiphertext, iv: secret.secretCommentIV, tag: secret.secretCommentTag, - key, + key }); } - return ({ + return { _id: secret._id, version: secret.version, workspace: secret.workspace, @@ -96,9 +103,9 @@ export const repackageSecretToRaw = ({ user: secret.user, secretKey, secretValue, - secretComment, - }); -} + secretComment + }; +}; /** * Create secret blind index data containing encrypted blind index [salt] @@ -107,7 +114,7 @@ export const repackageSecretToRaw = ({ * @param {Types.ObjectId} obj.workspaceId */ export const createSecretBlindIndexDataHelper = async ({ - workspaceId, + workspaceId }: { workspaceId: Types.ObjectId; }) => { @@ -121,7 +128,7 @@ export const createSecretBlindIndexDataHelper = async ({ const { ciphertext: encryptedSaltCiphertext, iv: saltIV, - tag: saltTag, + tag: saltTag } = client.encryptSymmetric(salt, rootEncryptionKey); return await new SecretBlindIndexData({ @@ -130,16 +137,16 @@ export const createSecretBlindIndexDataHelper = async ({ saltIV, saltTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_BASE64, + keyEncoding: ENCODING_SCHEME_BASE64 }).save(); } 
else { const { ciphertext: encryptedSaltCiphertext, iv: saltIV, - tag: saltTag, + tag: saltTag } = encryptSymmetric128BitHexKeyUTF8({ plaintext: salt, - key: encryptionKey, + key: encryptionKey }); return await new SecretBlindIndexData({ @@ -148,7 +155,7 @@ export const createSecretBlindIndexDataHelper = async ({ saltIV, saltTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }).save(); } }; @@ -160,7 +167,7 @@ export const createSecretBlindIndexDataHelper = async ({ * @returns */ export const getSecretBlindIndexSaltHelper = async ({ - workspaceId, + workspaceId }: { workspaceId: Types.ObjectId; }) => { @@ -168,36 +175,30 @@ export const getSecretBlindIndexSaltHelper = async ({ const rootEncryptionKey = await getRootEncryptionKey(); const secretBlindIndexData = await SecretBlindIndexData.findOne({ - workspace: workspaceId, + workspace: workspaceId }).select("+algorithm +keyEncoding"); if (!secretBlindIndexData) throw SecretBlindIndexDataNotFoundError(); - if ( - rootEncryptionKey && - secretBlindIndexData.keyEncoding === ENCODING_SCHEME_BASE64 - ) { + if (rootEncryptionKey && secretBlindIndexData.keyEncoding === ENCODING_SCHEME_BASE64) { return client.decryptSymmetric( secretBlindIndexData.encryptedSaltCiphertext, rootEncryptionKey, secretBlindIndexData.saltIV, secretBlindIndexData.saltTag ); - } else if ( - encryptionKey && - secretBlindIndexData.keyEncoding === ENCODING_SCHEME_UTF8 - ) { + } else if (encryptionKey && secretBlindIndexData.keyEncoding === ENCODING_SCHEME_UTF8) { // decrypt workspace salt return decryptSymmetric128BitHexKeyUTF8({ ciphertext: secretBlindIndexData.encryptedSaltCiphertext, iv: secretBlindIndexData.saltIV, tag: secretBlindIndexData.saltTag, - key: encryptionKey, + key: encryptionKey }); } throw InternalServerError({ - message: "Failed to obtain workspace salt needed for secret blind indexing", + message: "Failed to obtain workspace salt needed for secret blind indexing" }); }; 
@@ -210,7 +211,7 @@ export const getSecretBlindIndexSaltHelper = async ({ */ export const generateSecretBlindIndexWithSaltHelper = async ({ secretName, - salt, + salt }: { secretName: string; salt: string; @@ -224,7 +225,7 @@ export const generateSecretBlindIndexWithSaltHelper = async ({ memoryCost: 65536, // default pool of 64 MiB per thread. hashLength: 32, parallelism: 1, - raw: true, + raw: true }) ).toString("base64"); @@ -240,7 +241,7 @@ export const generateSecretBlindIndexWithSaltHelper = async ({ */ export const generateSecretBlindIndexHelper = async ({ secretName, - workspaceId, + workspaceId }: { secretName: string; workspaceId: Types.ObjectId; @@ -250,16 +251,13 @@ export const generateSecretBlindIndexHelper = async ({ const rootEncryptionKey = await getRootEncryptionKey(); const secretBlindIndexData = await SecretBlindIndexData.findOne({ - workspace: workspaceId, + workspace: workspaceId }).select("+algorithm +keyEncoding"); if (!secretBlindIndexData) throw SecretBlindIndexDataNotFoundError(); let salt; - if ( - rootEncryptionKey && - secretBlindIndexData.keyEncoding === ENCODING_SCHEME_BASE64 - ) { + if (rootEncryptionKey && secretBlindIndexData.keyEncoding === ENCODING_SCHEME_BASE64) { salt = client.decryptSymmetric( secretBlindIndexData.encryptedSaltCiphertext, rootEncryptionKey, @@ -269,32 +267,29 @@ export const generateSecretBlindIndexHelper = async ({ const secretBlindIndex = await generateSecretBlindIndexWithSaltHelper({ secretName, - salt, + salt }); return secretBlindIndex; - } else if ( - encryptionKey && - secretBlindIndexData.keyEncoding === ENCODING_SCHEME_UTF8 - ) { + } else if (encryptionKey && secretBlindIndexData.keyEncoding === ENCODING_SCHEME_UTF8) { // decrypt workspace salt salt = decryptSymmetric128BitHexKeyUTF8({ ciphertext: secretBlindIndexData.encryptedSaltCiphertext, iv: secretBlindIndexData.saltIV, tag: secretBlindIndexData.saltTag, - key: encryptionKey, + key: encryptionKey }); const secretBlindIndex = await 
generateSecretBlindIndexWithSaltHelper({ secretName, - salt, + salt }); return secretBlindIndex; } throw InternalServerError({ - message: "Failed to generate secret blind index", + message: "Failed to generate secret blind index" }); }; @@ -323,38 +318,32 @@ export const createSecretHelper = async ({ secretCommentCiphertext, secretCommentIV, secretCommentTag, - secretPath = "/", + secretPath = "/" }: CreateSecretParams) => { - const secretBlindIndex = await generateSecretBlindIndexHelper({ secretName, - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); // if using service token filter towards the folderId by secretpath if (authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = authData.authPayload; - if (secretPath !== serviceTkScopedSecretPath) { + if (!isValidScope(authData.authPayload, environment, secretPath)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } - const folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + const folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); const exists = await Secret.exists({ secretBlindIndex, workspace: new Types.ObjectId(workspaceId), folder: folderId, type, - ...(type === SECRET_PERSONAL ? getAuthDataPayloadUserObj(authData) : {}), + ...(type === SECRET_PERSONAL ? 
getAuthDataPayloadUserObj(authData) : {}) }); if (exists) throw BadRequestError({ - message: "Failed to create secret that already exists", + message: "Failed to create secret that already exists" }); if (type === SECRET_PERSONAL) { @@ -365,13 +354,12 @@ export const createSecretHelper = async ({ secretBlindIndex, folder: folderId, workspace: new Types.ObjectId(workspaceId), - type: SECRET_SHARED, + type: SECRET_SHARED }); if (!exists) throw BadRequestError({ - message: - "Failed to create personal secret override for no corresponding shared secret", + message: "Failed to create personal secret override for no corresponding shared secret" }); } @@ -394,7 +382,7 @@ export const createSecretHelper = async ({ secretCommentTag, folder: folderId, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }).save(); const secretVersion = new SecretVersion({ @@ -414,12 +402,12 @@ export const createSecretHelper = async ({ secretValueIV, secretValueTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); // (EE) add version for new secret await EESecretService.addSecretVersions({ - secretVersions: [secretVersion], + secretVersions: [secretVersion] }); // (EE) create (audit) log @@ -427,7 +415,7 @@ export const createSecretHelper = async ({ name: ACTION_ADD_SECRETS, ...getAuthDataPayloadIdObj(authData), workspaceId, - secretIds: [secret._id], + secretIds: [secret._id] }); action && @@ -436,14 +424,14 @@ export const createSecretHelper = async ({ workspaceId, actions: [action], channel: authData.authChannel, - ipAddress: authData.authIP, + ipAddress: authData.authIP })); // (EE) take a secret snapshot await EESecretService.takeSecretSnapshot({ workspaceId, environment, - folderId, + folderId }); const postHogClient = await TelemetryService.getPostHogClient(); @@ -452,7 +440,7 @@ export const createSecretHelper = async ({ postHogClient.capture({ event: "secrets added", 
distinctId: await TelemetryService.getDistinctId({ - authData, + authData }), properties: { numberOfSecrets: 1, @@ -460,8 +448,8 @@ export const createSecretHelper = async ({ workspaceId, folderId, channel: authData.authChannel, - userAgent: authData.authUserAgent, - }, + userAgent: authData.authUserAgent + } }); } @@ -480,21 +468,16 @@ export const getSecretsHelper = async ({ workspaceId, environment, authData, - secretPath = "/", + secretPath = "/" }: GetSecretsParams) => { let secrets: ISecret[] = []; // if using service token filter towards the folderId by secretpath if (authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = authData.authPayload; - if (secretPath !== serviceTkScopedSecretPath) { + if (!isValidScope(authData.authPayload, environment, secretPath)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } - const folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + const folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); // get personal secrets first secrets = await Secret.find({ @@ -502,8 +485,10 @@ export const getSecretsHelper = async ({ environment, folder: folderId, type: SECRET_PERSONAL, - ...getAuthDataPayloadUserObj(authData), - }).populate("tags").lean(); + ...getAuthDataPayloadUserObj(authData) + }) + .populate("tags") + .lean(); // concat with shared secrets secrets = secrets.concat( @@ -513,9 +498,11 @@ export const getSecretsHelper = async ({ folder: folderId, type: SECRET_SHARED, secretBlindIndex: { - $nin: secrets.map((secret) => secret.secretBlindIndex), - }, - }).populate("tags").lean() + $nin: secrets.map((secret) => secret.secretBlindIndex) + } + }) + .populate("tags") + .lean() ); // (EE) create (audit) log @@ -523,7 +510,7 @@ export const getSecretsHelper = async ({ name: ACTION_READ_SECRETS, ...getAuthDataPayloadIdObj(authData), workspaceId, - secretIds: secrets.map((secret) => 
secret._id), + secretIds: secrets.map((secret) => secret._id) }); action && @@ -532,7 +519,7 @@ export const getSecretsHelper = async ({ workspaceId, actions: [action], channel: authData.authChannel, - ipAddress: authData.authIP, + ipAddress: authData.authIP })); const postHogClient = await TelemetryService.getPostHogClient(); @@ -541,7 +528,7 @@ export const getSecretsHelper = async ({ postHogClient.capture({ event: "secrets pulled", distinctId: await TelemetryService.getDistinctId({ - authData, + authData }), properties: { numberOfSecrets: secrets.length, @@ -549,8 +536,8 @@ export const getSecretsHelper = async ({ workspaceId, folderId, channel: authData.authChannel, - userAgent: authData.authUserAgent, - }, + userAgent: authData.authUserAgent + } }); } @@ -573,25 +560,20 @@ export const getSecretHelper = async ({ environment, type, authData, - secretPath = "/", + secretPath = "/" }: GetSecretParams) => { const secretBlindIndex = await generateSecretBlindIndexHelper({ secretName, - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); let secret: ISecret | null = null; // if using service token filter towards the folderId by secretpath if (authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = authData.authPayload; - if (secretPath !== serviceTkScopedSecretPath) { + if (!isValidScope(authData.authPayload, environment, secretPath)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } - const folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + const folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); // try getting personal secret first (if exists) secret = await Secret.findOne({ @@ -600,7 +582,7 @@ export const getSecretHelper = async ({ environment, folder: folderId, type: type ?? SECRET_PERSONAL, - ...(type === SECRET_PERSONAL ? 
getAuthDataPayloadUserObj(authData) : {}), + ...(type === SECRET_PERSONAL ? getAuthDataPayloadUserObj(authData) : {}) }).lean(); if (!secret) { @@ -611,7 +593,7 @@ export const getSecretHelper = async ({ workspace: new Types.ObjectId(workspaceId), environment, folder: folderId, - type: SECRET_SHARED, + type: SECRET_SHARED }).lean(); } @@ -622,7 +604,7 @@ export const getSecretHelper = async ({ name: ACTION_READ_SECRETS, ...getAuthDataPayloadIdObj(authData), workspaceId, - secretIds: [secret._id], + secretIds: [secret._id] }); action && @@ -631,7 +613,7 @@ export const getSecretHelper = async ({ workspaceId, actions: [action], channel: authData.authChannel, - ipAddress: authData.authIP, + ipAddress: authData.authIP })); const postHogClient = await TelemetryService.getPostHogClient(); @@ -640,7 +622,7 @@ export const getSecretHelper = async ({ postHogClient.capture({ event: "secrets pull", distinctId: await TelemetryService.getDistinctId({ - authData, + authData }), properties: { numberOfSecrets: 1, @@ -648,8 +630,8 @@ export const getSecretHelper = async ({ workspaceId, folderId, channel: authData.authChannel, - userAgent: authData.authUserAgent, - }, + userAgent: authData.authUserAgent + } }); } @@ -679,26 +661,21 @@ export const updateSecretHelper = async ({ secretValueCiphertext, secretValueIV, secretValueTag, - secretPath, + secretPath }: UpdateSecretParams) => { const secretBlindIndex = await generateSecretBlindIndexHelper({ secretName, - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); let secret: ISecret | null = null; // if using service token filter towards the folderId by secretpath if (authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = authData.authPayload; - if (secretPath !== serviceTkScopedSecretPath) { + if (!isValidScope(authData.authPayload, environment, secretPath)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } - 
const folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + const folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); if (type === SECRET_SHARED) { // case: update shared secret @@ -708,16 +685,16 @@ export const updateSecretHelper = async ({ workspace: new Types.ObjectId(workspaceId), environment, folder: folderId, - type, + type }, { secretValueCiphertext, secretValueIV, secretValueTag, - $inc: { version: 1 }, + $inc: { version: 1 } }, { - new: true, + new: true } ); } else { @@ -730,16 +707,16 @@ export const updateSecretHelper = async ({ environment, type, folder: folderId, - ...getAuthDataPayloadUserObj(authData), + ...getAuthDataPayloadUserObj(authData) }, { secretValueCiphertext, secretValueIV, secretValueTag, - $inc: { version: 1 }, + $inc: { version: 1 } }, { - new: true, + new: true } ); } @@ -763,12 +740,12 @@ export const updateSecretHelper = async ({ secretValueIV, secretValueTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); // (EE) add version for new secret await EESecretService.addSecretVersions({ - secretVersions: [secretVersion], + secretVersions: [secretVersion] }); // (EE) create (audit) log @@ -776,7 +753,7 @@ export const updateSecretHelper = async ({ name: ACTION_UPDATE_SECRETS, ...getAuthDataPayloadIdObj(authData), workspaceId, - secretIds: [secret._id], + secretIds: [secret._id] }); action && @@ -785,14 +762,14 @@ export const updateSecretHelper = async ({ workspaceId, actions: [action], channel: authData.authChannel, - ipAddress: authData.authIP, + ipAddress: authData.authIP })); // (EE) take a secret snapshot await EESecretService.takeSecretSnapshot({ workspaceId, environment, - folderId: secret?.folder, + folderId: secret?.folder }); const postHogClient = await TelemetryService.getPostHogClient(); @@ -801,7 +778,7 @@ export const updateSecretHelper = async ({ postHogClient.capture({ event: "secrets 
modified", distinctId: await TelemetryService.getDistinctId({ - authData, + authData }), properties: { numberOfSecrets: 1, @@ -809,8 +786,8 @@ export const updateSecretHelper = async ({ workspaceId, folderId, channel: authData.authChannel, - userAgent: authData.authUserAgent, - }, + userAgent: authData.authUserAgent + } }); } @@ -833,26 +810,20 @@ export const deleteSecretHelper = async ({ environment, type, authData, - secretPath = "/", + secretPath = "/" }: DeleteSecretParams) => { const secretBlindIndex = await generateSecretBlindIndexHelper({ secretName, - workspaceId: new Types.ObjectId(workspaceId), + workspaceId: new Types.ObjectId(workspaceId) }); // if using service token filter towards the folderId by secretpath if (authData.authPayload instanceof ServiceTokenData) { - const { secretPath: serviceTkScopedSecretPath } = authData.authPayload; - - if (secretPath !== serviceTkScopedSecretPath) { + if (!isValidScope(authData.authPayload, environment, secretPath)) { throw UnauthorizedRequestError({ message: "Folder Permission Denied" }); } } - const folderId = await getFolderIdFromServiceToken( - workspaceId, - environment, - secretPath - ); + const folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath); let secrets: ISecret[] = []; let secret: ISecret | null = null; @@ -862,7 +833,7 @@ export const deleteSecretHelper = async ({ secretBlindIndex, workspaceId: new Types.ObjectId(workspaceId), environment, - folder: folderId, + folder: folderId }).lean(); secret = await Secret.findOneAndDelete({ @@ -870,14 +841,14 @@ export const deleteSecretHelper = async ({ workspaceId: new Types.ObjectId(workspaceId), environment, type, - folder: folderId, + folder: folderId }).lean(); await Secret.deleteMany({ secretBlindIndex, workspaceId: new Types.ObjectId(workspaceId), environment, - folder: folderId, + folder: folderId }); } else { secret = await Secret.findOneAndDelete({ @@ -886,7 +857,7 @@ export const deleteSecretHelper = async ({ 
workspaceId: new Types.ObjectId(workspaceId), environment, type, - ...getAuthDataPayloadUserObj(authData), + ...getAuthDataPayloadUserObj(authData) }).lean(); if (secret) { @@ -897,7 +868,7 @@ export const deleteSecretHelper = async ({ if (!secret) throw SecretNotFoundError(); await EESecretService.markDeletedSecretVersions({ - secretIds: secrets.map((secret) => secret._id), + secretIds: secrets.map((secret) => secret._id) }); // (EE) create (audit) log @@ -905,22 +876,23 @@ export const deleteSecretHelper = async ({ name: ACTION_DELETE_SECRETS, ...getAuthDataPayloadIdObj(authData), workspaceId, - secretIds: secrets.map((secret) => secret._id), + secretIds: secrets.map((secret) => secret._id) }); - action && (await EELogService.createLog({ - ...getAuthDataPayloadIdObj(authData), - workspaceId, - actions: [action], - channel: authData.authChannel, - ipAddress: authData.authIP, - })); + action && + (await EELogService.createLog({ + ...getAuthDataPayloadIdObj(authData), + workspaceId, + actions: [action], + channel: authData.authChannel, + ipAddress: authData.authIP + })); // (EE) take a secret snapshot await EESecretService.takeSecretSnapshot({ workspaceId, environment, - folderId: secret?.folder, + folderId: secret?.folder }); const postHogClient = await TelemetryService.getPostHogClient(); @@ -929,7 +901,7 @@ export const deleteSecretHelper = async ({ postHogClient.capture({ event: "secrets deleted", distinctId: await TelemetryService.getDistinctId({ - authData, + authData }), properties: { numberOfSecrets: secrets.length, @@ -937,13 +909,13 @@ export const deleteSecretHelper = async ({ workspaceId, folderId, channel: authData.authChannel, - userAgent: authData.authUserAgent, - }, + userAgent: authData.authUserAgent + } }); } - return ({ + return { secrets, - secret, - }); + secret + }; }; diff --git a/backend/src/models/serviceTokenData.ts b/backend/src/models/serviceTokenData.ts index 57528a4e94..804184386a 100644 --- a/backend/src/models/serviceTokenData.ts +++ 
b/backend/src/models/serviceTokenData.ts @@ -4,7 +4,10 @@ export interface IServiceTokenData extends Document { _id: Types.ObjectId; name: string; workspace: Types.ObjectId; - environment: string; + scopes: Array<{ + environment: string; + secretPath: string; + }>; user: Types.ObjectId; serviceAccount: Types.ObjectId; lastUsed: Date; @@ -13,7 +16,6 @@ export interface IServiceTokenData extends Document { encryptedKey: string; iv: string; tag: string; - secretPath: string; permissions: string[]; } @@ -21,68 +23,72 @@ const serviceTokenDataSchema = new Schema( { name: { type: String, - required: true, + required: true }, workspace: { type: Schema.Types.ObjectId, ref: "Workspace", - required: true, + required: true }, - environment: { - type: String, - required: true, + scopes: { + type: [ + { + environment: { + type: String, + required: true + }, + secretPath: { + type: String, + default: "/", + required: true + } + } + ], + required: true }, user: { type: Schema.Types.ObjectId, ref: "User", - required: true, + required: true }, serviceAccount: { type: Schema.Types.ObjectId, - ref: "ServiceAccount", + ref: "ServiceAccount" }, lastUsed: { - type: Date, + type: Date }, expiresAt: { - type: Date, + type: Date }, secretHash: { type: String, required: true, - select: false, + select: false }, encryptedKey: { type: String, - select: false, + select: false }, iv: { type: String, - select: false, + select: false }, tag: { type: String, - select: false, + select: false }, permissions: { type: [String], enum: ["read", "write"], - default: ["read"], - }, - secretPath: { - type: String, - default: "/", - required: true, - }, + default: ["read"] + } }, { - timestamps: true, + timestamps: true } ); -const ServiceTokenData = model( - "ServiceTokenData", - serviceTokenDataSchema -); +const ServiceTokenData = model("ServiceTokenData", serviceTokenDataSchema); export default ServiceTokenData; diff --git a/backend/src/routes/v2/serviceTokenData.ts 
b/backend/src/routes/v2/serviceTokenData.ts index 33ffad3cf5..84f443debb 100644 --- a/backend/src/routes/v2/serviceTokenData.ts +++ b/backend/src/routes/v2/serviceTokenData.ts @@ -4,7 +4,7 @@ import { requireAuth, requireServiceTokenDataAuth, requireWorkspaceAuth, - validateRequest, + validateRequest } from "../../middleware"; import { body, param } from "express-validator"; import { @@ -13,14 +13,14 @@ import { AUTH_MODE_SERVICE_ACCOUNT, AUTH_MODE_SERVICE_TOKEN, MEMBER, - PERMISSION_WRITE_SECRETS, + PERMISSION_WRITE_SECRETS } from "../../variables"; import { serviceTokenDataController } from "../../controllers/v2"; router.get( "/", requireAuth({ - acceptedAuthModes: [AUTH_MODE_SERVICE_TOKEN], + acceptedAuthModes: [AUTH_MODE_SERVICE_TOKEN] }), serviceTokenDataController.getServiceTokenData ); @@ -28,33 +28,30 @@ router.get( router.post( "/", requireAuth({ - acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_SERVICE_ACCOUNT], + acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_SERVICE_ACCOUNT] }), requireWorkspaceAuth({ acceptedRoles: [ADMIN, MEMBER], locationWorkspaceId: "body", locationEnvironment: "body", - requiredPermissions: [PERMISSION_WRITE_SECRETS], + requiredPermissions: [PERMISSION_WRITE_SECRETS] }), body("name").exists().isString().trim(), body("workspaceId").exists().isString().trim(), - body("environment").exists().isString().trim(), + body("scopes").exists().isArray(), + body("scopes.*.environment").exists().isString().trim(), + body("scopes.*.secretPath").exists().isString().trim(), body("encryptedKey").exists().isString().trim(), body("iv").exists().isString().trim(), - body("secretPath").isString().default("/").trim(), body("tag").exists().isString().trim(), body("expiresIn").exists().isNumeric(), // measured in ms body("permissions") .isArray({ min: 1 }) .custom((value: string[]) => { const allowedPermissions = ["read", "write"]; - const invalidValues = value.filter( - (v) => !allowedPermissions.includes(v) - ); + const invalidValues = value.filter((v) => 
!allowedPermissions.includes(v)); if (invalidValues.length > 0) { - throw new Error( - `permissions contains invalid values: ${invalidValues.join(", ")}` - ); + throw new Error(`permissions contains invalid values: ${invalidValues.join(", ")}`); } return true; @@ -66,10 +63,10 @@ router.post( router.delete( "/:serviceTokenDataId", requireAuth({ - acceptedAuthModes: [AUTH_MODE_JWT], + acceptedAuthModes: [AUTH_MODE_JWT] }), requireServiceTokenDataAuth({ - acceptedRoles: [ADMIN, MEMBER], + acceptedRoles: [ADMIN, MEMBER] }), param("serviceTokenDataId").exists().trim(), validateRequest, diff --git a/backend/src/utils/setup/backfillData.ts b/backend/src/utils/setup/backfillData.ts index a77d3ee275..801a75e6fa 100644 --- a/backend/src/utils/setup/backfillData.ts +++ b/backend/src/utils/setup/backfillData.ts @@ -13,14 +13,14 @@ import { Secret, SecretBlindIndexData, ServiceTokenData, - Workspace, + Workspace } from "../../models"; import { generateKeyPair } from "../../utils/crypto"; import { client, getEncryptionKey, getRootEncryptionKey } from "../../config"; import { ALGORITHM_AES_256_GCM, ENCODING_SCHEME_BASE64, - ENCODING_SCHEME_UTF8, + ENCODING_SCHEME_UTF8 } from "../../variables"; import { InternalServerError } from "../errors"; @@ -29,10 +29,7 @@ import { InternalServerError } from "../errors"; * corresponding secret versions */ export const backfillSecretVersions = async () => { - await Secret.updateMany( - { version: { $exists: false } }, - { $set: { version: 1 } } - ); + await Secret.updateMany({ version: { $exists: false } }, { $set: { version: 1 } }); const unversionedSecrets: ISecret[] = await Secret.aggregate([ { @@ -40,14 +37,14 @@ export const backfillSecretVersions = async () => { from: "secretversions", localField: "_id", foreignField: "secret", - as: "versions", - }, + as: "versions" + } }, { $match: { - versions: { $size: 0 }, - }, - }, + versions: { $size: 0 } + } + } ]); if (unversionedSecrets.length > 0) { @@ -62,9 +59,9 @@ export const 
backfillSecretVersions = async () => { workspace: s.workspace, environment: s.environment, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }) - ), + ) }); } console.log("Migration: Secret version migration v1 complete"); @@ -80,8 +77,8 @@ export const backfillBots = async () => { const workspaceIdsWithBot = await Bot.distinct("workspace"); const workspaceIdsToAddBot = await Workspace.distinct("_id", { _id: { - $nin: workspaceIdsWithBot, - }, + $nin: workspaceIdsWithBot + } }); if (workspaceIdsToAddBot.length === 0) return; @@ -94,7 +91,7 @@ export const backfillBots = async () => { const { ciphertext: encryptedPrivateKey, iv, - tag, + tag } = client.encryptSymmetric(privateKey, rootEncryptionKey); return new Bot({ @@ -106,16 +103,16 @@ export const backfillBots = async () => { iv, tag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_BASE64, + keyEncoding: ENCODING_SCHEME_BASE64 }); } else if (encryptionKey) { const { ciphertext: encryptedPrivateKey, iv, - tag, + tag } = encryptSymmetric128BitHexKeyUTF8({ plaintext: privateKey, - key: encryptionKey, + key: encryptionKey }); return new Bot({ @@ -127,13 +124,12 @@ export const backfillBots = async () => { iv, tag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); } throw InternalServerError({ - message: - "Failed to backfill workspace bots due to missing encryption key", + message: "Failed to backfill workspace bots due to missing encryption key" }); }) ); @@ -149,13 +145,11 @@ export const backfillSecretBlindIndexData = async () => { const encryptionKey = await getEncryptionKey(); const rootEncryptionKey = await getRootEncryptionKey(); - const workspaceIdsBlindIndexed = await SecretBlindIndexData.distinct( - "workspace" - ); + const workspaceIdsBlindIndexed = await SecretBlindIndexData.distinct("workspace"); const workspaceIdsToBlindIndex = await Workspace.distinct("_id", { _id: { - 
$nin: workspaceIdsBlindIndexed, - }, + $nin: workspaceIdsBlindIndexed + } }); if (workspaceIdsToBlindIndex.length === 0) return; @@ -168,7 +162,7 @@ export const backfillSecretBlindIndexData = async () => { const { ciphertext: encryptedSaltCiphertext, iv: saltIV, - tag: saltTag, + tag: saltTag } = client.encryptSymmetric(salt, rootEncryptionKey); return new SecretBlindIndexData({ @@ -177,16 +171,16 @@ export const backfillSecretBlindIndexData = async () => { saltIV, saltTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_BASE64, + keyEncoding: ENCODING_SCHEME_BASE64 }); } else if (encryptionKey) { const { ciphertext: encryptedSaltCiphertext, iv: saltIV, - tag: saltTag, + tag: saltTag } = encryptSymmetric128BitHexKeyUTF8({ plaintext: salt, - key: encryptionKey, + key: encryptionKey }); return new SecretBlindIndexData({ @@ -195,13 +189,12 @@ export const backfillSecretBlindIndexData = async () => { saltIV, saltTag, algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, + keyEncoding: ENCODING_SCHEME_UTF8 }); } throw InternalServerError({ - message: - "Failed to backfill secret blind index data due to missing encryption key", + message: "Failed to backfill secret blind index data due to missing encryption key" }); }) ); @@ -219,17 +212,17 @@ export const backfillEncryptionMetadata = async () => { await Secret.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); @@ -237,17 +230,17 @@ export const backfillEncryptionMetadata = async () => { await SecretVersion.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); @@ -255,17 +248,17 @@ 
export const backfillEncryptionMetadata = async () => { await SecretBlindIndexData.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); @@ -273,17 +266,17 @@ export const backfillEncryptionMetadata = async () => { await Bot.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); @@ -291,17 +284,17 @@ export const backfillEncryptionMetadata = async () => { await BackupPrivateKey.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); @@ -309,17 +302,17 @@ export const backfillEncryptionMetadata = async () => { await IntegrationAuth.updateMany( { algorithm: { - $exists: false, + $exists: false }, keyEncoding: { - $exists: false, - }, + $exists: false + } }, { $set: { algorithm: ALGORITHM_AES_256_GCM, - keyEncoding: ENCODING_SCHEME_UTF8, - }, + keyEncoding: ENCODING_SCHEME_UTF8 + } } ); }; @@ -328,26 +321,26 @@ export const backfillSecretFolders = async () => { await Secret.updateMany( { folder: { - $exists: false, - }, + $exists: false + } }, { $set: { - folder: "root", - }, + folder: "root" + } } ); await SecretVersion.updateMany( { folder: { - $exists: false, - }, + $exists: false + } }, { $set: { - folder: "root", - }, + folder: "root" + } } ); @@ -355,20 +348,20 @@ export const backfillSecretFolders = async () => { await SecretVersion.updateMany( { tags: { - $exists: false, - }, + $exists: false + } }, { $set: { - tags: [], - }, + tags: [] + } } ); let 
secretSnapshots = await SecretSnapshot.find({ environment: { - $exists: false, - }, + $exists: false + } }) .populate<{ secretVersions: ISecretVersion[] }>("secretVersions") .limit(50); @@ -377,8 +370,7 @@ export const backfillSecretFolders = async () => { for (const secSnapshot of secretSnapshots) { const groupSnapByEnv: Record> = {}; secSnapshot.secretVersions.forEach((secVer) => { - if (!groupSnapByEnv?.[secVer.environment]) - groupSnapByEnv[secVer.environment] = []; + if (!groupSnapByEnv?.[secVer.environment]) groupSnapByEnv[secVer.environment] = []; groupSnapByEnv[secVer.environment].push(secVer); }); @@ -390,7 +382,7 @@ export const backfillSecretFolders = async () => { ...secSnapshot.toObject({ virtuals: false }), _id: new Types.ObjectId(), environment: snapEnv, - secretVersions: secretIdsOfEnvGroup, + secretVersions: secretIdsOfEnvGroup }; }); @@ -400,8 +392,8 @@ export const backfillSecretFolders = async () => { secretSnapshots = await SecretSnapshot.find({ environment: { - $exists: false, - }, + $exists: false + } }) .populate<{ secretVersions: ISecretVersion[] }>("secretVersions") .limit(50); @@ -414,13 +406,13 @@ export const backfillServiceToken = async () => { await ServiceTokenData.updateMany( { secretPath: { - $exists: false, - }, + $exists: false + } }, { $set: { - secretPath: "/", - }, + secretPath: "/" + } } ); console.log("Migration: Service token migration v1 complete"); @@ -430,14 +422,33 @@ export const backfillIntegration = async () => { await Integration.updateMany( { secretPath: { - $exists: false, - }, + $exists: false + } }, { $set: { - secretPath: "/", - }, + secretPath: "/" + } } ); console.log("Migration: Integration migration v1 complete"); }; + +export const backfillServiceTokenMultiScope = async () => { + await ServiceTokenData.updateMany( + { + scopes: { + $exists: false + } + }, + [ + { + $set: { + scopes: [{ environment: "$environment", secretPath: "$secretPath" }] + } + } + ] + ); + + console.log("Migration: Service token 
migration v2 complete"); +}; diff --git a/backend/src/utils/setup/index.ts b/backend/src/utils/setup/index.ts index 16d9ee1ae3..00f41fe676 100644 --- a/backend/src/utils/setup/index.ts +++ b/backend/src/utils/setup/index.ts @@ -14,17 +14,15 @@ import { backfillSecretFolders, backfillSecretVersions, backfillServiceToken, + backfillServiceTokenMultiScope } from "./backfillData"; -import { - reencryptBotPrivateKeys, - reencryptSecretBlindIndexDataSalts, -} from "./reencryptData"; +import { reencryptBotPrivateKeys, reencryptSecretBlindIndexDataSalts } from "./reencryptData"; import { getClientIdGoogle, getClientSecretGoogle, getMongoURL, getNodeEnv, - getSentryDSN, + getSentryDSN } from "../../config"; import { initializePassport } from "../auth"; @@ -79,6 +77,7 @@ export const setup = async () => { await backfillSecretFolders(); await backfillServiceToken(); await backfillIntegration(); + await backfillServiceTokenMultiScope(); // re-encrypt any data previously encrypted under server hex 128-bit ENCRYPTION_KEY // to base64 256-bit ROOT_ENCRYPTION_KEY @@ -90,7 +89,7 @@ export const setup = async () => { dsn: await getSentryDSN(), tracesSampleRate: 1.0, debug: (await getNodeEnv()) === "production" ? 
false : true, - environment: await getNodeEnv(), + environment: await getNodeEnv() }); await createTestUserForDevelopment(); diff --git a/backend/src/validation/serviceTokenData.ts b/backend/src/validation/serviceTokenData.ts index 0ade7f96e2..580bcbdb98 100644 --- a/backend/src/validation/serviceTokenData.ts +++ b/backend/src/validation/serviceTokenData.ts @@ -1,22 +1,19 @@ import { Types } from "mongoose"; import { - ISecret, - IServiceAccount, - IServiceTokenData, - IUser, - ServiceAccount, - ServiceTokenData, - User, + ISecret, + IServiceAccount, + IServiceTokenData, + IUser, + ServiceAccount, + ServiceTokenData, + User } from "../models"; -import { - ServiceTokenDataNotFoundError, - UnauthorizedRequestError, -} from "../utils/errors"; +import { ServiceTokenDataNotFoundError, UnauthorizedRequestError } from "../utils/errors"; import { - AUTH_MODE_API_KEY, - AUTH_MODE_JWT, - AUTH_MODE_SERVICE_ACCOUNT, - AUTH_MODE_SERVICE_TOKEN, + AUTH_MODE_API_KEY, + AUTH_MODE_JWT, + AUTH_MODE_SERVICE_ACCOUNT, + AUTH_MODE_SERVICE_TOKEN } from "../variables"; import { validateUserClientForWorkspace } from "./user"; import { validateServiceAccountClientForWorkspace } from "./serviceAccount"; @@ -30,65 +27,71 @@ import { validateServiceAccountClientForWorkspace } from "./serviceAccount"; * @param {Array<'admin' | 'member'>} obj.acceptedRoles - accepted workspace roles */ export const validateClientForServiceTokenData = async ({ - authData, - serviceTokenDataId, - acceptedRoles, + authData, + serviceTokenDataId, + acceptedRoles }: { - authData: { - authMode: string; - authPayload: IUser | IServiceAccount | IServiceTokenData; - }; - serviceTokenDataId: Types.ObjectId; - acceptedRoles: Array<"admin" | "member">; + authData: { + authMode: string; + authPayload: IUser | IServiceAccount | IServiceTokenData; + }; + serviceTokenDataId: Types.ObjectId; + acceptedRoles: Array<"admin" | "member">; }) => { - const serviceTokenData = await ServiceTokenData - .findById(serviceTokenDataId) - 
.select("+encryptedKey +iv +tag") - .populate<{ user: IUser }>("user"); + const serviceTokenData = await ServiceTokenData.findById(serviceTokenDataId) + .select("+encryptedKey +iv +tag") + .populate<{ user: IUser }>("user"); - if (!serviceTokenData) throw ServiceTokenDataNotFoundError({ - message: "Failed to find service token data", + if (!serviceTokenData) + throw ServiceTokenDataNotFoundError({ + message: "Failed to find service token data" }); - if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) { - await validateUserClientForWorkspace({ - user: authData.authPayload, - workspaceId: serviceTokenData.workspace, - acceptedRoles, - }); - - return serviceTokenData; - } + if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) { + await validateUserClientForWorkspace({ + user: authData.authPayload, + workspaceId: serviceTokenData.workspace, + acceptedRoles + }); - if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) { - await validateServiceAccountClientForWorkspace({ - serviceAccount: authData.authPayload, - workspaceId: serviceTokenData.workspace, - }); - - return serviceTokenData; - } + return serviceTokenData; + } - if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) { - throw UnauthorizedRequestError({ - message: "Failed service token authorization for service token data", - }); - } + if ( + authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && + authData.authPayload instanceof ServiceAccount + ) { + await validateServiceAccountClientForWorkspace({ + serviceAccount: authData.authPayload, + workspaceId: serviceTokenData.workspace + }); - if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) { - await validateUserClientForWorkspace({ - user: authData.authPayload, - workspaceId: serviceTokenData.workspace, - acceptedRoles, - }); - - return serviceTokenData; - } - + return 
serviceTokenData; + } + + if ( + authData.authMode === AUTH_MODE_SERVICE_TOKEN && + authData.authPayload instanceof ServiceTokenData + ) { throw UnauthorizedRequestError({ - message: "Failed client authorization for service token data", + message: "Failed service token authorization for service token data" }); -} + } + + if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) { + await validateUserClientForWorkspace({ + user: authData.authPayload, + workspaceId: serviceTokenData.workspace, + acceptedRoles + }); + + return serviceTokenData; + } + + throw UnauthorizedRequestError({ + message: "Failed client authorization for service token data" + }); +}; /** * Validate that service token (client) can access workspace @@ -101,42 +104,42 @@ export const validateClientForServiceTokenData = async ({ * @param {String[]} requiredPermissions - required permissions as part of the endpoint */ export const validateServiceTokenDataClientForWorkspace = async ({ - serviceTokenData, - workspaceId, - environment, - requiredPermissions, + serviceTokenData, + workspaceId, + environment, + requiredPermissions }: { - serviceTokenData: IServiceTokenData; - workspaceId: Types.ObjectId; - environment?: string; - requiredPermissions?: string[]; + serviceTokenData: IServiceTokenData; + workspaceId: Types.ObjectId; + environment?: string; + requiredPermissions?: string[]; }) => { - if (!serviceTokenData.workspace.equals(workspaceId)) { - // case: invalid workspaceId passed + if (!serviceTokenData.workspace.equals(workspaceId)) { + // case: invalid workspaceId passed + throw UnauthorizedRequestError({ + message: "Failed service token authorization for the given workspace" + }); + } + + if (environment) { + // case: environment is specified + + if (!serviceTokenData.scopes.find(({ environment: tkEnv }) => tkEnv === environment)) { + // case: invalid environment passed + throw UnauthorizedRequestError({ + message: "Failed service token authorization for the given 
workspace environment" + }); + } + + requiredPermissions?.forEach((permission) => { + if (!serviceTokenData.permissions.includes(permission)) { throw UnauthorizedRequestError({ - message: "Failed service token authorization for the given workspace", + message: `Failed service token authorization for the given workspace environment action: ${permission}` }); - } - - if (environment) { - // case: environment is specified - - if (serviceTokenData.environment !== environment) { - // case: invalid environment passed - throw UnauthorizedRequestError({ - message: "Failed service token authorization for the given workspace environment", - }); - } - - requiredPermissions?.forEach((permission) => { - if (!serviceTokenData.permissions.includes(permission)) { - throw UnauthorizedRequestError({ - message: `Failed service token authorization for the given workspace environment action: ${permission}`, - }); - } - }); - } -} + } + }); + } +}; /** * Validate that service token (client) can access secrets @@ -147,36 +150,35 @@ export const validateServiceTokenDataClientForWorkspace = async ({ * @param {string[]} requiredPermissions - required permissions as part of the endpoint */ export const validateServiceTokenDataClientForSecrets = async ({ - serviceTokenData, - secrets, - requiredPermissions, + serviceTokenData, + secrets, + requiredPermissions }: { - serviceTokenData: IServiceTokenData; - secrets: ISecret[]; - requiredPermissions?: string[]; + serviceTokenData: IServiceTokenData; + secrets: ISecret[]; + requiredPermissions?: string[]; }) => { + secrets.forEach((secret: ISecret) => { + if (!serviceTokenData.workspace.equals(secret.workspace)) { + // case: invalid workspaceId passed + throw UnauthorizedRequestError({ + message: "Failed service token authorization for the given workspace" + }); + } - secrets.forEach((secret: ISecret) => { - if (!serviceTokenData.workspace.equals(secret.workspace)) { - // case: invalid workspaceId passed - throw UnauthorizedRequestError({ - 
message: "Failed service token authorization for the given workspace", - }); - } - - if (serviceTokenData.environment !== secret.environment) { - // case: invalid environment passed - throw UnauthorizedRequestError({ - message: "Failed service token authorization for the given workspace environment", - }); - } - - requiredPermissions?.forEach((permission) => { - if (!serviceTokenData.permissions.includes(permission)) { - throw UnauthorizedRequestError({ - message: `Failed service token authorization for the given workspace environment action: ${permission}`, - }); - } + if (!serviceTokenData.scopes.find(({ environment: tkEnv }) => tkEnv === secret.environment)) { + // case: invalid environment passed + throw UnauthorizedRequestError({ + message: "Failed service token authorization for the given workspace environment" + }); + } + + requiredPermissions?.forEach((permission) => { + if (!serviceTokenData.permissions.includes(permission)) { + throw UnauthorizedRequestError({ + message: `Failed service token authorization for the given workspace environment action: ${permission}` }); + } }); -} \ No newline at end of file + }); +}; diff --git a/cli/packages/api/model.go b/cli/packages/api/model.go index 954982dc94..09e6779e0d 100644 --- a/cli/packages/api/model.go +++ b/cli/packages/api/model.go @@ -181,14 +181,16 @@ type GetServiceTokenDetailsResponse struct { ID string `json:"_id"` Name string `json:"name"` Workspace string `json:"workspace"` - Environment string `json:"environment"` ExpiresAt time.Time `json:"expiresAt"` EncryptedKey string `json:"encryptedKey"` Iv string `json:"iv"` Tag string `json:"tag"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` - SecretPath string `json:"secretPath"` + Scopes []struct { + Environment string `json:"environment"` + SecretPath string `json:"secretPath"` + } `json:"scopes"` } type GetAccessibleEnvironmentsRequest struct { diff --git a/cli/packages/cmd/export.go b/cli/packages/cmd/export.go index 
453d1b75c7..c41750134a 100644 --- a/cli/packages/cmd/export.go +++ b/cli/packages/cmd/export.go @@ -83,7 +83,7 @@ var exportCmd = &cobra.Command{ var output string if shouldExpandSecrets { - substitutions := util.SubstituteSecrets(secrets) + substitutions := util.ExpandSecrets(secrets, infisicalToken) output, err = formatEnvs(substitutions, format) if err != nil { util.HandleError(err) diff --git a/cli/packages/cmd/run.go b/cli/packages/cmd/run.go index 8b82faa65c..2303cbe2e1 100644 --- a/cli/packages/cmd/run.go +++ b/cli/packages/cmd/run.go @@ -100,7 +100,7 @@ var runCmd = &cobra.Command{ } if shouldExpandSecrets { - secrets = util.SubstituteSecrets(secrets) + secrets = util.ExpandSecrets(secrets, infisicalToken) } secretsByKey := getSecretsByKeys(secrets) diff --git a/cli/packages/cmd/secrets.go b/cli/packages/cmd/secrets.go index 0d6181a565..688dd55bf1 100644 --- a/cli/packages/cmd/secrets.go +++ b/cli/packages/cmd/secrets.go @@ -65,7 +65,7 @@ var secretsCmd = &cobra.Command{ } if shouldExpandSecrets { - secrets = util.SubstituteSecrets(secrets) + secrets = util.ExpandSecrets(secrets, infisicalToken) } visualize.PrintAllSecretDetails(secrets) diff --git a/cli/packages/util/log.go b/cli/packages/util/log.go index d2bcbe6b03..a9bf75ec12 100644 --- a/cli/packages/util/log.go +++ b/cli/packages/util/log.go @@ -45,5 +45,5 @@ func PrintErrorMessageAndExit(messages ...string) { } func printError(e error) { - color.New(color.FgRed).Fprintf(os.Stderr, "Hmm, we ran into an error: %v", e) + color.New(color.FgRed).Fprintf(os.Stderr, "Hmm, we ran into an error: %v\n", e) } diff --git a/cli/packages/util/secrets.go b/cli/packages/util/secrets.go index 3f742372f9..d49e926072 100644 --- a/cli/packages/util/secrets.go +++ b/cli/packages/util/secrets.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "os" + "path" "regexp" "strings" @@ -16,7 +17,7 @@ import ( "github.com/rs/zerolog/log" ) -func GetPlainTextSecretsViaServiceToken(fullServiceToken string) 
([]models.SingleEnvironmentVariable, api.GetServiceTokenDetailsResponse, error) { +func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string) ([]models.SingleEnvironmentVariable, api.GetServiceTokenDetailsResponse, error) { serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4) if len(serviceTokenParts) < 4 { return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("invalid service token entered. Please double check your service token and try again") @@ -34,10 +35,19 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string) ([]models.Singl return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to get service token details. [err=%v]", err) } + // if multiple scopes are there then user needs to specify which environment and secret path + if environment == "" { + if len(serviceTokenDetails.Scopes) != 1 { + return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("you need to provide the --env for multiple environment scoped token") + } else { + environment = serviceTokenDetails.Scopes[0].Environment + } + } + encryptedSecrets, err := api.CallGetSecretsV3(httpClient, api.GetEncryptedSecretsV3Request{ WorkspaceId: serviceTokenDetails.Workspace, - Environment: serviceTokenDetails.Environment, - SecretPath: serviceTokenDetails.SecretPath, + Environment: environment, + SecretPath: secretPath, }) if err != nil { @@ -189,11 +199,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models } else { log.Debug().Msg("Trying to fetch secrets using service token") - secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(infisicalToken) - - // if serviceTokenDetails.Environment != params.Environment { - // PrintErrorMessageAndExit(fmt.Sprintf("Fetch secrets failed: token allows [%s] environment access, not [%s]. 
Service tokens are environment-specific; no need for --env flag.", params.Environment, serviceTokenDetails.Environment)) - // } + secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(infisicalToken, params.Environment, params.SecretsPath) } return secretsToReturn, errorToReturn @@ -279,22 +285,103 @@ func getExpandedEnvVariable(secrets []models.SingleEnvironmentVariable, variable return "${" + variableWeAreLookingFor + "}" } -func SubstituteSecrets(secrets []models.SingleEnvironmentVariable) []models.SingleEnvironmentVariable { - hashMapOfCompleteVariables := make(map[string]string) - hashMapOfSelfRefs := make(map[string]string) - expandedSecrets := []models.SingleEnvironmentVariable{} - - for _, secret := range secrets { - expandedVariable := getExpandedEnvVariable(secrets, secret.Key, hashMapOfCompleteVariables, hashMapOfSelfRefs) - expandedSecrets = append(expandedSecrets, models.SingleEnvironmentVariable{ - Key: secret.Key, - Value: expandedVariable, - Type: secret.Type, - }) +var secRefRegex = regexp.MustCompile(`\${([^\}]*)}`) +func recursivelyExpandSecret(expandedSecs map[string]string, interpolatedSecs map[string]string, crossSecRefFetch func(env string, path []string, key string) string, key string) string { + if v, ok := expandedSecs[key]; ok { + return v } - return expandedSecrets + interpolatedVal, ok := interpolatedSecs[key] + if !ok { + HandleError(fmt.Errorf("Could not find refered secret - %s", key), "Kindly check whether its provided") + } + + refs := secRefRegex.FindAllStringSubmatch(interpolatedVal, -1) + for _, val := range refs { + // key: "${something}" val: [${something},something] + interpolatedExp, interpolationKey := val[0], val[1] + ref := strings.Split(interpolationKey, ".") + + // ${KEY1} => [key1] + if len(ref) == 1 { + val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) + interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) + continue + } + + // 
cross board reference ${env.folder.key1} => [env folder key1] + if len(ref) > 1 { + secEnv, tmpSecPath, secKey := ref[0], ref[1:len(ref)-1], ref[len(ref)-1] + interpolatedSecs[interpolationKey] = crossSecRefFetch(secEnv, tmpSecPath, secKey) // get the reference value + val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) + interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) + } + + } + expandedSecs[key] = interpolatedVal + return interpolatedVal +} + +func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable { + secretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) + + for _, secret := range secrets { + secretMapByName[secret.Key] = secret + } + + return secretMapByName +} + +func ExpandSecrets(secrets []models.SingleEnvironmentVariable, infisicalToken string) []models.SingleEnvironmentVariable { + expandedSecs := make(map[string]string) + interpolatedSecs := make(map[string]string) + // map[env.secret-path][keyname]Secret + crossEnvRefSecs := make(map[string]map[string]models.SingleEnvironmentVariable) // a cache to hold all cross board reference secrets + + for _, sec := range secrets { + // get all references in a secret + refs := secRefRegex.FindAllStringSubmatch(sec.Value, -1) + // nil means its a secret without reference + if refs == nil { + expandedSecs[sec.Key] = sec.Value // atomic secrets without any interpolation + } else { + interpolatedSecs[sec.Key] = sec.Value + } + } + + for i, sec := range secrets { + // already present pick that up + if expandedVal, ok := expandedSecs[sec.Key]; ok { + secrets[i].Value = expandedVal + continue + } + + expandedVal := recursivelyExpandSecret(expandedSecs, interpolatedSecs, func(env string, secPaths []string, secKey string) string { + secPaths = append([]string{"/"}, secPaths...) + secPath := path.Join(secPaths...) 
+ + secPathDot := strings.Join(secPaths, ".") + uniqKey := fmt.Sprintf("%s.%s", env, secPathDot) + + if crossRefSec, ok := crossEnvRefSecs[uniqKey]; !ok { + // if not in cross reference cache, fetch it from server + refSecs, err := GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: env, InfisicalToken: infisicalToken, SecretsPath: secPath}) + if err != nil { + HandleError(err, fmt.Sprintf("Could not fetch secrets in environment: %s secret-path: %s", env, secPath), "If you are using a service token to fetch secrets, please ensure it is valid") + } + refSecsByKey := getSecretsByKeys(refSecs) + // save it to avoid calling api again for same environment and folder path + crossEnvRefSecs[uniqKey] = refSecsByKey + return refSecsByKey[secKey].Value + } else { + return crossRefSec[secKey].Value + } + }, sec.Key) + + secrets[i].Value = expandedVal + } + return secrets } func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType string) []models.SingleEnvironmentVariable { diff --git a/docs/documentation/platform/folder.mdx b/docs/documentation/platform/folder.mdx index 716cb4e46a..e285967cdf 100644 --- a/docs/documentation/platform/folder.mdx +++ b/docs/documentation/platform/folder.mdx @@ -37,6 +37,8 @@ For more information on integrations, [refer infisical integration](/integration You can scope the secrets that can be read and written using an Infisical token by providing the secret path option when creating the token. +You can provide the folder path as glob if you want to have access to multiple folders and the tokens do support multi-environment. 
+ ![folder scoped service token](../../images/project-folder-token.png) For more information, [refer infisical token section.](./token) diff --git a/docs/documentation/platform/secret-reference.mdx b/docs/documentation/platform/secret-reference.mdx new file mode 100644 index 0000000000..9d24e5255f --- /dev/null +++ b/docs/documentation/platform/secret-reference.mdx @@ -0,0 +1,26 @@ +--- +title: "Reference Secrets" +description: "How to use reference secrets in Infisical" +--- + +You can use the interpolation syntax to reference a secret in the same environment, another folder, or another environment. +The interpolation syntax is a way of referencing a secret by using a special placeholder. The placeholder is made up of the environment and folder names followed by the name of the secret, separated by dots. + +For example, to reference a secret named mysecret in the same environment, you would use the placeholder `${mysecret}`. + +While for another environment like `test` it would be `${test.mysecret}`. + +Some more examples of referencing are: + +| Syntax | Environment | Folder | Secret Key | | --------------------- | ----------- | ------------ | ---------- | | `${KEY1}` | same env | same folder | KEY1 | | `${dev.KEY2}` | dev | / | KEY2 | | `${test.frontend.KEY2}` | test | /frontend | KEY2 | + +# Permission system for reference + +When you use the infisical CLI to log in, the permission system will work the same way as your user permissions. +This means that if you have permission to access other environments, your references to those environments will be resolved. + +When using the Infisical CLI with a service token, the service token must have permissions to the referenced environment and folder path. 
diff --git a/docs/images/project-folder-token.png b/docs/images/project-folder-token.png index 4fda56d275..2402acdf21 100644 Binary files a/docs/images/project-folder-token.png and b/docs/images/project-folder-token.png differ diff --git a/docs/integrations/platforms/kubernetes.mdx b/docs/integrations/platforms/kubernetes.mdx index d797293a08..62af3aca5c 100644 --- a/docs/integrations/platforms/kubernetes.mdx +++ b/docs/integrations/platforms/kubernetes.mdx @@ -39,9 +39,8 @@ The operator can be install via [Helm](helm.sh) or [kubectl](https://github.com/ ## Sync Infisical Secrets to your cluster To retrieve secrets from an Infisical project and save them as native Kubernetes secrets within a specific namespace, utilize the `InfisicalSecret` custom resource definition (CRD). -This resource can be created after installing the Infisical operator. For each new managed secret, you will need to create a new InfisicalSecret CRD. -```yaml +```yaml example-infisical-secret-crd.yaml apiVersion: secrets.infisical.com/v1alpha1 kind: InfisicalSecret metadata: @@ -50,15 +49,18 @@ metadata: spec: # The host that should be used to pull secrets from. If left empty, the value specified in Global configuration will be used hostAPI: https://app.infisical.com/api - resyncInterval: 60 # <-- the time in seconds between secret re-sync. Faster re-syncs will require higher rate limits + resyncInterval: authentication: serviceToken: serviceTokenSecretReference: secretName: service-token secretNamespace: option + secretsScope: + envSlug: dev + secretsPath: "/" managedSecretReference: secretName: managed-secret # <-- the name of kubernetes secret that will be created - secretNamespace: default # <-- where the kubernetes secret that will be created + secretNamespace: default # <-- where the kubernetes secret should be created ``` ### InfisicalSecret CRD properties @@ -86,45 +88,59 @@ Default re-sync interval is every 1 minute. 
- The `authentication` property tells the operator where it should look to find credentials needed to fetch secrets from Infisical. + This block defines the method that will be used to authenticate with Infisical so that secrets can be fetched. Currently, only [Service Tokens](../../documentation/platform/token) can be used to authenticate with Infisical. + - - - Authenticating with service tokens is a great option when you have a small number of services you'd like to fetch secrets for and are looking for the least amount of setup. - - #### 1. Generate service token + + The service token required to authenticate with Infisical needs to be stored in a Kubernetes secret. This block defines the reference to the name and name space of secret that stores this service token. + Follow the instructions below to create and store the service token in a Kubernetes secrets and reference it in your CRD. - You can generate a [service token](../../documentation/platform/token) for an Infisical project by heading over to the Infisical dashboard then to Project Settings. + #### 1. Generate service token - #### 2. Create Kubernetes secret containing service token + You can generate a [service token](../../documentation/platform/token) for an Infisical project by heading over to the Infisical dashboard then to Project Settings. - Once you have generated the service token, you will need to create a Kubernetes secret containing the service token you generated. - To quickly create a Kubernetes secret containing the generated service token, you can run the command below. + #### 2. Create Kubernetes secret containing service token - ``` bash - kubectl create secret generic service-token --from-literal=infisicalToken= - ``` + Once you have generated the service token, you will need to create a Kubernetes secret containing the service token you generated. + To quickly create a Kubernetes secret containing the generated service token, you can run the command below. 
Make sure you replace `` with your service token. - #### 3. Add reference for the Kubernetes secret containing service token + ``` bash + kubectl create secret generic service-token --from-literal=infisicalToken= + ``` - Once the secret is created, add the name and namespace of the secret that was just created under `authentication.serviceToken.serviceTokenSecretReference` field in the InfisicalSecret resource. + #### 3. Add reference for the Kubernetes secret containing service token - ## Example - ```yaml - apiVersion: secrets.infisical.com/v1alpha1 - kind: InfisicalSecret - metadata: - name: infisicalsecret-sample-crd - spec: - authentication: - serviceToken: - serviceTokenSecretReference: - secretName: service-token # <-- name of the Kubernetes secret that stores our service token - secretNamespace: option # <-- namespace of the Kubernetes secret that stores our service token - ... - ``` - - + Once the secret is created, add the name and namespace of the secret that was just created under `authentication.serviceToken.serviceTokenSecretReference` field in the InfisicalSecret resource. + + ## Example + ```yaml + apiVersion: secrets.infisical.com/v1alpha1 + kind: InfisicalSecret + metadata: + name: infisicalsecret-sample-crd + spec: + authentication: + serviceToken: + serviceTokenSecretReference: + secretName: service-token # <-- name of the Kubernetes secret that stores our service token + secretNamespace: option # <-- namespace of the Kubernetes secret that stores our service token + ... + ``` + + + + This block defines the scope of what secrets should be fetched. This is needed as your service token can have access to multiple folders and environments. + A scope is defined by `envSlug` and `secretsPath`. + + #### envSlug + + This refers to the short hand name of an environment. For example for the `development` environment the environment slug is `dev`. You can locate the slug of your environment by heading to your project settings in the Infisical dashboard. 
+ + #### secretsPath + + secretsPath is the path to the secret in the given environment. For example a path of `/` would refer to the root of the environment whereas `/folder1` would refer to the secrets in folder1 from the root. + + Both fields are required. diff --git a/docs/mint.json b/docs/mint.json index ba66b271f3..25db15c632 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -110,6 +110,7 @@ "documentation/platform/organization", "documentation/platform/project", "documentation/platform/folder", + "documentation/platform/secret-reference", "documentation/platform/pit-recovery", "documentation/platform/secret-versioning", "documentation/platform/audit-logs", diff --git a/frontend/src/hooks/api/serviceTokens/types.ts b/frontend/src/hooks/api/serviceTokens/types.ts index 0c68aea798..d6dc322ccd 100644 --- a/frontend/src/hooks/api/serviceTokens/types.ts +++ b/frontend/src/hooks/api/serviceTokens/types.ts @@ -1,9 +1,13 @@ +export type ServiceTokenScope = { + environment: string; + secretPath: string; +}; + export type ServiceToken = { _id: string; name: string; workspace: string; - environment: string; - secretPath: string; + scopes: ServiceTokenScope[]; user: string; expiresAt: string; createdAt: string; @@ -14,9 +18,8 @@ export type ServiceToken = { export type CreateServiceTokenDTO = { name: string; workspaceId: string; - environment: string; + scopes: ServiceTokenScope[]; expiresIn: number; - secretPath: string; encryptedKey: string; iv: string; tag: string; diff --git a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/AddServiceTokenModal.tsx b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/AddServiceTokenModal.tsx index 338738ea2a..a44db233b4 100644 --- a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/AddServiceTokenModal.tsx +++ b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/AddServiceTokenModal.tsx @@ -1,9 +1,9 @@ import crypto 
from "crypto"; import { useEffect, useState } from "react"; -import { Controller, useForm } from "react-hook-form"; +import { Controller, useFieldArray, useForm } from "react-hook-form"; import { useTranslation } from "react-i18next"; -import { faCheck, faCopy } from "@fortawesome/free-solid-svg-icons"; +import { faCheck, faCopy, faPlus, faTrashCan } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { yupResolver } from "@hookform/resolvers/yup"; import * as yup from "yup"; @@ -27,10 +27,7 @@ import { } from "@app/components/v2"; import { useWorkspace } from "@app/context"; import { useToggle } from "@app/hooks"; -import { - useCreateServiceToken, - useGetUserWsKey -} from "@app/hooks/api"; +import { useCreateServiceToken, useGetUserWsKey } from "@app/hooks/api"; import { UsePopUpState } from "@app/hooks/usePopUp"; const apiTokenExpiry = [ @@ -44,8 +41,23 @@ const apiTokenExpiry = [ const schema = yup.object({ name: yup.string().max(100).required().label("Service Token Name"), - environment: yup.string().max(50).required().label("Environment"), - secretPath: yup.string().required().default("/").label("Secret Path"), + scopes: yup + .array( + yup.object({ + environment: yup.string().max(50).required().label("Environment"), + secretPath: yup + .string() + .required() + .default("/") + .label("Secret Path") + .transform((val) => + typeof val === "string" && val.at(-1) === "/" && val.length > 1 ? 
val.slice(0, -1) : val + ) + }) + ) + .min(1) + .required() + .label("Scope"), expiresIn: yup.string().optional().label("Service Token Expiration"), permissions: yup .object() @@ -60,284 +72,301 @@ const schema = yup.object({ export type FormData = yup.InferType; type Props = { - popUp: UsePopUpState<["createAPIToken"]>; - handlePopUpToggle: (popUpName: keyof UsePopUpState<["createAPIToken"]>, state?: boolean) => void; + popUp: UsePopUpState<["createAPIToken"]>; + handlePopUpToggle: (popUpName: keyof UsePopUpState<["createAPIToken"]>, state?: boolean) => void; }; -export const AddServiceTokenModal = ({ - popUp, - handlePopUpToggle -}: Props) => { - const { t } = useTranslation(); - const { createNotification } = useNotificationContext(); - const { currentWorkspace } = useWorkspace(); - const { - control, - reset, - handleSubmit, - formState: { isSubmitting } - } = useForm({ - resolver: yupResolver(schema) - }); +export const AddServiceTokenModal = ({ popUp, handlePopUpToggle }: Props) => { + const { t } = useTranslation(); + const { createNotification } = useNotificationContext(); + const { currentWorkspace } = useWorkspace(); + const { + control, + reset, + handleSubmit, + formState: { isSubmitting } + } = useForm({ + resolver: yupResolver(schema), + defaultValues: { + scopes: [{ secretPath: "/", environment: currentWorkspace?.environments?.[0]?.slug }] + } + }); - const [newToken, setToken] = useState(""); - const [isTokenCopied, setIsTokenCopied] = useToggle(false); + const { fields: tokenScopes, append, remove } = useFieldArray({ control, name: "scopes" }); - const { data: latestFileKey } = useGetUserWsKey(currentWorkspace?._id ?? 
""); - const createServiceToken = useCreateServiceToken(); - const hasServiceToken = Boolean(newToken); + const [newToken, setToken] = useState(""); + const [isTokenCopied, setIsTokenCopied] = useToggle(false); - useEffect(() => { - let timer: NodeJS.Timeout; - if (isTokenCopied) { - timer = setTimeout(() => setIsTokenCopied.off(), 2000); - } + const { data: latestFileKey } = useGetUserWsKey(currentWorkspace?._id ?? ""); + const createServiceToken = useCreateServiceToken(); + const hasServiceToken = Boolean(newToken); - return () => clearTimeout(timer); - }, [isTokenCopied]); + useEffect(() => { + let timer: NodeJS.Timeout; + if (isTokenCopied) { + timer = setTimeout(() => setIsTokenCopied.off(), 2000); + } - const copyTokenToClipboard = () => { - navigator.clipboard.writeText(newToken); - setIsTokenCopied.on(); - }; + return () => clearTimeout(timer); + }, [isTokenCopied]); - const onFormSubmit = async ({ + const copyTokenToClipboard = () => { + navigator.clipboard.writeText(newToken); + setIsTokenCopied.on(); + }; + + const onFormSubmit = async ({ name, scopes, expiresIn, permissions }: FormData) => { + try { + if (!currentWorkspace?._id) return; + if (!latestFileKey) return; + + const key = decryptAssymmetric({ + ciphertext: latestFileKey.encryptedKey, + nonce: latestFileKey.nonce, + publicKey: latestFileKey.sender.publicKey, + privateKey: localStorage.getItem("PRIVATE_KEY") as string + }); + + const randomBytes = crypto.randomBytes(16).toString("hex"); + + const { ciphertext, iv, tag } = encryptSymmetric({ + plaintext: key, + key: randomBytes + }); + + const { serviceToken } = await createServiceToken.mutateAsync({ + encryptedKey: ciphertext, + iv, + tag, + scopes, + expiresIn: Number(expiresIn), name, - environment, - secretPath, - expiresIn, - permissions - }: FormData) => { - try { - if (!currentWorkspace?._id) return; - if (!latestFileKey) return; + workspaceId: currentWorkspace._id, + randomBytes, + permissions: Object.entries(permissions) + .filter(([, 
permissionsValue]) => permissionsValue) + .map(([permissionsKey]) => permissionsKey) + }); - const key = decryptAssymmetric({ - ciphertext: latestFileKey.encryptedKey, - nonce: latestFileKey.nonce, - publicKey: latestFileKey.sender.publicKey, - privateKey: localStorage.getItem("PRIVATE_KEY") as string - }); + setToken(serviceToken); + createNotification({ + text: "Successfully created a service token", + type: "success" + }); + } catch (err) { + console.error(err); + createNotification({ + text: "Failed to create a service token", + type: "error" + }); + } + }; - const randomBytes = crypto.randomBytes(16).toString("hex"); - - const { ciphertext, iv, tag } = encryptSymmetric({ - plaintext: key, - key: randomBytes - }); - - const { serviceToken } = await createServiceToken.mutateAsync({ - encryptedKey: ciphertext, - iv, - tag, - environment, - secretPath, - expiresIn: Number(expiresIn), - name, - workspaceId: currentWorkspace._id, - randomBytes, - permissions: Object.entries(permissions) - .filter(([, permissionsValue]) => permissionsValue) - .map(([permissionsKey]) => permissionsKey) - }); - - setToken(serviceToken); - - createNotification({ - text: "Successfully created a service token", - type: "success" - }); - - } catch (err) { - console.error(err); - createNotification({ - text: "Failed to create a service token", - type: "error" - }); + return ( + { + handlePopUpToggle("createAPIToken", open); + reset(); + setToken(""); + }} + > + { - handlePopUpToggle("createAPIToken", open); - reset(); - setToken(""); - }} - > - - {!hasServiceToken ? ( -
- ( - - - - )} - /> - ( - - - - )} - /> - ( - - - - )} - /> - ( - - - - )} - /> - { - const options = [ - { - label: "Read (default)", - value: "read" - }, - { - label: "Write (optional)", - value: "write" - } - ]; - - return ( - - <> - {options.map(({ label, value: optionValue }) => { - return ( - { - onChange({ - ...value, - [optionValue]: state - }); - }} - > - {label} - - ); - })} - - - ); - }} - /> -
- - - - -
- - ) : ( -
-

{newToken}

- - - - {t("common.click-to-copy")} - - -
+ subTitle={t("section.token.add-dialog.description") as string} + > + {!hasServiceToken ? ( +
+ ( + + + )} - - - ); -} \ No newline at end of file + /> + {tokenScopes.map(({ id }, index) => ( +
+ ( + + + + )} + /> + ( + + + + )} + /> + remove(index)} + > + + +
+ ))} +
+ +
+ ( + + + + )} + /> + { + const options = [ + { + label: "Read (default)", + value: "read" + }, + { + label: "Write (optional)", + value: "write" + } + ]; + + return ( + + <> + {options.map(({ label, value: optionValue }) => { + return ( + { + onChange({ + ...value, + [optionValue]: state + }); + }} + > + {label} + + ); + })} + + + ); + }} + /> +
+ + + + +
+ + ) : ( +
+

{newToken}

+ + + + {t("common.click-to-copy")} + + +
+ )} +
+
+ ); +}; diff --git a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenSection.tsx b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenSection.tsx index fd31870f11..376328673f 100644 --- a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenSection.tsx +++ b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenSection.tsx @@ -3,14 +3,9 @@ import { faPlus } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { useNotificationContext } from "@app/components/context/Notifications/NotificationProvider"; -import { - Button, - DeleteActionModal, -} from "@app/components/v2"; +import { Button, DeleteActionModal } from "@app/components/v2"; import { usePopUp } from "@app/hooks"; -import { - useDeleteServiceToken -} from "@app/hooks/api"; +import { useDeleteServiceToken } from "@app/hooks/api"; import { AddServiceTokenModal } from "./AddServiceTokenModal"; import { ServiceTokenTable } from "./ServiceTokenTable"; @@ -29,7 +24,9 @@ export const ServiceTokenSection = () => { const onDeleteApproved = async () => { try { - deleteServiceToken.mutateAsync((popUp?.deleteAPITokenConfirmation?.data as DeleteModalData)?.id); + deleteServiceToken.mutateAsync( + (popUp?.deleteAPITokenConfirmation?.data as DeleteModalData)?.id + ); createNotification({ text: "Successfully deleted service token", type: "success" @@ -46,32 +43,29 @@ export const ServiceTokenSection = () => { }; return ( -
-
-

{t("section.token.service-tokens")}

- +
+
+

+ {t("section.token.service-tokens")} +

+
-

{t("section.token.service-tokens-description")}

- - +

{t("section.token.service-tokens-description")}

+ + handlePopUpToggle("deleteAPITokenConfirmation", isOpen)} deleteKey={(popUp?.deleteAPITokenConfirmation?.data as DeleteModalData)?.name} onClose={() => handlePopUpClose("deleteAPITokenConfirmation")} diff --git a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenTable.tsx b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenTable.tsx index 69dab23dff..ce39abde2a 100644 --- a/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenTable.tsx +++ b/frontend/src/views/Settings/ProjectSettingsPage/components/ServiceTokenSection/ServiceTokenTable.tsx @@ -1,4 +1,4 @@ -import { faKey, faTrashCan } from "@fortawesome/free-solid-svg-icons"; +import { faFolder, faKey, faTrashCan } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { @@ -18,71 +18,82 @@ import { useGetUserWsServiceTokens } from "@app/hooks/api"; import { UsePopUpState } from "@app/hooks/usePopUp"; type Props = { - handlePopUpOpen: ( - popUpName: keyof UsePopUpState<["deleteAPITokenConfirmation"]>, - { - name, - id - }: { - name: string; - id: string; - } - ) => void; + handlePopUpOpen: ( + popUpName: keyof UsePopUpState<["deleteAPITokenConfirmation"]>, + { + name, + id + }: { + name: string; + id: string; + } + ) => void; }; -export const ServiceTokenTable = ({ - handlePopUpOpen -}: Props) => { - const { currentWorkspace } = useWorkspace(); - const { data, isLoading } = useGetUserWsServiceTokens({ - workspaceID: currentWorkspace?._id || "" - }); +export const ServiceTokenTable = ({ handlePopUpOpen }: Props) => { + const { currentWorkspace } = useWorkspace(); + const { data, isLoading } = useGetUserWsServiceTokens({ + workspaceID: currentWorkspace?._id || "" + }); - return ( - - - - - - - - - - - - {isLoading && } - {!isLoading && data && data.map((row) => ( - - - - - - - + return ( + +
Token NameEnvironmentSecret PathValid Until -
{row.name}{row.environment}{row.secretPath}{row.expiresAt && new Date(row.expiresAt).toUTCString()} - - handlePopUpOpen("deleteAPITokenConfirmation", { - name: row.name, - id: row._id - }) - } - colorSchema="danger" - ariaLabel="delete" - > - - -
+ + + + + + + + + {isLoading && } + {!isLoading && + data && + data.map((row) => ( + + + - - - )} - -
Token NameEnvironment - Secret PathValid Until +
{row.name} +
+ {row?.scopes.map(({ secretPath, environment }) => ( +
+
{environment}
+ + {secretPath} +
))} - {!isLoading && data && data?.length === 0 && ( -
- -
-
- ); -} \ No newline at end of file +
+ + {row.expiresAt && new Date(row.expiresAt).toUTCString()} + + + handlePopUpOpen("deleteAPITokenConfirmation", { + name: row.name, + id: row._id + }) + } + colorSchema="danger" + ariaLabel="delete" + > + + + + + ))} + {!isLoading && data && data?.length === 0 && ( + + + + + + )} + + + + ); +}; diff --git a/helm-charts/secrets-operator/Chart.yaml b/helm-charts/secrets-operator/Chart.yaml index c3e0073b5f..d44467decc 100644 --- a/helm-charts/secrets-operator/Chart.yaml +++ b/helm-charts/secrets-operator/Chart.yaml @@ -13,9 +13,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.6 +version: 0.2.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "0.1.8" +appVersion: "0.2.0" diff --git a/helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml b/helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml index ceeac67093..51dd18a416 100644 --- a/helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml +++ b/helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml @@ -63,6 +63,16 @@ spec: type: object serviceToken: properties: + secretsScope: + properties: + envSlug: + type: string + secretsPath: + type: string + required: + - envSlug + - secretsPath + type: object serviceTokenSecretReference: properties: secretName: @@ -77,6 +87,7 @@ spec: - secretNamespace type: object required: + - secretsScope - serviceTokenSecretReference type: object type: object diff --git a/k8-operator/api/v1alpha1/infisicalsecret_types.go b/k8-operator/api/v1alpha1/infisicalsecret_types.go index 203394417a..3b61bd215f 100644 --- a/k8-operator/api/v1alpha1/infisicalsecret_types.go +++ b/k8-operator/api/v1alpha1/infisicalsecret_types.go @@ -4,8 +4,19 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) +type Authentication struct { + // +kubebuilder:validation:Optional + ServiceAccount ServiceAccountDetails `json:"serviceAccount"` + // +kubebuilder:validation:Optional + ServiceToken ServiceTokenDetails `json:"serviceToken"` +} + type ServiceTokenDetails struct { + // +kubebuilder:validation:Required ServiceTokenSecretReference KubeSecretReference `json:"serviceTokenSecretReference"` + + // +kubebuilder:validation:Required + SecretsScope SecretScopeInWorkspace `json:"secretsScope"` } type ServiceAccountDetails struct { @@ -14,11 +25,12 @@ type ServiceAccountDetails struct { EnvironmentName string `json:"environmentName"` } -type Authentication struct { - // +kubebuilder:validation:Optional - ServiceAccount ServiceAccountDetails `json:"serviceAccount"` - // +kubebuilder:validation:Optional - ServiceToken ServiceTokenDetails `json:"serviceToken"` +type SecretScopeInWorkspace struct { + // 
+kubebuilder:validation:Required + SecretsPath string `json:"secretsPath"` + + // +kubebuilder:validation:Required + EnvSlug string `json:"envSlug"` } type KubeSecretReference struct { diff --git a/k8-operator/api/v1alpha1/zz_generated.deepcopy.go b/k8-operator/api/v1alpha1/zz_generated.deepcopy.go index 97c0f4fc76..01000431c8 100644 --- a/k8-operator/api/v1alpha1/zz_generated.deepcopy.go +++ b/k8-operator/api/v1alpha1/zz_generated.deepcopy.go @@ -157,6 +157,21 @@ func (in *KubeSecretReference) DeepCopy() *KubeSecretReference { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretScopeInWorkspace) DeepCopyInto(out *SecretScopeInWorkspace) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretScopeInWorkspace. +func (in *SecretScopeInWorkspace) DeepCopy() *SecretScopeInWorkspace { + if in == nil { + return nil + } + out := new(SecretScopeInWorkspace) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ServiceAccountDetails) DeepCopyInto(out *ServiceAccountDetails) { *out = *in @@ -177,6 +192,7 @@ func (in *ServiceAccountDetails) DeepCopy() *ServiceAccountDetails { func (in *ServiceTokenDetails) DeepCopyInto(out *ServiceTokenDetails) { *out = *in out.ServiceTokenSecretReference = in.ServiceTokenSecretReference + out.SecretsScope = in.SecretsScope } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServiceTokenDetails. 
diff --git a/k8-operator/config/crd/bases/secrets.infisical.com_infisicalsecrets.yaml b/k8-operator/config/crd/bases/secrets.infisical.com_infisicalsecrets.yaml index f47efb0614..07433f7817 100644 --- a/k8-operator/config/crd/bases/secrets.infisical.com_infisicalsecrets.yaml +++ b/k8-operator/config/crd/bases/secrets.infisical.com_infisicalsecrets.yaml @@ -63,6 +63,16 @@ spec: type: object serviceToken: properties: + secretsScope: + properties: + envSlug: + type: string + secretsPath: + type: string + required: + - envSlug + - secretsPath + type: object serviceTokenSecretReference: properties: secretName: @@ -77,6 +87,7 @@ spec: - secretNamespace type: object required: + - secretsScope - serviceTokenSecretReference type: object type: object diff --git a/k8-operator/config/samples/sample.yaml b/k8-operator/config/samples/sample.yaml index 971df9f7c1..4c5059d9f7 100644 --- a/k8-operator/config/samples/sample.yaml +++ b/k8-operator/config/samples/sample.yaml @@ -3,8 +3,8 @@ kind: InfisicalSecret metadata: name: infisicalsecret-sample spec: - hostAPI: http://localhost:7070/api - resyncInterval: 60 + hostAPI: http://localhost:8764/api + resyncInterval: 10 authentication: serviceAccount: serviceAccountSecretReference: @@ -16,10 +16,13 @@ spec: serviceTokenSecretReference: secretName: service-token secretNamespace: default + secretsScope: + envSlug: dev + secretsPath: "/" managedSecretReference: secretName: managed-secret secretNamespace: default - # To be depreciated soon - tokenSecretReference: - secretName: service-token - secretNamespace: default + # # To be depreciated soon + # tokenSecretReference: + # secretName: service-token + # secretNamespace: default diff --git a/k8-operator/controllers/infisicalsecret_helper.go b/k8-operator/controllers/infisicalsecret_helper.go index 32216e2e37..509de94ac8 100644 --- a/k8-operator/controllers/infisicalsecret_helper.go +++ b/k8-operator/controllers/infisicalsecret_helper.go @@ -219,7 +219,10 @@ func (r 
*InfisicalSecretReconciler) ReconcileInfisicalSecret(ctx context.Context fmt.Println("ReconcileInfisicalSecret: Fetched secrets via service account") } else if infisicalToken != "" { - plainTextSecretsFromApi, fullEncryptedSecretsResponse, err = util.GetPlainTextSecretsViaServiceToken(infisicalToken, secretVersionBasedOnETag) + envSlug := infisicalSecret.Spec.Authentication.ServiceToken.SecretsScope.EnvSlug + secretsPath := infisicalSecret.Spec.Authentication.ServiceToken.SecretsScope.SecretsPath + + plainTextSecretsFromApi, fullEncryptedSecretsResponse, err = util.GetPlainTextSecretsViaServiceToken(infisicalToken, secretVersionBasedOnETag, envSlug, secretsPath) if err != nil { return fmt.Errorf("\nfailed to get secrets because [err=%v]", err) } diff --git a/k8-operator/kubectl-install/install-secrets-operator.yaml b/k8-operator/kubectl-install/install-secrets-operator.yaml index f362bcfa1d..dad6dcf3a3 100644 --- a/k8-operator/kubectl-install/install-secrets-operator.yaml +++ b/k8-operator/kubectl-install/install-secrets-operator.yaml @@ -70,6 +70,16 @@ spec: type: object serviceToken: properties: + secretsScope: + properties: + envSlug: + type: string + secretsPath: + type: string + required: + - envSlug + - secretsPath + type: object serviceTokenSecretReference: properties: secretName: @@ -83,6 +93,7 @@ spec: - secretNamespace type: object required: + - secretsScope - serviceTokenSecretReference type: object type: object diff --git a/k8-operator/packages/util/secrets.go b/k8-operator/packages/util/secrets.go index 5087cd03aa..28ba472530 100644 --- a/k8-operator/packages/util/secrets.go +++ b/k8-operator/packages/util/secrets.go @@ -3,6 +3,8 @@ package util import ( "encoding/base64" "fmt" + "path" + "regexp" "strings" "github.com/Infisical/infisical/k8-operator/packages/api" @@ -48,7 +50,7 @@ func GetServiceTokenDetails(infisicalToken string) (api.GetServiceTokenDetailsRe return serviceTokenDetails, nil } -func GetPlainTextSecretsViaServiceToken(fullServiceToken 
string, etag string) ([]model.SingleEnvironmentVariable, api.GetEncryptedSecretsV3Response, error) { +func GetPlainTextSecretsViaServiceToken(fullServiceToken string, etag string, envSlug string, secretPath string) ([]model.SingleEnvironmentVariable, api.GetEncryptedSecretsV3Response, error) { serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4) if len(serviceTokenParts) < 4 { return nil, api.GetEncryptedSecretsV3Response{}, fmt.Errorf("invalid service token entered. Please double check your service token and try again") @@ -68,9 +70,9 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, etag string) ([ encryptedSecretsResponse, err := api.CallGetSecretsV3(httpClient, api.GetEncryptedSecretsV3Request{ WorkspaceId: serviceTokenDetails.Workspace, - Environment: serviceTokenDetails.Environment, + Environment: envSlug, ETag: etag, - SecretPath: serviceTokenDetails.SecretPath, + SecretPath: secretPath, }) if err != nil { @@ -92,7 +94,10 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, etag string) ([ return nil, api.GetEncryptedSecretsV3Response{}, fmt.Errorf("unable to decrypt your secrets [err=%v]", err) } - return plainTextSecrets, encryptedSecretsResponse, nil + // expand secrets that are referenced + expandedSecrets := ExpandSecrets(plainTextSecrets, fullServiceToken) + + return expandedSecrets, encryptedSecretsResponse, nil } // Fetches plaintext secrets from an API endpoint using a service account. 
@@ -252,3 +257,104 @@ func GetPlainTextSecrets(key []byte, encryptedSecretsResponse api.GetEncryptedSe return plainTextSecrets, nil } + +var secRefRegex = regexp.MustCompile(`\${([^\}]*)}`) + +func recursivelyExpandSecret(expandedSecs map[string]string, interpolatedSecs map[string]string, crossSecRefFetch func(env string, path []string, key string) string, key string) string { + if v, ok := expandedSecs[key]; ok { + return v + } + + interpolatedVal, ok := interpolatedSecs[key] + if !ok { + return "" + // panic(fmt.Errorf("Could not find referred secret with key name %s", key), "Please check it refers a") + } + + refs := secRefRegex.FindAllStringSubmatch(interpolatedVal, -1) + for _, val := range refs { + // key: "${something}" val: [${something},something] + interpolatedExp, interpolationKey := val[0], val[1] + ref := strings.Split(interpolationKey, ".") + + // ${KEY1} => [key1] + if len(ref) == 1 { + val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) + interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) + continue + } + + // cross board reference ${env.folder.key1} => [env folder key1] + if len(ref) > 1 { + secEnv, tmpSecPath, secKey := ref[0], ref[1:len(ref)-1], ref[len(ref)-1] + interpolatedSecs[interpolationKey] = crossSecRefFetch(secEnv, tmpSecPath, secKey) // get the reference value + val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) + interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) + } + + } + expandedSecs[key] = interpolatedVal + return interpolatedVal +} + +func ExpandSecrets(secrets []model.SingleEnvironmentVariable, infisicalToken string) []model.SingleEnvironmentVariable { + expandedSecs := make(map[string]string) + interpolatedSecs := make(map[string]string) + // map[env.secret-path][keyname]Secret + crossEnvRefSecs := make(map[string]map[string]model.SingleEnvironmentVariable) // a cache to hold all cross 
board reference secrets + + for _, sec := range secrets { + // get all references in a secret + refs := secRefRegex.FindAllStringSubmatch(sec.Value, -1) + // nil means its a secret without reference + if refs == nil { + expandedSecs[sec.Key] = sec.Value // atomic secrets without any interpolation + } else { + interpolatedSecs[sec.Key] = sec.Value + } + } + + for i, sec := range secrets { + // already present pick that up + if expandedVal, ok := expandedSecs[sec.Key]; ok { + secrets[i].Value = expandedVal + continue + } + + expandedVal := recursivelyExpandSecret(expandedSecs, interpolatedSecs, func(env string, secPaths []string, secKey string) string { + secPaths = append([]string{"/"}, secPaths...) + secPath := path.Join(secPaths...) + + secPathDot := strings.Join(secPaths, ".") + uniqKey := fmt.Sprintf("%s.%s", env, secPathDot) + + if crossRefSec, ok := crossEnvRefSecs[uniqKey]; !ok { + // if not in cross reference cache, fetch it from server + refSecs, _, err := GetPlainTextSecretsViaServiceToken(infisicalToken, "", env, secPath) + if err != nil { + fmt.Println("HELLO===>", "MOO", err) + // HandleError(err, fmt.Sprintf("Could not fetch secrets in environment: %s secret-path: %s", env, secPath), "If you are using a service token to fetch secrets, please ensure it is valid") + } + refSecsByKey := getSecretsByKeys(refSecs) + // save it to avoid calling api again for same environment and folder path + crossEnvRefSecs[uniqKey] = refSecsByKey + return refSecsByKey[secKey].Value + } else { + return crossRefSec[secKey].Value + } + }, sec.Key) + + secrets[i].Value = expandedVal + } + return secrets +} + +func getSecretsByKeys(secrets []model.SingleEnvironmentVariable) map[string]model.SingleEnvironmentVariable { + secretMapByName := make(map[string]model.SingleEnvironmentVariable, len(secrets)) + + for _, secret := range secrets { + secretMapByName[secret.Key] = secret + } + + return secretMapByName +}