merge main

Scott Wilson
2025-07-30 16:38:35 -07:00
369 changed files with 2581 additions and 27780 deletions

View File

@@ -1,153 +0,0 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest-8-cores
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
- name: Invalidate Cloudfront cache
run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}

View File

@@ -1,55 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -1,241 +0,0 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# before:
# hooks:
# # You may remove this if you don't use go modules.
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
hooks:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh
monorepo:
tag_prefix: infisical-cli/
dir: cli
builds:
- id: darwin-build
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
goos:
- darwin
ignore:
- goos: darwin
goarch: "386"
dir: ./cli
- id: all-other-builds
env:
- CGO_ENABLED=0
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
goos:
- freebsd
- linux
- netbsd
- openbsd
- windows
goarch:
- "386"
- amd64
- arm
- arm64
goarm:
- "6"
- "7"
ignore:
- goos: windows
goarch: "386"
- goos: freebsd
goarch: "386"
dir: ./cli
archives:
- format_overrides:
- goos: windows
format: zip
files:
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*
release:
replace_existing_draft: true
mode: "replace"
checksum:
name_template: "checksums.txt"
snapshot:
name_template: "{{ .Version }}-devel"
# publishers:
# - name: fury.io
# ids:
# - infisical
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
brews:
- name: infisical
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The official Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
scoop:
bucket:
owner: Infisical
name: scoop-infisical
commit_author:
name: "Infisical"
email: ai@infisical.com
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# license
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# completions
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"

View File

@@ -34,6 +34,8 @@ ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
# Build
RUN npm run build
@@ -209,6 +211,11 @@ EXPOSE 443
RUN grep -v 'import "./lib/telemetry/instrumentation.mjs";' dist/main.mjs > dist/main.mjs.tmp && \
mv dist/main.mjs.tmp dist/main.mjs
# The OpenSSL library is installed in different locations in different architectures (x86_64 and arm64).
# This is a workaround to avoid errors when the library is not found.
RUN ln -sf /usr/local/lib64/ossl-modules /usr/local/lib/ossl-modules || \
ln -sf /usr/local/lib/ossl-modules /usr/local/lib64/ossl-modules
USER non-root-user
CMD ["./standalone-entrypoint.sh"]

View File

@@ -7,7 +7,6 @@
"": {
"name": "backend",
"version": "1.0.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
@@ -34,7 +33,7 @@
"@gitbeaker/rest": "^42.5.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@node-saml/passport-saml": "^5.1.0",
"@octokit/auth-app": "^7.1.1",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
@@ -9574,20 +9573,20 @@
}
},
"node_modules/@node-saml/node-saml": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
"integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
"integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
"license": "MIT",
"dependencies": {
"@types/debug": "^4.1.12",
"@types/qs": "^6.9.11",
"@types/qs": "^6.9.18",
"@types/xml-encryption": "^1.2.4",
"@types/xml2js": "^0.4.14",
"@xmldom/is-dom-node": "^1.0.1",
"@xmldom/xmldom": "^0.8.10",
"debug": "^4.3.4",
"xml-crypto": "^6.0.1",
"xml-encryption": "^3.0.2",
"debug": "^4.4.0",
"xml-crypto": "^6.1.2",
"xml-encryption": "^3.1.0",
"xml2js": "^0.6.2",
"xmlbuilder": "^15.1.1",
"xpath": "^0.0.34"
@@ -9597,9 +9596,9 @@
}
},
"node_modules/@node-saml/node-saml/node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
@@ -9636,14 +9635,14 @@
}
},
"node_modules/@node-saml/passport-saml": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
"integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
"integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
"license": "MIT",
"dependencies": {
"@node-saml/node-saml": "^5.0.1",
"@types/express": "^4.17.21",
"@types/passport": "^1.0.16",
"@node-saml/node-saml": "^5.1.0",
"@types/express": "^4.17.23",
"@types/passport": "^1.0.17",
"@types/passport-strategy": "^0.2.38",
"passport": "^0.7.0",
"passport-strategy": "^1.0.0"
@@ -13351,9 +13350,10 @@
"license": "MIT"
},
"node_modules/@types/express": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
"version": "4.17.23",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
"integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
"license": "MIT",
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^4.17.33",
@@ -13523,9 +13523,10 @@
}
},
"node_modules/@types/passport": {
"version": "1.0.16",
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
"integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
"version": "1.0.17",
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
"integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
@@ -31953,9 +31954,9 @@
"license": "MIT"
},
"node_modules/xml-crypto": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
"integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
"integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
"license": "MIT",
"dependencies": {
"@xmldom/is-dom-node": "^1.0.1",

View File

@@ -153,7 +153,7 @@
"@gitbeaker/rest": "^42.5.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@node-saml/passport-saml": "^5.1.0",
"@octokit/auth-app": "^7.1.1",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.specificType("secretDetectionIgnoreValues", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("secretDetectionIgnoreValues");
});
}
}

View File

@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false),
defaultProduct: z.string().nullable().optional()
defaultProduct: z.string().nullable().optional(),
secretDetectionIgnoreValues: z.string().array().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -5,13 +5,14 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here
import { ForbiddenError } from "@casl/ability";
import { AxiosError } from "axios";
import { CronJob } from "cron";
import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
});
}
const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
try {
const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
message: `Failed to remove payment method: ${error.response?.data?.message}`
});
}
throw new BadRequestError({
message: "Unable to remove payment method"
});
}
};
const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {

View File

@@ -579,6 +579,9 @@ export const scimServiceFactory = ({
});
const serverCfg = await getServerCfg();
const hasEmailChanged = email?.toLowerCase() !== membership.email;
const defaultEmailVerified =
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
await userDAL.transaction(async (tx) => {
await userAliasDAL.update(
{
@@ -605,8 +608,7 @@ export const scimServiceFactory = ({
firstName,
email: email?.toLowerCase(),
lastName,
isEmailVerified:
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
isEmailVerified: hasEmailChanged ? defaultEmailVerified : undefined
},
tx
);

View File

@@ -65,10 +65,14 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";
@@ -276,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
) {
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const hasSecretReadAccess = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
};
let secrets;
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);
if (shouldUseSecretV2Bridge) {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -298,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -314,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secret.key,
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -330,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secretVersion.key,
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -349,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !hasSecretReadAccess,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -369,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
: undefined
}));
}
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);
return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
};
@@ -1412,6 +1427,20 @@ export const secretApprovalRequestServiceFactory = ({
projectId
});
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
...(data[SecretOperations.Create] || []),
...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
].map((el) => ({
secretKey: el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
if (createdSecrets && createdSecrets?.length) {

View File

@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";
type AzureErrorResponse = { error: { message: string } };
const EXPIRY_PADDING_IN_DAYS = 3;
const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 1000);
@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
const {
connection,
parameters: { objectId, clientId: clientIdParam },
secretsMapping
secretsMapping,
rotationInterval
} = secretRotation;
/**
@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
)}-${now.getFullYear()}`;
const endDateTime = new Date();
endDateTime.setFullYear(now.getFullYear() + 5);
endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer
try {
const { data } = await request.post<AzureAddPasswordResponse>(
@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
callback
) => {
const credentials = await $rotateClientSecret();
// Cap at ~2.5 years: expiry is set to 2x the rotation interval (plus padding) to cover the credential's inactive period
if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
}
return callback(credentials);
};
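For context on the guard above (a quick back-of-the-envelope check using the values in this diff): with EXPIRY_PADDING_IN_DAYS = 3, the largest accepted rotationInterval is Math.floor(365 * 2.5) - 3 = 909 days, so the credential created in $rotateClientSecret expires after at most 909 * 2 + 3 = 1821 days, safely under Azure's 5-year (~1825-day) limit.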

View File

@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionGatewayId,
connectionIsPlatformManagedCredentials,
...el
} = secretRotation;
@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
gatewayId: connectionGatewayId,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
},
folder: {

View File

@@ -1,12 +1,22 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
import picomatch from "picomatch";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
createTempFolder,
deleteTempFolder,
readFindingsFile,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -48,6 +58,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}
export function scanFile(inputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
@@ -142,3 +165,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
secrets: { secretKey: string; secretValue: string }[],
ignoreValues: string[]
) => {
const appCfg = getConfig();
if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
return;
}
const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
(el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
);
if (!match) {
return;
}
const tempFolder = await createTempFolder();
try {
const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
try {
await scanFile(secretFilePath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
name: "SecretPolicyViolation"
});
}
});
await Promise.all(scanPromises);
} finally {
await deleteTempFolder(tempFolder);
}
};

View File

@@ -704,7 +704,8 @@ export const PROJECTS = {
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
defaultProduct: "The default product in which the project will open"
defaultProduct: "The default product in which the project will open",
secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -2252,7 +2253,9 @@ export const AppConnections = {
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
tenantId: "The Tenant ID to use to connect with Azure DevOps.",
orgName: "The Organization name to use to connect with Azure DevOps."
orgName: "The Organization name to use to connect with Azure DevOps.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
OCI: {
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
@@ -2399,12 +2402,18 @@ export const SecretSyncs = {
env: "The name of the GitHub environment."
},
AZURE_KEY_VAULT: {
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_APP_CONFIGURATION: {
configurationUrl:
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
label: "An optional label to assign to secrets created in Azure App Configuration."
label: "An optional label to assign to secrets created in Azure App Configuration.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",

View File

@@ -204,6 +204,17 @@ const envSchema = z
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
// Special Detection Feature
PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
z
.string()
.optional()
.transform((val) => {
if (!val) return undefined;
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
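An aside on the PARAMS_FOLDER_SECRET_DETECTION_PATHS variable above — an illustrative value (hypothetical project ID and glob) showing the shape the transform expects; the parsed paths are later matched with picomatch in scanSecretPolicyViolations:
// Example only: PARAMS_FOLDER_SECRET_DETECTION_PATHS='[{"projectId":"<project-id>","secretPath":"/params/**"}]'
const parsed = JSON.parse(
  '[{"projectId":"<project-id>","secretPath":"/params/**"}]'
) as { secretPath: string; projectId: string }[];
// A secret written under a path such as /params/app1 in that project would then be scanned.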
@@ -358,6 +369,7 @@ const envSchema = z
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
@@ -484,7 +496,7 @@ export const overwriteSchema: {
]
},
azureAppConfiguration: {
name: "Azure App Configuration",
name: "Azure App Connection: App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
@@ -497,7 +509,7 @@ export const overwriteSchema: {
]
},
azureKeyVault: {
name: "Azure Key Vault",
name: "Azure App Connection: Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
@@ -510,7 +522,7 @@ export const overwriteSchema: {
]
},
azureClientSecrets: {
name: "Azure Client Secrets",
name: "Azure App Connection: Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
@@ -523,7 +535,7 @@ export const overwriteSchema: {
]
},
azureDevOps: {
name: "Azure DevOps",
name: "Azure App Connection: DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",

View File

@@ -14,6 +14,11 @@ export const blockLocalAndPrivateIpAddresses = async (url: string) => {
if (appCfg.isDevelopmentMode) return;
const validUrl = new URL(url);
if (validUrl.username || validUrl.password) {
throw new BadRequestError({ message: "URLs with user credentials (e.g., user:pass@) are not allowed" });
}
const inputHostIps: string[] = [];
if (isIPv4(validUrl.hostname)) {
inputHostIps.push(validUrl.hostname);
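A minimal illustration of the new credentials guard (hypothetical URLs), relying on the WHATWG URL parser exposing embedded credentials:
// Illustrative only:
const withCreds = new URL("https://admin:hunter2@webhook.example.com/hook");
withCreds.username; // "admin" -> blockLocalAndPrivateIpAddresses throws BadRequestError
const clean = new URL("https://webhook.example.com/hook");
clean.username; // "" -> passes this check and falls through to the IP validation that follows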

View File

@@ -1044,6 +1044,15 @@ export const registerRoutes = async (
kmsService
});
const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
kmsService,
licenseService,
orgGatewayConfigDAL,
keyStore
});
const secretSyncQueue = secretSyncQueueFactory({
queueService,
secretSyncDAL,
@@ -1067,7 +1076,8 @@ export const registerRoutes = async (
secretVersionTagV2BridgeDAL,
resourceMetadataDAL,
appConnectionDAL,
licenseService
licenseService,
gatewayService
});
const secretQueueService = secretQueueFactory({
@@ -1238,6 +1248,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
projectDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
@@ -1489,15 +1500,6 @@ export const registerRoutes = async (
licenseService
});
const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
kmsService,
licenseService,
orgGatewayConfigDAL,
keyStore
});
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
identityKubernetesAuthDAL,
identityOrgMembershipDAL,

View File

@@ -271,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true,
showSnapshotsLegacy: true
showSnapshotsLegacy: true,
secretDetectionIgnoreValues: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

View File

@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
})
})
}
@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
fipsEnabled: crypto.isFipsModeEnabled(),
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
}
};
}
@@ -685,6 +687,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
body: z.object({
email: z.string().email().trim().min(1),
password: z.string().trim().min(1),

View File

@@ -369,7 +369,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.describe(PROJECTS.UPDATE.slug),
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
secretDetectionIgnoreValues: z
.array(z.string())
.optional()
.describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
}),
response: {
200: z.object({
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
secretSharing: req.body.secretSharing,
showSnapshotsLegacy: req.body.showSnapshotsLegacy
showSnapshotsLegacy: req.body.showSnapshotsLegacy,
secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,

View File

@@ -583,7 +583,7 @@ export const appConnectionServiceFactory = ({
deleteAppConnection,
connectAppConnectionById,
listAvailableAppConnectionsForUser,
github: githubConnectionService(connectAppConnectionById),
github: githubConnectionService(connectAppConnectionById, gatewayService),
githubRadar: githubRadarConnectionService(connectAppConnectionById),
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),

View File

@@ -1,3 +1,4 @@
export enum AzureAppConfigurationConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -19,7 +20,10 @@ export const getAzureAppConfigurationConnectionListItem = () => {
return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
methods: Object.values(AzureAppConfigurationConnectionMethod) as [
AzureAppConfigurationConnectionMethod.OAuth,
AzureAppConfigurationConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};
@@ -35,71 +39,111 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
SITE_URL
} = getConfig();
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
switch (method) {
case AzureAppConfigurationConnectionMethod.OAuth:
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
tenantId: oauthCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureAppConfigurationConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
};
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://azconfig.io/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureAppConfigurationConnectionMethod}`
message: `Unhandled Azure App Configuration connection method: ${method as AzureAppConfigurationConnectionMethod}`
});
}
};

View File

@@ -22,6 +22,29 @@ export const AzureAppConfigurationConnectionOAuthOutputCredentialsSchema = z.obj
expiresAt: z.number()
});
export const AzureAppConfigurationConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long"),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long"),
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});
export const AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -30,6 +53,14 @@ export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discri
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
}),
z.object({
method: z
.literal(AzureAppConfigurationConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureAppConfiguration).method),
credentials: AzureAppConfigurationConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
})
]);
@@ -39,9 +70,13 @@ export const CreateAzureAppConfigurationConnectionSchema = ValidateAzureAppConfi
export const UpdateAzureAppConfigurationConnectionSchema = z
.object({
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials
)
credentials: z
.union([
AzureAppConfigurationConnectionOAuthInputCredentialsSchema,
AzureAppConfigurationConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureAppConfiguration));
@@ -55,6 +90,10 @@ export const AzureAppConfigurationConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.OAuth),
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureAppConfigurationConnectionSchema = z.discriminatedUni
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureAppConfigurationConnectionSchema.extend({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);
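A minimal sketch of a client-secret credentials payload accepted by the validation schema above (placeholder UUIDs and secret; generic connection fields such as the name are omitted):
// Illustrative only — matches AzureAppConfigurationConnectionClientSecretInputCredentialsSchema:
const example = {
  method: AzureAppConfigurationConnectionMethod.ClientSecret, // "client-secret"
  credentials: {
    tenantId: "00000000-0000-0000-0000-000000000000", // UUID
    clientId: "00000000-0000-0000-0000-000000000000", // UUID, at most 50 characters
    clientSecret: "<client-secret>" // 1-50 characters; the sanitized schema never returns it
  }
};
// ValidateAzureAppConfigurationConnectionCredentialsSchema.parse(example);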

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema,
AzureAppConfigurationConnectionOAuthOutputCredentialsSchema,
AzureAppConfigurationConnectionSchema,
CreateAzureAppConfigurationConnectionSchema,
@@ -39,3 +40,7 @@ export type ExchangeCodeAzureResponse = {
export type TAzureAppConfigurationConnectionCredentials = z.infer<
typeof AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
>;
export type TAzureAppConfigurationConnectionClientSecretCredentials = z.infer<
typeof AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
>;

View File

@@ -1,4 +1,5 @@
export enum AzureDevOpsConnectionMethod {
OAuth = "oauth",
AccessToken = "access-token"
AccessToken = "access-token",
ClientSecret = "client-secret"
}

View File

@@ -18,6 +18,7 @@ import { AppConnection } from "../app-connection-enums";
import { AzureDevOpsConnectionMethod } from "./azure-devops-enums";
import {
ExchangeCodeAzureResponse,
TAzureDevOpsConnectionClientSecretCredentials,
TAzureDevOpsConnectionConfig,
TAzureDevOpsConnectionCredentials
} from "./azure-devops-types";
@@ -30,7 +31,8 @@ export const getAzureDevopsConnectionListItem = () => {
app: AppConnection.AzureDevOps as const,
methods: Object.values(AzureDevOpsConnectionMethod) as [
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
AzureDevOpsConnectionMethod.AccessToken,
AzureDevOpsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
@@ -53,11 +55,7 @@ export const getAzureDevopsConnection = async (
});
}
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;
const currentTime = Date.now();
// Handle different connection methods
switch (appConnection.method) {
@@ -69,12 +67,17 @@ export const getAzureDevopsConnection = async (
});
}
if (!("refreshToken" in credentials)) {
const oauthCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;
if (!("refreshToken" in oauthCredentials)) {
throw new BadRequestError({ message: "Invalid OAuth credentials" });
}
const { refreshToken, tenantId } = credentials;
const currentTime = Date.now();
const { refreshToken, tenantId } = oauthCredentials;
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
@@ -87,29 +90,75 @@ export const getAzureDevopsConnection = async (
})
);
const updatedCredentials = {
...credentials,
const updatedOAuthCredentials = {
...oauthCredentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
credentials: updatedOAuthCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
return data.access_token;
case AzureDevOpsConnectionMethod.AccessToken:
if (!("accessToken" in credentials)) {
const accessTokenCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as { accessToken: string };
if (!("accessToken" in accessTokenCredentials)) {
throw new BadRequestError({ message: "Invalid API token credentials" });
}
// For access token, return the basic auth token directly
return credentials.accessToken;
return accessTokenCredentials.accessToken;
case AzureDevOpsConnectionMethod.ClientSecret:
const clientSecretCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId: clientTenantId } = clientSecretCredentials;
// Check if token is still valid (with 5 minute buffer)
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return accessToken;
}
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", clientTenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...clientSecretCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return clientData.access_token;
default:
throw new BadRequestError({ message: `Unsupported connection method` });
@@ -138,7 +187,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
let tokenError: AxiosError | null = null;
try {
const oauthCredentials = inputCredentials as { code: string; tenantId: string };
const oauthCredentials = inputCredentials as { code: string; tenantId: string; orgName: string };
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
@@ -262,9 +311,67 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
});
}
case AzureDevOpsConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret, orgName } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
orgName: string;
};
try {
// First, get the access token using client credentials flow
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
// Validate access to the specific organization
const response = await request.get(
`${IntegrationUrls.AZURE_DEVOPS_API_URL}/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
{
headers: {
Authorization: `Bearer ${clientData.access_token}`
}
}
);
if (response.status !== 200) {
throw new BadRequestError({
message: `Failed to validate connection to organization '${orgName}': ${response.status}`
});
}
return {
tenantId,
clientId,
clientSecret,
orgName,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to authenticate with Azure DevOps using client credentials: ${
(e?.response?.data as { error_description?: string })?.error_description || e.message
}`
});
} else {
throw new InternalServerError({
message: "Failed to validate Azure DevOps client credentials"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureDevOpsConnectionMethod}`
message: `Unhandled Azure DevOps connection method: ${method as AzureDevOpsConnectionMethod}`
});
}
};
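The ClientSecret path above exchanges the stored client ID and secret for a short-lived token via the client_credentials grant. A minimal standalone sketch of that exchange, assuming the standard Azure AD v2.0 token endpoint (https://login.microsoftonline.com/{tenant}/oauth2/v2.0/token) sits behind IntegrationUrls.AZURE_TOKEN_URL; the helper name and direct axios usage are illustrative only.

// Illustrative only: a standalone client-credentials exchange against Azure AD.
// The Azure DevOps resource scope and expires_in handling mirror the diff above.
import axios from "axios";

type TokenResponse = { access_token: string; expires_in: number };

const fetchAzureDevOpsToken = async (tenantId: string, clientId: string, clientSecret: string) => {
  const { data } = await axios.post<TokenResponse>(
    `https://login.microsoftonline.com/${tenantId || "common"}/oauth2/v2.0/token`,
    new URLSearchParams({
      grant_type: "client_credentials",
      scope: "https://app.vssps.visualstudio.com/.default",
      client_id: clientId,
      client_secret: clientSecret
    })
  );

  // Persist both the token and an absolute expiry so later calls can reuse it.
  return { accessToken: data.access_token, expiresAt: Date.now() + data.expires_in * 1000 };
};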

View File

@@ -38,6 +38,42 @@ export const AzureDevOpsConnectionAccessTokenOutputCredentialsSchema = z.object(
orgName: z.string()
});
export const AzureDevOpsConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientId),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientSecret),
tenantId: z
.string()
.uuid()
.trim()
.min(1, "Tenant ID required")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.tenantId),
orgName: z
.string()
.trim()
.min(1, "Organization name required")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.orgName)
});
export const AzureDevOpsConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
orgName: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -54,6 +90,14 @@ export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUni
credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
)
}),
z.object({
method: z
.literal(AzureDevOpsConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureDevOps).method),
credentials: AzureDevOpsConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
)
})
]);
@@ -64,7 +108,11 @@ export const CreateAzureDevOpsConnectionSchema = ValidateAzureDevOpsConnectionCr
export const UpdateAzureDevOpsConnectionSchema = z
.object({
credentials: z
.union([AzureDevOpsConnectionOAuthInputCredentialsSchema, AzureDevOpsConnectionAccessTokenInputCredentialsSchema])
.union([
AzureDevOpsConnectionOAuthInputCredentialsSchema,
AzureDevOpsConnectionAccessTokenInputCredentialsSchema,
AzureDevOpsConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
})
@@ -84,6 +132,10 @@ export const AzureDevOpsConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -101,6 +153,14 @@ export const SanitizedAzureDevOpsConnectionSchema = z.discriminatedUnion("method
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
orgName: true
})
}),
BaseAzureDevOpsConnectionSchema.extend({
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true,
orgName: true
})
})
]);
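A minimal sketch of how the extended discriminated union can be exercised against the new ClientSecret branch; the field names come from the schemas above, while the sample values and the standalone parse call are illustrative only.

// Illustrative only: parsing credentials for the new ClientSecret branch.
// Assumes the schema and enum are imported from the modules shown above.
const parsed = ValidateAzureDevOpsConnectionCredentialsSchema.parse({
  method: AzureDevOpsConnectionMethod.ClientSecret,
  credentials: {
    clientId: "00000000-0000-0000-0000-000000000000",
    clientSecret: "example-secret",
    tenantId: "11111111-1111-1111-1111-111111111111",
    orgName: "my-ado-org"
  }
});
// Because the union discriminates on "method", parsed.credentials is narrowed to
// the client-secret input shape rather than the OAuth or access-token shapes.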

View File

@@ -52,6 +52,11 @@ const getAuthHeaders = (appConnection: TAzureDevOpsConnection, accessToken: stri
Authorization: `Basic ${basicAuthToken}`,
Accept: "application/json"
};
case AzureDevOpsConnectionMethod.ClientSecret:
return {
Authorization: `Bearer ${accessToken}`,
Accept: "application/json"
};
default:
throw new BadRequestError({ message: "Unsupported connection method" });
}

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureDevOpsConnectionClientSecretOutputCredentialsSchema,
AzureDevOpsConnectionOAuthOutputCredentialsSchema,
AzureDevOpsConnectionSchema,
CreateAzureDevOpsConnectionSchema,
@@ -27,6 +28,10 @@ export type TAzureDevOpsConnectionConfig = DiscriminativePick<
export type TAzureDevOpsConnectionCredentials = z.infer<typeof AzureDevOpsConnectionOAuthOutputCredentialsSchema>;
export type TAzureDevOpsConnectionClientSecretCredentials = z.infer<
typeof AzureDevOpsConnectionClientSecretOutputCredentialsSchema
>;
export interface ExchangeCodeAzureResponse {
token_type: string;
scope: string;

View File

@@ -1,3 +1,4 @@
export enum AzureKeyVaultConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -16,25 +17,16 @@ import { AppConnection } from "../app-connection-enums";
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
import {
ExchangeCodeAzureResponse,
TAzureKeyVaultConnectionClientSecretCredentials,
TAzureKeyVaultConnectionConfig,
TAzureKeyVaultConnectionCredentials
} from "./azure-key-vault-connection-types";
export const getAzureConnectionAccessToken = async (
connectionId: string,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
@@ -49,49 +41,101 @@ export const getAzureConnectionAccessToken = async (
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
}
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionCredentials;
const currentTime = Date.now();
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: credentials.refreshToken
})
);
switch (appConnection.method) {
case AzureKeyVaultConnectionMethod.OAuth:
const appCfg = getConfig();
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const accessExpiresAt = new Date();
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
const oauthCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionCredentials;
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: accessExpiresAt.getTime(),
refreshToken: data.refresh_token
};
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: oauthCredentials.refreshToken
})
);
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
const updatedOAuthCredentials = {
...oauthCredentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
await appConnectionDAL.update(
{ id: connectionId },
{
encryptedCredentials
}
);
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
credentials: updatedOAuthCredentials,
orgId: appConnection.orgId,
kmsService
});
return {
accessToken: data.access_token
};
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
return {
accessToken: data.access_token
};
case AzureKeyVaultConnectionMethod.ClientSecret:
const clientSecretCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = clientSecretCredentials;
// Check if the token is still valid (with a 5-minute buffer)
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return { accessToken };
}
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://vault.azure.net/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...clientSecretCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return { accessToken: clientData.access_token };
default:
throw new InternalServerError({
message: `Unhandled Azure Key Vault connection method: ${appConnection.method as AzureKeyVaultConnectionMethod}`
});
}
};
export const getAzureKeyVaultConnectionListItem = () => {
@@ -100,7 +144,10 @@ export const getAzureKeyVaultConnectionListItem = () => {
return {
name: "Azure Key Vault" as const,
app: AppConnection.AzureKeyVault as const,
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
methods: Object.values(AzureKeyVaultConnectionMethod) as [
AzureKeyVaultConnectionMethod.OAuth,
AzureKeyVaultConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
};
};
@@ -111,68 +158,108 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
getConfig();
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
switch (method) {
case AzureKeyVaultConnectionMethod.OAuth:
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
tenantId: oauthCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureKeyVaultConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
};
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://vault.azure.net/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureKeyVaultConnectionMethod}`
message: `Unhandled Azure Key Vault connection method: ${method as AzureKeyVaultConnectionMethod}`
});
}
};
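The ClientSecret branch above caches the access token inside the encrypted credentials and only requests a new one when the cached token is within five minutes of expiry. A minimal sketch of that reuse check, with the buffer pulled out as a named constant; the helper name and parameter shape are illustrative.

// Illustrative only: the "reuse until 5 minutes before expiry" check used above.
const TOKEN_EXPIRY_BUFFER_MS = 5 * 60 * 1000;

const shouldReuseToken = (creds: { accessToken?: string; expiresAt?: number }, now = Date.now()) =>
  Boolean(creds.accessToken && creds.expiresAt && creds.expiresAt > now + TOKEN_EXPIRY_BUFFER_MS);

// When shouldReuseToken() returns false, the code above re-runs the client_credentials
// grant, re-encrypts the refreshed credentials with encryptAppConnectionCredentials,
// and persists them via appConnectionDAL.updateById.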

View File

@@ -22,6 +22,29 @@ export const AzureKeyVaultConnectionOAuthOutputCredentialsSchema = z.object({
expiresAt: z.number()
});
export const AzureKeyVaultConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long"),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long"),
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});
export const AzureKeyVaultConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -30,6 +53,14 @@ export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedU
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
)
}),
z.object({
method: z
.literal(AzureKeyVaultConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureKeyVault).method),
credentials: AzureKeyVaultConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
)
})
]);
@@ -39,9 +70,13 @@ export const CreateAzureKeyVaultConnectionSchema = ValidateAzureKeyVaultConnecti
export const UpdateAzureKeyVaultConnectionSchema = z
.object({
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials
)
credentials: z
.union([
AzureKeyVaultConnectionOAuthInputCredentialsSchema,
AzureKeyVaultConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));
@@ -55,6 +90,10 @@ export const AzureKeyVaultConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureKeyVaultConnectionSchema = z.discriminatedUnion("meth
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureKeyVaultConnectionSchema.extend({
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureKeyVaultConnectionClientSecretOutputCredentialsSchema,
AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
AzureKeyVaultConnectionSchema,
CreateAzureKeyVaultConnectionSchema,
@@ -36,3 +37,7 @@ export type ExchangeCodeAzureResponse = {
};
export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;
export type TAzureKeyVaultConnectionClientSecretCredentials = z.infer<
typeof AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
>;

View File

@@ -1,10 +1,16 @@
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { AxiosResponse } from "axios";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { request as httpRequest } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
@@ -24,123 +30,224 @@ export const getGitHubConnectionListItem = () => {
};
};
export const getGitHubClient = (appConnection: TGitHubConnection) => {
export const requestWithGitHubGateway = async <T>(
appConnection: { gatewayId?: string | null },
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
requestConfig: AxiosRequestConfig
): Promise<AxiosResponse<T>> => {
const { gatewayId } = appConnection;
// If a gateway isn't set up, don't proxy the request
if (!gatewayId) {
return httpRequest.request(requestConfig);
}
const url = new URL(requestConfig.url as string);
await blockLocalAndPrivateIpAddresses(url.toString());
const [targetHost] = await verifyHostInputValidity(url.host, true);
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
return withGatewayProxy(
async (proxyPort) => {
const httpsAgent = new https.Agent({
servername: targetHost
});
url.protocol = "https:";
url.host = `localhost:${proxyPort}`;
const finalRequestConfig: AxiosRequestConfig = {
...requestConfig,
url: url.toString(),
httpsAgent,
headers: {
...requestConfig.headers,
Host: targetHost
}
};
try {
return await httpRequest.request(finalRequestConfig);
} catch (error) {
const axiosError = error as AxiosError;
logger.error("Error during GitHub gateway request:", axiosError.message, axiosError.response?.data);
throw error;
}
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost,
targetPort: 443,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
const appCfg = getConfig();
const { method, credentials } = appConnection;
let client: Octokit;
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
switch (method) {
case GitHubConnectionMethod.App:
if (!appId || !appPrivateKey) {
throw new InternalServerError({
message: `GitHub ${getAppConnectionMethodName(method).replace("GitHub", "")} has not been configured`
});
}
client = new Octokit({
authStrategy: createAppAuth,
auth: {
appId,
privateKey: appPrivateKey,
installationId: credentials.installationId
}
});
break;
case GitHubConnectionMethod.OAuth:
client = new Octokit({
auth: credentials.accessToken
});
break;
default:
throw new InternalServerError({
message: `Unhandled GitHub connection method: ${method as GitHubConnectionMethod}`
});
if (!appId || !appPrivateKey) {
throw new InternalServerError({
message: `GitHub App keys are not configured.`
});
}
return client;
if (appConnection.method !== GitHubConnectionMethod.App) {
throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
}
const appAuth = createAppAuth({
appId,
privateKey: appPrivateKey,
installationId: appConnection.credentials.installationId
});
const { token } = await appAuth({ type: "installation" });
return token;
};
function extractNextPageUrl(linkHeader: string | undefined): string | null {
if (!linkHeader) return null;
const links = linkHeader.split(",");
const nextLink = links.find((link) => link.includes('rel="next"'));
if (!nextLink) return null;
const match = new RE2(/<([^>]+)>/).exec(nextLink);
return match ? match[1] : null;
}
export const makePaginatedGitHubRequest = async <T, R = T[]>(
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
path: string,
dataMapper?: (data: R) => T[]
): Promise<T[]> => {
const { credentials, method } = appConnection;
const token =
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
let url: string | null = `https://api.${credentials.host || "github.com"}${path}`;
let results: T[] = [];
let i = 0;
while (url && i < 1000) {
// eslint-disable-next-line no-await-in-loop
const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
url,
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
results = results.concat(items);
url = extractNextPageUrl(response.headers.link as string | undefined);
i += 1;
}
return results;
};
type GitHubOrganization = {
login: string;
id: number;
type: string;
};
type GitHubRepository = {
id: number;
name: string;
owner: GitHubOrganization;
permissions?: {
admin: boolean;
maintain: boolean;
push: boolean;
triage: boolean;
pull: boolean;
};
};
export const getGitHubRepositories = async (appConnection: TGitHubConnection) => {
const client = getGitHubClient(appConnection);
type GitHubEnvironment = {
id: number;
name: string;
};
let repositories: GitHubRepository[];
switch (appConnection.method) {
case GitHubConnectionMethod.App:
repositories = await client.paginate("GET /installation/repositories");
break;
case GitHubConnectionMethod.OAuth:
default:
repositories = (await client.paginate("GET /user/repos")).filter((repo) => repo.permissions?.admin);
break;
export const getGitHubRepositories = async (
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
if (appConnection.method === GitHubConnectionMethod.App) {
return makePaginatedGitHubRequest<GitHubRepository, { repositories: GitHubRepository[] }>(
appConnection,
gatewayService,
"/installation/repositories",
(data) => data.repositories
);
}
return repositories;
const repos = await makePaginatedGitHubRequest<GitHubRepository>(appConnection, gatewayService, "/user/repos");
return repos.filter((repo) => repo.permissions?.admin);
};
export const getGitHubOrganizations = async (appConnection: TGitHubConnection) => {
const client = getGitHubClient(appConnection);
export const getGitHubOrganizations = async (
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
if (appConnection.method === GitHubConnectionMethod.App) {
const installationRepositories = await makePaginatedGitHubRequest<
GitHubRepository,
{ repositories: GitHubRepository[] }
>(appConnection, gatewayService, "/installation/repositories", (data) => data.repositories);
let organizations: GitHubOrganization[];
switch (appConnection.method) {
case GitHubConnectionMethod.App: {
const installationRepositories = await client.paginate("GET /installation/repositories");
const organizationMap: Record<string, GitHubOrganization> = {};
installationRepositories.forEach((repo) => {
if (repo.owner.type === "Organization") {
organizationMap[repo.owner.id] = repo.owner;
}
});
organizations = Object.values(organizationMap);
break;
}
case GitHubConnectionMethod.OAuth:
default:
organizations = await client.paginate("GET /user/orgs");
break;
}
return organizations;
};
export const getGitHubEnvironments = async (appConnection: TGitHubConnection, owner: string, repo: string) => {
const client = getGitHubClient(appConnection);
try {
const environments = await client.paginate("GET /repos/{owner}/{repo}/environments", {
owner,
repo
const organizationMap: Record<string, GitHubOrganization> = {};
installationRepositories.forEach((repo) => {
if (repo.owner.type === "Organization") {
organizationMap[repo.owner.id] = repo.owner;
}
});
return environments;
} catch (e) {
// repo doesn't have envs
if ((e as { status: number }).status === 404) {
return [];
}
return Object.values(organizationMap);
}
throw e;
return makePaginatedGitHubRequest<GitHubOrganization>(appConnection, gatewayService, "/user/orgs");
};
export const getGitHubEnvironments = async (
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
owner: string,
repo: string
) => {
try {
return await makePaginatedGitHubRequest<GitHubEnvironment, { environments: GitHubEnvironment[] }>(
appConnection,
gatewayService,
`/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/environments`,
(data) => data.environments
);
} catch (error) {
const axiosError = error as AxiosError;
if (axiosError.response?.status === 404) return [];
throw error;
}
};
@@ -159,9 +266,11 @@ export function isGithubErrorResponse(data: GithubTokenRespData): data is Github
return "error" in data;
}
export const validateGitHubConnectionCredentials = async (config: TGitHubConnectionConfig) => {
export const validateGitHubConnectionCredentials = async (
config: TGitHubConnectionConfig,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const { credentials, method } = config;
const {
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID,
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET,
@@ -192,10 +301,13 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
}
let tokenResp: AxiosResponse<GithubTokenRespData>;
const host = credentials.host || "github.com";
try {
tokenResp = await request.get<GithubTokenRespData>("https://github.com/login/oauth/access_token", {
params: {
tokenResp = await requestWithGitHubGateway<GithubTokenRespData>(config, gatewayService, {
url: `https://${host}/login/oauth/access_token`,
method: "POST",
data: {
client_id: clientId,
client_secret: clientSecret,
code: credentials.code,
@@ -203,7 +315,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
},
headers: {
Accept: "application/json",
"Accept-Encoding": "application/json"
"Content-Type": "application/json"
}
});
@@ -233,7 +345,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
}
const installationsResp = await request.get<{
const installationsResp = await requestWithGitHubGateway<{
installations: {
id: number;
account: {
@@ -242,7 +354,8 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
id: number;
};
}[];
}>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
}>(config, gatewayService, {
url: IntegrationUrls.GITHUB_USER_INSTALLATIONS.replace("api.github.com", `api.${host}`),
headers: {
Accept: "application/json",
Authorization: `Bearer ${tokenResp.data.access_token}`,
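A minimal usage sketch of the helpers introduced above: requestWithGitHubGateway routes the call through the connection's gateway when gatewayId is set, and makePaginatedGitHubRequest follows the Link header's rel="next" pages (capped at 1000 iterations). The repository type and wrapper function here are illustrative placeholders.

// Illustrative only: listing installation repositories through the new helpers.
// The minimal repo type and wrapper are placeholders; header and token handling
// happen inside makePaginatedGitHubRequest as shown in the diff above.
type MinimalRepo = { id: number; full_name: string };

const listInstallationRepos = async (
  connection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) =>
  makePaginatedGitHubRequest<MinimalRepo, { repositories: MinimalRepo[] }>(
    connection,
    gatewayService,
    "/installation/repositories",
    (page) => page.repositories
  );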

View File

@@ -11,20 +11,24 @@ import {
import { GitHubConnectionMethod } from "./github-connection-enums";
export const GitHubConnectionOAuthInputCredentialsSchema = z.object({
code: z.string().trim().min(1, "OAuth code required")
code: z.string().trim().min(1, "OAuth code required"),
host: z.string().trim().optional()
});
export const GitHubConnectionAppInputCredentialsSchema = z.object({
code: z.string().trim().min(1, "GitHub App code required"),
installationId: z.string().min(1, "GitHub App Installation ID required")
installationId: z.string().min(1, "GitHub App Installation ID required"),
host: z.string().trim().optional()
});
export const GitHubConnectionOAuthOutputCredentialsSchema = z.object({
accessToken: z.string()
accessToken: z.string(),
host: z.string().trim().optional()
});
export const GitHubConnectionAppOutputCredentialsSchema = z.object({
installationId: z.string()
installationId: z.string(),
host: z.string().trim().optional()
});
export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("method", [
@@ -43,7 +47,9 @@ export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("m
]);
export const CreateGitHubConnectionSchema = ValidateGitHubConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub)
GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub, {
supportsGateways: true
})
);
export const UpdateGitHubConnectionSchema = z
@@ -53,7 +59,11 @@ export const UpdateGitHubConnectionSchema = z
.optional()
.describe(AppConnections.UPDATE(AppConnection.GitHub).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub));
.and(
GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub, {
supportsGateways: true
})
);
const BaseGitHubConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHub) });

View File

@@ -1,3 +1,4 @@
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
@@ -19,11 +20,14 @@ type TListGitHubEnvironmentsDTO = {
owner: string;
};
export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
export const githubConnectionService = (
getAppConnection: TGetAppConnectionFunc,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
const repositories = await getGitHubRepositories(appConnection);
const repositories = await getGitHubRepositories(appConnection, gatewayService);
return repositories;
};
@@ -31,7 +35,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
const listOrganizations = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
const organizations = await getGitHubOrganizations(appConnection);
const organizations = await getGitHubOrganizations(appConnection, gatewayService);
return organizations;
};
@@ -42,7 +46,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
) => {
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
const environments = await getGitHubEnvironments(appConnection, owner, repo);
const environments = await getGitHubEnvironments(appConnection, gatewayService, owner, repo);
return environments;
};

View File

@@ -17,4 +17,7 @@ export type TGitHubConnectionInput = z.infer<typeof CreateGitHubConnectionSchema
export type TValidateGitHubConnectionCredentialsSchema = typeof ValidateGitHubConnectionCredentialsSchema;
export type TGitHubConnectionConfig = DiscriminativePick<TGitHubConnectionInput, "method" | "app" | "credentials">;
export type TGitHubConnectionConfig = DiscriminativePick<
TGitHubConnectionInput,
"method" | "app" | "credentials" | "gatewayId"
>;

View File

@@ -645,7 +645,7 @@ export const projectServiceFactory = ({
const updateProject = async ({ actor, actorId, actorOrgId, actorAuthMethod, update, filter }: TUpdateProjectDTO) => {
const project = await projectDAL.findProjectByFilter(filter);
const { permission } = await permissionService.getProjectPermission({
const { permission, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
@@ -667,6 +667,12 @@ export const projectServiceFactory = ({
}
}
if (update.secretDetectionIgnoreValues && !hasRole(ProjectMembershipRole.Admin)) {
throw new ForbiddenRequestError({
message: "Only admins can update secret detection ignore values"
});
}
const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
@@ -676,7 +682,8 @@ export const projectServiceFactory = ({
slug: update.slug,
secretSharing: update.secretSharing,
defaultProduct: update.defaultProduct,
showSnapshotsLegacy: update.showSnapshotsLegacy
showSnapshotsLegacy: update.showSnapshotsLegacy,
secretDetectionIgnoreValues: update.secretDetectionIgnoreValues
});
return updatedProject;

View File

@@ -96,6 +96,7 @@ export type TUpdateProjectDTO = {
slug?: string;
secretSharing?: boolean;
showSnapshotsLegacy?: boolean;
secretDetectionIgnoreValues?: string[];
};
} & Omit<TProjectPermission, "projectId">;

View File

@@ -13,7 +13,7 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { TAzureAppConfigurationSyncWithCredentials } from "./azure-app-configuration-sync-types";
type TAzureAppConfigurationSyncFactoryDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

View File

@@ -12,7 +12,7 @@ import { SecretSyncError } from "../secret-sync-errors";
import { GetAzureKeyVaultSecret, TAzureKeyVaultSyncWithCredentials } from "./azure-key-vault-sync-types";
type TAzureKeyVaultSyncFactoryDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

View File

@@ -1,7 +1,12 @@
import { Octokit } from "@octokit/rest";
import sodium from "libsodium-wrappers";
import { getGitHubClient } from "@app/services/app-connection/github";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import {
getGitHubAppAuthToken,
GitHubConnectionMethod,
makePaginatedGitHubRequest,
requestWithGitHubGateway
} from "@app/services/app-connection/github";
import { GitHubSyncScope, GitHubSyncVisibility } from "@app/services/secret-sync/github/github-sync-enums";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
@@ -12,155 +17,165 @@ import { TGitHubPublicKey, TGitHubSecret, TGitHubSecretPayload, TGitHubSyncWithC
// TODO: rate limit handling
const getEncryptedSecrets = async (client: Octokit, secretSync: TGitHubSyncWithCredentials) => {
let encryptedSecrets: TGitHubSecret[];
const { destinationConfig } = secretSync;
const getEncryptedSecrets = async (
secretSync: TGitHubSyncWithCredentials,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const { destinationConfig, connection } = secretSync;
let path: string;
switch (destinationConfig.scope) {
case GitHubSyncScope.Organization: {
encryptedSecrets = await client.paginate("GET /orgs/{org}/actions/secrets", {
org: destinationConfig.org
});
path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets`;
break;
}
case GitHubSyncScope.Repository: {
encryptedSecrets = await client.paginate("GET /repos/{owner}/{repo}/actions/secrets", {
owner: destinationConfig.owner,
repo: destinationConfig.repo
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets`;
break;
}
case GitHubSyncScope.RepositoryEnvironment:
default: {
encryptedSecrets = await client.paginate("GET /repos/{owner}/{repo}/environments/{environment_name}/secrets", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
environment_name: destinationConfig.env
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets`;
break;
}
}
return encryptedSecrets;
return makePaginatedGitHubRequest<TGitHubSecret, { secrets: TGitHubSecret[] }>(
connection,
gatewayService,
path,
(data) => data.secrets
);
};
const getPublicKey = async (client: Octokit, secretSync: TGitHubSyncWithCredentials) => {
let publicKey: TGitHubPublicKey;
const { destinationConfig } = secretSync;
const getPublicKey = async (
secretSync: TGitHubSyncWithCredentials,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
token: string
) => {
const { destinationConfig, connection } = secretSync;
let path: string;
switch (destinationConfig.scope) {
case GitHubSyncScope.Organization: {
publicKey = (
await client.request("GET /orgs/{org}/actions/secrets/public-key", {
org: destinationConfig.org
})
).data;
path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/public-key`;
break;
}
case GitHubSyncScope.Repository: {
publicKey = (
await client.request("GET /repos/{owner}/{repo}/actions/secrets/public-key", {
owner: destinationConfig.owner,
repo: destinationConfig.repo
})
).data;
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/public-key`;
break;
}
case GitHubSyncScope.RepositoryEnvironment:
default: {
publicKey = (
await client.request("GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
environment_name: destinationConfig.env
})
).data;
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/public-key`;
break;
}
}
return publicKey;
const response = await requestWithGitHubGateway<TGitHubPublicKey>(connection, gatewayService, {
url: `https://api.${connection.credentials.host || "github.com"}${path}`,
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
return response.data;
};
const deleteSecret = async (
client: Octokit,
secretSync: TGitHubSyncWithCredentials,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
token: string,
encryptedSecret: TGitHubSecret
) => {
const { destinationConfig } = secretSync;
const { destinationConfig, connection } = secretSync;
let path: string;
switch (destinationConfig.scope) {
case GitHubSyncScope.Organization: {
await client.request(`DELETE /orgs/{org}/actions/secrets/{secret_name}`, {
org: destinationConfig.org,
secret_name: encryptedSecret.name
});
path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/${encodeURIComponent(encryptedSecret.name)}`;
break;
}
case GitHubSyncScope.Repository: {
await client.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
secret_name: encryptedSecret.name
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/${encodeURIComponent(encryptedSecret.name)}`;
break;
}
case GitHubSyncScope.RepositoryEnvironment:
default: {
await client.request("DELETE /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
environment_name: destinationConfig.env,
secret_name: encryptedSecret.name
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/${encodeURIComponent(encryptedSecret.name)}`;
break;
}
}
await requestWithGitHubGateway(connection, gatewayService, {
url: `https://api.${connection.credentials.host || "github.com"}${path}`,
method: "DELETE",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
};
const putSecret = async (client: Octokit, secretSync: TGitHubSyncWithCredentials, payload: TGitHubSecretPayload) => {
const { destinationConfig } = secretSync;
const putSecret = async (
secretSync: TGitHubSyncWithCredentials,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
token: string,
payload: TGitHubSecretPayload
) => {
const { destinationConfig, connection } = secretSync;
let path: string;
let body: Record<string, string | number[]> = payload;
switch (destinationConfig.scope) {
case GitHubSyncScope.Organization: {
const { visibility, selectedRepositoryIds } = destinationConfig;
await client.request(`PUT /orgs/{org}/actions/secrets/{secret_name}`, {
org: destinationConfig.org,
path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/${encodeURIComponent(payload.secret_name)}`;
body = {
...payload,
visibility,
...(visibility === GitHubSyncVisibility.Selected && {
selected_repository_ids: selectedRepositoryIds
})
});
};
break;
}
case GitHubSyncScope.Repository: {
await client.request("PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
...payload
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/${encodeURIComponent(payload.secret_name)}`;
break;
}
case GitHubSyncScope.RepositoryEnvironment:
default: {
await client.request("PUT /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", {
owner: destinationConfig.owner,
repo: destinationConfig.repo,
environment_name: destinationConfig.env,
...payload
});
path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/${encodeURIComponent(payload.secret_name)}`;
break;
}
}
await requestWithGitHubGateway(connection, gatewayService, {
url: `https://api.${connection.credentials.host || "github.com"}${path}`,
method: "PUT",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
},
data: body
});
};
export const GithubSyncFns = {
syncSecrets: async (secretSync: TGitHubSyncWithCredentials, secretMap: TSecretMap) => {
syncSecrets: async (
secretSync: TGitHubSyncWithCredentials,
ogSecretMap: TSecretMap,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const secretMap = Object.fromEntries(Object.entries(ogSecretMap).map(([i, v]) => [i.toUpperCase(), v]));
switch (secretSync.destinationConfig.scope) {
case GitHubSyncScope.Organization:
if (Object.values(secretMap).length > 1000) {
@@ -187,38 +202,40 @@ export const GithubSyncFns = {
);
}
const client = getGitHubClient(secretSync.connection);
const { connection } = secretSync;
const token =
connection.method === GitHubConnectionMethod.OAuth
? connection.credentials.accessToken
: await getGitHubAppAuthToken(connection);
const encryptedSecrets = await getEncryptedSecrets(client, secretSync);
const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);
const publicKey = await getPublicKey(secretSync, gatewayService, token);
const publicKey = await getPublicKey(client, secretSync);
await sodium.ready;
for await (const key of Object.keys(secretMap)) {
// convert secret & base64 key to Uint8Array.
const binaryKey = sodium.from_base64(publicKey.key, sodium.base64_variants.ORIGINAL);
const binarySecretValue = sodium.from_string(secretMap[key].value);
await sodium.ready.then(async () => {
for await (const key of Object.keys(secretMap)) {
// convert secret & base64 key to Uint8Array.
const binaryKey = sodium.from_base64(publicKey.key, sodium.base64_variants.ORIGINAL);
const binarySecretValue = sodium.from_string(secretMap[key].value);
// encrypt secret using libsodium
const encryptedBytes = sodium.crypto_box_seal(binarySecretValue, binaryKey);
// encrypt secret using libsodium
const encryptedBytes = sodium.crypto_box_seal(binarySecretValue, binaryKey);
// convert encrypted Uint8Array to base64
const encryptedSecretValue = sodium.to_base64(encryptedBytes, sodium.base64_variants.ORIGINAL);
// convert encrypted Uint8Array to base64
const encryptedSecretValue = sodium.to_base64(encryptedBytes, sodium.base64_variants.ORIGINAL);
try {
await putSecret(client, secretSync, {
secret_name: key,
encrypted_value: encryptedSecretValue,
key_id: publicKey.key_id
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
try {
await putSecret(secretSync, gatewayService, token, {
secret_name: key,
encrypted_value: encryptedSecretValue,
key_id: publicKey.key_id
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
});
}
if (secretSync.syncOptions.disableSecretDeletion) return;
@@ -228,21 +245,31 @@ export const GithubSyncFns = {
continue;
if (!(encryptedSecret.name in secretMap)) {
await deleteSecret(client, secretSync, encryptedSecret);
await deleteSecret(secretSync, gatewayService, token, encryptedSecret);
}
}
},
getSecrets: async (secretSync: TGitHubSyncWithCredentials) => {
throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`);
},
removeSecrets: async (secretSync: TGitHubSyncWithCredentials, secretMap: TSecretMap) => {
const client = getGitHubClient(secretSync.connection);
removeSecrets: async (
secretSync: TGitHubSyncWithCredentials,
ogSecretMap: TSecretMap,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const secretMap = Object.fromEntries(Object.entries(ogSecretMap).map(([i, v]) => [i.toUpperCase(), v]));
const encryptedSecrets = await getEncryptedSecrets(client, secretSync);
const { connection } = secretSync;
const token =
connection.method === GitHubConnectionMethod.OAuth
? connection.credentials.accessToken
: await getGitHubAppAuthToken(connection);
const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);
for await (const encryptedSecret of encryptedSecrets) {
if (encryptedSecret.name in secretMap) {
await deleteSecret(client, secretSync, encryptedSecret);
await deleteSecret(secretSync, gatewayService, token, encryptedSecret);
}
}
}
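The sync path above seals each secret value with the destination's public key using a libsodium sealed box before PUTting it to the GitHub secrets API. A minimal standalone sketch of that encryption step; the function name is illustrative, while the sodium calls and base64 variants mirror the code above.

// Illustrative only: sealing a secret value for the GitHub Actions secrets API.
import sodium from "libsodium-wrappers";

const sealForGitHub = async (publicKeyB64: string, plaintext: string) => {
  await sodium.ready;
  const binaryKey = sodium.from_base64(publicKeyB64, sodium.base64_variants.ORIGINAL);
  const binaryValue = sodium.from_string(plaintext);
  const sealed = sodium.crypto_box_seal(binaryValue, binaryKey);
  // GitHub expects the sealed value re-encoded as standard base64.
  return sodium.to_base64(sealed, sodium.base64_variants.ORIGINAL);
};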

View File

@@ -1,6 +1,7 @@
import { AxiosError } from "axios";
import handlebars from "handlebars";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OCI_VAULT_SYNC_LIST_OPTION, OCIVaultSyncFns } from "@app/ee/services/secret-sync/oci-vault";
import { BadRequestError } from "@app/lib/errors";
@@ -97,6 +98,7 @@ export const listSecretSyncOptions = () => {
type TSyncSecretDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
// Add schema to secret keys
@@ -191,7 +193,7 @@ export const SecretSyncFns = {
syncSecrets: (
secretSync: TSecretSyncWithCredentials,
secretMap: TSecretMap,
{ kmsService, appConnectionDAL }: TSyncSecretDeps
{ kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
): Promise<void> => {
const schemaSecretMap = addSchema(secretMap, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema);
@@ -201,7 +203,7 @@ export const SecretSyncFns = {
case SecretSync.AWSSecretsManager:
return AwsSecretsManagerSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.GitHub:
return GithubSyncFns.syncSecrets(secretSync, schemaSecretMap);
return GithubSyncFns.syncSecrets(secretSync, schemaSecretMap, gatewayService);
case SecretSync.GCPSecretManager:
return GcpSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.AzureKeyVault:
@@ -395,7 +397,7 @@ export const SecretSyncFns = {
removeSecrets: (
secretSync: TSecretSyncWithCredentials,
secretMap: TSecretMap,
{ kmsService, appConnectionDAL }: TSyncSecretDeps
{ kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
): Promise<void> => {
const schemaSecretMap = addSchema(secretMap, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema);
@@ -405,7 +407,7 @@ export const SecretSyncFns = {
case SecretSync.AWSSecretsManager:
return AwsSecretsManagerSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.GitHub:
return GithubSyncFns.removeSecrets(secretSync, schemaSecretMap);
return GithubSyncFns.removeSecrets(secretSync, schemaSecretMap, gatewayService);
case SecretSync.GCPSecretManager:
return GcpSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.AzureKeyVault:

View File

@@ -4,6 +4,7 @@ import { Job } from "bullmq";
import { ProjectMembershipRole, SecretType } from "@app/db/schemas";
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
@@ -96,6 +97,7 @@ type TSecretSyncQueueFactoryDep = {
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
type SecretSyncActionJob = Job<
@@ -138,7 +140,8 @@ export const secretSyncQueueFactory = ({
secretVersionTagV2BridgeDAL,
resourceMetadataDAL,
folderCommitService,
licenseService
licenseService,
gatewayService
}: TSecretSyncQueueFactoryDep) => {
const appCfg = getConfig();
@@ -353,7 +356,8 @@ export const secretSyncQueueFactory = ({
const importedSecrets = await SecretSyncFns.getSecrets(secretSync, {
appConnectionDAL,
kmsService
kmsService,
gatewayService
});
if (!Object.keys(importedSecrets).length) return {};
@@ -481,7 +485,8 @@ export const secretSyncQueueFactory = ({
await SecretSyncFns.syncSecrets(secretSyncWithCredentials, secretMap, {
appConnectionDAL,
kmsService
kmsService,
gatewayService
});
isSynced = true;
@@ -730,7 +735,8 @@ export const secretSyncQueueFactory = ({
secretMap,
{
appConnectionDAL,
kmsService
kmsService,
gatewayService
}
);

View File

@@ -25,6 +25,7 @@ import {
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { DatabaseErrorCode } from "@app/lib/error-codes";
@@ -38,6 +39,7 @@ import { ActorType } from "../auth/auth-type";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TReminderServiceFactory } from "../reminder/reminder-types";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
@@ -88,6 +90,7 @@ import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal";
type TSecretV2BridgeServiceFactoryDep = {
secretDAL: TSecretV2BridgeDALFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
secretVersionDAL: TSecretVersionV2DALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@@ -126,6 +129,7 @@ export type TSecretV2BridgeServiceFactory = ReturnType<typeof secretV2BridgeServ
*/
export const secretV2BridgeServiceFactory = ({
secretDAL,
projectDAL,
projectEnvDAL,
secretTagDAL,
secretVersionDAL,
@@ -295,6 +299,19 @@ export const secretV2BridgeServiceFactory = ({
})
);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.secretName,
secretValue: inputSecret.secretValue
}
],
project.secretDetectionIgnoreValues || []
);
const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue);
const allSecretReferences = nestedReferences.concat(
localReferences.map((el) => ({ secretKey: el, secretPath, environment }))
@@ -506,6 +523,21 @@ export const secretV2BridgeServiceFactory = ({
const { secretName, secretValue } = inputSecret;
if (secretValue) {
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.newSecretName || secretName,
secretValue
}
],
project.secretDetectionIgnoreValues || []
);
}
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
@@ -1585,6 +1617,9 @@ export const secretV2BridgeServiceFactory = ({
if (secrets.length)
throw new BadRequestError({ message: `Secrets already exist: ${secrets.map((el) => el.key).join(",")}` });
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(projectId, secretPath, inputSecrets, project.secretDetectionIgnoreValues || []);
// get all tags
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
@@ -1925,6 +1960,19 @@ export const secretV2BridgeServiceFactory = ({
});
await $validateSecretReferences(projectId, permission, secretReferences, tx);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
secretsToUpdate
.filter((el) => el.secretValue)
.map((el) => ({
secretKey: el.newSecretName || el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
folderId,
orgId: actorOrgId,

cli/.gitignore vendored
View File

@@ -1,4 +0,0 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env

View File

@@ -1,3 +0,0 @@
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148

View File

@@ -1,37 +0,0 @@
infisical:
address: "https://app.infisical.com/"
auth:
type: "universal-auth"
config:
client-id: "./client-id"
client-secret: "./client-secret"
remove_client_secret_on_read: false
sinks:
- type: "file"
config:
path: "access-token"
templates:
- template-content: |
{{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
{{- range . }}
{{ .Key }}={{ .Value }}
{{- end }}
{{- end }}
destination-path: my-dot-env-0.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
destination-path: my-dot-env.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- source-path: my-dot-ev-secret-template1
destination-path: my-dot-env-1.env
config:
exec:
command: mkdir hello-world1

View File

@@ -1,103 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.StartColumn == b.StartColumn &&
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
return true
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
}
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}
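
The baseline helpers above are exported, so a downstream caller can compare a fresh scan against a previously generated report. A minimal usage sketch, assuming the package is importable as github.com/Infisical/infisical-merge/detect; the baseline path and finding values are illustrative, not part of this commit:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	// Load findings from a previous run to use as the baseline (path is illustrative).
	baseline, err := detect.LoadBaseline("baseline.json")
	if err != nil {
		panic(err)
	}
	// A finding produced by the current scan (values are illustrative).
	current := report.Finding{RuleID: "generic-api-key", File: "config.env", StartLine: 3}
	// With redact set to 0, Match and Secret are compared as well.
	if detect.IsNew(current, 0, baseline) {
		fmt.Println("new finding:", current.RuleID)
	}
}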

View File

@@ -1,70 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
BitBucketPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
"bitbucket",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
case "bitbucket":
return BitBucketPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}
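
As a small illustration of how this parsing is meant to be used, a caller round-trips a user-supplied string through PlatformFromString and falls back to UnknownPlatform on error. A minimal sketch; the --platform flag name is an assumption for illustration, not something defined in this commit:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/scm"
)

func main() {
	// Parse a user-supplied value, e.g. from a --platform flag (flag name is illustrative).
	platform, err := scm.PlatformFromString("github")
	if err != nil {
		platform = scm.UnknownPlatform
	}
	fmt.Println(platform.String()) // prints "github"
}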

View File

@@ -1,159 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// MatchCondition determines whether all criteria must match.
MatchCondition AllowlistMatchCondition
// Commits is a slice of commit SHAs that are allowed to be ignored. Defaults to "OR".
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is a slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true, c
}
}
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true, stopWord
}
}
return false, ""
}
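
A short sketch of how these checks compose, assuming the config and regexp packages are imported from this repository as shown in the diff; the path pattern and stop word are illustrative:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	a := &config.Allowlist{
		Description: "ignore test fixtures",
		Paths:       []*regexp.Regexp{regexp.MustCompile(`(^|/)testdata/`)},
		StopWords:   []string{"example"},
	}
	// Validate deduplicates entries and rejects empty allowlists.
	if err := a.Validate(); err != nil {
		panic(err)
	}
	fmt.Println(a.PathAllowed("detect/testdata/config.toml")) // true
	fmt.Println(a.ContainsStopWord("my-example-secret"))      // true example
}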

View File

@@ -1,426 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both useDefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
for _, a := range currentRule.Allowlists {
baseRule.Allowlists = append(baseRule.Allowlists, a)
}
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}
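
Putting the pieces together, a caller loads a TOML scan config through Viper, unmarshals it into ViperConfig, and calls Translate to get the compiled Config; this mirrors what the detector's default-config constructor does with the embedded config. A minimal sketch, assuming a local .infisical-scan.toml exists; its contents are not shown here:

package main

import (
	"fmt"

	"github.com/spf13/viper"

	"github.com/Infisical/infisical-merge/detect/config"
)

func main() {
	viper.SetConfigFile(config.DefaultScanConfigFileName) // ".infisical-scan.toml"
	if err := viper.ReadInConfig(); err != nil {
		panic(err)
	}
	var vc config.ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		panic(err)
	}
	cfg, err := vc.Translate()
	if err != nil {
		panic(err)
	}
	fmt.Println("loaded rules:", len(cfg.GetOrderedRules()))
}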

File diff suppressed because it is too large

View File

@@ -1,114 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rule contains information that defines how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
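
For reference, a sketch of defining and validating a rule programmatically; the rule ID, pattern, entropy threshold, and keyword are illustrative values, not rules shipped by this commit:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	r := config.Rule{
		RuleID:      "example-api-key",
		Description: "Example API key",
		Regex:       regexp.MustCompile(`example_key_[0-9a-f]{32}`),
		Entropy:     3.0,
		Keywords:    []string{"example_key_"},
	}
	// Validate catches missing IDs, rules with neither regex nor path, and bad secret groups.
	if err := r.Validate(); err != nil {
		panic(err)
	}
	fmt.Println("rule ok:", r.RuleID)
}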

View File

@@ -1,46 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"github.com/Infisical/infisical-merge/detect/regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

View File

@@ -1,328 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decodeValue tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// Find a segment whose decoded bounds overlap a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}

View File

@@ -1,699 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"context"
"fmt"
"os"
"runtime"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
"github.com/Infisical/infisical-merge/detect/report"
ahocorasick "github.com/BobuSumisu/aho-corasick"
"github.com/fatih/semgroup"
"github.com/rs/zerolog"
"github.com/spf13/viper"
"golang.org/x/exp/maps"
)
const (
gitleaksAllowSignature = "gitleaks:allow"
chunkSize = 100 * 1_000 // 100kb
// SlowWarningThreshold is the amount of time to wait before logging that a file is slow.
// This is useful for identifying problematic files and tuning the allowlist.
SlowWarningThreshold = 5 * time.Second
)
var (
newLineRegexp = regexp.MustCompile("\n")
isWindows = runtime.GOOS == "windows"
)
// Detector is the main detector struct
type Detector struct {
// Config is the configuration for the detector
Config config.Config
// Redact is a flag to redact findings. This is exported
// so users using gitleaks as a library can set this flag
// without calling `detector.Start(cmd *cobra.Command)`
Redact uint
// verbose is a flag to print findings
Verbose bool
// MaxDecodeDepths limits how many recursive decoding passes are allowed
MaxDecodeDepth int
// files larger than this will be skipped
MaxTargetMegaBytes int
// followSymlinks is a flag to enable scanning symlink files
FollowSymlinks bool
// NoColor is a flag to disable color output
NoColor bool
// IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
IgnoreGitleaksAllow bool
// commitMap is used to keep track of commits that have been scanned.
// This is only used for logging purposes and git scans.
commitMap map[string]bool
// findingMutex is to prevent concurrent access to the
// findings slice when adding findings.
findingMutex *sync.Mutex
// findings is a slice of report.Findings. This is the result
// of the detector's scan which can then be used to generate a
// report.
findings []report.Finding
// prefilter is a ahocorasick struct used for doing efficient string
// matching given a set of words (keywords from the rules in the config)
prefilter ahocorasick.Trie
// a list of known findings that should be ignored
baseline []report.Finding
// path to baseline
baselinePath string
// gitleaksIgnore
gitleaksIgnore map[string]struct{}
// Sema (https://github.com/fatih/semgroup) controls the concurrency
Sema *semgroup.Group
// report-related settings.
ReportPath string
Reporter report.Reporter
TotalBytes atomic.Uint64
}
// Fragment contains the data to be scanned
type Fragment struct {
// Raw is the raw content of the fragment
Raw string
Bytes []byte
// FilePath is the path to the file, if applicable.
// The path separator MUST be normalized to `/`.
FilePath string
SymlinkFile string
// WindowsFilePath is the path with the original separator.
// This provides a backwards-compatible solution to https://github.com/gitleaks/gitleaks/issues/1565.
WindowsFilePath string `json:"-"` // TODO: remove this in v9.
// CommitSHA is the SHA of the commit if applicable
CommitSHA string
// newlineIndices is a list of indices of newlines in the raw content.
// This is used to calculate the line location of a finding
newlineIndices [][]int
}
// NewDetector creates a new detector with the given config
func NewDetector(cfg config.Config) *Detector {
return &Detector{
commitMap: make(map[string]bool),
gitleaksIgnore: make(map[string]struct{}),
findingMutex: &sync.Mutex{},
findings: make([]report.Finding, 0),
Config: cfg,
prefilter: *ahocorasick.NewTrieBuilder().AddStrings(maps.Keys(cfg.Keywords)).Build(),
Sema: semgroup.NewGroup(context.Background(), 40),
}
}
// NewDetectorDefaultConfig creates a new detector with the default config
func NewDetectorDefaultConfig() (*Detector, error) {
viper.SetConfigType("toml")
err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
if err != nil {
return nil, err
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
return nil, err
}
cfg, err := vc.Translate()
if err != nil {
return nil, err
}
return NewDetector(cfg), nil
}
func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
logging.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
file, err := os.Open(gitleaksIgnorePath)
if err != nil {
return err
}
defer func() {
// https://github.com/securego/gosec/issues/512
if err := file.Close(); err != nil {
logging.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
}
}()
scanner := bufio.NewScanner(file)
replacer := strings.NewReplacer("\\", "/")
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip lines that start with a comment
if line == "" || strings.HasPrefix(line, "#") {
continue
}
// Normalize the path.
// TODO: Make this a breaking change in v9.
s := strings.Split(line, ":")
switch len(s) {
case 3:
// Global fingerprint.
// `file:rule-id:start-line`
s[0] = replacer.Replace(s[0])
case 4:
// Commit fingerprint.
// `commit:file:rule-id:start-line`
s[1] = replacer.Replace(s[1])
default:
logging.Warn().Str("fingerprint", line).Msg("Invalid .gitleaksignore entry")
}
d.gitleaksIgnore[strings.Join(s, ":")] = struct{}{}
}
return nil
}
// DetectBytes scans the given bytes and returns a list of findings
func (d *Detector) DetectBytes(content []byte) []report.Finding {
return d.DetectString(string(content))
}
// DetectString scans the given string and returns a list of findings
func (d *Detector) DetectString(content string) []report.Finding {
return d.Detect(Fragment{
Raw: content,
})
}
// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
if fragment.Bytes == nil {
d.TotalBytes.Add(uint64(len(fragment.Raw)))
}
d.TotalBytes.Add(uint64(len(fragment.Bytes)))
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if filepath is allowed
if fragment.FilePath != "" {
// is the path our config or baseline file?
if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) {
logging.Trace().Msg("skipping file: matches config or baseline path")
return findings
}
}
// check if commit or filepath is allowed.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, d.Config.Allowlists); isAllowed {
event.Msg("skipping file: global allowlist")
return findings
}
// add newline indices for location calculation in detectRule
fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)
// setup variables to handle different decoding passes
currentRaw := fragment.Raw
encodedSegments := []EncodedSegment{}
currentDecodeDepth := 0
decoder := NewDecoder()
for {
// build keyword map for prefiltering rules
keywords := make(map[string]bool)
normalizedRaw := strings.ToLower(currentRaw)
matches := d.prefilter.MatchString(normalizedRaw)
for _, m := range matches {
keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
}
for _, rule := range d.Config.Rules {
if len(rule.Keywords) == 0 {
// if no keywords are associated with the rule always scan the
// fragment using the rule
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
continue
}
// check if keywords are in the fragment
for _, k := range rule.Keywords {
if _, ok := keywords[strings.ToLower(k)]; ok {
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
break
}
}
}
// increment the depth by 1 as we start our decoding pass
currentDecodeDepth++
// stop the loop if we've hit our max decoding depth
if currentDecodeDepth > d.MaxDecodeDepth {
break
}
// decode the currentRaw for the next pass
currentRaw, encodedSegments = decoder.decode(currentRaw, encodedSegments)
// stop the loop when there's nothing else to decode
if len(encodedSegments) == 0 {
break
}
}
return filter(findings, d.Redact)
}
// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rule, encodedSegments []EncodedSegment) []report.Finding {
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("rule-id", r.RuleID).Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if commit or file is allowed for this rule.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, r.Allowlists); isAllowed {
event.Msg("skipping file: rule allowlist")
return findings
}
if r.Path != nil {
if r.Regex == nil && len(encodedSegments) == 0 {
// Path _only_ rule
if r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath)) {
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Match: fmt.Sprintf("file detected: %s", fragment.FilePath),
Tags: r.Tags,
}
return append(findings, finding)
}
} else {
// if path is set _and_ a regex is set, then we need to check both
// so if the path does not match, then we should return early and not
// consider the regex
if !(r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath))) {
return findings
}
}
}
// if path only rule, skip content checks
if r.Regex == nil {
return findings
}
// if the flag is configured and the raw data size is bigger than the limit
if d.MaxTargetMegaBytes > 0 {
rawLength := len(currentRaw) / 1000000
if rawLength > d.MaxTargetMegaBytes {
logger.Debug().
Int("size", rawLength).
Int("max-size", d.MaxTargetMegaBytes).
Msg("skipping fragment: size")
return findings
}
}
// use currentRaw instead of fragment.Raw since this represents the current
// decoding pass on the text
for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
// Extract secret from match
secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
// For any meta data from decoding
var metaTags []string
currentLine := ""
// Check if the decoded portions of the segment overlap with the match
// to see if it's potentially a new match
if len(encodedSegments) > 0 {
if segment := segmentWithDecodedOverlap(encodedSegments, matchIndex[0], matchIndex[1]); segment != nil {
matchIndex = segment.adjustMatchIndex(matchIndex)
metaTags = append(metaTags, segment.tags()...)
currentLine = segment.currentLine(currentRaw)
} else {
// This item has already been added to a finding
continue
}
} else {
// Fixes: https://github.com/gitleaks/gitleaks/issues/1352
// removes the following line that was incorrectly captured because the regex expression matched the trailing '\n'
matchIndex[1] = matchIndex[0] + len(secret)
}
// determine location of match. Note that the location
// in the finding will be the line/column numbers of the _match_
// not the _secret_, which will be different if the secretGroup
// value is set for this rule
loc := location(fragment, matchIndex)
if matchIndex[1] > loc.endLineIndex {
loc.endLineIndex = matchIndex[1]
}
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
StartLine: loc.startLine,
EndLine: loc.endLine,
StartColumn: loc.startColumn,
EndColumn: loc.endColumn,
Line: fragment.Raw[loc.startLineIndex:loc.endLineIndex],
Match: secret,
Secret: secret,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Tags: append(r.Tags, metaTags...),
}
if !d.IgnoreGitleaksAllow && strings.Contains(finding.Line, gitleaksAllowSignature) {
logger.Trace().
Str("finding", finding.Secret).
Msg("skipping finding: 'gitleaks:allow' signature")
continue
}
if currentLine == "" {
currentLine = finding.Line
}
// Set the value of |secret|, if the pattern contains at least one capture group.
// (The first element is the full match, hence we check >= 2.)
groups := r.Regex.FindStringSubmatch(finding.Secret)
if len(groups) >= 2 {
if r.SecretGroup > 0 {
if len(groups) <= r.SecretGroup {
// Config validation should prevent this
continue
}
finding.Secret = groups[r.SecretGroup]
} else {
// If |secretGroup| is not set, we will use the first suitable capture group.
for _, s := range groups[1:] {
if len(s) > 0 {
finding.Secret = s
break
}
}
}
}
// check entropy
entropy := shannonEntropy(finding.Secret)
finding.Entropy = float32(entropy)
if r.Entropy != 0.0 {
// entropy is too low, skip this finding
if entropy <= r.Entropy {
logger.Trace().
Str("finding", finding.Secret).
Float32("entropy", finding.Entropy).
Msg("skipping finding: low entropy")
continue
}
}
// check if the result matches any of the global allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, d.Config.Allowlists); isAllowed {
event.Msg("skipping finding: global allowlist")
continue
}
// check if the result matches any of the rule allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, r.Allowlists); isAllowed {
event.Msg("skipping finding: rule allowlist")
continue
}
findings = append(findings, finding)
}
return findings
}
// AddFinding synchronously adds a finding to the findings slice
func (d *Detector) AddFinding(finding report.Finding) {
globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
if finding.Commit != "" {
finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
} else {
finding.Fingerprint = globalFingerprint
}
// check if we should ignore this finding
logger := logging.With().Str("finding", finding.Secret).Logger()
if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
logger.Debug().
Str("fingerprint", globalFingerprint).
Msg("skipping finding: global fingerprint")
return
} else if finding.Commit != "" {
// Awkward nested if because I'm not sure how to chain these two conditions.
if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: fingerprint")
return
}
}
if d.baseline != nil && !IsNew(finding, d.Redact, d.baseline) {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: baseline")
return
}
d.findingMutex.Lock()
d.findings = append(d.findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
d.findingMutex.Unlock()
}
// Findings returns the findings added to the detector
func (d *Detector) Findings() []report.Finding {
return d.findings
}
// AddCommit synchronously adds a commit to the commit slice
func (d *Detector) addCommit(commit string) {
d.commitMap[commit] = true
}
// checkCommitOrPathAllowed evaluates |fragment| against all provided |allowlists|.
//
// If the match condition is "OR", only commit and path are checked.
// Otherwise, if regexes or stopwords are defined this will fail.
func checkCommitOrPathAllowed(
logger zerolog.Logger,
fragment Fragment,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
if fragment.FilePath == "" && fragment.CommitSHA == "" {
return false, nil
}
for _, a := range allowlists {
var (
isAllowed bool
allowlistChecks []bool
commitAllowed, _ = a.CommitAllowed(fragment.CommitSHA)
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
if len(a.Commits) > 0 {
allowlistChecks = append(allowlistChecks, commitAllowed)
}
if len(a.Paths) > 0 {
allowlistChecks = append(allowlistChecks, pathAllowed)
}
// These will be checked later.
if len(a.Regexes) > 0 {
continue
}
if len(a.StopWords) > 0 {
continue
}
isAllowed = allTrue(allowlistChecks)
} else {
isAllowed = commitAllowed || pathAllowed
}
if isAllowed {
event := logger.Trace().Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Bool("allowed-commit", commitAllowed)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
return true, event
}
}
return false, nil
}
// checkFindingAllowed evaluates |finding| against all provided |allowlists|.
//
// If the match condition is "OR", only regex and stopwords are run. (Commit and path should be handled separately).
// Otherwise, all conditions are checked.
//
// TODO: The method signature is awkward. I can't think of a better way to log helpful info.
func checkFindingAllowed(
logger zerolog.Logger,
finding report.Finding,
fragment Fragment,
currentLine string,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
for _, a := range allowlists {
allowlistTarget := finding.Secret
switch a.RegexTarget {
case "match":
allowlistTarget = finding.Match
case "line":
allowlistTarget = currentLine
}
var (
checks []bool
isAllowed bool
commitAllowed bool
commit string
pathAllowed bool
regexAllowed = a.RegexAllowed(allowlistTarget)
containsStopword, word = a.ContainsStopWord(finding.Secret)
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
// Determine applicable checks.
if len(a.Commits) > 0 {
commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
checks = append(checks, commitAllowed)
}
if len(a.Paths) > 0 {
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
checks = append(checks, pathAllowed)
}
if len(a.Regexes) > 0 {
checks = append(checks, regexAllowed)
}
if len(a.StopWords) > 0 {
checks = append(checks, containsStopword)
}
isAllowed = allTrue(checks)
} else {
isAllowed = regexAllowed || containsStopword
}
if isAllowed {
event := logger.Trace().
Str("finding", finding.Secret).
Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Str("allowed-commit", commit)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
if regexAllowed {
event.Bool("allowed-regex", regexAllowed)
}
if containsStopword {
event.Str("allowed-stopword", word)
}
return true, event
}
}
return false, nil
}
func allTrue(bools []bool) bool {
for _, check := range bools {
if !check {
return false
}
}
return true
}
func fileExists(fileName string) bool {
// check for a .infisicalignore file
info, err := os.Stat(fileName)
if err != nil && !os.IsNotExist(err) {
return false
}
if info != nil && err == nil {
if !info.IsDir() {
return true
}
}
return false
}
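
End to end, the exported surface above is enough to run a scan over an in-memory string. A minimal sketch using the embedded default config; the sample input is illustrative and deliberately not a real credential, and no particular rule is guaranteed to fire on it:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
)

func main() {
	d, err := detect.NewDetectorDefaultConfig()
	if err != nil {
		panic(err)
	}
	// DetectString wraps the input in a Fragment and runs every configured rule.
	findings := d.DetectString(`aws_secret_access_key = "AKIAIOSFODNN7EXAMPLE"`)
	for _, f := range findings {
		fmt.Printf("%s at line %d: %s\n", f.RuleID, f.StartLine, f.Secret)
	}
}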

View File

@@ -1,225 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/h2non/filetype"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
const maxPeekSize = 25 * 1_000 // 25kb
func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
for pa := range paths {
d.Sema.Go(func() error {
logger := logging.With().Str("path", pa.Path).Logger()
logger.Trace().Msg("Scanning path")
f, err := os.Open(pa.Path)
if err != nil {
if os.IsPermission(err) {
logger.Warn().Msg("Skipping file: permission denied")
return nil
}
return err
}
defer func() {
_ = f.Close()
}()
// Get file size
fileInfo, err := f.Stat()
if err != nil {
return err
}
fileSize := fileInfo.Size()
if d.MaxTargetMegaBytes > 0 {
rawLength := fileSize / 1000000
if rawLength > int64(d.MaxTargetMegaBytes) {
logger.Debug().
Int64("size", rawLength).
Msg("Skipping file: exceeds --max-target-megabytes")
return nil
}
}
var (
// Buffer to hold file chunks
reader = bufio.NewReaderSize(f, chunkSize)
buf = make([]byte, chunkSize)
totalLines = 0
)
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Only check the filetype at the start of file.
if totalLines == 0 {
// TODO: could other optimizations be introduced here?
if mimetype, err := filetype.Match(buf[:n]); err != nil {
return nil
} else if mimetype.MIME.Type == "application" {
return nil // skip binary files
}
}
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return readErr
}
// Count the number of newlines in this chunk
chunk := peekBuf.String()
linesInChunk := strings.Count(chunk, "\n")
totalLines += linesInChunk
fragment := Fragment{
Raw: chunk,
Bytes: peekBuf.Bytes(),
}
if pa.Symlink != "" {
fragment.SymlinkFile = pa.Symlink
}
if isWindows {
fragment.FilePath = filepath.ToSlash(pa.Path)
fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
fragment.WindowsFilePath = pa.Path
} else {
fragment.FilePath = pa.Path
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
// need to add 1 since line counting starts at 1
finding.StartLine += (totalLines - linesInChunk) + 1
finding.EndLine += (totalLines - linesInChunk) + 1
d.AddFinding(finding)
}
if timer != nil {
timer.Stop()
timer = nil
}
}
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
})
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
return d.findings, nil
}
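// Illustrative sketch (not part of the original file, assumes "fmt" is imported):
// wiring sources.DirectoryTargets into DetectFiles, assuming `d` is an
// already-configured *Detector whose Sema field is the shared worker group.
func exampleScanDirectory(d *Detector, root string) error {
    targets, err := sources.DirectoryTargets(root, d.Sema, true, d.Config.Allowlists)
    if err != nil {
        return err
    }
    findings, err := d.DetectFiles(targets)
    if err != nil {
        return err
    }
    fmt.Printf("%d findings\n", len(findings))
    return nil
}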
// readUntilSafeBoundary consumes |r| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
if peekBuf.Len() == 0 {
return nil
}
// Does the buffer end in consecutive newlines?
var (
data = peekBuf.Bytes()
lastChar = data[len(data)-1]
newlineCount = 0 // Tracks consecutive newlines
)
if isWhitespace(lastChar) {
for i := len(data) - 1; i >= 0; i-- {
lastChar = data[i]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
return nil
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
break
}
}
}
// If not, read ahead until we (hopefully) find some.
newlineCount = 0
for {
data = peekBuf.Bytes()
// Check if the last character is a newline.
lastChar = data[len(data)-1]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
break
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
newlineCount = 0 // Reset if a non-newline character is found
}
// Stop growing the buffer once it reaches maxPeekSize
if (peekBuf.Len() - n) >= maxPeekSize {
break
}
// Read additional data into a temporary buffer
b, err := r.ReadByte()
if err != nil {
if err == io.EOF {
break
}
return err
}
peekBuf.WriteByte(b)
}
return nil
}
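// Illustrative sketch (not part of the original file, assumes "fmt" is imported):
// a minimal demonstration of the boundary behavior. The first Read deliberately
// stops mid-line; readUntilSafeBoundary then keeps appending bytes until it sees
// two consecutive '\n' characters (or hits maxPeekSize / EOF), so the chunk ends
// cleanly after "line two.\n\n" instead of splitting "line three".
func exampleSafeBoundary() {
    input := "line one.\nline two.\n\nline three."
    r := bufio.NewReader(strings.NewReader(input))

    buf := make([]byte, 8) // deliberately tiny chunk to force a mid-line split
    n, _ := r.Read(buf)

    peekBuf := bytes.NewBuffer(buf[:n])
    if err := readUntilSafeBoundary(r, n, maxPeekSize, peekBuf); err != nil {
        panic(err)
    }
    fmt.Printf("%q\n", peekBuf.String()) // "line one.\nline two.\n\n"
}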

View File

@@ -1,216 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"errors"
"fmt"
"net/url"
"os/exec"
"regexp"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
defer cmd.Wait()
var (
diffFilesCh = cmd.DiffFilesCh()
errCh = cmd.ErrCh()
)
// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
for diffFilesCh != nil || errCh != nil {
select {
case gitdiffFile, open := <-diffFilesCh:
if !open {
diffFilesCh = nil
break
}
// skip binary files
if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
continue
}
// Check if commit is allowed
commitSHA := ""
if gitdiffFile.PatchHeader != nil {
commitSHA = gitdiffFile.PatchHeader.SHA
for _, a := range d.Config.Allowlists {
if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
continue
}
}
}
d.addCommit(commitSHA)
d.Sema.Go(func() error {
for _, textFragment := range gitdiffFile.TextFragments {
if textFragment == nil {
return nil
}
fragment := Fragment{
Raw: textFragment.Raw(gitdiff.OpAdd),
CommitSHA: commitSHA,
FilePath: gitdiffFile.NewName,
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logging.Debug().
Str("commit", commitSHA[:7]).
Str("path", fragment.FilePath).
Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
}
if timer != nil {
timer.Stop()
timer = nil
}
}
return nil
})
case err, open := <-errCh:
if !open {
errCh = nil
break
}
return d.findings, err
}
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
return d.findings, nil
}
type RemoteInfo struct {
Platform scm.Platform
Url string
}
func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
if platform == scm.NoPlatform {
return &RemoteInfo{Platform: platform}
}
remoteUrl, err := getRemoteUrl(source)
if err != nil {
if strings.Contains(err.Error(), "No remote configured") {
logging.Debug().Msg("skipping finding links: repository has no configured remote.")
platform = scm.NoPlatform
} else {
logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
}
goto End
}
if platform == scm.UnknownPlatform {
platform = platformFromHost(remoteUrl)
if platform == scm.UnknownPlatform {
logging.Info().
Str("host", remoteUrl.Hostname()).
Msg("Unknown SCM platform. Use --platform to include links in findings.")
} else {
logging.Debug().
Str("host", remoteUrl.Hostname()).
Str("platform", platform.String()).
Msg("SCM platform parsed from host")
}
}
End:
var rUrl string
if remoteUrl != nil {
rUrl = remoteUrl.String()
}
return &RemoteInfo{
Platform: platform,
Url: rUrl,
}
}
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)
func getRemoteUrl(source string) (*url.URL, error) {
// This will return the first remote — typically, "origin".
cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
if source != "." {
cmd.Dir = source
}
stdout, err := cmd.Output()
if err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
}
return nil, err
}
remoteUrl := string(bytes.TrimSpace(stdout))
if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
}
remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
parsedUrl, err := url.Parse(remoteUrl)
if err != nil {
return nil, fmt.Errorf("unable to parse remote URL: %w", err)
}
// Remove any user info.
parsedUrl.User = nil
return parsedUrl, nil
}
func platformFromHost(u *url.URL) scm.Platform {
switch strings.ToLower(u.Hostname()) {
case "github.com":
return scm.GitHubPlatform
case "gitlab.com":
return scm.GitLabPlatform
case "dev.azure.com", "visualstudio.com":
return scm.AzureDevOpsPlatform
case "bitbucket.org":
return scm.BitBucketPlatform
default:
return scm.UnknownPlatform
}
}
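// Illustrative sketch (not part of the original file): how an SSH-style remote is
// normalized and mapped to a platform. The rewrite mirrors the sshUrlpat logic in
// getRemoteUrl above; the exact string printed for the platform depends on
// scm.Platform's String method.
func exampleRemoteNormalization() {
    remote := "git@github.com:Infisical/infisical.git"
    if m := sshUrlpat.FindStringSubmatch(remote); m != nil {
        remote = fmt.Sprintf("https://%s/%s", m[1], m[2])
    }
    u, err := url.Parse(strings.TrimSuffix(remote, ".git"))
    if err != nil {
        panic(err)
    }
    fmt.Println(u.String())                   // https://github.com/Infisical/infisical
    fmt.Println(platformFromHost(u).String()) // the GitHub platform
}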

View File

@@ -1,102 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
// Location represents a location in a file
type Location struct {
startLine int
endLine int
startColumn int
endColumn int
startLineIndex int
endLineIndex int
}
func location(fragment Fragment, matchIndex []int) Location {
var (
prevNewLine int
location Location
lineSet bool
_lineNum int
)
start := matchIndex[0]
end := matchIndex[1]
// default startLineIndex to 0
location.startLineIndex = 0
// Fixes: https://github.com/zricethezav/gitleaks/issues/1037
// When a fragment does NOT have any newlines, a default "newline"
// will be counted to make the subsequent location calculation logic work
// for fragments with no newlines.
if len(fragment.newlineIndices) == 0 {
fragment.newlineIndices = [][]int{
{len(fragment.Raw), len(fragment.Raw) + 1},
}
}
for lineNum, pair := range fragment.newlineIndices {
_lineNum = lineNum
newLineByteIndex := pair[0]
if prevNewLine <= start && start < newLineByteIndex {
lineSet = true
location.startLine = lineNum
location.endLine = lineNum
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.startLineIndex = prevNewLine
location.endLineIndex = newLineByteIndex
}
if prevNewLine < end && end <= newLineByteIndex {
location.endLine = lineNum
location.endColumn = (end - prevNewLine)
location.endLineIndex = newLineByteIndex
}
prevNewLine = pair[0]
}
if !lineSet {
// if lines never get set then that means the secret is most likely
// on the last line of the diff output and the diff output does not have
// a newline
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.endColumn = (end - prevNewLine)
location.startLine = _lineNum + 1
location.endLine = _lineNum + 1
// search for new line byte index
i := 0
for end+i < len(fragment.Raw) {
if fragment.Raw[end+i] == '\n' {
break
}
if fragment.Raw[end+i] == '\r' {
break
}
i++
}
location.endLineIndex = end + i
}
return location
}

View File

@@ -1,72 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package logging
import (
"os"
"github.com/rs/zerolog"
)
var Logger zerolog.Logger
func init() {
// send all logs to stdout
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
Level(zerolog.InfoLevel).
With().Timestamp().Logger()
}
func With() zerolog.Context {
return Logger.With()
}
func Trace() *zerolog.Event {
return Logger.Trace()
}
func Debug() *zerolog.Event {
return Logger.Debug()
}
func Info() *zerolog.Event {
return Logger.Info()
}
func Warn() *zerolog.Event {
return Logger.Warn()
}
func Error() *zerolog.Event {
return Logger.Error()
}
func Err(err error) *zerolog.Event {
return Logger.Err(err)
}
func Fatal() *zerolog.Event {
return Logger.Fatal()
}
func Panic() *zerolog.Event {
return Logger.Panic()
}

View File

@@ -1,149 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"errors"
"io"
"github.com/Infisical/infisical-merge/detect/report"
)
// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
findings := []report.Finding{}
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return findings, readErr
}
fragment := Fragment{
Raw: peekBuf.String(),
}
for _, finding := range d.Detect(fragment) {
findings = append(findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if err == io.EOF {
break
}
return findings, err
}
}
return findings, nil
}
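// Illustrative sketch (not part of the original file, assumes "fmt" is imported):
// scanning an arbitrary stream, assuming `d` is an already-configured *Detector.
// The second argument is the read-buffer size in KB.
func exampleDetectReader(d *Detector, r io.Reader) error {
    findings, err := d.DetectReader(r, 10)
    if err != nil {
        return err
    }
    for _, f := range findings {
        fmt.Printf("%s (line %d): %s\n", f.RuleID, f.StartLine, f.Description)
    }
    return nil
}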
// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
// // Assume detector is an instance of *Detector and myReader implements io.Reader.
// findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
// // Process findings as they arrive.
// for finding := range findingsCh {
// fmt.Printf("Found secret: %+v\n", finding)
// }
//
// // After the findings channel is closed, check the final error.
// if err := <-errCh; err != nil {
// log.Fatalf("StreamDetectReader encountered an error: %v", err)
// } else {
// fmt.Println("Scanning completed successfully.")
// }
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
findingsCh := make(chan report.Finding, 1)
errCh := make(chan error, 1)
go func() {
defer close(findingsCh)
defer close(errCh)
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
for {
n, err := reader.Read(buf)
if n > 0 {
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
errCh <- readErr
return
}
fragment := Fragment{Raw: peekBuf.String()}
for _, finding := range d.Detect(fragment) {
findingsCh <- finding
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
errCh <- nil
return
}
errCh <- err
return
}
}
}()
return findingsCh, errCh
}

View File

@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build !gore2regex
package regexp
import (
re "regexp"
)
const Version = "stdlib"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}

View File

@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build gore2regex
package regexp
import (
re "github.com/wasilibs/go-re2"
)
const Version = "github.com/wasilibs/go-re2"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}

View File

@@ -1,26 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
const version = "v8.0.0"
const driver = "gitleaks"

View File

@@ -1,100 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/csv"
"io"
"strconv"
"strings"
)
type CsvReporter struct {
}
var _ Reporter = (*CsvReporter)(nil)
func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
if len(findings) == 0 {
return nil
}
var (
cw = csv.NewWriter(w)
err error
)
columns := []string{"RuleID",
"Commit",
"File",
"SymlinkFile",
"Secret",
"Match",
"StartLine",
"EndLine",
"StartColumn",
"EndColumn",
"Author",
"Message",
"Date",
"Email",
"Fingerprint",
"Tags",
}
// A miserable attempt at "omitempty" so tests don't yell at me.
if findings[0].Link != "" {
columns = append(columns, "Link")
}
if err = cw.Write(columns); err != nil {
return err
}
for _, f := range findings {
row := []string{f.RuleID,
f.Commit,
f.File,
f.SymlinkFile,
f.Secret,
f.Match,
strconv.Itoa(f.StartLine),
strconv.Itoa(f.EndLine),
strconv.Itoa(f.StartColumn),
strconv.Itoa(f.EndColumn),
f.Author,
f.Message,
f.Date,
f.Email,
f.Fingerprint,
strings.Join(f.Tags, " "),
}
if findings[0].Link != "" {
row = append(row, f.Link)
}
if err = cw.Write(row); err != nil {
return err
}
}
cw.Flush()
return cw.Error()
}

View File

@@ -1,92 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"math"
"strings"
)
// Finding contains information about strings that
// have been matched by a detection rule.
type Finding struct {
// RuleID is the identifier of the rule that was matched
RuleID string
Description string
StartLine int
EndLine int
StartColumn int
EndColumn int
Line string `json:"-"`
Match string
// Secret contains the full content of what the rule
// identified as the secret.
Secret string
// File is the name of the file containing the finding
File string
SymlinkFile string
Commit string
Link string `json:",omitempty"`
// Entropy is the Shannon entropy of Secret
Entropy float32
Author string
Email string
Date string
Message string
Tags []string
// unique identifier
Fingerprint string
}
// Redact removes sensitive information from a finding.
func (f *Finding) Redact(percent uint) {
secret := maskSecret(f.Secret, percent)
if percent >= 100 {
secret = "REDACTED"
}
f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
f.Secret = secret
}
func maskSecret(secret string, percent uint) string {
if percent > 100 {
percent = 100
}
length := float64(len(secret))
if length <= 0 {
return secret
}
prc := float64(100 - percent)
lth := int64(math.RoundToEven(length * prc / float64(100)))
return secret[:lth] + "..."
}
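// Illustrative sketch (not part of the original file, assumes "fmt" is imported):
// redacting a 20-character secret with percent=50. maskSecret keeps the first
// len*(100-percent)/100 characters (rounded to even), so half of the secret
// survives, followed by "..."; percent>=100 swaps in the literal "REDACTED".
func exampleRedact() {
    f := Finding{
        Line:   `token = "0123456789abcdefghij"`,
        Match:  `token = "0123456789abcdefghij"`,
        Secret: "0123456789abcdefghij",
    }
    f.Redact(50)
    fmt.Println(f.Secret) // 0123456789...
    fmt.Println(f.Line)   // token = "0123456789..."
}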

View File

@@ -1,39 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"io"
)
type JsonReporter struct {
}
var _ Reporter = (*JsonReporter)(nil)
func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(findings)
}

View File

@@ -1,129 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"strconv"
)
type JunitReporter struct {
}
var _ Reporter = (*JunitReporter)(nil)
func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
testSuites := TestSuites{
TestSuites: getTestSuites(findings),
}
if _, err := io.WriteString(w, xml.Header); err != nil {
return err
}
encoder := xml.NewEncoder(w)
encoder.Indent("", "\t")
return encoder.Encode(testSuites)
}
func getTestSuites(findings []Finding) []TestSuite {
return []TestSuite{
{
Failures: strconv.Itoa(len(findings)),
Name: "gitleaks",
Tests: strconv.Itoa(len(findings)),
TestCases: getTestCases(findings),
Time: "",
},
}
}
func getTestCases(findings []Finding) []TestCase {
testCases := []TestCase{}
for _, f := range findings {
testCase := TestCase{
Classname: f.Description,
Failure: getFailure(f),
File: f.File,
Name: getMessage(f),
Time: "",
}
testCases = append(testCases, testCase)
}
return testCases
}
func getFailure(f Finding) Failure {
return Failure{
Data: getData(f),
Message: getMessage(f),
Type: f.Description,
}
}
func getData(f Finding) string {
data, err := json.MarshalIndent(f, "", "\t")
if err != nil {
fmt.Println(err)
return ""
}
return string(data)
}
func getMessage(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
}
return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
TestSuites []TestSuite
}
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Failures string `xml:"failures,attr"`
Name string `xml:"name,attr"`
Tests string `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Time string `xml:"time,attr"`
}
type TestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Failure Failure `xml:"failure"`
File string `xml:"file,attr"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
}
type Failure struct {
XMLName xml.Name `xml:"failure"`
Data string `xml:",chardata"`
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
}

View File

@@ -1,38 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"io"
)
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
StdoutReportPath = "-"
)
type Reporter interface {
Write(w io.WriteCloser, findings []Finding) error
}
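// Illustrative sketch (not part of the original file): every report format in this
// package implements Reporter, so emitting a report is just picking a concrete type
// and handing it an io.WriteCloser (an *os.File such as os.Stdout works). The
// concrete types below live in sibling files of this package.
func writeReport(w io.WriteCloser, findings []Finding, format string) error {
    var r Reporter
    switch format {
    case "csv":
        r = &CsvReporter{}
    case "sarif":
        r = &SarifReporter{}
    case "junit":
        r = &JunitReporter{}
    default:
        r = &JsonReporter{}
    }
    return r.Write(w, findings)
}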

View File

@@ -1,239 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"fmt"
"io"
"github.com/Infisical/infisical-merge/detect/config"
)
type SarifReporter struct {
OrderedRules []config.Rule
}
var _ Reporter = (*SarifReporter)(nil)
func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
sarif := Sarif{
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
Version: "2.1.0",
Runs: r.getRuns(findings),
}
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(sarif)
}
func (r *SarifReporter) getRuns(findings []Finding) []Runs {
return []Runs{
{
Tool: r.getTool(),
Results: getResults(findings),
},
}
}
func (r *SarifReporter) getTool() Tool {
tool := Tool{
Driver: Driver{
Name: driver,
SemanticVersion: version,
InformationUri: "https://github.com/gitleaks/gitleaks",
Rules: r.getRules(),
},
}
// if this tool has no rules, ensure that it is represented as [] instead of null/nil
if hasEmptyRules(tool) {
tool.Driver.Rules = make([]Rules, 0)
}
return tool
}
func hasEmptyRules(tool Tool) bool {
return len(tool.Driver.Rules) == 0
}
func (r *SarifReporter) getRules() []Rules {
// TODO for _, rule := range cfg.Rules {
var rules []Rules
for _, rule := range r.OrderedRules {
rules = append(rules, Rules{
ID: rule.RuleID,
Description: ShortDescription{
Text: rule.Description,
},
})
}
return rules
}
func messageText(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected secret for file %s.", f.RuleID, f.File)
}
return fmt.Sprintf("%s has detected secret for file %s at commit %s.", f.RuleID, f.File, f.Commit)
}
func getResults(findings []Finding) []Results {
results := []Results{}
for _, f := range findings {
r := Results{
Message: Message{
Text: messageText(f),
},
RuleId: f.RuleID,
Locations: getLocation(f),
// This information goes in partial fingerprints until revision
// data can be added somewhere else
PartialFingerPrints: PartialFingerPrints{
CommitSha: f.Commit,
Email: f.Email,
CommitMessage: f.Message,
Date: f.Date,
Author: f.Author,
},
Properties: Properties{
Tags: f.Tags,
},
}
results = append(results, r)
}
return results
}
func getLocation(f Finding) []Locations {
uri := f.File
if f.SymlinkFile != "" {
uri = f.SymlinkFile
}
return []Locations{
{
PhysicalLocation: PhysicalLocation{
ArtifactLocation: ArtifactLocation{
URI: uri,
},
Region: Region{
StartLine: f.StartLine,
EndLine: f.EndLine,
StartColumn: f.StartColumn,
EndColumn: f.EndColumn,
Snippet: Snippet{
Text: f.Secret,
},
},
},
},
}
}
type PartialFingerPrints struct {
CommitSha string `json:"commitSha"`
Email string `json:"email"`
Author string `json:"author"`
Date string `json:"date"`
CommitMessage string `json:"commitMessage"`
}
type Sarif struct {
Schema string `json:"$schema"`
Version string `json:"version"`
Runs []Runs `json:"runs"`
}
type ShortDescription struct {
Text string `json:"text"`
}
type FullDescription struct {
Text string `json:"text"`
}
type Rules struct {
ID string `json:"id"`
Description ShortDescription `json:"shortDescription"`
}
type Driver struct {
Name string `json:"name"`
SemanticVersion string `json:"semanticVersion"`
InformationUri string `json:"informationUri"`
Rules []Rules `json:"rules"`
}
type Tool struct {
Driver Driver `json:"driver"`
}
type Message struct {
Text string `json:"text"`
}
type ArtifactLocation struct {
URI string `json:"uri"`
}
type Region struct {
StartLine int `json:"startLine"`
StartColumn int `json:"startColumn"`
EndLine int `json:"endLine"`
EndColumn int `json:"endColumn"`
Snippet Snippet `json:"snippet"`
}
type Snippet struct {
Text string `json:"text"`
}
type PhysicalLocation struct {
ArtifactLocation ArtifactLocation `json:"artifactLocation"`
Region Region `json:"region"`
}
type Locations struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}
type Properties struct {
Tags []string `json:"tags"`
}
type Results struct {
Message Message `json:"message"`
RuleId string `json:"ruleId"`
Locations []Locations `json:"locations"`
PartialFingerPrints `json:"partialFingerprints"`
Properties Properties `json:"properties"`
}
type Runs struct {
Tool Tool `json:"tool"`
Results []Results `json:"results"`
}

View File

@@ -1,68 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"fmt"
"io"
"os"
"text/template"
"github.com/Masterminds/sprig/v3"
)
type TemplateReporter struct {
template *template.Template
}
var _ Reporter = (*TemplateReporter)(nil)
func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
if templatePath == "" {
return nil, fmt.Errorf("template path cannot be empty")
}
file, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("error reading file: %w", err)
}
templateText := string(file)
// TODO: Add helper functions like escaping for JSON, XML, etc.
t := template.New("custom")
t = t.Funcs(sprig.TxtFuncMap())
t, err = t.Parse(templateText)
if err != nil {
return nil, fmt.Errorf("error parsing file: %w", err)
}
return &TemplateReporter{template: t}, nil
}
// writeTemplate renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
if err := t.template.Execute(w, findings); err != nil {
return err
}
return nil
}
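// Illustrative sketch (not part of the original file): rendering findings with a
// user-supplied Go template. "report.tmpl" is only an example path; the template is
// executed with the full []Finding slice and has sprig's helper functions available.
func exampleTemplateReport(findings []Finding, w io.WriteCloser) error {
    r, err := NewTemplateReporter("report.tmpl")
    if err != nil {
        return err
    }
    return r.Write(w, findings)
}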

View File

@@ -1,127 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"io/fs"
"os"
"path/filepath"
"runtime"
"github.com/fatih/semgroup"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
)
type ScanTarget struct {
Path string
Symlink string
}
var isWindows = runtime.GOOS == "windows"
func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
paths := make(chan ScanTarget)
s.Go(func() error {
defer close(paths)
return filepath.Walk(source,
func(path string, fInfo os.FileInfo, err error) error {
logger := logging.With().Str("path", path).Logger()
if err != nil {
if os.IsPermission(err) {
// This seems to only fail on directories at this stage.
logger.Warn().Msg("Skipping directory: permission denied")
return filepath.SkipDir
}
return err
}
// Empty; nothing to do here.
if fInfo.Size() == 0 {
return nil
}
// Unwrap symlinks, if |followSymlinks| is set.
scanTarget := ScanTarget{
Path: path,
}
if fInfo.Mode().Type() == fs.ModeSymlink {
if !followSymlinks {
logger.Debug().Msg("Skipping symlink")
return nil
}
realPath, err := filepath.EvalSymlinks(path)
if err != nil {
return err
}
realPathFileInfo, _ := os.Stat(realPath)
if realPathFileInfo.IsDir() {
logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
return nil
}
scanTarget.Path = realPath
scanTarget.Symlink = path
}
// TODO: Also run this check against the resolved symlink?
var skip bool
for _, a := range allowlists {
skip = a.PathAllowed(path) ||
// TODO: Remove this in v9.
// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
(isWindows && a.PathAllowed(filepath.ToSlash(path)))
if skip {
break
}
}
if fInfo.IsDir() {
// Directory
if skip {
logger.Debug().Msg("Skipping directory due to global allowlist")
return filepath.SkipDir
}
if fInfo.Name() == ".git" {
// Don't scan .git directories.
// TODO: Add this to the config allowlist, instead of hard-coding it.
return filepath.SkipDir
}
} else {
// File
if skip {
logger.Debug().Msg("Skipping file due to global allowlist")
return nil
}
paths <- scanTarget
}
return nil
})
})
return paths, nil
}
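// Illustrative sketch (not part of the original file, assumes "context" and "fmt"
// are imported): walking a directory and draining the resulting channel. The walk
// runs on the semgroup, the channel is closed when the walk finishes, and Wait
// surfaces any walk error afterwards. A nil allowlist slice simply skips nothing.
func exampleDirectoryTargets(root string) error {
    group := semgroup.NewGroup(context.Background(), 4)
    targets, err := DirectoryTargets(root, group, false, nil)
    if err != nil {
        return err
    }
    for t := range targets {
        fmt.Println(t.Path)
    }
    return group.Wait()
}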

View File

@@ -1,211 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"bufio"
"errors"
"io"
"os/exec"
"path/filepath"
"regexp"
"strings"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
)
var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)
// GitCmd helps to work with Git's output.
type GitCmd struct {
cmd *exec.Cmd
diffFilesCh <-chan *gitdiff.File
errCh <-chan error
}
// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed and then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
// Ensure that the user-provided |logOpts| aren't wrapped in quotes.
// https://github.com/gitleaks/gitleaks/issues/1153
userArgs := strings.Split(logOpts, " ")
var quotedOpts []string
for _, element := range userArgs {
if quotedOptPattern.MatchString(element) {
quotedOpts = append(quotedOpts, element)
}
}
if len(quotedOpts) > 0 {
logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
}
args = append(args, userArgs...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed and then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
cmd := exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff",
"--staged", ".")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
return c.diffFilesCh
}
// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
return c.errCh
}
// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
return c.cmd.Wait()
}
// listenForStdErr listens for stderr output from git, logs it, and, if any
// unexpected output was seen, sends an error on errCh before closing it.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
defer close(errCh)
var errEncountered bool
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// Auto packing the repository in background for optimum performance.
// See "git help gc" for manual housekeeping.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") ||
strings.Contains(scanner.Text(),
"See \"git help gc\" for manual housekeeping") ||
strings.Contains(scanner.Text(),
"Auto packing the repository in background for optimum performance") {
logging.Warn().Msg(scanner.Text())
} else {
logging.Error().Msgf("[git] %s", scanner.Text())
errEncountered = true
}
}
if errEncountered {
errCh <- errors.New("stderr is not empty")
return
}
}
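// Illustrative sketch (not part of the original file, assumes "fmt" is imported):
// the consumption pattern the constructors above expect. Drain both channels until
// they close, then let the deferred Wait release the underlying git process.
func exampleGitLog(repo string) error {
    gitCmd, err := NewGitLogCmd(repo, "")
    if err != nil {
        return err
    }
    defer gitCmd.Wait()
    diffFilesCh := gitCmd.DiffFilesCh()
    errCh := gitCmd.ErrCh()
    for diffFilesCh != nil || errCh != nil {
        select {
        case file, open := <-diffFilesCh:
            if !open {
                diffFilesCh = nil
                continue
            }
            fmt.Println(file.NewName)
        case err, open := <-errCh:
            if !open {
                errCh = nil
                continue
            }
            return err
        }
    }
    return nil
}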

View File

@@ -1,280 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
// "encoding/json"
"fmt"
"math"
"path/filepath"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/charmbracelet/lipgloss"
"github.com/gitleaks/go-gitdiff/gitdiff"
)
// augmentGitFinding updates the start and end line numbers of a finding to include the
// delta from the git diff
func augmentGitFinding(remote *RemoteInfo, finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
if !strings.HasPrefix(finding.Match, "file detected") {
finding.StartLine += int(textFragment.NewPosition)
finding.EndLine += int(textFragment.NewPosition)
}
if f.PatchHeader != nil {
finding.Commit = f.PatchHeader.SHA
if f.PatchHeader.Author != nil {
finding.Author = f.PatchHeader.Author.Name
finding.Email = f.PatchHeader.Author.Email
}
finding.Date = f.PatchHeader.AuthorDate.UTC().Format(time.RFC3339)
finding.Message = f.PatchHeader.Message()
// Results from `git diff` shouldn't have a link.
if finding.Commit != "" {
finding.Link = createScmLink(remote.Platform, remote.Url, finding)
}
}
return finding
}
var linkCleaner = strings.NewReplacer(
" ", "%20",
"%", "%25",
)
func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Finding) string {
if scmPlatform == scm.UnknownPlatform || scmPlatform == scm.NoPlatform {
return ""
}
// Clean the path.
var (
filePath = linkCleaner.Replace(finding.File)
ext = strings.ToLower(filepath.Ext(filePath))
)
switch scmPlatform {
case scm.GitHubPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if ext == ".ipynb" || ext == ".md" {
link += "?plain=1"
}
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-L%d", finding.EndLine)
}
return link
case scm.GitLabPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-%d", finding.EndLine)
}
return link
case scm.AzureDevOpsPlatform:
link := fmt.Sprintf("%s/commit/%s?path=/%s", remoteUrl, finding.Commit, filePath)
// Add line information if applicable
if finding.StartLine != 0 {
link += fmt.Sprintf("&line=%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("&lineEnd=%d", finding.EndLine)
}
// This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
return link
case scm.BitBucketPlatform:
link := fmt.Sprintf("%s/src/%s/%s", remoteUrl, finding.Commit, filePath)
if finding.StartLine != 0 {
link += fmt.Sprintf("#lines-%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf(":%d", finding.EndLine)
}
return link
default:
// This should never happen.
return ""
}
}
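// Illustrative sketch (not part of the original file): the GitHub-style link produced
// for a finding spanning lines 10-12 of config/prod.env at commit abc123. The remote
// URL and commit below are made-up values.
// Result: https://example.com/org/repo/blob/abc123/config/prod.env#L10-L12
func exampleScmLink() string {
    f := report.Finding{
        File:      "config/prod.env",
        Commit:    "abc123",
        StartLine: 10,
        EndLine:   12,
    }
    return createScmLink(scm.GitHubPlatform, "https://example.com/org/repo", f)
}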
// shannonEntropy calculates the entropy of data using the formula defined here:
// https://en.wiktionary.org/wiki/Shannon_entropy
// Another way to think about this is as the average number of bits needed to encode
// each symbol of the data: the higher the entropy, the more random the data and the
// more bits needed to encode it.
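// For example, shannonEntropy("aaaa") is 0 (a single repeated symbol carries no information),
// while shannonEntropy("abcd") is 2 (four equally likely symbols need two bits each).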
func shannonEntropy(data string) (entropy float64) {
if data == "" {
return 0
}
charCounts := make(map[rune]int)
for _, char := range data {
charCounts[char]++
}
invLength := 1.0 / float64(len(data))
for _, count := range charCounts {
freq := float64(count) * invLength
entropy -= freq * math.Log2(freq)
}
return entropy
}
// filter will dedupe and redact findings
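// When a finding from a "generic" rule overlaps a finding from a more specific rule on the
// same line and commit (and the specific rule's secret contains the generic one), the
// generic finding is dropped so only the more precise result is reported.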
func filter(findings []report.Finding, redact uint) []report.Finding {
var retFindings []report.Finding
for _, f := range findings {
include := true
if strings.Contains(strings.ToLower(f.RuleID), "generic") {
for _, fPrime := range findings {
if f.StartLine == fPrime.StartLine &&
f.Commit == fPrime.Commit &&
f.RuleID != fPrime.RuleID &&
strings.Contains(fPrime.Secret, f.Secret) &&
!strings.Contains(strings.ToLower(fPrime.RuleID), "generic") {
genericMatch := strings.Replace(f.Match, f.Secret, "REDACTED", -1)
betterMatch := strings.Replace(fPrime.Match, fPrime.Secret, "REDACTED", -1)
logging.Trace().Msgf("skipping %s finding (%s), %s rule takes precedence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
include = false
break
}
}
}
if redact > 0 {
f.Redact(redact)
}
if include {
retFindings = append(retFindings, f)
}
}
return retFindings
}
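// printFinding renders a finding to stdout, highlighting the secret within its line of
// context; secrets longer than 100 characters and context longer than 20 characters on
// either side are truncated. Color is skipped when noColor is set or the match cannot be
// located in the line.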
func printFinding(f report.Finding, noColor bool) {
// trim leading and trailing whitespace (including tabs)
f.Line = strings.TrimSpace(f.Line)
f.Secret = strings.TrimSpace(f.Secret)
f.Match = strings.TrimSpace(f.Match)
isFileMatch := strings.HasPrefix(f.Match, "file detected:")
skipColor := noColor
finding := ""
var secret lipgloss.Style
// Matches from filenames do not have a |line| or |secret|
if !isFileMatch {
matchInLineIDX := strings.Index(f.Line, f.Match)
secretInMatchIdx := strings.Index(f.Match, f.Secret)
skipColor = false
if matchInLineIDX == -1 || noColor {
skipColor = true
matchInLineIDX = 0
}
start := f.Line[0:matchInLineIDX]
startMatchIdx := 0
if matchInLineIDX > 20 {
startMatchIdx = matchInLineIDX - 20
start = "..." + f.Line[startMatchIdx:matchInLineIDX]
}
matchBeginning := lipgloss.NewStyle().SetString(f.Match[0:secretInMatchIdx]).Foreground(lipgloss.Color("#f5d445"))
secret = lipgloss.NewStyle().SetString(f.Secret).
Bold(true).
Italic(true).
Foreground(lipgloss.Color("#f05c07"))
matchEnd := lipgloss.NewStyle().SetString(f.Match[secretInMatchIdx+len(f.Secret):]).Foreground(lipgloss.Color("#f5d445"))
lineEndIdx := matchInLineIDX + len(f.Match)
if len(f.Line)-1 <= lineEndIdx {
lineEndIdx = len(f.Line)
}
lineEnd := f.Line[lineEndIdx:]
if len(f.Secret) > 100 {
secret = lipgloss.NewStyle().SetString(f.Secret[0:100] + "...").
Bold(true).
Italic(true).
Foreground(lipgloss.Color("#f05c07"))
}
if len(lineEnd) > 20 {
lineEnd = lineEnd[0:20] + "..."
}
finding = fmt.Sprintf("%s%s%s%s%s\n", strings.TrimPrefix(strings.TrimLeft(start, " "), "\n"), matchBeginning, secret, matchEnd, lineEnd)
}
if skipColor || isFileMatch {
fmt.Printf("%-12s %s\n", "Finding:", f.Match)
fmt.Printf("%-12s %s\n", "Secret:", f.Secret)
} else {
fmt.Printf("%-12s %s", "Finding:", finding)
fmt.Printf("%-12s %s\n", "Secret:", secret)
}
fmt.Printf("%-12s %s\n", "RuleID:", f.RuleID)
fmt.Printf("%-12s %f\n", "Entropy:", f.Entropy)
if f.File == "" {
fmt.Println("")
return
}
if len(f.Tags) > 0 {
fmt.Printf("%-12s %s\n", "Tags:", f.Tags)
}
fmt.Printf("%-12s %s\n", "File:", f.File)
fmt.Printf("%-12s %d\n", "Line:", f.StartLine)
if f.Commit == "" {
fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
fmt.Println("")
return
}
fmt.Printf("%-12s %s\n", "Commit:", f.Commit)
fmt.Printf("%-12s %s\n", "Author:", f.Author)
fmt.Printf("%-12s %s\n", "Email:", f.Email)
fmt.Printf("%-12s %s\n", "Date:", f.Date)
fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
if f.Link != "" {
fmt.Printf("%-12s %s\n", "Link:", f.Link)
}
fmt.Println("")
}
func isWhitespace(ch byte) bool {
return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
}

View File

@@ -1,9 +0,0 @@
FROM alpine
RUN apk add --no-cache tini
## Upgrade OpenSSL libraries to mitigate known vulnerabilities as the current Alpine image has not been patched yet.
RUN apk update && apk upgrade --no-cache libcrypto3 libssl3
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]

View File

@@ -1,183 +0,0 @@
module github.com/Infisical/infisical-merge
go 1.23.0
toolchain go1.23.5
require (
github.com/BobuSumisu/aho-corasick v1.0.3
github.com/Masterminds/sprig/v3 v3.3.0
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
github.com/charmbracelet/lipgloss v0.9.1
github.com/creack/pty v1.1.21
github.com/denisbrodbeck/machineid v1.0.1
github.com/fatih/semgroup v1.2.0
github.com/gitleaks/go-gitdiff v0.9.1
github.com/h2non/filetype v1.1.3
github.com/infisical/go-sdk v0.5.96
github.com/infisical/infisical-kmip v0.3.5
github.com/mattn/go-isatty v0.0.20
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
github.com/muesli/mango-cobra v1.2.0
github.com/muesli/reflow v0.3.0
github.com/muesli/roff v0.1.0
github.com/pion/dtls/v3 v3.0.4
github.com/pion/logging v0.2.3
github.com/pion/turn/v4 v4.0.0
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
github.com/pkg/errors v0.9.1
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a
github.com/quic-go/quic-go v0.50.0
github.com/rs/cors v1.11.0
github.com/rs/zerolog v1.26.1
github.com/spf13/cobra v1.6.1
github.com/spf13/viper v1.8.1
github.com/stretchr/testify v1.10.0
github.com/wasilibs/go-re2 v1.10.0
golang.org/x/crypto v0.36.0
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7
golang.org/x/sys v0.31.0
golang.org/x/term v0.30.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/api v0.31.4
k8s.io/apimachinery v0.31.4
k8s.io/client-go v0.31.4
)
require (
cloud.google.com/go/auth v0.7.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
cloud.google.com/go/compute/metadata v0.4.0 // indirect
cloud.google.com/go/iam v1.1.11 // indirect
dario.cat/mergo v1.0.1 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.3.0 // indirect
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
github.com/aws/aws-sdk-go-v2/config v1.27.18 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect
github.com/aws/smithy-go v1.20.2 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/emicklei/go-restful/v3 v3.11.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/errors v0.20.2 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.20.2 // indirect
github.com/go-openapi/strfmt v0.21.3 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/gnostic-models v0.6.9 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
github.com/gosimple/slug v1.15.0 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/huandu/xstrings v1.5.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/magiconair/properties v1.8.5 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.4.1 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mtibben/percent v0.2.1 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/onsi/ginkgo/v2 v2.22.2 // indirect
github.com/pelletier/go-toml v1.9.3 // indirect
github.com/pion/randutil v0.1.0 // indirect
github.com/pion/stun/v3 v3.0.0 // indirect
github.com/pion/transport/v3 v3.0.7 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/spf13/afero v1.6.0 // indirect
github.com/spf13/cast v1.7.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/subosito/gotenv v1.2.0 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52 // indirect
github.com/wlynxg/anet v0.0.5 // indirect
github.com/x448/float16 v0.8.4 // indirect
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
go.mongodb.org/mongo-driver v1.10.0 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/mock v0.5.0 // indirect
golang.org/x/mod v0.23.0 // indirect
golang.org/x/net v0.38.0 // indirect
golang.org/x/oauth2 v0.27.0 // indirect
golang.org/x/sync v0.12.0 // indirect
golang.org/x/text v0.23.0 // indirect
golang.org/x/time v0.9.0 // indirect
golang.org/x/tools v0.30.0 // indirect
google.golang.org/api v0.188.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b // indirect
google.golang.org/grpc v1.64.1 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/ini.v1 v1.62.0 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff // indirect
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 // indirect
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
sigs.k8s.io/randfill v1.0.0 // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.6.0 // indirect
sigs.k8s.io/yaml v1.4.0 // indirect
)
require (
github.com/fatih/color v1.17.0
github.com/go-resty/resty/v2 v2.16.5
github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/jedib0t/go-pretty v4.3.0+incompatible
github.com/manifoldco/promptui v0.9.0
github.com/spf13/pflag v1.0.5 // indirect
github.com/zalando/go-keyring v0.2.3
)
replace github.com/zalando/go-keyring => github.com/Infisical/go-keyring v1.0.2
replace github.com/pion/turn/v4 => github.com/Infisical/turn/v4 v4.0.1

View File

@@ -1,951 +0,0 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
cloud.google.com/go/auth v0.7.0 h1:kf/x9B3WTbBUHkC+1VS8wwwli9TzhSt0vSTVBmMR8Ts=
cloud.google.com/go/auth v0.7.0/go.mod h1:D+WqdrpcjmiCgWrXmLLxOVq1GACoE36chW6KXoEvuIw=
cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4=
cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
cloud.google.com/go/compute/metadata v0.4.0 h1:vHzJCWaM4g8XIcm8kopr3XmDA4Gy/lblD3EhhSux05c=
cloud.google.com/go/compute/metadata v0.4.0/go.mod h1:SIQh1Kkb4ZJ8zJ874fqVkslA29PRXuleyj6vOzlbK7M=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/iam v1.1.11 h1:0mQ8UKSfdHLut6pH9FM3bI55KWR46ketn0PuXleDyxw=
cloud.google.com/go/iam v1.1.11/go.mod h1:biXoiLWYIKntto2joP+62sd9uW5EpkZmKIvfNcTWlnQ=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BobuSumisu/aho-corasick v1.0.3 h1:uuf+JHwU9CHP2Vx+wAy6jcksJThhJS9ehR8a+4nPE9g=
github.com/BobuSumisu/aho-corasick v1.0.3/go.mod h1:hm4jLcvZKI2vRF2WDU1N4p/jpWtpOzp3nLmi9AzX/XE=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGLmAjMPwCCCo7Jf0W6f9slllCkkv7vyc1yOSg=
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8=
github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
github.com/aws/aws-sdk-go-v2/config v1.27.18 h1:wFvAnwOKKe7QAyIxziwSKjmer9JBMH1vzIL6W+fYuKk=
github.com/aws/aws-sdk-go-v2/config v1.27.18/go.mod h1:0xz6cgdX55+kmppvPm2IaKzIXOheGJhAufacPJaXZ7c=
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c=
github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 h1:dDgptDO9dxeFkXy+tEgVkzSClHZje/6JkPW5aZyEvrQ=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5/go.mod h1:gjvE2KBUgUQhcv89jqxrIxH9GaKs1JbZzWejj/DaHGA=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 h1:gEYM2GSpr4YNWc6hCd5nod4+d4kd9vWIAWrmGuLdlMw=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11/go.mod h1:gVvwPdPNYehHSP9Rs7q27U1EU+3Or2ZpXvzAYJNh63w=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 h1:iXjh3uaH3vsVcnyZX7MqCoCfcyxIrVE9iOQruRaWPrQ=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5/go.mod h1:5ZXesEuy/QcO0WUnt+4sDkxhdXRHTu2yG0uCSH8B6os=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 h1:M/1u4HBpwLuMtjlxuI2y6HoVLzF5e2mfxHCg7ZVMYmk=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12/go.mod h1:kcfd+eTdEi/40FIbLq4Hif3XMXnl5b/+t/KTfLt9xIk=
github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/charmbracelet/lipgloss v0.9.1 h1:PNyd3jvaJbg4jRHKWXnCj1akQm4rh8dbEzN1p/u1KWg=
github.com/charmbracelet/lipgloss v0.9.1/go.mod h1:1mPmG4cxScwUQALAAnacHaigiiHB9Pmr+v1VEawJl6I=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM=
github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI=
github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04=
github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0=
github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
github.com/fatih/semgroup v1.2.0 h1:h/OLXwEM+3NNyAdZEpMiH1OzfplU09i2qXPVThGZvyg=
github.com/fatih/semgroup v1.2.0/go.mod h1:1KAD4iIYfXjE4U13B48VM4z9QUwV5Tt8O4rS879kgm8=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gitleaks/go-gitdiff v0.9.1 h1:ni6z6/3i9ODT685OLCTf+s/ERlWUNWQF4x1pvoNICw0=
github.com/gitleaks/go-gitdiff v0.9.1/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8=
github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtKG7o=
github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63KqpoNbWqVw=
github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 h1:+J3r2e8+RsmN3vKfo75g0YSY61ms37qzPglu4p0sGro=
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.12.5 h1:8gw9KZK8TiVKB6q3zHY3SBzLnrGp6HQjyfYBYGmXdxA=
github.com/googleapis/gax-go/v2 v2.12.5/go.mod h1:BUDKcWo+RaKq5SC9vVYL0wLADa3VcfswbOMMRmB9H3E=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gosimple/slug v1.15.0 h1:wRZHsRrRcs6b0XnxMUBM6WK1U1Vg5B0R7VkIf1Xzobo=
github.com/gosimple/slug v1.15.0/go.mod h1:UiRaFH+GEilHstLUmcBgWcI42viBN7mAb818JrYOeFQ=
github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o=
github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg=
github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/infisical/go-sdk v0.5.96 h1:huky6bQ1Y3oRdPb5MO3Ru868qZaPHUxZ7kP7FPNRn48=
github.com/infisical/go-sdk v0.5.96/go.mod h1:ExjqFLRz7LSpZpGluqDLvFl6dFBLq5LKyLW7GBaMAIs=
github.com/infisical/infisical-kmip v0.3.5 h1:QM3s0e18B+mYv3a9HQNjNAlbwZJBzXq5BAJM2scIeiE=
github.com/infisical/infisical-kmip v0.3.5/go.mod h1:bO1M4YtKyutNg1bREPmlyZspC5duSR7hyQ3lPmLzrIs=
github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo=
github.com/jedib0t/go-pretty v4.3.0+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA=
github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a h1:jlDOeO5TU0pYlbc/y6PFguab5IjANI0Knrpg3u/ton4=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
github.com/muesli/mango v0.1.0/go.mod h1:5XFpbC8jY5UUv89YQciiXNlbi+iJgt29VDC5xbzrLL4=
github.com/muesli/mango-cobra v1.2.0 h1:DQvjzAM0PMZr85Iv9LIMaYISpTOliMEg+uMFtNbYvWg=
github.com/muesli/mango-cobra v1.2.0/go.mod h1:vMJL54QytZAJhCT13LPVDfkvCUJ5/4jNUKF/8NC2UjA=
github.com/muesli/mango-pflag v0.1.0 h1:UADqbYgpUyRoBja3g6LUL+3LErjpsOwaC9ywvBWe7Sg=
github.com/muesli/mango-pflag v0.1.0/go.mod h1:YEQomTxaCUp8PrbhFh10UfbhbQrM/xJ4i2PB8VTLLW0=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ=
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pion/dtls/v3 v3.0.4 h1:44CZekewMzfrn9pmGrj5BNnTMDCFwr+6sLH+cCuLM7U=
github.com/pion/dtls/v3 v3.0.4/go.mod h1:R373CsjxWqNPf6MEkfdy3aSe9niZvL/JaKlGeFphtMg=
github.com/pion/logging v0.2.3 h1:gHuf0zpoh1GW67Nr6Gj4cv5Z9ZscU7g/EaoC/Ke/igI=
github.com/pion/logging v0.2.3/go.mod h1:z8YfknkquMe1csOrxK5kc+5/ZPAzMxbKLX5aXpbpC90=
github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA=
github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
github.com/pion/stun/v3 v3.0.0 h1:4h1gwhWLWuZWOJIJR9s2ferRO+W3zA/b6ijOI6mKzUw=
github.com/pion/stun/v3 v3.0.0/go.mod h1:HvCN8txt8mwi4FBvS3EmDghW6aQJ24T+y+1TKjB5jyU=
github.com/pion/transport/v3 v3.0.7 h1:iRbMH05BzSNwhILHoBoAPxoB9xQgOaJk+591KC9P1o0=
github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uPLGhhz9rwo=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a h1:Ey0XWvrg6u6hyIn1Kd/jCCmL+bMv9El81tvuGBbxZGg=
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/quic-go/quic-go v0.50.0 h1:3H/ld1pa3CYhkcc20TPIyG1bNsdhn9qZBGN3b9/UyUo=
github.com/quic-go/quic-go v0.50.0/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/cors v1.11.0 h1:0B9GE/r9Bc2UxRMMtymBkHTenPkHDv0CW4Y98GBY+po=
github.com/rs/cors v1.11.0/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU=
github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.26.1 h1:/ihwxqH+4z8UxyI70wM1z9yCvkWcfz/a3mj48k/Zngc=
github.com/rs/zerolog v1.26.1/go.mod h1:/wSSJWX7lVrsOwlbyTRSOJvqRlc+WjWlfes+CiJ+tmc=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44=
github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/wasilibs/go-re2 v1.10.0 h1:vQZEBYZOCA9jdBMmrO4+CvqyCj0x4OomXTJ4a5/urQ0=
github.com/wasilibs/go-re2 v1.10.0/go.mod h1:k+5XqO2bCJS+QpGOnqugyfwC04nw0jaglmjrrkG8U6o=
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52 h1:OvLBa8SqJnZ6P+mjlzc2K7PM22rRUPE1x32G9DTPrC4=
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52/go.mod h1:jMeV4Vpbi8osrE/pKUxRZkVaA0EX7NZN0A9/oRzgpgY=
github.com/wlynxg/anet v0.0.5 h1:J3VJGi1gvo0JwZ/P1/Yc/8p63SoW98B5dHkYDmpgvvU=
github.com/wlynxg/anet v0.0.5/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
go.mongodb.org/mongo-driver v1.10.0 h1:UtV6N5k14upNp4LTduX0QCufG124fSu25Wz9tu94GLg=
go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c=
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY=
golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8=
google.golang.org/api v0.188.0 h1:51y8fJ/b1AaaBRJr4yWm96fPcuxSo0JcegXE3DaHQHw=
google.golang.org/api v0.188.0/go.mod h1:VR0d+2SIiWOYG3r/jdm7adPW9hI2aRv9ETOSCQ9Beag=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 h1:0+ozOGcrp+Y8Aq8TLNN2Aliibms5LEzsq99ZZmAGYm0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094/go.mod h1:fJ/e3If/Q67Mj99hin0hMhiNyCRmt6BQ2aWIJshUSJw=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b h1:04+jVzTs2XBnOZcPsLnmrTGqltqJbZQ1Ey26hjYdQQ0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU=
gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.31.4 h1:I2QNzitPVsPeLQvexMEsj945QumYraqv9m74isPDKhM=
k8s.io/api v0.31.4/go.mod h1:d+7vgXLvmcdT1BCo79VEgJxHHryww3V5np2OYTr6jdw=
k8s.io/apimachinery v0.31.4 h1:8xjE2C4CzhYVm9DGf60yohpNUh5AEBnPxCryPBECmlM=
k8s.io/apimachinery v0.31.4/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo=
k8s.io/client-go v0.31.4 h1:t4QEXt4jgHIkKKlx06+W3+1JOwAFU/2OPiOo7H92eRQ=
k8s.io/client-go v0.31.4/go.mod h1:kvuMro4sFYIa8sulL5Gi5GFqUPvfH2O/dXuKstbaaeg=
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff h1:/usPimJzUKKu+m+TE36gUyGcf03XZEP0ZIKgKj35LS4=
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff/go.mod h1:5jIi+8yX4RIb8wk3XwBo5Pq2ccx4FP10ohkbSKCZoK8=
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 h1:M3sRQVHv7vB20Xc2ybTt7ODCeFj6JSWYFzOFnYeS6Ro=
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8=
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo=
sigs.k8s.io/randfill v0.0.0-20250304075658-069ef1bbf016/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU=
sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc=
sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps=
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=

View File

@@ -1,4 +0,0 @@
FROM alpine
RUN apk add --no-cache tini
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]

View File

@@ -1,5 +0,0 @@
[infisical]
name=Infisical CLI
baseurl=https://yum.fury.io/infisical/
enabled=1
gpgcheck=0

View File

@@ -1,17 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package main

import (
	"os"

	"github.com/Infisical/infisical-merge/packages/cmd"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

func main() {
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
	cmd.Execute()
}

View File

@@ -1,652 +0,0 @@
package api

import (
	"fmt"
	"net/http"
	"strings"

	"github.com/Infisical/infisical-merge/packages/config"
	"github.com/go-resty/resty/v2"
	"github.com/rs/zerolog/log"
)

const USER_AGENT = "cli"

const (
	operationCallGetRawSecretsV3 = "CallGetRawSecretsV3"
	operationCallGetEncryptedWorkspaceKey = "CallGetEncryptedWorkspaceKey"
	operationCallGetServiceTokenDetails = "CallGetServiceTokenDetails"
	operationCallLogin1V3 = "CallLogin1V3"
	operationCallVerifyMfaToken = "CallVerifyMfaToken"
	operationCallLogin2V3 = "CallLogin2V3"
	operationCallGetAllOrganizations = "CallGetAllOrganizations"
	operationCallSelectOrganization = "CallSelectOrganization"
	operationCallGetAllWorkSpacesUserBelongsTo = "CallGetAllWorkSpacesUserBelongsTo"
	operationCallGetProjectById = "CallGetProjectById"
	operationCallIsAuthenticated = "CallIsAuthenticated"
	operationCallGetNewAccessTokenWithRefreshToken = "CallGetNewAccessTokenWithRefreshToken"
	operationCallGetFoldersV1 = "CallGetFoldersV1"
	operationCallCreateFolderV1 = "CallCreateFolderV1"
	operationCallDeleteFolderV1 = "CallDeleteFolderV1"
	operationCallDeleteSecretsV3 = "CallDeleteSecretsV3"
	operationCallCreateServiceToken = "CallCreateServiceToken"
	operationCallUniversalAuthLogin = "CallUniversalAuthLogin"
	operationCallMachineIdentityRefreshAccessToken = "CallMachineIdentityRefreshAccessToken"
	operationCallFetchSingleSecretByName = "CallFetchSingleSecretByName"
	operationCallCreateRawSecretsV3 = "CallCreateRawSecretsV3"
	operationCallUpdateRawSecretsV3 = "CallUpdateRawSecretsV3"
	operationCallRegisterGatewayIdentityV1 = "CallRegisterGatewayIdentityV1"
	operationCallExchangeRelayCertV1 = "CallExchangeRelayCertV1"
	operationCallGatewayHeartBeatV1 = "CallGatewayHeartBeatV1"
	operationCallBootstrapInstance = "CallBootstrapInstance"
)
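// CallGetEncryptedWorkspaceKey retrieves the encrypted project key for the given workspace.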
func CallGetEncryptedWorkspaceKey(httpClient *resty.Client, request GetEncryptedWorkspaceKeyRequest) (GetEncryptedWorkspaceKeyResponse, error) {
	endpoint := fmt.Sprintf("%v/v2/workspace/%v/encrypted-key", config.INFISICAL_URL, request.WorkspaceId)
	var result GetEncryptedWorkspaceKeyResponse
	response, err := httpClient.
		R().
		SetResult(&result).
		SetHeader("User-Agent", USER_AGENT).
		Get(endpoint)
	if err != nil {
		return GetEncryptedWorkspaceKeyResponse{}, NewGenericRequestError(operationCallGetEncryptedWorkspaceKey, err)
	}
	if response.IsError() {
		return GetEncryptedWorkspaceKeyResponse{}, NewAPIErrorWithResponse(operationCallGetEncryptedWorkspaceKey, response, nil)
	}
	return result, nil
}
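// CallGetServiceTokenDetailsV2 fetches details for the service token attached to the client.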
func CallGetServiceTokenDetailsV2(httpClient *resty.Client) (GetServiceTokenDetailsResponse, error) {
	var tokenDetailsResponse GetServiceTokenDetailsResponse
	response, err := httpClient.
		R().
		SetResult(&tokenDetailsResponse).
		SetHeader("User-Agent", USER_AGENT).
		Get(fmt.Sprintf("%v/v2/service-token", config.INFISICAL_URL))
	if err != nil {
		return GetServiceTokenDetailsResponse{}, NewGenericRequestError(operationCallGetServiceTokenDetails, err)
	}
	if response.IsError() {
		return GetServiceTokenDetailsResponse{}, NewAPIErrorWithResponse(operationCallGetServiceTokenDetails, response, nil)
	}
	return tokenDetailsResponse, nil
}
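// CallLogin1V2 performs the first step of the two-step login exchange.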
func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLoginOneV2Response, error) {
	var loginOneV2Response GetLoginOneV2Response
	response, err := httpClient.
		R().
		SetResult(&loginOneV2Response).
		SetHeader("User-Agent", USER_AGENT).
		SetBody(request).
		Post(fmt.Sprintf("%v/v3/auth/login1", config.INFISICAL_URL))
	if err != nil {
		return GetLoginOneV2Response{}, NewGenericRequestError(operationCallLogin1V3, err)
	}
	if response.IsError() {
		return GetLoginOneV2Response{}, NewAPIErrorWithResponse(operationCallLogin1V3, response, nil)
	}
	return loginOneV2Response, nil
}
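// CallVerifyMfaToken verifies an MFA token and, when present, captures the refresh token from the "jid" cookie.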
func CallVerifyMfaToken(httpClient *resty.Client, request VerifyMfaTokenRequest) (*VerifyMfaTokenResponse, *VerifyMfaTokenErrorResponse, error) {
	var verifyMfaTokenResponse VerifyMfaTokenResponse
	var responseError VerifyMfaTokenErrorResponse
	response, err := httpClient.
		R().
		SetResult(&verifyMfaTokenResponse).
		SetHeader("User-Agent", USER_AGENT).
		SetError(&responseError).
		SetBody(request).
		Post(fmt.Sprintf("%v/v2/auth/mfa/verify", config.INFISICAL_URL))
	cookies := response.Cookies()
	// Find a cookie by name
	cookieName := "jid"
	var refreshToken *http.Cookie
	for _, cookie := range cookies {
		if cookie.Name == cookieName {
			refreshToken = cookie
			break
		}
	}
	// When MFA is enabled
	if refreshToken != nil {
		verifyMfaTokenResponse.RefreshToken = refreshToken.Value
	}
	if err != nil {
		return nil, nil, NewGenericRequestError(operationCallVerifyMfaToken, err)
	}
	if response.IsError() {
		return nil, &responseError, nil
	}
	return &verifyMfaTokenResponse, nil, nil
}
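// CallLogin2V2 completes the second step of the login exchange and captures the refresh token from the "jid" cookie.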
func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLoginTwoV2Response, error) {
	var loginTwoV2Response GetLoginTwoV2Response
	response, err := httpClient.
		R().
		SetResult(&loginTwoV2Response).
		SetHeader("User-Agent", USER_AGENT).
		SetBody(request).
		Post(fmt.Sprintf("%v/v3/auth/login2", config.INFISICAL_URL))
	cookies := response.Cookies()
	// Find a cookie by name
	cookieName := "jid"
	var refreshToken *http.Cookie
	for _, cookie := range cookies {
		if cookie.Name == cookieName {
			refreshToken = cookie
			break
		}
	}
	// When MFA is enabled
	if refreshToken != nil {
		loginTwoV2Response.RefreshToken = refreshToken.Value
	}
	if err != nil {
		return GetLoginTwoV2Response{}, NewGenericRequestError(operationCallLogin2V3, err)
	}
	if response.IsError() {
		return GetLoginTwoV2Response{}, NewAPIErrorWithResponse(operationCallLogin2V3, response, nil)
	}
	return loginTwoV2Response, nil
}
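// CallGetAllOrganizations retrieves the organizations available to the authenticated user.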
func CallGetAllOrganizations(httpClient *resty.Client) (GetOrganizationsResponse, error) {
	var orgResponse GetOrganizationsResponse
	response, err := httpClient.
		R().
		SetResult(&orgResponse).
		SetHeader("User-Agent", USER_AGENT).
		Get(fmt.Sprintf("%v/v1/organization", config.INFISICAL_URL))
	if err != nil {
		return GetOrganizationsResponse{}, NewGenericRequestError(operationCallGetAllOrganizations, err)
	}
	if response.IsError() {
		return GetOrganizationsResponse{}, NewAPIErrorWithResponse(operationCallGetAllOrganizations, response, nil)
	}
	return orgResponse, nil
}
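// CallSelectOrganization selects the organization to scope the current session to.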
func CallSelectOrganization(httpClient *resty.Client, request SelectOrganizationRequest) (SelectOrganizationResponse, error) {
	var selectOrgResponse SelectOrganizationResponse
	response, err := httpClient.
		R().
		SetBody(request).
		SetResult(&selectOrgResponse).
		SetHeader("User-Agent", USER_AGENT).
		Post(fmt.Sprintf("%v/v3/auth/select-organization", config.INFISICAL_URL))
	if err != nil {
		return SelectOrganizationResponse{}, NewGenericRequestError(operationCallSelectOrganization, err)
	}
	if response.IsError() {
		return SelectOrganizationResponse{}, NewAPIErrorWithResponse(operationCallSelectOrganization, response, nil)
	}
	return selectOrgResponse, nil
}
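// CallGetAllWorkSpacesUserBelongsTo lists the workspaces the authenticated user belongs to.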
func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesResponse, error) {
	var workSpacesResponse GetWorkSpacesResponse
	response, err := httpClient.
		R().
		SetResult(&workSpacesResponse).
		SetHeader("User-Agent", USER_AGENT).
		Get(fmt.Sprintf("%v/v1/workspace", config.INFISICAL_URL))
	if err != nil {
		return GetWorkSpacesResponse{}, err
	}
	if response.IsError() {
		return GetWorkSpacesResponse{}, fmt.Errorf("CallGetAllWorkSpacesUserBelongsTo: Unsuccessful response: [response=%v]", response)
	}
	return workSpacesResponse, nil
}
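// CallGetProjectById fetches a single project (workspace) by its ID.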
func CallGetProjectById(httpClient *resty.Client, id string) (Project, error) {
	var projectResponse GetProjectByIdResponse
	response, err := httpClient.
		R().
		SetResult(&projectResponse).
		SetHeader("User-Agent", USER_AGENT).
		Get(fmt.Sprintf("%v/v1/workspace/%s", config.INFISICAL_URL, id))
	if err != nil {
		return Project{}, NewGenericRequestError(operationCallGetProjectById, err)
	}
	if response.IsError() {
		return Project{}, NewAPIErrorWithResponse(operationCallGetProjectById, response, nil)
	}
	return projectResponse.Project, nil
}
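// CallIsAuthenticated reports whether the client's current credentials are accepted by the API.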
func CallIsAuthenticated(httpClient *resty.Client) bool {
	var workSpacesResponse GetWorkSpacesResponse
	response, err := httpClient.
		R().
		SetResult(&workSpacesResponse).
		SetHeader("User-Agent", USER_AGENT).
		Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
	if err != nil {
		return false
	}
	if response.IsError() {
		log.Debug().Msgf("%s: Unsuccessful response: [response=%v]", operationCallIsAuthenticated, response)
		return false
	}
	return true
}
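// CallGetNewAccessTokenWithRefreshToken exchanges a refresh token, sent as the "jid" cookie, for a new access token.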
func CallGetNewAccessTokenWithRefreshToken(httpClient *resty.Client, refreshToken string) (GetNewAccessTokenWithRefreshTokenResponse, error) {
	var newAccessToken GetNewAccessTokenWithRefreshTokenResponse
	response, err := httpClient.
		R().
		SetResult(&newAccessToken).
		SetHeader("User-Agent", USER_AGENT).
		SetCookie(&http.Cookie{
			Name:  "jid",
			Value: refreshToken,
		}).
		Post(fmt.Sprintf("%v/v1/auth/token", config.INFISICAL_URL))
	if err != nil {
		return GetNewAccessTokenWithRefreshTokenResponse{}, NewGenericRequestError(operationCallGetNewAccessTokenWithRefreshToken, err)
	}
	if response.IsError() {
		return GetNewAccessTokenWithRefreshTokenResponse{}, NewAPIErrorWithResponse(operationCallGetNewAccessTokenWithRefreshToken, response, nil)
	}
	return newAccessToken, nil
}
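// CallGetFoldersV1 lists folders for the given workspace, environment and directory.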
func CallGetFoldersV1(httpClient *resty.Client, request GetFoldersV1Request) (GetFoldersV1Response, error) {
var foldersResponse GetFoldersV1Response
httpRequest := httpClient.
R().
SetResult(&foldersResponse).
SetHeader("User-Agent", USER_AGENT).
SetQueryParam("environment", request.Environment).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("directory", request.FoldersPath)
response, err := httpRequest.Get(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
if err != nil {
return GetFoldersV1Response{}, NewGenericRequestError(operationCallGetFoldersV1, err)
}
if response.IsError() {
return GetFoldersV1Response{}, NewAPIErrorWithResponse(operationCallGetFoldersV1, response, nil)
}
return foldersResponse, nil
}
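// CallCreateFolderV1 creates a folder at the given path within a workspace environment.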
func CallCreateFolderV1(httpClient *resty.Client, request CreateFolderV1Request) (CreateFolderV1Response, error) {
var folderResponse CreateFolderV1Response
httpRequest := httpClient.
R().
SetResult(&folderResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request)
response, err := httpRequest.Post(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
if err != nil {
return CreateFolderV1Response{}, NewGenericRequestError(operationCallCreateFolderV1, err)
}
if response.IsError() {
return CreateFolderV1Response{}, NewAPIErrorWithResponse(operationCallCreateFolderV1, response, nil)
}
return folderResponse, nil
}
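// CallDeleteFolderV1 deletes a folder by name within the given workspace, environment and directory.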
func CallDeleteFolderV1(httpClient *resty.Client, request DeleteFolderV1Request) (DeleteFolderV1Response, error) {
var folderResponse DeleteFolderV1Response
httpRequest := httpClient.
R().
SetResult(&folderResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request)
response, err := httpRequest.Delete(fmt.Sprintf("%v/v1/folders/%v", config.INFISICAL_URL, request.FolderName))
if err != nil {
return DeleteFolderV1Response{}, NewGenericRequestError(operationCallDeleteFolderV1, err)
}
if response.IsError() {
return DeleteFolderV1Response{}, NewAPIErrorWithResponse(operationCallDeleteFolderV1, response, nil)
}
return folderResponse, nil
}
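// CallDeleteSecretsRawV3 deletes a raw (plaintext) secret by name. On an error response, the returned error reminds the caller to double-check the secret path, workspace and environment.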
func CallDeleteSecretsRawV3(httpClient *resty.Client, request DeleteSecretV3Request) error {
var secretsResponse GetEncryptedSecretsV3Response
response, err := httpClient.
R().
SetResult(&secretsResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Delete(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallDeleteSecretsV3, err)
}
if response.IsError() {
additionalContext := "Please make sure your secret path, workspace and environment name are all correct."
return NewAPIErrorWithResponse(operationCallDeleteSecretsV3, response, &additionalContext)
}
return nil
}
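// CallCreateServiceToken creates a service token scoped to the given workspace, environments and secret paths.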
func CallCreateServiceToken(httpClient *resty.Client, request CreateServiceTokenRequest) (CreateServiceTokenResponse, error) {
var createServiceTokenResponse CreateServiceTokenResponse
response, err := httpClient.
R().
SetResult(&createServiceTokenResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v2/service-token/", config.INFISICAL_URL))
if err != nil {
return CreateServiceTokenResponse{}, NewGenericRequestError(operationCallCreateServiceToken, err)
}
if response.IsError() {
return CreateServiceTokenResponse{}, NewAPIErrorWithResponse(operationCallCreateServiceToken, response, nil)
}
return createServiceTokenResponse, nil
}
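// CallUniversalAuthLogin exchanges universal-auth (machine identity) client credentials for an access token.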
func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLoginRequest) (UniversalAuthLoginResponse, error) {
var universalAuthLoginResponse UniversalAuthLoginResponse
response, err := httpClient.
R().
SetResult(&universalAuthLoginResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/auth/universal-auth/login/", config.INFISICAL_URL))
if err != nil {
return UniversalAuthLoginResponse{}, NewGenericRequestError(operationCallUniversalAuthLogin, err)
}
if response.IsError() {
return UniversalAuthLoginResponse{}, NewAPIErrorWithResponse(operationCallUniversalAuthLogin, response, nil)
}
return universalAuthLoginResponse, nil
}
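// CallMachineIdentityRefreshAccessToken renews a machine identity access token before it expires.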
func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
var universalAuthRefreshResponse UniversalAuthRefreshResponse
response, err := httpClient.
R().
SetResult(&universalAuthRefreshResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL))
if err != nil {
return UniversalAuthRefreshResponse{}, NewGenericRequestError(operationCallMachineIdentityRefreshAccessToken, err)
}
if response.IsError() {
return UniversalAuthRefreshResponse{}, NewAPIErrorWithResponse(operationCallMachineIdentityRefreshAccessToken, response, nil)
}
return universalAuthRefreshResponse, nil
}
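// CallGetRawSecretsV3 fetches decrypted (raw) secrets for a workspace, environment and secret path, with optional tag filtering, imports, recursion and secret-reference expansion. The response ETag mirrors the server's etag header so callers can detect unchanged data.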
func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Request) (GetRawSecretsV3Response, error) {
var getRawSecretsV3Response GetRawSecretsV3Response
req := httpClient.
R().
SetResult(&getRawSecretsV3Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath)
if request.TagSlugs != "" {
req.SetQueryParam("tagSlugs", request.TagSlugs)
}
if request.IncludeImport {
req.SetQueryParam("include_imports", "true")
}
if request.Recursive {
req.SetQueryParam("recursive", "true")
}
if request.ExpandSecretReferences {
req.SetQueryParam("expandSecretReferences", "true")
}
response, err := req.Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL))
if err != nil {
return GetRawSecretsV3Response{}, NewGenericRequestError(operationCallGetRawSecretsV3, err)
}
if response.IsError() &&
(strings.Contains(response.String(), "bot_not_found_error") ||
strings.Contains(strings.ToLower(response.String()), "failed to find bot key") ||
strings.Contains(strings.ToLower(response.String()), "bot is not active")) {
additionalContext := fmt.Sprintf(`Project with id %s is incompatible with your current CLI version. Upgrade your project by visiting the project settings page. If you're self-hosting and the project upgrade option isn't yet available, ask your administrator to upgrade your Infisical instance to the latest release.`, request.WorkspaceId)
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, &additionalContext)
}
if response.IsError() {
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, nil)
}
getRawSecretsV3Response.ETag = response.Header().Get("etag")
return getRawSecretsV3Response, nil
}
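// CallFetchSingleSecretByName fetches a single shared raw secret by name, expanding secret references and including imports.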
func CallFetchSingleSecretByName(httpClient *resty.Client, request GetRawSecretV3ByNameRequest) (GetRawSecretV3ByNameResponse, error) {
var getRawSecretV3ByNameResponse GetRawSecretV3ByNameResponse
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetResult(&getRawSecretV3ByNameResponse).
SetBody(request).
SetQueryParam("expandSecretReferences", "true").
SetQueryParam("include_imports", "true").
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath).
SetQueryParam("workspaceId", request.WorkspaceID).
SetQueryParam("type", "shared").
Get(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return GetRawSecretV3ByNameResponse{}, NewGenericRequestError(operationCallFetchSingleSecretByName, err)
}
if response.IsError() {
return GetRawSecretV3ByNameResponse{}, NewAPIErrorWithResponse(operationCallFetchSingleSecretByName, response, nil)
}
getRawSecretV3ByNameResponse.ETag = response.Header().Get("etag")
return getRawSecretV3ByNameResponse, nil
}
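// CallCreateDynamicSecretLeaseV1 provisions a lease for a dynamic secret and returns the lease details together with the generated credentials.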
func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDynamicSecretLeaseV1Request) (CreateDynamicSecretLeaseV1Response, error) {
var createDynamicSecretLeaseResponse CreateDynamicSecretLeaseV1Response
response, err := httpClient.
R().
SetResult(&createDynamicSecretLeaseResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/dynamic-secrets/leases", config.INFISICAL_URL))
if err != nil {
return CreateDynamicSecretLeaseV1Response{}, fmt.Errorf("CreateDynamicSecretLeaseV1: Unable to complete api request [err=%w]", err)
}
if response.IsError() {
return CreateDynamicSecretLeaseV1Response{}, fmt.Errorf("CreateDynamicSecretLeaseV1: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}
return createDynamicSecretLeaseResponse, nil
}
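// CallCreateRawSecretsV3 creates a raw (plaintext) secret by name.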
func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallCreateRawSecretsV3, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallCreateRawSecretsV3, response, nil)
}
return nil
}
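// CallUpdateRawSecretsV3 updates an existing raw secret by name.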
func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByNameV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallUpdateRawSecretsV3, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallUpdateRawSecretsV3, response, nil)
}
return nil
}
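// CallRegisterGatewayIdentityV1 registers the current machine identity as a gateway and returns relay (TURN) credentials.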
func CallRegisterGatewayIdentityV1(httpClient *resty.Client) (*GetRelayCredentialsResponseV1, error) {
var resBody GetRelayCredentialsResponseV1
response, err := httpClient.
R().
SetResult(&resBody).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/register-identity", config.INFISICAL_URL))
if err != nil {
return nil, NewGenericRequestError(operationCallRegisterGatewayIdentityV1, err)
}
if response.IsError() {
return nil, NewAPIErrorWithResponse(operationCallRegisterGatewayIdentityV1, response, nil)
}
return &resBody, nil
}
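// CallExchangeRelayCertV1 exchanges the relay address for the certificate, private key and certificate chain used to connect to the relay.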
func CallExchangeRelayCertV1(httpClient *resty.Client, request ExchangeRelayCertRequestV1) (*ExchangeRelayCertResponseV1, error) {
var resBody ExchangeRelayCertResponseV1
response, err := httpClient.
R().
SetResult(&resBody).
SetBody(request).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/exchange-cert", config.INFISICAL_URL))
if err != nil {
return nil, NewGenericRequestError(operationCallExchangeRelayCertV1, err)
}
if response.IsError() {
return nil, NewAPIErrorWithResponse(operationCallExchangeRelayCertV1, response, nil)
}
return &resBody, nil
}
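// CallGatewayHeartBeatV1 reports gateway liveness to the Infisical instance.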
func CallGatewayHeartBeatV1(httpClient *resty.Client) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/heartbeat", config.INFISICAL_URL))
if err != nil {
return NewGenericRequestError(operationCallGatewayHeartBeatV1, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallGatewayHeartBeatV1, response, nil)
}
return nil
}
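// CallBootstrapInstance bootstraps a self-hosted instance (admin user, organization and machine identity). Note that it targets request.Domain rather than config.INFISICAL_URL.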
func CallBootstrapInstance(httpClient *resty.Client, request BootstrapInstanceRequest) (BootstrapInstanceResponse, error) {
var resBody BootstrapInstanceResponse
response, err := httpClient.
R().
SetResult(&resBody).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/admin/bootstrap", request.Domain))
if err != nil {
return BootstrapInstanceResponse{}, NewGenericRequestError(operationCallBootstrapInstance, err)
}
if response.IsError() {
return BootstrapInstanceResponse{}, NewAPIErrorWithResponse(operationCallBootstrapInstance, response, nil)
}
return resBody, nil
}

View File

@@ -1,80 +0,0 @@
package api
import (
"fmt"
"github.com/go-resty/resty/v2"
"github.com/infisical/go-sdk/packages/util"
)
type GenericRequestError struct {
err error
operation string
}
func (e *GenericRequestError) Error() string {
return fmt.Sprintf("%s: Unable to complete api request [err=%v]", e.operation, e.err)
}
func NewGenericRequestError(operation string, err error) *GenericRequestError {
return &GenericRequestError{err: err, operation: operation}
}
// APIError represents an error response from the API
type APIError struct {
AdditionalContext string `json:"additionalContext,omitempty"`
Operation string `json:"operation"`
Method string `json:"method"`
URL string `json:"url"`
StatusCode int `json:"statusCode"`
ErrorMessage string `json:"message,omitempty"`
ReqId string `json:"reqId,omitempty"`
}
func (e *APIError) Error() string {
msg := fmt.Sprintf(
"%s Unsuccessful response [%v %v] [status-code=%v] [request-id=%v]",
e.Operation,
e.Method,
e.URL,
e.StatusCode,
e.ReqId,
)
if e.ErrorMessage != "" {
msg = fmt.Sprintf("%s [message=\"%s\"]", msg, e.ErrorMessage)
}
if e.AdditionalContext != "" {
msg = fmt.Sprintf("%s [additional-context=\"%s\"]", msg, e.AdditionalContext)
}
return msg
}
func NewAPIErrorWithResponse(operation string, res *resty.Response, additionalContext *string) error {
// Guard against a nil response before attempting to parse its body or headers.
if res == nil {
return NewGenericRequestError(operation, fmt.Errorf("response is nil"))
}
errorMessage := util.TryParseErrorBody(res)
reqId := util.TryExtractReqId(res)
apiError := &APIError{
Operation: operation,
Method: res.Request.Method,
URL: res.Request.URL,
StatusCode: res.StatusCode(),
ReqId: reqId,
}
if additionalContext != nil && *additionalContext != "" {
apiError.AdditionalContext = *additionalContext
}
if errorMessage != "" {
apiError.ErrorMessage = errorMessage
}
return apiError
}

View File

@@ -1,689 +0,0 @@
package api
import "time"
// Stores info for login one
type LoginOneRequest struct {
Email string `json:"email"`
ClientPublicKey string `json:"clientPublicKey"`
}
type LoginOneResponse struct {
ServerPublicKey string `json:"serverPublicKey"`
ServerSalt string `json:"salt"`
}
// Stores info for login two
type LoginTwoRequest struct {
Email string `json:"email"`
ClientProof string `json:"clientProof"`
}
type LoginTwoResponse struct {
JTWToken string `json:"token"`
RefreshToken string `json:"refreshToken"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
IV string `json:"iv"`
Tag string `json:"tag"`
}
type PullSecretsRequest struct {
Environment string `json:"environment"`
}
type PullSecretsResponse struct {
Secrets []struct {
ID string `json:"_id"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretKeyHash string `json:"secretKeyHash"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretValueHash string `json:"secretValueHash"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
User string `json:"user,omitempty"`
} `json:"secrets"`
Key struct {
ID string `json:"_id"`
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
ID string `json:"_id"`
Email string `json:"email"`
CustomerID string `json:"customerId"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver string `json:"receiver"`
Workspace string `json:"workspace"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
} `json:"key"`
}
type PullSecretsByInfisicalTokenResponse struct {
Secrets []struct {
ID string `json:"_id"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey struct {
Workspace string `json:"workspace"`
Ciphertext string `json:"ciphertext"`
Iv string `json:"iv"`
Tag string `json:"tag"`
Hash string `json:"hash"`
} `json:"secretKey"`
SecretValue struct {
Workspace string `json:"workspace"`
Ciphertext string `json:"ciphertext"`
Iv string `json:"iv"`
Tag string `json:"tag"`
Hash string `json:"hash"`
} `json:"secretValue"`
} `json:"secrets"`
Key struct {
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver struct {
RefreshVersion int `json:"refreshVersion"`
ID string `json:"_id"`
Email string `json:"email"`
CustomerID string `json:"customerId"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"receiver"`
Workspace string `json:"workspace"`
} `json:"key"`
}
type GetWorkSpacesResponse struct {
Workspaces []struct {
ID string `json:"_id"`
Name string `json:"name"`
Plan string `json:"plan,omitempty"`
V int `json:"__v"`
OrganizationId string `json:"orgId"`
} `json:"workspaces"`
}
type GetProjectByIdResponse struct {
Project Project `json:"workspace"`
}
type GetOrganizationsResponse struct {
Organizations []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"organizations"`
}
type SelectOrganizationResponse struct {
Token string `json:"token"`
MfaEnabled bool `json:"isMfaEnabled"`
MfaMethod string `json:"mfaMethod"`
}
type SelectOrganizationRequest struct {
OrganizationId string `json:"organizationId"`
}
type Secret struct {
SecretKeyCiphertext string `json:"secretKeyCiphertext,omitempty"`
SecretKeyIV string `json:"secretKeyIV,omitempty"`
SecretKeyTag string `json:"secretKeyTag,omitempty"`
SecretKeyHash string `json:"secretKeyHash,omitempty"`
SecretValueCiphertext string `json:"secretValueCiphertext,omitempty"`
SecretValueIV string `json:"secretValueIV,omitempty"`
SecretValueTag string `json:"secretValueTag,omitempty"`
SecretValueHash string `json:"secretValueHash,omitempty"`
SecretCommentCiphertext string `json:"secretCommentCiphertext,omitempty"`
SecretCommentIV string `json:"secretCommentIV,omitempty"`
SecretCommentTag string `json:"secretCommentTag,omitempty"`
SecretCommentHash string `json:"secretCommentHash,omitempty"`
Type string `json:"type,omitempty"`
ID string `json:"id,omitempty"`
PlainTextKey string `json:"plainTextKey"`
}
type Project struct {
ID string `json:"id"`
Name string `json:"name"`
Slug string `json:"slug"`
}
type RawSecret struct {
SecretKey string `json:"secretKey,omitempty"`
SecretValue string `json:"secretValue,omitempty"`
Type string `json:"type,omitempty"`
SecretComment string `json:"secretComment,omitempty"`
ID string `json:"id,omitempty"`
}
type GetEncryptedWorkspaceKeyRequest struct {
WorkspaceId string `json:"workspaceId"`
}
type GetEncryptedWorkspaceKeyResponse struct {
ID string `json:"_id"`
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
ID string `json:"_id"`
Email string `json:"email"`
RefreshVersion int `json:"refreshVersion"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver string `json:"receiver"`
Workspace string `json:"workspace"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
EnvironmentName string `json:"environmentName"`
WorkspaceId string `json:"workspaceId"`
}
type GetServiceTokenDetailsResponse struct {
ID string `json:"_id"`
Name string `json:"name"`
Workspace string `json:"workspace"`
ExpiresAt time.Time `json:"expiresAt"`
EncryptedKey string `json:"encryptedKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
Scopes []struct {
Environment string `json:"environment"`
SecretPath string `json:"secretPath"`
} `json:"scopes"`
}
type GetAccessibleEnvironmentsRequest struct {
WorkspaceId string `json:"workspaceId"`
}
type GetAccessibleEnvironmentsResponse struct {
AccessibleEnvironments []struct {
Name string `json:"name"`
Slug string `json:"slug"`
IsWriteDenied bool `json:"isWriteDenied"`
} `json:"accessibleEnvironments"`
}
type GetLoginOneV2Request struct {
Email string `json:"email"`
ClientPublicKey string `json:"clientPublicKey"`
}
type GetLoginOneV2Response struct {
ServerPublicKey string `json:"serverPublicKey"`
Salt string `json:"salt"`
}
type GetLoginTwoV2Request struct {
Email string `json:"email"`
ClientProof string `json:"clientProof"`
Password string `json:"password"`
}
type GetLoginTwoV2Response struct {
MfaEnabled bool `json:"mfaEnabled"`
EncryptionVersion int `json:"encryptionVersion"`
Token string `json:"token"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
ProtectedKey string `json:"protectedKey"`
ProtectedKeyIV string `json:"protectedKeyIV"`
ProtectedKeyTag string `json:"protectedKeyTag"`
RefreshToken string `json:"RefreshToken"`
}
type VerifyMfaTokenRequest struct {
Email string `json:"email"`
MFAToken string `json:"mfaToken"`
MFAMethod string `json:"mfaMethod"`
}
type VerifyMfaTokenResponse struct {
EncryptionVersion int `json:"encryptionVersion"`
Token string `json:"token"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
ProtectedKey string `json:"protectedKey"`
ProtectedKeyIV string `json:"protectedKeyIV"`
ProtectedKeyTag string `json:"protectedKeyTag"`
RefreshToken string `json:"refreshToken"`
}
type VerifyMfaTokenErrorResponse struct {
Type string `json:"type"`
Message string `json:"message"`
Context struct {
Code string `json:"code"`
TriesLeft int `json:"triesLeft"`
} `json:"context"`
Level int `json:"level"`
LevelName string `json:"level_name"`
StatusCode int `json:"status_code"`
DatetimeIso time.Time `json:"datetime_iso"`
Application string `json:"application"`
Extra []interface{} `json:"extra"`
}
type GetNewAccessTokenWithRefreshTokenResponse struct {
Token string `json:"token"`
}
type GetEncryptedSecretsV3Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
}
type GetFoldersV1Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
FoldersPath string `json:"foldersPath"`
}
type GetFoldersV1Response struct {
Folders []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folders"`
}
type CreateFolderV1Request struct {
FolderName string `json:"name"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Path string `json:"path"`
}
type CreateFolderV1Response struct {
Folder struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folder"`
}
type DeleteFolderV1Request struct {
FolderName string `json:"folderName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Directory string `json:"directory"`
}
type DeleteFolderV1Response struct {
Folders []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folders"`
}
type EncryptedSecretV3 struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
} `json:"tags"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
Algorithm string `json:"algorithm"`
KeyEncoding string `json:"keyEncoding"`
Folder string `json:"folder"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type ImportedSecretV3 struct {
Environment string `json:"environment"`
FolderId string `json:"folderId"`
SecretPath string `json:"secretPath"`
Secrets []EncryptedSecretV3 `json:"secrets"`
}
type ImportedRawSecretV3 struct {
SecretPath string `json:"secretPath"`
Environment string `json:"environment"`
FolderId string `json:"folderId"`
Secrets []struct {
ID string `json:"id"`
Workspace string `json:"workspace"`
Environment string `json:"environment"`
Version int `json:"version"`
Type string `json:"type"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
} `json:"secrets"`
}
type GetEncryptedSecretsV3Response struct {
Secrets []EncryptedSecretV3 `json:"secrets"`
ImportedSecrets []ImportedSecretV3 `json:"imports,omitempty"`
}
type CreateSecretV3Request struct {
SecretName string `json:"secretName"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
SecretPath string `json:"secretPath"`
}
type CreateRawSecretV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type,omitempty"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment,omitempty"`
SkipMultilineEncoding bool `json:"skipMultilineEncoding,omitempty"`
}
type DeleteSecretV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type,omitempty"`
SecretPath string `json:"secretPath,omitempty"`
}
type UpdateSecretByNameV3Request struct {
WorkspaceID string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type"`
SecretPath string `json:"secretPath"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
}
type UpdateRawSecretByNameV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
Type string `json:"type,omitempty"`
}
type GetSingleSecretByNameV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type"`
SecretPath string `json:"secretPath"`
}
type GetSingleSecretByNameSecretResponse struct {
Secrets []struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
Algorithm string `json:"algorithm"`
KeyEncoding string `json:"keyEncoding"`
Folder string `json:"folder"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
} `json:"secrets"`
}
type ScopePermission struct {
Environment string `json:"environment"`
SecretPath string `json:"secretPath"`
}
type CreateServiceTokenRequest struct {
Name string `json:"name"`
WorkspaceId string `json:"workspaceId"`
Scopes []ScopePermission `json:"scopes"`
ExpiresIn int `json:"expiresIn"`
EncryptedKey string `json:"encryptedKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
RandomBytes string `json:"randomBytes"`
Permissions []string `json:"permissions"`
}
type ServiceTokenData struct {
ID string `json:"_id"`
Name string `json:"name"`
Workspace string `json:"workspace"`
Scopes []interface{} `json:"scopes"`
User string `json:"user"`
LastUsed time.Time `json:"lastUsed"`
Permissions []string `json:"permissions"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type CreateServiceTokenResponse struct {
ServiceToken string `json:"serviceToken"`
ServiceTokenData ServiceTokenData `json:"serviceTokenData"`
}
type UniversalAuthLoginRequest struct {
ClientSecret string `json:"clientSecret"`
ClientId string `json:"clientId"`
}
type UniversalAuthLoginResponse struct {
AccessToken string `json:"accessToken"`
AccessTokenTTL int `json:"expiresIn"`
TokenType string `json:"tokenType"`
AccessTokenMaxTTL int `json:"accessTokenMaxTTL"`
}
type UniversalAuthRefreshRequest struct {
AccessToken string `json:"accessToken"`
}
type UniversalAuthRefreshResponse struct {
AccessToken string `json:"accessToken"`
AccessTokenTTL int `json:"expiresIn"`
TokenType string `json:"tokenType"`
AccessTokenMaxTTL int `json:"accessTokenMaxTTL"`
}
type CreateDynamicSecretLeaseV1Request struct {
Environment string `json:"environment"`
ProjectSlug string `json:"projectSlug"`
SecretPath string `json:"secretPath,omitempty"`
Slug string `json:"slug"`
TTL string `json:"ttl,omitempty"`
}
type CreateDynamicSecretLeaseV1Response struct {
Lease struct {
Id string `json:"id"`
ExpireAt time.Time `json:"expireAt"`
} `json:"lease"`
DynamicSecret struct {
Id string `json:"id"`
DefaultTTL string `json:"defaultTTL"`
MaxTTL string `json:"maxTTL"`
Type string `json:"type"`
} `json:"dynamicSecret"`
Data map[string]interface{} `json:"data"`
}
type GetRawSecretsV3Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
TagSlugs string `json:"tagSlugs,omitempty"`
ExpandSecretReferences bool `json:"expandSecretReferences,omitempty"`
}
type GetRawSecretsV3Response struct {
Secrets []struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
SecretPath string `json:"secretPath"`
} `json:"secrets"`
Imports []ImportedRawSecretV3 `json:"imports"`
ETag string
}
type GetRawSecretV3ByNameRequest struct {
SecretName string `json:"secretName"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type,omitempty"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
}
type GetRawSecretV3ByNameResponse struct {
Secret struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
SecretPath string `json:"secretPath"`
} `json:"secret"`
ETag string
}
type GetRelayCredentialsResponseV1 struct {
TurnServerUsername string `json:"turnServerUsername"`
TurnServerPassword string `json:"turnServerPassword"`
TurnServerRealm string `json:"turnServerRealm"`
TurnServerAddress string `json:"turnServerAddress"`
InfisicalStaticIp string `json:"infisicalStaticIp"`
}
type ExchangeRelayCertRequestV1 struct {
RelayAddress string `json:"relayAddress"`
}
type ExchangeRelayCertResponseV1 struct {
SerialNumber string `json:"serialNumber"`
PrivateKey string `json:"privateKey"`
Certificate string `json:"certificate"`
CertificateChain string `json:"certificateChain"`
}
type BootstrapInstanceRequest struct {
Email string `json:"email"`
Password string `json:"password"`
Organization string `json:"organization"`
Domain string `json:"domain"`
}
type BootstrapInstanceResponse struct {
Message string `json:"message"`
Identity BootstrapIdentity `json:"identity"`
Organization BootstrapOrganization `json:"organization"`
User BootstrapUser `json:"user"`
}
type BootstrapIdentity struct {
ID string `json:"id"`
Name string `json:"name"`
Credentials BootstrapIdentityCredentials `json:"credentials"`
}
type BootstrapIdentityCredentials struct {
Token string `json:"token"`
}
type BootstrapOrganization struct {
ID string `json:"id"`
Name string `json:"name"`
Slug string `json:"slug"`
}
type BootstrapUser struct {
ID string `json:"id"`
Email string `json:"email"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
Username string `json:"username"`
SuperAdmin bool `json:"superAdmin"`
}

File diff suppressed because it is too large

View File

@@ -1,277 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"os"
"text/template"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
)
// handleK8SecretOutput processes the k8-secret output type by creating a Kubernetes secret
func handleK8SecretOutput(bootstrapResponse api.BootstrapInstanceResponse, k8SecretTemplate, k8SecretName, k8SecretNamespace string) error {
// Create in-cluster config
config, err := rest.InClusterConfig()
if err != nil {
return fmt.Errorf("failed to create in-cluster config: %v", err)
}
// Create Kubernetes client
clientset, err := kubernetes.NewForConfig(config)
if err != nil {
return fmt.Errorf("failed to create Kubernetes client: %v", err)
}
// Parse and execute the template to render the data/stringData section
tmpl, err := template.New("k8-secret-template").Funcs(template.FuncMap{
"encodeBase64": func(s string) string {
return base64.StdEncoding.EncodeToString([]byte(s))
},
}).Parse(k8SecretTemplate)
if err != nil {
return fmt.Errorf("failed to parse output template: %v", err)
}
var renderedDataSection bytes.Buffer
err = tmpl.Execute(&renderedDataSection, bootstrapResponse)
if err != nil {
return fmt.Errorf("failed to execute output template: %v", err)
}
// Parse the rendered template as JSON to validate it's valid
var dataSection map[string]interface{}
if err := json.Unmarshal(renderedDataSection.Bytes(), &dataSection); err != nil {
return fmt.Errorf("template output is not valid JSON: %v", err)
}
// Prepare the secret data and stringData maps
secretData := make(map[string][]byte)
secretStringData := make(map[string]string)
// Process the dataSection to separate data and stringData
if data, exists := dataSection["data"]; exists {
if dataMap, ok := data.(map[string]interface{}); ok {
for key, value := range dataMap {
if strValue, ok := value.(string); ok {
secretData[key] = []byte(strValue)
}
}
}
}
if stringData, exists := dataSection["stringData"]; exists {
if stringDataMap, ok := stringData.(map[string]interface{}); ok {
for key, value := range stringDataMap {
if strValue, ok := value.(string); ok {
secretStringData[key] = strValue
}
}
}
}
// Create the Kubernetes secret object
k8sSecret := &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: k8SecretName,
Namespace: k8SecretNamespace,
},
Type: corev1.SecretTypeOpaque,
Data: secretData,
StringData: secretStringData,
}
ctx := context.Background()
secretsClient := clientset.CoreV1().Secrets(k8SecretNamespace)
// Check if secret already exists
existingSecret, err := secretsClient.Get(ctx, k8SecretName, metav1.GetOptions{})
if err != nil {
if errors.IsNotFound(err) {
// Secret doesn't exist, create it
_, err = secretsClient.Create(ctx, k8sSecret, metav1.CreateOptions{})
if err != nil {
return fmt.Errorf("failed to create Kubernetes secret: %v", err)
}
log.Info().Msgf("Successfully created Kubernetes secret '%s' in namespace '%s'", k8SecretName, k8SecretNamespace)
} else {
return fmt.Errorf("failed to check if Kubernetes secret exists: %v", err)
}
} else {
// Secret exists, update it
k8sSecret.ObjectMeta.ResourceVersion = existingSecret.ObjectMeta.ResourceVersion
_, err = secretsClient.Update(ctx, k8sSecret, metav1.UpdateOptions{})
if err != nil {
return fmt.Errorf("failed to update Kubernetes secret: %v", err)
}
log.Info().Msgf("Successfully updated Kubernetes secret '%s' in namespace '%s'", k8SecretName, k8SecretNamespace)
}
return nil
}
var bootstrapCmd = &cobra.Command{
Use: "bootstrap",
Short: "Used to bootstrap your Infisical instance",
DisableFlagsInUseLine: true,
Example: "infisical bootstrap",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
email, _ := cmd.Flags().GetString("email")
if email == "" {
if envEmail, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_EMAIL_NAME); ok {
email = envEmail
}
}
if email == "" {
log.Error().Msg("email is required")
return
}
password, _ := cmd.Flags().GetString("password")
if password == "" {
if envPassword, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_PASSWORD_NAME); ok {
password = envPassword
}
}
if password == "" {
log.Error().Msg("password is required")
return
}
organization, _ := cmd.Flags().GetString("organization")
if organization == "" {
if envOrganization, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_ORGANIZATION_NAME); ok {
organization = envOrganization
}
}
if organization == "" {
log.Error().Msg("organization is required")
return
}
domain, _ := cmd.Flags().GetString("domain")
if domain == "" {
if envDomain, ok := os.LookupEnv("INFISICAL_API_URL"); ok {
domain = envDomain
}
}
if domain == "" {
log.Error().Msg("domain is required")
return
}
outputType, err := cmd.Flags().GetString("output")
if err != nil {
log.Error().Msgf("Failed to get output type: %v", err)
return
}
k8SecretTemplate, err := cmd.Flags().GetString("k8-secret-template")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-template: %v", err)
}
k8SecretName, err := cmd.Flags().GetString("k8-secret-name")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-name: %v", err)
}
k8SecretNamespace, err := cmd.Flags().GetString("k8-secret-namespace")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-namespace: %v", err)
}
if outputType == "k8-secret" {
if k8SecretTemplate == "" {
log.Error().Msg("k8-secret-template is required when using k8-secret output type")
return
}
if k8SecretName == "" {
log.Error().Msg("k8-secret-name is required when using k8-secret output type")
return
}
if k8SecretNamespace == "" {
log.Error().Msg("k8-secret-namespace is required when using k8-secret output type")
return
}
}
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
log.Error().Msgf("Failed to get resty client with custom headers: %v", err)
return
}
ignoreIfBootstrapped, err := cmd.Flags().GetBool("ignore-if-bootstrapped")
if err != nil {
log.Error().Msgf("Failed to get ignore-if-bootstrapped flag: %v", err)
return
}
httpClient.SetHeader("Accept", "application/json")
bootstrapResponse, err := api.CallBootstrapInstance(httpClient, api.BootstrapInstanceRequest{
Domain: util.AppendAPIEndpoint(domain),
Email: email,
Password: password,
Organization: organization,
})
if err != nil {
if !ignoreIfBootstrapped {
log.Error().Msgf("Failed to bootstrap instance: %v", err)
}
return
}
if outputType == "k8-secret" {
if err := handleK8SecretOutput(bootstrapResponse, k8SecretTemplate, k8SecretName, k8SecretNamespace); err != nil {
log.Error().Msgf("Failed to handle k8-secret output: %v", err)
return
}
} else {
responseJSON, err := json.MarshalIndent(bootstrapResponse, "", " ")
if err != nil {
log.Fatal().Msgf("Failed to convert response to JSON: %v", err)
return
}
fmt.Println(string(responseJSON))
}
},
}
func init() {
bootstrapCmd.Flags().String("domain", "", "The domain of your self-hosted Infisical instance")
bootstrapCmd.Flags().String("email", "", "The desired email address of the instance admin")
bootstrapCmd.Flags().String("password", "", "The desired password of the instance admin")
bootstrapCmd.Flags().String("organization", "", "The name of the organization to create for the instance")
bootstrapCmd.Flags().String("output", "", "The type of output to use for the bootstrap command (json or k8-secret)")
bootstrapCmd.Flags().Bool("ignore-if-bootstrapped", false, "Whether to continue on error if the instance has already been bootstrapped")
bootstrapCmd.Flags().String("k8-secret-template", "{\"data\":{\"token\":\"{{.Identity.Credentials.Token}}\"}}", "The template to use for rendering the Kubernetes secret (entire secret JSON)")
bootstrapCmd.Flags().String("k8-secret-namespace", "", "The namespace to create the Kubernetes secret in")
bootstrapCmd.Flags().String("k8-secret-name", "", "The name of the Kubernetes secret to create")
rootCmd.AddCommand(bootstrapCmd)
}

View File

@@ -1,49 +0,0 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
)
func TestFilterReservedEnvVars(t *testing.T) {
// Some test env vars.
// HOME and PATH are reserved keywords and should be filtered out.
// XDG_SESSION_ID and LC_CTYPE are reserved keyword prefixes and should be filtered out.
// The filter function only checks the keys of the env map, so we don't need to set any values.
env := map[string]models.SingleEnvironmentVariable{
"test": {},
"test2": {},
"HOME": {},
"PATH": {},
"XDG_SESSION_ID": {},
"LC_CTYPE": {},
}
// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(env)
if len(env) != 2 {
t.Errorf("Expected 2 secrets to be returned, got %d", len(env))
}
if _, ok := env["test"]; !ok {
t.Errorf("Expected test to be returned")
}
if _, ok := env["test2"]; !ok {
t.Errorf("Expected test2 to be returned")
}
if _, ok := env["HOME"]; ok {
t.Errorf("Expected HOME to be filtered out")
}
if _, ok := env["PATH"]; ok {
t.Errorf("Expected PATH to be filtered out")
}
if _, ok := env["XDG_SESSION_ID"]; ok {
t.Errorf("Expected XDG_SESSION_ID to be filtered out")
}
if _, ok := env["LC_CTYPE"]; ok {
t.Errorf("Expected LC_CTYPE to be filtered out")
}
}

View File

@@ -1,676 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"context"
"fmt"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/visualize"
// "github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
// "github.com/Infisical/infisical-merge/packages/visualize"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
infisicalSdk "github.com/infisical/go-sdk"
infisicalSdkModels "github.com/infisical/go-sdk/packages/models"
)
var dynamicSecretCmd = &cobra.Command{
Example: `infisical dynamic-secrets`,
Short: "Used to list dynamic secrets",
Use: "dynamic-secrets",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: getDynamicSecretList,
}
func getDynamicSecretList(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretRootCredentials, err := infisicalClient.DynamicSecrets().List(infisicalSdk.ListDynamicSecretsRootCredentialsOptions{
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret root credentials details")
}
visualize.PrintAllDynamicRootCredentials(dynamicSecretRootCredentials)
Telemetry.CaptureEvent("cli-command:dynamic-secrets", posthog.NewProperties().Set("count", len(dynamicSecretRootCredentials)).Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseCmd = &cobra.Command{
Example: `lease`,
Short: "Manage leases for dynamic secrets",
Use: "lease",
DisableFlagsInUseLine: true,
}
var dynamicSecretLeaseCreateCmd = &cobra.Command{
Example: `lease create <dynamic secret name>`,
Short: "Used to lease dynamic secret by name",
Use: "create [dynamic-secret]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: createDynamicSecretLeaseByName,
}
func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretRootCredentialName := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
ttl, err := cmd.Flags().GetString("ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project id with --projectId flag, or pass in project slug with --project-slug flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretRootCredential, err := infisicalClient.DynamicSecrets().GetByName(infisicalSdk.GetDynamicSecretRootCredentialByNameOptions{
DynamicSecretName: dynamicSecretRootCredentialName,
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret root credentials details")
}
// for Kubernetes dynamic secrets only
kubernetesNamespace, err := cmd.Flags().GetString("kubernetes-namespace")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
leaseConfig := map[string]any{}
if kubernetesNamespace != "" {
leaseConfig["namespace"] = kubernetesNamespace
}
leaseCredentials, _, leaseDetails, err := infisicalClient.DynamicSecrets().Leases().Create(infisicalSdk.CreateDynamicSecretLeaseOptions{
DynamicSecretName: dynamicSecretRootCredential.Name,
ProjectSlug: projectSlug,
TTL: ttl,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
Config: leaseConfig,
})
if err != nil {
util.HandleError(err, "To lease dynamic secret")
}
if plainOutput {
for key, value := range leaseCredentials {
if cred, ok := value.(string); ok {
fmt.Printf("%s=%s\n", key, cred)
}
}
} else {
fmt.Println("Dynamic Secret Leasing")
fmt.Printf("Name: %s\n", dynamicSecretRootCredential.Name)
fmt.Printf("Provider: %s\n", dynamicSecretRootCredential.Type)
fmt.Printf("Lease ID: %s\n", leaseDetails.Id)
fmt.Printf("Expire At: %s\n", leaseDetails.ExpireAt.Local().Format("02-Jan-2006 03:04:05 PM"))
visualize.PrintAllDyamicSecretLeaseCredentials(leaseCredentials)
}
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease", posthog.NewProperties().Set("type", dynamicSecretRootCredential.Type).Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseRenewCmd = &cobra.Command{
Example: `lease renew <lease id>`,
Short: "Used to renew a dynamic secret lease by its lease ID",
Use: "renew [lease-id]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: renewDynamicSecretLeaseByName,
}
func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretLeaseId := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
ttl, err := cmd.Flags().GetString("ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
leaseDetails, err := infisicalClient.DynamicSecrets().Leases().RenewById(infisicalSdk.RenewDynamicSecretLeaseOptions{
ProjectSlug: projectSlug,
TTL: ttl,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
LeaseId: dynamicSecretLeaseId,
})
if err != nil {
util.HandleError(err, "To renew dynamic secret lease")
}
fmt.Println("Successfully renewed dynamic secret lease")
visualize.PrintAllDynamicSecretLeases([]infisicalSdkModels.DynamicSecretLease{leaseDetails})
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease renew", posthog.NewProperties().Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseRevokeCmd = &cobra.Command{
Example: `lease delete <lease id>`,
Short: "Used to delete (revoke) a dynamic secret lease by its lease ID",
Use: "delete [lease-id]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: revokeDynamicSecretLeaseByName,
}
func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretLeaseId := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "Unable to fetch project details")
}
projectSlug = projectDetails.Slug
}
leaseDetails, err := infisicalClient.DynamicSecrets().Leases().DeleteById(infisicalSdk.DeleteDynamicSecretLeaseOptions{
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
LeaseId: dynamicSecretLeaseId,
})
if err != nil {
util.HandleError(err, "To revoke dynamic secret lease")
}
fmt.Println("Successfully revoked dynamic secret lease")
visualize.PrintAllDynamicSecretLeases([]infisicalSdkModels.DynamicSecretLease{leaseDetails})
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease revoke", posthog.NewProperties().Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseListCmd = &cobra.Command{
Example: `lease list <dynamic secret name>`,
Short: "Used to list leases of a dynamic secret by name",
Use: "list [dynamic-secret]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: listDynamicSecretLeaseByName,
}
func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretRootCredentialName := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretLeases, err := infisicalClient.DynamicSecrets().Leases().List(infisicalSdk.ListDynamicSecretLeasesOptions{
DynamicSecretName: dynamicSecretRootCredentialName,
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret leases list")
}
visualize.PrintAllDynamicSecretLeases(dynamicSecretLeases)
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease list", posthog.NewProperties().Set("lease-count", len(dynamicSecretLeases)).Set("version", util.CLI_VERSION))
}
func init() {
dynamicSecretLeaseCreateCmd.Flags().StringP("path", "p", "/", "The path from which the dynamic secret should be leased")
dynamicSecretLeaseCreateCmd.Flags().String("token", "", "Create dynamic secret leases using machine identity access token")
dynamicSecretLeaseCreateCmd.Flags().String("projectId", "", "Manually set the projectId to create the lease in when using machine identity based auth")
dynamicSecretLeaseCreateCmd.Flags().String("project-slug", "", "Manually set the project-slug to create the lease in")
dynamicSecretLeaseCreateCmd.Flags().String("ttl", "", "The lease lifetime TTL. If not provided, the default TTL of the dynamic secret will be used.")
dynamicSecretLeaseCreateCmd.Flags().Bool("plain", false, "Print leased credentials without formatting, one per line")
// Kubernetes specific flags
dynamicSecretLeaseCreateCmd.Flags().String("kubernetes-namespace", "", "The namespace to create the lease in. Only used for Kubernetes dynamic secrets.")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseCreateCmd)
dynamicSecretLeaseListCmd.Flags().StringP("path", "p", "/", "The path from which the dynamic secret is leased")
dynamicSecretLeaseListCmd.Flags().String("token", "", "Fetch dynamic secret leases using machine identity access token")
dynamicSecretLeaseListCmd.Flags().String("projectId", "", "Manually set the projectId to list leases from when using machine identity based auth")
dynamicSecretLeaseListCmd.Flags().String("project-slug", "", "Manually set the project-slug to list leases from")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseListCmd)
dynamicSecretLeaseRenewCmd.Flags().StringP("path", "p", "/", "The path from which the dynamic secret is leased")
dynamicSecretLeaseRenewCmd.Flags().String("token", "", "Renew dynamic secret leases using machine identity access token")
dynamicSecretLeaseRenewCmd.Flags().String("projectId", "", "Manually set the projectId to renew the lease in when using machine identity based auth")
dynamicSecretLeaseRenewCmd.Flags().String("project-slug", "", "Manually set the project-slug to renew the lease in")
dynamicSecretLeaseRenewCmd.Flags().String("ttl", "", "The lease lifetime TTL. If not provided, the default TTL of the dynamic secret will be used.")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseRenewCmd)
dynamicSecretLeaseRevokeCmd.Flags().StringP("path", "p", "/", "The path from which the dynamic secret is leased")
dynamicSecretLeaseRevokeCmd.Flags().String("token", "", "Delete dynamic secret leases using machine identity access token")
dynamicSecretLeaseRevokeCmd.Flags().String("projectId", "", "Manually set the projectId to revoke the lease from when using machine identity based auth")
dynamicSecretLeaseRevokeCmd.Flags().String("project-slug", "", "Manually set the project-slug to revoke the lease from")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseRevokeCmd)
dynamicSecretCmd.AddCommand(dynamicSecretLeaseCmd)
dynamicSecretCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
dynamicSecretCmd.Flags().String("projectId", "", "Manually set the projectId to fetch dynamic-secret when using machine identity based auth")
dynamicSecretCmd.Flags().String("project-slug", "", "Manually set the project-slug to fetch dynamic-secret from")
dynamicSecretCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken")
dynamicSecretCmd.Flags().String("path", "/", "Get dynamic secret within a folder path")
rootCmd.AddCommand(dynamicSecretCmd)
}
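The renew, revoke, and list handlers above all resolve the project slug the same way: an explicit --project-slug wins, otherwise the slug is looked up from the project ID. A minimal sketch of that pattern as a standalone helper is shown below; the helper itself is hypothetical, and the *resty.Client parameter type is an assumption based on what util.GetRestyClientWithCustomHeaders appears to return.

    func resolveProjectSlug(httpClient *resty.Client, projectId, projectSlug string) (string, error) {
        // Prefer an explicit slug when the caller already has one.
        if projectSlug != "" {
            return projectSlug, nil
        }
        if projectId == "" {
            return "", fmt.Errorf("either a project slug or a project id is required")
        }
        // Otherwise look the project up by ID and use its slug.
        projectDetails, err := api.CallGetProjectById(httpClient, projectId)
        if err != nil {
            return "", fmt.Errorf("unable to fetch project details: %w", err)
        }
        return projectDetails.Slug, nil
    }

Each handler could then call resolveProjectSlug once instead of repeating the lookup inline.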

View File

@@ -1,240 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"encoding/csv"
"encoding/json"
"fmt"
"os"
"strings"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"gopkg.in/yaml.v2"
)
const (
FormatDotenv string = "dotenv"
FormatJson string = "json"
FormatCSV string = "csv"
FormatYaml string = "yaml"
FormatDotEnvExport string = "dotenv-export"
)
// exportCmd represents the export command
var exportCmd = &cobra.Command{
Use: "export",
Short: "Used to export environment variables to a file",
DisableFlagsInUseLine: true,
Example: "infisical export --env=prod --format=json > secrets.json",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err)
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
format, err := cmd.Flags().GetString("format")
if err != nil {
util.HandleError(err)
}
templatePath, err := cmd.Flags().GetString("template")
if err != nil {
util.HandleError(err)
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
TagSlugs: tagSlugs,
WorkspaceId: projectId,
SecretsPath: secretsPath,
IncludeImport: includeImports,
ExpandSecretReferences: shouldExpandSecrets,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
if templatePath != "" {
sigChan := make(chan os.Signal, 1)
dynamicSecretLeases := NewDynamicSecretLeaseManager(sigChan)
newEtag := ""
accessToken := ""
if token != nil {
accessToken = token.Token
} else {
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err)
}
accessToken = loggedInUserDetails.UserCredentials.JTWToken
}
processedTemplate, err := ProcessTemplate(1, templatePath, nil, accessToken, "", &newEtag, dynamicSecretLeases)
if err != nil {
util.HandleError(err)
}
fmt.Print(processedTemplate.String())
return
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
var output string
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
secrets = util.SortSecretsByKeys(secrets)
output, err = formatEnvs(secrets, format)
if err != nil {
util.HandleError(err)
}
fmt.Print(output)
// Telemetry.CaptureEvent("cli-command:export", posthog.NewProperties().Set("secretsCount", len(secrets)).Set("version", util.CLI_VERSION))
},
}
func init() {
rootCmd.AddCommand(exportCmd)
exportCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to export secrets from")
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
}
// Format according to the format flag
func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string, error) {
switch strings.ToLower(format) {
case FormatDotenv:
return formatAsDotEnv(envs), nil
case FormatDotEnvExport:
return formatAsDotEnvExport(envs), nil
case FormatJson:
return formatAsJson(envs), nil
case FormatCSV:
return formatAsCSV(envs), nil
case FormatYaml:
return formatAsYaml(envs)
default:
return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml, FormatDotEnvExport})
}
}
// Format environment variables as a CSV file
func formatAsCSV(envs []models.SingleEnvironmentVariable) string {
csvString := &strings.Builder{}
writer := csv.NewWriter(csvString)
writer.Write([]string{"Key", "Value"})
for _, env := range envs {
writer.Write([]string{env.Key, env.Value})
}
writer.Flush()
return csvString.String()
}
// Format environment variables as a dotenv file
func formatAsDotEnv(envs []models.SingleEnvironmentVariable) string {
var dotenv string
for _, env := range envs {
dotenv += fmt.Sprintf("%s='%s'\n", env.Key, env.Value)
}
return dotenv
}
// Format environment variables as a dotenv file with export at the beginning
func formatAsDotEnvExport(envs []models.SingleEnvironmentVariable) string {
var dotenv string
for _, env := range envs {
dotenv += fmt.Sprintf("export %s='%s'\n", env.Key, env.Value)
}
return dotenv
}
func formatAsYaml(envs []models.SingleEnvironmentVariable) (string, error) {
m := make(map[string]string)
for _, env := range envs {
m[env.Key] = env.Value
}
yamlBytes, err := yaml.Marshal(m)
if err != nil {
return "", fmt.Errorf("failed to format environment variables as YAML: %w", err)
}
return string(yamlBytes), nil
}
// Format environment variables as a JSON file
func formatAsJson(envs []models.SingleEnvironmentVariable) string {
// Dump as a json array
json, err := json.Marshal(envs)
if err != nil {
log.Err(err).Msgf("Unable to marshal environment variables to JSON")
return ""
}
return string(json)
}
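The formatters above are pure functions over []models.SingleEnvironmentVariable, so they are easy to exercise in isolation. A small sketch, assuming it runs inside the same cmd package and using made-up keys and values purely for illustration:

    envs := []models.SingleEnvironmentVariable{
        {Key: "DB_HOST", Value: "localhost"},
        {Key: "DB_PASSWORD", Value: "s3cr3t"},
    }
    for _, format := range []string{FormatDotenv, FormatJson, FormatYaml} {
        // formatEnvs dispatches on the format string and returns the rendered output.
        out, err := formatEnvs(envs, format)
        if err != nil {
            log.Err(err).Msgf("unable to format as %s", format)
            continue
        }
        fmt.Println(out)
    }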

View File

@@ -1,79 +0,0 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
)
func TestFormatAsYaml(t *testing.T) {
tests := []struct {
name string
input []models.SingleEnvironmentVariable
expected string
}{
{
name: "Empty input",
input: []models.SingleEnvironmentVariable{},
expected: "{}\n",
},
{
name: "Single environment variable",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
},
expected: "KEY1: VALUE1\n",
},
{
name: "Multiple environment variables",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY2", Value: "VALUE2"},
{Key: "KEY3", Value: "VALUE3"},
},
expected: "KEY1: VALUE1\nKEY2: VALUE2\nKEY3: VALUE3\n",
},
{
name: "Overwriting duplicate keys",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY1", Value: "VALUE2"},
},
expected: "KEY1: VALUE2\n",
},
{
name: "Special characters in values",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "Value with spaces"},
{Key: "KEY2", Value: "Value:with:colons"},
{Key: "KEY3", Value: "Value\nwith\nnewlines"},
},
expected: "KEY1: Value with spaces\nKEY2: Value:with:colons\nKEY3: |-\n Value\n with\n newlines\n",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := formatAsYaml(tt.input)
assert.NoError(t, err)
// Compare the result with the expected output
assert.Equal(t, tt.expected, result)
// Additionally, parse the result back into a map to ensure it's valid YAML
var resultMap map[string]string
err = yaml.Unmarshal([]byte(result), &resultMap)
assert.NoError(t, err)
// Create an expected map from the input
expectedMap := make(map[string]string)
for _, env := range tt.input {
expectedMap[env.Key] = env.Value
}
assert.Equal(t, expectedMap, resultMap)
})
}
}
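The same table-driven shape extends naturally to the other formatters. A hypothetical companion test for formatAsCSV, sketched under the assumption that it lives in the same cmd package, could look like this:

    func TestFormatAsCSV(t *testing.T) {
        input := []models.SingleEnvironmentVariable{
            {Key: "KEY1", Value: "VALUE1"},
            {Key: "KEY2", Value: "VALUE2"},
        }
        // formatAsCSV always writes a "Key,Value" header row before the records.
        expected := "Key,Value\nKEY1,VALUE1\nKEY2,VALUE2\n"
        assert.Equal(t, expected, formatAsCSV(input))
    }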

View File

@@ -1,209 +0,0 @@
package cmd
import (
"errors"
"fmt"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/Infisical/infisical-merge/packages/visualize"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
)
var folderCmd = &cobra.Command{
Use: "folders",
Short: "Create, delete, and list folders",
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {
cmd.Help()
},
}
var getCmd = &cobra.Command{
Use: "get",
Short: "Get folders in a directory",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
foldersPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllFoldersParameters{
Environment: environmentName,
WorkspaceId: projectId,
FoldersPath: foldersPath,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
folders, err := util.GetAllFolders(request)
if err != nil {
util.HandleError(err, "Unable to get folders")
}
visualize.PrintAllFoldersDetails(folders, foldersPath)
Telemetry.CaptureEvent("cli-command:folders get", posthog.NewProperties().Set("folderCount", len(folders)).Set("version", util.CLI_VERSION))
},
}
var createCmd = &cobra.Command{
Use: "create",
Short: "Create a folder",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderName, err := cmd.Flags().GetString("name")
if err != nil {
util.HandleError(err, "Unable to parse name flag")
}
if folderName == "" {
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
params := models.CreateFolderParameters{
FolderName: folderName,
Environment: environmentName,
FolderPath: folderPath,
WorkspaceId: projectId,
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}
_, err = util.CreateFolder(params)
if err != nil {
util.HandleError(err, "Unable to create folder")
}
util.PrintSuccessMessage(fmt.Sprintf("folder named `%s` created in path %s", folderName, folderPath))
Telemetry.CaptureEvent("cli-command:folders create", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
var deleteCmd = &cobra.Command{
Use: "delete",
Short: "Delete a folder",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderName, err := cmd.Flags().GetString("name")
if err != nil {
util.HandleError(err, "Unable to parse name flag")
}
if folderName == "" {
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
params := models.DeleteFolderParameters{
FolderName: folderName,
WorkspaceId: projectId,
Environment: environmentName,
FolderPath: folderPath,
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}
_, err = util.DeleteFolder(params)
if err != nil {
util.HandleError(err, "Unable to delete folder")
}
util.PrintSuccessMessage(fmt.Sprintf("folder named `%s` deleted in path %s", folderName, folderPath))
Telemetry.CaptureEvent("cli-command:folders delete", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}

View File

@@ -1,318 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"sync/atomic"
"syscall"
"time"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/gateway"
"github.com/Infisical/infisical-merge/packages/util"
infisicalSdk "github.com/infisical/go-sdk"
"github.com/pkg/errors"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
func getInfisicalSdkInstance(cmd *cobra.Command) (infisicalSdk.InfisicalClientInterface, context.CancelFunc, error) {
ctx, cancel := context.WithCancel(cmd.Context())
infisicalClient := infisicalSdk.NewInfisicalClient(ctx, infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
})
token, err := util.GetInfisicalToken(cmd)
if err != nil {
cancel()
return nil, nil, err
}
// if the --token param is set, we use it directly for authentication
if token != nil {
infisicalClient.Auth().SetAccessToken(token.Token)
return infisicalClient, cancel, nil
}
// if the --token param is not set, we use the auth-method flag to determine the authentication method, and perform the appropriate login flow based on that
authMethod, err := util.GetCmdFlagOrEnv(cmd, "auth-method", []string{util.INFISICAL_AUTH_METHOD_NAME})
if err != nil {
cancel()
return nil, nil, err
}
authMethodValid, strategy := util.IsAuthMethodValid(authMethod, false)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", authMethod))
}
sdkAuthenticator := util.NewSdkAuthenticator(infisicalClient, cmd)
authStrategies := map[util.AuthStrategyType]func() (credential infisicalSdk.MachineIdentityCredential, e error){
util.AuthStrategy.UNIVERSAL_AUTH: sdkAuthenticator.HandleUniversalAuthLogin,
util.AuthStrategy.KUBERNETES_AUTH: sdkAuthenticator.HandleKubernetesAuthLogin,
util.AuthStrategy.AZURE_AUTH: sdkAuthenticator.HandleAzureAuthLogin,
util.AuthStrategy.GCP_ID_TOKEN_AUTH: sdkAuthenticator.HandleGcpIdTokenAuthLogin,
util.AuthStrategy.GCP_IAM_AUTH: sdkAuthenticator.HandleGcpIamAuthLogin,
util.AuthStrategy.AWS_IAM_AUTH: sdkAuthenticator.HandleAwsIamAuthLogin,
util.AuthStrategy.OIDC_AUTH: sdkAuthenticator.HandleOidcAuthLogin,
util.AuthStrategy.JWT_AUTH: sdkAuthenticator.HandleJwtAuthLogin,
}
_, err = authStrategies[strategy]()
if err != nil {
cancel()
return nil, nil, err
}
return infisicalClient, cancel, nil
}
var gatewayCmd = &cobra.Command{
Use: "gateway",
Short: "Run the Infisical gateway or manage its systemd service",
Long: "Run the Infisical gateway in the foreground or manage its systemd service installation. Use 'gateway install' to set up the systemd service.",
Example: `infisical gateway --token=<token>
sudo infisical gateway install --token=<token> --domain=<domain>`,
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
infisicalClient, cancelSdk, err := getInfisicalSdkInstance(cmd)
if err != nil {
util.HandleError(err, "unable to get infisical client")
}
defer cancelSdk()
var accessToken atomic.Value
accessToken.Store(infisicalClient.Auth().GetAccessToken())
if accessToken.Load().(string) == "" {
util.HandleError(errors.New("no access token found"))
}
Telemetry.CaptureEvent("cli-command:gateway", posthog.NewProperties().Set("version", util.CLI_VERSION))
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
sigStopCh := make(chan bool, 1)
ctx, cancelCmd := context.WithCancel(cmd.Context())
defer cancelCmd()
go func() {
<-sigCh
close(sigStopCh)
cancelCmd()
cancelSdk()
// If we get a second signal, force exit
<-sigCh
log.Warn().Msgf("Force exit triggered")
os.Exit(1)
}()
var gatewayInstance *gateway.Gateway
// Token refresh goroutine - runs every 10 seconds
go func() {
tokenRefreshTicker := time.NewTicker(10 * time.Second)
defer tokenRefreshTicker.Stop()
for {
select {
case <-tokenRefreshTicker.C:
if ctx.Err() != nil {
return
}
newToken := infisicalClient.Auth().GetAccessToken()
if newToken != "" && newToken != accessToken.Load().(string) {
accessToken.Store(newToken)
if gatewayInstance != nil {
gatewayInstance.UpdateIdentityAccessToken(newToken)
}
}
case <-ctx.Done():
return
}
}
}()
// Main gateway retry loop with proper context handling
retryTicker := time.NewTicker(5 * time.Second)
defer retryTicker.Stop()
for {
if ctx.Err() != nil {
log.Info().Msg("Shutting down gateway")
return
}
gatewayInstance, err = gateway.NewGateway(accessToken.Load().(string))
if err != nil {
util.HandleError(err)
}
if err = gatewayInstance.ConnectWithRelay(); err != nil {
if ctx.Err() != nil {
log.Info().Msg("Shutting down gateway")
return
}
log.Error().Msgf("Gateway connection error with relay: %s", err)
log.Info().Msg("Retrying connection in 5 seconds...")
select {
case <-retryTicker.C:
continue
case <-ctx.Done():
log.Info().Msg("Shutting down gateway")
return
}
}
err = gatewayInstance.Listen(ctx)
if ctx.Err() != nil {
log.Info().Msg("Gateway shutdown complete")
return
}
log.Error().Msgf("Gateway listen error: %s", err)
log.Info().Msg("Retrying connection in 5 seconds...")
select {
case <-retryTicker.C:
continue
case <-ctx.Done():
log.Info().Msg("Shutting down gateway")
return
}
}
},
}
var gatewayInstallCmd = &cobra.Command{
Use: "install",
Short: "Install and enable systemd service for the gateway (requires sudo)",
Long: "Install and enable systemd service for the gateway. Must be run with sudo on Linux.",
Example: "sudo infisical gateway install --token=<token> --domain=<domain>",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
if runtime.GOOS != "linux" {
util.HandleError(fmt.Errorf("systemd service installation is only supported on Linux"))
}
if os.Geteuid() != 0 {
util.HandleError(fmt.Errorf("systemd service installation requires root/sudo privileges"))
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if token == nil {
util.HandleError(errors.New("Token not found"))
}
domain, err := cmd.Flags().GetString("domain")
if err != nil {
util.HandleError(err, "Unable to parse domain flag")
}
if err := gateway.InstallGatewaySystemdService(token.Token, domain); err != nil {
util.HandleError(err, "Failed to install systemd service")
}
enableCmd := exec.Command("systemctl", "enable", "infisical-gateway")
if err := enableCmd.Run(); err != nil {
util.HandleError(err, "Failed to enable systemd service")
}
log.Info().Msg("Successfully installed and enabled infisical-gateway service")
log.Info().Msg("To start the service, run: sudo systemctl start infisical-gateway")
},
}
var gatewayUninstallCmd = &cobra.Command{
Use: "uninstall",
Short: "Uninstall and remove systemd service for the gateway (requires sudo)",
Long: "Uninstall and remove systemd service for the gateway. Must be run with sudo on Linux.",
Example: "sudo infisical gateway uninstall",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
if runtime.GOOS != "linux" {
util.HandleError(fmt.Errorf("systemd service installation is only supported on Linux"))
}
if os.Geteuid() != 0 {
util.HandleError(fmt.Errorf("systemd service installation requires root/sudo privileges"))
}
if err := gateway.UninstallGatewaySystemdService(); err != nil {
util.HandleError(err, "Failed to uninstall systemd service")
}
},
}
var gatewayRelayCmd = &cobra.Command{
Example: `infisical gateway relay`,
Short: "Used to run infisical gateway relay",
Use: "relay",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
relayConfigFilePath, err := cmd.Flags().GetString("config")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if relayConfigFilePath == "" {
util.HandleError(errors.New("Missing config file"))
}
gatewayRelay, err := gateway.NewGatewayRelay(relayConfigFilePath)
if err != nil {
util.HandleError(err, "Failed to initialize gateway")
}
err = gatewayRelay.Run()
if err != nil {
util.HandleError(err, "Failed to start gateway")
}
},
}
func init() {
gatewayCmd.Flags().String("token", "", "connect with Infisical using machine identity access token. if not provided, you must set the auth-method flag")
gatewayCmd.Flags().String("auth-method", "", "login method [universal-auth, kubernetes, azure, gcp-id-token, gcp-iam, aws-iam, oidc-auth]. if not provided, you must set the token flag")
gatewayCmd.Flags().String("client-id", "", "client id for universal auth")
gatewayCmd.Flags().String("client-secret", "", "client secret for universal auth")
gatewayCmd.Flags().String("machine-identity-id", "", "machine identity id for kubernetes, azure, gcp-id-token, gcp-iam, and aws-iam auth methods")
gatewayCmd.Flags().String("service-account-token-path", "", "service account token path for kubernetes auth")
gatewayCmd.Flags().String("service-account-key-file-path", "", "service account key file path for GCP IAM auth")
gatewayCmd.Flags().String("jwt", "", "JWT for jwt-based auth methods [oidc-auth, jwt-auth]")
gatewayInstallCmd.Flags().String("token", "", "Connect with Infisical using machine identity access token")
gatewayInstallCmd.Flags().String("domain", "", "Domain of your self-hosted Infisical instance")
gatewayRelayCmd.Flags().String("config", "", "Relay config yaml file path")
gatewayCmd.AddCommand(gatewayInstallCmd)
gatewayCmd.AddCommand(gatewayUninstallCmd)
gatewayCmd.AddCommand(gatewayRelayCmd)
rootCmd.AddCommand(gatewayCmd)
}
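The gateway command above keeps the identity access token in an atomic.Value so the refresh goroutine and the connection loop can share it without a mutex. The core of that pattern, reduced to a self-contained sketch in which getToken and onNewToken stand in for infisicalClient.Auth().GetAccessToken and gatewayInstance.UpdateIdentityAccessToken, is:

    func watchToken(ctx context.Context, getToken func() string, onNewToken func(string)) {
        var current atomic.Value
        current.Store(getToken())
        ticker := time.NewTicker(10 * time.Second)
        defer ticker.Stop()
        for {
            select {
            case <-ticker.C:
                // Push the token downstream only when it has actually changed.
                if newToken := getToken(); newToken != "" && newToken != current.Load().(string) {
                    current.Store(newToken)
                    onNewToken(newToken)
                }
            case <-ctx.Done():
                return
            }
        }
    }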

View File

@@ -1,195 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"encoding/json"
"fmt"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/manifoldco/promptui"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
// initCmd represents the init command
var initCmd = &cobra.Command{
Use: "init",
Short: "Used to connect your local project with Infisical project",
DisableFlagsInUseLine: true,
Example: "infisical init",
Args: cobra.ExactArgs(0),
PreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
if util.WorkspaceConfigFileExistsInCurrentPath() {
shouldOverride, err := shouldOverrideWorkspacePrompt()
if err != nil {
log.Error().Msg("Unable to parse your answer")
log.Debug().Err(err)
return
}
if !shouldOverride {
return
}
}
userCreds, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to get your login details")
}
if userCreds.LoginExpired {
userCreds = util.EstablishUserLoginSession()
}
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
httpClient.SetAuthToken(userCreds.UserCredentials.JTWToken)
organizationResponse, err := api.CallGetAllOrganizations(httpClient)
if err != nil {
util.HandleError(err, "Unable to pull organizations that belong to you")
}
organizations := organizationResponse.Organizations
organizationNames := util.GetOrganizationsNameList(organizationResponse)
prompt := promptui.Select{
Label: "Which Infisical organization would you like to select a project from?",
Items: organizationNames,
Size: 7,
}
index, _, err := prompt.Run()
if err != nil {
util.HandleError(err)
}
selectedOrganization := organizations[index]
tokenResponse, err := api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
if tokenResponse.MfaEnabled {
i := 1
for i < 6 {
mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
httpClient.SetAuthToken(tokenResponse.Token)
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
Email: userCreds.UserCredentials.Email,
MFAToken: mfaVerifyCode,
MFAMethod: tokenResponse.MfaMethod,
})
if requestError != nil {
util.HandleError(requestError)
break
} else if mfaErrorResponse != nil {
if mfaErrorResponse.Context.Code == "mfa_invalid" {
msg := fmt.Sprintf("Incorrect, verification code. You have %v attempts left", 5-i)
fmt.Println(msg)
if i == 5 {
util.PrintErrorMessageAndExit("No tries left, please try again in a bit")
break
}
}
if mfaErrorResponse.Context.Code == "mfa_expired" {
util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
break
}
i++
} else {
httpClient.SetAuthToken(verifyMFAresponse.Token)
tokenResponse, err = api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
break
}
}
}
if err != nil {
util.HandleError(err, "Unable to select organization")
}
// set the config jwt token to the new token
userCreds.UserCredentials.JTWToken = tokenResponse.Token
err = util.StoreUserCredsInKeyRing(&userCreds.UserCredentials)
httpClient.SetAuthToken(tokenResponse.Token)
if err != nil {
util.HandleError(err, "Unable to store your user credentials")
}
workspaceResponse, err := api.CallGetAllWorkSpacesUserBelongsTo(httpClient)
if err != nil {
util.HandleError(err, "Unable to pull projects that belong to you")
}
filteredWorkspaces, workspaceNames := util.GetWorkspacesInOrganization(workspaceResponse, selectedOrganization.ID)
prompt = promptui.Select{
Label: "Which of your Infisical projects would you like to connect this project to?",
Items: workspaceNames,
Size: 7,
}
index, _, err = prompt.Run()
if err != nil {
util.HandleError(err)
}
err = writeWorkspaceFile(filteredWorkspaces[index])
if err != nil {
util.HandleError(err)
}
Telemetry.CaptureEvent("cli-command:init", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
func init() {
rootCmd.AddCommand(initCmd)
}
func writeWorkspaceFile(selectedWorkspace models.Workspace) error {
workspaceFileToSave := models.WorkspaceConfigFile{
WorkspaceId: selectedWorkspace.ID,
}
marshalledWorkspaceFile, err := json.MarshalIndent(workspaceFileToSave, "", " ")
if err != nil {
return err
}
err = util.WriteToFile(util.INFISICAL_WORKSPACE_CONFIG_FILE_NAME, marshalledWorkspaceFile, 0600)
if err != nil {
return err
}
return nil
}
func shouldOverrideWorkspacePrompt() (bool, error) {
prompt := promptui.Select{
Label: "A workspace config file already exists here. Would you like to override? Select[Yes/No]",
Items: []string{"No", "Yes"},
}
_, result, err := prompt.Run()
if err != nil {
return false, err
}
return result == "Yes", nil
}
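writeWorkspaceFile persists only the selected workspace ID, so the resulting config file is a small JSON document. Elsewhere in the CLI the same file is read back through util.GetWorkSpaceFromFile; an equivalent manual read, sketched as a hypothetical helper and assuming the os package is imported, would be:

    func readWorkspaceFile() (models.WorkspaceConfigFile, error) {
        var workspaceConfig models.WorkspaceConfigFile
        data, err := os.ReadFile(util.INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
        if err != nil {
            return workspaceConfig, err
        }
        if err := json.Unmarshal(data, &workspaceConfig); err != nil {
            return workspaceConfig, err
        }
        // workspaceConfig.WorkspaceId now holds the project this directory is linked to.
        return workspaceConfig, nil
    }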

View File

@@ -1,103 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/util"
kmip "github.com/infisical/infisical-kmip"
"github.com/spf13/cobra"
)
var kmipCmd = &cobra.Command{
Example: `infisical kmip`,
Short: "Used to manage KMIP servers",
Use: "kmip",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
}
var kmipStartCmd = &cobra.Command{
Example: `infisical kmip start`,
Short: "Used to start a KMIP server",
Use: "start",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: startKmipServer,
}
func startKmipServer(cmd *cobra.Command, args []string) {
listenAddr, err := cmd.Flags().GetString("listen-address")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
identityAuthMethod, err := cmd.Flags().GetString("identity-auth-method")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
authMethodValid, strategy := util.IsAuthMethodValid(identityAuthMethod, false)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", identityAuthMethod))
}
var identityClientId string
var identityClientSecret string
if strategy == util.AuthStrategy.UNIVERSAL_AUTH {
identityClientId, err = util.GetCmdFlagOrEnv(cmd, "identity-client-id", []string{util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME})
if err != nil {
util.HandleError(err, "Unable to parse identity client ID")
}
identityClientSecret, err = util.GetCmdFlagOrEnv(cmd, "identity-client-secret", []string{util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME})
if err != nil {
util.HandleError(err, "Unable to parse identity client secret")
}
} else {
util.PrintErrorMessageAndExit(fmt.Sprintf("Unsupported login method: %s", identityAuthMethod))
}
serverName, err := cmd.Flags().GetString("server-name")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
certificateTTL, err := cmd.Flags().GetString("certificate-ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
hostnamesOrIps, err := cmd.Flags().GetString("hostnames-or-ips")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
kmip.StartServer(kmip.ServerConfig{
Addr: listenAddr,
InfisicalBaseAPIURL: config.INFISICAL_URL,
IdentityClientId: identityClientId,
IdentityClientSecret: identityClientSecret,
ServerName: serverName,
CertificateTTL: certificateTTL,
HostnamesOrIps: hostnamesOrIps,
})
}
func init() {
kmipStartCmd.Flags().String("listen-address", "localhost:5696", "The address for the KMIP server to listen on. Defaults to localhost:5696")
kmipStartCmd.Flags().String("identity-auth-method", string(util.AuthStrategy.UNIVERSAL_AUTH), "The auth method to use for authenticating the machine identity. Defaults to universal-auth.")
kmipStartCmd.Flags().String("identity-client-id", "", "Universal auth client ID of machine identity")
kmipStartCmd.Flags().String("identity-client-secret", "", "Universal auth client secret of machine identity")
kmipStartCmd.Flags().String("server-name", "kmip-server", "The name of the KMIP server")
kmipStartCmd.Flags().String("certificate-ttl", "1y", "The TTL duration for the server certificate")
kmipStartCmd.Flags().String("hostnames-or-ips", "", "Comma-separated list of hostnames or IPs")
kmipCmd.AddCommand(kmipStartCmd)
rootCmd.AddCommand(kmipCmd)
}
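Because startKmipServer simply forwards its flags into kmip.ServerConfig, the server can also be started programmatically. A sketch with placeholder values (every credential and hostname below is an illustrative assumption, not a working configuration):

    kmip.StartServer(kmip.ServerConfig{
        Addr:                 "localhost:5696",
        InfisicalBaseAPIURL:  config.INFISICAL_URL,
        IdentityClientId:     "<machine-identity-client-id>",
        IdentityClientSecret: "<machine-identity-client-secret>",
        ServerName:           "kmip-server",
        CertificateTTL:       "1y",
        HostnamesOrIps:       "kmip.internal.example.com",
    })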

File diff suppressed because it is too large

View File

@@ -1,35 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"os"
mcobra "github.com/muesli/mango-cobra"
"github.com/muesli/roff"
"github.com/spf13/cobra"
)
var manCmd = &cobra.Command{
Use: "man",
Short: "generates the manpages",
SilenceUsage: true,
DisableFlagsInUseLine: true,
Hidden: true,
Args: cobra.NoArgs,
RunE: func(cmd *cobra.Command, args []string) error {
manPage, err := mcobra.NewManPage(1, rootCmd)
if err != nil {
return err
}
_, err = fmt.Fprint(os.Stdout, manPage.Build(roff.NewDocument()))
return err
},
}
func init() {
rootCmd.AddCommand(manCmd)
}

View File

@@ -1,20 +0,0 @@
# MANAGED BY INFISICAL CLI (Do not modify): START
infisicalScanEnabled=$(git config --bool hooks.infisical-scan)
if [ "$infisicalScanEnabled" != "false" ]; then
infisical scan git-changes -v --staged
exitCode=$?
if [ $exitCode -eq 1 ]; then
echo "Commit blocked: Infisical scan has uncovered secrets in your git commit"
echo "To disable the Infisical scan precommit hook run the following command:"
echo ""
echo " git config hooks.infisical-scan false"
echo ""
exit 1
fi
else
echo 'Warning: infisical scan precommit disabled'
fi
# MANAGED BY INFISICAL CLI (Do not modify): END

View File

@@ -1,20 +0,0 @@
#!/bin/sh
# MANAGED BY INFISICAL CLI (Do not modify): START
infisicalScanEnabled=$(git config --bool hooks.infisical-scan)
if [ "$infisicalScanEnabled" != "false" ]; then
infisical scan git-changes -v --staged
exitCode=$?
if [ $exitCode -eq 1 ]; then
echo "Commit blocked: Infisical scan has uncovered secrets in your git commit"
echo "To disable the Infisical scan precommit hook run the following command:"
echo ""
echo " git config hooks.infisical-scan false"
echo ""
exit 1
fi
else
echo 'Warning: infisical scan precommit disabled'
fi
# MANAGED BY INFISICAL CLI (Do not modify): END

Some files were not shown because too many files have changed in this diff