Compare commits

...

72 Commits

Author SHA1 Message Date
Samuel Attard
796be5100a restore testing 2024-06-15 20:33:05 -07:00
Samuel Attard
f0923d8ca8 test a release build for fun 2024-06-15 18:51:36 -07:00
Samuel Attard
755a667b7c normalize test path 2024-06-15 16:42:59 -07:00
Samuel Attard
3e5e61ae96 maybe fix gn check? 2024-06-15 16:17:36 -07:00
Samuel Attard
00472c7e15 copy node.lib 2024-06-15 16:08:54 -07:00
Samuel Attard
60e769a12f msvs 2022 2024-06-15 13:14:50 -07:00
Samuel Attard
e2f9230952 Update move-artifacts.sh 2024-06-15 12:48:46 -07:00
Samuel Attard
2015489829 ez 2024-06-15 12:28:11 -07:00
Samuel Attard
3dab62a5b0 no symlinks for win restore 2024-06-15 12:04:54 -07:00
Samuel Attard
973ed113ec restore src cache win gn check 2024-06-15 11:49:34 -07:00
Samuel Attard
0b709a9fd4 fix src tar 2024-06-15 11:39:21 -07:00
Samuel Attard
8172dee9d1 fix windows gn check 2024-06-15 00:20:43 -07:00
Samuel Attard
69412cb5f1 cry sigh move on 2024-06-15 00:12:50 -07:00
Samuel Attard
684a029d92 maybe fix gn check 2024-06-14 23:56:30 -07:00
Samuel Attard
f32e8766a7 build: fix artifact src thingy 2024-06-14 23:47:09 -07:00
Samuel Attard
41447af5bb Merge remote-tracking branch 'origin/main' into win-cross 2024-06-14 23:37:09 -07:00
Samuel Attard
4a92ec0e90 build: add skip-windows flag 2024-06-14 23:36:14 -07:00
Samuel Attard
af4eaada02 build: run tests on windows normally 2024-06-14 23:22:05 -07:00
Samuel Attard
7ab051c083 build: fix dirs arr 2024-06-14 22:05:58 -05:00
Samuel Attard
062580c4a5 fix deref 2024-06-14 17:44:50 -05:00
Samuel Attard
6af09fc7b8 deref symlink 2024-06-14 16:23:58 -05:00
Samuel Attard
258da5e0d5 chore: set build-type 2024-06-14 15:57:32 -05:00
Samuel Attard
218b836e28 test no need split 2024-06-14 15:34:23 -05:00
Shelley Vohr
6bf83b389b build: add GN check step (#42508)
* build: add GN check step

* Remove extra loggin

* Fix BUILD_TOOLS_SHA

* Fix concurrency group naming
2024-06-14 15:29:11 -05:00
Samuel Attard
534e4c1236 halp 2024-06-14 15:10:43 -05:00
Samuel Attard
c958cccd22 build: windows tests on windows hosts 2024-06-14 14:53:41 -05:00
Samuel Attard
7f9f9f34b5 Merge remote-tracking branch 'origin/main' into win-cross 2024-06-14 14:50:54 -05:00
Samuel Attard
e9b8188e9f build: fix artifact platform 2024-06-14 14:49:44 -05:00
Samuel Attard
2c985366c8 build: fix dubious ownership error in version bumper spec (#42509) 2024-06-14 13:47:58 -05:00
Samuel Attard
89d77e39f4 foo 2024-06-14 12:15:47 -05:00
Samuel Attard
1a0bac3328 Merge remote-tracking branch 'origin/main' into win-cross 2024-06-14 12:13:09 -05:00
Samuel Attard
f6fc8b204b target-platform not uname 2024-06-14 11:46:22 -05:00
Samuel Attard
6d6f7d0ee6 force snapshot 2024-06-14 11:22:06 -05:00
Keeley Hammond
429d50bb18 build: add production-release environment to publish jobs (#42483)
* build: move publish workflows to use production environment

* build: pass in target_cpu to GN_EXTRA_ARGS on Linux
2024-06-14 11:14:38 -05:00
Keeley Hammond
5b98be3ccb build: add docs-only path gating to GHA (#42499)
* build: add docs only workflow

* build: add path filtering action

* build: add actions/checkout

* build: explicitly set shell to bash -_-

* build: why regex when you could do something much dumber

* build: correct conditionals

* test: (do not merge) add base for testing

* test: remove git base (used for testing)
2024-06-14 11:08:41 -05:00
John Kleinschmidt
5397560ab3 build: adjust concurrency for release branches (#42507)
Co-authored-by: Shelley Vohr <shelley.vohr@gmail.com>
2024-06-14 11:08:21 -05:00
Samuel Attard
2649a5bbec check right manifest 2024-06-14 11:01:14 -05:00
Samuel Attard
c2c3673e8a build: fix linux tests (#42496)
* build: use runuser for electron spec runner

* chown

* run tests in priv

* fixed

* build: setup testing on arm for GHA

* no build-tools for test

* start xvfb for the right user

* no more gn-build-type

* debug env

* ue xvfb-run

* use 8 core for node tests

* build: do test sharding on linux

* fix: disable hung node test

* build: index splits are hard

* build: use --init to reap children

* allow write junit

* use custom xvfb wrapper

* pipefail

* dont kill xvfb, its already dead

---------

Co-authored-by: John Kleinschmidt <jkleinsc@electronjs.org>
2024-06-14 10:57:28 -05:00
Shelley Vohr
c1094013eb build: allow running only mac, lint, or linux (#42504)
build: allow running only mac or linux
2024-06-14 10:08:04 -05:00
Keeley Hammond
8650682c5b build: revert removal of CIPD patch from depot tools (#42500)
Revert "build: remove the CIPD patch from depot tools (#42484)"

This reverts commit a0a8bd2222.
2024-06-14 09:11:30 -05:00
Samuel Attard
72b5e59dce build: checkout should invalidate src cache 2024-06-14 09:01:50 -05:00
Samuel Attard
4cd51a13b0 nuke vs toolchain 2024-06-14 08:49:46 -05:00
Shelley Vohr
422511753f build: use BUILD_TYPE from env (#42498)
build: use BUILD_TYPE from env
2024-06-14 06:47:18 -05:00
Samuel Attard
5677aab327 restore win x 2024-06-14 02:40:36 -05:00
Samuel Attard
d6ad4ca931 crying 2024-06-14 02:35:30 -05:00
Samuel Attard
2d756bfa0b make fuse fs 2024-06-14 02:28:28 -05:00
Samuel Attard
821feffdd3 fix: add e d for toolchain vars 2024-06-14 01:39:02 -05:00
Samuel Attard
6c47012326 fix: add e d for toolchain vars 2024-06-14 01:26:21 -05:00
Samuel Attard
61dfa7ebc3 build: force enable win toolchain 2024-06-14 01:04:52 -05:00
Samuel Attard
86dfd98746 build: add fuse cap to docker windows cross checkout 2024-06-14 00:44:43 -05:00
Samuel Attard
8f7c6d7a81 build: try win/cross on gha 2024-06-14 00:31:49 -05:00
Samuel Attard
9f862af743 build: add lint GHA job (#42494) 2024-06-13 22:14:49 -05:00
John Kleinschmidt
16b2a09787 build: only load SDK for mac builds (#42485) 2024-06-13 17:49:38 -05:00
Keeley Hammond
ad8e89de01 build: fix v8 toolchain for macOS x64 (#42492)
build: do better
2024-06-13 17:14:57 -05:00
Samuel Attard
b7aad14e8d build: run gha on tag not branch (#42490) 2024-06-13 17:14:45 -05:00
Keeley Hammond
c41ded2e89 build: remove gclient-extra-args from publish jobs (#42491)
build: remove gclient-extra-args from publish ymls
2024-06-13 17:11:47 -05:00
Keeley Hammond
4701795dc0 build: fix datetime module for Linux Publish (#42489) 2024-06-13 16:36:45 -05:00
Samuel Attard
cfdc623c4d build: pin and dedupe build image sha (#42488) 2024-06-13 16:35:47 -05:00
Keeley Hammond
9e066e7b74 build: add v8_toolchain to darwin/x64 (#42487)
build: add v8_toolchain to darwin/x64
2024-06-13 16:35:20 -05:00
Shelley Vohr
9fe1b05025 build: upload separate artifact for src files (#42486) 2024-06-13 16:35:13 -05:00
Keeley Hammond
a0a8bd2222 build: remove the CIPD patch from depot tools (#42484)
* build: try to remove the CIPD patch from depot tools

* build: remove cipd patch from GHA
2024-06-13 16:06:16 -05:00
Samuel Attard
b92a4023c1 build: unify pipelines (#42482) 2024-06-13 16:02:38 -05:00
Tomáš Hübelbauer
15c404a3c8 docs: fix the Apple HIG link for Dock context menu (#42450)
Fix the Apple human interface guidelines link for Dock context menu

The link seems to have changed. This is the up to date link.
2024-06-13 15:24:05 -05:00
Shelley Vohr
0affad3be6 build: add several missing test steps for GHA (#42479) 2024-06-13 14:26:20 -05:00
Shelley Vohr
30885e5f9f build: add Linux GHA test step (#42460)
* build: add Linux GHA test step

* Switch to medium AKS runners

* Add missing BUILD_TYPE to restore-artifact

* Fix untar to current dir

* Remove known hosts logic

* Add missing Node.js headers step

* Fix for active SSH sessions

* Fix storing artifacts

* Build on x64 for test
2024-06-13 14:14:33 -05:00
Keeley Hammond
75d0e725be build: fix conditional for sas token (#42481) 2024-06-13 09:43:06 -05:00
Keeley Hammond
42266546eb build: move hunspell generation to Linux (#42480) 2024-06-13 09:37:36 -05:00
Samuel Attard
f72096194f build: reuse checkout steps across mac and linux (#42475) 2024-06-13 09:09:35 -05:00
Samuel Attard
1e219e457b build: generate deps hash quietly (#42476) 2024-06-13 01:43:33 -05:00
Keeley Hammond
a4f201a5f3 build: add needed steps/tweaks to Linux publish job (#42477)
* build: add libcxx to Linux publish

* build: temp change ref to branch

* build: remove hunspell dictionaries

* build: modify release build script for linux

* build: switch back to main
2024-06-12 23:13:17 -05:00
Shelley Vohr
5abefc5dc3 build: use GN_EXTRA_ARGS on macOS (#42473)
* build: use GN_EXTRA_ARGS on macOS

* Switch back to main
2024-06-12 22:12:30 -05:00
Keeley Hammond
9d6c894e89 build: save/restore cache on Linux builds (#42472)
* build: update checkout and cache restore for Linux

* build: clean up variables

* build: temporarily set ref to branch

* build: actually check if cache exists or not and correctly store it

* build: correct cache paths

* build: restore electron_node/deps/v8

* build: restore sha to @main, not branch

* build: remove cache_key and use $DEPSHASH

* build: remove sas, backup_cache logic

* build: revert openssl deletion
2024-06-12 17:41:07 -05:00
38 changed files with 2030 additions and 1361 deletions


@@ -0,0 +1,204 @@
name: 'Build Electron'
description: 'Builds Electron & Friends'
inputs:
target-arch:
description: 'Target arch'
required: true
target-platform:
description: 'Target platform'
required: true
artifact-platform:
description: 'Artifact platform, should be linux, darwin or mas'
required: true
step-suffix:
description: 'Suffix for build steps'
required: false
default: ''
is-release:
description: 'Is release build'
required: true
generate-symbols:
description: 'Generate symbols'
required: true
upload-to-storage:
description: 'Upload to storage'
required: true
runs:
using: "composite"
steps:
- name: Set GN_EXTRA_ARGS for MacOS x64 Builds
shell: bash
if: ${{ inputs.target-arch == 'x64' && inputs.target-platform == 'macos' }}
run: |
GN_APPENDED_ARGS="$GN_EXTRA_ARGS v8_snapshot_toolchain=\"//build/toolchain/mac:clang_x64\""
echo "GN_EXTRA_ARGS=$GN_APPENDED_ARGS" >> $GITHUB_ENV
- name: Set GN_EXTRA_ARGS for Win/cross
shell: bash
if: ${{ inputs.target-platform == 'windows' }}
run: |
GN_APPENDED_ARGS="$GN_EXTRA_ARGS use_v8_context_snapshot=true target_os=\"win\""
echo "GN_EXTRA_ARGS=$GN_APPENDED_ARGS" >> $GITHUB_ENV
- name: Build Electron ${{ inputs.step-suffix }}
shell: bash
run: |
rm -rf "src/out/Default/Electron Framework.framework"
rm -rf src/out/Default/Electron*.app
cd src/electron
# TODO(codebytere): remove this once we figure out why .git/packed-refs is initially missing
git pack-refs
cd ..
if [ "`uname`" = "Darwin" ]; then
ulimit -n 10000
sudo launchctl limit maxfiles 65536 200000
fi
NINJA_SUMMARIZE_BUILD=1 e build -j $NUMBER_OF_NINJA_PROCESSES
cp out/Default/.ninja_log out/electron_ninja_log
node electron/script/check-symlinks.js
- name: Build Electron dist.zip ${{ inputs.step-suffix }}
shell: bash
run: |
cd src
e build electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ env.CHECK_DIST_MANIFEST }}" = "true" ]; then
target_os=${{ inputs.target-platform == 'linux' && 'linux' || (inputs.target-platform == 'windows' && 'win' || 'mac') }}
if [ "${{ inputs.artifact-platform }}" = "mas" ]; then
target_os="${target_os}_mas"
fi
electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.$target_os.${{ inputs.target-arch }}.manifest
fi
- name: Build Mksnapshot ${{ inputs.step-suffix }}
shell: bash
run: |
cd src
e build electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
# Remove unused args from mksnapshot_args
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
SEDOPTION="-i ''"
fi
sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
sed $SEDOPTION '/The gn arg use_goma=true .*/d' out/Default/mksnapshot_args
if [ "${{ inputs.target-platform }}" = "linux" ]; then
if [ "${{ inputs.target-arch }}" = "arm" ]; then
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/v8_context_snapshot_generator
elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/v8_context_snapshot_generator
else
electron/script/strip-binaries.py --file $PWD/out/Default/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/v8_context_snapshot_generator
fi
fi
e build electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
- name: Generate Cross-Arch Snapshot (arm/arm64) ${{ inputs.step-suffix }}
shell: bash
if: ${{ (inputs.target-arch == 'arm' || inputs.target-arch == 'arm64') && inputs.target-platform == 'linux' }}
run: |
cd src
if [ "${{ inputs.target-arch }}" = "arm" ]; then
MKSNAPSHOT_PATH="clang_x86_v8_arm"
elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
MKSNAPSHOT_PATH="clang_x64_v8_arm64"
fi
cp "out/Default/$MKSNAPSHOT_PATH/mksnapshot" out/Default
cp "out/Default/$MKSNAPSHOT_PATH/v8_context_snapshot_generator" out/Default
cp "out/Default/$MKSNAPSHOT_PATH/libffmpeg.so" out/Default
python3 electron/script/verify-mksnapshot.py --source-root "$PWD" --build-dir out/Default --create-snapshot-only
mkdir cross-arch-snapshots
cp out/Default-mksnapshot-test/*.bin cross-arch-snapshots
# Clean up so that ninja does not get confused
rm -f out/Default/libffmpeg.so
- name: Build Chromedriver ${{ inputs.step-suffix }}
shell: bash
run: |
cd src
e build electron:electron_chromedriver -j $NUMBER_OF_NINJA_PROCESSES
e build electron:electron_chromedriver_zip
- name: Build Node.js headers ${{ inputs.step-suffix }}
shell: bash
run: |
cd src
e build electron:node_headers
- name: Generate & Zip Symbols ${{ inputs.step-suffix }}
shell: bash
run: |
# Generate breakpad symbols on release builds
if [ "${{ inputs.generate-symbols }}" = "true" ]; then
e build electron:electron_symbols
fi
cd src
export BUILD_PATH="$(pwd)/out/Default"
e build electron:licenses
e build electron:electron_version_file
if [ "${{ inputs.is-release }}" = "true" ]; then
DELETE_DSYMS_AFTER_ZIP=1 electron/script/zip-symbols.py -b $BUILD_PATH
else
electron/script/zip-symbols.py -b $BUILD_PATH
fi
- name: Generate FFMpeg ${{ inputs.step-suffix }}
shell: bash
if: ${{ inputs.is-release == 'true' }}
run: |
cd src
gn gen out/ffmpeg --args="import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true $GN_EXTRA_ARGS"
autoninja -C out/ffmpeg electron:electron_ffmpeg_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate Hunspell Dictionaries ${{ inputs.step-suffix }}
shell: bash
if: ${{ inputs.is-release == 'true' && inputs.target-platform == 'linux' }}
run: |
cd src
autoninja -C out/Default electron:hunspell_dictionaries_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate Libcxx ${{ inputs.step-suffix }}
shell: bash
if: ${{ inputs.is-release == 'true' && inputs.target-platform == 'linux' }}
run: |
cd src
autoninja -C out/Default electron:libcxx_headers_zip -j $NUMBER_OF_NINJA_PROCESSES
autoninja -C out/Default electron:libcxxabi_headers_zip -j $NUMBER_OF_NINJA_PROCESSES
autoninja -C out/Default electron:libcxx_objects_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate TypeScript Definitions ${{ inputs.step-suffix }}
if: ${{ inputs.is-release == 'true' }}
shell: bash
run: |
cd src/electron
node script/yarn create-typescript-definitions
# TODO(vertedinde): These uploads currently point to a different Azure bucket & GitHub Repo
- name: Publish Electron Dist ${{ inputs.step-suffix }}
if: ${{ inputs.is-release == 'true' }}
shell: bash
run: |
rm -rf src/out/Default/obj
cd src/electron
if [ "${{ inputs.upload-to-storage }}" = "1" ]; then
echo 'Uploading Electron release distribution to Azure'
script/release/uploaders/upload.py --verbose --upload_to_storage
else
echo 'Uploading Electron release distribution to GitHub releases'
script/release/uploaders/upload.py --verbose
fi
# The current generated_artifacts_<< artifact.key >> name was taken from CircleCI
# to ensure we don't break anything, but we may be able to improve that.
- name: Move all Generated Artifacts to Upload Folder ${{ inputs.step-suffix }}
shell: bash
run: ./src/electron/script/actions/move-artifacts.sh
- name: Upload Generated Artifacts ${{ inputs.step-suffix }}
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: generated_artifacts_${{ inputs.artifact-platform }}_${{ env.TARGET_ARCH }}
path: ./generated_artifacts_${{ inputs.artifact-platform }}_${{ env.TARGET_ARCH }}
- name: Upload Src Artifacts ${{ inputs.step-suffix }}
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: src_artifacts_${{ inputs.artifact-platform }}_${{ env.TARGET_ARCH }}
path: ./src_artifacts_${{ inputs.artifact-platform }}_${{ env.TARGET_ARCH }}
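For orientation, a minimal sketch of how a workflow job might consume the composite action above. The input names mirror the declarations in this file and the runner label appears elsewhere in this change; the action path, job name, and step layout are illustrative assumptions (the cache-restore steps a real build job would need first are omitted).

  build-linux-x64:
    runs-on: aks-linux-large
    steps:
      - name: Checkout Electron
        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          path: src/electron
          fetch-depth: 0
      - name: Build Electron (x64)
        # assumed path: the diff header does not show where this action file lives
        uses: ./src/electron/.github/actions/build-electron
        with:
          target-platform: linux
          target-arch: x64
          artifact-platform: linux
          is-release: 'false'
          generate-symbols: 'false'
          upload-to-storage: '0'
          step-suffix: '(x64)'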

.github/actions/checkout/action.yml

@@ -0,0 +1,164 @@
name: 'Checkout'
description: 'Checks out Electron and stores it in the AKS Cache'
inputs:
generate-sas-token:
description: 'Whether to generate and persist a SAS token for the item in the cache'
required: false
default: 'false'
runs:
using: "composite"
steps:
- name: Set GIT_CACHE_PATH to make gclient to use the cache
shell: bash
run: |
echo "GIT_CACHE_PATH=$(pwd)/git-cache" >> $GITHUB_ENV
- name: Install Dependencies
shell: bash
run: |
cd src/electron
node script/yarn install
- name: Load Build Tools
shell: bash
run: |
export BUILD_TOOLS_SHA=2f67e10b9b6b5700b1c7940df412b0345257d9ae
npm i -g @electron/build-tools
e auto-update disable
- name: Get Depot Tools
shell: bash
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Remove swift-format dep from cipd on macOS until we send a patch upstream.
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
shell: bash
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
shell: bash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Generate SAS Key
if: ${{ inputs.generate-sas-token == 'true' }}
shell: bash
run: |
curl --unix-socket /var/run/sas/sas.sock --fail "http://foo/$DEPSHASH.tar" > sas-token
- name: Save SAS Key
if: ${{ inputs.generate-sas-token == 'true' }}
uses: actions/cache/save@v4
with:
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Check If Cache Exists
id: check-cache
shell: bash
run: |
cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ]; then
echo "cache_exists=false" >> $GITHUB_OUTPUT
echo "Cache Does Not Exist for $DEPSHASH"
else
echo "cache_exists=true" >> $GITHUB_OUTPUT
echo "Cache Already Exists for $DEPSHASH, Skipping.."
fi
- name: Gclient Sync
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
e d gclient config \
--name "src/electron" \
--unmanaged \
${GCLIENT_EXTRA_ARGS} \
"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
if [ "$TARGET_OS" != "" ]; then
echo "target_os=['$TARGET_OS']" >> ./.gclient
fi
ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 e d gclient sync --with_branch_heads --with_tags -vvvvv
if [ "${{ inputs.is-release }}" != "true" ]; then
# Re-export all the patches to check if there were changes.
python3 src/electron/script/export_all_patches.py src/electron/patches/config.json
cd src/electron
git update-index --refresh || true
if ! git diff-index --quiet HEAD --; then
# There are changes to the patches. Make a git commit with the updated patches
git add patches
GIT_COMMITTER_NAME="PatchUp" GIT_COMMITTER_EMAIL="73610968+patchup[bot]@users.noreply.github.com" git commit -m "chore: update patches" --author="PatchUp <73610968+patchup[bot]@users.noreply.github.com>"
# Export it
mkdir -p ../../patches
git format-patch -1 --stdout --keep-subject --no-stat --full-index > ../../patches/update-patches.patch
if (node ./script/push-patch.js 2> /dev/null > /dev/null); then
echo
echo "======================================================================"
echo "Changes to the patches when applying, we have auto-pushed the diff to the current branch"
echo "A new CI job will kick off shortly"
echo "======================================================================"
exit 1
else
echo
echo "======================================================================"
echo "There were changes to the patches when applying."
echo "Check the CI artifacts for a patch you can apply to fix it."
echo "======================================================================"
exit 1
fi
fi
fi
# delete all .git directories under src/ except for
# third_party/angle/ and third_party/dawn/ because of build time generation of files
# gen/angle/commit.h depends on third_party/angle/.git/HEAD
# https://chromium-review.googlesource.com/c/angle/angle/+/2074924
# and dawn/common/Version_autogen.h depends on third_party/dawn/.git/HEAD
# https://dawn-review.googlesource.com/c/dawn/+/83901
# TODO: maybe better to always leave out */.git/HEAD file for all targets ?
- name: Delete .git directories under src to free space
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
cd src
( find . -type d -name ".git" -not -path "./third_party/angle/*" -not -path "./third_party/dawn/*" -not -path "./electron/*" ) | xargs rm -rf
- name: Minimize Cache Size for Upload
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
rm -rf src/android_webview
rm -rf src/ios/chrome
rm -rf src/third_party/blink/web_tests
rm -rf src/third_party/blink/perf_tests
rm -rf src/chrome/test/data/xr/webvr_info
rm -rf src/third_party/angle/third_party/VK-GL-CTS/src
rm -rf src/third_party/swift-toolchain
rm -rf src/third_party/swiftshader/tests/regres/testlists
rm -rf src/electron
- name: Compress Src Directory
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
echo "Uncompressed src size: $(du -sh src | cut -f1 -d' ')"
tar -cf $DEPSHASH.tar src
echo "Compressed src to $(du -sh $DEPSHASH.tar | cut -f1 -d' ')"
cp ./$DEPSHASH.tar /mnt/cross-instance-cache/
- name: Persist Src Cache
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
final_cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking path: $final_cache_path"
if [ ! -f "$final_cache_path" ]; then
echo "Cache key not found"
exit 1
else
echo "Cache key persisted in $final_cache_path"
fi
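The v1-src-cache key generated above is what every later job uses to locate the tarball on the cross-instance share; as a quick sanity check, the same value can be recomputed by hand with the commands from the Generate DEPS Hash step (the final ls is illustrative and assumes a runner with /mnt/cross-instance-cache mounted):

  node src/electron/script/generate-deps-hash.js
  DEPSHASH="v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')"
  ls -lh "/mnt/cross-instance-cache/$DEPSHASH.tar"   # present only if a checkout job already persisted it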


@@ -0,0 +1,61 @@
name: 'Fix Sync macOS'
description: 'Checks out Electron and stores it in the AKS Cache'
runs:
using: "composite"
steps:
- name: Fix Sync
shell: bash
# This step is required to correct for differences between "gclient sync"
# on Linux and the expected state on macOS. This requires:
# 1. Fixing Clang Install (wrong binary)
# 2. Fixing esbuild (wrong binary)
# 3. Fixing rustc (wrong binary)
# 4. Fixing gn (wrong binary)
# 5. Fix reclient (wrong binary)
# 6. Fixing dsymutil (wrong binary)
# 7. Ensuring we are using the correct ninja and adding it to PATH
# 8. Fixing angle (wrong remote)
run : |
SEDOPTION="-i ''"
rm -rf src/third_party/llvm-build
python3 src/tools/clang/scripts/update.py
echo 'infra/3pp/tools/esbuild/${platform}' `gclient getdep --deps-file=src/third_party/devtools-frontend/src/DEPS -r 'third_party/esbuild:infra/3pp/tools/esbuild/${platform}'` > esbuild_ensure_file
# Remove extra output from calling gclient getdep which always calls update_depot_tools
sed -i '' "s/Updating depot_tools... //g" esbuild_ensure_file
cipd ensure --root src/third_party/devtools-frontend/src/third_party/esbuild -ensure-file esbuild_ensure_file
rm -rf src/third_party/rust-toolchain
python3 src/tools/rust/update_rust.py
# Prevent calling gclient getdep which always calls update_depot_tools
echo 'gn/gn/mac-${arch}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/mac:gn/gn/mac-${arch}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/mac -ensure-file gn_ensure_file
# Prevent calling gclient getdep which always calls update_depot_tools
echo 'infra/rbe/client/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/reclient:infra/rbe/client/${platform}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/reclient -ensure-file gn_ensure_file
python3 src/buildtools/reclient_cfgs/configure_reclient_cfgs.py --rbe_instance "projects/rbe-chrome-untrusted/instances/default_instance" --reproxy_cfg_template reproxy.cfg.template --rewrapper_cfg_project "" --skip_remoteexec_cfg_fetch
if [ "${{ env.TARGET_ARCH }}" == "arm64" ]; then
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.arm64.sha1
else
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.x64.sha1
fi
python3 src/third_party/depot_tools/download_from_google_storage.py --no_resume --no_auth --bucket chromium-browser-clang -s $DSYM_SHA_FILE -o src/tools/clang/dsymutil/bin/dsymutil
echo 'infra/3pp/tools/ninja/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/third_party/ninja:infra/3pp/tools/ninja/${platform}'` > ninja_ensure_file
sed $SEDOPTION "s/Updating depot_tools... //g" ninja_ensure_file
cipd ensure --root src/third_party/ninja -ensure-file ninja_ensure_file
echo "$(pwd)/src/third_party/ninja" >> $GITHUB_PATH
cd src/third_party/angle
rm -f .git/objects/info/alternates
git remote set-url origin https://chromium.googlesource.com/angle/angle.git
cp .git/config .git/config.backup
git remote remove origin
mv .git/config.backup .git/config
git fetch


@@ -0,0 +1,65 @@
name: 'Free Space macOS'
description: 'Checks out Electron and stores it in the AKS Cache'
runs:
using: "composite"
steps:
- name: Free Space on MacOS
shell: bash
run: |
sudo mkdir -p $TMPDIR/del-target
tmpify() {
if [ -d "$1" ]; then
sudo mv "$1" $TMPDIR/del-target/$(echo $1|shasum -a 256|head -n1|cut -d " " -f1)
fi
}
strip_universal_deep() {
opwd=$(pwd)
cd $1
f=$(find . -perm +111 -type f)
for fp in $f
do
if [[ $(file "$fp") == *"universal binary"* ]]; then
if [ "`arch`" == "arm64" ]; then
if [[ $(file "$fp") == *"x86_64"* ]]; then
sudo lipo -remove x86_64 "$fp" -o "$fp" || true
fi
else
if [[ $(file "$fp") == *"arm64e)"* ]]; then
sudo lipo -remove arm64e "$fp" -o "$fp" || true
fi
if [[ $(file "$fp") == *"arm64)"* ]]; then
sudo lipo -remove arm64 "$fp" -o "$fp" || true
fi
fi
fi
done
cd $opwd
}
tmpify /Library/Developer/CoreSimulator
tmpify ~/Library/Developer/CoreSimulator
tmpify $(xcode-select -p)/Platforms/AppleTVOS.platform
tmpify $(xcode-select -p)/Platforms/iPhoneOS.platform
tmpify $(xcode-select -p)/Platforms/WatchOS.platform
tmpify $(xcode-select -p)/Platforms/WatchSimulator.platform
tmpify $(xcode-select -p)/Platforms/AppleTVSimulator.platform
tmpify $(xcode-select -p)/Platforms/iPhoneSimulator.platform
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/metal/ios
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-5.0
tmpify ~/.rubies
tmpify ~/Library/Caches/Homebrew
tmpify /usr/local/Homebrew
sudo rm -rf $TMPDIR/del-target
sudo rm -rf /Applications/Safari.app
sudo rm -rf ~/project/src/third_party/catapult/tracing/test_data
sudo rm -rf ~/project/src/third_party/angle/third_party/VK-GL-CTS
# lipo off some huge binaries arm64 versions to save space
strip_universal_deep $(xcode-select -p)/../SharedFrameworks
# strip_arm_deep /System/Volumes/Data/Library/Developer/CommandLineTools/usr


@@ -0,0 +1,36 @@
name: 'Restore Cache AKS'
description: 'Restores Electron src cache via AKS'
runs:
using: "composite"
steps:
- name: Restore and Ensure Src Cache
shell: bash
run: |
cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ]; then
echo "Cache Does Not Exist for $DEPSHASH - exiting"
exit 1
else
echo "Found Cache for $DEPSHASH at $cache_path"
fi
echo "Persisted cache is $(du -sh $cache_path | cut -f1)"
mkdir temp-cache
tar -xf $cache_path -C temp-cache
echo "Unzipped cache is $(du -sh temp-cache/src | cut -f1)"
if [ -d "temp-cache/src" ]; then
echo "Relocating Cache"
rm -rf src
mv temp-cache/src src
fi
if [ ! -d "src/third_party/blink" ]; then
echo "Cache was not correctly restored - exiting"
exit 1
fi
echo "Wiping Electron Directory"
rm -rf src/electron


@@ -0,0 +1,51 @@
name: 'Restore Cache AZCopy'
description: 'Restores Electron src cache via AZCopy'
runs:
using: "composite"
steps:
- name: Obtain SAS Key
uses: actions/cache/restore@v4
with:
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Download Src Cache from AKS
# The cache will always exist here as a result of the checkout job
# Either it was uploaded to Azure in the checkout job for this commit
# or it was uploaded in the checkout job for a previous commit.
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
timeout_minutes: 20
max_attempts: 3
retry_on: error
command: |
sas_token=$(cat sas-token)
azcopy copy \
"https://${{ env.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}.file.core.windows.net/${{ env.AZURE_AKS_CACHE_SHARE_NAME }}/${{ env.CACHE_PATH }}?$sas_token" $DEPSHASH.tar
- name: Clean SAS Key
shell: bash
run: rm -f sas-token
- name: Unzip and Ensure Src Cache
shell: bash
run: |
echo "Downloaded cache is $(du -sh $DEPSHASH.tar | cut -f1)"
mkdir temp-cache
tar -xf $DEPSHASH.tar -C temp-cache
echo "Unzipped cache is $(du -sh temp-cache/src | cut -f1)"
if [ -d "temp-cache/src" ]; then
echo "Relocating Cache"
rm -rf src
mv temp-cache/src src
echo "Deleting zip file"
rm -rf $DEPSHASH.tar
fi
if [ ! -d "src/third_party/blink" ]; then
echo "Cache was not correctly restored - exiting"
exit 1
fi
echo "Wiping Electron Directory"
rm -rf src/electron
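For context, a hedged sketch of what a calling job would need to provide for this restore to work: the environment variable names come from the steps above and from the workflows in this change, while the action path and the CACHE_PATH value are assumptions for illustration.

  - name: Restore Src Cache via AZCopy
    # assumed path: the diff header does not show where this action file lives
    uses: ./src/electron/.github/actions/restore-cache-azcopy
    env:
      AZURE_AKS_CACHE_STORAGE_ACCOUNT: ${{ secrets.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}
      AZURE_AKS_CACHE_SHARE_NAME: ${{ secrets.AZURE_AKS_CACHE_SHARE_NAME }}
      CACHE_PATH: ${{ env.DEPSHASH }}.tar   # assumed share layout; DEPSHASH must already be in the environment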

.github/workflows/build.yml

@@ -0,0 +1,228 @@
name: Build
on:
workflow_dispatch:
inputs:
build-image-sha:
type: string
description: 'SHA for electron/build image'
default: 'cf814a4d2501e8e843caea071a6b70a48e78b855'
required: true
skip-macos:
type: boolean
description: 'Skip macOS builds'
default: false
required: false
skip-linux:
type: boolean
description: 'Skip Linux builds'
default: false
required: false
skip-windows:
type: boolean
description: 'Skip Windows builds'
default: false
required: false
skip-lint:
type: boolean
description: 'Skip lint check'
default: false
required: false
# push:
# pull_request:
jobs:
changes:
runs-on: ubuntu-latest
permissions:
pull-requests: read
outputs:
docs: ${{ steps.filter.outputs.docs }}
src: ${{ steps.filter.outputs.src }}
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 #v4.0.2
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: filter
with:
filters: |
docs:
- 'docs/**'
src:
- '!docs/**'
# Lint Jobs
lint:
if: ${{ !inputs.skip-lint }}
uses: ./.github/workflows/pipeline-electron-lint.yml
with:
container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root"}'
secrets: inherit
# Docs Only Jobs
docs-only:
needs: changes
if: ${{ needs.changes.outputs.docs == 'true' && needs.changes.outputs.src == 'false'}}
uses: ./.github/workflows/pipeline-electron-docs-only.yml
with:
container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root"}'
secrets: inherit
# Checkout Jobs
checkout-windows:
needs: changes
if: ${{ needs.changes.outputs.src == 'true' && !inputs.skip-windows }}
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root --device /dev/fuse --cap-add SYS_ADMIN
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_win=True'
TARGET_OS: 'win'
ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN: '1'
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
checkout-macos:
needs: changes
if: false
# if: ${{ needs.changes.outputs.src == 'true' && !inputs.skip-macos}}
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
checkout-linux:
needs: changes
if: false
# if: ${{ needs.changes.outputs.src == 'true' && !inputs.skip-linux}}
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
# Build Jobs - These cascade into testing jobs
macos-x64:
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-macos
with:
build-runs-on: macos-14-xlarge
test-runs-on: macos-14-xlarge
target-platform: macos
target-arch: x64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
macos-arm64:
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-macos
with:
build-runs-on: macos-14-xlarge
test-runs-on: macos-14-xlarge
target-platform: macos
target-arch: arm64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
linux-x64:
uses: ./.github/workflows/pipeline-electron-build-and-test-and-nan.yml
needs: checkout-linux
with:
build-runs-on: aks-linux-large
test-runs-on: aks-linux-medium
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root --privileged --init"}'
target-platform: linux
target-arch: x64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
windows-x64:
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-windows
with:
build-runs-on: aks-linux-large
test-runs-on: windows-2022
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root --device /dev/fuse --cap-add SYS_ADMIN","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
target-platform: windows
target-arch: x64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
linux-arm:
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-linux
with:
build-runs-on: aks-linux-large
test-runs-on: aks-linux-arm-medium
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/test:arm32v7-${{ inputs.build-image-sha }}","options":"--user root --privileged --init"}'
target-platform: linux
target-arch: arm
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
linux-arm64:
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-linux
with:
build-runs-on: aks-linux-large
test-runs-on: aks-linux-arm-medium
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/test:arm64v8-${{ inputs.build-image-sha }}","options":"--user root --privileged --init"}'
target-platform: linux
target-arch: arm64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
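Because the push and pull_request triggers above are commented out, this workflow only runs on manual dispatch; a minimal sketch of kicking it off with the GitHub CLI, assuming gh is authenticated against this repository (the branch and the chosen skip flags are illustrative):

  # run the build pipeline, skipping Windows builds and the lint job
  gh workflow run build.yml \
    --ref main \
    -f skip-windows=true \
    -f skip-lint=true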


@@ -1,26 +0,0 @@
{
"root": "/Users/runner/work/electron/electron/",
"remotes": {
"electron": {
"origin": "https://github.com/electron/electron.git"
}
},
"gen": {
"args": [
"import(\"//electron/build/args/release.gn\")",
"use_remoteexec = true",
"target_cpu = \"arm64\"",
"is_mas_build = true"
],
"out": "Default"
},
"env": {
"CHROMIUM_BUILDTOOLS_PATH": "/Users/runner/work/electron/electron/src/buildtools",
"GIT_CACHE_PATH": "/Users/runner/work/electron/electron/.git-cache"
},
"$schema": "file:///home/builduser/.electron_build_tools/evm-config.schema.json",
"configValidationLevel": "strict",
"reclient": "remote_exec",
"goma": "none",
"preserveXcode": 5
}


@@ -1,26 +0,0 @@
{
"root": "/Users/runner/work/electron/electron/",
"remotes": {
"electron": {
"origin": "https://github.com/electron/electron.git"
}
},
"gen": {
"args": [
"import(\"//electron/build/args/release.gn\")",
"use_remoteexec = true",
"target_cpu = \"x64\"",
"is_mas_build = true"
],
"out": "Default"
},
"env": {
"CHROMIUM_BUILDTOOLS_PATH": "/Users/runner/work/electron/electron/src/buildtools",
"GIT_CACHE_PATH": "/Users/runner/work/electron/electron/.git-cache"
},
"$schema": "file:///home/builduser/.electron_build_tools/evm-config.schema.json",
"configValidationLevel": "strict",
"reclient": "remote_exec",
"goma": "none",
"preserveXcode": 5
}


@@ -1,25 +0,0 @@
{
"root": "/Users/runner/work/electron/electron/",
"remotes": {
"electron": {
"origin": "https://github.com/electron/electron.git"
}
},
"gen": {
"args": [
"import(\"//electron/build/args/testing.gn\")",
"use_remoteexec = true",
"is_mas_build = true"
],
"out": "Default"
},
"env": {
"CHROMIUM_BUILDTOOLS_PATH": "/Users/runner/work/electron/electron/src/buildtools",
"GIT_CACHE_PATH": "/Users/runner/work/electron/electron/.git-cache"
},
"$schema": "file:///home/builduser/.electron_build_tools/evm-config.schema.json",
"configValidationLevel": "strict",
"reclient": "remote_exec",
"goma": "none",
"preserveXcode": 5
}


@@ -1,26 +0,0 @@
{
"root": "/Users/runner/work/electron/electron/",
"remotes": {
"electron": {
"origin": "https://github.com/electron/electron.git"
}
},
"gen": {
"args": [
"import(\"//electron/build/args/testing.gn\")",
"use_remoteexec = true",
"target_cpu = \"x64\"",
"is_mas_build = true"
],
"out": "Default"
},
"env": {
"CHROMIUM_BUILDTOOLS_PATH": "/Users/runner/work/electron/electron/src/buildtools",
"GIT_CACHE_PATH": "/Users/runner/work/electron/electron/.git-cache"
},
"$schema": "file:///home/builduser/.electron_build_tools/evm-config.schema.json",
"configValidationLevel": "strict",
"reclient": "remote_exec",
"goma": "none",
"preserveXcode": 5
}


@@ -1,17 +0,0 @@
name: Build Linux
on:
workflow_dispatch:
# push:
# pull_request:
jobs:
build:
uses: electron/electron/.github/workflows/linux-pipeline.yml@main
with:
is-release: false
gn-config: //electron/build/args/testing.gn
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit


@@ -1,427 +0,0 @@
name: Pipeline Linux
on:
workflow_call:
inputs:
is-release:
description: 'Whether this build job is a release job'
required: true
type: boolean
default: false
gn-config:
description: 'The gn arg configuration to use'
required: true
type: string
default: //electron/build/args/testing.gn
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
generate-symbols:
description: 'Whether or not to generate symbols'
required: true
type: boolean
default: false
upload-to-storage:
description: 'Whether or not to upload build artifacts to external storage'
required: true
type: string
default: '0'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
AZURE_STORAGE_ACCOUNT: ${{ secrets.AZURE_STORAGE_ACCOUNT }}
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_STORAGE_CONTAINER_NAME: ${{ secrets.AZURE_STORAGE_CONTAINER_NAME }}
ELECTRON_ARTIFACTS_BLOB_STORAGE: ${{ secrets.ELECTRON_ARTIFACTS_BLOB_STORAGE }}
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
ELECTRON_GITHUB_TOKEN: ${{ secrets.ELECTRON_GITHUB_TOKEN }}
GN_CONFIG: ${{ inputs.gn-config }}
# Disable pre-compiled headers to reduce out size - only useful for rebuilds
GN_BUILDFLAG_ARGS: 'enable_precompiled_headers = false'
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
CHECK_DIST_MANIFEST: true
IS_GHA_RELEASE: true
ELECTRON_OUT_DIR: Default
jobs:
checkout:
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:latest
options: --user root
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Set GIT_CACHE_PATH to make gclient to use the cache
run: |
echo "GIT_CACHE_PATH=$(pwd)/git-cache" >> $GITHUB_ENV
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Check If Cache Exists
id: check-cache
run: |
exists_json=$(az storage blob exists \
--account-name $AZURE_STORAGE_ACCOUNT \
--account-key $AZURE_STORAGE_KEY \
--container-name $AZURE_STORAGE_CONTAINER_NAME \
--name $DEPSHASH)
cache_exists=$(echo $exists_json | jq -r '.exists')
echo "cache_exists=$cache_exists" >> $GITHUB_OUTPUT
if (test "$cache_exists" = "true"); then
echo "Cache Exists for $DEPSHASH"
else
echo "Cache Does Not Exist for $DEPSHASH"
fi
- name: Gclient Sync
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
gclient config \
--name "src/electron" \
--unmanaged \
${GCLIENT_EXTRA_ARGS} \
"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 gclient sync --with_branch_heads --with_tags -vvvvv
if [ "${{ inputs.is-release }}" != "true" ]; then
# Re-export all the patches to check if there were changes.
python3 src/electron/script/export_all_patches.py src/electron/patches/config.json
cd src/electron
git update-index --refresh || true
if ! git diff-index --quiet HEAD --; then
# There are changes to the patches. Make a git commit with the updated patches
git add patches
GIT_COMMITTER_NAME="PatchUp" GIT_COMMITTER_EMAIL="73610968+patchup[bot]@users.noreply.github.com" git commit -m "chore: update patches" --author="PatchUp <73610968+patchup[bot]@users.noreply.github.com>"
# Export it
mkdir -p ../../patches
git format-patch -1 --stdout --keep-subject --no-stat --full-index > ../../patches/update-patches.patch
if (node ./script/push-patch.js 2> /dev/null > /dev/null); then
echo
echo "======================================================================"
echo "Changes to the patches when applying, we have auto-pushed the diff to the current branch"
echo "A new CI job will kick off shortly"
echo "======================================================================"
exit 1
else
echo
echo "======================================================================"
echo "There were changes to the patches when applying."
echo "Check the CI artifacts for a patch you can apply to fix it."
echo "======================================================================"
exit 1
fi
fi
fi
# delete all .git directories under src/ except for
# third_party/angle/ and third_party/dawn/ because of build time generation of files
# gen/angle/commit.h depends on third_party/angle/.git/HEAD
# https://chromium-review.googlesource.com/c/angle/angle/+/2074924
# and dawn/common/Version_autogen.h depends on third_party/dawn/.git/HEAD
# https://dawn-review.googlesource.com/c/dawn/+/83901
# TODO: maybe better to always leave out */.git/HEAD file for all targets ?
- name: Delete .git directories under src to free space
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
cd src
( find . -type d -name ".git" -not -path "./third_party/angle/*" -not -path "./third_party/dawn/*" -not -path "./electron/*" ) | xargs rm -rf
- name: Minimize Cache Size for Upload
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
rm -rf src/android_webview
rm -rf src/ios/chrome
rm -rf src/third_party/blink/web_tests
rm -rf src/third_party/blink/perf_tests
rm -rf src/chrome/test/data/xr/webvr_info
rm -rf src/third_party/angle/third_party/VK-GL-CTS/src
rm -rf src/third_party/swift-toolchain
rm -rf src/third_party/swiftshader/tests/regres/testlists
rm -rf src/electron
rm -rf src/third_party/electron_node/deps/openssl
rm -rf src/third_party/electron_node/deps/v8
- name: Compress Src Directory
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
echo "Uncompressed src size: $(du -sh src | cut -f1 -d' ')"
tar -cvf $DEPSHASH.tar src
echo "Compressed src to $(du -sh $DEPSHASH.tar | cut -f1 -d' ')"
- name: Upload Compressed Src Cache to Azure
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
az storage blob upload \
--account-name $AZURE_STORAGE_ACCOUNT \
--account-key $AZURE_STORAGE_KEY \
--container-name $AZURE_STORAGE_CONTAINER_NAME \
--file $DEPSHASH.tar \
--name $DEPSHASH
build:
strategy:
fail-fast: false
matrix:
build-arch: [ arm64 ] # x64, arm
env:
TARGET_ARCH: ${{ matrix.build-arch }}
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:latest
options: --user root
needs: checkout
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ef894bc3cfa99d84a3b731252da0f83f500e4032
npm i -g @electron/build-tools
e auto-update disable
e init --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ matrix.build-arch }}
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Set GN_EXTRA_ARGS
run: |
if [ "${{ matrix.build-arch }}" = "arm" ]; then
GN_EXTRA_ARGS='target_cpu="arm" build_tflite_with_xnnpack=false'
elif [ "${{ matrix.build-arch }}" = "arm64" ]; then
GN_EXTRA_ARGS='target_cpu="arm64" fatal_linker_warnings=false enable_linux_installer=false'
fi
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Download Src Cache
# The cache will always exist here as a result of the checkout job
# Either it was uploaded to Azure in the checkout job for this commit
# or it was uploaded in the checkout job for a previous commit.
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
timeout_minutes: 20
max_attempts: 3
retry_on: error
command: |
az storage blob download \
--account-name $AZURE_STORAGE_ACCOUNT \
--account-key $AZURE_STORAGE_KEY \
--container-name $AZURE_STORAGE_CONTAINER_NAME \
--name $DEPSHASH \
--file $DEPSHASH.tar
- name: Unzip and Ensure Src Cache
run: |
echo "Downloaded cache is $(du -sh $DEPSHASH.tar | cut -f1)"
mkdir temp-cache
tar -xf $DEPSHASH.tar -C temp-cache
echo "Unzipped cache is $(du -sh temp-cache/src | cut -f1)"
if [ -d "temp-cache/src" ]; then
echo "Relocating Cache"
rm -rf src
mv temp-cache/src src
echo "Deleting zip file"
rm -rf $DEPSHASH.tar
fi
if [ ! -d "src/third_party/blink" ]; then
echo "Cache was not correctly restored - exiting"
exit 1
fi
echo "Wiping Electron Directory"
rm -rf src/electron
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Run Electron Only Hooks
run: |
echo "Running Electron Only Hooks"
gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Fix Known Hosts on Linux
run: src/electron/.circleci/fix-known-hosts.sh
- name: Install build-tools & Setup RBE
run: |
echo "NUMBER_OF_NINJA_PROCESSES=300" >> $GITHUB_ENV
cd ~/.electron_build_tools
npx yarn --ignore-engines
# Pull down credential helper and print status
node -e "require('./src/utils/reclient.js').downloadAndPrepare({})"
HELPER=$(node -p "require('./src/utils/reclient.js').helperPath({})")
$HELPER login
echo 'RBE_service='`node -e "console.log(require('./src/utils/reclient.js').serviceAddress)"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper='`node -e "console.log(require('./src/utils/reclient.js').helperPath({}))"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper_args=print' >> $GITHUB_ENV
- name: Build Electron
run: |
cd src/electron
# TODO(codebytere): remove this once we figure out why .git/packed-refs is initially missing
git pack-refs
cd ..
NINJA_SUMMARIZE_BUILD=1 e build -j $NUMBER_OF_NINJA_PROCESSES
cp out/Default/.ninja_log out/electron_ninja_log
node electron/script/check-symlinks.js
- name: Build Electron dist.zip
run: |
cd src
e build electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ env.CHECK_DIST_MANIFEST }}" = "true" ]; then
target_os=linux
target_cpu=${{ matrix.build-arch }}
electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.$target_os.${{ matrix.build-arch }}.manifest
fi
- name: Build Mksnapshot
run: |
cd src
e build electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
# Remove unused args from mksnapshot_args
SEDOPTION="-i"
sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
sed $SEDOPTION '/The gn arg use_goma=true .*/d' out/Default/mksnapshot_args
if [ "${{ matrix.build-arch }}" = "arm" ]; then
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/v8_context_snapshot_generator
elif [ "${{ matrix.build-arch }}" = "arm64" ]; then
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/v8_context_snapshot_generator
else
electron/script/strip-binaries.py --file $PWD/out/Default/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/v8_context_snapshot_generator
fi
e build electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
- name: Generate Cross-Arch Snapshot (arm/arm64)
if: ${{ matrix.build-arch == 'arm' }} || ${{ matrix.build-arch == 'arm64' }}
run: |
cd src
if [ "${{ matrix.build-arch }}" = "arm" ]; then
export MKSNAPSHOT_PATH="clang_x86_v8_arm"
elif [ "${{ matrix.build-arch }}" = "arm64" ]; then
export MKSNAPSHOT_PATH="clang_x64_v8_arm64"
fi
cp "out/Default/$MKSNAPSHOT_PATH/mksnapshot" out/Default
cp "out/Default/$MKSNAPSHOT_PATH/v8_context_snapshot_generator" out/Default
cp "out/Default/$MKSNAPSHOT_PATH/libffmpeg.so" out/Default
python3 electron/script/verify-mksnapshot.py --source-root "$PWD" --build-dir out/Default --create-snapshot-only
mkdir cross-arch-snapshots
cp out/Default-mksnapshot-test/*.bin cross-arch-snapshots
# Clean up so that ninja does not get confused
rm -f out/Default/libffmpeg.so
- name: Build Chromedriver
run: |
cd src
e build electron:electron_chromedriver -j $NUMBER_OF_NINJA_PROCESSES
e build electron:electron_chromedriver_zip
- name: Generate & Zip Symbols
run: |
# Generate breakpad symbols on release builds
if [ "${{ inputs.generate-symbols }}" = "true" ]; then
e build electron:electron_symbols
fi
cd src
export BUILD_PATH="$(pwd)/out/Default"
e build electron:licenses
e build electron:electron_version_file
if [ "${{ inputs.is-release }}" = "true" ]; then
DELETE_DSYMS_AFTER_ZIP=1 electron/script/zip-symbols.py -b $BUILD_PATH
else
electron/script/zip-symbols.py -b $BUILD_PATH
fi
- name: Generate FFMpeg
if: ${{ inputs.is-release }}
run: |
cd src
gn gen out/ffmpeg --args="import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true $GN_EXTRA_ARGS"
autoninja -C out/ffmpeg electron:electron_ffmpeg_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate Hunspell Dictionaries
if: ${{ inputs.is-release }}
run: |
cd src
autoninja -C out/Default electron:hunspell_dictionaries_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate TypeScript Definitions
if: ${{ inputs.is-release }}
run: |
cd src/electron
node script/yarn create-typescript-definitions
- name: Publish Electron Dist
if: ${{ inputs.is-release }}
run: |
rm -rf src/out/Default/obj
cd src/electron
if [ "${{ inputs.upload-to-storage }}" = "1" ]; then
echo 'Uploading Electron release distribution to Azure'
script/release/uploaders/upload.py --verbose --upload_to_storage
else
echo 'Uploading Electron release distribution to GitHub releases'
script/release/uploaders/upload.py --verbose
fi
- name: Move all Generated Artifacts to Upload Folder
run: ./src/electron/script/actions/move-artifacts.sh
- name: Upload Generated Artifacts
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: generated_artifacts_linux_${{ matrix.build-arch }}
path: ./generated_artifacts_linux_${{ matrix.build-arch }}


@@ -3,22 +3,80 @@ name: Publish Linux
on:
workflow_dispatch:
inputs:
build-image-sha:
type: string
description: 'SHA for electron/build image'
default: 'cf814a4d2501e8e843caea071a6b70a48e78b855'
upload-to-storage:
description: 'Uploads to Azure storage'
required: false
default: '1'
type: string
run-macos-publish:
run-linux-publish:
description: 'Run the publish jobs vs just the build jobs'
type: boolean
default: false
jobs:
publish:
uses: electron/electron/.github/workflows/linux-pipeline.yml@main
checkout-linux:
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
publish-x64:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-linux
with:
environment: production-release
build-runs-on: aks-linux-large
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
target-platform: linux
target-arch: x64
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
publish-arm:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-linux
with:
environment: production-release
build-runs-on: aks-linux-large
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
target-platform: linux
target-arch: arm
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
publish-arm64:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-linux
with:
environment: production-release
build-runs-on: aks-linux-large
build-container: '{"image":"ghcr.io/electron/build:${{ inputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
target-platform: linux
target-arch: arm64
is-release: true
gn-config: //electron/build/args/release.gn
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}


@@ -1,17 +0,0 @@
name: Build MacOS
on:
workflow_dispatch:
# push
# pull_request:
jobs:
build:
uses: electron/electron/.github/workflows/macos-pipeline.yml@main
with:
is-release: false
gn-config: //electron/build/args/testing.gn
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit


@@ -1,734 +0,0 @@
name: Build MacOS
on:
workflow_call:
inputs:
is-release:
description: 'Whether this build job is a release job'
required: true
type: boolean
default: false
gn-config:
description: 'The gn arg configuration to use'
required: true
type: string
default: //electron/build/args/testing.gn
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
generate-symbols:
description: 'Whether or not to generate symbols'
required: true
type: boolean
default: false
upload-to-storage:
description: 'Whether or not to upload build artifacts to external storage'
required: true
type: string
default: '0'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
AZURE_AKS_CACHE_STORAGE_ACCOUNT: ${{ secrets.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}
AZURE_AKS_CACHE_SHARE_NAME: ${{ secrets.AZURE_AKS_CACHE_SHARE_NAME }}
ELECTRON_ARTIFACTS_BLOB_STORAGE: ${{ secrets.ELECTRON_ARTIFACTS_BLOB_STORAGE }}
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
ELECTRON_GITHUB_TOKEN: ${{ secrets.ELECTRON_GITHUB_TOKEN }}
GN_CONFIG: ${{ inputs.gn-config }}
# Disable pre-compiled headers to reduce the size of the out directory - only useful for rebuilds
GN_BUILDFLAG_ARGS: 'enable_precompiled_headers = false'
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
# Only disable this in the Asan build
CHECK_DIST_MANIFEST: true
IS_GHA_RELEASE: true
ELECTRON_OUT_DIR: Default
jobs:
checkout:
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:latest
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Set GIT_CACHE_PATH to make gclient to use the cache
run: |
echo "GIT_CACHE_PATH=$(pwd)/git-cache" >> $GITHUB_ENV
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Remove swift-format dep from cipd on macOS until we send a patch upstream.
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
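# DEPSHASH keys the shared src cache; when the pinned dependencies change, the key changes and a fresh sync is required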
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Generate SAS Key
run: |
curl --unix-socket /var/run/sas/sas.sock --fail "http://foo/$DEPSHASH.tar" > sas-token
- name: Save SAS Key
uses: actions/cache/save@v4
with:
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Check If Cache Exists
id: check-cache
run: |
cache_key=$DEPSHASH
cache_path=/mnt/cross-instance-cache/${cache_key}.tar
echo "Using cache key: $cache_key"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ]; then
echo "Cache Does Not Exist for $DEPSHASH"
# Expose the result so later steps can check steps.check-cache.outputs.cache_exists
echo "cache_exists=false" >> $GITHUB_OUTPUT
else
echo "Cache Already Exists for $DEPSHASH, Skipping.."
echo "cache_exists=true" >> $GITHUB_OUTPUT
fi
- name: Gclient Sync
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
gclient config \
--name "src/electron" \
--unmanaged \
${GCLIENT_EXTRA_ARGS} \
"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 gclient sync --with_branch_heads --with_tags -vvvvv
if [ "${{ inputs.is-release }}" != "true" ]; then
# Re-export all the patches to check if there were changes.
python3 src/electron/script/export_all_patches.py src/electron/patches/config.json
cd src/electron
git update-index --refresh || true
if ! git diff-index --quiet HEAD --; then
# There are changes to the patches. Make a git commit with the updated patches
git add patches
GIT_COMMITTER_NAME="PatchUp" GIT_COMMITTER_EMAIL="73610968+patchup[bot]@users.noreply.github.com" git commit -m "chore: update patches" --author="PatchUp <73610968+patchup[bot]@users.noreply.github.com>"
# Export it
mkdir -p ../../patches
git format-patch -1 --stdout --keep-subject --no-stat --full-index > ../../patches/update-patches.patch
if (node ./script/push-patch.js 2> /dev/null > /dev/null); then
echo
echo "======================================================================"
echo "Changes to the patches when applying, we have auto-pushed the diff to the current branch"
echo "A new CI job will kick off shortly"
echo "======================================================================"
exit 1
else
echo
echo "======================================================================"
echo "There were changes to the patches when applying."
echo "Check the CI artifacts for a patch you can apply to fix it."
echo "======================================================================"
exit 1
fi
fi
fi
# delete all .git directories under src/ except for
# third_party/angle/ and third_party/dawn/ because of build time generation of files
# gen/angle/commit.h depends on third_party/angle/.git/HEAD
# https://chromium-review.googlesource.com/c/angle/angle/+/2074924
# and dawn/common/Version_autogen.h depends on third_party/dawn/.git/HEAD
# https://dawn-review.googlesource.com/c/dawn/+/83901
# TODO: maybe it would be better to always leave the */.git/HEAD file in place for all targets?
- name: Delete .git directories under src to free space
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
cd src
( find . -type d -name ".git" -not -path "./third_party/angle/*" -not -path "./third_party/dawn/*" -not -path "./electron/*" ) | xargs rm -rf
- name: Minimize Cache Size for Upload
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
rm -rf src/android_webview
rm -rf src/ios/chrome
rm -rf src/third_party/blink/web_tests
rm -rf src/third_party/blink/perf_tests
rm -rf src/chrome/test/data/xr/webvr_info
rm -rf src/third_party/angle/third_party/VK-GL-CTS/src
rm -rf src/third_party/swift-toolchain
rm -rf src/third_party/swiftshader/tests/regres/testlists
rm -rf src/electron
- name: Compress Src Directory
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
echo "Uncompressed src size: $(du -sh src | cut -f1 -d' ')"
tar -cf $DEPSHASH.tar src
echo "Compressed src to $(du -sh $DEPSHASH.tar | cut -f1 -d' ')"
- name: Move src folder to cross-OS portal
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
cp ./$DEPSHASH.tar /mnt/cross-instance-cache/
sudo mkdir -p /var/portal
sudo chown -R $(id -u):$(id -g) /var/portal
mv ./src /var/portal
- name: Persist Src Cache
if: steps.check-cache.outputs.cache_exists == 'false'
run: |
cache_key=$DEPSHASH
backup_cache_path=/var/portal
final_cache_path=/mnt/cross-instance-cache/${cache_key}.tar
echo "Using cache key: $cache_key"
echo "Checking path: $final_cache_path"
if [ ! -f "$final_cache_path" ]; then
echo "Cache key not found, storing tarball"
tmp_container=/mnt/cross-instance-cache/tmp/${{ github.sha }}
tmp_cache_path=$tmp_container/${cache_key}.tar
mkdir -p $tmp_container
if [ -f "$backup_cache_path" ]; then
tar -cf $tmp_cache_path -C $(dirname $backup_cache_path) ./$(basename $backup_cache_path)
else
tar -cf $tmp_cache_path -C $backup_cache_path/ ./
fi
mv -vn $tmp_cache_path $final_cache_path
rm -rf $tmp_container
else
echo "Cache key already exists, skipping.."
fi
build:
strategy:
fail-fast: false
matrix:
build-arch: [ arm64, x64 ]
# macos-large is x64, macos-xlarge is arm64
# More runner information: https://github.com/actions/runner-images/blob/main/README.md#available-images
runs-on: macos-14-xlarge
needs: checkout
env:
TARGET_ARCH: ${{ matrix.build-arch }}
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ef894bc3cfa99d84a3b731252da0f83f500e4032
npm i -g @electron/build-tools
e auto-update disable
e init --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ matrix.build-arch }}
e use ${{ inputs.gn-build-type }}
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Setup Node.js/npm
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8
with:
node-version: 20.11.x
cache: yarn
cache-dependency-path: src/electron/yarn.lock
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
brew install azcopy
- name: Load Target Arch & CPU
run: |
echo "target_cpu=${{ matrix.build-arch }}" >> $GITHUB_ENV
echo "host_cpu=${{ matrix.build-arch }}" >> $GITHUB_ENV
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed -i '' '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
- name: Obtain SAS Key
uses: actions/cache/restore@v4
with:
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Download Src Cache from AKS
# The cache will always exist here as a result of the checkout job
# Either it was uploaded to Azure in the checkout job for this commit
# or it was uploaded in the checkout job for a previous commit.
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
timeout_minutes: 20
max_attempts: 3
retry_on: error
command: |
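# Copy the src tarball out of the Azure file share, authenticating with the short-lived SAS token generated in the checkout job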
sas_token=$(cat sas-token)
azcopy copy \
"https://${{ env.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}.file.core.windows.net/${{ env.AZURE_AKS_CACHE_SHARE_NAME }}/${{ env.CACHE_PATH }}?$sas_token" $DEPSHASH.tar
- name: Clean SAS Key
run: rm -f sas-token
- name: Unzip and Ensure Src Cache
run: |
echo "Downloaded cache is $(du -sh $DEPSHASH.tar | cut -f1)"
mkdir temp-cache
tar -xf $DEPSHASH.tar -C temp-cache
echo "Unzipped cache is $(du -sh temp-cache/src | cut -f1)"
if [ -d "temp-cache/src" ]; then
echo "Relocating Cache"
rm -rf src
mv temp-cache/src src
echo "Deleting zip file"
rm -rf $DEPSHASH.tar
fi
if [ ! -d "src/third_party/blink" ]; then
echo "Cache was not correctly restored - exiting"
exit 1
fi
echo "Wiping Electron Directory"
rm -rf src/electron
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Run Electron Only Hooks
run: |
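# Run hooks for src/electron only; process_deps=False keeps gclient from re-syncing the Chromium dependencies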
gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Fix Sync
# This step is required to correct for differences between "gclient sync"
# on Linux and the expected state on macOS. This requires:
# 1. Fixing Clang Install (wrong binary)
# 2. Fixing esbuild (wrong binary)
# 3. Fixing rustc (wrong binary)
# 4. Fixing gn (wrong binary)
# 5. Fixing reclient (wrong binary)
# 6. Fixing dsymutil (wrong binary)
# 7. Ensuring we are using the correct ninja and adding it to PATH
# 8. Fixing angle (wrong remote)
run : |
SEDOPTION="-i ''"
rm -rf src/third_party/llvm-build
python3 src/tools/clang/scripts/update.py
echo 'infra/3pp/tools/esbuild/${platform}' `gclient getdep --deps-file=src/third_party/devtools-frontend/src/DEPS -r 'third_party/esbuild:infra/3pp/tools/esbuild/${platform}'` > esbuild_ensure_file
# Remove extra output from calling gclient getdep which always calls update_depot_tools
sed -i '' "s/Updating depot_tools... //g" esbuild_ensure_file
cipd ensure --root src/third_party/devtools-frontend/src/third_party/esbuild -ensure-file esbuild_ensure_file
rm -rf src/third_party/rust-toolchain
python3 src/tools/rust/update_rust.py
# Remove extra output from calling gclient getdep which always calls update_depot_tools
echo 'gn/gn/mac-${arch}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/mac:gn/gn/mac-${arch}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/mac -ensure-file gn_ensure_file
# Remove extra output from calling gclient getdep which always calls update_depot_tools
echo 'infra/rbe/client/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/reclient:infra/rbe/client/${platform}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/reclient -ensure-file gn_ensure_file
python3 src/buildtools/reclient_cfgs/configure_reclient_cfgs.py --rbe_instance "projects/rbe-chrome-untrusted/instances/default_instance" --reproxy_cfg_template reproxy.cfg.template --rewrapper_cfg_project "" --skip_remoteexec_cfg_fetch
if [ "${{ env.TARGET_ARCH }}" == "arm64" ]; then
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.arm64.sha1
else
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.x64.sha1
fi
python3 src/third_party/depot_tools/download_from_google_storage.py --no_resume --no_auth --bucket chromium-browser-clang -s $DSYM_SHA_FILE -o src/tools/clang/dsymutil/bin/dsymutil
echo 'infra/3pp/tools/ninja/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/third_party/ninja:infra/3pp/tools/ninja/${platform}'` > ninja_ensure_file
sed $SEDOPTION "s/Updating depot_tools... //g" ninja_ensure_file
cipd ensure --root src/third_party/ninja -ensure-file ninja_ensure_file
echo "$(pwd)/src/third_party/ninja" >> $GITHUB_PATH
cd src/third_party/angle
rm -f .git/objects/info/alternates
git remote set-url origin https://chromium.googlesource.com/angle/angle.git
cp .git/config .git/config.backup
git remote remove origin
mv .git/config.backup .git/config
git fetch
- name: Install build-tools & Setup RBE
run: |
echo "NUMBER_OF_NINJA_PROCESSES=200" >> $GITHUB_ENV
cd ~/.electron_build_tools
npx yarn --ignore-engines
# Pull down credential helper and print status
node -e "require('./src/utils/reclient.js').downloadAndPrepare({})"
HELPER=$(node -p "require('./src/utils/reclient.js').helperPath({})")
$HELPER login
echo 'RBE_service='`node -e "console.log(require('./src/utils/reclient.js').serviceAddress)"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper='`node -e "console.log(require('./src/utils/reclient.js').helperPath({}))"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper_args=print' >> $GITHUB_ENV
- name: Free Space on MacOS
run: |
sudo mkdir -p $TMPDIR/del-target
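# tmpify: park a directory under $TMPDIR/del-target so everything can be removed in a single sweep at the end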
tmpify() {
if [ -d "$1" ]; then
sudo mv "$1" $TMPDIR/del-target/$(echo $1|shasum -a 256|head -n1|cut -d " " -f1)
fi
}
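# strip_universal_deep: for each executable under the given path, lipo away the architecture slices this host does not need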
strip_universal_deep() {
opwd=$(pwd)
cd $1
f=$(find . -perm +111 -type f)
for fp in $f
do
if [[ $(file "$fp") == *"universal binary"* ]]; then
if [ "`arch`" == "arm64" ]; then
if [[ $(file "$fp") == *"x86_64"* ]]; then
sudo lipo -remove x86_64 "$fp" -o "$fp" || true
fi
else
if [[ $(file "$fp") == *"arm64e)"* ]]; then
sudo lipo -remove arm64e "$fp" -o "$fp" || true
fi
if [[ $(file "$fp") == *"arm64)"* ]]; then
sudo lipo -remove arm64 "$fp" -o "$fp" || true
fi
fi
fi
done
cd $opwd
}
tmpify /Library/Developer/CoreSimulator
tmpify ~/Library/Developer/CoreSimulator
tmpify $(xcode-select -p)/Platforms/AppleTVOS.platform
tmpify $(xcode-select -p)/Platforms/iPhoneOS.platform
tmpify $(xcode-select -p)/Platforms/WatchOS.platform
tmpify $(xcode-select -p)/Platforms/WatchSimulator.platform
tmpify $(xcode-select -p)/Platforms/AppleTVSimulator.platform
tmpify $(xcode-select -p)/Platforms/iPhoneSimulator.platform
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/metal/ios
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift
tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-5.0
tmpify ~/.rubies
tmpify ~/Library/Caches/Homebrew
tmpify /usr/local/Homebrew
# the contents of build/linux/strip_binary.gni aren't used, but
# https://chromium-review.googlesource.com/c/chromium/src/+/4278307
# needs the file to exist.
# mv ~/project/src/build/linux/strip_binary.gni "${TMPDIR}"/
# tmpify ~/project/src/build/linux/
# mkdir -p ~/project/src/build/linux
# mv "${TMPDIR}/strip_binary.gni" ~/project/src/build/linux/
sudo rm -rf $TMPDIR/del-target
# sudo rm -rf "/System/Library/Desktop Pictures"
# sudo rm -rf /System/Library/Templates/Data
# sudo rm -rf /System/Library/Speech/Voices
# sudo rm -rf "/System/Library/Screen Savers"
# sudo rm -rf /System/Volumes/Data/Library/Developer/CommandLineTools/SDKs
# sudo rm -rf "/System/Volumes/Data/Library/Application Support/Apple/Photos/Print Products"
# sudo rm -rf /System/Volumes/Data/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/
# sudo rm -rf /System/Volumes/Data/Library/Java
# sudo rm -rf /System/Volumes/Data/Library/Ruby
# sudo rm -rf /System/Volumes/Data/Library/Printers
# sudo rm -rf /System/iOSSupport
# sudo rm -rf /System/Applications/*.app
# sudo rm -rf /System/Applications/Utilities/*.app
# sudo rm -rf /System/Library/LinguisticData
# sudo rm -rf /System/Volumes/Data/private/var/db/dyld/*
# sudo rm -rf /System/Library/Fonts/*
# sudo rm -rf /System/Library/PreferencePanes
# sudo rm -rf /System/Library/AssetsV2/*
sudo rm -rf /Applications/Safari.app
sudo rm -rf ~/project/src/third_party/catapult/tracing/test_data
sudo rm -rf ~/project/src/third_party/angle/third_party/VK-GL-CTS
# lipo the unneeded architecture slices off some huge binaries to save space
strip_universal_deep $(xcode-select -p)/../SharedFrameworks
# strip_arm_deep /System/Volumes/Data/Library/Developer/CommandLineTools/usr
- name: Build Electron (darwin)
run: |
cd src/electron
# TODO(codebytere): remove this once we figure out why .git/packed-refs is initially missing
git pack-refs
cd ..
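# Raise the process and system open-file limits; the build holds a very large number of files open at once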
ulimit -n 10000
sudo launchctl limit maxfiles 65536 200000
NINJA_SUMMARIZE_BUILD=1 e build -j $NUMBER_OF_NINJA_PROCESSES
cp out/Default/.ninja_log out/electron_ninja_log
node electron/script/check-symlinks.js
- name: Build Electron dist.zip (darwin)
run: |
cd src
e build electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ env.CHECK_DIST_MANIFEST }}" == "true" ]; then
target_os=mac
electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.$target_os.${{ env.TARGET_ARCH }}.manifest
fi
- name: Build Mksnapshot (darwin)
run: |
cd src
e build electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
# Remove unused args from mksnapshot_args
SEDOPTION="-i ''"
sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
sed $SEDOPTION '/The gn arg use_goma=true .*/d' out/Default/mksnapshot_args
e build electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
- name: Build Chromedriver (darwin)
run: |
cd src
e build electron:electron_chromedriver -j $NUMBER_OF_NINJA_PROCESSES
e build electron:electron_chromedriver_zip
# NOTE (vertedinde): We strip binaries/symbols on the Linux job, not the Mac job
- name: Generate & Zip Symbols (darwin)
run: |
# Generate breakpad symbols on release builds
if [ "${{ inputs.generate-symbols }}" == "true" ]; then
e build electron:electron_symbols
fi
cd src
export BUILD_PATH="$(pwd)/out/Default"
e build electron:licenses
e build electron:electron_version_file
if [ "${{ inputs.is-release }}" == "true" ]; then
DELETE_DSYMS_AFTER_ZIP=1 electron/script/zip-symbols.py -b $BUILD_PATH
else
electron/script/zip-symbols.py -b $BUILD_PATH
fi
- name: Generate FFMpeg (darwin)
if: ${{ inputs.is-release }}
run: |
cd src
gn gen out/ffmpeg --args="import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true $GN_EXTRA_ARGS"
autoninja -C out/ffmpeg electron:electron_ffmpeg_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate Hunspell Dictionaries
if: ${{ inputs.is-release }}
run: |
cd src
autoninja -C out/Default electron:hunspell_dictionaries_zip -j $NUMBER_OF_NINJA_PROCESSES
- name: Generate TypeScript Definitions
if: ${{ inputs.is-release }}
run: |
cd src/electron
node script/yarn create-typescript-definitions
# TODO(vertedinde): These uploads currently point to a different Azure bucket & GitHub Repo
- name: Publish Electron Dist
if: ${{ inputs.is-release }}
run: |
rm -rf src/out/Default/obj
cd src/electron
if [ "${{ inputs.upload-to-storage }}" == "1" ]; then
echo 'Uploading Electron release distribution to Azure'
script/release/uploaders/upload.py --verbose --upload_to_storage
else
echo 'Uploading Electron release distribution to GitHub releases'
script/release/uploaders/upload.py --verbose
fi
# The current generated_artifacts_<< artifact.key >> name was taken from CircleCI
# to ensure we don't break anything, but we may be able to improve that.
- name: Move all Generated Artifacts to Upload Folder
run: ./src/electron/script/actions/move-artifacts.sh
- name: Upload Generated Artifacts
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: generated_artifacts_darwin_${{ env.TARGET_ARCH }}
path: ./generated_artifacts_darwin_${{ env.TARGET_ARCH }}
- name: Create MAS Config
run: |
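# Switch build-tools to the MAS (Mac App Store) configuration before rebuilding Electron for the Mac App Store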
mv src/electron/.github/workflows/config/${{ inputs.gn-build-type }}/${{ matrix.build-arch }}/evm.mas.json $HOME/.electron_build_tools/configs/evm.mas.json
echo "MAS_BUILD=true" >> $GITHUB_ENV
e use mas
- name: Build Electron (mas)
run: |
rm -rf "src/out/Default/Electron Framework.framework"
rm -rf src/out/Default/Electron*.app
cd src/electron
# TODO(codebytere): remove this once we figure out why .git/packed-refs is initially missing
git pack-refs
cd ..
ulimit -n 10000
sudo launchctl limit maxfiles 65536 200000
NINJA_SUMMARIZE_BUILD=1 e build -j $NUMBER_OF_NINJA_PROCESSES
cp out/Default/.ninja_log out/electron_ninja_log
node electron/script/check-symlinks.js
- name: Build Electron dist.zip (mas)
run: |
cd src
e build electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ env.CHECK_DIST_MANIFEST }}" == "true" ]; then
target_os=mac_mas
electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.$target_os.${{ env.TARGET_ARCH }}.manifest
fi
- name: Build Mksnapshot (mas)
run: |
cd src
e build electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
# Remove unused args from mksnapshot_args
SEDOPTION="-i ''"
sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
sed $SEDOPTION '/The gn arg use_goma=true .*/d' out/Default/mksnapshot_args
e build electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
- name: Build Chromedriver (mas)
run: |
cd src
e build electron:electron_chromedriver -j $NUMBER_OF_NINJA_PROCESSES
e build electron:electron_chromedriver_zip
- name: Build Node Headers
run: |
cd src
e build electron:node_headers
- name: Generate & Zip Symbols (mas)
run: |
if [ "${{ inputs.generate-symbols }}" == "true" ]; then
e build electron:electron_symbols
fi
cd src
export BUILD_PATH="$(pwd)/out/Default"
e build electron:licenses
e build electron:electron_version_file
if [ "${{ inputs.is-release }}" == "true" ]; then
DELETE_DSYMS_AFTER_ZIP=1 electron/script/zip-symbols.py -b $BUILD_PATH
else
electron/script/zip-symbols.py -b $BUILD_PATH
fi
- name: Generate FFMpeg (mas)
if: ${{ inputs.is-release }}
run: |
cd src
gn gen out/ffmpeg --args="import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true $GN_EXTRA_ARGS"
autoninja -C out/ffmpeg electron:electron_ffmpeg_zip -j $NUMBER_OF_NINJA_PROCESSES
# TODO(vertedinde): These uploads currently point to a different Azure bucket & GitHub Repo
- name: Publish Electron Dist
if: ${{ inputs.is-release }}
run: |
rm -rf src/out/Default/obj
cd src/electron
if [ "${{ inputs.upload-to-storage }}" == "1" ]; then
echo 'Uploading Electron release distribution to Azure'
script/release/uploaders/upload.py --verbose --upload_to_storage
else
echo 'Uploading Electron release distribution to GitHub releases'
script/release/uploaders/upload.py --verbose
fi
- name: Move all Generated Artifacts to Upload Folder (mas)
run: ./src/electron/script/actions/move-artifacts.sh
- name: Upload Generated Artifacts
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: generated_artifacts_mas_${{ env.TARGET_ARCH }}
path: ./generated_artifacts_mas_${{ env.TARGET_ARCH }}
test:
if: ${{ inputs.is-release == false }}
runs-on: macos-14-xlarge
needs: build
strategy:
fail-fast: false
matrix:
build-type: [ darwin, mas ]
target-arch: [ x64, arm64 ]
env:
BUILD_TYPE: ${{ matrix.build-type }}
TARGET_ARCH: ${{ matrix.target-arch }}
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ef894bc3cfa99d84a3b731252da0f83f500e4032
npm i -g @electron/build-tools
e auto-update disable
e init --root=$(pwd) --out=Default ${{ inputs.gn-build-type }}
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
if [ "`uname`" == "Darwin" ]; then
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed -i '' '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
else
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Remove swift-format dep from cipd on macOS until we send a patch upstream.
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: generated_artifacts_${{ matrix.build-type }}_${{ matrix.target-arch }}
path: ./generated_artifacts_${{ matrix.build-type }}_${{ matrix.target-arch }}
- name: Restore Generated Artifacts
run: ./src/electron/script/actions/restore-artifacts.sh
- name: Unzip Dist, Mksnapshot & Chromedriver
run: |
cd src/out/Default
unzip -:o dist.zip
unzip -:o chromedriver.zip
unzip -:o mksnapshot.zip
- name: Import & Trust Self-Signed Codesigning Cert on MacOS
run: |
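# Allow trust-settings changes without a password prompt, then create and trust a throwaway self-signed codesigning identity for the tests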
sudo security authorizationdb write com.apple.trust-settings.admin allow
cd src/electron
./script/codesign/generate-identity.sh
- name: Run Electron Tests
env:
MOCHA_REPORTER: mocha-multi-reporters
ELECTRON_TEST_RESULTS_DIR: junit
MOCHA_MULTI_REPORTERS: mocha-junit-reporter, tap
ELECTRON_DISABLE_SECURITY_WARNINGS: 1
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
run: |
cd src/electron
node script/yarn test --runners=main --trace-uncaught --enable-logging


@@ -3,6 +3,11 @@ name: Publish MacOS
on:
workflow_dispatch:
inputs:
build-image-sha:
type: string
description: 'SHA for electron/build image'
default: 'cf814a4d2501e8e843caea071a6b70a48e78b855'
required: true
upload-to-storage:
description: 'Uploads to Azure storage'
required: false
@@ -14,11 +19,50 @@ on:
default: false
jobs:
publish:
uses: electron/electron/.github/workflows/macos-pipeline.yml@main
checkout-macos:
runs-on: aks-linux-large
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
publish-x64:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-macos
with:
environment: production-release
build-runs-on: macos-14-xlarge
target-platform: macos
target-arch: x64
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
publish-arm64:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-macos
with:
environment: production-release
build-runs-on: macos-14-xlarge
target-platform: macos
target-arch: arm64
is-release: true
gn-config: //electron/build/args/release.gn
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}


@@ -0,0 +1,88 @@
name: Electron Build & Test (+ Node + NaN) Pipeline
on:
workflow_call:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
build-runs-on:
type: string
description: 'What host to run the build'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
required: true
build-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
test-container:
type: string
description: 'JSON container information for testing'
required: false
default: '{"image":null}'
is-release:
description: 'Whether this build job is a release job'
required: true
type: boolean
default: false
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
generate-symbols:
description: 'Whether or not to generate symbols'
required: true
type: boolean
default: false
upload-to-storage:
description: 'Whether or not to upload build artifacts to external storage'
required: true
type: string
default: '0'
concurrency:
group: electron-build-and-test-and-nan-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
jobs:
build:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
with:
build-runs-on: ${{ inputs.build-runs-on }}
build-container: ${{ inputs.build-container }}
target-platform: ${{ inputs.target-platform }}
target-arch: ${{ inputs.target-arch }}
is-release: ${{ inputs.is-release }}
gn-build-type: ${{ inputs.gn-build-type }}
generate-symbols: ${{ inputs.generate-symbols }}
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
test:
uses: ./.github/workflows/pipeline-segment-electron-test.yml
needs: build
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
test-runs-on: ${{ inputs.test-runs-on }}
test-container: ${{ inputs.test-container }}
secrets: inherit
nn-test:
uses: ./.github/workflows/pipeline-segment-node-nan-test.yml
needs: build
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
test-runs-on: ${{ inputs.test-runs-on }}
test-container: ${{ inputs.test-container }}
gn-build-type: ${{ inputs.gn-build-type }}
secrets: inherit


@@ -0,0 +1,88 @@
name: Electron Build & Test Pipeline
on:
workflow_call:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
build-runs-on:
type: string
description: 'What host to run the build'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
required: true
build-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
test-container:
type: string
description: 'JSON container information for testing'
required: false
default: '{"image":null}'
is-release:
description: 'Whether this build job is a release job'
required: true
type: boolean
default: false
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
generate-symbols:
description: 'Whether or not to generate symbols'
required: true
type: boolean
default: false
upload-to-storage:
description: 'Whether or not to upload build artifacts to external storage'
required: true
type: string
default: '0'
concurrency:
group: electron-build-and-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
jobs:
build:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
with:
build-runs-on: ${{ inputs.build-runs-on }}
build-container: ${{ inputs.build-container }}
target-platform: ${{ inputs.target-platform }}
target-arch: ${{ inputs.target-arch }}
is-release: ${{ inputs.is-release }}
gn-build-type: ${{ inputs.gn-build-type }}
generate-symbols: ${{ inputs.generate-symbols }}
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
gn-check:
if: ${{ inputs.target-platform == 'macos' || inputs.target-arch != 'arm' }}
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
with:
target-platform: ${{ inputs.target-platform }}
target-arch: ${{ inputs.target-arch }}
check-runs-on: ${{ inputs.build-runs-on }}
check-container: ${{ inputs.build-container }}
gn-build-type: ${{ inputs.gn-build-type }}
secrets: inherit
test:
uses: ./.github/workflows/pipeline-segment-electron-test.yml
needs: build
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
test-runs-on: ${{ inputs.test-runs-on }}
test-container: ${{ inputs.test-container }}
secrets: inherit


@@ -0,0 +1,43 @@
name: Electron Docs Compile
on:
workflow_call:
inputs:
container:
required: true
description: 'Container to run the docs-only ts compile in'
type: string
concurrency:
group: electron-docs-only-${{ github.ref }}
cancel-in-progress: true
jobs:
docs-only:
name: Docs Only Compile
runs-on: aks-linux-medium
timeout-minutes: 20
container: ${{ fromJSON(inputs.container) }}
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Run TS/JS compile
shell: bash
run: |
cd src/electron
node script/yarn create-typescript-definitions
node script/yarn tsc -p tsconfig.default_app.json --noEmit
for f in build/webpack/*.js
do
out="${f:29}"
if [ "$out" != "base.js" ]; then
node script/yarn webpack --config $f --output-filename=$out --output-path=./.tmp --env mode=development
fi
done


@@ -0,0 +1,77 @@
name: Electron Lint
on:
workflow_call:
inputs:
container:
required: true
description: 'Container to run lint in'
type: string
concurrency:
group: electron-lint-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
jobs:
lint:
name: Lint
runs-on: aks-linux-medium
timeout-minutes: 20
container: ${{ fromJSON(inputs.container) }}
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Setup third_party Depot Tools
shell: bash
run: |
# "depot_tools" has to be checkout into "//third_party/depot_tools" so pylint.py can a "pylintrc" file.
git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git src/third_party/depot_tools
echo "$(pwd)/src/third_party/depot_tools" >> $GITHUB_PATH
- name: Download GN Binary
shell: bash
run: |
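# Resolve the gn version pinned by the Chromium revision in Electron's DEPS and fetch exactly that build via cipd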
chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
gn_version="$(curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/DEPS?format=TEXT" | base64 -d | grep gn_version | head -n1 | cut -d\' -f4)"
cipd ensure -ensure-file - -root . <<-CIPD
\$ServiceURL https://chrome-infra-packages.appspot.com/
@Subdir src/buildtools/linux64
gn/gn/linux-amd64 $gn_version
CIPD
buildtools_path="$(pwd)/src/buildtools"
echo "CHROMIUM_BUILDTOOLS_PATH=$buildtools_path" >> $GITHUB_ENV
- name: Download clang-format Binary
shell: bash
run: |
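# Fetch the buildtools DEPS for the pinned Chromium revision; its hooks download the matching clang-format binary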
chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
mkdir -p src/buildtools
curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/buildtools/DEPS?format=TEXT" | base64 -d > src/buildtools/DEPS
gclient runhooks --spec="solutions=[{'name':'src/buildtools','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':True},'managed':False}]"
- name: Run Lint
shell: bash
run: |
# gn.py tries to find a gclient root folder starting from the current dir.
# When it fails and returns "None" path, the whole script fails. Let's "fix" it.
touch .gclient
# Another option would be to checkout "buildtools" inside the Electron checkout,
# but then we would lint its contents (at least gn format), and it doesn't pass it.
cd src/electron
node script/yarn install --frozen-lockfile
node script/yarn lint
- name: Run Script Typechecker
shell: bash
run: |
cd src/electron
node script/yarn tsc -p tsconfig.script.json


@@ -0,0 +1,216 @@
name: Pipeline Segment - Electron Build
on:
workflow_call:
inputs:
environment:
description: 'The environment to use - production or testing'
required: false
type: string
target-platform:
type: string
description: 'Platform to run on, can be macos, linux or windows'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
build-runs-on:
type: string
description: 'What host to run the build'
required: true
build-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
is-release:
description: 'Whether this build job is a release job'
required: true
type: boolean
default: false
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
generate-symbols:
description: 'Whether or not to generate symbols'
required: true
type: boolean
default: false
upload-to-storage:
description: 'Whether or not to upload build artifacts to external storage'
required: true
type: string
default: '0'
concurrency:
group: electron-build-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
env:
AZURE_AKS_CACHE_STORAGE_ACCOUNT: ${{ secrets.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}
AZURE_AKS_CACHE_SHARE_NAME: ${{ secrets.AZURE_AKS_CACHE_SHARE_NAME }}
ELECTRON_ARTIFACTS_BLOB_STORAGE: ${{ secrets.ELECTRON_ARTIFACTS_BLOB_STORAGE }}
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
ELECTRON_GITHUB_TOKEN: ${{ secrets.ELECTRON_GITHUB_TOKEN }}
# Disable pre-compiled headers to reduce the size of the out directory - only useful for rebuilds
GN_BUILDFLAG_ARGS: 'enable_precompiled_headers=false'
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || (inputs.target-platform == 'linux' && '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' || '--custom-var=checkout_win=True') }}
# Only disable this in the Asan build
CHECK_DIST_MANIFEST: true
IS_GHA_RELEASE: true
ELECTRON_OUT_DIR: Default
jobs:
build:
runs-on: ${{ inputs.build-runs-on }}
container: ${{ fromJSON(inputs.build-container) }}
environment: ${{ inputs.environment }}
env:
TARGET_ARCH: ${{ inputs.target-arch }}
steps:
- name: Create src dir
run: mkdir src
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Setup Node.js/npm
if: ${{ inputs.target-platform == 'macos' }}
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8
with:
node-version: 20.11.x
cache: yarn
cache-dependency-path: src/electron/yarn.lock
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Install AZCopy
if: ${{ inputs.target-platform == 'macos' }}
run: brew install azcopy
- name: Enable windows toolchain
if: ${{ inputs.target-platform == 'windows' }}
run: |
echo "ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN=1" >> $GITHUB_ENV
- name: Set GN_EXTRA_ARGS for Linux
if: ${{ inputs.target-platform == 'linux' }}
run: |
if [ "${{ inputs.target-arch }}" = "arm" ]; then
GN_EXTRA_ARGS='target_cpu="arm" build_tflite_with_xnnpack=false'
elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
GN_EXTRA_ARGS='target_cpu="arm64" fatal_linker_warnings=false enable_linux_installer=false'
fi
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
SEDOPTION="-i ''"
fi
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed $SEDOPTION '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
if [ "`uname`" = "Linux" ]; then
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
- name: Restore src cache via AZCopy
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/restore-cache-azcopy
- name: Restore src cache via AKS
if: ${{ inputs.target-platform == 'linux' || inputs.target-platform == 'windows' }}
uses: ./src/electron/.github/actions/restore-cache-aks
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ff3e40a9a2ebb735c18b6450ecd5ddaa8bb364a9
npm i -g @electron/build-tools
e auto-update disable
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }} --only-sdk
- name: Run Electron Only Hooks
run: |
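# Build a minimal gclient spec for src/electron only so its hooks run without a full Chromium sync; Windows builds also need the win toolchain vars and target_os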
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]" > tmpgclient
if [ "${{ inputs.target-platform }}" = "windows" ]; then
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False,'install_sysroot':False,'checkout_win':True},'managed':False}]" > tmpgclient
echo "target_os=['win']" >> tmpgclient
fi
gclient runhooks --gclientfile=tmpgclient
# Fix VS Toolchain
if [ "${{ inputs.target-platform }}" = "windows" ]; then
rm -rf src/third_party/depot_tools/win_toolchain/vs_files
e d python3 src/build/vs_toolchain.py update --force
fi
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Fix Sync (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/fix-sync-macos
- name: Install build-tools & Setup RBE
run: |
echo "NUMBER_OF_NINJA_PROCESSES=${{ inputs.target-platform == 'linux' && '300' || '200' }}" >> $GITHUB_ENV
cd ~/.electron_build_tools
npx yarn --ignore-engines
# Pull down credential helper and print status
node -e "require('./src/utils/reclient.js').downloadAndPrepare({})"
HELPER=$(node -p "require('./src/utils/reclient.js').helperPath({})")
$HELPER login
echo 'RBE_service='`node -e "console.log(require('./src/utils/reclient.js').serviceAddress)"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper='`node -e "console.log(require('./src/utils/reclient.js').helperPath({}))"` >> $GITHUB_ENV
echo 'RBE_experimental_credentials_helper_args=print' >> $GITHUB_ENV
- name: Free up space (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/free-space-macos
- name: Build Electron
uses: ./src/electron/.github/actions/build-electron
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
artifact-platform: ${{ inputs.target-platform == 'macos' && 'darwin' || inputs.target-platform }}
is-release: '${{ inputs.is-release }}'
generate-symbols: '${{ inputs.generate-symbols }}'
upload-to-storage: '${{ inputs.upload-to-storage }}'
- name: Set GN_EXTRA_ARGS for MAS Build
if: ${{ inputs.target-platform == 'macos' }}
run: |
echo "MAS_BUILD=true" >> $GITHUB_ENV
GN_EXTRA_ARGS='is_mas_build=true'
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Build Electron (MAS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/build-electron
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
artifact-platform: 'mas'
is-release: '${{ inputs.is-release }}'
generate-symbols: '${{ inputs.generate-symbols }}'
upload-to-storage: '${{ inputs.upload-to-storage }}'
step-suffix: '(mas)'


@@ -0,0 +1,157 @@
name: Pipeline Segment - Electron GN Check
on:
workflow_call:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, linux or windows'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
check-runs-on:
type: string
description: 'What host to run the tests on'
required: true
check-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
concurrency:
group: electron-gn-check-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: true
env:
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
GN_BUILDFLAG_ARGS: 'enable_precompiled_headers=false'
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || (inputs.target-platform == 'linux' && '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' || '--custom-var=checkout_win=True') }}
ELECTRON_OUT_DIR: Default
TARGET_ARCH: ${{ inputs.target-arch }}
jobs:
gn-check:
# TODO(codebytere): Change this to medium VM
runs-on: ${{ inputs.check-runs-on }}
container: ${{ fromJSON(inputs.check-container) }}
strategy:
fail-fast: false
matrix:
build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || (inputs.target-platform == 'windows' && fromJSON('["windows"]') || fromJSON('["linux"]')) }}
env:
BUILD_TYPE: ${{ matrix.build-type }}
TARGET_ARCH: ${{ inputs.target-arch }}
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ff3e40a9a2ebb735c18b6450ecd5ddaa8bb364a9
npm i -g @electron/build-tools
e auto-update disable
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }} --only-sdk
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
SEDOPTION="-i ''"
fi
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed $SEDOPTION '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
if [ "`uname`" = "Linux" ]; then
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Set GN_EXTRA_ARGS for Linux
if: ${{ inputs.target-platform == 'linux' }}
run: |
if [ "${{ inputs.target-arch }}" = "arm" ]; then
GN_EXTRA_ARGS='build_tflite_with_xnnpack=false'
elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
GN_EXTRA_ARGS='fatal_linker_warnings=false enable_linux_installer=false'
fi
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Set GN_EXTRA_ARGS for Win/cross
if: ${{ inputs.target-platform == 'windows' }}
run: |
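# Cross-compiling for Windows from the Linux host: target the win OS and use the v8 context snapshot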
GN_APPENDED_ARGS="$GN_EXTRA_ARGS use_v8_context_snapshot=true target_os=\"win\""
echo "GN_EXTRA_ARGS=$GN_APPENDED_ARGS" >> $GITHUB_ENV
- name: Enable windows toolchain
if: ${{ inputs.target-platform == 'windows' }}
run: |
echo "ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN=1" >> $GITHUB_ENV
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
- name: Restore src cache via AZCopy
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/restore-cache-azcopy
- name: Restore src cache via AKS
if: ${{ inputs.target-platform == 'linux' || inputs.target-platform == 'windows' }}
uses: ./src/electron/.github/actions/restore-cache-aks
- name: Run Electron Only Hooks
run: |
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]" > tmpgclient
if [ "${{ inputs.target-platform }}" = "windows" ]; then
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False,'install_sysroot':False,'checkout_win':True},'managed':False}]" > tmpgclient
echo "target_os=['win']" >> tmpgclient
fi
gclient runhooks --gclientfile=tmpgclient
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Default GN gen
run: |
cd src/electron
git pack-refs
cd ..
e build --only-gen
- name: Run GN Check
run: |
cd src
e d gn check out/Default //electron:electron_lib
e d gn check out/Default //electron:electron_app
e d gn check out/Default //electron/shell/common/api:mojo
# Check the hunspell filenames
node electron/script/gen-hunspell-filenames.js --check
node electron/script/gen-libc++-filenames.js --check
- name: Wait for active SSH sessions
if: always() && !cancelled()
run: |
while [ -f /var/.ssh-lock ]
do
sleep 60
done


@@ -0,0 +1,129 @@
name: Pipeline Segment - Electron Test
on:
workflow_call:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, linux or windows'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
required: true
test-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
concurrency:
group: electron-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
env:
ELECTRON_OUT_DIR: Default
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
jobs:
test:
runs-on: ${{ inputs.test-runs-on }}
container: ${{ fromJSON(inputs.test-container) }}
strategy:
fail-fast: false
matrix:
build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || (inputs.target-platform == 'windows' && fromJSON('["windows"]') || fromJSON('["linux"]')) }}
shard: ${{ inputs.target-platform == 'macos' && fromJSON('[1]') || fromJSON('[1, 2, 3]') }}
env:
BUILD_TYPE: ${{ matrix.build-type }}
TARGET_ARCH: ${{ inputs.target-arch }}
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
shell: bash
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
shell: bash
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
if [ "`uname`" = "Darwin" ]; then
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed -i '' '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
else
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
fi
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
shell: bash
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: generated_artifacts_${{ matrix.build-type }}_${{ inputs.target-arch }}
path: ./generated_artifacts_${{ matrix.build-type }}_${{ inputs.target-arch }}
- name: Download Src Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: src_artifacts_${{ matrix.build-type }}_${{ env.TARGET_ARCH }}
path: ./src_artifacts_${{ matrix.build-type }}_${{ env.TARGET_ARCH }}
- name: Restore Generated Artifacts
shell: bash
run: ./src/electron/script/actions/restore-artifacts.sh
- name: Unzip Dist, Mksnapshot & Chromedriver
shell: bash
run: |
cd src/out/Default
unzip -:o dist.zip
unzip -:o chromedriver.zip
unzip -:o mksnapshot.zip
# - name: Import & Trust Self-Signed Codesigning Cert on MacOS
# if: ${{ inputs.target-platform == 'macos' }}
# run: |
# sudo security authorizationdb write com.apple.trust-settings.admin allow
# cd src/electron
# ./script/codesign/generate-identity.sh
- name: Run Electron Tests
shell: bash
env:
MOCHA_REPORTER: mocha-multi-reporters
ELECTRON_TEST_RESULTS_DIR: junit
MOCHA_MULTI_REPORTERS: mocha-junit-reporter, tap
ELECTRON_DISABLE_SECURITY_WARNINGS: 1
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
DISPLAY: ':99.0'
NPM_CONFIG_MSVS_VERSION: '2022'
run: |
cd src/electron
# Get which tests are on this shard
tests_files=$(node script/split-tests ${{ matrix.shard }} ${{ inputs.target-platform == 'macos' && 1 || 3 }})
# Run tests
if [ "${{ inputs.target-platform }}" != "linux" ]; then
node script/yarn test --runners=main --trace-uncaught --enable-logging --files $tests_files
else
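# Linux tests run as the unprivileged builduser under Xvfb: give it write access to the checkout and mark chrome-sandbox setuid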
chown :builduser .. && chmod g+w ..
chown -R :builduser . && chmod -R g+w .
chmod 4755 ../out/Default/chrome-sandbox
runuser -u builduser -- git config --global --add safe.directory $(pwd)
runuser -u builduser -- xvfb-run script/actions/run-tests.sh script/yarn test --runners=main --trace-uncaught --enable-logging --files $tests_files
fi
- name: Wait for active SSH sessions
if: always() && !cancelled()
shell: bash
run: |
while [ -f /var/.ssh-lock ]
do
sleep 60
done

View File

@@ -0,0 +1,167 @@
name: Pipeline Segment - Node/Nan Test
on:
workflow_call:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
required: true
test-container:
type: string
description: 'JSON container information for aks runs-on'
required: false
default: '{"image":null}'
gn-build-type:
description: 'The gn build type - testing or release'
required: true
type: string
default: testing
concurrency:
group: electron-node-nan-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
env:
ELECTRON_OUT_DIR: Default
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
jobs:
node-tests:
name: Run Node.js Tests
runs-on: aks-linux-medium-plus
timeout-minutes: 20
env:
TARGET_ARCH: ${{ inputs.target-arch }}
BUILD_TYPE: linux
container: ${{ fromJSON(inputs.test-container) }}
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ff3e40a9a2ebb735c18b6450ecd5ddaa8bb364a9
npm i -g @electron/build-tools
e auto-update disable
e init --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: generated_artifacts_${{ env.BUILD_TYPE }}_${{ env.TARGET_ARCH }}
path: ./generated_artifacts_${{ env.BUILD_TYPE }}_${{ env.TARGET_ARCH }}
- name: Download Src Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: src_artifacts_linux_${{ env.TARGET_ARCH }}
path: ./src_artifacts_linux_${{ env.TARGET_ARCH }}
- name: Restore Generated Artifacts
run: ./src/electron/script/actions/restore-artifacts.sh
- name: Unzip Dist
run: |
cd src/out/Default
unzip -:o dist.zip
- name: Setup Linux for Headless Testing
run: sh -e /etc/init.d/xvfb start
- name: Run Node.js Tests
run: |
cd src
node electron/script/node-spec-runner.js --default --jUnitDir=junit
- name: Wait for active SSH sessions
if: always() && !cancelled()
run: |
while [ -f /var/.ssh-lock ]
do
sleep 60
done
nan-tests:
name: Run Nan Tests
runs-on: aks-linux-medium
timeout-minutes: 20
env:
TARGET_ARCH: ${{ inputs.target-arch }}
BUILD_TYPE: linux
container: ${{ fromJSON(inputs.test-container) }}
steps:
- name: Load Build Tools
run: |
export BUILD_TOOLS_SHA=ff3e40a9a2ebb735c18b6450ecd5ddaa8bb364a9
npm i -g @electron/build-tools
e auto-update disable
e init --root=$(pwd) --out=Default ${{ inputs.gn-build-type }}
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Install Dependencies
run: |
cd src/electron
node script/yarn install
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: generated_artifacts_${{ env.BUILD_TYPE }}_${{ env.TARGET_ARCH }}
path: ./generated_artifacts_${{ env.BUILD_TYPE }}_${{ env.TARGET_ARCH }}
- name: Download Src Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
with:
name: src_artifacts_linux_${{ env.TARGET_ARCH }}
path: ./src_artifacts_linux_${{ env.TARGET_ARCH }}
- name: Restore Generated Artifacts
run: ./src/electron/script/actions/restore-artifacts.sh
- name: Unzip Dist
run: |
cd src/out/Default
unzip -:o dist.zip
- name: Setup Linux for Headless Testing
run: sh -e /etc/init.d/xvfb start
- name: Run Node.js Tests
run: |
cd src
node electron/script/nan-spec-runner.js
- name: Wait for active SSH sessions
if: always() && !cancelled()
run: |
while [ -f /var/.ssh-lock ]
do
sleep 60
done

View File

@@ -80,4 +80,4 @@ Returns `Menu | null` - The application's [dock menu][dock-menu].
Sets the `image` associated with this dock icon.
[dock-menu]: https://developer.apple.com/macos/human-interface-guidelines/menus/dock-menus/
[dock-menu]: https://developer.apple.com/design/human-interface-guidelines/dock-menus

View File

@@ -7,17 +7,32 @@ if [ "`uname`" == "Darwin" ]; then
BUILD_TYPE="mas"
fi
elif [ "`uname`" == "Linux" ]; then
BUILD_TYPE="linux"
if [ "$ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN" == "1" ]; then
BUILD_TYPE="windows"
else
BUILD_TYPE="linux"
fi
else
echo "Unsupported platform"
exit 1
fi
echo Creating generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}...
rm -rf generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
mkdir generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
GENERATED_ARTIFACTS="generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
echo Creating $GENERATED_ARTIFACTS...
rm -rf $GENERATED_ARTIFACTS
mkdir $GENERATED_ARTIFACTS
SRC_ARTIFACTS="src_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
echo Creating $SRC_ARTIFACTS...
rm -rf $SRC_ARTIFACTS
mkdir $SRC_ARTIFACTS
mv_if_exist() {
if [ -f "$1" ] || [ -d "$1" ]; then
echo Storing $1
mv $1 generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
mv $1 $GENERATED_ARTIFACTS
else
echo Skipping $1 - It is not present on disk
fi
@@ -26,43 +41,67 @@ mv_if_exist() {
cp_if_exist() {
if [ -f "$1" ] || [ -d "$1" ]; then
echo Storing $1
cp $1 generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
cp $1 $GENERATED_ARTIFACTS
else
echo Skipping $1 - It is not present on disk
fi
}
tar_src_dirs_if_exist() {
mkdir build_artifacts
move_src_dirs_if_exist() {
mkdir src_artifacts
for dir in \
src/out/Default/gen/node_headers \
src/out/Default/overlapped-checker \
src/out/Default/ffmpeg \
src/out/Default/hunspell_dictionaries \
src/electron \
src/third_party/electron_node \
src/third_party/nan \
src/cross-arch-snapshots \
src/third_party/llvm-build \
src/build/linux \
src/buildtools/mac \
src/buildtools/third_party/libc++ \
src/buildtools/third_party/libc++abi \
src/third_party/libc++ \
src/third_party/libc++abi \
src/out/Default/obj/buildtools/third_party \
src/v8/tools/builtins-pgo
dirs=("src/out/Default/gen/node_headers" \
"src/out/Default/overlapped-checker" \
"src/out/Default/ffmpeg" \
"src/out/Default/hunspell_dictionaries" \
"src/third_party/electron_node" \
"src/third_party/nan" \
"src/cross-arch-snapshots" \
"src/buildtools/mac" \
"src/buildtools/third_party/libc++" \
"src/buildtools/third_party/libc++abi" \
"src/third_party/libc++" \
"src/third_party/libc++abi" \
"src/out/Default/obj/buildtools/third_party" \
"src/v8/tools/builtins-pgo")
# Only do this for linux build type, this folder
# exists for windows builds on linux hosts but we do
# not need it
if [ "$BUILD_TYPE" == "linux" ]; then
dirs+=('src/build/linux')
fi
# llvm-build is the host toolchain, for windows we need
# a different toolchain so no point copying this one
if [ "$BUILD_TYPE" != "windows" ]; then
dirs+=('src/third_party/llvm-build')
fi
# On windows we should clean up two symlinks that aren't
# compatible with the windows test runner
if [ "$BUILD_TYPE" == "windows" ]; then
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/eslint
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/.bin/eslint
rm -f src/third_party/electron_node/out/tools/bin/python
# Also need to copy electron.lib to node.lib for native module testing purposes
mkdir -p src/out/Default/gen/node_headers/Release
cp src/out/Default/electron.lib src/out/Default/gen/node_headers/Release/node.lib
fi
for dir in "${dirs[@]}"
do
if [ -d "$dir" ]; then
mkdir -p build_artifacts/$dir
cp -r $dir build_artifacts/$dir
mkdir -p src_artifacts/$(dirname $dir)
cp -r $dir/ src_artifacts/$dir
fi
done
tar -cf build_artifacts.tar build_artifacts
tar -C src_artifacts -cf src_artifacts.tar ./
mv_if_exist build_artifacts.tar
echo Storing src_artifacts.tar
mv src_artifacts.tar $SRC_ARTIFACTS
}
# Generated Artifacts
@@ -77,4 +116,4 @@ mv_if_exist src/cross-arch-snapshots
cp_if_exist src/out/electron_ninja_log
cp_if_exist src/out/Default/.ninja_log
tar_src_dirs_if_exist
move_src_dirs_if_exist
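Taken together, the updated script now stages two upload directories per build instead of one. A minimal sketch of what a Linux x64 run would leave behind — the listing is illustrative, assuming BUILD_TYPE=linux and TARGET_ARCH=x64, and abbreviated:
  # hypothetical listing on the build host after move-artifacts.sh completes
  ls generated_artifacts_linux_x64
  #   chromedriver.zip  dist.zip  ffmpeg.zip  hunspell_dictionaries.zip  mksnapshot.zip  ...
  ls src_artifacts_linux_x64
  #   src_artifacts.tar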

View File

@@ -1,24 +1,29 @@
#!/bin/bash
GENERATED_ARTIFACTS="generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
SRC_ARTIFACTS="src_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
mv_if_exist() {
if [ -f "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ] || [ -d "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ]; then
if [ -f "${GENERATED_ARTIFACTS}/$1" ] || [ -d "${GENERATED_ARTIFACTS}/$1" ]; then
echo Restoring $1 to $2
mkdir -p $2
mv generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1 $2
mv $GENERATED_ARTIFACTS/$1 $2
else
echo Skipping $1 - It is not present on disk
fi
}
untar_if_exist() {
if [ -f "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ] || [ -d "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ]; then
echo Restoring $1 to $2
tar -xf generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1 $2
if [ -f "${SRC_ARTIFACTS}/$1" ] || [ -d "${SRC_ARTIFACTS}/$1" ]; then
echo Restoring $1 to current directory
tar -xf ${SRC_ARTIFACTS}/$1
else
echo Skipping $1 - It is not present on disk
fi
}
echo Restoring artifacts from $GENERATED_ARTIFACTS
# Restore generated artifacts
mv_if_exist dist.zip src/out/Default
mv_if_exist node_headers.tar.gz src/out/Default/gen
@@ -29,5 +34,7 @@ mv_if_exist ffmpeg.zip src/out/ffmpeg
mv_if_exist hunspell_dictionaries.zip src/out/Default
mv_if_exist cross-arch-snapshots src
# Restore build artifacts
untar_if_exist build_artifacts.tar ./
echo Restoring artifacts from $SRC_ARTIFACTS
# Restore src artifacts
untar_if_exist src_artifacts.tar
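The untar_if_exist change pairs with move_src_dirs_if_exist above: the copied source directories are packed relative to src_artifacts/ on the build host and unpacked relative to the workspace root on the test host, so the original src/... paths are recreated in place. A minimal round-trip sketch, assuming BUILD_TYPE=linux and TARGET_ARCH=x64:
  # build host (move-artifacts.sh)
  tar -C src_artifacts -cf src_artifacts.tar ./
  mv src_artifacts.tar src_artifacts_linux_x64
  # test host (restore-artifacts.sh), run from the workspace root
  tar -xf src_artifacts_linux_x64/src_artifacts.tar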

script/actions/run-tests.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
set -euo pipefail
export DISPLAY=:99
Xvfb :99 -screen 0 1024x768x16 -ac &
XVFB_PID=$!
node "$@"

View File

@@ -16,7 +16,8 @@ const HASH_VERSIONS = {
const filesToHash = [
path.resolve(__dirname, '../DEPS'),
path.resolve(__dirname, '../yarn.lock'),
path.resolve(__dirname, '../script/sysroots.json')
path.resolve(__dirname, '../script/sysroots.json'),
path.resolve(__dirname, '../.github/actions/checkout/action.yml')
];
const addAllFiles = (dir) => {
@@ -36,12 +37,10 @@ addAllFiles(path.resolve(__dirname, '../patches'));
// Create Hash
const hasher = crypto.createHash('SHA256');
const addToHashAndLog = (s) => {
console.log('Hashing:', s);
return hasher.update(s);
};
addToHashAndLog(`HASH_VERSION:${HASH_VERSIONS[process.platform] || FALLBACK_HASH_VERSION}`);
for (const file of filesToHash) {
console.log('Hashing Content:', file, crypto.createHash('SHA256').update(fs.readFileSync(file)).digest('hex'));
hasher.update(fs.readFileSync(file));
}

View File

@@ -21,9 +21,9 @@ async function main () {
const outDir = utils.getOutDir({ shouldLog: true });
const nodeDir = path.resolve(BASE, 'out', outDir, 'gen', 'node_headers');
const env = {
npm_config_msvs_version: '2019',
...process.env,
npm_config_nodedir: nodeDir,
npm_config_msvs_version: '2019',
npm_config_arch: process.env.NPM_CONFIG_ARCH,
npm_config_yes: 'true'
};
@@ -90,16 +90,16 @@ async function main () {
env.LDFLAGS = ldflags;
}
const { status: buildStatus } = cp.spawnSync(NPX_CMD, ['node-gyp', 'rebuild', '--verbose', '--directory', 'test', '-j', 'max'], {
const { status: buildStatus, signal } = cp.spawnSync(NPX_CMD, ['node-gyp', 'rebuild', '--verbose', '--directory', 'test', '-j', 'max'], {
env,
cwd: NAN_DIR,
stdio: 'inherit',
shell: process.platform === 'win32'
});
if (buildStatus !== 0) {
if (buildStatus !== 0 || signal != null) {
console.error('Failed to build nan test modules');
return process.exit(buildStatus);
return process.exit(buildStatus !== 0 ? buildStatus : signal);
}
const { status: installStatus } = cp.spawnSync(NPX_CMD, [`yarn@${YARN_VERSION}`, 'install'], {
@@ -108,9 +108,10 @@ async function main () {
stdio: 'inherit',
shell: process.platform === 'win32'
});
if (installStatus !== 0) {
if (installStatus !== 0 || signal != null) {
console.error('Failed to install nan node_modules');
return process.exit(installStatus);
return process.exit(installStatus !== 0 ? installStatus : signal);
}
const onlyTests = args.only && args.only.split(',');

View File

@@ -4,6 +4,8 @@
"parallel/test-bootstrap-modules",
"parallel/test-child-process-fork-exec-path",
"parallel/test-code-cache",
"parallel/test-cluster-primary-error",
"parallel/test-cluster-primary-kill",
"parallel/test-crypto-aes-wrap",
"parallel/test-crypto-authenticated-stream",
"parallel/test-crypto-des3-wrap",

View File

@@ -33,6 +33,7 @@ const circleCIPublishIndividualArches = {
};
const ghActionsPublishWorkflows = [
'linux-publish',
'macos-publish'
];
@@ -92,7 +93,7 @@ async function githubActionsCall (targetBranch, workflowName, options) {
}
await octokit.request(`POST ${GH_ACTIONS_API_URL}/workflows/${workflowName}.yml/dispatches`, {
ref: buildRequest.branch,
ref: `refs/tags/${options.newVersion}`,
inputs: {
...buildRequest.parameters
},

View File

@@ -158,9 +158,10 @@ async function pushRelease (branch) {
}
}
async function runReleaseBuilds (branch) {
async function runReleaseBuilds (branch, newVersion) {
await ciReleaseBuild(branch, {
ghRelease: true
ghRelease: true,
newVersion
});
}
@@ -190,6 +191,8 @@ async function verifyNewVersion () {
console.log(`${fail} Aborting release of ${newVersion}`);
process.exit();
}
return newVersion;
}
async function promptForVersion (version) {
@@ -225,10 +228,10 @@ async function prepareRelease (isBeta, notesOnly) {
} else {
const changes = await changesToRelease();
if (changes) {
await verifyNewVersion();
const newVersion = await verifyNewVersion();
await createRelease(currentBranch, isBeta);
await pushRelease(currentBranch);
await runReleaseBuilds(currentBranch);
await runReleaseBuilds(currentBranch, newVersion);
} else {
console.log('There are no new changes to this branch since the last release, aborting release.');
process.exit(1);

View File

@@ -48,7 +48,7 @@ def main():
if args.verbose:
enable_verbose_mode()
if args.upload_to_storage:
utcnow = datetime.datetime.now(datetime.UTC)
utcnow = datetime.datetime.utcnow()
args.upload_timestamp = utcnow.strftime('%Y%m%d')
build_version = get_electron_build_version()

View File

@@ -191,9 +191,9 @@ async function installSpecModules (dir) {
const CXXFLAGS = ['-std=c++17', process.env.CXXFLAGS].filter(x => !!x).join(' ');
const env = {
npm_config_msvs_version: '2019',
...process.env,
CXXFLAGS,
npm_config_msvs_version: '2019',
npm_config_yes: 'true'
};
if (args.electronVersion) {

script/split-tests.js Normal file
View File

@@ -0,0 +1,32 @@
const fs = require('node:fs');
const glob = require('glob');
const currentShard = parseInt(process.argv[2], 10);
const shardCount = parseInt(process.argv[3], 10);
const specFiles = glob.sync('spec/*-spec.ts');
const buckets = [];
for (let i = 0; i < shardCount; i++) {
buckets.push([]);
}
const testsInSpecFile = Object.create(null);
for (const specFile of specFiles) {
const testContent = fs.readFileSync(specFile, 'utf8');
testsInSpecFile[specFile] = testContent.split('it(').length;
}
specFiles.sort((a, b) => {
return testsInSpecFile[b] - testsInSpecFile[a];
});
let shard = 0;
for (const specFile of specFiles) {
buckets[shard].push(specFile);
shard++;
if (shard === shardCount) shard = 0;
}
console.log(buckets[currentShard - 1].join(' '));
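The splitter sorts spec files by a rough test count (occurrences of 'it(') and deals them round-robin into shardCount buckets, so shards stay roughly balanced; shard numbers are 1-indexed and the chosen bucket is printed as a space-separated list, which the test workflow captures into tests_files. A hypothetical local invocation, assuming it is run from src/electron so the spec/*-spec.ts glob resolves:
  cd src/electron
  # print the spec files assigned to shard 2 of 3 (shards are 1-indexed)
  node script/split-tests 2 3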

View File

@@ -125,7 +125,7 @@ app.whenReady().then(async () => {
const validTestPaths = argv.files && argv.files.map(file =>
path.isAbsolute(file)
? path.relative(baseElectronDir, file)
: file);
: path.normalize(file));
const filter = (file) => {
if (!/-spec\.[tj]s$/.test(file)) {
return false;