Compare commits

..

1 Commit

Author SHA1 Message Date
Keeley Hammond
0c73dad61f chore: cherry-pick 3fdedec45691 from v8 2024-12-09 13:48:53 -08:00
248 changed files with 3553 additions and 24573 deletions

6
.gitattributes vendored
View File

@@ -1,14 +1,8 @@
# `git apply` and friends don't understand CRLF, even on windows. Force those
# files to be checked out with LF endings even if core.autocrlf is true.
*.patch text eol=lf
DEPS text eol=lf
yarn.lock text eol=lf
script/zip_manifests/*.manifest text eol=lf
patches/**/.patches merge=union
# Patch file line ending exceptions (patches that affect CRLF files)
reland_lzma_sdk_update_to_24_09.patch -text
# Source code and markdown files should always use LF as line ending.
*.c text eol=lf
*.cc text eol=lf

View File

@@ -5,10 +5,10 @@ inputs:
description: 'Target arch'
required: true
target-platform:
description: 'Target platform, should be linux, win, macos'
description: 'Target platform'
required: true
artifact-platform:
description: 'Artifact platform, should be linux, win, darwin or mas'
description: 'Artifact platform, should be linux, darwin or mas'
required: true
step-suffix:
description: 'Suffix for build steps'
@@ -71,7 +71,7 @@ runs:
cd src
e build --target electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ inputs.is-asan }}" != "true" ]; then
target_os=${{ inputs.target-platform == 'macos' && 'mac' || inputs.target-platform }}
target_os=${{ inputs.target-platform == 'linux' && 'linux' || 'mac'}}
if [ "${{ inputs.artifact-platform }}" = "mas" ]; then
target_os="${target_os}_mas"
fi
@@ -82,7 +82,7 @@ runs:
run: |
cd src
e build --target electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
ELECTRON_DEPOT_TOOLS_DISABLE_LOG=1 e d gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
# Remove unused args from mksnapshot_args
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
@@ -91,7 +91,7 @@ runs:
sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
if [ "${{ inputs.target-platform }}" = "linux" ]; then
if [ "`uname`" = "Linux" ]; then
if [ "${{ inputs.target-arch }}" = "arm" ]; then
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/mksnapshot
electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/v8_context_snapshot_generator
@@ -105,15 +105,7 @@ runs:
fi
e build --target electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
if [ "${{ inputs.target-platform }}" = "win" ]; then
cd out/Default
powershell Compress-Archive -update mksnapshot_args mksnapshot.zip
powershell mkdir mktmp\\gen\\v8
powershell Copy-Item gen\\v8\\embedded.S mktmp\\gen\\v8
powershell Compress-Archive -update -Path mktmp\\gen mksnapshot.zip
else
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
fi
(cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
- name: Generate Cross-Arch Snapshot (arm/arm64) ${{ inputs.step-suffix }}
shell: bash
if: ${{ (inputs.target-arch == 'arm' || inputs.target-arch == 'arm64') && inputs.target-platform == 'linux' }}
@@ -145,25 +137,6 @@ runs:
run: |
cd src
e build --target electron:node_headers
- name: Create installed_software.json ${{ inputs.step-suffix }}
shell: powershell
if: ${{ inputs.is-release == 'true' && inputs.target-platform == 'win' }}
run: |
cd src
Get-CimInstance -Namespace root\cimv2 -Class Win32_product | Select vendor, description, @{l='install_location';e='InstallLocation'}, @{l='install_date';e='InstallDate'}, @{l='install_date_2';e='InstallDate2'}, caption, version, name, @{l='sku_number';e='SKUNumber'} | ConvertTo-Json | Out-File -Encoding utf8 -FilePath .\installed_software.json
- name: Profile Windows Toolchain ${{ inputs.step-suffix }}
shell: bash
if: ${{ inputs.is-release == 'true' && inputs.target-platform == 'win' }}
run: |
cd src
python3 electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- name: Add msdia140.dll to Path ${{ inputs.step-suffix }}
shell: bash
if: ${{ inputs.is-release == 'true' && inputs.target-platform == 'win' }}
run: |
# Needed for msdia140.dll on 64-bit windows
cd src
export PATH="$PATH:$(pwd)/third_party/llvm-build/Release+Asserts/bin"
- name: Generate & Zip Symbols ${{ inputs.step-suffix }}
shell: bash
run: |

View File

@@ -5,12 +5,6 @@ inputs:
description: 'Whether to generate and persist a SAS token for the item in the cache'
required: false
default: 'false'
use-cache:
description: 'Whether to persist the cache to the shared drive'
required: false
default: 'true'
target-platform:
description: 'Target platform, should be linux, win, macos'
runs:
using: "composite"
steps:
@@ -19,62 +13,65 @@ runs:
run: |
echo "GIT_CACHE_PATH=$(pwd)/git-cache" >> $GITHUB_ENV
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
shell: bash
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools
shell: bash
run: |
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Remove swift-format dep from cipd on macOS until we send a patch upstream.
cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
touch .disable_auto_update
- name: Add Depot Tools to PATH
shell: bash
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
shell: bash
run: |
node src/electron/script/generate-deps-hash.js
DEPSHASH="v1-src-cache-$(cat src/electron/.depshash)"
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_FILE=$DEPSHASH.tar" >> $GITHUB_ENV
if [ "${{ inputs.target-platform }}" = "win" ]; then
echo "CACHE_DRIVE=/mnt/win-cache" >> $GITHUB_ENV
else
echo "CACHE_DRIVE=/mnt/cross-instance-cache" >> $GITHUB_ENV
fi
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Generate SAS Key
if: ${{ inputs.generate-sas-token == 'true' }}
shell: bash
run: |
curl --unix-socket /var/run/sas/sas.sock --fail "http://foo/$CACHE_FILE?platform=${{ inputs.target-platform }}" > sas-token
curl --unix-socket /var/run/sas/sas.sock --fail "http://foo/$DEPSHASH.tar" > sas-token
- name: Save SAS Key
if: ${{ inputs.generate-sas-token == 'true' }}
uses: actions/cache/save@d4323d4df104b026a6aa633fdb11d772146be0bf
uses: actions/cache/save@v4
with:
path: sas-token
key: sas-key-${{ inputs.target-platform }}-${{ github.run_number }}-${{ github.run_attempt }}
enableCrossOsArchive: true
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Check If Cache Exists
id: check-cache
shell: bash
run: |
if [[ "${{ inputs.use-cache }}" == "false" ]]; then
echo "Not using cache this time..."
cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ] || [ `du $cache_path | cut -f1` = "0" ]; then
echo "cache_exists=false" >> $GITHUB_OUTPUT
echo "Cache Does Not Exist for $DEPSHASH"
else
cache_path=$CACHE_DRIVE/$CACHE_FILE
echo "Using cache key: $DEPSHASH"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ] || [ `du $cache_path | cut -f1` = "0" ]; then
echo "cache_exists=false" >> $GITHUB_OUTPUT
echo "Cache Does Not Exist for $DEPSHASH"
else
echo "cache_exists=true" >> $GITHUB_OUTPUT
echo "Cache Already Exists for $DEPSHASH, Skipping.."
fi
echo "cache_exists=true" >> $GITHUB_OUTPUT
echo "Cache Already Exists for $DEPSHASH, Skipping.."
fi
- name: Check cross instance cache disk space
if: steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true'
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
# if there is less than 35 GB free space then creating the cache might fail so exit early
freespace=`df -m $CACHE_DRIVE | grep -w $CACHE_DRIVE | awk '{print $4}'`
freespace_human=`df -h $CACHE_DRIVE | grep -w $CACHE_DRIVE | awk '{print $4}'`
if [ $freespace -le 35000 ]; then
# if there is less than 20 GB free space then creating the cache might fail so exit early
freespace=`df -m /mnt/cross-instance-cache | grep -w /mnt/cross-instance-cache | awk '{print $4}'`
freespace_human=`df -h /mnt/cross-instance-cache | grep -w /mnt/cross-instance-cache | awk '{print $4}'`
if [ $freespace -le 20000 ]; then
echo "The cross mount cache has $freespace_human free space which is not enough - exiting"
exit 1
else
@@ -84,17 +81,13 @@ runs:
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
e d gclient config \
gclient config \
--name "src/electron" \
--unmanaged \
${GCLIENT_EXTRA_ARGS} \
"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
if [ "$TARGET_OS" != "" ]; then
echo "target_os=['$TARGET_OS']" >> ./.gclient
fi
ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 e d gclient sync --with_branch_heads --with_tags -vv
ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 gclient sync --with_branch_heads --with_tags -vvvvv
if [ "${{ inputs.is-release }}" != "true" && -n "${{ env.PATCH_UP_APP_CREDS }}" ]; then
# Re-export all the patches to check if there were changes.
python3 src/electron/script/export_all_patches.py src/electron/patches/config.json
@@ -135,13 +128,13 @@ runs:
# https://dawn-review.googlesource.com/c/dawn/+/83901
# TODO: maybe better to always leave out */.git/HEAD file for all targets ?
- name: Delete .git directories under src to free space
if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
cd src
( find . -type d -name ".git" -not -path "./third_party/angle/*" -not -path "./third_party/dawn/*" -not -path "./electron/*" ) | xargs rm -rf
- name: Minimize Cache Size for Upload
if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
rm -rf src/android_webview
@@ -152,23 +145,20 @@ runs:
rm -rf src/third_party/angle/third_party/VK-GL-CTS/src
rm -rf src/third_party/swift-toolchain
rm -rf src/third_party/swiftshader/tests/regres/testlists
cp src/electron/.github/actions/checkout/action.yml ./
rm -rf src/electron
mkdir -p src/electron/.github/actions/checkout
mv action.yml src/electron/.github/actions/checkout
- name: Compress Src Directory
if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
echo "Uncompressed src size: $(du -sh src | cut -f1 -d' ')"
tar -cf $CACHE_FILE src
echo "Compressed src to $(du -sh $CACHE_FILE | cut -f1 -d' ')"
cp ./$CACHE_FILE $CACHE_DRIVE/
tar -cf $DEPSHASH.tar src
echo "Compressed src to $(du -sh $DEPSHASH.tar | cut -f1 -d' ')"
cp ./$DEPSHASH.tar /mnt/cross-instance-cache/
- name: Persist Src Cache
if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
if: steps.check-cache.outputs.cache_exists == 'false'
shell: bash
run: |
final_cache_path=$CACHE_DRIVE/$CACHE_FILE
final_cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking path: $final_cache_path"
if [ ! -f "$final_cache_path" ]; then

View File

@@ -1,40 +0,0 @@
name: 'CIPD install'
description: 'Installs the specified CIPD package'
inputs:
cipd-root-prefix-path:
description: 'Path to prepend to installation directory'
default: ''
dependency:
description: 'Name of dependency to install'
deps-file:
description: 'Location of DEPS file that defines the dependency'
installation-dir:
description: 'Location to install dependency'
target-platform:
description: 'Target platform, should be linux, win, macos'
package:
description: 'Package to install'
runs:
using: "composite"
steps:
- name: Delete wrong ${{ inputs.dependency }}
shell: bash
run : |
rm -rf ${{ inputs.cipd-root-prefix-path }}${{ inputs.installation-dir }}
- name: Create ensure file for ${{ inputs.dependency }}
shell: bash
run: |
echo '${{ inputs.package }}' `e d gclient getdep --deps-file=${{ inputs.deps-file }} -r '${{ inputs.installation-dir }}:${{ inputs.package }}'` > ${{ inputs.dependency }}_ensure_file
cat ${{ inputs.dependency }}_ensure_file
- name: CIPD installation of ${{ inputs.dependency }} (macOS)
if: ${{ inputs.target-platform == 'macos' }}
shell: bash
run: |
echo "ensuring ${{ inputs.dependency }} on macOS"
e d cipd ensure --root ${{ inputs.cipd-root-prefix-path }}${{ inputs.installation-dir }} -ensure-file ${{ inputs.dependency }}_ensure_file
- name: CIPD installation of ${{ inputs.dependency }} (Windows)
if: ${{ inputs.target-platform == 'win' }}
shell: powershell
run: |
echo "ensuring ${{ inputs.dependency }} on Windows"
e d cipd ensure --root ${{ inputs.cipd-root-prefix-path }}${{ inputs.installation-dir }} -ensure-file ${{ inputs.dependency }}_ensure_file

View File

@@ -0,0 +1,61 @@
name: 'Fix Sync macOS'
description: 'Checks out Electron and stores it in the AKS Cache'
runs:
using: "composite"
steps:
- name: Fix Sync
shell: bash
# This step is required to correct for differences between "gclient sync"
# on Linux and the expected state on macOS. This requires:
# 1. Fixing Clang Install (wrong binary)
# 2. Fixing esbuild (wrong binary)
# 3. Fixing rustc (wrong binary)
# 4. Fixing gn (wrong binary)
# 5. Fix reclient (wrong binary)
# 6. Fixing dsymutil (wrong binary)
# 7. Ensuring we are using the correct ninja and adding it to PATH
# 8. Fixing angle (wrong remote)
run : |
SEDOPTION="-i ''"
rm -rf src/third_party/llvm-build
python3 src/tools/clang/scripts/update.py
echo 'infra/3pp/tools/esbuild/${platform}' `gclient getdep --deps-file=src/third_party/devtools-frontend/src/DEPS -r 'third_party/esbuild:infra/3pp/tools/esbuild/${platform}'` > esbuild_ensure_file
# Remove extra output from calling gclient getdep which always calls update_depot_tools
sed -i '' "s/Updating depot_tools... //g" esbuild_ensure_file
cipd ensure --root src/third_party/devtools-frontend/src/third_party/esbuild -ensure-file esbuild_ensure_file
rm -rf src/third_party/rust-toolchain
python3 src/tools/rust/update_rust.py
# Prevent calling gclient getdep which always calls update_depot_tools
echo 'gn/gn/mac-${arch}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/mac:gn/gn/mac-${arch}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/mac -ensure-file gn_ensure_file
# Prevent calling gclient getdep which always calls update_depot_tools
echo 'infra/rbe/client/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/buildtools/reclient:infra/rbe/client/${platform}'` > gn_ensure_file
sed -i '' "s/Updating depot_tools... //g" gn_ensure_file
cipd ensure --root src/buildtools/reclient -ensure-file gn_ensure_file
python3 src/buildtools/reclient_cfgs/configure_reclient_cfgs.py --rbe_instance "projects/rbe-chrome-untrusted/instances/default_instance" --reproxy_cfg_template reproxy.cfg.template --rewrapper_cfg_project "" --skip_remoteexec_cfg_fetch
if [ "${{ env.TARGET_ARCH }}" == "arm64" ]; then
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.arm64.sha1
else
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.x64.sha1
fi
python3 src/third_party/depot_tools/download_from_google_storage.py --no_resume --no_auth --bucket chromium-browser-clang -s $DSYM_SHA_FILE -o src/tools/clang/dsymutil/bin/dsymutil
echo 'infra/3pp/tools/ninja/${platform}' `gclient getdep --deps-file=src/DEPS -r 'src/third_party/ninja:infra/3pp/tools/ninja/${platform}'` > ninja_ensure_file
sed $SEDOPTION "s/Updating depot_tools... //g" ninja_ensure_file
cipd ensure --root src/third_party/ninja -ensure-file ninja_ensure_file
echo "$(pwd)/src/third_party/ninja" >> $GITHUB_PATH
cd src/third_party/angle
rm -f .git/objects/info/alternates
git remote set-url origin https://chromium.googlesource.com/angle/angle.git
cp .git/config .git/config.backup
git remote remove origin
mv .git/config.backup .git/config
git fetch

View File

@@ -1,121 +0,0 @@
name: 'Fix Sync'
description: 'Ensures proper binaries are in place'
# This action is required to correct for differences between "gclient sync"
# on Linux and the expected state on macOS/windows. This requires:
# 1. Fixing Clang Install (wrong binary)
# 2. Fixing esbuild (wrong binary)
# 3. Fixing rustc (wrong binary)
# 4. Fixing gn (wrong binary)
# 5. Fix reclient (wrong binary)
# 6. Fixing dsymutil (wrong binary)
# 7. Ensuring we are using the correct ninja and adding it to PATH
# 8. Fixing angle (wrong remote)
# 9. Install windows toolchain on Windows
# 10. Fix node binary on Windows
# 11. Fix rc binary on Windows
inputs:
target-platform:
description: 'Target platform, should be linux, win, macos'
runs:
using: "composite"
steps:
- name: Fix clang
shell: bash
run : |
rm -rf src/third_party/llvm-build
python3 src/tools/clang/scripts/update.py
- name: Fix esbuild
uses: ./src/electron/.github/actions/cipd-install
with:
cipd-root-prefix-path: src/third_party/devtools-frontend/src/
dependency: esbuild
deps-file: src/third_party/devtools-frontend/src/DEPS
installation-dir: third_party/esbuild
target-platform: ${{ inputs.target-platform }}
package: infra/3pp/tools/esbuild/${platform}
- name: Fix rustc
shell: bash
run : |
rm -rf src/third_party/rust-toolchain
python3 src/tools/rust/update_rust.py
- name: Fix gn (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/cipd-install
with:
dependency: gn
deps-file: src/DEPS
installation-dir: src/buildtools/mac
target-platform: ${{ inputs.target-platform }}
package: gn/gn/mac-${arch}
- name: Fix gn (Windows)
if: ${{ inputs.target-platform == 'win' }}
uses: ./src/electron/.github/actions/cipd-install
with:
dependency: gn
deps-file: src/DEPS
installation-dir: src/buildtools/win
target-platform: ${{ inputs.target-platform }}
package: gn/gn/windows-amd64
- name: Fix reclient
uses: ./src/electron/.github/actions/cipd-install
with:
dependency: reclient
deps-file: src/DEPS
installation-dir: src/buildtools/reclient
target-platform: ${{ inputs.target-platform }}
package: infra/rbe/client/${platform}
- name: Configure reclient configs
shell: bash
run : |
python3 src/buildtools/reclient_cfgs/configure_reclient_cfgs.py --rbe_instance "projects/rbe-chrome-untrusted/instances/default_instance" --reproxy_cfg_template reproxy.cfg.template --rewrapper_cfg_project "" --skip_remoteexec_cfg_fetch
- name: Fix dsymutil (macOS)
if: ${{ inputs.target-platform == 'macos' }}
shell: bash
run : |
# Fix dsymutil
if [ "${{ inputs.target-platform }}" = "macos" ]; then
if [ "${{ env.TARGET_ARCH }}" == "arm64" ]; then
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.arm64.sha1
else
DSYM_SHA_FILE=src/tools/clang/dsymutil/bin/dsymutil.x64.sha1
fi
python3 src/third_party/depot_tools/download_from_google_storage.py --no_resume --no_auth --bucket chromium-browser-clang -s $DSYM_SHA_FILE -o src/tools/clang/dsymutil/bin/dsymutil
fi
- name: Fix ninja
uses: ./src/electron/.github/actions/cipd-install
with:
dependency: ninja
deps-file: src/DEPS
installation-dir: src/third_party/ninja
target-platform: ${{ inputs.target-platform }}
package: infra/3pp/tools/ninja/${platform}
- name: Set ninja in path
shell: bash
run : |
echo "$(pwd)/src/third_party/ninja" >> $GITHUB_PATH
- name: Fixup angle git
shell: bash
run : |
cd src/third_party/angle
rm -f .git/objects/info/alternates
git remote set-url origin https://chromium.googlesource.com/angle/angle.git
cp .git/config .git/config.backup
git remote remove origin
mv .git/config.backup .git/config
git fetch
- name: Get Windows toolchain
if: ${{ inputs.target-platform == 'win' }}
shell: powershell
run: e d vpython3 src\build\vs_toolchain.py update --force
- name: Download nodejs
if: ${{ inputs.target-platform == 'win' }}
shell: powershell
run: |
$nodedeps = e d gclient getdep --deps-file=src/DEPS -r src/third_party/node/win | ConvertFrom-JSON
$sha1 = $nodedeps.object_name.split('/')[1]
python3 src\third_party\depot_tools\download_from_google_storage.py --no_resume --no_auth --bucket chromium-nodejs -o src\third_party\node\win\node.exe $sha1
- name: Install rc
if: ${{ inputs.target-platform == 'win' }}
shell: bash
run: |
python3 src/third_party/depot_tools/download_from_google_storage.py --no_resume --no_auth --bucket chromium-browser-clang/rc -s src/build/toolchain/win/rc/win/rc.exe.sha1

View File

@@ -6,17 +6,6 @@ runs:
- name: Install Build Tools
shell: bash
run: |
if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
git config --global core.filemode false
git config --global core.autocrlf false
git config --global branch.autosetuprebase always
fi
export BUILD_TOOLS_SHA=8246e57791b0af4ae5975eb96f09855f9269b1cd
export BUILD_TOOLS_SHA=eeb1a11392e4cec08fd926c93b31ab556dc0c23b
npm i -g @electron/build-tools
e auto-update disable
e d auto-update disable
if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
e d cipd.bat --version
cp "C:\Python311\python.exe" "C:\Python311\python3.exe"
fi
echo "$HOME/.electron_build_tools/third_party/depot_tools" >> $GITHUB_PATH

View File

@@ -1,21 +0,0 @@
name: 'Install Dependencies'
description: 'Installs yarn depdencies using cache when available'
runs:
using: "composite"
steps:
- name: Get yarn cache directory path
shell: bash
id: yarn-cache-dir-path
run: echo "dir=$(node src/electron/script/yarn cache dir)" >> $GITHUB_OUTPUT
- uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('src/electron/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
shell: bash
run: |
cd src/electron
node script/yarn install --frozen-lockfile --prefer-offline

View File

@@ -1,20 +1,12 @@
name: 'Restore Cache AKS'
description: 'Restores Electron src cache via AKS'
inputs:
target-platform:
description: 'Target platform, should be linux, win, macos'
runs:
using: "composite"
steps:
- name: Restore and Ensure Src Cache
shell: bash
run: |
if [ "${{ inputs.target-platform }}" = "win" ]; then
cache_path=/mnt/win-cache/$DEPSHASH.tar
else
cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
fi
cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
echo "Using cache key: $DEPSHASH"
echo "Checking for cache in: $cache_path"
if [ ! -f "$cache_path" ]; then

View File

@@ -1,25 +1,22 @@
name: 'Restore Cache AZCopy'
description: 'Restores Electron src cache via AZCopy'
inputs:
target-platform:
description: 'Target platform, should be linux, win, macos'
runs:
using: "composite"
steps:
- name: Obtain SAS Key
continue-on-error: true
uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf
uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9
with:
path: sas-token
key: sas-key-${{ inputs.target-platform }}-${{ github.run_number }}-1
enableCrossOsArchive: true
path: |
sas-token
key: sas-key-${{ github.run_number }}-1
- name: Obtain SAS Key
continue-on-error: true
uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf
uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9
with:
path: sas-token
key: sas-key-${{ inputs.target-platform }}-${{ github.run_number }}-${{ github.run_attempt }}
enableCrossOsArchive: true
path: |
sas-token
key: sas-key-${{ github.run_number }}-${{ github.run_attempt }}
- name: Download Src Cache from AKS
# The cache will always exist here as a result of the checkout job
# Either it was uploaded to Azure in the checkout job for this commit
@@ -29,30 +26,21 @@ runs:
timeout_minutes: 30
max_attempts: 3
retry_on: error
shell: bash
command: |
sas_token=$(cat sas-token)
if [ -z $sas-token ]; then
echo "SAS Token not found; exiting src cache download early..."
exit 1
else
if [ "${{ inputs.target-platform }}" = "win" ]; then
azcopy copy --log-level=ERROR \
"https://${{ env.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}.file.core.windows.net/${{ env.AZURE_AKS_WIN_CACHE_SHARE_NAME }}/${{ env.CACHE_PATH }}?$sas_token" $DEPSHASH.tar
else
azcopy copy --log-level=ERROR \
"https://${{ env.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}.file.core.windows.net/${{ env.AZURE_AKS_CACHE_SHARE_NAME }}/${{ env.CACHE_PATH }}?$sas_token" $DEPSHASH.tar
fi
fi
azcopy copy --log-level=ERROR \
"https://${{ env.AZURE_AKS_CACHE_STORAGE_ACCOUNT }}.file.core.windows.net/${{ env.AZURE_AKS_CACHE_SHARE_NAME }}/${{ env.CACHE_PATH }}?$sas_token" $DEPSHASH.tar
env:
AZURE_AKS_CACHE_STORAGE_ACCOUNT: f723719aa87a34622b5f7f3
AZURE_AKS_CACHE_SHARE_NAME: pvc-f6a4089f-b082-4bee-a3f9-c3e1c0c02d8f
AZURE_AKS_WIN_CACHE_SHARE_NAME: pvc-71dec4f2-0d44-4fd1-a2c3-add049d70bdf
- name: Clean SAS Key
shell: bash
run: rm -f sas-token
- name: Unzip and Ensure Src Cache
if: ${{ inputs.target-platform == 'macos' }}
shell: bash
run: |
echo "Downloaded cache is $(du -sh $DEPSHASH.tar | cut -f1)"
@@ -80,45 +68,4 @@ runs:
fi
echo "Wiping Electron Directory"
rm -rf src/electron
- name: Unzip and Ensure Src Cache (Windows)
if: ${{ inputs.target-platform == 'win' }}
shell: powershell
run: |
$src_cache = "$env:DEPSHASH.tar"
$cache_size = $(Get-Item $src_cache).length
Write-Host "Downloaded cache is $cache_size"
if ($cache_size -eq 0) {
Write-Host "Cache is empty - exiting"
exit 1
}
$TEMP_DIR=New-Item -ItemType Directory -Path temp-cache
$TEMP_DIR_PATH = $TEMP_DIR.FullName
C:\ProgramData\Chocolatey\bin\7z.exe -y x $src_cache -o"$TEMP_DIR_PATH"
- name: Move Src Cache (Windows)
if: ${{ inputs.target-platform == 'win' }}
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
timeout_minutes: 30
max_attempts: 3
retry_on: error
shell: powershell
command: |
if (Test-Path "temp-cache\src") {
Write-Host "Relocating Cache"
Remove-Item -Recurse -Force src
Move-Item temp-cache\src src
Write-Host "Deleting zip file"
Remove-Item -Force $src_cache
}
if (-Not (Test-Path "src\third_party\blink")) {
Write-Host "Cache was not correctly restored - exiting"
exit 1
}
Write-Host "Wiping Electron Directory"
Remove-Item -Recurse -Force src\electron
rm -rf src/electron

View File

@@ -1,26 +0,0 @@
name: 'Set Chromium Git Cookie'
description: 'Sets an authenticated cookie from Chromium to allow for a higher request limit'
runs:
using: "composite"
steps:
- name: Set the git cookie from chromium.googlesource.com (Unix)
if: ${{ runner.os != 'Windows' && env.CHROMIUM_GIT_COOKIE }}
shell: bash
run: |
eval 'set +o history' 2>/dev/null || setopt HIST_IGNORE_SPACE 2>/dev/null
touch ~/.gitcookies
chmod 0600 ~/.gitcookies
git config --global http.cookiefile ~/.gitcookies
tr , \\t <<\__END__ >>~/.gitcookies
${{ env.CHROMIUM_GIT_COOKIE }}
__END__
eval 'set -o history' 2>/dev/null || unsetopt HIST_IGNORE_SPACE 2>/dev/null
- name: Set the git cookie from chromium.googlesource.com (Windows)
if: ${{ runner.os == 'Windows' && env.CHROMIUM_GIT_COOKIE_WINDOWS_STRING }}
shell: cmd
run: |
git config --global http.cookiefile "%USERPROFILE%\.gitcookies"
powershell -noprofile -nologo -command Write-Output "${{ env.CHROMIUM_GIT_COOKIE_WINDOWS_STRING }}" >>"%USERPROFILE%\.gitcookies"

View File

@@ -18,11 +18,6 @@ on:
description: 'Skip Linux builds'
default: false
required: false
skip-windows:
type: boolean
description: 'Skip Windows builds'
default: false
required: false
skip-lint:
type: boolean
description: 'Skip lint check'
@@ -33,11 +28,7 @@ on:
- main
- '[1-9][0-9]-x-y'
pull_request:
defaults:
run:
shell: bash
jobs:
setup:
runs-on: ubuntu-latest
@@ -49,9 +40,7 @@ jobs:
build-image-sha: ${{ steps.set-output.outputs.build-image-sha }}
docs-only: ${{ steps.set-output.outputs.docs-only }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.0.2
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 #v4.0.2
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: filter
with:
@@ -100,7 +89,6 @@ jobs:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
outputs:
build-image-sha: ${{ needs.setup.outputs.build-image-sha }}
@@ -110,12 +98,10 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
target-platform: macos
checkout-linux:
needs: setup
@@ -128,7 +114,6 @@ jobs:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /var/run/sas:/var/run/sas
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
PATCH_UP_APP_CREDS: ${{ secrets.PATCH_UP_APP_CREDS }}
outputs:
@@ -139,74 +124,9 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
checkout-windows:
needs: setup
if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
runs-on: electron-arc-linux-amd64-32core
container:
image: ghcr.io/electron/build:${{ needs.setup.outputs.build-image-sha }}
options: --user root --device /dev/fuse --cap-add SYS_ADMIN
volumes:
- /mnt/win-cache:/mnt/win-cache
- /var/run/sas:/var/run/sas
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
CHROMIUM_GIT_COOKIE_WINDOWS_STRING: ${{ secrets.CHROMIUM_GIT_COOKIE_WINDOWS_STRING }}
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_win=True'
TARGET_OS: 'win'
ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN: '1'
outputs:
build-image-sha: ${{ needs.setup.outputs.build-image-sha}}
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
target-platform: win
# GN Check Jobs
macos-gn-check:
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
needs: checkout-macos
with:
target-platform: macos
target-archs: x64 arm64
check-runs-on: macos-14
gn-build-type: testing
secrets: inherit
linux-gn-check:
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
needs: checkout-linux
with:
target-platform: linux
target-archs: x64 arm arm64
check-runs-on: electron-arc-linux-amd64-8core
check-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
gn-build-type: testing
secrets: inherit
windows-gn-check:
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
needs: checkout-windows
with:
target-platform: win
target-archs: x64 x86 arm64
check-runs-on: electron-arc-linux-amd64-8core
check-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-windows.outputs.build-image-sha }}","options":"--user root --device /dev/fuse --cap-add SYS_ADMIN","volumes":["/mnt/win-cache:/mnt/win-cache"]}'
gn-build-type: testing
secrets: inherit
# Build Jobs - These cascade into testing jobs
macos-x64:
permissions:
@@ -217,6 +137,7 @@ jobs:
needs: checkout-macos
with:
build-runs-on: macos-14-xlarge
check-runs-on: macos-14
test-runs-on: macos-13
target-platform: macos
target-arch: x64
@@ -235,6 +156,7 @@ jobs:
needs: checkout-macos
with:
build-runs-on: macos-14-xlarge
check-runs-on: macos-14
test-runs-on: macos-14
target-platform: macos
target-arch: arm64
@@ -253,6 +175,7 @@ jobs:
needs: checkout-linux
with:
build-runs-on: electron-arc-linux-amd64-32core
check-runs-on: electron-arc-linux-amd64-8core
test-runs-on: electron-arc-linux-amd64-4core
build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -273,6 +196,7 @@ jobs:
needs: checkout-linux
with:
build-runs-on: electron-arc-linux-amd64-32core
check-runs-on: electron-arc-linux-amd64-8core
test-runs-on: electron-arc-linux-amd64-4core
build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -294,6 +218,7 @@ jobs:
needs: checkout-linux
with:
build-runs-on: electron-arc-linux-amd64-32core
check-runs-on: electron-arc-linux-amd64-8core
test-runs-on: electron-arc-linux-arm64-4core
build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/test:arm32v7-${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init","volumes":["/home/runner/externals:/mnt/runner-externals"]}'
@@ -314,6 +239,7 @@ jobs:
needs: checkout-linux
with:
build-runs-on: electron-arc-linux-amd64-32core
check-runs-on: electron-arc-linux-amd64-8core
test-runs-on: electron-arc-linux-arm64-4core
build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
test-container: '{"image":"ghcr.io/electron/test:arm64v8-${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -325,67 +251,10 @@ jobs:
upload-to-storage: '0'
secrets: inherit
windows-x64:
permissions:
contents: read
issues: read
pull-requests: read
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-windows
if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
with:
build-runs-on: electron-arc-windows-amd64-16core
test-runs-on: windows-latest
target-platform: win
target-arch: x64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
windows-x86:
permissions:
contents: read
issues: read
pull-requests: read
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-windows
if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
with:
build-runs-on: electron-arc-windows-amd64-16core
test-runs-on: windows-latest
target-platform: win
target-arch: x86
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
windows-arm64:
permissions:
contents: read
issues: read
pull-requests: read
uses: ./.github/workflows/pipeline-electron-build-and-test.yml
needs: checkout-windows
if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
with:
build-runs-on: electron-arc-windows-amd64-16core
test-runs-on: electron-hosted-windows-arm64-4core
target-platform: win
target-arch: arm64
is-release: false
gn-build-type: testing
generate-symbols: false
upload-to-storage: '0'
secrets: inherit
gha-done:
name: GitHub Actions Completed
runs-on: ubuntu-latest
needs: [docs-only, macos-x64, macos-arm64, linux-x64, linux-x64-asan, linux-arm, linux-arm64, windows-x64, windows-x86, windows-arm64]
needs: [docs-only, macos-x64, macos-arm64, linux-x64, linux-x64-asan, linux-arm, linux-arm64]
if: always() && !contains(needs.*.result, 'failure')
steps:
- name: GitHub Actions Jobs Done

View File

@@ -1,12 +1,8 @@
name: Clean Source Cache
description: |
This workflow cleans up the source cache on the cross-instance cache volume
to free up space. It runs daily at midnight and clears files older than 15 days.
on:
schedule:
- cron: "0 0 * * *"
- cron: "0 0 * * SUN" # Run at midnight every Sunday
jobs:
clean-src-cache:
@@ -16,14 +12,10 @@ jobs:
options: --user root
volumes:
- /mnt/cross-instance-cache:/mnt/cross-instance-cache
- /mnt/win-cache:/mnt/win-cache
steps:
- name: Cleanup Source Cache
shell: bash
run: |
df -h /mnt/cross-instance-cache
find /mnt/cross-instance-cache -type f -mtime +15 -delete
find /mnt/cross-instance-cache -type f -mtime +30 -delete
df -h /mnt/cross-instance-cache
df -h /mnt/win-cache
find /mnt/win-cache -type f -mtime +15 -delete
df -h /mnt/win-cache

14
.github/workflows/config/gclient.diff vendored Normal file
View File

@@ -0,0 +1,14 @@
diff --git a/gclient.py b/gclient.py
index 59e2b4c5197928bdba1ef69bdbe637d7dfe471c1..b4bae5e48c83c84bd867187afaf40eed16e69851 100755
--- a/gclient.py
+++ b/gclient.py
@@ -783,7 +783,8 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
not condition or "non_git_source" not in condition):
continue
cipd_root = self.GetCipdRoot()
- for package in dep_value.get('packages', []):
+ packages = dep_value.get('packages', [])
+ for package in (x for x in packages if "infra/3pp/tools/swift-format" not in x.get('package')):
deps_to_add.append(
CipdDependency(parent=self,
name=name,

View File

@@ -39,7 +39,6 @@ jobs:
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
target-platform: macos
publish-x64-darwin:
uses: ./.github/workflows/pipeline-segment-electron-build.yml

View File

@@ -5,7 +5,7 @@ on:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux.'
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
@@ -15,6 +15,10 @@ on:
type: string
description: 'What host to run the build'
required: true
check-runs-on:
type: string
description: 'What host to run the gn-check'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
@@ -56,8 +60,8 @@ on:
default: false
concurrency:
group: electron-build-and-test-and-nan-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
group: electron-build-and-test-and-nan-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
jobs:
build:
@@ -72,6 +76,16 @@ jobs:
generate-symbols: ${{ inputs.generate-symbols }}
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
gn-check:
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
with:
target-platform: ${{ inputs.target-platform }}
target-arch: ${{ inputs.target-arch }}
check-runs-on: ${{ inputs.check-runs-on }}
check-container: ${{ inputs.build-container }}
gn-build-type: ${{ inputs.gn-build-type }}
is-asan: ${{ inputs.is-asan }}
secrets: inherit
test:
uses: ./.github/workflows/pipeline-segment-electron-test.yml
needs: build

View File

@@ -5,7 +5,7 @@ on:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux'
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
@@ -15,6 +15,10 @@ on:
type: string
description: 'What host to run the build'
required: true
check-runs-on:
type: string
description: 'What host to run the gn-check'
required: true
test-runs-on:
type: string
description: 'What host to run the tests on'
@@ -56,8 +60,8 @@ on:
default: false
concurrency:
group: electron-build-and-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
group: electron-build-and-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
permissions:
contents: read
@@ -78,6 +82,16 @@ jobs:
upload-to-storage: ${{ inputs.upload-to-storage }}
is-asan: ${{ inputs.is-asan}}
secrets: inherit
gn-check:
uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
with:
target-platform: ${{ inputs.target-platform }}
target-arch: ${{ inputs.target-arch }}
check-runs-on: ${{ inputs.check-runs-on }}
check-container: ${{ inputs.build-container }}
gn-build-type: ${{ inputs.gn-build-type }}
is-asan: ${{ inputs.is-asan }}
secrets: inherit
test:
uses: ./.github/workflows/pipeline-segment-electron-test.yml
needs: build

View File

@@ -24,9 +24,10 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Run TS/JS compile
shell: bash
run: |

View File

@@ -9,11 +9,8 @@ on:
type: string
concurrency:
group: electron-lint-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
group: electron-lint-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
jobs:
lint:
@@ -27,11 +24,10 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Setup third_party Depot Tools
shell: bash
run: |

View File

@@ -9,11 +9,11 @@ on:
type: string
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux'
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
description: 'Arch to build for, can be x64, arm64, ia32 or arm'
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
target-variant:
type: string
@@ -61,24 +61,19 @@ on:
concurrency:
group: electron-build-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.target-variant }}-${{ inputs.is-asan }}-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
group: electron-build-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.target-variant }}-${{ inputs.is-asan }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
CHROMIUM_GIT_COOKIE_WINDOWS_STRING: ${{ secrets.CHROMIUM_GIT_COOKIE_WINDOWS_STRING }}
ELECTRON_ARTIFACTS_BLOB_STORAGE: ${{ secrets.ELECTRON_ARTIFACTS_BLOB_STORAGE }}
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
SUDOWOODO_EXCHANGE_URL: ${{ secrets.SUDOWOODO_EXCHANGE_URL }}
SUDOWOODO_EXCHANGE_TOKEN: ${{ secrets.SUDOWOODO_EXCHANGE_TOKEN }}
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || inputs.target-platform == 'win' && '--custom-var=checkout_win=True' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
ELECTRON_OUT_DIR: Default
jobs:
build:
defaults:
run:
shell: bash
runs-on: ${{ inputs.build-runs-on }}
container: ${{ fromJSON(inputs.build-container) }}
environment: ${{ inputs.environment }}
@@ -86,14 +81,12 @@ jobs:
TARGET_ARCH: ${{ inputs.target-arch }}
steps:
- name: Create src dir
run: |
mkdir src
run: mkdir src
- name: Checkout Electron
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Free up space (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/free-space-macos
@@ -108,7 +101,9 @@ jobs:
cache: yarn
cache-dependency-path: src/electron/yarn.lock
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Install AZCopy
if: ${{ inputs.target-platform == 'macos' }}
run: brew install azcopy
@@ -127,21 +122,36 @@ jobs:
GN_EXTRA_ARGS='is_asan=true'
fi
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
SEDOPTION="-i ''"
fi
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed $SEDOPTION '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
if [ "`uname`" = "Linux" ]; then
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js
DEPSHASH=v1-src-cache-$(cat src/electron/.depshash)
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
- name: Restore src cache via AZCopy
if: ${{ inputs.target-platform != 'linux' }}
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/restore-cache-azcopy
with:
target-platform: ${{ inputs.target-platform }}
- name: Restore src cache via AKS
if: ${{ inputs.target-platform == 'linux' }}
uses: ./src/electron/.github/actions/restore-cache-aks
@@ -150,29 +160,26 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Fix Sync
if: ${{ inputs.target-platform != 'linux' }}
uses: ./src/electron/.github/actions/fix-sync
with:
target-platform: ${{ inputs.target-platform }}
env:
ELECTRON_DEPOT_TOOLS_DISABLE_LOG: true
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
- name: Init Build Tools
run: |
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
- name: Run Electron Only Hooks
run: |
e d gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js
echo "DEPSHASH=$(cat src/electron/.depshash)" >> $GITHUB_ENV
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Fix Sync (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/fix-sync-macos
- name: Setup Number of Ninja Processes
run: |
echo "NUMBER_OF_NINJA_PROCESSES=${{ inputs.target-platform != 'macos' && '300' || '200' }}" >> $GITHUB_ENV
echo "NUMBER_OF_NINJA_PROCESSES=${{ inputs.target-platform == 'linux' && '300' || '200' }}" >> $GITHUB_ENV
- name: Free up space (macOS)
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/free-space-macos
@@ -182,7 +189,7 @@ jobs:
with:
target-arch: ${{ inputs.target-arch }}
target-platform: ${{ inputs.target-platform }}
artifact-platform: ${{ inputs.target-platform == 'macos' && 'darwin' || inputs.target-platform }}
artifact-platform: ${{ inputs.target-platform == 'linux' && 'linux' || 'darwin' }}
is-release: '${{ inputs.is-release }}'
generate-symbols: '${{ inputs.generate-symbols }}'
strip-binaries: '${{ inputs.strip-binaries }}'

View File

@@ -5,11 +5,11 @@ on:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux'
description: 'Platform to run on, can be macos or linux'
required: true
target-archs:
target-arch:
type: string
description: 'Archs to check for, can be x64, x86, arm64 or arm space separated'
description: 'Arch to build for, can be x64, arm64 or arm'
required: true
check-runs-on:
type: string
@@ -25,30 +25,35 @@ on:
required: true
type: string
default: testing
is-asan:
description: 'Building the Address Sanitizer (ASan) Linux build'
required: false
type: boolean
default: false
concurrency:
group: electron-gn-check-${{ inputs.target-platform }}-${{ github.ref }}
group: electron-gn-check-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.is-asan }}-${{ github.ref }}
cancel-in-progress: true
env:
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || (inputs.target-platform == 'linux' && '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' || '--custom-var=checkout_win=True') }}
GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
ELECTRON_OUT_DIR: Default
TARGET_ARCH: ${{ inputs.target-arch }}
jobs:
gn-check:
defaults:
run:
shell: bash
# TODO(codebytere): Change this to medium VM
runs-on: ${{ inputs.check-runs-on }}
container: ${{ fromJSON(inputs.check-container) }}
env:
TARGET_ARCH: ${{ inputs.target-arch }}
steps:
- name: Checkout Electron
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Cleanup disk space on macOS
if: ${{ inputs.target-platform == 'macos' }}
shell: bash
@@ -65,49 +70,61 @@ jobs:
sudo rm -rf $TMPDIR/del-target
- name: Check disk space after freeing up space
if: ${{ inputs.target-platform == 'macos' }}
run: df -h
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
run: df -h
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
- name: Enable windows toolchain
if: ${{ inputs.target-platform == 'win' }}
- name: Init Build Tools
run: |
echo "ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN=1" >> $GITHUB_ENV
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
SEDOPTION="-i"
if [ "`uname`" = "Darwin" ]; then
SEDOPTION="-i ''"
fi
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed $SEDOPTION '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
if [ "`uname`" = "Linux" ]; then
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Set GN_EXTRA_ARGS for Linux
if: ${{ inputs.target-platform == 'linux' }}
run: |
if [ "${{ inputs.target-arch }}" = "arm" ]; then
GN_EXTRA_ARGS='build_tflite_with_xnnpack=false'
elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
GN_EXTRA_ARGS='fatal_linker_warnings=false enable_linux_installer=false'
fi
echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
- name: Generate DEPS Hash
run: |
node src/electron/script/generate-deps-hash.js
DEPSHASH=v1-src-cache-$(cat src/electron/.depshash)
node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
- name: Restore src cache via AZCopy
if: ${{ inputs.target-platform == 'macos' }}
uses: ./src/electron/.github/actions/restore-cache-azcopy
with:
target-platform: ${{ inputs.target-platform }}
- name: Restore src cache via AKS
if: ${{ inputs.target-platform == 'linux' || inputs.target-platform == 'win' }}
if: ${{ inputs.target-platform == 'linux' }}
uses: ./src/electron/.github/actions/restore-cache-aks
with:
target-platform: ${{ inputs.target-platform }}
- name: Run Electron Only Hooks
run: |
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]" > tmpgclient
if [ "${{ inputs.target-platform }}" = "win" ]; then
echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False,'install_sysroot':False,'checkout_win':True},'managed':False}]" > tmpgclient
echo "target_os=['win']" >> tmpgclient
fi
e d gclient runhooks --gclientfile=tmpgclient
# Fix VS Toolchain
if [ "${{ inputs.target-platform }}" = "win" ]; then
rm -rf src/third_party/depot_tools/win_toolchain/vs_files
e d python3 src/build/vs_toolchain.py update --force
fi
gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
- name: Regenerate DEPS Hash
run: |
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js
echo "DEPSHASH=$(cat src/electron/.depshash)" >> $GITHUB_ENV
(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
- name: Add CHROMIUM_BUILDTOOLS_PATH to env
run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
- name: Checkout Electron
@@ -115,46 +132,30 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Default GN gen
run: |
cd src/electron
git pack-refs
- name: Run GN Check for ${{ inputs.target-archs }}
cd ..
e build --only-gen
- name: Run GN Check
run: |
for target_cpu in ${{ inputs.target-archs }}
do
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu $target_cpu
cd src
export GN_EXTRA_ARGS="target_cpu=\"$target_cpu\""
if [ "${{ inputs.target-platform }}" = "linux" ]; then
if [ "$target_cpu" = "arm" ]; then
export GN_EXTRA_ARGS="$GN_EXTRA_ARGS build_tflite_with_xnnpack=false"
elif [ "$target_cpu" = "arm64" ]; then
export GN_EXTRA_ARGS="$GN_EXTRA_ARGS fatal_linker_warnings=false enable_linux_installer=false"
fi
fi
if [ "${{ inputs.target-platform }}" = "win" ]; then
export GN_EXTRA_ARGS="$GN_EXTRA_ARGS use_v8_context_snapshot=true target_os=\"win\""
fi
cd src
gn check out/Default //electron:electron_lib
gn check out/Default //electron:electron_app
gn check out/Default //electron/shell/common:mojo
gn check out/Default //electron/shell/common:plugin
e build --only-gen
e d gn check out/Default //electron:electron_lib
e d gn check out/Default //electron:electron_app
e d gn check out/Default //electron/shell/common:mojo
e d gn check out/Default //electron/shell/common:plugin
# Check the hunspell filenames
node electron/script/gen-hunspell-filenames.js --check
node electron/script/gen-libc++-filenames.js --check
cd ..
done
# Check the hunspell filenames
node electron/script/gen-hunspell-filenames.js --check
node electron/script/gen-libc++-filenames.js --check
- name: Wait for active SSH sessions
if: always() && !cancelled()
shell: bash
run: |
while [ -f /var/.ssh-lock ]
do

View File

@@ -5,7 +5,7 @@ on:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux'
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
@@ -27,8 +27,8 @@ on:
default: false
concurrency:
group: electron-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.is-asan }}-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
group: electron-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.is-asan }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
permissions:
contents: read
@@ -36,52 +36,27 @@ permissions:
pull-requests: read
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
CHROMIUM_GIT_COOKIE_WINDOWS_STRING: ${{ secrets.CHROMIUM_GIT_COOKIE_WINDOWS_STRING }}
ELECTRON_OUT_DIR: Default
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
jobs:
test:
defaults:
run:
shell: bash
runs-on: ${{ inputs.test-runs-on }}
container: ${{ fromJSON(inputs.test-container) }}
strategy:
fail-fast: false
matrix:
build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || (inputs.target-platform == 'win' && fromJSON('["win"]') || fromJSON('["linux"]')) }}
shard: ${{ inputs.target-platform == 'linux' && fromJSON('[1, 2, 3]') || fromJSON('[1, 2]') }}
build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || fromJSON('["linux"]') }}
shard: ${{ inputs.target-platform == 'macos' && fromJSON('[1, 2]') || fromJSON('[1, 2, 3]') }}
env:
BUILD_TYPE: ${{ matrix.build-type }}
TARGET_ARCH: ${{ inputs.target-arch }}
ARTIFACT_KEY: ${{ matrix.build-type }}_${{ inputs.target-arch }}
steps:
- name: Fix node20 on arm32 runners
if: ${{ inputs.target-arch == 'arm' && inputs.target-platform == 'linux' }}
if: ${{ inputs.target-arch == 'arm' }}
run: |
cp $(which node) /mnt/runner-externals/node20/bin/
- name: Install Git on Windows arm64 runners
if: ${{ inputs.target-arch == 'arm64' && inputs.target-platform == 'win' }}
shell: powershell
run: |
Set-ExecutionPolicy Bypass -Scope Process -Force
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072
iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
choco install -y --no-progress git.install --params "'/GitAndUnixToolsOnPath'"
choco install -y --no-progress git
choco install -y --no-progress python --version 3.11.9
choco install -y --no-progress visualstudio2022-workload-vctools --package-parameters "--add Microsoft.VisualStudio.Component.VC.Tools.ARM64"
echo "C:\Program Files\Git\cmd" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "C:\Program Files\Git\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "C:\Python311" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
cp "C:\Python311\python.exe" "C:\Python311\python3.exe"
- name: Setup Node.js/npm
if: ${{ inputs.target-platform == 'win' }}
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6
with:
node-version: 20.11.x
- name: Add TCC permissions on macOS
if: ${{ inputs.target-platform == 'macos' }}
run: |
@@ -120,20 +95,24 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools
timeout-minutes: 5
run: |
git config --global core.filemode false
git config --global core.autocrlf false
git config --global branch.autosetuprebase always
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
if [ "`uname`" = "Darwin" ]; then
# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
sed -i '' '/ninjalog_uploader_wrapper.py/d' ./autoninja
else
sed -i '/ninjalog_uploader_wrapper.py/d' ./autoninja
# Remove swift-format dep from cipd on macOS until we send a patch upstream.
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
fi
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
@@ -155,17 +134,7 @@ jobs:
path: ./src_artifacts_${{ matrix.build-type }}_${{ inputs.target-arch }}
- name: Restore Generated Artifacts
run: ./src/electron/script/actions/restore-artifacts.sh
- name: Unzip Dist, Mksnapshot & Chromedriver (win)
if: ${{ inputs.target-platform == 'win' }}
shell: powershell
run: |
Set-ExecutionPolicy Bypass -Scope Process -Force
cd src/out/Default
Expand-Archive -Force dist.zip -DestinationPath ./
Expand-Archive -Force chromedriver.zip -DestinationPath ./
Expand-Archive -Force mksnapshot.zip -DestinationPath ./
- name: Unzip Dist, Mksnapshot & Chromedriver (unix)
if: ${{ inputs.target-platform != 'win' }}
- name: Unzip Dist, Mksnapshot & Chromedriver
run: |
cd src/out/Default
unzip -:o dist.zip
@@ -189,24 +158,15 @@ jobs:
ELECTRON_DISABLE_SECURITY_WARNINGS: 1
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
DISPLAY: ':99.0'
NPM_CONFIG_MSVS_VERSION: '2022'
run: |
cd src/electron
export ELECTRON_TEST_RESULTS_DIR=`pwd`/junit
# Get which tests are on this shard
tests_files=$(node script/split-tests ${{ matrix.shard }} ${{ inputs.target-platform == 'linux' && 3 || 2 }})
tests_files=$(node script/split-tests ${{ matrix.shard }} ${{ inputs.target-platform == 'macos' && 2 || 3 }})
# Run tests
if [ "${{ inputs.target-platform }}" != "linux" ]; then
if [ "`uname`" = "Darwin" ]; then
echo "About to start tests"
if [ "${{ inputs.target-platform }}" = "win" ]; then
if [ "${{ inputs.target-arch }}" = "x86" ]; then
export npm_config_arch="ia32"
fi
if [ "${{ inputs.target-arch }}" = "arm64" ]; then
export ELECTRON_FORCE_TEST_SUITE_EXIT="true"
fi
fi
node script/yarn test --runners=main --trace-uncaught --enable-logging --files $tests_files
else
chown :builduser .. && chmod g+w ..
@@ -237,21 +197,19 @@ jobs:
DD_CIVISIBILITY_LOGS_ENABLED: true
DD_TAGS: "os.architecture:${{ inputs.target-arch }},os.family:${{ inputs.target-platform }},os.platform:${{ inputs.target-platform }},asan:${{ inputs.is-asan }}"
run: |
if ! [ -z $DD_API_KEY ] && [ -f src/electron/junit/test-results-main.xml ]; then
export DATADOG_PATH=`node src/electron/script/yarn global bin`
$DATADOG_PATH/datadog-ci junit upload src/electron/junit/test-results-main.xml
if ! [ -z $DD_API_KEY ]; then
datadog-ci junit upload src/electron/junit/test-results-main.xml
fi
if: always() && !cancelled()
- name: Upload Test Artifacts
if: always() && !cancelled()
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: test_artifacts_${{ env.ARTIFACT_KEY }}_${{ matrix.shard }}
name: test_artifacts_${{ env.ARTIFACT_KEY }}
path: src/electron/spec/artifacts
if-no-files-found: ignore
- name: Wait for active SSH sessions
if: always() && !cancelled()
shell: bash
run: |
while [ -f /var/.ssh-lock ]
do

View File

@@ -5,7 +5,7 @@ on:
inputs:
target-platform:
type: string
description: 'Platform to run on, can be macos, win or linux'
description: 'Platform to run on, can be macos or linux'
required: true
target-arch:
type: string
@@ -27,11 +27,10 @@ on:
default: testing
concurrency:
group: electron-node-nan-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref_protected == true && github.run_id || github.ref }}
cancel-in-progress: ${{ github.ref_protected != true }}
group: electron-node-nan-test-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' && !endsWith(github.ref, '-x-y') }}
env:
CHROMIUM_GIT_COOKIE: ${{ secrets.CHROMIUM_GIT_COOKIE }}
ELECTRON_OUT_DIR: Default
ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
@@ -39,7 +38,7 @@ jobs:
node-tests:
name: Run Node.js Tests
runs-on: electron-arc-linux-amd64-8core
timeout-minutes: 30
timeout-minutes: 20
env:
TARGET_ARCH: ${{ inputs.target-arch }}
BUILD_TYPE: linux
@@ -50,16 +49,26 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
- name: Init Build Tools
run: |
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
@@ -84,7 +93,6 @@ jobs:
node electron/script/node-spec-runner.js --default --jUnitDir=junit
- name: Wait for active SSH sessions
if: always() && !cancelled()
shell: bash
run: |
while [ -f /var/.ssh-lock ]
do
@@ -93,7 +101,7 @@ jobs:
nan-tests:
name: Run Nan Tests
runs-on: electron-arc-linux-amd64-4core
timeout-minutes: 30
timeout-minutes: 20
env:
TARGET_ARCH: ${{ inputs.target-arch }}
BUILD_TYPE: linux
@@ -104,16 +112,26 @@ jobs:
with:
path: src/electron
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Set Chromium Git Cookie
uses: ./src/electron/.github/actions/set-chromium-cookie
- name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools
- name: Init Build Tools
run: |
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }}
- name: Install Dependencies
uses: ./src/electron/.github/actions/install-dependencies
run: |
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools
timeout-minutes: 5
run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
touch .disable_auto_update
- name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
- name: Download Generated Artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
@@ -137,7 +155,6 @@ jobs:
cd src
node electron/script/nan-spec-runner.js
- name: Wait for active SSH sessions
shell: bash
if: always() && !cancelled()
run: |
while [ -f /var/.ssh-lock ]

View File

@@ -0,0 +1,73 @@
name: Update AppVeyor Image
# Run cron daily Mon-Fri
on:
workflow_dispatch:
schedule:
- cron: '0 8 * * 1-5' # runs 8:00 every business day (see https://crontab.guru)
permissions: {}
jobs:
bake-appveyor-image:
name: Bake AppVeyor Image
runs-on: ubuntu-latest
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@384fd19694fe7b6dcc9a684746c6976ad78228ae # v1.1.1
id: generate-token
with:
creds: ${{ secrets.APPVEYOR_UPDATER_GH_APP_CREDS }}
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
fetch-depth: 0
token: ${{ steps.generate-token.outputs.token }}
- name: Yarn install
run: |
node script/yarn.js install --frozen-lockfile
- name: Set Repo for Commit
run: git config --global --add safe.directory $GITHUB_WORKSPACE
- name: Check AppVeyor Image
env:
APPVEYOR_TOKEN: ${{ secrets.APPVEYOR_TOKEN }}
run: |
node ./script/prepare-appveyor
if [ -f ./image_version.txt ]; then
echo "APPVEYOR_IMAGE_VERSION="$(cat image_version.txt)"" >> $GITHUB_ENV
rm image_version.txt
fi
- name: (Optionally) Update Appveyor Image
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
uses: mikefarah/yq@bbdd97482f2d439126582a59689eb1c855944955 # v4.44.3
with:
cmd: |
yq '.image = "${{ env.APPVEYOR_IMAGE_VERSION }}"' "appveyor.yml" > "appveyor2.yml"
yq '.image = "${{ env.APPVEYOR_IMAGE_VERSION }}"' "appveyor-woa.yml" > "appveyor-woa2.yml"
- name: (Optionally) Generate Commit Diff
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
run: |
diff -w -B appveyor.yml appveyor2.yml > appveyor.diff || true
patch -f appveyor.yml < appveyor.diff
rm appveyor2.yml appveyor.diff
git add appveyor.yml
- name: (Optionally) Generate Commit Diff for WOA
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
run: |
diff -w -B appveyor-woa.yml appveyor-woa2.yml > appveyor-woa.diff || true
patch -f appveyor-woa.yml < appveyor-woa.diff
rm appveyor-woa2.yml appveyor-woa.diff
git add appveyor-woa.yml
- name: (Optionally) Commit to Branch
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
uses: dsanders11/github-app-commit-action@43de6da2f4d927e997c0784c7a0b61bd19ad6aac # v1.5.0
with:
message: 'build: update appveyor image to latest version'
ref: bump-appveyor-image
token: ${{ steps.generate-token.outputs.token }}
- name: (Optionally) Create Pull Request
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
run: |
printf "This PR updates appveyor.yml to the latest baked image, ${{ env.APPVEYOR_IMAGE_VERSION }}.\n\nNotes: none" | gh pr create --head bump-appveyor-image --label no-backport --label semver/none --title 'build: update appveyor image to latest version' --body-file=-
env:
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}

View File

@@ -1,88 +0,0 @@
name: Publish Windows
on:
workflow_dispatch:
inputs:
build-image-sha:
type: string
description: 'SHA for electron/build image'
default: 'bc2f48b2415a670de18d13605b1cf0eb5fdbaae1'
required: true
upload-to-storage:
description: 'Uploads to Azure storage'
required: false
default: '1'
type: string
run-windows-publish:
description: 'Run the publish jobs vs just the build jobs'
type: boolean
default: false
jobs:
checkout-windows:
runs-on: electron-arc-linux-amd64-32core
container:
image: ghcr.io/electron/build:${{ inputs.build-image-sha }}
options: --user root --device /dev/fuse --cap-add SYS_ADMIN
volumes:
- /mnt/win-cache:/mnt/win-cache
- /var/run/sas:/var/run/sas
env:
GCLIENT_EXTRA_ARGS: '--custom-var=checkout_win=True'
TARGET_OS: 'win'
ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN: '1'
outputs:
build-image-sha: ${{ inputs.build-image-sha }}
steps:
- name: Checkout Electron
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
path: src/electron
fetch-depth: 0
- name: Checkout & Sync & Save
uses: ./src/electron/.github/actions/checkout
with:
generate-sas-token: 'true'
target-platform: win
publish-x64-win:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-windows
with:
environment: production-release
build-runs-on: electron-arc-windows-amd64-16core
target-platform: win
target-arch: x64
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
publish-arm64-win:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-windows
with:
environment: production-release
build-runs-on: electron-arc-windows-amd64-16core
target-platform: win
target-arch: arm64
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit
publish-x86-win:
uses: ./.github/workflows/pipeline-segment-electron-build.yml
needs: checkout-windows
with:
environment: production-release
build-runs-on: electron-arc-windows-amd64-16core
target-platform: win
target-arch: x86
is-release: true
gn-build-type: release
generate-symbols: true
upload-to-storage: ${{ inputs.upload-to-storage }}
secrets: inherit

1
.gitignore vendored
View File

@@ -48,6 +48,7 @@ ts-gen
# Used to accelerate CI builds
.depshash
.depshash-target
# Used to accelerate builds after sync
patches/mtime-cache.json

View File

@@ -427,7 +427,6 @@ source_set("electron_lib") {
"chromium_src:chrome_spellchecker",
"shell/common:mojo",
"shell/common:plugin",
"shell/common:web_contents_utility",
"shell/services/node/public/mojom",
"//base:base_static",
"//base/allocator:buildflags",

4
DEPS
View File

@@ -2,9 +2,9 @@ gclient_gn_args_from = 'src'
vars = {
'chromium_version':
'130.0.6723.191',
'130.0.6723.152',
'node_version':
'v20.18.3',
'v20.18.1',
'nan_version':
'e14bdcd1f72d62bca1d541b66da43130384ec213',
'squirrel.mac_version':

107
appveyor-bake.yml Normal file
View File

@@ -0,0 +1,107 @@
# The config is used to bake appveyor images, not for running CI jobs.
# The config expects the following environment variables to be set:
# - "APPVEYOR_BAKE_IMAGE" e.g. 'electron-99.0.4767.0'. Name of the image to be baked.
# Typically named after the Chromium version on which the image is built.
# This can be set dynamically in the prepare-appveyor script.
version: 1.0.{build}
build_cloud: electronhq-16-core
image: base-bake-image
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: mocha-appveyor-reporter, tap
DEPOT_TOOLS_WIN_TOOLCHAIN: 0
PYTHONIOENCODING: UTF-8
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# init:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# - appveyor version
# - ps: $ErrorActionPreference = 'Stop'
# - ps: 'Write-Host "OS Build: $((Get-CimInstance Win32_OperatingSystem).BuildNumber)"'
# clone_folder: '%USERPROFILE%\image-bake-scripts'
# clone_script:
# - ps: Invoke-WebRequest "https://github.com/appveyor/build-images/archive/1f90d94e74c8243c909a09b994e527584dfcb838.zip" -OutFile "$env:temp\scripts.zip"
# - ps: Expand-Archive -Path "$env:temp\scripts.zip" -DestinationPath "$env:temp\scripts" -Force
# - ps: Copy-Item -Path "$env:temp\scripts\build-images-1f90d94e74c8243c909a09b994e527584dfcb838\scripts\Windows\*" -Destination $env:APPVEYOR_BUILD_FOLDER -Recurse
build_script:
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# - ps: .\init_server.ps1
# - ps: .\extend_system_volume.ps1
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# - appveyor version
# - ps: .\install_path_utils.ps1
# - ps: .\install_powershell_core.ps1
# - ps: .\install_powershell_get.ps1
# - ps: .\install_7zip.ps1
# - ps: .\install_chocolatey.ps1
# - ps: .\install_webpi.ps1
# - ps: .\install_nuget.ps1
# - ps: .\install_pstools.ps1
# - ps: .\install_git.ps1
# - ps: .\install_git_lfs.ps1
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# END LINES FOR COMPLETELY NEW IMAGE
- git config --global core.longpaths true
- ps: >-
if (-not (Test-Path -Path C:\projects\src)) {
New-Item -Path C:\projects\src -ItemType Directory
}
- cd C:\projects\
- git clone -q --branch=%APPVEYOR_REPO_BRANCH% https://github.com/electron/electron.git C:\projects\src\electron
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- update_depot_tools.bat
# Uncomment the following line if windows deps change
# - src\electron\script\setup-win-for-dev.bat
- >-
gclient config
--name "src\electron"
--unmanaged
%GCLIENT_EXTRA_ARGS%
"https://github.com/electron/electron"
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: Copy-Item -path .\.depshash -destination ..\.depshash
- ps: cd ..\..
- gclient sync --with_branch_heads --with_tags --nohooks
- ps: regsvr32 /s "C:\Program Files\Microsoft Visual Studio\2022\Community\DIA SDK\bin\amd64\msdia140.dll"
- ps: set vs2022_install="C:\Program Files\Microsoft Visual Studio\2022\Community"
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# - cd %USERPROFILE%\image-bake-scripts
# - appveyor version
# - ps: .\optimize_dotnet_runtime.ps1
# - ps: .\disable_windows_background_services.ps1
# - ps: .\enforce_windows_firewall.ps1
# - ps: .\cleanup_windows.ps1
# END LINES FOR COMPLETELY NEW IMAGE
on_image_bake:
- ps: >-
echo "Baking image: $env:APPVEYOR_BAKE_IMAGE at dir $PWD"
- ps: Remove-Item -Recurse -Force C:\projects\depot_tools
- ps: Remove-Item -Recurse -Force C:\projects\src\electron
# Uncomment these lines and set APPVEYOR_RDP_PASSWORD in project settings to enable RDP after bake is done
# # on_finish:
# - ps: >-
# $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

352
appveyor-woa.yml Normal file
View File

@@ -0,0 +1,352 @@
# NOTE IF CHANGING THIS FILE, ALSO APPLY THE CHANGE TO appveyor.yml
# IF APPLICABLE!!!!
#
#
# The config expects the following environment variables to be set:
# - "GN_CONFIG" Build type. One of {'testing', 'release'}.
# - "GN_EXTRA_ARGS" Additional gn arguments for a build config,
# e.g. 'target_cpu="x86"' to build for a 32bit platform.
# https://gn.googlesource.com/gn/+/main/docs/reference.md#var_target_cpu
# Don't forget to set up "NPM_CONFIG_ARCH" and "TARGET_ARCH" accordingly
# if you pass a custom value for 'target_cpu'.
# - "ELECTRON_RELEASE" Set it to '1' upload binaries on success.
# - "NPM_CONFIG_ARCH" E.g. 'x86'. Is used to build native Node.js modules.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "TARGET_ARCH" value.
# - "TARGET_ARCH" Choose from {'ia32', 'x64', 'arm', 'arm64'}.
# Is used in some publishing scripts, but does NOT affect the Electron binary.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "NPM_CONFIG_ARCH" value.
# - "UPLOAD_TO_STORAGE" Set it to '1' upload a release to the Azure bucket.
# Otherwise the release will be uploaded to the GitHub Releases.
# (The value is only checked if "ELECTRON_RELEASE" is defined.)
#
# The publishing scripts expect access tokens to be defined as env vars,
# but those are not covered here.
#
# AppVeyor docs on variables:
# https://www.appveyor.com/docs/environment-variables/
# https://www.appveyor.com/docs/build-configuration/#secure-variables
# https://www.appveyor.com/docs/build-configuration/#custom-environment-variables
version: 1.0.{build}
build_cloud: electronhq-16-core
image: e-130.0.6695.0-node-20
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
ELECTRON_ALSO_LOG_TO_STDERR: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: "@marshallofsound/mocha-appveyor-reporter, mocha-junit-reporter, tap"
DEPOT_TOOLS_WIN_TOOLCHAIN: 1
DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL: "https://dev-cdn.electronjs.org/windows-toolchains/_"
GYP_MSVS_HASH_7393122652: 3ba76c5c20
PYTHONIOENCODING: UTF-8
matrix:
- job_name: Build Arm on X64 Windows
- job_name: Test On Windows On Arm Hardware 1
job_depends_on: Build Arm on X64 Windows
APPVEYOR_BUILD_WORKER_IMAGE: base-woa
APPVEYOR_BUILD_WORKER_CLOUD: electronhq-woa
shard: 1
- job_name: Test On Windows On Arm Hardware 2
job_depends_on: Build Arm on X64 Windows
APPVEYOR_BUILD_WORKER_IMAGE: base-woa
APPVEYOR_BUILD_WORKER_CLOUD: electronhq-woa
shard: 2
clone_script:
- ps: git clone -q $("--branch=" + $Env:APPVEYOR_REPO_BRANCH) $("https://github.com/" + $Env:APPVEYOR_REPO_NAME + ".git") $Env:APPVEYOR_BUILD_FOLDER
- ps: if (!$Env:APPVEYOR_PULL_REQUEST_NUMBER) {$("git checkout -qf " + $Env:APPVEYOR_REPO_COMMIT)}
- ps: if ($Env:APPVEYOR_PULL_REQUEST_NUMBER) {git fetch -q origin +refs/pull/$($Env:APPVEYOR_PULL_REQUEST_NUMBER)/head; git checkout -qf FETCH_HEAD}
clone_folder: C:\projects\src\electron
skip_branch_with_pr: true
# the first failed job cancels other jobs and fails entire build
matrix:
fast_finish: true
for:
- matrix:
only:
- job_name: Build Arm on X64 Windows
build_script:
# TODO: Remove --ignore-engines once WOA image is up to node 20
- ps: |
node script/yarn.js install --frozen-lockfile --ignore-engines
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "false"
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc-only change"
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "true"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- ps: Write-Host "Building $env:GN_CONFIG build"
- git config --global core.longpaths true
- ps: >-
if (Test-Path -Path "$pwd\depot_tools") {
Remove-Item -Recurse -Force $pwd\depot_tools
}
- ps: >-
if (Test-Path -Path "$pwd\build-tools") {
Remove-Item -Recurse -Force $pwd\build-tools
}
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: New-Item -Name depot_tools\.disable_auto_update -ItemType File
- depot_tools\bootstrap\win_tools.bat
- ps: |
Set-Content -Path $pwd\depot_tools\build_telemetry.cfg -Value '{"user": "info@electronjs.org", "status": "opt-out", "countdown": 10, "version": 1}'
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- ps: >-
if (Test-Path -Path "$pwd\src\electron") {
Remove-Item -Recurse -Force $pwd\src\electron
}
- git clone https://github.com/electron/build-tools.git
- cd build-tools
- npx yarn --ignore-engines
- mkdir third_party
- ps: >-
node -e "require('./src/utils/reclient.js').downloadAndPrepare({})"
- ps: $env:RECLIENT_HELPER = node -p "require('./src/utils/reclient.js').helperPath({})"
- ps: >-
& $env:RECLIENT_HELPER login
- ps: >-
$env:RBE_service = node -e "console.log(require('./src/utils/reclient.js').serviceAddress)"
- ps: >-
$env:RBE_experimental_credentials_helper = $env:RECLIENT_HELPER
- ps: >-
$env:RBE_experimental_credentials_helper_args = "print"
- ps: >-
if ($env:ELECTRON_RBE_JWT -eq '') {
$env:RBE_fail_early_min_action_count = "0"
$env:RBE_fail_early_min_fallback_ratio = "0"
}
- cd ..\..
- ps: $env:CHROMIUM_BUILDTOOLS_PATH="$pwd\src\buildtools"
- ps: >-
if ($env:GN_CONFIG -ne 'release') {
$env:NINJA_STATUS="[%r processes, %f/%t @ %o/s : %es] "
}
- gclient config --name "src\electron" --unmanaged %GCLIENT_EXTRA_ARGS% "https://github.com/electron/electron"
# Patches are applied in the image bake. Check depshash to see if patches have changed.
- ps: $env:RUN_GCLIENT_SYNC="false"
- ps: $depshash_baked = Get-Content .\src\.depshash -Raw
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: cd ..\..
- ps: >-
if ($depshash_baked -ne $depshash) {
$env:RUN_GCLIENT_SYNC="true"
}
- if "%RUN_GCLIENT_SYNC%"=="true" ( gclient sync --with_branch_heads --with_tags ) else ( gclient runhooks )
- cd src
- ps: $env:PATH="$pwd\third_party\ninja;$env:PATH"
- set BUILD_CONFIG_PATH=//electron/build/args/%GN_CONFIG%.gn
- gn gen out/Default "--args=import(\"%BUILD_CONFIG_PATH%\") use_remoteexec=true %GN_EXTRA_ARGS% "
- gn check out/Default //electron:electron_lib
- gn check out/Default //electron:electron_app
- gn check out/Default //electron/shell/common:mojo
- gn check out/Default //electron/shell/common:plugin
- autoninja -j 300 -C out/Default electron:electron_app
- if "%GN_CONFIG%"=="testing" ( python C:\depot_tools\post_build_ninja_summary.py -C out\Default )
- gn gen out/ffmpeg "--args=import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true %GN_EXTRA_ARGS%"
- autoninja -C out/ffmpeg electron:electron_ffmpeg_zip
- autoninja -C out/Default electron:electron_dist_zip
- gn desc out/Default v8:run_mksnapshot_default args > out/Default/default_mksnapshot_args
# Remove unused args from mksnapshot_args
- ps: >-
Get-Content out/Default/default_mksnapshot_args | Where-Object { -not $_.Contains('--turbo-profiling-input') -And -not $_.Contains('builtins-pgo') } | Set-Content out/Default/mksnapshot_args
- autoninja -C out/Default electron:electron_mksnapshot_zip
- cd out\Default
- 7z a mksnapshot.zip mksnapshot_args gen\v8\embedded.S
- cd ..\..
- autoninja -C out/Default electron:hunspell_dictionaries_zip
- autoninja -C out/Default electron:electron_chromedriver_zip
- autoninja -C out/Default electron:node_headers
- ps: >-
Get-CimInstance -Namespace root\cimv2 -Class Win32_product | Select vendor, description, @{l='install_location';e='InstallLocation'}, @{l='install_date';e='InstallDate'}, @{l='install_date_2';e='InstallDate2'}, caption, version, name, @{l='sku_number';e='SKUNumber'} | ConvertTo-Json | Out-File -Encoding utf8 -FilePath .\installed_software.json
- python3 electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- 7z a node_headers.zip out\Default\gen\node_headers
- 7z a nan.zip third_party\nan
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
# Needed for msdia140.dll on 64-bit windows
$env:Path += ";$pwd\third_party\llvm-build\Release+Asserts\bin"
}
- if "%GN_CONFIG%"=="release" ( autoninja -C out/Default electron:electron_symbols )
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
python3 electron\script\zip-symbols.py
appveyor-retry appveyor PushArtifact out/Default/symbols.zip
} else {
# It's useful to have pdb files when debugging testing builds that are
# built on CI.
7z a pdb.zip out\Default\*.pdb
}
- ps: |
$manifest_file = "electron/script/zip_manifests/dist_zip.win.$env:TARGET_ARCH.manifest"
python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip $manifest_file
if ($LASTEXITCODE -ne 0) {
throw "Zip contains files not listed in the manifest $manifest_file"
}
- ps: |
cd C:\projects\src
$missing_artifacts = $false
if ($env:SHOULD_SKIP_ARTIFACT_VALIDATION -eq 'true') {
Write-warning "Skipping artifact validation for doc-only $env:APPVEYOR_PROJECT_NAME"
} else {
$artifacts_to_validate = 'dist.zip','windows_toolchain_profile.json','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib','hunspell_dictionaries.zip','nan.zip'
foreach($artifact_name in $artifacts_to_validate) {
if ($artifact_name -eq 'ffmpeg.zip') {
$artifact_file = "out\ffmpeg\ffmpeg.zip"
} elseif (
$artifact_name -eq 'node_headers.zip') {
$artifact_file = $artifact_name
} elseif (
$artifact_name -eq 'nan.zip') {
$artifact_file = $artifact_name
} else {
$artifact_file = "out\Default\$artifact_name"
}
if (-not(Test-Path $artifact_file)) {
Write-warning "$artifact_name is missing and cannot be added to artifacts"
$missing_artifacts = $true
}
}
}
if ($missing_artifacts) {
throw "Build failed due to missing artifacts"
}
deploy_script:
- cd electron
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\UPLOAD_TO_STORAGE) {
Write-Output "Uploading Electron release distribution to azure"
& python3 script\release\uploaders\upload.py --verbose --upload_to_storage
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python3 script\release\uploaders\upload.py --verbose
}
}
on_finish:
# Uncomment this lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- cd C:\projects\src
- if exist out\Default\windows_toolchain_profile.json ( appveyor-retry appveyor PushArtifact out\Default\windows_toolchain_profile.json )
- if exist out\Default\dist.zip (appveyor-retry appveyor PushArtifact out\Default\dist.zip)
- if exist out\Default\chromedriver.zip (appveyor-retry appveyor PushArtifact out\Default\chromedriver.zip)
- if exist out\ffmpeg\ffmpeg.zip (appveyor-retry appveyor PushArtifact out\ffmpeg\ffmpeg.zip)
- if exist node_headers.zip (appveyor-retry appveyor PushArtifact node_headers.zip)
- if exist nan.zip (appveyor-retry appveyor PushArtifact nan.zip)
- if exist out\Default\mksnapshot.zip (appveyor-retry appveyor PushArtifact out\Default\mksnapshot.zip)
- if exist out\Default\hunspell_dictionaries.zip (appveyor-retry appveyor PushArtifact out\Default\hunspell_dictionaries.zip)
- if exist out\Default\electron.lib (appveyor-retry appveyor PushArtifact out\Default\electron.lib)
- ps: >-
if ((Test-Path "pdb.zip") -And ($env:GN_CONFIG -ne 'release')) {
appveyor-retry appveyor PushArtifact pdb.zip
}
- matrix:
only:
- job_name: Test On Windows On Arm Hardware 1
- job_name: Test On Windows On Arm Hardware 2
environment:
IGNORE_YARN_INSTALL_ERROR: 1
ELECTRON_TEST_RESULTS_DIR: C:\projects\src\electron\junit
MOCHA_MULTI_REPORTERS: "@marshallofsound/mocha-appveyor-reporter, mocha-junit-reporter, tap"
MOCHA_REPORTER: mocha-multi-reporters
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
DD_ENV: ci
DD_SERVICE: electron
DD_CIVISIBILITY_LOGS_ENABLED: true
DD_GIT_REPOSITORY_URL: "https://github.com/electron/electron.git"
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile --ignore-engines
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc only change"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- ps: Invoke-WebRequest -Uri "https://github.com/DataDog/datadog-ci/releases/latest/download/datadog-ci_win-x64" -OutFile "C:\projects\src\electron\datadog-ci.exe"
- cd ..
- mkdir out\Default
- cd ..
- ps: |
# Download build artifacts
$apiUrl = 'https://ci.appveyor.com/api'
$build_info = Invoke-RestMethod -Method Get -Uri "$apiUrl/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/builds/$env:APPVEYOR_BUILD_ID"
$artifacts_to_download = @('dist.zip','ffmpeg.zip','node_headers.zip','electron.lib', 'nan.zip')
foreach ($job in $build_info.build.jobs) {
if ($job.name -eq "Build Arm on X64 Windows") {
$jobId = $job.jobId
foreach($artifact_name in $artifacts_to_download) {
if ($artifact_name -eq 'electron.lib') {
$outfile = "src\out\Default\$artifact_name"
} else {
$outfile = $artifact_name
}
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/$artifact_name" -OutFile $outfile
}
# Uncomment the following lines to download the pdb.zip to show real stacktraces when crashes happen during testing
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/pdb.zip" -OutFile pdb.zip
7z x -y -osrc pdb.zip
}
}
- ps: |
$out_default_zips = @('dist.zip')
foreach($zip_name in $out_default_zips) {
7z x -y -osrc\out\Default $zip_name
}
- ps: 7z x -y -osrc\out\ffmpeg ffmpeg.zip
- ps: 7z x -y -osrc node_headers.zip
- ps: 7z x -y -osrc nan.zip
test_script:
# Workaround for https://github.com/appveyor/ci/issues/2420
- set "PATH=%PATH%;C:\Program Files\Git\mingw64\libexec\git-core"
- ps: |
cd src
New-Item .\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path .\out\Default\electron.lib -destination .\out\Default\gen\node_headers\Release\node.lib
- set npm_config_nodedir=%cd%\out\Default\gen\node_headers
- set npm_config_arch=arm64
- cd electron
# Explicitly set npm_config_arch because the .env doesn't persist
- ps: >-
if ($env:TARGET_ARCH -eq 'ia32') {
$env:npm_config_arch = "ia32"
}
- ps: $env:tests_files=node script\split-tests $env:shard 2
- echo "Running shard %shard% specs %tests_files%"
- echo Running main test suite & node script/yarn test --runners=main --enable-logging --disable-features=CalculateNativeWinOcclusion --files %tests_files%
- cd ..
- echo Verifying non proprietary ffmpeg & python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg
on_finish:
# Uncomment these lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- if exist electron\junit\test-results-main.xml ( appveyor-retry appveyor PushArtifact electron\junit\test-results-main.xml )
- ps: |
if ($env:DD_API_KEY) {
$env:DD_GIT_COMMIT_SHA = $env:APPVEYOR_REPO_COMMIT
$env:DD_GIT_BRANCH = $env:APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH
$env:DD_TAGS = "os.architecture:$env:TARGET_ARCH,os.family:windows,os.platform:win32"
if (Test-Path -Path "C:\projects\src\electron\junit\test-results-main.xml") {
C:\projects\src\electron\datadog-ci.exe junit upload --verbose C:\projects\src\electron\junit\test-results-main.xml
}
}

345
appveyor.yml Normal file
View File

@@ -0,0 +1,345 @@
# NOTE IF CHANGING THIS FILE, ALSO APPLY THE CHANGE TO appveyor-woa.yml
# IF APPLICABLE!!!!
#
#
# The config expects the following environment variables to be set:
# - "GN_CONFIG" Build type. One of {'testing', 'release'}.
# - "GN_EXTRA_ARGS" Additional gn arguments for a build config,
# e.g. 'target_cpu="x86"' to build for a 32bit platform.
# https://gn.googlesource.com/gn/+/main/docs/reference.md#var_target_cpu
# Don't forget to set up "NPM_CONFIG_ARCH" and "TARGET_ARCH" accordingly
# if you pass a custom value for 'target_cpu'.
# - "ELECTRON_RELEASE" Set it to '1' upload binaries on success.
# - "NPM_CONFIG_ARCH" E.g. 'x86'. Is used to build native Node.js modules.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "TARGET_ARCH" value.
# - "TARGET_ARCH" Choose from {'ia32', 'x64', 'arm', 'arm64'}.
# Is used in some publishing scripts, but does NOT affect the Electron binary.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "NPM_CONFIG_ARCH" value.
# - "UPLOAD_TO_STORAGE" Set it to '1' upload a release to the Azure bucket.
# Otherwise the release will be uploaded to the GitHub Releases.
# (The value is only checked if "ELECTRON_RELEASE" is defined.)
#
# The publishing scripts expect access tokens to be defined as env vars,
# but those are not covered here.
#
# AppVeyor docs on variables:
# https://www.appveyor.com/docs/environment-variables/
# https://www.appveyor.com/docs/build-configuration/#secure-variables
# https://www.appveyor.com/docs/build-configuration/#custom-environment-variables
version: 1.0.{build}
build_cloud: electronhq-16-core
image: e-130.0.6695.0-node-20
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
ELECTRON_ALSO_LOG_TO_STDERR: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: "@marshallofsound/mocha-appveyor-reporter, mocha-junit-reporter, tap"
DEPOT_TOOLS_WIN_TOOLCHAIN: 1
DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL: "https://dev-cdn.electronjs.org/windows-toolchains/_"
GYP_MSVS_HASH_7393122652: 3ba76c5c20
PYTHONIOENCODING: UTF-8
matrix:
- job_name: Build
- job_name: Test 1
job_depends_on: Build
shard: 1
- job_name: Test 2
job_depends_on: Build
shard: 2
clone_script:
- ps: git clone -q $("--branch=" + $Env:APPVEYOR_REPO_BRANCH) $("https://github.com/" + $Env:APPVEYOR_REPO_NAME + ".git") $Env:APPVEYOR_BUILD_FOLDER
- ps: if (!$Env:APPVEYOR_PULL_REQUEST_NUMBER) {$("git checkout -qf " + $Env:APPVEYOR_REPO_COMMIT)}
- ps: if ($Env:APPVEYOR_PULL_REQUEST_NUMBER) {git fetch -q origin +refs/pull/$($Env:APPVEYOR_PULL_REQUEST_NUMBER)/head; git checkout -qf FETCH_HEAD}
clone_folder: C:\projects\src\electron
skip_branch_with_pr: true
# the first failed job cancels the other jobs and fails the entire build
matrix:
fast_finish: true
for:
- matrix:
only:
- job_name: Build
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "false"
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc-only change"
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "true"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- ps: Write-Host "Building $env:GN_CONFIG build"
- git config --global core.longpaths true
- ps: >-
if (Test-Path -Path "$pwd\depot_tools") {
Remove-Item -Recurse -Force $pwd\depot_tools
}
- ps: >-
if (Test-Path -Path "$pwd\build-tools") {
Remove-Item -Recurse -Force $pwd\build-tools
}
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: New-Item -Name depot_tools\.disable_auto_update -ItemType File
- depot_tools\bootstrap\win_tools.bat
- ps: |
Set-Content -Path $pwd\depot_tools\build_telemetry.cfg -Value '{"user": "info@electronjs.org", "status": "opt-out", "countdown": 10, "version": 1}'
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- ps: >-
if (Test-Path -Path "$pwd\src\electron") {
Remove-Item -Recurse -Force $pwd\src\electron
}
- git clone https://github.com/electron/build-tools.git
- cd build-tools
- npx yarn --ignore-engines
- mkdir third_party
- ps: >-
node -e "require('./src/utils/reclient.js').downloadAndPrepare({})"
- ps: $env:RECLIENT_HELPER = node -p "require('./src/utils/reclient.js').helperPath({})"
- ps: >-
& $env:RECLIENT_HELPER login
- ps: >-
$env:RBE_service = node -e "console.log(require('./src/utils/reclient.js').serviceAddress)"
- ps: >-
$env:RBE_experimental_credentials_helper = $env:RECLIENT_HELPER
- ps: >-
$env:RBE_experimental_credentials_helper_args = "print"
- ps: >-
if ($env:ELECTRON_RBE_JWT -eq '') {
$env:RBE_fail_early_min_action_count = "0"
$env:RBE_fail_early_min_fallback_ratio = "0"
}
- cd ..\..
- ps: $env:CHROMIUM_BUILDTOOLS_PATH="$pwd\src\buildtools"
- ps: >-
if ($env:GN_CONFIG -ne 'release') {
$env:NINJA_STATUS="[%r processes, %f/%t @ %o/s : %es] "
}
- gclient config --name "src\electron" --unmanaged %GCLIENT_EXTRA_ARGS% "https://github.com/electron/electron"
# Patches are applied in the image bake. Check depshash to see if patches have changed.
- ps: $env:RUN_GCLIENT_SYNC="false"
- ps: $depshash_baked = Get-Content .\src\.depshash -Raw
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: cd ..\..
- ps: >-
if ($depshash_baked -ne $depshash) {
$env:RUN_GCLIENT_SYNC="true"
}
- if "%RUN_GCLIENT_SYNC%"=="true" ( gclient sync --with_branch_heads --with_tags ) else ( gclient runhooks )
- cd src
- ps: $env:PATH="$pwd\third_party\ninja;$env:PATH"
- set BUILD_CONFIG_PATH=//electron/build/args/%GN_CONFIG%.gn
- gn gen out/Default "--args=import(\"%BUILD_CONFIG_PATH%\") use_remoteexec=true %GN_EXTRA_ARGS% "
- gn check out/Default //electron:electron_lib
- gn check out/Default //electron:electron_app
- gn check out/Default //electron/shell/common:mojo
- gn check out/Default //electron/shell/common:plugin
- autoninja -j 300 -C out/Default electron:electron_app
- if "%GN_CONFIG%"=="testing" ( python C:\depot_tools\post_build_ninja_summary.py -C out\Default )
- gn gen out/ffmpeg "--args=import(\"//electron/build/args/ffmpeg.gn\") use_remoteexec=true %GN_EXTRA_ARGS%"
- autoninja -C out/ffmpeg electron:electron_ffmpeg_zip
- autoninja -C out/Default electron:electron_dist_zip
- gn desc out/Default v8:run_mksnapshot_default args > out/Default/default_mksnapshot_args
# Remove unused args from mksnapshot_args
- ps: >-
Get-Content out/Default/default_mksnapshot_args | Where-Object { -not $_.Contains('--turbo-profiling-input') -And -not $_.Contains('builtins-pgo') } | Set-Content out/Default/mksnapshot_args
- autoninja -C out/Default electron:electron_mksnapshot_zip
- cd out\Default
- 7z a mksnapshot.zip mksnapshot_args gen\v8\embedded.S
- cd ..\..
- autoninja -C out/Default electron:hunspell_dictionaries_zip
- autoninja -C out/Default electron:electron_chromedriver_zip
- autoninja -C out/Default electron:node_headers
- ps: >-
Get-CimInstance -Namespace root\cimv2 -Class Win32_product | Select vendor, description, @{l='install_location';e='InstallLocation'}, @{l='install_date';e='InstallDate'}, @{l='install_date_2';e='InstallDate2'}, caption, version, name, @{l='sku_number';e='SKUNumber'} | ConvertTo-Json | Out-File -Encoding utf8 -FilePath .\installed_software.json
- python3 electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- 7z a node_headers.zip out\Default\gen\node_headers
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
# Needed for msdia140.dll on 64-bit windows
$env:Path += ";$pwd\third_party\llvm-build\Release+Asserts\bin"
}
- if "%GN_CONFIG%"=="release" ( autoninja -C out/Default electron:electron_symbols )
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
python3 electron\script\zip-symbols.py
appveyor-retry appveyor PushArtifact out/Default/symbols.zip
} else {
# It's useful to have pdb files when debugging testing builds that are
# built on CI.
7z a pdb.zip out\Default\*.pdb
}
- ps: |
$manifest_file = "electron/script/zip_manifests/dist_zip.win.$env:TARGET_ARCH.manifest"
python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip $manifest_file
if ($LASTEXITCODE -ne 0) {
throw "Zip contains files not listed in the manifest $manifest_file"
}
- ps: |
cd C:\projects\src
$missing_artifacts = $false
if ($env:SHOULD_SKIP_ARTIFACT_VALIDATION -eq 'true') {
Write-warning "Skipping artifact validation for doc-only $env:APPVEYOR_PROJECT_NAME"
} else {
$artifacts_to_validate = 'dist.zip','windows_toolchain_profile.json','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib','hunspell_dictionaries.zip'
foreach($artifact_name in $artifacts_to_validate) {
if ($artifact_name -eq 'ffmpeg.zip') {
$artifact_file = "out\ffmpeg\ffmpeg.zip"
} elseif (
$artifact_name -eq 'node_headers.zip') {
$artifact_file = $artifact_name
} else {
$artifact_file = "out\Default\$artifact_name"
}
if (-not(Test-Path $artifact_file)) {
Write-warning "$artifact_name is missing and cannot be added to artifacts"
$missing_artifacts = $true
}
}
}
if ($missing_artifacts) {
throw "Build failed due to missing artifacts"
}
deploy_script:
- cd electron
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\UPLOAD_TO_STORAGE) {
Write-Output "Uploading Electron release distribution to azure"
& python3 script\release\uploaders\upload.py --verbose --upload_to_storage
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python3 script\release\uploaders\upload.py --verbose
}
}
on_finish:
  # Uncomment these lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- cd C:\projects\src
- if exist out\Default\windows_toolchain_profile.json ( appveyor-retry appveyor PushArtifact out\Default\windows_toolchain_profile.json )
- if exist out\Default\dist.zip (appveyor-retry appveyor PushArtifact out\Default\dist.zip)
- if exist out\Default\chromedriver.zip (appveyor-retry appveyor PushArtifact out\Default\chromedriver.zip)
- if exist out\ffmpeg\ffmpeg.zip (appveyor-retry appveyor PushArtifact out\ffmpeg\ffmpeg.zip)
- if exist node_headers.zip (appveyor-retry appveyor PushArtifact node_headers.zip)
- if exist out\Default\mksnapshot.zip (appveyor-retry appveyor PushArtifact out\Default\mksnapshot.zip)
- if exist out\Default\hunspell_dictionaries.zip (appveyor-retry appveyor PushArtifact out\Default\hunspell_dictionaries.zip)
- if exist out\Default\electron.lib (appveyor-retry appveyor PushArtifact out\Default\electron.lib)
- ps: >-
if ((Test-Path "pdb.zip") -And ($env:GN_CONFIG -ne 'release')) {
appveyor-retry appveyor PushArtifact pdb.zip
}
- matrix:
only:
- job_name: Test 1
- job_name: Test 2
environment:
DD_ENV: ci
DD_SERVICE: electron
DD_CIVISIBILITY_LOGS_ENABLED: true
DD_GIT_REPOSITORY_URL: "https://github.com/electron/electron.git"
ELECTRON_TEST_RESULTS_DIR: C:\projects\src\electron\junit
init:
- ps: |
if ($env:RUN_TESTS -ne 'true') {
Write-warning "Skipping tests for $env:APPVEYOR_PROJECT_NAME"; Exit-AppveyorBuild
}
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc only change"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- npm install -g @datadog/datadog-ci
- cd ..
- mkdir out\Default
- cd ..
- ps: |
# Download build artifacts
$apiUrl = 'https://ci.appveyor.com/api'
$build_info = Invoke-RestMethod -Method Get -Uri "$apiUrl/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/builds/$env:APPVEYOR_BUILD_ID"
$artifacts_to_download = @('dist.zip','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib')
foreach ($job in $build_info.build.jobs) {
if ($job.name -eq "Build") {
$jobId = $job.jobId
foreach($artifact_name in $artifacts_to_download) {
if ($artifact_name -eq 'electron.lib') {
$outfile = "src\out\Default\$artifact_name"
} else {
$outfile = $artifact_name
}
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/$artifact_name" -OutFile $outfile
}
      # Download the pdb.zip to show real stacktraces when crashes happen during testing
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/pdb.zip" -OutFile pdb.zip
7z x -y -osrc pdb.zip
}
}
- ps: |
$out_default_zips = @('dist.zip','chromedriver.zip','mksnapshot.zip')
foreach($zip_name in $out_default_zips) {
7z x -y -osrc\out\Default $zip_name
}
- ps: 7z x -y -osrc\out\ffmpeg ffmpeg.zip
- ps: 7z x -y -osrc node_headers.zip
test_script:
# Workaround for https://github.com/appveyor/ci/issues/2420
- set "PATH=%PATH%;C:\Program Files\Git\mingw64\libexec\git-core"
- ps: |
cd src
New-Item .\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path .\out\Default\electron.lib -destination .\out\Default\gen\node_headers\Release\node.lib
- cd electron
# Explicitly set npm_config_arch because the .env doesn't persist
- ps: >-
if ($env:TARGET_ARCH -eq 'ia32') {
$env:npm_config_arch = "ia32"
}
- ps: $env:tests_files=node script\split-tests $env:shard 2
- echo "Running shard %shard% specs %tests_files%"
- echo Running main test suite & node script/yarn test -- --trace-uncaught --runners=main --enable-logging --files %tests_files%
- cd ..
- echo Verifying non proprietary ffmpeg & python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg
- echo "About to verify mksnapshot"
- echo Verifying mksnapshot & python electron\script\verify-mksnapshot.py --build-dir out\Default --source-root %cd%
- echo "Done verifying mksnapshot"
- echo Verifying chromedriver & python electron\script\verify-chromedriver.py --build-dir out\Default --source-root %cd%
- echo "Done verifying chromedriver"
on_finish:
# Uncomment these lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- if exist electron\junit\test-results-main.xml ( appveyor-retry appveyor PushArtifact electron\junit\test-results-main.xml )
- ps: |
if ($env:RUN_TESTS -eq 'true' -And $env:DD_API_KEY) {
$env:DD_GIT_COMMIT_SHA = $env:APPVEYOR_REPO_COMMIT
$env:DD_GIT_BRANCH = $env:APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH
$env:DD_TAGS = "os.architecture:$env:TARGET_ARCH,os.family:windows,os.platform:win32"
if (Test-Path -Path "C:\projects\src\electron\junit\test-results-main.xml") {
C:\Users\appveyor\AppData\Roaming\npm\datadog-ci.ps1 junit upload --verbose C:\projects\src\electron\junit\test-results-main.xml
}
}

View File

@@ -59,7 +59,7 @@ def skip_path(dep, dist_zip, target_cpu):
and dep == "snapshot_blob.bin"
)
)
if should_skip and os.environ.get('ELECTRON_DEBUG_ZIP_SKIP') == '1':
if should_skip:
print("Skipping {}".format(dep))
return should_skip

View File

@@ -306,12 +306,6 @@ Set the default value of the `verbatim` parameter in the Node.js [`dns.lookup()`
The default is `verbatim` and `dns.setDefaultResultOrder()` have higher priority than `--dns-result-order`.
### `--diagnostic-dir=directory`
Set the directory to which all Node.js diagnostic output files are written. Defaults to current working directory.
Affects the default output directory of [v8.setHeapSnapshotNearHeapLimit](https://nodejs.org/docs/latest/api/v8.html#v8setheapsnapshotnearheaplimitlimit).
[app]: app.md
[append-switch]: command-line.md#commandlineappendswitchswitch-value
[debugging-main-process]: ../tutorial/debugging-main-process.md

View File

@@ -26,10 +26,7 @@ Emitted when system changes to battery power.
### Event: 'thermal-state-change' _macOS_
Returns:
* `details` Event\<\>
* `state` string - The system's new thermal state. Can be `unknown`, `nominal`, `fair`, `serious`, `critical`.
* `state` string - The system's new thermal state. Can be `unknown`, `nominal`, `fair`, `serious`, `critical`.
Emitted when the thermal state of the system changes. Notification of a change
in the thermal status of the system, such as entering a critical temperature
@@ -45,8 +42,7 @@ See https://developer.apple.com/library/archive/documentation/Performance/Concep
### Event: 'speed-limit-change' _macOS_ _Windows_
* `details` Event\<\>
* `limit` number - The operating system's advertised speed limit for CPUs, in percent.
* `limit` number - The operating system's advertised speed limit for CPUs, in percent.
Notification of a change in the operating system's advertised speed limit for
CPUs, in percent. Values below 100 indicate that the system is impairing

View File

@@ -933,7 +933,6 @@ session.fromPartition('some-partition').setPermissionRequestHandler((webContents
* `storage-access` - Allows content loaded in a third-party context to request access to third-party cookies using the [Storage Access API](https://developer.mozilla.org/en-US/docs/Web/API/Storage_Access_API).
* `top-level-storage-access` - Allow top-level sites to request third-party cookie access on behalf of embedded content originating from another site in the same related website set using the [Storage Access API](https://developer.mozilla.org/en-US/docs/Web/API/Storage_Access_API).
* `usb` - Expose non-standard Universal Serial Bus (USB) compatible devices services to the web with the [WebUSB API](https://developer.mozilla.org/en-US/docs/Web/API/WebUSB_API).
* `deprecated-sync-clipboard-read` _Deprecated_ - Request access to run `document.execCommand("paste")`
* `requestingOrigin` string - The origin URL of the permission check
* `details` Object - Some properties are only available on certain permission types.
* `embeddingOrigin` string (optional) - The origin of the frame embedding the frame that made the permission check. Only set for cross-origin sub frames making permission checks.

View File

@@ -148,7 +148,6 @@
this will cause the `preferred-size-changed` event to be emitted on the
`WebContents` when the preferred size changes. Default is `false`.
* `transparent` boolean (optional) - Whether to enable background transparency for the guest page. Default is `true`. **Note:** The guest page's text and background colors are derived from the [color scheme](https://developer.mozilla.org/en-US/docs/Web/CSS/color-scheme) of its root element. When transparency is enabled, the text color will still change accordingly but the background will remain transparent.
* `enableDeprecatedPaste` boolean (optional) _Deprecated_ - Whether to enable the `paste` [execCommand](https://developer.mozilla.org/en-US/docs/Web/API/Document/execCommand). Default is `false`.
[chrome-content-scripts]: https://developer.chrome.com/extensions/content_scripts#execution-environment
[runtime-enabled-features]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/platform/runtime_enabled_features.json5

View File

@@ -14,15 +14,6 @@ This document uses the following convention to categorize breaking changes:
## Planned Breaking API Changes (33.0)
### Deprecated: `document.execCommand("paste")`
The synchronous clipboard read API [document.execCommand("paste")](https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/Interact_with_the_clipboard) has been
deprecated in favor of [async clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API). This is to align with the browser defaults.
The `enableDeprecatedPaste` option on `WebPreferences` that triggers the permission
checks for this API and the associated permission type `deprecated-sync-clipboard-read`
are also deprecated.
### Behavior Changed: frame properties may retrieve detached WebFrameMain instances or none at all
APIs which provide access to a `WebFrameMain` instance may return an instance
@@ -166,14 +157,6 @@ win.webContents.navigationHistory.canGoToOffset()
win.webContents.navigationHistory.goToOffset(index)
```
### Behavior changed: Directory `databases` in `userData` will be deleted
If you have a directory called `databases` in the directory returned by
`app.getPath('userData')`, it will be deleted when Electron 32 is first run.
The `databases` directory was used by WebSQL, which was removed in Electron 31.
Chromium now performs a cleanup that deletes this directory. See
[issue #45396](https://github.com/electron/electron/issues/45396).
## Planned Breaking API Changes (31.0)
### Removed: `WebSQL` support

View File

@@ -11,7 +11,7 @@
<button id="clickme">Test Bluetooth</button>
<button id="cancel">Cancel Bluetooth Request</button>
<p>Currently selected bluetooth device: <strong id="device-name"></strong></p>
<p>Currently selected bluetooth device: <strong id="device-name""></strong></p>
<script src="./renderer.js"></script>
</body>

View File

@@ -514,9 +514,9 @@ WebContents.prototype.canGoForward = function () {
};
const canGoToOffsetDeprecated = deprecate.warnOnce('webContents.canGoToOffset', 'webContents.navigationHistory.canGoToOffset');
WebContents.prototype.canGoToOffset = function (index: number) {
WebContents.prototype.canGoToOffset = function () {
canGoToOffsetDeprecated();
return this._canGoToOffset(index);
return this._canGoToOffset();
};
const clearHistoryDeprecated = deprecate.warnOnce('webContents.clearHistory', 'webContents.navigationHistory.clear');

View File

@@ -15,7 +15,7 @@ export const windowSetup = (isWebView: boolean, isHiddenPage: boolean) => {
// But we do not support prompt().
window.prompt = function () {
throw new Error('prompt() is not supported.');
throw new Error('prompt() is and will not be supported.');
};
if (contextIsolationEnabled) internalContextBridge.overrideGlobalValueFromIsolatedWorld(['prompt'], window.prompt);

View File

@@ -84,7 +84,7 @@ feat_filter_out_non-shareable_windows_in_the_current_application_in.patch
disable_freezing_flags_after_init_in_node.patch
short-circuit_permissions_checks_in_mediastreamdevicescontroller.patch
chore_add_electron_deps_to_gitignores.patch
chore_modify_chromium_handling_of_mouse_events.patch
chore_allow_chromium_to_handle_synthetic_mouse_events_for_touch.patch
add_maximized_parameter_to_linuxui_getwindowframeprovider.patch
add_electron_deps_to_license_credits_file.patch
fix_crash_loading_non-standard_schemes_in_iframes.patch
@@ -128,7 +128,7 @@ fix_font_face_resolution_when_renderer_is_blocked.patch
feat_enable_passing_exit_code_on_service_process_crash.patch
chore_remove_reference_to_chrome_browser_themes.patch
feat_enable_customizing_symbol_color_in_framecaptionbutton.patch
build_allow_electron_mojom_interfaces_to_depend_on_blink.patch
build_expose_webplugininfo_interface_to_electron.patch
feat_allow_usage_of_sccontentsharingpicker_on_supported_platforms.patch
fix_software_compositing_infinite_loop.patch
ui_add_missing_shortcut_text_for_vkey_command_on_linux.patch
@@ -136,17 +136,3 @@ osr_shared_texture_remove_keyed_mutex_on_win_dxgi.patch
refactor_unfilter_unresponsive_events.patch
wayland_support_outgoing_dnd_sessions_with_no_offered_mime_types.patch
support_bstr_pkey_appusermodel_id_in_windows_shortcuts.patch
cherry-pick-3dc17c461b12.patch
cherry-pick-35f86d6a0a03.patch
ignore_parse_errors_for_pkey_appusermodel_toastactivatorclsid.patch
feat_add_signals_when_embedder_cleanup_callbacks_run_for.patch
feat_separate_content_settings_callback_for_sync_and_async_clipboard.patch
fix_osr_stutter_in_both_cpu_and_gpu_capture_when_page_has_animation.patch
reland_lzma_sdk_update_to_24_09.patch
cherry-pick-521faebc8a7c.patch
cherry-pick-9dacf5694dfd.patch
cherry-pick-0adceb6159fb.patch
add_a_flag_to_enable_strict_js_compliance_in_audioworklet.patch
remove_denormalenabler_from_scriptprocessornode.patch
allow_denormal_flushing_to_outlive_scoped_object.patch
fix_take_snapped_status_into_account_when_showing_a_window.patch

View File

@@ -1,333 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Michael Wilson <mjwilson@chromium.org>
Date: Thu, 12 Dec 2024 08:45:53 -0800
Subject: Add a flag to enable strict JS compliance in AudioWorklet
AudioWorklet and ScriptProcessorNode are not strictly JavaScript spec
compliant because we disable denormal numbers for performance reasons.
This CL adds a flag to allow experimenting with enabling denormal
numbers in AudioWorklet and ScriptProcessorNode, so that we can
quantify the actual performance impact.
The flag can also be used as a server-side switch.
Bug: 382005099
Change-Id: Ib41253cc42dd2f16c262036817cf3db4697f986f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6077677
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Michael Wilson <mjwilson@chromium.org>
Reviewed-by: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1395444}
diff --git a/third_party/blink/common/features.cc b/third_party/blink/common/features.cc
index 6024875954b389d43e53e03ebafa4a5016d56416..98740bfbe45bcffdd30c5afc0ebafde6cd61a13d 100644
--- a/third_party/blink/common/features.cc
+++ b/third_party/blink/common/features.cc
@@ -2545,6 +2545,12 @@ BASE_FEATURE(kWebAppManifestLockScreen,
"WebAppManifestLockScreen",
base::FEATURE_DISABLED_BY_DEFAULT);
+// Allow denormals in AudioWorklet and ScriptProcessorNode, to enable strict
+// JavaScript denormal compliance. See https://crbug.com/382005099.
+BASE_FEATURE(kWebAudioAllowDenormalInProcessing,
+ "WebAudioAllowDenormalInProcessing",
+ base::FEATURE_DISABLED_BY_DEFAULT);
+
// Enabling this flag bypasses additional buffering when using the Web Audio
// API, which may reduce audio output latency but may also increase the
// probability of an audio glitch.
diff --git a/third_party/blink/public/common/features.h b/third_party/blink/public/common/features.h
index d3f22b63862cacad8079f11725173d9b7c599610..bf2dcbc3c28d6d0317bc1668f16c25a6233d836f 100644
--- a/third_party/blink/public/common/features.h
+++ b/third_party/blink/public/common/features.h
@@ -1719,6 +1719,7 @@ BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(kWebAppEnableScopeExtensions);
BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(kWebAppEnableUrlHandlers);
BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(kWebAppManifestLockScreen);
+BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(kWebAudioAllowDenormalInProcessing);
BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(kWebAudioBypassOutputBuffering);
BLINK_COMMON_EXPORT BASE_DECLARE_FEATURE(
kWebAudioContextConstructorEchoCancellation);
diff --git a/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.cc b/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.cc
index 0382f578a4f98cbac422d5f927c73a6b922c01b8..9a662e7730d3e01dcf8e69f66c4eafa9dd7dd031 100644
--- a/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.cc
+++ b/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.cc
@@ -29,6 +29,7 @@
#include "third_party/blink/renderer/modules/webaudio/cross_thread_audio_worklet_processor_info.h"
#include "third_party/blink/renderer/platform/audio/audio_bus.h"
#include "third_party/blink/renderer/platform/audio/audio_utilities.h"
+#include "third_party/blink/renderer/platform/audio/denormal_disabler.h"
#include "third_party/blink/renderer/platform/bindings/exception_messages.h"
#include "third_party/blink/renderer/platform/heap/persistent.h"
#include "third_party/blink/renderer/platform/instrumentation/tracing/trace_event.h"
@@ -52,7 +53,9 @@ AudioWorkletHandler::AudioWorkletHandler(
const AudioWorkletNodeOptions* options)
: AudioHandler(kNodeTypeAudioWorklet, node, sample_rate),
name_(name),
- param_handler_map_(param_handler_map) {
+ param_handler_map_(param_handler_map),
+ allow_denormal_in_processing_(base::FeatureList::IsEnabled(
+ features::kWebAudioAllowDenormalInProcessing)) {
DCHECK(IsMainThread());
for (const auto& param_name : param_handler_map_.Keys()) {
@@ -112,7 +115,7 @@ scoped_refptr<AudioWorkletHandler> AudioWorkletHandler::Create(
param_handler_map, options));
}
-void AudioWorkletHandler::Process(uint32_t frames_to_process) {
+void AudioWorkletHandler::ProcessInternal(uint32_t frames_to_process) {
DCHECK(Context()->IsAudioThread());
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webaudio.audionode"),
@@ -175,6 +178,15 @@ void AudioWorkletHandler::Process(uint32_t frames_to_process) {
}
}
+void AudioWorkletHandler::Process(uint32_t frames_to_process) {
+ if (allow_denormal_in_processing_) {
+ DenormalEnabler denormal_enabler;
+ ProcessInternal(frames_to_process);
+ } else {
+ ProcessInternal(frames_to_process);
+ }
+}
+
void AudioWorkletHandler::CheckNumberOfChannelsForInput(AudioNodeInput* input) {
DCHECK(Context()->IsAudioThread());
Context()->AssertGraphOwner();
diff --git a/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.h b/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.h
index e6291f5e9e25433281646965f048a7f2abfc8c01..3ec80cd49a87a76ac03df105b37f1ae17437a328 100644
--- a/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.h
+++ b/third_party/blink/renderer/modules/webaudio/audio_worklet_handler.h
@@ -68,6 +68,10 @@ class AudioWorkletHandler final : public AudioHandler {
HashMap<String, scoped_refptr<AudioParamHandler>> param_handler_map,
const AudioWorkletNodeOptions*);
+ // Used to avoid code duplication when using scoped objects that affect
+ // `Process`.
+ void ProcessInternal(uint32_t frames_to_process);
+
String name_;
double tail_time_ = std::numeric_limits<double>::infinity();
@@ -102,6 +106,9 @@ class AudioWorkletHandler final : public AudioHandler {
// when a processor stops invoking the user-defined `process()` callback.
bool is_processor_active_ = true;
+ // Cached feature flag value
+ const bool allow_denormal_in_processing_;
+
base::WeakPtrFactory<AudioWorkletHandler> weak_ptr_factory_{this};
};
diff --git a/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc b/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
index fd6ead021f9f656331d838fb6733cb0fb5220b12..8f9641bebb1a97d2963f8858e58a61cf2434770b 100644
--- a/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
+++ b/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
@@ -26,6 +26,7 @@
#include "third_party/blink/renderer/modules/webaudio/base_audio_context.h"
#include "third_party/blink/renderer/modules/webaudio/realtime_audio_destination_node.h"
#include "third_party/blink/renderer/modules/webaudio/script_processor_node.h"
+#include "third_party/blink/renderer/platform/audio/denormal_disabler.h"
#include "third_party/blink/renderer/platform/bindings/exception_state.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_copier_base.h"
@@ -48,7 +49,9 @@ ScriptProcessorHandler::ScriptProcessorHandler(
internal_input_bus_(AudioBus::Create(
number_of_input_channels,
node.context()->GetDeferredTaskHandler().RenderQuantumFrames(),
- false)) {
+ false)),
+ allow_denormal_in_processing_(base::FeatureList::IsEnabled(
+ features::kWebAudioAllowDenormalInProcessing)) {
DCHECK_GE(buffer_size_,
node.context()->GetDeferredTaskHandler().RenderQuantumFrames());
DCHECK_LE(number_of_input_channels, BaseAudioContext::MaxNumberOfChannels());
@@ -109,7 +112,7 @@ void ScriptProcessorHandler::Initialize() {
AudioHandler::Initialize();
}
-void ScriptProcessorHandler::Process(uint32_t frames_to_process) {
+void ScriptProcessorHandler::ProcessInternal(uint32_t frames_to_process) {
TRACE_EVENT_BEGIN0(TRACE_DISABLED_BY_DEFAULT("webaudio.audionode"),
"ScriptProcessorHandler::Process");
@@ -238,6 +241,15 @@ void ScriptProcessorHandler::Process(uint32_t frames_to_process) {
"ScriptProcessorHandler::Process");
}
+void ScriptProcessorHandler::Process(uint32_t frames_to_process) {
+ if (allow_denormal_in_processing_) {
+ DenormalEnabler denormal_enabler;
+ ProcessInternal(frames_to_process);
+ } else {
+ ProcessInternal(frames_to_process);
+ }
+}
+
void ScriptProcessorHandler::FireProcessEvent(uint32_t double_buffer_index) {
DCHECK(IsMainThread());
diff --git a/third_party/blink/renderer/modules/webaudio/script_processor_handler.h b/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
index 006881fbef2fc74bde5bf8aadc9716367451b122..a960426678a5da72071f6defa046a4517fcb1cf7 100644
--- a/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
+++ b/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
@@ -65,6 +65,11 @@ class ScriptProcessorHandler final : public AudioHandler {
uint32_t number_of_output_channels,
const HeapVector<Member<AudioBuffer>>& input_buffers,
const HeapVector<Member<AudioBuffer>>& output_buffers);
+
+ // Used to avoid code duplication when using scoped objects that affect
+ // `Process`.
+ void ProcessInternal(uint32_t frames_to_process);
+
double TailTime() const override;
double LatencyTime() const override;
bool RequiresTailProcessing() const final;
@@ -92,6 +97,9 @@ class ScriptProcessorHandler final : public AudioHandler {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ // Cached feature flag value
+ const bool allow_denormal_in_processing_;
+
base::WeakPtrFactory<ScriptProcessorHandler> weak_ptr_factory_{this};
FRIEND_TEST_ALL_PREFIXES(ScriptProcessorNodeTest, BufferLifetime);
diff --git a/third_party/blink/renderer/platform/audio/denormal_disabler.h b/third_party/blink/renderer/platform/audio/denormal_disabler.h
index e8fadf60eea81b017dc29b39c2d1cfe8c102999b..ac1cdfa026aa1f845a892e96200fd9de46a45c92 100644
--- a/third_party/blink/renderer/platform/audio/denormal_disabler.h
+++ b/third_party/blink/renderer/platform/audio/denormal_disabler.h
@@ -52,28 +52,28 @@ namespace blink {
#endif
#if defined(HAVE_DENORMAL)
-class DenormalDisabler {
- DISALLOW_NEW();
-
+class DenormalModifier {
public:
- DenormalDisabler() { DisableDenormals(); }
-
- ~DenormalDisabler() { RestoreState(); }
-
- // This is a nop if we can flush denormals to zero in hardware.
- static inline float FlushDenormalFloatToZero(float f) { return f; }
+ virtual ~DenormalModifier() = default;
private:
unsigned saved_csr_ = 0;
#if defined(COMPILER_GCC) && defined(ARCH_CPU_X86_FAMILY)
+ protected:
inline void DisableDenormals() {
saved_csr_ = GetCSR();
SetCSR(saved_csr_ | 0x8040);
}
+ inline void EnableDenormals() {
+ saved_csr_ = GetCSR();
+ SetCSR(saved_csr_ & (~0x8040));
+ }
+
inline void RestoreState() { SetCSR(saved_csr_); }
+ private:
inline int GetCSR() {
int result;
asm volatile("stmxcsr %0" : "=m"(result));
@@ -86,6 +86,7 @@ class DenormalDisabler {
}
#elif BUILDFLAG(IS_WIN) && defined(COMPILER_MSVC)
+ protected:
inline void DisableDenormals() {
// Save the current state, and set mode to flush denormals.
//
@@ -95,11 +96,18 @@ class DenormalDisabler {
_controlfp_s(&unused, _DN_FLUSH, _MCW_DN);
}
+ inline void EnableDenormals() {
+ _controlfp_s(&saved_csr_, 0, 0);
+ unsigned unused;
+ _controlfp_s(&unused, _DN_SAVE, _MCW_DN);
+ }
+
inline void RestoreState() {
unsigned unused;
_controlfp_s(&unused, saved_csr_, _MCW_DN);
}
#elif defined(ARCH_CPU_ARM_FAMILY)
+ protected:
inline void DisableDenormals() {
saved_csr_ = GetStatusWord();
// Bit 24 is the flush-to-zero mode control bit. Setting it to 1 flushes
@@ -107,8 +115,14 @@ class DenormalDisabler {
SetStatusWord(saved_csr_ | (1 << 24));
}
+ inline void EnableDenormals() {
+ saved_csr_ = GetStatusWord();
+ SetStatusWord(saved_csr_ & (~(1 << 24)));
+ }
+
inline void RestoreState() { SetStatusWord(saved_csr_); }
+ private:
inline int GetStatusWord() {
int result;
#if defined(ARCH_CPU_ARM64)
@@ -130,13 +144,33 @@ class DenormalDisabler {
#endif
};
+class DenormalDisabler final : public DenormalModifier {
+ DISALLOW_NEW();
+
+ public:
+ DenormalDisabler() { DisableDenormals(); }
+ ~DenormalDisabler() final { RestoreState(); }
+
+ // This is a nop if we can flush denormals to zero in hardware.
+ static inline float FlushDenormalFloatToZero(float f) { return f; }
+};
+
+class DenormalEnabler final : public DenormalModifier {
+ DISALLOW_NEW();
+
+ public:
+ DenormalEnabler() { EnableDenormals(); }
+ ~DenormalEnabler() final { RestoreState(); }
+};
+
#else
// FIXME: add implementations for other architectures and compilers
class DenormalDisabler {
STACK_ALLOCATED();
public:
- DenormalDisabler() {}
+ DenormalDisabler() = default;
+ ~DenormalDisabler() = default;
// Assume the worst case that other architectures and compilers
// need to flush denormals to zero manually.
@@ -145,6 +179,14 @@ class DenormalDisabler {
}
};
+class DenormalEnabler {
+ STACK_ALLOCATED();
+
+ public:
+ DenormalEnabler() = default;
+ ~DenormalEnabler() = default;
+};
+
#endif
} // namespace blink

View File

@@ -1,293 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Michael Wilson <mjwilson@chromium.org>
Date: Fri, 7 Feb 2025 13:33:40 -0800
Subject: Allow denormal flushing to outlive scoped object
After this refactor we can disable or enable denormals for longer than
a scoped object.
Use this new functionality in audio_worklet_global_scope.cc.
(cherry picked from commit 93c4f6fb0a0f10562ef9a637449605caae9200e6)
Bug: 382005099
Change-Id: I54f4810a4ec035f639d50275e14dae03b726b876
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6226252
Reviewed-by: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Michael Wilson <mjwilson@chromium.org>
Cr-Original-Commit-Position: refs/heads/main@{#1415886}
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6242822
Reviewed-by: Dave Tapuska <dtapuska@chromium.org>
Cr-Commit-Position: refs/branch-heads/6998@{#221}
Cr-Branched-From: de9c6fafd8ae5c6ea0438764076ca7d04a0b165d-refs/heads/main@{#1415337}
diff --git a/third_party/blink/renderer/modules/webaudio/audio_worklet_global_scope.cc b/third_party/blink/renderer/modules/webaudio/audio_worklet_global_scope.cc
index c9bd1e8934d7058cb4c8044aa5618033ec975cec..09de112b96b6062f702d57e6181dd39e681e99a1 100644
--- a/third_party/blink/renderer/modules/webaudio/audio_worklet_global_scope.cc
+++ b/third_party/blink/renderer/modules/webaudio/audio_worklet_global_scope.cc
@@ -23,6 +23,7 @@
#include "third_party/blink/renderer/modules/webaudio/audio_worklet_processor.h"
#include "third_party/blink/renderer/modules/webaudio/audio_worklet_processor_definition.h"
#include "third_party/blink/renderer/modules/webaudio/cross_thread_audio_worklet_processor_info.h"
+#include "third_party/blink/renderer/platform/audio/denormal_disabler.h"
#include "third_party/blink/renderer/platform/bindings/callback_method_retriever.h"
#include "third_party/blink/renderer/platform/heap/garbage_collected.h"
@@ -34,6 +35,9 @@ AudioWorkletGlobalScope::AudioWorkletGlobalScope(
: WorkletGlobalScope(std::move(creation_params),
thread->GetWorkerReportingProxy(),
thread) {
+ // Disable denormals for performance.
+ DenormalModifier::DisableDenormals();
+
// Audio is prone to jank introduced by e.g. the garbage collector. Workers
// are generally put in a background mode (as they are non-visible). Audio is
// an exception here, requiring low-latency behavior similar to any visible
diff --git a/third_party/blink/renderer/platform/BUILD.gn b/third_party/blink/renderer/platform/BUILD.gn
index 12da20fba1cceefd7f0960dd6411aef328f70832..73caf67339ad22cc3033c2869c91402dac460609 100644
--- a/third_party/blink/renderer/platform/BUILD.gn
+++ b/third_party/blink/renderer/platform/BUILD.gn
@@ -2129,6 +2129,7 @@ source_set("blink_platform_unittests_sources") {
"animation/timing_function_test.cc",
"audio/audio_destination_test.cc",
"audio/audio_frame_stats_accumulator_test.cc",
+ "audio/denormal_disabler_test.cc",
"audio/push_pull_fifo_multithread_test.cc",
"audio/push_pull_fifo_test.cc",
"audio/vector_math_test.cc",
diff --git a/third_party/blink/renderer/platform/audio/denormal_disabler.h b/third_party/blink/renderer/platform/audio/denormal_disabler.h
index ac1cdfa026aa1f845a892e96200fd9de46a45c92..a50d7b884e8fdc65f4c1fbe6b5cab7a7801a3b62 100644
--- a/third_party/blink/renderer/platform/audio/denormal_disabler.h
+++ b/third_party/blink/renderer/platform/audio/denormal_disabler.h
@@ -56,74 +56,65 @@ class DenormalModifier {
public:
virtual ~DenormalModifier() = default;
- private:
- unsigned saved_csr_ = 0;
-
#if defined(COMPILER_GCC) && defined(ARCH_CPU_X86_FAMILY)
- protected:
- inline void DisableDenormals() {
- saved_csr_ = GetCSR();
- SetCSR(saved_csr_ | 0x8040);
+ public:
+ static void DisableDenormals() {
+ unsigned old_csr = GetCsr();
+ SetCsr(old_csr | 0x8040);
}
- inline void EnableDenormals() {
- saved_csr_ = GetCSR();
- SetCSR(saved_csr_ & (~0x8040));
+ static void EnableDenormals() {
+ unsigned old_csr = GetCsr();
+ SetCsr(old_csr & (~0x8040));
}
- inline void RestoreState() { SetCSR(saved_csr_); }
-
- private:
- inline int GetCSR() {
+ protected:
+ static inline unsigned GetCsr() {
int result;
asm volatile("stmxcsr %0" : "=m"(result));
return result;
}
- inline void SetCSR(int a) {
+ static inline void SetCsr(int a) {
int temp = a;
asm volatile("ldmxcsr %0" : : "m"(temp));
}
#elif BUILDFLAG(IS_WIN) && defined(COMPILER_MSVC)
+ public:
+ static void DisableDenormals() { SetCsr(_DN_FLUSH); }
+
+ static void EnableDenormals() { SetCsr(_DN_SAVE); }
+
protected:
- inline void DisableDenormals() {
- // Save the current state, and set mode to flush denormals.
- //
- // http://stackoverflow.com/questions/637175/possible-bug-in-controlfp-s-may-not-restore-control-word-correctly
- _controlfp_s(&saved_csr_, 0, 0);
- unsigned unused;
- _controlfp_s(&unused, _DN_FLUSH, _MCW_DN);
+ static inline unsigned GetCsr() {
+ unsigned result;
+ _controlfp_s(&result, 0, 0);
+ return result;
}
- inline void EnableDenormals() {
- _controlfp_s(&saved_csr_, 0, 0);
+ static inline void SetCsr(unsigned a) {
+ // http://stackoverflow.com/questions/637175/possible-bug-in-controlfp-s-may-not-restore-control-word-correctly
unsigned unused;
- _controlfp_s(&unused, _DN_SAVE, _MCW_DN);
+ _controlfp_s(&unused, a, _MCW_DN);
}
- inline void RestoreState() {
- unsigned unused;
- _controlfp_s(&unused, saved_csr_, _MCW_DN);
- }
#elif defined(ARCH_CPU_ARM_FAMILY)
- protected:
- inline void DisableDenormals() {
- saved_csr_ = GetStatusWord();
+ public:
+ static void DisableDenormals() {
+ unsigned old_csr = GetCsr();
// Bit 24 is the flush-to-zero mode control bit. Setting it to 1 flushes
// denormals to 0.
- SetStatusWord(saved_csr_ | (1 << 24));
+ SetCsr(old_csr | (1 << 24));
}
- inline void EnableDenormals() {
- saved_csr_ = GetStatusWord();
- SetStatusWord(saved_csr_ & (~(1 << 24)));
+ static void EnableDenormals() {
+ unsigned old_csr = GetCsr();
+ SetCsr(old_csr & (~(1 << 24)));
}
- inline void RestoreState() { SetStatusWord(saved_csr_); }
-
- private:
- inline int GetStatusWord() {
+ protected:
+ static inline unsigned GetCsr() {
int result;
#if defined(ARCH_CPU_ARM64)
asm volatile("mrs %x[result], FPCR" : [result] "=r"(result));
@@ -133,7 +124,7 @@ class DenormalModifier {
return result;
}
- inline void SetStatusWord(int a) {
+ static inline void SetCsr(int a) {
#if defined(ARCH_CPU_ARM64)
asm volatile("msr FPCR, %x[src]" : : [src] "r"(a));
#else
@@ -148,24 +139,44 @@ class DenormalDisabler final : public DenormalModifier {
DISALLOW_NEW();
public:
- DenormalDisabler() { DisableDenormals(); }
- ~DenormalDisabler() final { RestoreState(); }
+ DenormalDisabler() {
+ // Save the current state, and set mode to flush denormals.
+ saved_csr_ = GetCsr();
+ DisableDenormals();
+ }
+ ~DenormalDisabler() final { SetCsr(saved_csr_); }
// This is a nop if we can flush denormals to zero in hardware.
static inline float FlushDenormalFloatToZero(float f) { return f; }
+
+ private:
+ unsigned saved_csr_ = 0;
};
class DenormalEnabler final : public DenormalModifier {
DISALLOW_NEW();
public:
- DenormalEnabler() { EnableDenormals(); }
- ~DenormalEnabler() final { RestoreState(); }
+ DenormalEnabler() {
+ saved_csr_ = GetCsr();
+ EnableDenormals();
+ }
+ ~DenormalEnabler() final { SetCsr(saved_csr_); }
+
+ private:
+ unsigned saved_csr_ = 0;
};
#else
// FIXME: add implementations for other architectures and compilers
-class DenormalDisabler {
+class DenormalModifier final {
+ public:
+ virtual ~DenormalModifier() = default;
+ static void DisableDenormals() {}
+ static void EnableDenormals() {}
+};
+
+class DenormalDisabler final {
STACK_ALLOCATED();
public:
@@ -179,7 +190,7 @@ class DenormalDisabler {
}
};
-class DenormalEnabler {
+class DenormalEnabler final {
STACK_ALLOCATED();
public:
diff --git a/third_party/blink/renderer/platform/audio/denormal_disabler_test.cc b/third_party/blink/renderer/platform/audio/denormal_disabler_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..5083bbf2da9d4e0e12f1a4608d5e14e4ca910297
--- /dev/null
+++ b/third_party/blink/renderer/platform/audio/denormal_disabler_test.cc
@@ -0,0 +1,51 @@
+// Copyright 2025 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "third_party/blink/renderer/platform/audio/denormal_disabler.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace blink {
+
+namespace {
+
+bool DenormalsAreFlushedToZero() {
+ volatile double denorm = 2.225e-308;
+ return !((denorm / 2.0) > 0.0);
+}
+
+TEST(DenormalDisablerTest, DisableScoped) {
+ const bool already_flushed = DenormalsAreFlushedToZero();
+ if (!already_flushed) {
+ DenormalDisabler scoped_disabler;
+ EXPECT_TRUE(DenormalsAreFlushedToZero());
+ }
+}
+
+TEST(DenormalDisablerTest, EnableScoped) {
+ const bool already_flushed = DenormalsAreFlushedToZero();
+ if (!already_flushed) {
+ DenormalDisabler scoped_disabler;
+ EXPECT_TRUE(DenormalsAreFlushedToZero());
+ {
+ DenormalEnabler scoped_enabler;
+ EXPECT_FALSE(DenormalsAreFlushedToZero());
+ }
+ EXPECT_TRUE(DenormalsAreFlushedToZero());
+ }
+}
+
+TEST(DenormalDisablerTest, ModifyUnscoped) {
+ const bool already_flushed = DenormalsAreFlushedToZero();
+ if (!already_flushed) {
+ DenormalModifier::DisableDenormals();
+ EXPECT_TRUE(DenormalsAreFlushedToZero());
+ DenormalModifier::EnableDenormals();
+ EXPECT_FALSE(DenormalsAreFlushedToZero());
+ }
+}
+
+} // namespace
+
+} // namespace blink

View File

@@ -1,24 +1,20 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: deepak1556 <hop2deep@gmail.com>
Date: Fri, 9 Aug 2024 22:39:47 +0900
Subject: build: allow electron mojom interfaces to depend on blink
mojom_platform
Subject: build: expose webplugininfo interface to electron
Needed for:
1) //electron/shell/common:plugin
2) //electron/shell/common:web_contents_utility
Allows implementing electron::mojom::ElectronPluginInfoHost interface
which provides plugin details between browser<->renderer.
diff --git a/content/public/common/BUILD.gn b/content/public/common/BUILD.gn
index 0b7e43edf99ce901141ca9271f7130658525fd23..45585ff2141034b135a3671d47fe1d4783489077 100644
index 0b7e43edf99ce901141ca9271f7130658525fd23..88dbc54c6b7d857cc0c572eb831d457348af236c 100644
--- a/content/public/common/BUILD.gn
+++ b/content/public/common/BUILD.gn
@@ -370,6 +370,8 @@ mojom("interfaces") {
@@ -370,6 +370,7 @@ mojom("interfaces") {
"//content/common/*",
"//extensions/common:mojom",
"//extensions/common:mojom_blink",
+ "//electron/shell/common:plugin",
+ "//electron/shell/common:web_contents_utility",
]
sources = [

View File

@@ -1,62 +0,0 @@
From 0adceb6159fb6cf8c7e66062964b07d522f32fd7 Mon Sep 17 00:00:00 2001
From: Alexander Cooper <alcooper@chromium.org>
Date: Wed, 19 Feb 2025 14:50:50 -0800
Subject: [PATCH] Speculative fix for cancelling current xr animation frame
When cancelling an XrAnimationFrame we don't actually need to clear the
list of async tasks, and in fact attempting to clear the current async
task can cause errors. This list is only populated while callbacks are
being executed, and is cleared once those callbacks are finished
executing. Removing the callback id from
`current_callback_frame_requests`, which is populated for the same
life span as the async_tasks, is sufficient to ensure the cancelled
callback does not run.
Note: This is a speculative fix because even when
external/wpt/webxr/xrSession_cancelAnimationFrame.https.html
was modified to cancel the current frame, Issue 396481096 did not
repro; however, from code analysis this should fix the issue.
(cherry picked from commit 263f1bf5a386c1de1dfea09ca2d5e6abab287476)
Fixed: 396481096
Change-Id: Ic53895c4ab9cb39b8f9d2263749f5914f484a9f5
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6266810
Commit-Queue: Alexander Cooper <alcooper@chromium.org>
Auto-Submit: Alexander Cooper <alcooper@chromium.org>
Reviewed-by: Brandon Jones <bajones@chromium.org>
Cr-Original-Commit-Position: refs/heads/main@{#1420734}
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6282799
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/branch-heads/6834@{#5493}
Cr-Branched-From: 47a3549fac11ee8cb7be6606001ede605b302b9f-refs/heads/main@{#1381561}
---
diff --git a/third_party/blink/renderer/modules/xr/xr_frame_request_callback_collection.cc b/third_party/blink/renderer/modules/xr/xr_frame_request_callback_collection.cc
index af51506..9a071c6 100644
--- a/third_party/blink/renderer/modules/xr/xr_frame_request_callback_collection.cc
+++ b/third_party/blink/renderer/modules/xr/xr_frame_request_callback_collection.cc
@@ -41,7 +41,13 @@
callback_frame_requests_.erase(id);
callback_async_tasks_.erase(id);
current_callback_frame_requests_.erase(id);
- current_callback_async_tasks_.erase(id);
+ // We intentionally do not erase from `current_callback_async_tasks_` here.
+ // If we are not actively processing a set of callbacks these will be empty.
+ // If we *are* actively processing callbacks, we cannot erase the task of
+ // the current callback, and these tasks will get cleaned up once the
+ // callbacks are finished processing. Removing the id from
+ // `current_callback_frame_requests_` is enough to ensure that the callback
+ // is not run.
}
}
@@ -70,7 +76,6 @@
auto it_frame_request = current_callback_frame_requests_.find(id);
auto it_async_task = current_callback_async_tasks_.find(id);
if (it_frame_request == current_callback_frame_requests_.end()) {
- DCHECK_EQ(current_callback_async_tasks_.end(), it_async_task);
continue;
}
CHECK_NE(current_callback_async_tasks_.end(), it_async_task,

File diff suppressed because it is too large Load Diff

View File

@@ -1,114 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Orko Garai <orko@igalia.com>
Date: Fri, 29 Nov 2024 16:17:04 +0000
Subject: Wayland IME: Underline composition text fallback
At this time text-input-v3 does not provide any styling information.
As a quality-of-life improvement, ensure that a default composition
style is applied so that the composition text is underlined.
This will also ensure that the user experience is consistent with
ozone/x11.
Bug: 355238629
Change-Id: I8d4bce5e5700510d72f114bb57171f43646be098
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5741768
Commit-Queue: Orko Garai <orko@igalia.com>
Reviewed-by: Darren Shen <shend@chromium.org>
Reviewed-by: Kramer Ge <fangzhoug@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1389833}
diff --git a/ui/ozone/platform/wayland/host/wayland_input_method_context.cc b/ui/ozone/platform/wayland/host/wayland_input_method_context.cc
index 71a51ce25b808ee5dc5359ce99becac3da3869bc..1985765618f228102f411282f9476f04a450b4e1 100644
--- a/ui/ozone/platform/wayland/host/wayland_input_method_context.cc
+++ b/ui/ozone/platform/wayland/host/wayland_input_method_context.cc
@@ -598,6 +598,7 @@ void WaylandInputMethodContext::OnPreeditString(
const gfx::Range& preedit_cursor) {
CompositionText composition_text;
composition_text.text = base::UTF8ToUTF16(text);
+ bool has_composition_style = false;
for (const auto& span : spans) {
auto start_offset = OffsetFromUTF8Offset(text, span.index);
if (!start_offset)
@@ -608,9 +609,18 @@ void WaylandInputMethodContext::OnPreeditString(
const auto& style = span.style;
if (!style.has_value())
continue;
+ if (style->type == ImeTextSpan::Type::kComposition) {
+ has_composition_style = true;
+ }
composition_text.ime_text_spans.emplace_back(style->type, *start_offset,
*end_offset, style->thickness);
}
+ if (!composition_text.text.empty() && !has_composition_style) {
+ // If no explicit composition style is specified, add default composition
+ // style to the composition text.
+ composition_text.ime_text_spans.emplace_back(
+ ImeTextSpan::Type::kComposition, 0, composition_text.text.length());
+ }
if (!preedit_cursor.IsValid()) {
// This is the case if a preceding preedit_cursor event in text-input-v1 was
// not received or an explicit negative value was requested to hide the
diff --git a/ui/ozone/platform/wayland/host/wayland_input_method_context_unittest.cc b/ui/ozone/platform/wayland/host/wayland_input_method_context_unittest.cc
index 41a3f5de13b506871f00780b7803bd65a169c22c..22d7a4b82a2e9183dbe1556a73ec236b4cac5048 100644
--- a/ui/ozone/platform/wayland/host/wayland_input_method_context_unittest.cc
+++ b/ui/ozone/platform/wayland/host/wayland_input_method_context_unittest.cc
@@ -1324,6 +1324,34 @@ TEST_P(WaylandInputMethodContextTest, SetInputTypeAfterFocus) {
});
}
+TEST_P(WaylandInputMethodContextTest, OnPreeditChangedDefaultCompositionStyle) {
+ constexpr std::string_view kPreeditString("PreeditString");
+ constexpr gfx::Range kSelection{7, 13};
+ input_method_context_->OnPreeditString(
+ kPreeditString,
+ // No composition style provided.
+ {{1,
+ 3,
+ {{ImeTextSpan::Type::kMisspellingSuggestion,
+ ImeTextSpan::Thickness::kNone}}}},
+ kSelection);
+ EXPECT_TRUE(input_method_context_delegate_->was_on_preedit_changed_called());
+ EXPECT_EQ(input_method_context_delegate_->last_preedit()->ime_text_spans,
+ (ImeTextSpans{ImeTextSpan(ImeTextSpan::Type::kMisspellingSuggestion,
+ 1, 4, ImeTextSpan::Thickness::kNone),
+ // Default composition should be applied.
+ ImeTextSpan(ImeTextSpan::Type::kComposition, 0,
+ kPreeditString.size(),
+ ImeTextSpan::Thickness::kThin)}));
+ EXPECT_EQ(
+ input_method_context_->predicted_state_for_testing().surrounding_text,
+ u"PreeditString");
+ EXPECT_EQ(input_method_context_->predicted_state_for_testing().composition,
+ gfx::Range(0, kPreeditString.size()));
+ EXPECT_EQ(input_method_context_->predicted_state_for_testing().selection,
+ kSelection);
+}
+
TEST_P(WaylandInputMethodContextTest, OnPreeditChanged) {
constexpr std::string_view kPreeditString("PreeditString");
constexpr gfx::Range kSelection{7, 13};
@@ -1331,13 +1359,19 @@ TEST_P(WaylandInputMethodContextTest, OnPreeditChanged) {
kPreeditString,
{{0,
static_cast<uint32_t>(kPreeditString.size()),
- {{ImeTextSpan::Type::kComposition, ImeTextSpan::Thickness::kThin}}}},
+ {{ImeTextSpan::Type::kComposition, ImeTextSpan::Thickness::kThick}}},
+ {1,
+ 3,
+ {{ImeTextSpan::Type::kMisspellingSuggestion,
+ ImeTextSpan::Thickness::kNone}}}},
kSelection);
EXPECT_TRUE(input_method_context_delegate_->was_on_preedit_changed_called());
EXPECT_EQ(input_method_context_delegate_->last_preedit()->ime_text_spans,
- ImeTextSpans{ImeTextSpan(ImeTextSpan::Type::kComposition, 0,
- kPreeditString.size(),
- ImeTextSpan::Thickness::kThin)});
+ (ImeTextSpans{ImeTextSpan(ImeTextSpan::Type::kComposition, 0,
+ kPreeditString.size(),
+ ImeTextSpan::Thickness::kThick),
+ ImeTextSpan(ImeTextSpan::Type::kMisspellingSuggestion,
+ 1, 4, ImeTextSpan::Thickness::kNone)}));
EXPECT_EQ(
input_method_context_->predicted_state_for_testing().surrounding_text,
u"PreeditString");

View File

@@ -1,33 +0,0 @@
From 521faebc8a7cffe23177c6600bfcfb3c0b9ab1dc Mon Sep 17 00:00:00 2001
From: Geoff Lang <geofflang@chromium.org>
Date: Thu, 06 Mar 2025 19:39:37 -0800
Subject: [PATCH] Disable setting primtive restart for WebGL in the cmd decoder.
Until it's blocked in ANGLE for WebGL contexts, disable it in the
command decoder on the service side.
Bug: 401059730
Change-Id: Ia9c7d951cbd122454afec2f884968e0a709cee77
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6334632
Reviewed-by: Shahbaz Youssefi <syoussefi@chromium.org>
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Commit-Queue: Kenneth Russell <kbr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1429307}
---
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc b/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
index ad23480..733c553 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
@@ -2170,6 +2170,11 @@
case GL_DEBUG_OUTPUT:
return true;
+ case GL_PRIMITIVE_RESTART_FIXED_INDEX:
+ // Disable setting primitive restart at the command decoder level until
+ // it's blocked in ANGLE for WebGL contexts.
+ return feature_info_->IsWebGLContext();
+
default:
return false;
}

View File

@@ -1,94 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Geoff Lang <geofflang@chromium.org>
Date: Thu, 6 Mar 2025 16:02:41 -0800
Subject: Move WebGL primitive restart state setting to the GPU process.
ANGLE will validate and initialize this state and errors are generated
when the WebGL client also initializes it on startup.
Initialize it even in the passthrough command decoder temporarily so
that ANGLE can roll without breaking WebGL tests.
Bug: 401059730
Change-Id: I0bfee710673bbcea6f915ffc4fc9be20438a2654
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6330188
Auto-Submit: Geoff Lang <geofflang@chromium.org>
Commit-Queue: Kenneth Russell <kbr@chromium.org>
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1429228}
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder.cc b/gpu/command_buffer/service/gles2_cmd_decoder.cc
index 03a26a5f81dee1cd1bba28621c1ecd30ea709df8..60447a8e71e056db01515db8bc6c56048537870f 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder.cc
@@ -3285,6 +3285,13 @@ gpu::ContextResult GLES2DecoderImpl::Initialize(
}
}
+ if (feature_info_->context_type() == CONTEXT_TYPE_WEBGL2) {
+ // If WebGL 2, the PRIMITIVE_RESTART_FIXED_INDEX should be always enabled.
+ // See the section <Primitive Restart is Always Enabled> in WebGL 2 spec:
+ // https://www.khronos.org/registry/webgl/specs/latest/2.0/#4.1.4
+ DoEnable(GL_PRIMITIVE_RESTART_FIXED_INDEX);
+ }
+
if (group_->gpu_preferences().enable_gpu_driver_debug_logging &&
feature_info_->feature_flags().khr_debug) {
InitializeGLDebugLogging(true, GLDebugMessageCallback, &logger_);
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc b/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
index 54310b8878fa7aeca45e6001cb884a794272138c..e7abe4cb8542aa767ca150db3163f860f9a04b59 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder_passthrough.cc
@@ -1065,6 +1065,17 @@ gpu::ContextResult GLES2DecoderPassthroughImpl::Initialize(
api()->glDisableFn(GL_TEXTURE_RECTANGLE_ANGLE);
#endif
+ // TEMPORARY: Set primitive restart to enabled by default for WebGL2. Clear
+ // errors afterwards so that when this state is initialized and validated in
+ // ANGLE, it will not generate errors during command buffer initialization.
+ if (feature_info_->context_type() == CONTEXT_TYPE_WEBGL2) {
+ // If WebGL 2, the PRIMITIVE_RESTART_FIXED_INDEX should be always enabled.
+ // See the section <Primitive Restart is Always Enabled> in WebGL 2 spec:
+ // https://www.khronos.org/registry/webgl/specs/latest/2.0/#4.1.4
+ api()->glEnableFn(GL_PRIMITIVE_RESTART_FIXED_INDEX);
+ CheckErrorCallbackState();
+ }
+
// Register this object as a GPU switching observer.
if (feature_info_->IsWebGLContext()) {
ui::GpuSwitchingManager::GetInstance()->AddObserver(this);
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder_unittest_base.cc b/gpu/command_buffer/service/gles2_cmd_decoder_unittest_base.cc
index 1683b266f349d4b70ae2861cf4f05542380d8c44..0a0c1b3f6f9fe2caddf86602d2ae9978eff928f1 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder_unittest_base.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder_unittest_base.cc
@@ -440,6 +440,13 @@ ContextResult GLES2DecoderTestBase::MaybeInitDecoderWithWorkarounds(
}
#endif
+ if (init.context_type == CONTEXT_TYPE_WEBGL2 &&
+ group_->feature_info()->gl_version_info().is_es3) {
+ EXPECT_CALL(*gl_, Enable(GL_PRIMITIVE_RESTART_FIXED_INDEX))
+ .Times(1)
+ .RetiresOnSaturation();
+ }
+
if (context_->HasRobustness()) {
EXPECT_CALL(*gl_, GetGraphicsResetStatusARB())
.WillOnce(Return(init.lose_context_on_init ? GL_GUILTY_CONTEXT_RESET_ARB
diff --git a/third_party/blink/renderer/modules/webgl/webgl_rendering_context_base.cc b/third_party/blink/renderer/modules/webgl/webgl_rendering_context_base.cc
index 6057cf83454f0deabc1904cb5e87b306bda4e788..304f4c7beb87212c70e91770d494254da7ed24bf 100644
--- a/third_party/blink/renderer/modules/webgl/webgl_rendering_context_base.cc
+++ b/third_party/blink/renderer/modules/webgl/webgl_rendering_context_base.cc
@@ -1432,12 +1432,6 @@ void WebGLRenderingContextBase::InitializeNewContext() {
->GetCapabilities()
.mesa_framebuffer_flip_y;
- // If WebGL 2, the PRIMITIVE_RESTART_FIXED_INDEX should be always enabled.
- // See the section <Primitive Restart is Always Enabled> in WebGL 2 spec:
- // https://www.khronos.org/registry/webgl/specs/latest/2.0/#4.1.4
- if (IsWebGL2())
- ContextGL()->Enable(GL_PRIMITIVE_RESTART_FIXED_INDEX);
-
// This ensures that the context has a valid "lastFlushID" and won't be
// mistakenly identified as the "least recently used" context.
ContextGL()->Flush();

View File

@@ -1,38 +1,11 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: deepak1556 <hop2deep@gmail.com>
Date: Fri, 29 Jul 2022 00:29:35 +0900
Subject: chore: modify chromium handling of mouse events
This patch does the following:
Subject: chore: allow chromium to handle synthetic mouse events for touch
1. When Windows Control Overlay is enabled, it allows chromium to handle synthetic mouse events generated for touch
With WCO, allow chromium to handle synthetic mouse events generated for touch
actions in the non-client caption area.
2. It calls HandleMouseEvent on the delegate earlier in HandleMouseEventInternal, so that Electron can selectively disable
draggable regions to allow events to propagate to the underlying renderer.
diff --git a/ui/events/event.h b/ui/events/event.h
index e002d4011ddd1dd26f7c412a337221d57e9e9bd4..7ac7313f3af5145bc7513f22c02a8e099e7abdea 100644
--- a/ui/events/event.h
+++ b/ui/events/event.h
@@ -583,6 +583,9 @@ class EVENTS_EXPORT MouseEvent : public LocatedEvent {
const PointerDetails& pointer_details() const { return pointer_details_; }
+ bool is_system_menu() const { return is_system_menu_; }
+ void set_is_system_menu(bool is_menu) { is_system_menu_ = is_menu; }
+
// Event:
std::string ToString() const override;
std::unique_ptr<Event> Clone() const override;
@@ -615,6 +618,8 @@ class EVENTS_EXPORT MouseEvent : public LocatedEvent {
// Structure for holding pointer details for implementing PointerEvents API.
PointerDetails pointer_details_;
+
+ bool is_system_menu_ = false;
};
class ScrollEvent;
diff --git a/ui/views/widget/desktop_aura/desktop_window_tree_host_win.cc b/ui/views/widget/desktop_aura/desktop_window_tree_host_win.cc
index 94366475b7f9d128d8208de44d4d8a11096b146b..5867a8ae7e416ddc29a8a251dc6271009f3409db 100644
--- a/ui/views/widget/desktop_aura/desktop_window_tree_host_win.cc
@@ -61,7 +34,7 @@ index 286fcdf651131d231b07a52a53a1945c144c79ea..73d0729784e339b9abc20f7f22bccee1
Widget* GetWidget();
const Widget* GetWidget() const;
diff --git a/ui/views/win/hwnd_message_handler.cc b/ui/views/win/hwnd_message_handler.cc
index d29cdda6208576f1be3f5a6857fc068bb1397b23..93961374b6ecaf9a169dd8a02c235387e8cad609 100644
index d29cdda6208576f1be3f5a6857fc068bb1397b23..24212831633eb3190db9a344f0fcbd5f25a959b4 100644
--- a/ui/views/win/hwnd_message_handler.cc
+++ b/ui/views/win/hwnd_message_handler.cc
@@ -3123,15 +3123,19 @@ LRESULT HWNDMessageHandler::HandleMouseEventInternal(UINT message,
@@ -86,33 +59,6 @@ index d29cdda6208576f1be3f5a6857fc068bb1397b23..93961374b6ecaf9a169dd8a02c235387
return 0;
}
@@ -3152,6 +3156,7 @@ LRESULT HWNDMessageHandler::HandleMouseEventInternal(UINT message,
// handle alt-space, or in the frame itself.
is_right_mouse_pressed_on_caption_ = false;
ReleaseCapture();
+
// |point| is in window coordinates, but WM_NCHITTEST and TrackPopupMenu()
// expect screen coordinates.
POINT screen_point = CR_POINT_INITIALIZER_FROM_LPARAM(l_param);
@@ -3159,7 +3164,17 @@ LRESULT HWNDMessageHandler::HandleMouseEventInternal(UINT message,
w_param = static_cast<WPARAM>(SendMessage(
hwnd(), WM_NCHITTEST, 0, MAKELPARAM(screen_point.x, screen_point.y)));
if (w_param == HTCAPTION || w_param == HTSYSMENU) {
- ShowSystemMenuAtScreenPixelLocation(hwnd(), gfx::Point(screen_point));
+ LONG message_time = GetMessageTime();
+ CHROME_MSG msg = {hwnd(),
+ message,
+ w_param,
+ l_param,
+ static_cast<DWORD>(message_time),
+ {CR_GET_X_LPARAM(l_param), CR_GET_Y_LPARAM(l_param)}};
+ ui::MouseEvent event(msg);
+ event.set_is_system_menu(true);
+ if (!delegate_->HandleMouseEvent(&event))
+ ShowSystemMenuAtScreenPixelLocation(hwnd(), gfx::Point(screen_point));
return 0;
}
} else if (message == WM_NCLBUTTONDOWN &&
diff --git a/ui/views/win/hwnd_message_handler_delegate.h b/ui/views/win/hwnd_message_handler_delegate.h
index fde18715e33ee67f64740ebda7c641954360483c..cf1ea75f8e9dd1d95045736959c4f3ca1a27a1ab 100644
--- a/ui/views/win/hwnd_message_handler_delegate.h

View File

@@ -1,168 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: deepak1556 <hop2deep@gmail.com>
Date: Wed, 29 Jan 2025 17:01:03 +0900
Subject: feat: add signals when embedder cleanup callbacks run for
gin::wrappable
Current setup of finalization callbacks does not work well with
gin_helper::CleanedUpAtExit for wrappables specifically on environment
shutdown leading to UAF in the second pass.
Details at https://github.com/microsoft/vscode/issues/192119#issuecomment-2375851531
The signals exposed in this patch does the following 2 things,
1) Fix weak state of the wrapped object when the finializer callbacks
have not yet been processed
2) Avoid calling into the second pass when the embedder has already
destroyed the wrapped object via CleanedUpAtExit.
This patch is more of a bandaid fix to improve the lifetime
management with existing finalizer callbacks. We should be able to
remove this patch once gin::Wrappable can be managed by V8 Oilpan
Refs https://issues.chromium.org/issues/40210365 which is blocked
on https://issues.chromium.org/issues/42203693
diff --git a/gin/isolate_holder.cc b/gin/isolate_holder.cc
index e5ee2c6b3cb787ff9f8272d4344a1e18c44971e2..22469cf0ab1025eefcf94e2cd351087e52182130 100644
--- a/gin/isolate_holder.cc
+++ b/gin/isolate_holder.cc
@@ -34,6 +34,8 @@ v8::ArrayBuffer::Allocator* g_array_buffer_allocator = nullptr;
const intptr_t* g_reference_table = nullptr;
v8::FatalErrorCallback g_fatal_error_callback = nullptr;
v8::OOMErrorCallback g_oom_error_callback = nullptr;
+bool g_initialized_microtasks_runner = false;
+bool g_destroyed_microtasks_runner = false;
std::unique_ptr<v8::Isolate::CreateParams> getModifiedIsolateParams(
std::unique_ptr<v8::Isolate::CreateParams> params,
@@ -194,10 +196,26 @@ IsolateHolder::getDefaultIsolateParams() {
return params;
}
+// static
+bool IsolateHolder::DestroyedMicrotasksRunner() {
+ return g_initialized_microtasks_runner &&
+ g_destroyed_microtasks_runner;
+}
+
void IsolateHolder::EnableIdleTasks(
std::unique_ptr<V8IdleTaskRunner> idle_task_runner) {
DCHECK(isolate_data_.get());
isolate_data_->EnableIdleTasks(std::move(idle_task_runner));
}
+void IsolateHolder::WillCreateMicrotasksRunner() {
+ DCHECK(!g_initialized_microtasks_runner);
+ g_initialized_microtasks_runner = true;
+}
+
+void IsolateHolder::WillDestroyMicrotasksRunner() {
+ DCHECK(g_initialized_microtasks_runner);
+ g_destroyed_microtasks_runner = true;
+}
+
} // namespace gin
diff --git a/gin/public/isolate_holder.h b/gin/public/isolate_holder.h
index c22b0a7f9af621573e888a518ccdc22293ce07ef..d3e5ced425df54f42534cec5cc0c5bbfb9d79c6c 100644
--- a/gin/public/isolate_holder.h
+++ b/gin/public/isolate_holder.h
@@ -130,6 +130,8 @@ class GIN_EXPORT IsolateHolder {
// Should only be called after v8::IsolateHolder::Initialize() is invoked.
static std::unique_ptr<v8::Isolate::CreateParams> getDefaultIsolateParams();
+ static bool DestroyedMicrotasksRunner();
+
v8::Isolate* isolate() { return isolate_; }
// This method returns if v8::Locker is needed to access isolate.
@@ -143,6 +145,9 @@ class GIN_EXPORT IsolateHolder {
void EnableIdleTasks(std::unique_ptr<V8IdleTaskRunner> idle_task_runner);
+ void WillCreateMicrotasksRunner();
+ void WillDestroyMicrotasksRunner();
+
// This method returns V8IsolateMemoryDumpProvider of this isolate, used for
// testing.
V8IsolateMemoryDumpProvider* isolate_memory_dump_provider_for_testing()
diff --git a/gin/wrappable.cc b/gin/wrappable.cc
index 402355cb836cea14e9ee725a142a4bad44fd5bed..7e7f028dcfb87c7b80adebabac19ced8791f642e 100644
--- a/gin/wrappable.cc
+++ b/gin/wrappable.cc
@@ -13,6 +13,9 @@ namespace gin {
WrappableBase::WrappableBase() = default;
WrappableBase::~WrappableBase() {
+ if (!wrapper_.IsEmpty()) {
+ wrapper_.ClearWeak();
+ }
wrapper_.Reset();
}
@@ -28,15 +31,24 @@ const char* WrappableBase::GetTypeName() {
void WrappableBase::FirstWeakCallback(
const v8::WeakCallbackInfo<WrappableBase>& data) {
WrappableBase* wrappable = data.GetParameter();
- wrappable->dead_ = true;
- wrappable->wrapper_.Reset();
- data.SetSecondPassCallback(SecondWeakCallback);
+ WrappableBase* wrappable_from_field =
+ static_cast<WrappableBase*>(data.GetInternalField(1));
+ if (wrappable && wrappable == wrappable_from_field) {
+ wrappable->dead_ = true;
+ wrappable->wrapper_.Reset();
+ data.SetSecondPassCallback(SecondWeakCallback);
+ }
}
void WrappableBase::SecondWeakCallback(
const v8::WeakCallbackInfo<WrappableBase>& data) {
+ if (IsolateHolder::DestroyedMicrotasksRunner()) {
+ return;
+ }
WrappableBase* wrappable = data.GetParameter();
- delete wrappable;
+ if (wrappable) {
+ delete wrappable;
+ }
}
v8::MaybeLocal<v8::Object> WrappableBase::GetWrapperImpl(v8::Isolate* isolate,
@@ -71,10 +83,16 @@ v8::MaybeLocal<v8::Object> WrappableBase::GetWrapperImpl(v8::Isolate* isolate,
void* values[] = {info, this};
wrapper->SetAlignedPointerInInternalFields(2, indices, values);
wrapper_.Reset(isolate, wrapper);
- wrapper_.SetWeak(this, FirstWeakCallback, v8::WeakCallbackType::kParameter);
+ wrapper_.SetWeak(this, FirstWeakCallback, v8::WeakCallbackType::kInternalFields);
return v8::MaybeLocal<v8::Object>(wrapper);
}
+void WrappableBase::ClearWeak() {
+ if (!wrapper_.IsEmpty()) {
+ wrapper_.ClearWeak();
+ }
+}
+
namespace internal {
void* FromV8Impl(v8::Isolate* isolate, v8::Local<v8::Value> val,
diff --git a/gin/wrappable.h b/gin/wrappable.h
index 4e7115685a5bf6997e78edcc1851e28bd00b1aa2..ca51fe33605e855438e88969e3d3cc734ef4523e 100644
--- a/gin/wrappable.h
+++ b/gin/wrappable.h
@@ -80,6 +80,13 @@ class GIN_EXPORT WrappableBase {
v8::MaybeLocal<v8::Object> GetWrapperImpl(v8::Isolate* isolate,
WrapperInfo* wrapper_info);
+ // Make this wrappable strong again. This is useful when the wrappable is
+ // destroyed outside the finalizer callbacks and we want to avoid scheduling
+ // the weak callbacks if they haven't been scheduled yet.
+ // NOTE!!! this does not prevent finalization callbacks from running if they
+ // have already been processed.
+ void ClearWeak();
+
private:
static void FirstWeakCallback(
const v8::WeakCallbackInfo<WrappableBase>& data);

View File

@@ -1,114 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: deepak1556 <hop2deep@gmail.com>
Date: Thu, 30 Jan 2025 20:28:38 +0900
Subject: feat: separate content settings callback for sync and async clipboard
`AllowReadFromClipboard` is called from both the types without a way to differentiate.
[sync path] - third_party/blink/renderer/core/editing/commands/clipboard_commands.cc
[async path] - third_party/blink/renderer/modules/clipboard/clipboard_promise.cc
This patch adds a new callback to separate these two paths so that we
can have sync permission checks for the sync path.
Additionally, `blink::PermissionType::DEPRECATED_SYNC_CLIPBOARD_READ`
has been added to support type conversion in permission policy checks. We have extended
`blink::PermissionType` in `electron::WebContentsPermissionHelper::PermissionType`
but it is hard to import the latter into the content permission layer checks.
This patch will be removed when the deprecated sync api support is
removed.
diff --git a/components/permissions/permission_util.cc b/components/permissions/permission_util.cc
index 335d56aa7dc434cf0e3248e44f0d48eccbdc06c5..a7cfe652a7fb06fbc3f99378bbf30173862d2cef 100644
--- a/components/permissions/permission_util.cc
+++ b/components/permissions/permission_util.cc
@@ -369,6 +369,7 @@ ContentSettingsType PermissionUtil::PermissionTypeToContentSettingTypeSafe(
return ContentSettingsType::AUTOMATIC_FULLSCREEN;
case PermissionType::WEB_APP_INSTALLATION:
return ContentSettingsType::WEB_APP_INSTALLATION;
+ case PermissionType::DEPRECATED_SYNC_CLIPBOARD_READ:
case PermissionType::NUM:
break;
}
diff --git a/content/browser/permissions/permission_controller_impl.cc b/content/browser/permissions/permission_controller_impl.cc
index 6dd0324d7d899c7e207ab641d9615f47ff14d4ce..7303dc8fbfe22416b31e9955d143952ea1574c37 100644
--- a/content/browser/permissions/permission_controller_impl.cc
+++ b/content/browser/permissions/permission_controller_impl.cc
@@ -95,6 +95,7 @@ PermissionToSchedulingFeature(PermissionType permission_name) {
case PermissionType::POINTER_LOCK:
case PermissionType::AUTOMATIC_FULLSCREEN:
case PermissionType::WEB_APP_INSTALLATION:
+ case PermissionType::DEPRECATED_SYNC_CLIPBOARD_READ:
return std::nullopt;
}
}
diff --git a/third_party/blink/common/permissions/permission_utils.cc b/third_party/blink/common/permissions/permission_utils.cc
index 0aa40dfe497454424d7fd410e7e9ee8d6227ecd1..9ed047dad4ea6ed1f746bbe975a031f52f278c03 100644
--- a/third_party/blink/common/permissions/permission_utils.cc
+++ b/third_party/blink/common/permissions/permission_utils.cc
@@ -102,6 +102,8 @@ std::string GetPermissionString(PermissionType permission) {
return "AutomaticFullscreen";
case PermissionType::WEB_APP_INSTALLATION:
return "WebAppInstallation";
+ case PermissionType::DEPRECATED_SYNC_CLIPBOARD_READ:
+ return "DeprecatedSyncClipboardRead";
case PermissionType::NUM:
NOTREACHED_IN_MIGRATION();
return std::string();
@@ -177,6 +179,7 @@ PermissionTypeToPermissionsPolicyFeature(PermissionType permission) {
case PermissionType::NOTIFICATIONS:
case PermissionType::KEYBOARD_LOCK:
case PermissionType::POINTER_LOCK:
+ case PermissionType::DEPRECATED_SYNC_CLIPBOARD_READ:
return std::nullopt;
case PermissionType::NUM:
diff --git a/third_party/blink/public/common/permissions/permission_utils.h b/third_party/blink/public/common/permissions/permission_utils.h
index 6c6f1d49d1ff702650d23c0fa3f2c40e7908c5a1..7dc5d2cb91923b1e1f23df74dc601bf90013f29b 100644
--- a/third_party/blink/public/common/permissions/permission_utils.h
+++ b/third_party/blink/public/common/permissions/permission_utils.h
@@ -64,6 +64,7 @@ enum class PermissionType {
AUTOMATIC_FULLSCREEN = 40,
HAND_TRACKING = 41,
WEB_APP_INSTALLATION = 42,
+ DEPRECATED_SYNC_CLIPBOARD_READ = 43,
// Always keep this at the end.
NUM,
diff --git a/third_party/blink/public/platform/web_content_settings_client.h b/third_party/blink/public/platform/web_content_settings_client.h
index 6119f7f3f73379b9bdd219342f23bdde4509ab8c..5e1edfe683e6df8db2a3c8060df523f7af47c230 100644
--- a/third_party/blink/public/platform/web_content_settings_client.h
+++ b/third_party/blink/public/platform/web_content_settings_client.h
@@ -58,6 +58,9 @@ class WebContentSettingsClient {
// Controls whether access to write the clipboard is allowed for this frame.
virtual bool AllowWriteToClipboard() { return false; }
+ // Controls whether synchronous access to read the clipboard is allowed for this frame.
+ virtual bool AllowReadFromClipboardSync() { return false; }
+
// Controls whether enabling Web Components API for this frame.
virtual bool AllowWebComponents(bool default_value) { return default_value; }
diff --git a/third_party/blink/renderer/core/editing/commands/clipboard_commands.cc b/third_party/blink/renderer/core/editing/commands/clipboard_commands.cc
index 20ebd3f2f5fa7b16ad1b2081ca41b007bc78a354..b248e3135182d36a6524c2e626157a0e4c759d14 100644
--- a/third_party/blink/renderer/core/editing/commands/clipboard_commands.cc
+++ b/third_party/blink/renderer/core/editing/commands/clipboard_commands.cc
@@ -121,7 +121,7 @@ bool ClipboardCommands::CanReadClipboard(LocalFrame& frame,
return true;
}
return frame.GetContentSettingsClient() &&
- frame.GetContentSettingsClient()->AllowReadFromClipboard();
+ frame.GetContentSettingsClient()->AllowReadFromClipboardSync();
}
bool ClipboardCommands::CanWriteClipboard(LocalFrame& frame,
@@ -300,7 +300,7 @@ bool ClipboardCommands::PasteSupported(LocalFrame* frame) {
return true;
}
return frame->GetContentSettingsClient() &&
- frame->GetContentSettingsClient()->AllowReadFromClipboard();
+ frame->GetContentSettingsClient()->AllowReadFromClipboardSync();
}
bool ClipboardCommands::ExecuteCopy(LocalFrame& frame,

View File

@@ -40,18 +40,18 @@ accessing uninitialized lower indexes can return garbage values that cannot be n
Refer to v8::EmbedderDataSlot::store_aligned_pointer for context.
diff --git a/gin/public/gin_embedders.h b/gin/public/gin_embedders.h
index 8d7c5631fd8f1499c67384286f0e3c4037673b32..2b7bdfbac06a42e6bc51eb65e023c3673e6eb885 100644
index 8d7c5631fd8f1499c67384286f0e3c4037673b32..99b2e2f63be8a46c5546dd53bc9b05e8c54e857c 100644
--- a/gin/public/gin_embedders.h
+++ b/gin/public/gin_embedders.h
@@ -20,6 +20,8 @@ enum GinEmbedder : uint16_t {
@@ -18,6 +18,8 @@ namespace gin {
enum GinEmbedder : uint16_t {
kEmbedderNativeGin,
kEmbedderBlink,
kEmbedderPDFium,
kEmbedderFuchsia,
+ kEmbedderElectron,
+ kEmbedderBlinkTag,
kEmbedderPDFium,
kEmbedderFuchsia,
};
} // namespace gin
diff --git a/third_party/blink/renderer/platform/bindings/script_state.cc b/third_party/blink/renderer/platform/bindings/script_state.cc
index e4a27a24c83dd1a478b2ada8b6c8220076790791..c76dc818f38a62fff63852dbecbc85e304ac731d 100644
--- a/third_party/blink/renderer/platform/bindings/script_state.cc
@@ -86,7 +86,7 @@ index e4a27a24c83dd1a478b2ada8b6c8220076790791..c76dc818f38a62fff63852dbecbc85e3
// Cut the reference from ScriptState to V8 context.
diff --git a/third_party/blink/renderer/platform/bindings/script_state.h b/third_party/blink/renderer/platform/bindings/script_state.h
index e9b16a9c71b9631222d0745428fea06be2e74472..a85431a73bae23b5d47bc0338430c120de538864 100644
index e9b16a9c71b9631222d0745428fea06be2e74472..aba4d930a9a45fb43e0aaac26af7df4fa07fc447 100644
--- a/third_party/blink/renderer/platform/bindings/script_state.h
+++ b/third_party/blink/renderer/platform/bindings/script_state.h
@@ -184,7 +184,12 @@ class PLATFORM_EXPORT ScriptState : public GarbageCollected<ScriptState> {
@@ -103,7 +103,7 @@ index e9b16a9c71b9631222d0745428fea06be2e74472..a85431a73bae23b5d47bc0338430c120
return nullptr;
}
ScriptState* script_state =
@@ -251,9 +256,14 @@ class PLATFORM_EXPORT ScriptState : public GarbageCollected<ScriptState> {
@@ -251,9 +256,15 @@ class PLATFORM_EXPORT ScriptState : public GarbageCollected<ScriptState> {
static void SetCreateCallback(CreateCallback);
friend class ScriptStateImpl;
@@ -114,6 +114,7 @@ index e9b16a9c71b9631222d0745428fea06be2e74472..a85431a73bae23b5d47bc0338430c120
static_cast<int>(gin::kEmbedderBlink);
+ static constexpr int kV8ContextPerContextDataTagIndex =
+ static_cast<int>(gin::kPerContextDataStartIndex) +
+ static_cast<int>(gin::kEmbedderBlink) +
+ static_cast<int>(gin::kEmbedderBlinkTag);
};

View File

@@ -1,236 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: reito <cnschwarzer@qq.com>
Date: Wed, 12 Feb 2025 20:42:02 +0800
Subject: fix: osr stutter in both cpu and gpu capture when page has animation.
https://crrev.org/c/6232721
https://crbug.com/391118566
There's bug in VideoCaptureOracle that cause stutter in both cpu and gpu capture when page has animation.
The upstream has a fix, which will be available in Chromium M135.
Backport this fix for Electron versions before that.
diff --git a/media/capture/content/video_capture_oracle.cc b/media/capture/content/video_capture_oracle.cc
index f57bef2e36eeebc97316fd2d3bcc3ba1bbc78b43..3708a2f7e3fb6a83cfafbe54ad923db14cd39014 100644
--- a/media/capture/content/video_capture_oracle.cc
+++ b/media/capture/content/video_capture_oracle.cc
@@ -118,8 +118,9 @@ void VideoCaptureOracle::SetCaptureSizeConstraints(
void VideoCaptureOracle::SetAutoThrottlingEnabled(bool enabled) {
const bool was_enabled =
(capture_size_throttling_mode_ != kThrottlingDisabled);
- if (was_enabled == enabled)
+ if (was_enabled == enabled) {
return;
+ }
capture_size_throttling_mode_ =
enabled ? kThrottlingEnabled : kThrottlingDisabled;
VLOG(1) << "Capture size auto-throttling is now "
@@ -127,19 +128,22 @@ void VideoCaptureOracle::SetAutoThrottlingEnabled(bool enabled) {
// When not auto-throttling, have the CaptureResolutionChooser target the max
// resolution within constraints.
- if (!enabled)
+ if (!enabled) {
resolution_chooser_.SetTargetFrameArea(std::numeric_limits<int>::max());
+ }
- if (next_frame_number_ > 0)
+ if (next_frame_number_ > 0) {
CommitCaptureSizeAndReset(GetFrameTimestamp(next_frame_number_ - 1));
+ }
}
void VideoCaptureOracle::SetSourceSize(const gfx::Size& source_size) {
resolution_chooser_.SetSourceSize(source_size);
// If the |resolution_chooser_| computed a new capture size, that will become
// visible via a future call to ObserveEventAndDecideCapture().
- source_size_change_time_ = (next_frame_number_ == 0) ?
- base::TimeTicks() : GetFrameTimestamp(next_frame_number_ - 1);
+ source_size_change_time_ = (next_frame_number_ == 0)
+ ? base::TimeTicks()
+ : GetFrameTimestamp(next_frame_number_ - 1);
}
bool VideoCaptureOracle::ObserveEventAndDecideCapture(
@@ -172,6 +176,15 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
if (should_sample) {
event_time = content_sampler_.frame_timestamp();
duration_of_next_frame_ = content_sampler_.sampling_period();
+ } else {
+ // https://crbug.com/391118566
+ // The content sampler may not sample the frame, if the
+ // `detected_region_` does not match the `damage_rect`. In this case,
+ // the capture may halt up to kNonAnimatingThreshold (250ms) and cause
+ // the video stutter, until it recovers and do another animation
+ // detection. To avoid this, we should use the smoothing sampler as a
+ // fallback to prevent the bad output.
+ should_sample = smoothing_sampler_.ShouldSample();
}
last_time_animation_was_detected_ = event_time;
} else {
@@ -199,8 +212,9 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
break;
}
- if (!should_sample)
+ if (!should_sample) {
return false;
+ }
// If the exact duration of the next frame has not been determined, estimate
// it using the difference between the current and last frame.
@@ -374,16 +388,18 @@ void VideoCaptureOracle::RecordConsumerFeedback(
// resource_utilization feedback.
- if (capture_size_throttling_mode_ == kThrottlingDisabled)
+ if (capture_size_throttling_mode_ == kThrottlingDisabled) {
return;
+ }
if (!std::isfinite(feedback.resource_utilization)) {
LOG(DFATAL) << "Non-finite utilization provided by consumer for frame #"
<< frame_number << ": " << feedback.resource_utilization;
return;
}
- if (feedback.resource_utilization <= 0.0)
+ if (feedback.resource_utilization <= 0.0) {
return; // Non-positive values are normal, meaning N/A.
+ }
if (capture_size_throttling_mode_ != kThrottlingActive) {
VLOG(1) << "Received consumer feedback at frame #" << frame_number
@@ -554,12 +570,14 @@ int VideoCaptureOracle::AnalyzeForIncreasedArea(base::TimeTicks analyze_time) {
const int current_area = capture_size_.GetArea();
const int increased_area =
resolution_chooser_.FindLargerFrameSize(current_area, 1).GetArea();
- if (increased_area <= current_area)
+ if (increased_area <= current_area) {
return -1;
+ }
// Determine whether the buffer pool could handle an increase in area.
- if (!HasSufficientRecentFeedback(buffer_pool_utilization_, analyze_time))
+ if (!HasSufficientRecentFeedback(buffer_pool_utilization_, analyze_time)) {
return -1;
+ }
if (buffer_pool_utilization_.current() > 0.0) {
const int buffer_capable_area = base::saturated_cast<int>(
current_area / buffer_pool_utilization_.current());
@@ -594,8 +612,9 @@ int VideoCaptureOracle::AnalyzeForIncreasedArea(base::TimeTicks analyze_time) {
// At this point, the system is currently under-utilized. Reset the start
// time if the system was not under-utilized when the last analysis was made.
- if (start_time_of_underutilization_.is_null())
+ if (start_time_of_underutilization_.is_null()) {
start_time_of_underutilization_ = analyze_time;
+ }
// If the under-utilization started soon after the last source size change,
// permit an immediate increase in the capture area. This allows the system
diff --git a/media/capture/content/video_capture_oracle_unittest.cc b/media/capture/content/video_capture_oracle_unittest.cc
index 066676fa998db6782270ddbf42fe176d88eb30d4..6cd7567e91bc8c496846a685aa1506c7548f3a21 100644
--- a/media/capture/content/video_capture_oracle_unittest.cc
+++ b/media/capture/content/video_capture_oracle_unittest.cc
@@ -158,21 +158,26 @@ TEST(VideoCaptureOracleTest, TransitionsSmoothlyBetweenSamplers) {
const bool provide_animated_content_event =
(i % 100) >= 25 && (i % 100) < 75;
- // Only the few events that trigger the lock-out transition should be
- // dropped, because the AnimatedContentSampler doesn't yet realize the
- // animation ended. Otherwise, the oracle should always decide to sample
- // because one of its samplers says to.
- const bool require_oracle_says_sample = (i % 100) < 75 || (i % 100) >= 78;
+ // https://crbug.com/391118566
+ // Previously the AnimatedContentSampler has a bug that cause jank.
+ // The oracle should always use SmoothEventSampler as a fallback. If
+ // AnimatedContentSampler doesn't yet realize the animation ended or
+ // doesn't keep up with the prediction it make, and it will wait for
+ // kNonAnimatingThreshold before it lock-out and hand over to smooth
+ // handler. This will cause the video to stutter and it is unacceptable.
+ // So, when the AnimatedContentSampler goes into wrong state, we now
+ // use SmoothEventSampler's decision as a fallback to prevent jank output
+ // and still has a overall limit on capture frequency.
const bool oracle_says_sample = oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate,
provide_animated_content_event ? animation_damage_rect : gfx::Rect(),
t);
- if (require_oracle_says_sample)
- ASSERT_TRUE(oracle_says_sample);
- if (!oracle_says_sample) {
- ASSERT_EQ(base::TimeDelta(), oracle.estimated_frame_duration());
- continue;
- }
+
+ // Because we now use SmoothEventSampler as a fallback, oracle should
+ // always say sample. The previous AnimatedContentSampler lock-out
+ // dropped frame are now revived by SmoothEventSampler, since this test's
+ // capture frequency always meets min capture limit requirement.
+ ASSERT_TRUE(oracle_says_sample);
ASSERT_LT(base::TimeDelta(), oracle.estimated_frame_duration());
const int frame_number = oracle.next_frame_number();
@@ -184,12 +189,9 @@ TEST(VideoCaptureOracleTest, TransitionsSmoothlyBetweenSamplers) {
if (!last_frame_timestamp.is_null()) {
const base::TimeDelta delta = frame_timestamp - last_frame_timestamp;
EXPECT_LE(event_increment.InMicroseconds(), delta.InMicroseconds());
- // Right after the AnimatedContentSampler lock-out transition, there were
- // a few frames dropped, so allow a gap in the timestamps. Otherwise, the
- // delta between frame timestamps should never be more than 2X the
+ // The delta between frame timestamps should never be more than 2X the
// |event_increment|.
- const base::TimeDelta max_acceptable_delta =
- (i % 100) == 78 ? event_increment * 5 : event_increment * 2;
+ const base::TimeDelta max_acceptable_delta = event_increment * 2;
EXPECT_GE(max_acceptable_delta.InMicroseconds(), delta.InMicroseconds());
}
last_frame_timestamp = frame_timestamp;
@@ -444,9 +446,9 @@ void RunAutoThrottleTest(bool is_content_animating,
// expect the resolution to remain constant. Repeat.
for (int i = 0; i < 2; ++i) {
const gfx::Size starting_size = oracle.capture_size();
- SCOPED_TRACE(::testing::Message() << "Stepping down from "
- << starting_size.ToString()
- << ", i=" << i);
+ SCOPED_TRACE(::testing::Message()
+ << "Stepping down from " << starting_size.ToString()
+ << ", i=" << i);
gfx::Size stepped_down_size;
end_t = t + base::Seconds(10);
@@ -471,9 +473,10 @@ void RunAutoThrottleTest(bool is_content_animating,
oracle.RecordCapture(with_consumer_feedback ? 0.25 : utilization);
base::TimeTicks ignored;
ASSERT_TRUE(oracle.CompleteCapture(frame_number, true, &ignored));
- if (with_consumer_feedback)
+ if (with_consumer_feedback) {
oracle.RecordConsumerFeedback(frame_number,
media::VideoCaptureFeedback(utilization));
+ }
}
}
@@ -482,9 +485,9 @@ void RunAutoThrottleTest(bool is_content_animating,
// utilization and expect the resolution to remain constant. Repeat.
for (int i = 0; i < 2; ++i) {
const gfx::Size starting_size = oracle.capture_size();
- SCOPED_TRACE(::testing::Message() << "Stepping up from "
- << starting_size.ToString()
- << ", i=" << i);
+ SCOPED_TRACE(::testing::Message()
+ << "Stepping up from " << starting_size.ToString()
+ << ", i=" << i);
gfx::Size stepped_up_size;
end_t = t + base::Seconds(is_content_animating ? 90 : 10);
@@ -513,9 +516,10 @@ void RunAutoThrottleTest(bool is_content_animating,
oracle.RecordCapture(with_consumer_feedback ? 0.25 : utilization);
base::TimeTicks ignored;
ASSERT_TRUE(oracle.CompleteCapture(frame_number, true, &ignored));
- if (with_consumer_feedback)
+ if (with_consumer_feedback) {
oracle.RecordConsumerFeedback(frame_number,
media::VideoCaptureFeedback(utilization));
+ }
}
}
}

View File

@@ -1,57 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Shelley Vohr <shelley.vohr@gmail.com>
Date: Thu, 13 Mar 2025 10:47:00 +0100
Subject: fix: take Snapped status into account when showing a window
Adjusts HWNDMessageHandler::Show to correctly restore windows that were
in a snapped state prior to being hidden or maximized. From Windows
documentation at
https://learn.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-iswindowarranged:
> A snapped window (see Snap your windows) is considered to be arranged.
> You should treat arranged as a window state similar to maximized. Arranged,
> maximized, and minimized are mutually exclusive states.
The logic already took into account a window being maximized and
correctly restored it, but if the window was snapped prior to this CL it
would be removed from its snapped state when re-shown. This fixes that.
Upstreamed at https://chromium-review.googlesource.com/c/chromium/src/+/6330848.
diff --git a/ui/views/win/hwnd_message_handler.cc b/ui/views/win/hwnd_message_handler.cc
index c77c90d949d6e50d99412cda0510dce1239010de..b4e64272f0c089ee6d48456f6973e715832e3001 100644
--- a/ui/views/win/hwnd_message_handler.cc
+++ b/ui/views/win/hwnd_message_handler.cc
@@ -656,7 +656,8 @@ void HWNDMessageHandler::Show(ui::WindowShowState show_state,
SetWindowPlacement(hwnd(), &placement);
native_show_state = SW_SHOWMAXIMIZED;
} else {
- const bool is_maximized = IsMaximized();
+ const bool is_maximized_or_arranged =
+ IsMaximized() || IsWindowArranged(hwnd());
// Use SW_SHOW/SW_SHOWNA instead of SW_SHOWNORMAL/SW_SHOWNOACTIVATE so that
// the window is not restored to its original position if it is maximized.
@@ -665,7 +666,7 @@ void HWNDMessageHandler::Show(ui::WindowShowState show_state,
// some platforms restore the position, some do not. See crbug.com/1296710
switch (show_state) {
case ui::SHOW_STATE_INACTIVE:
- native_show_state = is_maximized ? SW_SHOWNA : SW_SHOWNOACTIVATE;
+ native_show_state = is_maximized_or_arranged ? SW_SHOWNA : SW_SHOWNOACTIVATE;
break;
case ui::SHOW_STATE_MAXIMIZED:
native_show_state = SW_SHOWMAXIMIZED;
@@ -676,9 +677,11 @@ void HWNDMessageHandler::Show(ui::WindowShowState show_state,
case ui::SHOW_STATE_NORMAL:
if ((GetWindowLong(hwnd(), GWL_EXSTYLE) & WS_EX_TRANSPARENT) ||
(GetWindowLong(hwnd(), GWL_EXSTYLE) & WS_EX_NOACTIVATE)) {
- native_show_state = is_maximized ? SW_SHOWNA : SW_SHOWNOACTIVATE;
+ native_show_state =
+ is_maximized_or_arranged ? SW_SHOWNA : SW_SHOWNOACTIVATE;
} else {
- native_show_state = is_maximized ? SW_SHOW : SW_SHOWNORMAL;
+ native_show_state =
+ is_maximized_or_arranged ? SW_SHOW : SW_SHOWNORMAL;
}
break;
case ui::SHOW_STATE_FULLSCREEN:

View File

@@ -1,40 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?David=20L=C3=B6nnhager?= <dv.lnh.d@gmail.com>
Date: Fri, 17 Jan 2025 14:30:48 +0100
Subject: Ignore parse errors for PKEY_AppUserModel_ToastActivatorCLSID
Some shortcuts store this as a string UUID as opposed to VT_CLSID,
hitting NOTREACHED() and sometimes breaking parsing in Electron.
Ignore this error instead.
Bug: N/A
Change-Id: I9fc472212b2d3afac2c8e18a2159bc2d50bbdf98
diff --git a/AUTHORS b/AUTHORS
index bd9327b5b477f256570429132131cbeeb78bbbfe..55fb5f73f99e802489fb2b995277b491513295cd 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -334,6 +334,7 @@ David Futcher <david.mike.futcher@gmail.com>
David Jin <davidjin@amazon.com>
David Lechner <david@pybricks.com>
David Leen <davileen@amazon.com>
+David Lönnhager <dv.lnh.d@gmail.com>
David Manouchehri <david@davidmanouchehri.com>
David McAllister <mcdavid@amazon.com>
David Michael Barr <david.barr@samsung.com>
diff --git a/base/win/shortcut.cc b/base/win/shortcut.cc
index 02f3e63d16c3324f546f6155d722900f0a81131a..1dfdb0c8dc5a7368382e73a0db1b4d135b4d2176 100644
--- a/base/win/shortcut.cc
+++ b/base/win/shortcut.cc
@@ -342,8 +342,9 @@ bool ResolveShortcutProperties(const FilePath& shortcut_path,
*(pv_toast_activator_clsid.get().puuid));
break;
default:
- NOTREACHED() << "Unexpected variant type: "
- << pv_toast_activator_clsid.get().vt;
+ // Shortcuts may use strings to represent the CLSID. This case is
+ // ignored.
+ break;
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,90 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Michael Wilson <mjwilson@chromium.org>
Date: Thu, 30 Jan 2025 14:09:57 -0800
Subject: Remove DenormalEnabler from ScriptProcessorNode
This is a follow-up to https://crrev.com/c/6077677
After experimenting, ScriptProcessorNode JavaScript is already running
in a complaint mode so the DenormalEnabler is not necessary.
Bug: 382005099
Change-Id: If9774e60640446c567270a8f065500beecc8a40b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6219685
Commit-Queue: Michael Wilson <mjwilson@chromium.org>
Reviewed-by: Alvin Ji <alvinji@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1413754}
diff --git a/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc b/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
index 8f9641bebb1a97d2963f8858e58a61cf2434770b..fd6ead021f9f656331d838fb6733cb0fb5220b12 100644
--- a/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
+++ b/third_party/blink/renderer/modules/webaudio/script_processor_handler.cc
@@ -26,7 +26,6 @@
#include "third_party/blink/renderer/modules/webaudio/base_audio_context.h"
#include "third_party/blink/renderer/modules/webaudio/realtime_audio_destination_node.h"
#include "third_party/blink/renderer/modules/webaudio/script_processor_node.h"
-#include "third_party/blink/renderer/platform/audio/denormal_disabler.h"
#include "third_party/blink/renderer/platform/bindings/exception_state.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_copier_base.h"
@@ -49,9 +48,7 @@ ScriptProcessorHandler::ScriptProcessorHandler(
internal_input_bus_(AudioBus::Create(
number_of_input_channels,
node.context()->GetDeferredTaskHandler().RenderQuantumFrames(),
- false)),
- allow_denormal_in_processing_(base::FeatureList::IsEnabled(
- features::kWebAudioAllowDenormalInProcessing)) {
+ false)) {
DCHECK_GE(buffer_size_,
node.context()->GetDeferredTaskHandler().RenderQuantumFrames());
DCHECK_LE(number_of_input_channels, BaseAudioContext::MaxNumberOfChannels());
@@ -112,7 +109,7 @@ void ScriptProcessorHandler::Initialize() {
AudioHandler::Initialize();
}
-void ScriptProcessorHandler::ProcessInternal(uint32_t frames_to_process) {
+void ScriptProcessorHandler::Process(uint32_t frames_to_process) {
TRACE_EVENT_BEGIN0(TRACE_DISABLED_BY_DEFAULT("webaudio.audionode"),
"ScriptProcessorHandler::Process");
@@ -241,15 +238,6 @@ void ScriptProcessorHandler::ProcessInternal(uint32_t frames_to_process) {
"ScriptProcessorHandler::Process");
}
-void ScriptProcessorHandler::Process(uint32_t frames_to_process) {
- if (allow_denormal_in_processing_) {
- DenormalEnabler denormal_enabler;
- ProcessInternal(frames_to_process);
- } else {
- ProcessInternal(frames_to_process);
- }
-}
-
void ScriptProcessorHandler::FireProcessEvent(uint32_t double_buffer_index) {
DCHECK(IsMainThread());
diff --git a/third_party/blink/renderer/modules/webaudio/script_processor_handler.h b/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
index a960426678a5da72071f6defa046a4517fcb1cf7..308a9ba845ab3413462540a70564ad85091cf180 100644
--- a/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
+++ b/third_party/blink/renderer/modules/webaudio/script_processor_handler.h
@@ -66,10 +66,6 @@ class ScriptProcessorHandler final : public AudioHandler {
const HeapVector<Member<AudioBuffer>>& input_buffers,
const HeapVector<Member<AudioBuffer>>& output_buffers);
- // Used to avoid code duplication when using scoped objects that affect
- // `Process`.
- void ProcessInternal(uint32_t frames_to_process);
-
double TailTime() const override;
double LatencyTime() const override;
bool RequiresTailProcessing() const final;
@@ -97,9 +93,6 @@ class ScriptProcessorHandler final : public AudioHandler {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
- // Cached feature flag value
- const bool allow_denormal_in_processing_;
-
base::WeakPtrFactory<ScriptProcessorHandler> weak_ptr_factory_{this};
FRIEND_TEST_ALL_PREFIXES(ScriptProcessorNodeTest, BufferLifetime);

View File

@@ -11,6 +11,5 @@
{ "patch_dir": "src/electron/patches/Mantle", "repo": "src/third_party/squirrel.mac/vendor/Mantle" },
{ "patch_dir": "src/electron/patches/ReactiveObjC", "repo": "src/third_party/squirrel.mac/vendor/ReactiveObjC" },
{ "patch_dir": "src/electron/patches/webrtc", "repo": "src/third_party/webrtc" },
{ "patch_dir": "src/electron/patches/reclient-configs", "repo": "src/third_party/engflow-reclient-configs" },
{ "patch_dir": "src/electron/patches/skia", "repo": "src/third_party/skia" }
{ "patch_dir": "src/electron/patches/reclient-configs", "repo": "src/third_party/engflow-reclient-configs" }
]

View File

@@ -48,9 +48,8 @@ test_update_v8-stats_test_for_v8_12_6.patch
src_do_not_use_soon-to-be-deprecated_v8_api.patch
fix_add_property_query_interceptors.patch
src_stop_using_deprecated_fields_of_fastapicallbackoptions.patch
build_don_t_redefine_win32_lean_and_mean.patch
src_use_supported_api_to_get_stalled_tla_messages.patch
build_compile_with_c_20_support.patch
add_v8_taskpirority_to_foreground_task_runner_signature.patch
build_restore_clang_as_default_compiler_on_macos.patch
build_remove_explicit_linker_call_to_libm_on_macos.patch
build_define_nominmax_in_common_gypi.patch

View File

@@ -537,21 +537,17 @@ index 0e69d7383762f6b81c5b57698aa9d121d5a9c401..35bbeb37acc7ccb14b4b8a644ec3d4c7
cflags_c = [
"-mavx512vl",
diff --git a/deps/cares/BUILD.gn b/deps/cares/BUILD.gn
index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..ef745c19f2b1cd433cc43c834a18db7eaa8e5162 100644
index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..7f4885631a85a25692e8969991951be02e5d73f1 100644
--- a/deps/cares/BUILD.gn
+++ b/deps/cares/BUILD.gn
@@ -1,14 +1,188 @@
@@ -1,14 +1,175 @@
-##############################################################################
-# #
-# DO NOT EDIT THIS FILE! #
-# #
-##############################################################################
+config("cares_config") {
+ include_dirs = [
+ "include",
+ "src/lib",
+ "src/lib/include",
+ ]
+ include_dirs = [ "include", "src/lib" ]
+}
+static_library("cares") {
+ defines = [ "CARES_STATICLIB" ]
@@ -567,19 +563,20 @@ index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..ef745c19f2b1cd433cc43c834a18db7e
+
+ sources = [
+ "include/ares.h",
+ "include/ares_build.h",
+ "include/ares_dns.h",
+ "include/ares_dns_record.h",
+ "include/ares_nameser.h",
+ "include/ares_version.h",
+ "src/lib/ares_addrinfo2hostent.c",
+ "src/lib/ares_addrinfo_localhost.c",
+ "src/lib/ares__addrinfo2hostent.c",
+ "src/lib/ares__addrinfo_localhost.c",
+ "src/lib/ares__close_sockets.c",
+ "src/lib/ares__hosts_file.c",
+ "src/lib/ares__parse_into_addrinfo.c",
+ "src/lib/ares__socket.c",
+ "src/lib/ares__sortaddrinfo.c",
+ "src/lib/ares_android.c",
+ "src/lib/ares_android.h",
+ "src/lib/ares_cancel.c",
+ "src/lib/ares_close_sockets.c",
+ "src/lib/ares_conn.c",
+ "src/lib/ares_conn.h",
+ "src/lib/ares_cookie.c",
+ "src/lib/ares_data.c",
+ "src/lib/ares_data.h",
@@ -593,43 +590,43 @@ index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..ef745c19f2b1cd433cc43c834a18db7e
+ "src/lib/ares_gethostbyaddr.c",
+ "src/lib/ares_gethostbyname.c",
+ "src/lib/ares_getnameinfo.c",
+ "src/lib/ares_hosts_file.c",
+ "src/lib/ares_inet_net_pton.h",
+ "src/lib/ares_init.c",
+ "src/lib/ares_ipv6.h",
+ "src/lib/ares_library_init.c",
+ "src/lib/ares_metrics.c",
+ "src/lib/ares_options.c",
+ "src/lib/ares_parse_into_addrinfo.c",
+ "src/lib/ares_platform.c",
+ "src/lib/ares_platform.h",
+ "src/lib/ares_private.h",
+ "src/lib/ares_process.c",
+ "src/lib/ares_qcache.c",
+ "src/lib/ares_query.c",
+ "src/lib/ares_search.c",
+ "src/lib/ares_send.c",
+ "src/lib/ares_set_socket_functions.c",
+ "src/lib/ares_setup.h",
+ "src/lib/ares_socket.c",
+ "src/lib/ares_socket.h",
+ "src/lib/ares_sortaddrinfo.c",
+ "src/lib/ares_strerror.c",
+ "src/lib/ares_sysconfig.c",
+ "src/lib/ares_sysconfig_files.c",
+ "src/lib/ares_timeout.c",
+ "src/lib/ares_update_servers.c",
+ "src/lib/ares_version.c",
+ "src/lib/dsa/ares_array.c",
+ "src/lib/dsa/ares_htable.c",
+ "src/lib/dsa/ares_htable.h",
+ "src/lib/dsa/ares_htable_asvp.c",
+ "src/lib/dsa/ares_htable_dict.c",
+ "src/lib/dsa/ares_htable_strvp.c",
+ "src/lib/dsa/ares_htable_szvp.c",
+ "src/lib/dsa/ares_htable_vpstr.c",
+ "src/lib/dsa/ares_htable_vpvp.c",
+ "src/lib/dsa/ares_llist.c",
+ "src/lib/dsa/ares_slist.c",
+ "src/lib/dsa/ares_slist.h",
+ "src/lib/dsa/ares__array.c",
+ "src/lib/dsa/ares__array.h",
+ "src/lib/dsa/ares__htable.c",
+ "src/lib/dsa/ares__htable.h",
+ "src/lib/dsa/ares__htable_asvp.c",
+ "src/lib/dsa/ares__htable_asvp.h",
+ "src/lib/dsa/ares__htable_strvp.c",
+ "src/lib/dsa/ares__htable_strvp.h",
+ "src/lib/dsa/ares__htable_szvp.c",
+ "src/lib/dsa/ares__htable_szvp.h",
+ "src/lib/dsa/ares__htable_vpvp.c",
+ "src/lib/dsa/ares__htable_vpvp.h",
+ "src/lib/dsa/ares__llist.c",
+ "src/lib/dsa/ares__llist.h",
+ "src/lib/dsa/ares__slist.c",
+ "src/lib/dsa/ares__slist.h",
+ "src/lib/event/ares_event.h",
+ "src/lib/event/ares_event_configchg.c",
+ "src/lib/event/ares_event_epoll.c",
@@ -640,17 +637,6 @@ index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..ef745c19f2b1cd433cc43c834a18db7e
+ "src/lib/event/ares_event_wake_pipe.c",
+ "src/lib/event/ares_event_win32.c",
+ "src/lib/event/ares_event_win32.h",
+ "src/lib/include/ares_array.h",
+ "src/lib/include/ares_buf.h",
+ "src/lib/include/ares_htable_asvp.h",
+ "src/lib/include/ares_htable_dict.h",
+ "src/lib/include/ares_htable_strvp.h",
+ "src/lib/include/ares_htable_szvp.h",
+ "src/lib/include/ares_htable_vpstr.h",
+ "src/lib/include/ares_htable_vpvp.h",
+ "src/lib/include/ares_llist.h",
+ "src/lib/include/ares_mem.h",
+ "src/lib/include/ares_str.h",
+ "src/lib/inet_net_pton.c",
+ "src/lib/inet_ntop.c",
+ "src/lib/legacy/ares_create_query.c",
@@ -677,22 +663,23 @@ index ac19ac73ed1e24c61cb679f3851685b79cfc8b39..ef745c19f2b1cd433cc43c834a18db7e
+ "src/lib/record/ares_dns_private.h",
+ "src/lib/record/ares_dns_record.c",
+ "src/lib/record/ares_dns_write.c",
+ "src/lib/str/ares_buf.c",
+ "src/lib/str/ares__buf.c",
+ "src/lib/str/ares__buf.h",
+ "src/lib/str/ares_str.c",
+ "src/lib/str/ares_str.h",
+ "src/lib/str/ares_strcasecmp.c",
+ "src/lib/str/ares_strcasecmp.h",
+ "src/lib/str/ares_strsplit.c",
+ "src/lib/str/ares_strsplit.h",
+ "src/lib/util/ares_iface_ips.c",
+ "src/lib/util/ares_iface_ips.h",
+ "src/lib/util/ares__iface_ips.c",
+ "src/lib/util/ares__iface_ips.h",
+ "src/lib/util/ares__threads.c",
+ "src/lib/util/ares__threads.h",
+ "src/lib/util/ares__timeval.c",
+ "src/lib/util/ares_math.c",
+ "src/lib/util/ares_math.h",
+ "src/lib/util/ares_rand.c",
+ "src/lib/util/ares_rand.h",
+ "src/lib/util/ares_threads.c",
+ "src/lib/util/ares_threads.h",
+ "src/lib/util/ares_time.h",
+ "src/lib/util/ares_timeval.c",
+ "src/lib/util/ares_uri.c",
+ "src/lib/util/ares_uri.h",
+ "src/tools/ares_getopt.c",
+ "src/tools/ares_getopt.h",
+ ]
+
+ if (!is_win) {
@@ -2260,6 +2247,19 @@ index 706ea4f5cb90525c8ea56f794320a733c45a193f..c7ae7759595bfc7fdc31dab174a7514d
}
} // namespace builtins
diff --git a/src/node_builtins.h b/src/node_builtins.h
index 1cb85b9058d06555382e565dc32192a9fa48ed9f..cec9be01abd107e8612f70daf19b4834e118ffcf 100644
--- a/src/node_builtins.h
+++ b/src/node_builtins.h
@@ -74,6 +74,8 @@ using BuiltinCodeCacheMap =
// Generated by tools/js2c.py as node_javascript.cc
void RegisterExternalReferencesForInternalizedBuiltinCode(
ExternalReferenceRegistry* registry);
+void EmbedderRegisterExternalReferencesForInternalizedBuiltinCode(
+ ExternalReferenceRegistry* registry);
// Handles compilation and caching of built-in JavaScript modules and
// bootstrap scripts, whose source are bundled into the binary as static data.
diff --git a/tools/generate_gn_filenames_json.py b/tools/generate_gn_filenames_json.py
new file mode 100755
index 0000000000000000000000000000000000000000..37c16859003e61636fe2f1a4040b1e904c472d0b
@@ -2409,10 +2409,10 @@ index 0000000000000000000000000000000000000000..9be3ac447f9a4dde23fefc26e0b922b4
+ transformed_f.write(transformed_contents)
+
diff --git a/tools/install.py b/tools/install.py
index 2dceb5c39ea4a11034ce93899fa26dc406e0b5d0..a425de8ad7833f4d39c842fd896539c1f77468bc 100755
index b132c7bf26c02886a7ab341a1973bf449744ba0f..757e3e60a7be01fac55c5fbb010dbbae00b1bfca 100755
--- a/tools/install.py
+++ b/tools/install.py
@@ -270,6 +270,7 @@ def headers(options, action):
@@ -264,6 +264,7 @@ def headers(options, action):
'include/v8-forward.h',
'include/v8-function-callback.h',
'include/v8-function.h',
@@ -2420,7 +2420,7 @@ index 2dceb5c39ea4a11034ce93899fa26dc406e0b5d0..a425de8ad7833f4d39c842fd896539c1
'include/v8-initialization.h',
'include/v8-internal.h',
'include/v8-isolate.h',
@@ -290,6 +291,8 @@ def headers(options, action):
@@ -284,6 +285,8 @@ def headers(options, action):
'include/v8-promise.h',
'include/v8-proxy.h',
'include/v8-regexp.h',
@@ -2429,7 +2429,7 @@ index 2dceb5c39ea4a11034ce93899fa26dc406e0b5d0..a425de8ad7833f4d39c842fd896539c1
'include/v8-script.h',
'include/v8-snapshot.h',
'include/v8-statistics.h',
@@ -396,7 +399,7 @@ def parse_options(args):
@@ -390,7 +393,7 @@ def parse_options(args):
parser.add_argument('--build-dir', help='the location of built binaries',
default='out/Release')
parser.add_argument('--v8-dir', help='the location of V8',

View File

@@ -10,7 +10,7 @@ V8 requires C++20 support as of https://chromium-review.googlesource.com/c/v8/v8
This can be removed when Electron upgrades to a version of Node.js containing the required V8 version.
diff --git a/common.gypi b/common.gypi
index 690068f093f12b6831f8ccce41289d02d7047a7a..5a3df388773ad288553bf036be42dc1a0ba75c09 100644
index bdf1a1f33f3ea09d933757c7fee87c563cc833ab..2eb62610db2f0ebf68fa9a55ffba98291ecfe451 100644
--- a/common.gypi
+++ b/common.gypi
@@ -305,7 +305,7 @@

View File

@@ -1,49 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Shelley Vohr <shelley.vohr@gmail.com>
Date: Mon, 3 Mar 2025 12:05:55 +0100
Subject: build: define NOMINMAX in common.gypi
V8 and Node.js had defined NOMINMAX on Windows for a long time. In
recent changes, V8 added std::numeric_limits::min usages in its
header files which caused addons without NOMINMAX defines failed
to compile.
Define NOMINMAX in common.gypi so that addons can be compiled with
the latest V8 header files.
NAN includes uv.h before node.h, which makes
these defines effectiveless. Nevertheless, the include order should not be
significant.
PR-URL: https://github.com/nodejs/node/pull/52794
diff --git a/common.gypi b/common.gypi
index 9bbf1b277eb17d78ca385643c3177638fd75866a..959ee74af88b44d31f2e6fa65c6f260820bd8c46 100644
--- a/common.gypi
+++ b/common.gypi
@@ -480,6 +480,10 @@
'_HAS_EXCEPTIONS=0',
'BUILDING_V8_SHARED=1',
'BUILDING_UV_SHARED=1',
+ # Stop <windows.h> from defining macros that conflict with
+ # std::min() and std::max(). We don't use <windows.h> (much)
+ # but we still inherit it from uv.h.
+ 'NOMINMAX',
],
}],
[ 'OS in "linux freebsd openbsd solaris aix os400"', {
diff --git a/node.gypi b/node.gypi
index 46c7c7e353f29b8b3e5937ec80a5924020548980..212750978fb5c24fffe410cbdfe8dbdfe2ea8b42 100644
--- a/node.gypi
+++ b/node.gypi
@@ -63,10 +63,6 @@
'FD_SETSIZE=1024',
# we need to use node's preferred "win32" rather than gyp's preferred "win"
'NODE_PLATFORM="win32"',
- # Stop <windows.h> from defining macros that conflict with
- # std::min() and std::max(). We don't use <windows.h> (much)
- # but we still inherit it from uv.h.
- 'NOMINMAX',
'_UNICODE=1',
],
'msvs_precompiled_header': 'tools/msvs/pch/node_pch.h',

View File

@@ -0,0 +1,36 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Shelley Vohr <shelley.vohr@gmail.com>
Date: Fri, 23 Aug 2024 16:50:19 +0200
Subject: build: don't redefine WIN32_LEAN_AND_MEAN
https://github.com/nodejs/node/pull/53722 added a new define for WIN32_LEAN_AND_MEAN
without first checking to see if it was defined - other areas in c-ares do this so
we should here as well. Compilation errors occur otherwise:
../../third_party/electron_node/deps/cares/include\ares_build.h(168,11): error: 'WIN32_LEAN_AND_MEAN' macro redefined [-Werror,-Wmacro-redefined]
168 | # define WIN32_LEAN_AND_MEAN
| ^
<command line>(25,9): note: previous definition is here
25 | #define WIN32_LEAN_AND_MEAN 1
| ^
1 error generated.
[287 processes, 49437/51449 @ 48.5/s : 1018.562s] CC obj/third_party/electron_node/deps/cares/cares/ares__socket.obj
FAILED: obj/third_party/electron_node/deps/cares/cares/ares__socket.obj
This should be upstreamed.
diff --git a/deps/cares/include/ares_build.h b/deps/cares/include/ares_build.h
index 18a92606a817145302c73b5081b4c989799bc620..bafd26d9210d2347fec41f028e9e65088b83c48c 100644
--- a/deps/cares/include/ares_build.h
+++ b/deps/cares/include/ares_build.h
@@ -165,7 +165,9 @@
# define CARES_TYPEOF_ARES_SOCKLEN_T int
#elif defined(_WIN32)
-# define WIN32_LEAN_AND_MEAN
+# ifndef WIN32_LEAN_AND_MEAN
+# define WIN32_LEAN_AND_MEAN
+# endif
# define CARES_TYPEOF_ARES_SOCKLEN_T int
# define CARES_HAVE_WINDOWS_H 1
# define CARES_HAVE_SYS_TYPES_H 1

View File

@@ -7,7 +7,7 @@ Subject: build: ensure native module compilation fails if not using a new
This should not be upstreamed, it is a quality-of-life patch for downstream module builders.
diff --git a/common.gypi b/common.gypi
index 776a6dcf3a729a65b367fb5b4c5685f841089eea..690068f093f12b6831f8ccce41289d02d7047a7a 100644
index 697b8bba6a55358924d6986f2eb347a99ff73889..bdf1a1f33f3ea09d933757c7fee87c563cc833ab 100644
--- a/common.gypi
+++ b/common.gypi
@@ -86,6 +86,8 @@
@@ -40,10 +40,10 @@ index 776a6dcf3a729a65b367fb5b4c5685f841089eea..690068f093f12b6831f8ccce41289d02
# list in v8/BUILD.gn.
['v8_enable_v8_checks == 1', {
diff --git a/configure.py b/configure.py
index 0df90b176e9b5403efdb1393c0f2f37bb53dc6b2..ece665915ad4d6e02762ec3165cf7b987a87949d 100755
index 0d089c35d1720e05c4c61d0226a2ebc276b65d6e..cf19b9d092698e1697508e8891926947bc2f7b12 100755
--- a/configure.py
+++ b/configure.py
@@ -1595,6 +1595,7 @@ def configure_library(lib, output, pkgname=None):
@@ -1585,6 +1585,7 @@ def configure_library(lib, output, pkgname=None):
def configure_v8(o):
@@ -52,7 +52,7 @@ index 0df90b176e9b5403efdb1393c0f2f37bb53dc6b2..ece665915ad4d6e02762ec3165cf7b98
o['variables']['v8_enable_javascript_promise_hooks'] = 1
o['variables']['v8_enable_lite_mode'] = 1 if options.v8_lite_mode else 0
diff --git a/src/node.h b/src/node.h
index 7d9d9afd049237646f82c3e22f5e7a8af7314a84..a941d9987abcbb0f9db3072b8c896a5f328dd027 100644
index 4f2eb9d0aab88b70c86339e750799080e980d7da..df3fb3372d6357b5d77b4f683e309b8483998128 100644
--- a/src/node.h
+++ b/src/node.h
@@ -22,6 +22,12 @@

View File

@@ -46,21 +46,12 @@ index c7ae7759595bfc7fdc31dab174a7514ddd8345e7..4bf80aa6cc6385dc376fd0a3538efc27
AddExternalizedBuiltin(
"internal/deps/cjs-module-lexer/lexer",
diff --git a/src/node_builtins.h b/src/node_builtins.h
index a73de23a1debfdac66873e0baccf882e383bfc36..c3c987d535285be84026ad0c633650bd2067d22d 100644
index cec9be01abd107e8612f70daf19b4834e118ffcf..3d9c6b962423555257bad4ebaad9ebd821d00042 100644
--- a/src/node_builtins.h
+++ b/src/node_builtins.h
@@ -74,6 +74,8 @@ using BuiltinCodeCacheMap =
// Generated by tools/js2c.cc as node_javascript.cc
void RegisterExternalReferencesForInternalizedBuiltinCode(
ExternalReferenceRegistry* registry);
+void EmbedderRegisterExternalReferencesForInternalizedBuiltinCode(
+ ExternalReferenceRegistry* registry);
@@ -138,6 +138,7 @@ class NODE_EXTERN_PRIVATE BuiltinLoader {
// Handles compilation and caching of built-in JavaScript modules and
// bootstrap scripts, whose source are bundled into the binary as static data.
@@ -136,6 +138,7 @@ class NODE_EXTERN_PRIVATE BuiltinLoader {
// Generated by tools/js2c.cc as node_javascript.cc
// Generated by tools/js2c.py as node_javascript.cc
void LoadJavaScriptSource(); // Loads data into source_
+ void LoadEmbedderJavaScriptSource(); // Loads embedder data into source_
UnionBytes GetConfig(); // Return data for config.gypi

View File

@@ -1,59 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: deepak1556 <hop2deep@gmail.com>
Date: Mon, 3 Feb 2025 21:44:36 +0900
Subject: build: remove explicit linker call to libm on macOS
/usr/lib/libm.tbd is available via libSystem.*.dylib and
reexports sanitizer symbols. When building for asan
this becomes an issue as the linker will resolve the symbols
from the system library rather from libclang_rt.*
For V8 that rely on specific version of these symbols
that get bundled as part of clang, for ex:
https://source.chromium.org/chromium/chromium/src/+/main:v8/src/heap/cppgc/platform.cc;l=93-97
accepting nullptr for shadow_offset in `asan_get_shadow_mapping`,
linking to system version that doesn't support this will lead to
a crash.
Clang driver eventually links with `-lSystem`
https://github.com/llvm/llvm-project/blob/e82f93890daefeb38fe2a22ee3db87a89948ec57/clang/lib/Driver/ToolChains/Darwin.cpp#L1628-L1631,
this is done after linking the sanitizer libraries which
ensures right order of resolution for the symbols.
PR-URL: https://github.com/nodejs/node/pull/56901
Reviewed-By: Joyee Cheung <joyeec9h3@gmail.com>
Reviewed-By: Chengzhong Wu <legendecas@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Shelley Vohr <shelley.vohr@gmail.com>
diff --git a/deps/brotli/unofficial.gni b/deps/brotli/unofficial.gni
index 5e07e106672a04508a77584c109c97a67926c858..91001fa43ea4807d061f296eaeccb7512e34863e 100644
--- a/deps/brotli/unofficial.gni
+++ b/deps/brotli/unofficial.gni
@@ -25,7 +25,7 @@ template("brotli_gn_build") {
} else if (target_os == "freebsd") {
defines = [ "OS_FREEBSD" ]
}
- if (!is_win) {
+ if (is_linux) {
libs = [ "m" ]
}
if (is_clang || !is_win) {
diff --git a/deps/uv/unofficial.gni b/deps/uv/unofficial.gni
index 7a73f891e3fc3261b77af97af63fca2eade49849..bda1b5dc899558c2b4a22377dde9fb3bcce5488c 100644
--- a/deps/uv/unofficial.gni
+++ b/deps/uv/unofficial.gni
@@ -82,11 +82,11 @@ template("uv_gn_build") {
]
}
if (is_posix) {
- libs = [ "m" ]
ldflags = [ "-pthread" ]
}
if (is_linux) {
- libs += [
+ libs = [
+ "m",
"dl",
"rt",
]

View File

@@ -11,7 +11,7 @@ node-gyp will use the result of `process.config` that reflects the environment
in which the binary got built.
diff --git a/common.gypi b/common.gypi
index 5a3df388773ad288553bf036be42dc1a0ba75c09..9bbf1b277eb17d78ca385643c3177638fd75866a 100644
index 2eb62610db2f0ebf68fa9a55ffba98291ecfe451..3ec08ee144b586d05c4e49c2251416734cbc02c5 100644
--- a/common.gypi
+++ b/common.gypi
@@ -125,6 +125,7 @@

View File

@@ -26,12 +26,12 @@ index 364469160af5e348f8890417de16a63c0d1dca67..75d5f58fe02fa8cfa7716ffaf761d567
try {
resolvedArgv = Module._resolveFilename(process.argv[1], null, false);
diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js
index 0bbabb80c26a1208860f6d32447c0ae53316f501..ba18bfa3cd17c2b8e977f53651ce2744b17a829f 100644
index a05d2846050c2f505eac16320f645e79182a27f6..348bb80ef7fae1e0e5f529b1313093eeadad9276 100644
--- a/lib/internal/process/pre_execution.js
+++ b/lib/internal/process/pre_execution.js
@@ -245,12 +245,14 @@ function patchProcessObject(expandArgv1) {
// the entry point.
if (expandArgv1 && process.argv[1] && process.argv[1][0] !== '-') {
@@ -247,12 +247,14 @@ function patchProcessObject(expandArgv1) {
if (expandArgv1 && process.argv[1] &&
!StringPrototypeStartsWith(process.argv[1], '-')) {
// Expand process.argv[1] into a full path.
- const path = require('path');
- try {

View File

@@ -7,7 +7,7 @@ common.gypi is a file that's included in the node header bundle, despite
the fact that we do not build node with gyp.
diff --git a/common.gypi b/common.gypi
index 04852d81103ef83c5213464f543839dea6f0b181..776a6dcf3a729a65b367fb5b4c5685f841089eea 100644
index 1ece4f5e494533ea0fa25e0d35143fe424dbf70b..697b8bba6a55358924d6986f2eb347a99ff73889 100644
--- a/common.gypi
+++ b/common.gypi
@@ -88,6 +88,23 @@

View File

@@ -58,10 +58,10 @@ index b9098d102b40adad7fafcc331ac62870617019b9..cb9269a31e073caf86164aa39c064037
}
diff --git a/src/node_options.cc b/src/node_options.cc
index b7ef44b018c7aec59d8311642a811d1280247689..e19b9d88a908154dbcfd0b0e3efbc7510609f810 100644
index efbe48e10b8408642a6b5010b1a7a3749068188a..9b5f49ebb478a21acf8f0d6ed012ed6829a8a320 100644
--- a/src/node_options.cc
+++ b/src/node_options.cc
@@ -1409,14 +1409,16 @@ void GetEmbedderOptions(const FunctionCallbackInfo<Value>& args) {
@@ -1405,14 +1405,16 @@ void GetEmbedderOptions(const FunctionCallbackInfo<Value>& args) {
}
Isolate* isolate = args.GetIsolate();

View File

@@ -347,7 +347,7 @@ index 81a469c226c261564dee1e0b06b6571b18a41f1f..58b66045dba4201b7ebedd78b129420f
const availableCurves = new Set(crypto.getCurves());
diff --git a/test/parallel/test-crypto-dh-errors.js b/test/parallel/test-crypto-dh-errors.js
index 476ca64b4425b5b8b0fa2dc8352ee6f03d563813..2250a8f24a875d6af198426891870b450078ee5f 100644
index fcf1922bcdba733af6c22f142db4f7b099947757..9f72ae4e41a113e752f40795103c2af514538780 100644
--- a/test/parallel/test-crypto-dh-errors.js
+++ b/test/parallel/test-crypto-dh-errors.js
@@ -32,9 +32,9 @@ for (const bits of [-1, 0, 1]) {
@@ -362,6 +362,24 @@ index 476ca64b4425b5b8b0fa2dc8352ee6f03d563813..2250a8f24a875d6af198426891870b45
});
}
}
@@ -43,7 +43,7 @@ for (const g of [-1, 1]) {
const ex = {
code: 'ERR_OSSL_DH_BAD_GENERATOR',
name: 'Error',
- message: /bad generator/,
+ message: /bad generator|BAD_GENERATOR/,
};
assert.throws(() => crypto.createDiffieHellman('abcdef', g), ex);
assert.throws(() => crypto.createDiffieHellman('abcdef', 'hex', g), ex);
@@ -55,7 +55,7 @@ for (const g of [Buffer.from([]),
const ex = {
code: 'ERR_OSSL_DH_BAD_GENERATOR',
name: 'Error',
- message: /bad generator/,
+ message: /bad generator|BAD_GENERATOR/,
};
assert.throws(() => crypto.createDiffieHellman('abcdef', g), ex);
assert.throws(() => crypto.createDiffieHellman('abcdef', 'hex', g), ex);
diff --git a/test/parallel/test-crypto-dh.js b/test/parallel/test-crypto-dh.js
index 8ae0a002fec0944737d2c6ae73fc8956e41beb50..5b37236a6c2f1ec1761d8143c8ea6a7e2a837a7a 100644
--- a/test/parallel/test-crypto-dh.js
@@ -517,6 +535,19 @@ index f1f14b472997e76bb4100edb1c6cf4fc24d1074d..5057e3f9bc5bb78aceffa5e79530f8ce
});
// No-pad encrypted string should return the same:
diff --git a/test/parallel/test-crypto-private-decrypt-gh32240.js b/test/parallel/test-crypto-private-decrypt-gh32240.js
index 1785f5eef3d202976666081d09850ed744d83446..e88227a215ba4f7fa196f7642ae694a57d55b3ca 100644
--- a/test/parallel/test-crypto-private-decrypt-gh32240.js
+++ b/test/parallel/test-crypto-private-decrypt-gh32240.js
@@ -24,7 +24,7 @@ const pkeyEncrypted =
pair.privateKey.export({
type: 'pkcs1',
format: 'pem',
- cipher: 'aes128',
+ cipher: 'aes-128-cbc',
passphrase: 'secret',
});
diff --git a/test/parallel/test-crypto-rsa-dsa.js b/test/parallel/test-crypto-rsa-dsa.js
index 5f4fafdfffbf726b7cb39c472baa3df25c9794cf..73bb53b0405b20f51b13326cc70e52755c674366 100644
--- a/test/parallel/test-crypto-rsa-dsa.js
@@ -660,23 +691,90 @@ index 008ab129f0e019c659eecf5a76b7eb412c947fe3..6688f5d916f50e1e4fcfff1619c8634a
cipher.end('Papaya!'); // Should not cause an unhandled exception.
diff --git a/test/parallel/test-crypto-x509.js b/test/parallel/test-crypto-x509.js
index ee4d96b476864ed5ecad7e8421f41a39bb2f9268..099e7812bfa700a42c2849f946fb819f691c85cf 100644
index 89a7521544f7051edc1779138551bbad1972b3fb..91df6acc65d4003999f29f0fa5f639056b21ee3b 100644
--- a/test/parallel/test-crypto-x509.js
+++ b/test/parallel/test-crypto-x509.js
@@ -323,6 +323,7 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
legacyObjectCheck.serialNumberPattern);
@@ -111,7 +111,7 @@ const der = Buffer.from(
'5A:42:63:E0:21:2F:D6:70:63:07:96:6F:27:A7:78:12:08:02:7A:8B'
);
assert.strictEqual(x509.keyUsage, undefined);
- assert.strictEqual(x509.serialNumber, '147D36C1C2F74206DE9FAB5F2226D78ADB00A426');
+ assert.match(x509.serialNumber, /147D36C1C2F74206DE9FAB5F2226D78ADB00A426/i);
assert.deepStrictEqual(x509.raw, der);
@@ -253,6 +253,16 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
});
mc.port2.postMessage(x509);
+ const modulusOSSL = 'D456320AFB20D3827093DC2C4284ED04DFBABD56E1DDAE529E28B790CD42' +
+ '56DB273349F3735FFD337C7A6363ECCA5A27B7F73DC7089A96C6D886DB0C' +
+ '62388F1CDD6A963AFCD599D5800E587A11F908960F84ED50BA25A28303EC' +
+ 'DA6E684FBE7BAEDC9CE8801327B1697AF25097CEE3F175E400984C0DB6A8' +
+ 'EB87BE03B4CF94774BA56FFFC8C63C68D6ADEB60ABBE69A7B14AB6A6B9E7' +
+ 'BAA89B5ADAB8EB07897C07F6D4FA3D660DFF574107D28E8F63467A788624' +
+ 'C574197693E959CEA1362FFAE1BBA10C8C0D88840ABFEF103631B2E8F5C3' +
+ '9B5548A7EA57E8A39F89291813F45A76C448033A2B7ED8403F4BAA147CF3' +
+ '5E2D2554AA65CE49695797095BF4DC6B';
+
// Verify that legacy encoding works
const legacyObjectCheck = {
subject: Object.assign({ __proto__: null }, {
@@ -277,15 +287,7 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
'OCSP - URI': ['http://ocsp.nodejs.org/'],
'CA Issuers - URI': ['http://ca.nodejs.org/ca.cert']
}),
- modulus: 'D456320AFB20D3827093DC2C4284ED04DFBABD56E1DDAE529E28B790CD42' +
- '56DB273349F3735FFD337C7A6363ECCA5A27B7F73DC7089A96C6D886DB0C' +
- '62388F1CDD6A963AFCD599D5800E587A11F908960F84ED50BA25A28303EC' +
- 'DA6E684FBE7BAEDC9CE8801327B1697AF25097CEE3F175E400984C0DB6A8' +
- 'EB87BE03B4CF94774BA56FFFC8C63C68D6ADEB60ABBE69A7B14AB6A6B9E7' +
- 'BAA89B5ADAB8EB07897C07F6D4FA3D660DFF574107D28E8F63467A788624' +
- 'C574197693E959CEA1362FFAE1BBA10C8C0D88840ABFEF103631B2E8F5C3' +
- '9B5548A7EA57E8A39F89291813F45A76C448033A2B7ED8403F4BAA147CF3' +
- '5E2D2554AA65CE49695797095BF4DC6B',
+ modulusPattern: new RegExp(modulusOSSL, 'i'),
bits: 2048,
exponent: '0x10001',
valid_from: 'Sep 3 21:40:37 2022 GMT',
@@ -298,7 +300,7 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
'51:62:18:39:E2:E2:77:F5:86:11:E8:C0:CA:54:43:7C:76:83:19:05:D0:03:' +
'24:21:B8:EB:14:61:FB:24:16:EB:BD:51:1A:17:91:04:30:03:EB:68:5F:DC:' +
'86:E1:D1:7C:FB:AF:78:ED:63:5F:29:9C:32:AF:A1:8E:22:96:D1:02',
- serialNumber: '147D36C1C2F74206DE9FAB5F2226D78ADB00A426'
+ serialNumberPattern: /147D36C1C2F74206DE9FAB5F2226D78ADB00A426/i
};
const legacyObject = x509.toLegacyObject();
@@ -307,7 +309,7 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
assert.deepStrictEqual(legacyObject.subject, legacyObjectCheck.subject);
assert.deepStrictEqual(legacyObject.issuer, legacyObjectCheck.issuer);
assert.deepStrictEqual(legacyObject.infoAccess, legacyObjectCheck.infoAccess);
- assert.strictEqual(legacyObject.modulus, legacyObjectCheck.modulus);
+ assert.match(legacyObject.modulus, legacyObjectCheck.modulusPattern);
assert.strictEqual(legacyObject.bits, legacyObjectCheck.bits);
assert.strictEqual(legacyObject.exponent, legacyObjectCheck.exponent);
assert.strictEqual(legacyObject.valid_from, legacyObjectCheck.valid_from);
@@ -316,11 +318,12 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI=
assert.strictEqual(
legacyObject.fingerprint256,
legacyObjectCheck.fingerprint256);
- assert.strictEqual(
+ assert.match(
legacyObject.serialNumber,
- legacyObjectCheck.serialNumber);
+ legacyObjectCheck.serialNumberPattern);
}
+/*
{
// This X.509 Certificate can be parsed by OpenSSL because it contains a
// structurally sound TBSCertificate structure. However, the SPKI field of the
@@ -361,3 +362,4 @@ UcXd/5qu2GhokrKU2cPttU+XAN2Om6a0
@@ -359,3 +362,4 @@ UcXd/5qu2GhokrKU2cPttU+XAN2Om6a0
assert.strictEqual(cert.checkIssued(cert), false);
}
+*/
\ No newline at end of file
diff --git a/test/parallel/test-crypto.js b/test/parallel/test-crypto.js
index a8ceb169de2b3de73f062083c42292babc673e73..8fb950d0814e5014faf5c1ef576b65795857da1b 100644
--- a/test/parallel/test-crypto.js
@@ -790,7 +888,7 @@ index 543ee176fb6af38874fee9f14be76f3fdda11060..fef9f1bc2f9fc6c220cf47847e86e038
}
diff --git a/test/parallel/test-https-agent-session-eviction.js b/test/parallel/test-https-agent-session-eviction.js
index e0986e53c1103b63cf15002a7fa4ce8bc4844d90..33c8a2aa72c56dd4a98558aab2102f03fae2b3cf 100644
index da5600710560b22049eba1ef18bbb742d447a673..8917b96f666de916616af2fb3ce3a58d00af7438 100644
--- a/test/parallel/test-https-agent-session-eviction.js
+++ b/test/parallel/test-https-agent-session-eviction.js
@@ -14,7 +14,7 @@ const options = {
@@ -802,8 +900,35 @@ index e0986e53c1103b63cf15002a7fa4ce8bc4844d90..33c8a2aa72c56dd4a98558aab2102f03
};
// Create TLS1.2 server
diff --git a/test/parallel/test-tls-getcertificate-x509.js b/test/parallel/test-tls-getcertificate-x509.js
index aa685ca9e09cf0d17ff4d5480089e9977dd51f72..ccafa427433922155c1afd5d95ba69d8108825ef 100644
--- a/test/parallel/test-tls-getcertificate-x509.js
+++ b/test/parallel/test-tls-getcertificate-x509.js
@@ -20,9 +20,7 @@ const server = tls.createServer(options, function(cleartext) {
server.once('secureConnection', common.mustCall(function(socket) {
const cert = socket.getX509Certificate();
assert(cert instanceof X509Certificate);
- assert.strictEqual(
- cert.serialNumber,
- '5B75D77EDC7FB5B7FA9F1424DA4C64FB815DCBDE');
+ assert.match(cert.serialNumber, /5B75D77EDC7FB5B7FA9F1424DA4C64FB815DCBDE/i)
}));
server.listen(0, common.mustCall(function() {
@@ -33,10 +31,7 @@ server.listen(0, common.mustCall(function() {
const peerCert = socket.getPeerX509Certificate();
assert(peerCert.issuerCertificate instanceof X509Certificate);
assert.strictEqual(peerCert.issuerCertificate.issuerCertificate, undefined);
- assert.strictEqual(
- peerCert.issuerCertificate.serialNumber,
- '147D36C1C2F74206DE9FAB5F2226D78ADB00A425'
- );
+ assert.match(peerCert.issuerCertificate.serialNumber, /147D36C1C2F74206DE9FAB5F2226D78ADB00A425/i);
server.close();
}));
socket.end('Hello');
diff --git a/test/parallel/test-tls-getprotocol.js b/test/parallel/test-tls-getprotocol.js
index a9c8775e2f112f2b5e1f4e80f22264f219bf6a9d..4550d28125379e6043962826b8e97b692d63804b 100644
index 571f400cea574662bda6be8eecd22ceccaf75420..2296362dc625ee663df11927297ccf124233a19b 100644
--- a/test/parallel/test-tls-getprotocol.js
+++ b/test/parallel/test-tls-getprotocol.js
@@ -27,7 +27,7 @@ const clientConfigs = [

View File

@@ -19,7 +19,7 @@ index 22248b753c14960122f1d6b9bfe6b89fdb8d2010..9d245a04fbcb98dcd1c61e60f7cfe528
context = { __proto__: context, source };
}
diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js
index 9c9b403774e51e2f7e1201ad53e60e98fc3b26e8..477a3b2cf61fcaa154f01ac77c8047ef548ac1f8 100644
index f3dfc69cd2cdec50bc3b3f7cb2d63349812d87dd..b6f2d7194cb75ecc8c47869761c63184707ade40 100644
--- a/lib/internal/modules/esm/translators.js
+++ b/lib/internal/modules/esm/translators.js
@@ -375,6 +375,9 @@ function cjsPreparseModuleExports(filename, source) {

View File

@@ -48,10 +48,10 @@ index 7b77af35a1dfebf6ad45ace521f1a55b5fa18293..ac24cf305bd5995ad13b37ee36f9e1fe
ArrayPrototypePush(schemes, 'https', 'http');
}
diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js
index 93c7a040fd47f050b10b949e4f619d85660110c8..77b85490b1a65b594bb140deb07b33f3accc9736 100644
index e73a8ad60a13925d6773c32cead8d04ec9d96ee7..52cdb7d5e14a18ed7b1b65e429729cf47dce3f98 100644
--- a/lib/internal/modules/esm/resolve.js
+++ b/lib/internal/modules/esm/resolve.js
@@ -760,6 +760,8 @@ function packageImportsResolve(name, base, conditions) {
@@ -741,6 +741,8 @@ function packageImportsResolve(name, base, conditions) {
throw importNotDefined(name, packageJSONUrl, base);
}
@@ -60,7 +60,7 @@ index 93c7a040fd47f050b10b949e4f619d85660110c8..77b85490b1a65b594bb140deb07b33f3
/**
* Returns the package type for a given URL.
* @param {URL} url - The URL to get the package type for.
@@ -820,6 +822,11 @@ function packageResolve(specifier, base, conditions) {
@@ -801,6 +803,11 @@ function packageResolve(specifier, base, conditions) {
return new URL('node:' + specifier);
}
@@ -73,7 +73,7 @@ index 93c7a040fd47f050b10b949e4f619d85660110c8..77b85490b1a65b594bb140deb07b33f3
parsePackageName(specifier, base);
diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js
index 044d820161a5f9809dd042be7e53b717de8678ab..5281d7c220c8adee0f3d7269a0116d082726ec91 100644
index 8f4b6b25d8889686d00613fd9821b0aa822a946a..89ca269294ee1afa7f5aeb0ac6b8958f7a8b49d0 100644
--- a/lib/internal/modules/esm/translators.js
+++ b/lib/internal/modules/esm/translators.js
@@ -272,7 +272,7 @@ function createCJSModuleWrap(url, source, isMain, loadCJS = loadCJSModule) {
@@ -108,7 +108,7 @@ index 044d820161a5f9809dd042be7e53b717de8678ab..5281d7c220c8adee0f3d7269a0116d08
// or as the initial entry point when the ESM loader handles a CommonJS entry.
translators.set('commonjs', async function commonjsStrategy(url, source,
diff --git a/lib/internal/url.js b/lib/internal/url.js
index 42debfc20005b0d293e5507546dcd8caec2b6a4e..4b6d07b14bd92ac527b7f4bd55ff7005967cee7d 100644
index e6ed5466b8807a52633d8406824058bdc8c2ce13..e055facddf086eb8fb456b865ce006cdb7602b0a 100644
--- a/lib/internal/url.js
+++ b/lib/internal/url.js
@@ -1485,6 +1485,8 @@ function fileURLToPath(path, options = kEmptyObject) {

View File

@@ -22,7 +22,7 @@ index 46106fa94b3055648e4f01cd28860d427268a253..e0bf37f09dceb93af58990438ab577a9
int thread_pool_size,
node::tracing::TracingController* tracing_controller) {
diff --git a/src/node.h b/src/node.h
index ec5f6d0d25731dfb5ceeae3cd8749630298a8ba0..7d9d9afd049237646f82c3e22f5e7a8af7314a84 100644
index 6373adacb628459a4c9d7237da2587aee318e2d8..4f2eb9d0aab88b70c86339e750799080e980d7da 100644
--- a/src/node.h
+++ b/src/node.h
@@ -133,6 +133,7 @@ struct SnapshotData;

View File

@@ -388,7 +388,7 @@ index cf051585e779e2b03bd7b95fe5008b89cc7f8162..9de49c6828468fdf846dcd4ad445390f
#if NODE_OPENSSL_HAS_QUIC
#include <openssl/quic.h>
diff --git a/src/node_options.cc b/src/node_options.cc
index f6ff810953b224b7e343d91e1065d95bc3e78d39..b7ef44b018c7aec59d8311642a811d1280247689 100644
index e325b082dec6ae891a737a4fa7775937e4d7b7f9..efbe48e10b8408642a6b5010b1a7a3749068188a 100644
--- a/src/node_options.cc
+++ b/src/node_options.cc
@@ -6,7 +6,7 @@

View File

@@ -38,7 +38,7 @@ index ac24cf305bd5995ad13b37ee36f9e1fe3589c5d7..22248b753c14960122f1d6b9bfe6b89f
const match = RegExpPrototypeExec(DATA_URL_PATTERN, url.pathname);
if (!match) {
diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js
index 77b85490b1a65b594bb140deb07b33f3accc9736..da99cc2d4ca930222ca018ddf42ee2e5f898576a 100644
index 52cdb7d5e14a18ed7b1b65e429729cf47dce3f98..69f73f829706deddc4f328b78af9d58434af647d 100644
--- a/lib/internal/modules/esm/resolve.js
+++ b/lib/internal/modules/esm/resolve.js
@@ -24,7 +24,7 @@ const {
@@ -50,7 +50,7 @@ index 77b85490b1a65b594bb140deb07b33f3accc9736..da99cc2d4ca930222ca018ddf42ee2e5
const { getOptionValue } = require('internal/options');
// Do not eagerly grab .manifest, it may be in TDZ
const policy = getOptionValue('--experimental-policy') ?
@@ -257,7 +257,7 @@ function finalizeResolution(resolved, base, preserveSymlinks) {
@@ -251,7 +251,7 @@ function finalizeResolution(resolved, base, preserveSymlinks) {
throw err;
}
@@ -59,7 +59,7 @@ index 77b85490b1a65b594bb140deb07b33f3accc9736..da99cc2d4ca930222ca018ddf42ee2e5
StringPrototypeSlice(path, -1) : path));
// Check for stats.isDirectory()
@@ -285,7 +285,7 @@ function finalizeResolution(resolved, base, preserveSymlinks) {
@@ -267,7 +267,7 @@ function finalizeResolution(resolved, base, preserveSymlinks) {
}
if (!preserveSymlinks) {
@@ -68,7 +68,7 @@ index 77b85490b1a65b594bb140deb07b33f3accc9736..da99cc2d4ca930222ca018ddf42ee2e5
[internalFS.realpathCacheKey]: realpathCache,
});
const { search, hash } = resolved;
@@ -845,7 +845,7 @@ function packageResolve(specifier, base, conditions) {
@@ -826,7 +826,7 @@ function packageResolve(specifier, base, conditions) {
let packageJSONPath = fileURLToPath(packageJSONUrl);
let lastPath;
do {
@@ -78,7 +78,7 @@ index 77b85490b1a65b594bb140deb07b33f3accc9736..da99cc2d4ca930222ca018ddf42ee2e5
// Check for !stat.isDirectory()
if (stat !== 1) {
diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js
index 5281d7c220c8adee0f3d7269a0116d082726ec91..9c9b403774e51e2f7e1201ad53e60e98fc3b26e8 100644
index 89ca269294ee1afa7f5aeb0ac6b8958f7a8b49d0..f3dfc69cd2cdec50bc3b3f7cb2d63349812d87dd 100644
--- a/lib/internal/modules/esm/translators.js
+++ b/lib/internal/modules/esm/translators.js
@@ -36,7 +36,7 @@ const {

View File

@@ -15,7 +15,7 @@ to recognize asar files.
This reverts commit 9cf2e1f55b8446a7cde23699d00a3be73aa0c8f1.
diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js
index da99cc2d4ca930222ca018ddf42ee2e5f898576a..34631a1f4ffea33ba520874e6571054d3d9c057e 100644
index 69f73f829706deddc4f328b78af9d58434af647d..1d53a2a47423150e822bb917b2725d3a6a794814 100644
--- a/lib/internal/modules/esm/resolve.js
+++ b/lib/internal/modules/esm/resolve.js
@@ -36,10 +36,9 @@ const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main');

View File

@@ -161,10 +161,10 @@ index a30c25a3a61dfe73944731760404c555f2782d72..6137551bb8168d8fa9a3e6bc79c3d1e7
inline MultiIsolatePlatform* platform() const;
inline const SnapshotData* snapshot_data() const;
diff --git a/src/node.h b/src/node.h
index a941d9987abcbb0f9db3072b8c896a5f328dd027..1a95ac78f7ba6c361db43b1f03c684cbc172de0f 100644
index df3fb3372d6357b5d77b4f683e309b8483998128..01e8a4f2ed905bf5bbb803419012a014c204b460 100644
--- a/src/node.h
+++ b/src/node.h
@@ -1555,24 +1555,14 @@ void RegisterSignalHandler(int signal,
@@ -1561,24 +1561,14 @@ void RegisterSignalHandler(int signal,
bool reset_handler = false);
#endif // _WIN32

View File

@@ -118,10 +118,10 @@ index 4ba261014695cf1aa8eb53b21a2873f4c4ea8e43..b695d131bcdc331974f544924138bb5e
v8::Local<v8::ArrayBuffer> ToArrayBuffer(Environment* env);
diff --git a/src/node_i18n.cc b/src/node_i18n.cc
index 1ddae30e97257be9f8145dcd419203dcf77aadac..bebe6fd7d139bcdd71e2be35831fd8a1d89ab15f 100644
index 2aa7cd98ecc179519a6bb1932dafa86a38bda4f5..79376bef2e674f05fd95380dd419e8778cb98623 100644
--- a/src/node_i18n.cc
+++ b/src/node_i18n.cc
@@ -103,7 +103,7 @@ namespace {
@@ -106,7 +106,7 @@ namespace {
template <typename T>
MaybeLocal<Object> ToBufferEndian(Environment* env, MaybeStackBuffer<T>* buf) {
@@ -130,7 +130,7 @@ index 1ddae30e97257be9f8145dcd419203dcf77aadac..bebe6fd7d139bcdd71e2be35831fd8a1
if (ret.IsEmpty())
return ret;
@@ -180,7 +180,7 @@ MaybeLocal<Object> TranscodeLatin1ToUcs2(Environment* env,
@@ -183,7 +183,7 @@ MaybeLocal<Object> TranscodeLatin1ToUcs2(Environment* env,
return {};
}
@@ -139,7 +139,7 @@ index 1ddae30e97257be9f8145dcd419203dcf77aadac..bebe6fd7d139bcdd71e2be35831fd8a1
}
MaybeLocal<Object> TranscodeFromUcs2(Environment* env,
@@ -225,7 +225,7 @@ MaybeLocal<Object> TranscodeUcs2FromUtf8(Environment* env,
@@ -228,7 +228,7 @@ MaybeLocal<Object> TranscodeUcs2FromUtf8(Environment* env,
return {};
}
@@ -148,7 +148,7 @@ index 1ddae30e97257be9f8145dcd419203dcf77aadac..bebe6fd7d139bcdd71e2be35831fd8a1
}
MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
@@ -249,7 +249,7 @@ MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
@@ -252,7 +252,7 @@ MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
return {};
}

View File

@@ -7,7 +7,7 @@ Instead of disabling the tests, flag them as flaky so they still run
but don't cause CI failures on flakes.
diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status
index a7860449225092529e1a262aed3f30fb21212e9d..1a55ea53c602b3fede260fffc173dfabc0f594d4 100644
index 79a953df7da64b7d7580e099a5cc5160e7842999..94616df356cab50c8ef4099e7863f5986deed377 100644
--- a/test/parallel/parallel.status
+++ b/test/parallel/parallel.status
@@ -5,6 +5,16 @@ prefix parallel
@@ -28,7 +28,7 @@ index a7860449225092529e1a262aed3f30fb21212e9d..1a55ea53c602b3fede260fffc173dfab
test-net-write-fully-async-hex-string: PASS, FLAKY
# https://github.com/nodejs/node/issues/52273
diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status
index dd2275ddc9540464d838476f998cea683d5ceccc..796e3c56174d9cd2b9d3969150d9605b32c46f0b 100644
index 073b29cce8dbca4c8d92ad666f9244ad511296db..338d20263f29a630febb96567f3cb708623bd09a 100644
--- a/test/sequential/sequential.status
+++ b/test/sequential/sequential.status
@@ -7,6 +7,18 @@ prefix sequential

View File

@@ -1 +0,0 @@
ganesh_avoid_overflow_when_combining_aahairlineops.patch

View File

@@ -1,62 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: James Godfrey-Kittle <jamesgk@google.com>
Date: Tue, 17 Dec 2024 12:14:17 -0500
Subject: Avoid overflow when combining AAHairlineOps
Bug: b/382786791
Change-Id: I955d943015cce76f75221df9fab0897a6f22fe4b
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/930577
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: James Godfrey-Kittle <jamesgk@google.com>
(cherry picked from commit 8b030e47588af50f56ef380d81a17667baeb582b)
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/935337
Reviewed-by: James Godfrey-Kittle <jamesgk@google.com>
Auto-Submit: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Michael Ludwig <michaelludwig@google.com>
diff --git a/src/gpu/ganesh/ops/AAHairLinePathRenderer.cpp b/src/gpu/ganesh/ops/AAHairLinePathRenderer.cpp
index 352790443969808d7147d57061a712de0224a867..2c04f3109806e2fd057efef31263b52dd6fe2035 100644
--- a/src/gpu/ganesh/ops/AAHairLinePathRenderer.cpp
+++ b/src/gpu/ganesh/ops/AAHairLinePathRenderer.cpp
@@ -27,6 +27,7 @@
#include "include/private/base/SkPoint_impl.h"
#include "include/private/base/SkTArray.h"
#include "include/private/gpu/ganesh/GrTypesPriv.h"
+#include "src/base/SkSafeMath.h"
#include "src/core/SkGeometry.h"
#include "src/core/SkMatrixPriv.h"
#include "src/core/SkPointPriv.h"
@@ -1219,16 +1220,28 @@ void AAHairlineOp::onPrepareDraws(GrMeshDrawTarget* target) {
int instanceCount = fPaths.size();
bool convertConicsToQuads = !target->caps().shaderCaps()->fFloatIs32Bits;
- for (int i = 0; i < instanceCount; i++) {
+ SkSafeMath safeMath;
+ for (int i = 0; i < instanceCount && safeMath.ok(); i++) {
const PathData& args = fPaths[i];
- quadCount += gather_lines_and_quads(args.fPath, args.fViewMatrix, args.fDevClipBounds,
- args.fCapLength, convertConicsToQuads, &lines, &quads,
- &conics, &qSubdivs, &cWeights);
+ quadCount = safeMath.addInt(quadCount,
+ gather_lines_and_quads(args.fPath,
+ args.fViewMatrix,
+ args.fDevClipBounds,
+ args.fCapLength,
+ convertConicsToQuads,
+ &lines,
+ &quads,
+ &conics,
+ &qSubdivs,
+ &cWeights));
}
int lineCount = lines.size() / 2;
int conicCount = conics.size() / 3;
- int quadAndConicCount = conicCount + quadCount;
+ int quadAndConicCount = safeMath.addInt(conicCount, quadCount);
+ if (!safeMath.ok()) {
+ return;
+ }
static constexpr int kMaxLines = SK_MaxS32 / kLineSegNumVertices;
static constexpr int kMaxQuadsAndConics = SK_MaxS32 / kQuadNumVertices;

View File

@@ -1,17 +1,4 @@
chore_allow_customizing_microtask_policy_per_context.patch
deps_add_v8_object_setinternalfieldfornodecore.patch
fix_disable_scope_reuse_associated_dchecks.patch
fix_compiler_failure_on_older_clang.patch
cherry-pick-3c2d220ad025.patch
merged_reland_lower_the_maximum_js_parameter_count.patch
cherry-pick-1c7ff4d5477f.patch
cherry-pick-8834c16acfcc.patch
cherry-pick-9209292e7898.patch
cherry-pick-97e828af5cbc.patch
cherry-pick-ca504d096c39.patch
cherry-pick-8131c09bc129.patch
wasm_fix_freeing_of_identical_shared_wrappers.patch
merged_wasm_replace_dead_code_set_with_is_dying_bit.patch
merged_fix_out_of_bound_string_access.patch
cherry-pick-91343bb45c78.patch
cherry-pick-2b4812d502b2.patch
cherry-pick-3fdedec45691.patch

View File

@@ -1,60 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Olivier=20Fl=C3=BCckiger?= <olivf@chromium.org>
Date: Mon, 27 Jan 2025 14:50:34 +0100
Subject: Merged: [turbofan] LoadField's type with recorded FieldType depends
on stability
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bug: 390465670
(cherry picked from commit f920b6b2c2b1132cd1fbe1198500ceeaedcaa146)
Change-Id: I0b430909275e583e5c0ecf2840e143fef461cbad
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6218882
Reviewed-by: Darius Mercadier <dmercadier@chromium.org>
Commit-Queue: Darius Mercadier <dmercadier@chromium.org>
Auto-Submit: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Olivier Flückiger <olivf@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#72}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/compiler/access-info.cc b/src/compiler/access-info.cc
index 595f4ad9fb5a0c744aed9226eb9c536ef156b5cf..8338b2d13daf37ef238fe5cdf22a4ff9b2a0362a 100644
--- a/src/compiler/access-info.cc
+++ b/src/compiler/access-info.cc
@@ -474,8 +474,9 @@ PropertyAccessInfo AccessInfoFactory::ComputeDataFieldAccessInfo(
OptionalMapRef maybe_field_map =
TryMakeRef(broker(), FieldType::AsClass(*descriptors_field_type));
if (!maybe_field_map.has_value()) return Invalid();
- field_type = Type::For(maybe_field_map.value(), broker());
field_map = maybe_field_map;
+ // field_type can only be inferred from field_map if it is stable and we
+ // add a stability dependency. This happens on use in the access builder.
}
} else {
CHECK(details_representation.IsTagged());
@@ -1168,8 +1169,9 @@ PropertyAccessInfo AccessInfoFactory::LookupTransition(
OptionalMapRef maybe_field_map =
TryMakeRef(broker(), FieldType::AsClass(*descriptors_field_type));
if (!maybe_field_map.has_value()) return Invalid();
- field_type = Type::For(maybe_field_map.value(), broker());
field_map = maybe_field_map;
+ // field_type can only be inferred from field_map if it is stable and we
+ // add a stability dependency. This happens on use in the access builder.
}
}
diff --git a/src/compiler/property-access-builder.cc b/src/compiler/property-access-builder.cc
index 43203d2390ff3ed06a6de9fe54409de86820e729..8714c91d7572329fead93d65fdd439d81b3ea36f 100644
--- a/src/compiler/property-access-builder.cc
+++ b/src/compiler/property-access-builder.cc
@@ -318,6 +318,7 @@ Node* PropertyAccessBuilder::BuildLoadDataField(
if (field_map->is_stable()) {
dependencies()->DependOnStableMap(field_map.value());
field_access.map = field_map;
+ field_access.type = Type::For(*field_map, broker());
}
}
}

View File

@@ -1,38 +0,0 @@
From 2b4812d502b2bbd2eeace4d383dd1bb3252702ba Mon Sep 17 00:00:00 2001
From: Olivier Flückiger <olivf@chromium.org>
Date: Thu, 27 Feb 2025 09:19:22 +0100
Subject: [PATCH] merged: [maglev] Add missing ClearAllocationBlock
Fixed: 398065918
(cherry picked from commit eb9b25970b0ad4a3f8ce23d8de3583c62e5d6b87)
Change-Id: I20f3979984c1df11509f1630cf4c4c4460d6a83a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6304712
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Auto-Submit: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.4@{#29}
Cr-Branched-From: 0f87a54dade4353b6ece1d7591ca8c66f90c1c93-refs/heads/13.4.114@{#1}
Cr-Branched-From: 27af2e9363b2701abc5f3feb701b1dad7d1a9fe8-refs/heads/main@{#98459}
---
diff --git a/src/maglev/maglev-graph-builder.cc b/src/maglev/maglev-graph-builder.cc
index bf6a86f..1535afc 100644
--- a/src/maglev/maglev-graph-builder.cc
+++ b/src/maglev/maglev-graph-builder.cc
@@ -12487,7 +12487,13 @@
CreateHeapNumber(node->Cast<Float64Constant>()->value()),
allocation_type);
} else {
- node = GetTaggedValue(node);
+ ValueNode* new_node = GetTaggedValue(node);
+ if (new_node != node && new_node->properties().can_allocate()) {
+ // TODO(olivf): Remove this and instead always clear when we
+ // emit an allocating instruction.
+ ClearCurrentAllocationBlock();
+ }
+ node = new_node;
}
values[i] = node;
}

View File

@@ -1,106 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Shu-yu Guo <syg@chromium.org>
Date: Mon, 18 Nov 2024 16:02:28 -0800
Subject: Merged: [interpreter] Fix hole elision scope for switch jump tables
(cherry picked from commit 5c3b50c26c50e68dbedf8ff991249e75e46ef06e)
Change-Id: Id6bf2b62598b85a05c6cc7bd06b6cce673d7342a
Bug: 374627491
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6042925
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Commit-Queue: Rezvan Mahdavi Hezaveh <rezvan@chromium.org>
Auto-Submit: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Rezvan Mahdavi Hezaveh <rezvan@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#18}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/interpreter/bytecode-generator.cc b/src/interpreter/bytecode-generator.cc
index c0f7bca3472c857d88fdd29b51d9dd412cfbce12..415eb3a06c5330a5e2d2be4c559746fe590219d2 100644
--- a/src/interpreter/bytecode-generator.cc
+++ b/src/interpreter/bytecode-generator.cc
@@ -2458,6 +2458,9 @@ void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Are we still using any if-else bytecodes to evaluate the switch?
bool use_jumps = n_comp_cases != 0;
+ // Does the comparison for non-jump table jumps need an elision scope?
+ bool jump_comparison_needs_hole_check_elision_scope = false;
+
SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
n_comp_cases, jump_table);
ControlScopeForBreakable scope(this, stmt, &switch_builder);
@@ -2515,6 +2518,10 @@ void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
info.covered_cases);
if (use_jumps) {
+ // When using a jump table, the first jump comparison is conditionally
+ // executed if the discriminant wasn't matched by anything in the jump
+ // table, and so needs its own elision scope.
+ jump_comparison_needs_hole_check_elision_scope = true;
builder()->LoadAccumulatorWithRegister(r1);
}
}
@@ -2535,16 +2542,14 @@ void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// The comparisons linearly dominate, so no need to open a new elision
// scope for each one.
std::optional<HoleCheckElisionScope> elider;
- bool first_jump_emitted = false;
for (int i = 0; i < clauses->length(); ++i) {
CaseClause* clause = clauses->at(i);
if (clause->is_default()) {
info.default_case = i;
} else if (!info.CaseExists(clause->label())) {
- // The first non-default label is
- // unconditionally executed, so we only need to emplace it before
- // visiting the second non-default label.
- if (first_jump_emitted) elider.emplace(this);
+ if (jump_comparison_needs_hole_check_elision_scope && !elider) {
+ elider.emplace(this);
+ }
// Perform label comparison as if via '===' with tag.
VisitForAccumulatorValue(clause->label());
@@ -2555,7 +2560,9 @@ void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
#endif
switch_builder.JumpToCaseIfTrue(ToBooleanMode::kAlreadyBoolean,
case_compare_ctr++);
- first_jump_emitted = true;
+ // The second and subsequent non-default comparisons are always
+ // conditionally executed, and need an elision scope.
+ jump_comparison_needs_hole_check_elision_scope = true;
}
}
}
diff --git a/test/mjsunit/regress/regress-374627491.js b/test/mjsunit/regress/regress-374627491.js
new file mode 100644
index 0000000000000000000000000000000000000000..ebb7e1d93f788f10606b4787cfacd79c3807ca0c
--- /dev/null
+++ b/test/mjsunit/regress/regress-374627491.js
@@ -0,0 +1,26 @@
+// Copyright 2024 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B { }
+class C extends B {
+ constructor() {
+ let x = 0;
+ switch (0) {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ case 4:
+ case 5:
+ case 6:
+ case 7:
+ case 8:
+ case 9:
+ x += this;
+ break;
+ case this:
+ }
+ }
+}
+assertThrows(() => { new C(); }, ReferenceError);

View File

@@ -0,0 +1,910 @@
From 3fdedec45691a3ab005d62c3295436507e8d277a Mon Sep 17 00:00:00 2001
From: Clemens Backes <clemensb@chromium.org>
Date: Tue, 19 Nov 2024 18:17:33 +0100
Subject: [PATCH] Merged: [wasm] Remove relative type indexes from canonical types
Those relative types were leaking from the type canonicalizer, which
leads to type confusion in callers.
This CL fully removes the concept of relative type indexes (and thus
removes the `CanonicalRelativeField` bit from the bitfield in
`ValueTypeBase`). During canonicalization we pass the start and end of
the recursion group into hashing and equality checking, and use this to
compute relative indexes within the recursion group on demand. The
stored version will always have absolute indexes though.
R=jkummerow@chromium.org
Bug: 379009132
(cherry picked from commit 20d9a7f760c018183c836283017a321638b66810)
Change-Id: I9bee6b37b9da36684f8c5b2866725eac79c896ad
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6049645
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#22}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
---
diff --git a/src/base/bounds.h b/src/base/bounds.h
index 85f7bba..1646e81 100644
--- a/src/base/bounds.h
+++ b/src/base/bounds.h
@@ -14,9 +14,11 @@
// Checks if value is in range [lower_limit, higher_limit] using a single
// branch.
template <typename T, typename U>
+ requires((std::is_integral_v<T> || std::is_enum_v<T>) &&
+ (std::is_integral_v<U> || std::is_enum_v<U>)) &&
+ (sizeof(U) <= sizeof(T))
inline constexpr bool IsInRange(T value, U lower_limit, U higher_limit) {
DCHECK_LE(lower_limit, higher_limit);
- static_assert(sizeof(U) <= sizeof(T));
using unsigned_T = typename std::make_unsigned<T>::type;
// Use static_cast to support enum classes.
return static_cast<unsigned_T>(static_cast<unsigned_T>(value) -
@@ -27,10 +29,12 @@
// Like IsInRange but for the half-open range [lower_limit, higher_limit).
template <typename T, typename U>
+ requires((std::is_integral_v<T> || std::is_enum_v<T>) &&
+ (std::is_integral_v<U> || std::is_enum_v<U>)) &&
+ (sizeof(U) <= sizeof(T))
inline constexpr bool IsInHalfOpenRange(T value, U lower_limit,
U higher_limit) {
DCHECK_LE(lower_limit, higher_limit);
- static_assert(sizeof(U) <= sizeof(T));
using unsigned_T = typename std::make_unsigned<T>::type;
// Use static_cast to support enum classes.
return static_cast<unsigned_T>(static_cast<unsigned_T>(value) -
diff --git a/src/wasm/canonical-types.cc b/src/wasm/canonical-types.cc
index 3443018..2ecb78b 100644
--- a/src/wasm/canonical-types.cc
+++ b/src/wasm/canonical-types.cc
@@ -43,11 +43,17 @@
// Multiple threads could try to register recursive groups concurrently.
// TODO(manoskouk): Investigate if we can fine-grain the synchronization.
base::MutexGuard mutex_guard(&mutex_);
+ // Compute the first canonical index in the recgroup in the case that it does
+ // not already exist.
+ CanonicalTypeIndex first_new_canonical_index{
+ static_cast<uint32_t>(canonical_supertypes_.size())};
+
DCHECK_GE(module->types.size(), start_index + size);
- CanonicalGroup group{&zone_, size};
+ CanonicalGroup group{&zone_, size, first_new_canonical_index};
for (uint32_t i = 0; i < size; i++) {
- group.types[i] = CanonicalizeTypeDef(module, module->types[start_index + i],
- start_index);
+ group.types[i] = CanonicalizeTypeDef(
+ module, ModuleTypeIndex{start_index + i}, ModuleTypeIndex{start_index},
+ first_new_canonical_index);
}
if (CanonicalTypeIndex canonical_index = FindCanonicalGroup(group);
canonical_index.valid()) {
@@ -62,22 +68,13 @@
// allocated in {CanonicalizeTypeDef{).
return;
}
- // Identical group not found. Add new canonical representatives for the new
- // types.
- uint32_t first_canonical_index =
- static_cast<uint32_t>(canonical_supertypes_.size());
- canonical_supertypes_.resize(first_canonical_index + size);
+ canonical_supertypes_.resize(first_new_canonical_index.index + size);
CheckMaxCanonicalIndex();
for (uint32_t i = 0; i < size; i++) {
CanonicalType& canonical_type = group.types[i];
- // Compute the canonical index of the supertype: If it is relative, we
- // need to add {first_canonical_index}.
- canonical_supertypes_[first_canonical_index + i] =
- canonical_type.is_relative_supertype
- ? CanonicalTypeIndex{canonical_type.supertype.index +
- first_canonical_index}
- : canonical_type.supertype;
- CanonicalTypeIndex canonical_id{first_canonical_index + i};
+ canonical_supertypes_[first_new_canonical_index.index + i] =
+ canonical_type.supertype;
+ CanonicalTypeIndex canonical_id{first_new_canonical_index.index + i};
module->isorecursive_canonical_type_ids[start_index + i] = canonical_id;
if (canonical_type.kind == CanonicalType::kFunction) {
const CanonicalSig* sig = canonical_type.function_sig;
@@ -85,15 +82,13 @@
}
}
// Check that this canonical ID is not used yet.
- DCHECK(std::none_of(canonical_singleton_groups_.begin(),
- canonical_singleton_groups_.end(), [=](auto& entry) {
- return entry.second.index == first_canonical_index;
- }));
- DCHECK(std::none_of(canonical_groups_.begin(), canonical_groups_.end(),
- [=](auto& entry) {
- return entry.second.index == first_canonical_index;
- }));
- canonical_groups_.emplace(group, CanonicalTypeIndex{first_canonical_index});
+ DCHECK(std::none_of(
+ canonical_singleton_groups_.begin(), canonical_singleton_groups_.end(),
+ [=](auto& entry) { return entry.index == first_new_canonical_index; }));
+ DCHECK(std::none_of(
+ canonical_groups_.begin(), canonical_groups_.end(),
+ [=](auto& entry) { return entry.start == first_new_canonical_index; }));
+ canonical_groups_.emplace(group);
}
void TypeCanonicalizer::AddRecursiveSingletonGroup(WasmModule* module) {
@@ -105,8 +100,11 @@
uint32_t start_index) {
base::MutexGuard guard(&mutex_);
DCHECK_GT(module->types.size(), start_index);
- CanonicalTypeIndex canonical_index = AddRecursiveGroup(
- CanonicalizeTypeDef(module, module->types[start_index], start_index));
+ CanonicalTypeIndex first_new_canonical_index{
+ static_cast<uint32_t>(canonical_supertypes_.size())};
+ CanonicalTypeIndex canonical_index = AddRecursiveGroup(CanonicalizeTypeDef(
+ module, ModuleTypeIndex{start_index}, ModuleTypeIndex{start_index},
+ first_new_canonical_index));
module->isorecursive_canonical_type_ids[start_index] = canonical_index;
}
@@ -118,7 +116,6 @@
#endif
const bool kFinal = true;
const bool kNotShared = false;
- const bool kNonRelativeSupertype = false;
// Because of the checks above, we can treat the type_def as canonical.
// TODO(366180605): It would be nice to not have to rely on a cast here.
// Is there a way to avoid it? In the meantime, these asserts provide at
@@ -127,13 +124,14 @@
static_assert(CanonicalValueType::Primitive(kI32).raw_bit_field() ==
ValueType::Primitive(kI32).raw_bit_field());
CanonicalType canonical{reinterpret_cast<const CanonicalSig*>(sig),
- CanonicalTypeIndex{kNoSuperType}, kFinal, kNotShared,
- kNonRelativeSupertype};
+ CanonicalTypeIndex{kNoSuperType}, kFinal, kNotShared};
base::MutexGuard guard(&mutex_);
// Fast path lookup before canonicalizing (== copying into the
// TypeCanonicalizer's zone) the function signature.
- CanonicalTypeIndex index =
- FindCanonicalGroup(CanonicalSingletonGroup{canonical});
+ CanonicalTypeIndex hypothetical_new_canonical_index{
+ static_cast<uint32_t>(canonical_supertypes_.size())};
+ CanonicalTypeIndex index = FindCanonicalGroup(
+ CanonicalSingletonGroup{canonical, hypothetical_new_canonical_index});
if (index.valid()) return index;
// Copy into this class's zone, then call the generic {AddRecursiveGroup}.
CanonicalSig::Builder builder(&zone_, sig->return_count(),
@@ -145,12 +143,16 @@
builder.AddParam(CanonicalValueType{param});
}
canonical.function_sig = builder.Get();
- return AddRecursiveGroup(canonical);
+ CanonicalTypeIndex canonical_index = AddRecursiveGroup(canonical);
+ DCHECK_EQ(canonical_index, hypothetical_new_canonical_index);
+ return canonical_index;
}
CanonicalTypeIndex TypeCanonicalizer::AddRecursiveGroup(CanonicalType type) {
mutex_.AssertHeld(); // The caller must hold the mutex.
- CanonicalSingletonGroup group{type};
+ CanonicalTypeIndex new_canonical_index{
+ static_cast<uint32_t>(canonical_supertypes_.size())};
+ CanonicalSingletonGroup group{type, new_canonical_index};
if (CanonicalTypeIndex index = FindCanonicalGroup(group); index.valid()) {
// Make sure this signature can be looked up later.
DCHECK_IMPLIES(type.kind == CanonicalType::kFunction,
@@ -158,26 +160,21 @@
return index;
}
static_assert(kMaxCanonicalTypes <= kMaxUInt32);
- CanonicalTypeIndex index{static_cast<uint32_t>(canonical_supertypes_.size())};
// Check that this canonical ID is not used yet.
- DCHECK(std::none_of(canonical_singleton_groups_.begin(),
- canonical_singleton_groups_.end(),
- [=](auto& entry) { return entry.second == index; }));
- DCHECK(std::none_of(canonical_groups_.begin(), canonical_groups_.end(),
- [=](auto& entry) { return entry.second == index; }));
- canonical_singleton_groups_.emplace(group, index);
- // Compute the canonical index of the supertype: If it is relative, we
- // need to add {canonical_index}.
- canonical_supertypes_.push_back(
- type.is_relative_supertype
- ? CanonicalTypeIndex{type.supertype.index + index.index}
- : type.supertype);
+ DCHECK(std::none_of(
+ canonical_singleton_groups_.begin(), canonical_singleton_groups_.end(),
+ [=](auto& entry) { return entry.index == new_canonical_index; }));
+ DCHECK(std::none_of(
+ canonical_groups_.begin(), canonical_groups_.end(),
+ [=](auto& entry) { return entry.start == new_canonical_index; }));
+ canonical_singleton_groups_.emplace(group);
+ canonical_supertypes_.push_back(type.supertype);
if (type.kind == CanonicalType::kFunction) {
const CanonicalSig* sig = type.function_sig;
- CHECK(canonical_function_sigs_.emplace(index, sig).second);
+ CHECK(canonical_function_sigs_.emplace(new_canonical_index, sig).second);
}
CheckMaxCanonicalIndex();
- return index;
+ return new_canonical_index;
}
const CanonicalSig* TypeCanonicalizer::LookupFunctionSignature(
@@ -194,34 +191,22 @@
{kPredefinedArrayI16Index, {kWasmI16}}};
for (auto [index, element_type] : kPredefinedArrayTypes) {
DCHECK_EQ(index.index, canonical_singleton_groups_.size());
- CanonicalSingletonGroup group;
static constexpr bool kMutable = true;
// TODO(jkummerow): Decide whether this should be final or nonfinal.
static constexpr bool kFinal = true;
static constexpr bool kShared = false; // TODO(14616): Fix this.
- static constexpr bool kNonRelativeSupertype = false;
CanonicalArrayType* type =
zone_.New<CanonicalArrayType>(element_type, kMutable);
- group.type = CanonicalType(type, CanonicalTypeIndex{kNoSuperType}, kFinal,
- kShared, kNonRelativeSupertype);
- canonical_singleton_groups_.emplace(group, index);
+ CanonicalSingletonGroup group{
+ .type = CanonicalType(type, CanonicalTypeIndex{kNoSuperType}, kFinal,
+ kShared),
+ .index = index};
+ canonical_singleton_groups_.emplace(group);
canonical_supertypes_.emplace_back(CanonicalTypeIndex{kNoSuperType});
DCHECK_LE(canonical_supertypes_.size(), kMaxCanonicalTypes);
}
}
-CanonicalValueType TypeCanonicalizer::CanonicalizeValueType(
- const WasmModule* module, ValueType type,
- uint32_t recursive_group_start) const {
- if (!type.has_index()) return CanonicalValueType{type};
- static_assert(kMaxCanonicalTypes <= (1u << ValueType::kHeapTypeBits));
- return type.ref_index().index >= recursive_group_start
- ? CanonicalValueType::WithRelativeIndex(
- type.kind(), type.ref_index().index - recursive_group_start)
- : CanonicalValueType::FromIndex(
- type.kind(), module->canonical_type_id(type.ref_index()));
-}
-
bool TypeCanonicalizer::IsCanonicalSubtype(CanonicalTypeIndex sub_index,
CanonicalTypeIndex super_index) {
// Fast path without synchronization:
@@ -259,62 +244,75 @@
}
TypeCanonicalizer::CanonicalType TypeCanonicalizer::CanonicalizeTypeDef(
- const WasmModule* module, TypeDefinition type,
- uint32_t recursive_group_start) {
+ const WasmModule* module, ModuleTypeIndex module_type_idx,
+ ModuleTypeIndex recgroup_start,
+ CanonicalTypeIndex canonical_recgroup_start) {
mutex_.AssertHeld(); // The caller must hold the mutex.
- CanonicalTypeIndex supertype{kNoSuperType};
- bool is_relative_supertype = false;
- if (type.supertype.index < recursive_group_start) {
- supertype = module->canonical_type_id(type.supertype);
- } else if (type.supertype.valid()) {
- supertype =
- CanonicalTypeIndex{type.supertype.index - recursive_group_start};
- is_relative_supertype = true;
- }
+
+ auto CanonicalizeTypeIndex = [=](ModuleTypeIndex type_index) {
+ DCHECK(type_index.valid());
+ return type_index < recgroup_start
+ // This references a type from an earlier recgroup; use the
+ // already-canonicalized type index.
+ ? module->canonical_type_id(type_index)
+ // For types within the same recgroup, generate indexes assuming
+ // that this is a new canonical recgroup.
+ : CanonicalTypeIndex{canonical_recgroup_start.index +
+ (type_index.index - recgroup_start.index)};
+ };
+
+ auto CanonicalizeValueType = [=](ValueType type) {
+ if (!type.has_index()) return CanonicalValueType{type};
+ static_assert(kMaxCanonicalTypes <= (1u << ValueType::kHeapTypeBits));
+ return CanonicalValueType::FromIndex(
+ type.kind(), CanonicalizeTypeIndex(type.ref_index()));
+ };
+
+ TypeDefinition type = module->type(module_type_idx);
+ CanonicalTypeIndex supertype = type.supertype.valid()
+ ? CanonicalizeTypeIndex(type.supertype)
+ : CanonicalTypeIndex::Invalid();
switch (type.kind) {
case TypeDefinition::kFunction: {
const FunctionSig* original_sig = type.function_sig;
CanonicalSig::Builder builder(&zone_, original_sig->return_count(),
original_sig->parameter_count());
for (ValueType ret : original_sig->returns()) {
- builder.AddReturn(
- CanonicalizeValueType(module, ret, recursive_group_start));
+ builder.AddReturn(CanonicalizeValueType(ret));
}
for (ValueType param : original_sig->parameters()) {
- builder.AddParam(
- CanonicalizeValueType(module, param, recursive_group_start));
+ builder.AddParam(CanonicalizeValueType(param));
}
return CanonicalType(builder.Get(), supertype, type.is_final,
- type.is_shared, is_relative_supertype);
+ type.is_shared);
}
case TypeDefinition::kStruct: {
const StructType* original_type = type.struct_type;
CanonicalStructType::Builder builder(&zone_,
original_type->field_count());
for (uint32_t i = 0; i < original_type->field_count(); i++) {
- builder.AddField(CanonicalizeValueType(module, original_type->field(i),
- recursive_group_start),
+ builder.AddField(CanonicalizeValueType(original_type->field(i)),
original_type->mutability(i),
original_type->field_offset(i));
}
builder.set_total_fields_size(original_type->total_fields_size());
return CanonicalType(
builder.Build(CanonicalStructType::Builder::kUseProvidedOffsets),
- supertype, type.is_final, type.is_shared, is_relative_supertype);
+ supertype, type.is_final, type.is_shared);
}
case TypeDefinition::kArray: {
- CanonicalValueType element_type = CanonicalizeValueType(
- module, type.array_type->element_type(), recursive_group_start);
+ CanonicalValueType element_type =
+ CanonicalizeValueType(type.array_type->element_type());
CanonicalArrayType* array_type = zone_.New<CanonicalArrayType>(
element_type, type.array_type->mutability());
- return CanonicalType(array_type, supertype, type.is_final, type.is_shared,
- is_relative_supertype);
+ return CanonicalType(array_type, supertype, type.is_final,
+ type.is_shared);
}
}
}
// Returns the index of the canonical representative of the first type in this
-// group, or -1 if an identical group does not exist.
+// group if it exists, and `CanonicalTypeIndex::Invalid()` otherwise.
CanonicalTypeIndex TypeCanonicalizer::FindCanonicalGroup(
const CanonicalGroup& group) const {
// Groups of size 0 do not make sense here; groups of size 1 should use
@@ -322,7 +320,7 @@
DCHECK_LT(1, group.types.size());
auto it = canonical_groups_.find(group);
return it == canonical_groups_.end() ? CanonicalTypeIndex::Invalid()
- : it->second;
+ : it->start;
}
// Returns the canonical index of the given group if it already exists.
@@ -330,10 +328,8 @@
const CanonicalSingletonGroup& group) const {
auto it = canonical_singleton_groups_.find(group);
static_assert(kMaxCanonicalTypes <= kMaxInt);
- if (it == canonical_singleton_groups_.end()) {
- return CanonicalTypeIndex::Invalid();
- }
- return it->second;
+ return it == canonical_singleton_groups_.end() ? CanonicalTypeIndex::Invalid()
+ : it->index;
}
size_t TypeCanonicalizer::EstimateCurrentMemoryConsumption() const {
diff --git a/src/wasm/canonical-types.h b/src/wasm/canonical-types.h
index 64cfc5c..42cb526 100644
--- a/src/wasm/canonical-types.h
+++ b/src/wasm/canonical-types.h
@@ -11,6 +11,7 @@
#include <unordered_map>
+#include "src/base/bounds.h"
#include "src/base/functional.h"
#include "src/wasm/value-type.h"
#include "src/wasm/wasm-module.h"
@@ -144,106 +145,226 @@
bool is_final = false;
bool is_shared = false;
uint8_t subtyping_depth = 0;
- bool is_relative_supertype;
constexpr CanonicalType(const CanonicalSig* sig,
CanonicalTypeIndex supertype, bool is_final,
- bool is_shared, bool is_relative_supertype)
+ bool is_shared)
: function_sig(sig),
supertype(supertype),
kind(kFunction),
is_final(is_final),
- is_shared(is_shared),
- is_relative_supertype(is_relative_supertype) {}
+ is_shared(is_shared) {}
constexpr CanonicalType(const CanonicalStructType* type,
CanonicalTypeIndex supertype, bool is_final,
- bool is_shared, bool is_relative_supertype)
+ bool is_shared)
: struct_type(type),
supertype(supertype),
kind(kStruct),
is_final(is_final),
- is_shared(is_shared),
- is_relative_supertype(is_relative_supertype) {}
+ is_shared(is_shared) {}
constexpr CanonicalType(const CanonicalArrayType* type,
CanonicalTypeIndex supertype, bool is_final,
- bool is_shared, bool is_relative_supertype)
+ bool is_shared)
: array_type(type),
supertype(supertype),
kind(kArray),
is_final(is_final),
- is_shared(is_shared),
- is_relative_supertype(is_relative_supertype) {}
+ is_shared(is_shared) {}
constexpr CanonicalType() = default;
+ };
- bool operator==(const CanonicalType& other) const {
- if (supertype != other.supertype) return false;
- if (kind != other.kind) return false;
- if (is_final != other.is_final) return false;
- if (is_shared != other.is_shared) return false;
- if (is_relative_supertype != other.is_relative_supertype) return false;
- if (kind == kFunction) return *function_sig == *other.function_sig;
- if (kind == kStruct) return *struct_type == *other.struct_type;
- DCHECK_EQ(kArray, kind);
- return *array_type == *other.array_type;
+ // Define the range of a recursion group; for use in {CanonicalHashing} and
+ // {CanonicalEquality}.
+ struct RecursionGroupRange {
+ const CanonicalTypeIndex start;
+ const CanonicalTypeIndex end;
+
+ bool Contains(CanonicalTypeIndex index) const {
+ return base::IsInRange(index.index, start.index, end.index);
}
- bool operator!=(const CanonicalType& other) const {
- return !operator==(other);
+ CanonicalTypeIndex RelativeIndex(CanonicalTypeIndex index) const {
+ return Contains(index)
+ // Make the value_type relative within the recursion group.
+ ? CanonicalTypeIndex{index.index - start.index}
+ : index;
+ }
+
+ CanonicalValueType RelativeType(CanonicalValueType type) const {
+ return type.has_index()
+ ? CanonicalValueType::FromIndex(
+ type.kind(), RelativeIndex(type.ref_index()))
+ : type;
+ }
+ };
+
+ // Support for hashing of recursion groups, where type indexes have to be
+ // hashed relative to the recursion group.
+ struct CanonicalHashing {
+ base::Hasher hasher;
+ const RecursionGroupRange recgroup;
+
+ explicit CanonicalHashing(RecursionGroupRange recgroup)
+ : recgroup{recgroup} {}
+
+ void Add(CanonicalType type) {
+ CanonicalTypeIndex relative_supertype =
+ recgroup.RelativeIndex(type.supertype);
+ uint32_t metadata =
+ (relative_supertype.index << 1) | (type.is_final ? 1 : 0);
+ hasher.Add(metadata);
+ switch (type.kind) {
+ case CanonicalType::kFunction:
+ Add(*type.function_sig);
+ break;
+ case CanonicalType::kStruct:
+ Add(*type.struct_type);
+ break;
+ case CanonicalType::kArray:
+ Add(*type.array_type);
+ break;
+ }
+ }
+
+ void Add(CanonicalValueType value_type) {
+ hasher.Add(recgroup.RelativeType(value_type));
+ }
+
+ void Add(const CanonicalSig& sig) {
+ hasher.Add(sig.parameter_count());
+ for (CanonicalValueType type : sig.all()) Add(type);
+ }
+
+ void Add(const CanonicalStructType& struct_type) {
+ hasher.AddRange(struct_type.mutabilities());
+ for (const ValueTypeBase& field : struct_type.fields()) {
+ Add(CanonicalValueType{field});
+ }
+ }
+
+ void Add(const CanonicalArrayType& array_type) {
+ hasher.Add(array_type.mutability());
+ Add(array_type.element_type());
+ }
+
+ size_t hash() const { return hasher.hash(); }
+ };
+
+ // Support for equality checking of recursion groups, where type indexes have
+ // to be compared relative to their respective recursion group.
+ struct CanonicalEquality {
+ // Recursion group bounds for LHS and RHS.
+ const RecursionGroupRange recgroup1;
+ const RecursionGroupRange recgroup2;
+
+ CanonicalEquality(RecursionGroupRange recgroup1,
+ RecursionGroupRange recgroup2)
+ : recgroup1{recgroup1}, recgroup2{recgroup2} {}
+
+ bool EqualType(const CanonicalType& type1,
+ const CanonicalType& type2) const {
+ if (recgroup1.RelativeIndex(type1.supertype) !=
+ recgroup2.RelativeIndex(type2.supertype)) {
+ return false;
+ }
+ if (type1.is_final != type2.is_final) return false;
+ if (type1.is_shared != type2.is_shared) return false;
+ switch (type1.kind) {
+ case CanonicalType::kFunction:
+ return type2.kind == CanonicalType::kFunction &&
+ EqualSig(*type1.function_sig, *type2.function_sig);
+ case CanonicalType::kStruct:
+ return type2.kind == CanonicalType::kStruct &&
+ EqualStructType(*type1.struct_type, *type2.struct_type);
+ case CanonicalType::kArray:
+ return type2.kind == CanonicalType::kArray &&
+ EqualArrayType(*type1.array_type, *type2.array_type);
+ }
+ }
+
+ bool EqualTypes(base::Vector<const CanonicalType> types1,
+ base::Vector<const CanonicalType> types2) const {
+ return std::equal(types1.begin(), types1.end(), types2.begin(),
+ types2.end(),
+ std::bind_front(&CanonicalEquality::EqualType, this));
+ }
+
+ bool EqualValueType(CanonicalValueType type1,
+ CanonicalValueType type2) const {
+ return recgroup1.RelativeType(type1) == recgroup2.RelativeType(type2);
+ }
+
+ bool EqualSig(const CanonicalSig& sig1, const CanonicalSig& sig2) const {
+ if (sig1.parameter_count() != sig2.parameter_count()) return false;
+ return std::equal(
+ sig1.all().begin(), sig1.all().end(), sig2.all().begin(),
+ sig2.all().end(),
+ std::bind_front(&CanonicalEquality::EqualValueType, this));
+ }
+
+ bool EqualStructType(const CanonicalStructType& type1,
+ const CanonicalStructType& type2) const {
+ return std::equal(
+ type1.fields().begin(), type1.fields().end(), type2.fields().begin(),
+ type2.fields().end(),
+ std::bind_front(&CanonicalEquality::EqualValueType, this));
+ }
+
+ bool EqualArrayType(const CanonicalArrayType& type1,
+ const CanonicalArrayType& type2) const {
+ return type1.mutability() == type2.mutability() &&
+ EqualValueType(type1.element_type(), type2.element_type());
+ }
+ };
+
+ struct CanonicalGroup {
+ CanonicalGroup(Zone* zone, size_t size, CanonicalTypeIndex start)
+ : types(zone->AllocateVector<CanonicalType>(size)), start(start) {
+ // size >= 2; otherwise a `CanonicalSingletonGroup` should have been used.
+ DCHECK_LE(2, size);
+ }
+
+ bool operator==(const CanonicalGroup& other) const {
+ CanonicalTypeIndex end{start.index +
+ static_cast<uint32_t>(types.size() - 1)};
+ CanonicalTypeIndex other_end{
+ other.start.index + static_cast<uint32_t>(other.types.size() - 1)};
+ CanonicalEquality equality{{start, end}, {other.start, other_end}};
+ return equality.EqualTypes(types, other.types);
}
size_t hash_value() const {
- uint32_t metadata = (supertype.index << 2) | (is_final ? 2 : 0) |
- (is_relative_supertype ? 1 : 0);
- base::Hasher hasher;
- hasher.Add(metadata);
- if (kind == kFunction) {
- hasher.Add(*function_sig);
- } else if (kind == kStruct) {
- hasher.Add(*struct_type);
- } else {
- DCHECK_EQ(kArray, kind);
- hasher.Add(*array_type);
+ CanonicalTypeIndex end{start.index + static_cast<uint32_t>(types.size()) -
+ 1};
+ CanonicalHashing hasher{{start, end}};
+ for (CanonicalType t : types) {
+ hasher.Add(t);
}
return hasher.hash();
}
- };
- struct CanonicalGroup {
- CanonicalGroup(Zone* zone, size_t size)
- : types(zone->AllocateVector<CanonicalType>(size)) {}
-
- bool operator==(const CanonicalGroup& other) const {
- return types == other.types;
- }
-
- bool operator!=(const CanonicalGroup& other) const {
- return types != other.types;
- }
-
- size_t hash_value() const {
- return base::Hasher{}.AddRange(types.begin(), types.end()).hash();
- }
// The storage of this vector is the TypeCanonicalizer's zone_.
- base::Vector<CanonicalType> types;
+ const base::Vector<CanonicalType> types;
+ const CanonicalTypeIndex start;
};
struct CanonicalSingletonGroup {
- struct hash {
- size_t operator()(const CanonicalSingletonGroup& group) const {
- return group.hash_value();
- }
- };
-
bool operator==(const CanonicalSingletonGroup& other) const {
- return type == other.type;
+ CanonicalEquality equality{{index, index}, {other.index, other.index}};
+ return equality.EqualType(type, other.type);
}
- size_t hash_value() const { return type.hash_value(); }
+ size_t hash_value() const {
+ CanonicalHashing hasher{{index, index}};
+ hasher.Add(type);
+ return hasher.hash();
+ }
CanonicalType type;
+ CanonicalTypeIndex index;
};
void AddPredefinedArrayTypes();
@@ -251,30 +372,25 @@
CanonicalTypeIndex FindCanonicalGroup(const CanonicalGroup&) const;
CanonicalTypeIndex FindCanonicalGroup(const CanonicalSingletonGroup&) const;
- // Canonicalize all types present in {type} (including supertype) according to
- // {CanonicalizeValueType}.
- CanonicalType CanonicalizeTypeDef(const WasmModule* module,
- TypeDefinition type,
- uint32_t recursive_group_start);
-
- // An indexed type gets mapped to a {CanonicalValueType::WithRelativeIndex}
- // if its index points inside the new canonical group; otherwise, the index
- // gets mapped to its canonical representative.
- CanonicalValueType CanonicalizeValueType(
- const WasmModule* module, ValueType type,
- uint32_t recursive_group_start) const;
+ // Canonicalize the module-specific type at `module_type_idx` within the
+ // recursion group starting at `recursion_group_start`, using
+ // `canonical_recgroup_start` as the start offset of types within the
+ // recursion group.
+ CanonicalType CanonicalizeTypeDef(
+ const WasmModule* module, ModuleTypeIndex module_type_idx,
+ ModuleTypeIndex recgroup_start,
+ CanonicalTypeIndex canonical_recgroup_start);
CanonicalTypeIndex AddRecursiveGroup(CanonicalType type);
void CheckMaxCanonicalIndex() const;
std::vector<CanonicalTypeIndex> canonical_supertypes_;
- // Maps groups of size >=2 to the canonical id of the first type.
- std::unordered_map<CanonicalGroup, CanonicalTypeIndex,
- base::hash<CanonicalGroup>>
+ // Set of all known canonical recgroups of size >=2.
+ std::unordered_set<CanonicalGroup, base::hash<CanonicalGroup>>
canonical_groups_;
- // Maps group of size 1 to the canonical id of the type.
- std::unordered_map<CanonicalSingletonGroup, CanonicalTypeIndex,
+ // Set of all known canonical recgroups of size 1.
+ std::unordered_set<CanonicalSingletonGroup,
base::hash<CanonicalSingletonGroup>>
canonical_singleton_groups_;
// Maps canonical indices back to the function signature.
diff --git a/src/wasm/std-object-sizes.h b/src/wasm/std-object-sizes.h
index 6b16ef2..e1b11d3 100644
--- a/src/wasm/std-object-sizes.h
+++ b/src/wasm/std-object-sizes.h
@@ -45,8 +45,8 @@
return raw * 4 / 3;
}
-template <typename T>
-inline size_t ContentSize(std::unordered_set<T> set) {
+template <typename T, typename Hash>
+inline size_t ContentSize(const std::unordered_set<T, Hash>& set) {
// Very rough lower bound approximation: two internal pointers per entry.
size_t raw = set.size() * (sizeof(T) + 2 * sizeof(void*));
// In the spirit of computing lower bounds of definitely-used memory,
diff --git a/src/wasm/struct-types.h b/src/wasm/struct-types.h
index 1e2b02b..ae85c0d 100644
--- a/src/wasm/struct-types.h
+++ b/src/wasm/struct-types.h
@@ -266,8 +266,9 @@
// Support base::hash<StructTypeBase>.
inline size_t hash_value(const StructTypeBase& type) {
+ // Note: If you update this you probably also want to update
+ // `CanonicalHashing::Add(CanonicalStructType)`.
return base::Hasher{}
- .Add(type.field_count())
.AddRange(type.fields())
.AddRange(type.mutabilities())
.hash();
@@ -324,6 +325,8 @@
return base::Hasher::Combine(type.element_type(), type.mutability());
}
inline size_t hash_value(const CanonicalArrayType& type) {
+ // Note: If you update this you probably also want to update
+ // `CanonicalHashing::Add(CanonicalArrayType)`.
return base::Hasher::Combine(type.element_type(), type.mutability());
}
diff --git a/src/wasm/value-type.h b/src/wasm/value-type.h
index 906da41..e823698 100644
--- a/src/wasm/value-type.h
+++ b/src/wasm/value-type.h
@@ -63,13 +63,12 @@
struct ModuleTypeIndex : public TypeIndex {
inline static constexpr ModuleTypeIndex Invalid();
// Can't use "=default" because the base class doesn't have operator<=>.
- bool operator==(const ModuleTypeIndex& other) const {
- return index == other.index;
- }
- auto operator<=>(const ModuleTypeIndex& other) const {
+ bool operator==(ModuleTypeIndex other) const { return index == other.index; }
+ auto operator<=>(ModuleTypeIndex other) const {
return index <=> other.index;
}
};
+ASSERT_TRIVIALLY_COPYABLE(ModuleTypeIndex);
constexpr ModuleTypeIndex ModuleTypeIndex::Invalid() {
return ModuleTypeIndex{ModuleTypeIndex::kInvalid};
@@ -78,13 +77,14 @@
struct CanonicalTypeIndex : public TypeIndex {
inline static constexpr CanonicalTypeIndex Invalid();
- bool operator==(const CanonicalTypeIndex& other) const {
+ bool operator==(CanonicalTypeIndex other) const {
return index == other.index;
}
- auto operator<=>(const CanonicalTypeIndex& other) const {
+ auto operator<=>(CanonicalTypeIndex other) const {
return index <=> other.index;
}
};
+ASSERT_TRIVIALLY_COPYABLE(CanonicalTypeIndex);
constexpr CanonicalTypeIndex CanonicalTypeIndex::Invalid() {
return CanonicalTypeIndex{CanonicalTypeIndex::kInvalid};
@@ -610,8 +610,6 @@
// A ValueType is encoded by two components: a ValueKind and a heap
// representation (for reference types/rtts). Those are encoded into 32 bits
// using base::BitField.
-// ValueType encoding includes an additional bit marking the index of a type as
-// relative. This should only be used during type canonicalization.
// {ValueTypeBase} shouldn't be used directly; code should be using one of
// the subclasses. To enforce this, the public interface is limited to
// type index agnostic getters.
@@ -849,7 +847,7 @@
/**************************** Static constants ******************************/
static constexpr int kKindBits = 5;
static constexpr int kHeapTypeBits = 20;
- static constexpr int kLastUsedBit = 25;
+ static constexpr int kLastUsedBit = 24;
static const intptr_t kBitFieldOffset;
@@ -908,17 +906,12 @@
using KindField = base::BitField<ValueKind, 0, kKindBits>;
using HeapTypeField = KindField::Next<uint32_t, kHeapTypeBits>;
- // Marks a type as a canonical type which uses an index relative to its
- // recursive group start. Used only during type canonicalization.
- using CanonicalRelativeField = HeapTypeField::Next<bool, 1>;
static_assert(kV8MaxWasmTypes < (1u << kHeapTypeBits),
"Type indices fit in kHeapTypeBits");
// This is implemented defensively against field order changes.
- static_assert(kLastUsedBit ==
- std::max(KindField::kLastUsedBit,
- std::max(HeapTypeField::kLastUsedBit,
- CanonicalRelativeField::kLastUsedBit)),
+ static_assert(kLastUsedBit == std::max(KindField::kLastUsedBit,
+ HeapTypeField::kLastUsedBit),
"kLastUsedBit is consistent");
constexpr explicit ValueTypeBase(uint32_t bit_field)
@@ -1058,13 +1051,6 @@
KindField::encode(kind) | HeapTypeField::encode(index.index))};
}
- static constexpr CanonicalValueType WithRelativeIndex(ValueKind kind,
- uint32_t index) {
- return CanonicalValueType{
- ValueTypeBase(KindField::encode(kind) | HeapTypeField::encode(index) |
- CanonicalRelativeField::encode(true))};
- }
-
static constexpr CanonicalValueType FromRawBitField(uint32_t bit_field) {
return CanonicalValueType{ValueTypeBase::FromRawBitField(bit_field)};
}
@@ -1079,10 +1065,6 @@
constexpr CanonicalTypeIndex ref_index() const {
return CanonicalTypeIndex{ValueTypeBase::ref_index()};
}
-
- constexpr bool is_canonical_relative() const {
- return has_index() && CanonicalRelativeField::decode(bit_field_);
- }
};
ASSERT_TRIVIALLY_COPYABLE(CanonicalValueType);
diff --git a/src/wasm/wasm-objects.cc b/src/wasm/wasm-objects.cc
index b80b809..fbe6735 100644
--- a/src/wasm/wasm-objects.cc
+++ b/src/wasm/wasm-objects.cc
@@ -2689,7 +2689,7 @@
bool WasmExportedFunctionData::MatchesSignature(
wasm::CanonicalTypeIndex other_canonical_type_index) {
- return wasm::GetWasmEngine()->type_canonicalizer()->IsCanonicalSubtype(
+ return wasm::GetTypeCanonicalizer()->IsCanonicalSubtype(
sig_index(), other_canonical_type_index);
}
diff --git a/test/unittests/wasm/subtyping-unittest.cc b/test/unittests/wasm/subtyping-unittest.cc
index 4d02be7..33a8649 100644
--- a/test/unittests/wasm/subtyping-unittest.cc
+++ b/test/unittests/wasm/subtyping-unittest.cc
@@ -74,9 +74,15 @@
// Set up two identical modules.
for (WasmModule* module : {module1, module2}) {
- /* 0 */ DefineStruct(module, {mut(ref(2)), immut(refNull(2))});
- /* 1 */ DefineStruct(module, {mut(ref(2)), immut(ref(2))}, Idx{0});
- /* 2 */ DefineArray(module, immut(ref(0)));
+ // Three mutually recursive types.
+ /* 0 */ DefineStruct(module, {mut(ref(2)), immut(refNull(2))},
+ kNoSuperType, false, false, false);
+ /* 1 */ DefineStruct(module, {mut(ref(2)), immut(ref(2))}, Idx{0}, false,
+ false, false);
+ /* 2 */ DefineArray(module, immut(ref(0)), kNoSuperType, false, false,
+ false);
+ GetTypeCanonicalizer()->AddRecursiveGroup(module, 3);
+
/* 3 */ DefineArray(module, immut(ref(1)), Idx{2});
/* 4 */ DefineStruct(module, {mut(ref(2)), immut(ref(3)), immut(kWasmF64)},
Idx{1});

View File

@@ -1,190 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Darius Mercadier <dmercadier@chromium.org>
Date: Tue, 14 Jan 2025 08:51:44 +0100
Subject: Merged: [maglev] Fix Phi untagging bug with CheckNumber(HoleyFloat64)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
So far, CheckNumber(untagged phi) was always removed, but for a
holey float64, CheckNumber should fail on the hole nan.
Bug: chromium:389330329
(cherry picked from commit cc4c963c72b8da0c7c927680ef856cebdd87d60c)
Change-Id: I4bc634789e2d8d7f364e36b69dceeeb2886c54b1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6198231
Reviewed-by: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Olivier Flückiger <olivf@chromium.org>
Auto-Submit: Darius Mercadier <dmercadier@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#68}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/compiler/turboshaft/maglev-graph-building-phase.cc b/src/compiler/turboshaft/maglev-graph-building-phase.cc
index ac201ef2b17a9aeef14df98537e9abbde54055d7..806087bf2b6c38450c212a86296b93c731a3d125 100644
--- a/src/compiler/turboshaft/maglev-graph-building-phase.cc
+++ b/src/compiler/turboshaft/maglev-graph-building-phase.cc
@@ -2372,7 +2372,14 @@ class GraphBuilder {
__ DeoptimizeIf(RootEqual(node->object_input(), RootIndex::kTheHoleValue),
frame_state, DeoptimizeReason::kHole,
node->eager_deopt_info()->feedback_to_update());
- SetMap(node, Map(node->object_input()));
+ return maglev::ProcessResult::kContinue;
+ }
+ maglev::ProcessResult Process(maglev::CheckHoleyFloat64NotHole* node,
+ const maglev::ProcessingState& state) {
+ GET_FRAME_STATE_MAYBE_ABORT(frame_state, node->eager_deopt_info());
+ __ DeoptimizeIf(__ Float64IsHole(Map(node->float64_input())), frame_state,
+ DeoptimizeReason::kHole,
+ node->eager_deopt_info()->feedback_to_update());
return maglev::ProcessResult::kContinue;
}
diff --git a/src/maglev/maglev-ir.cc b/src/maglev/maglev-ir.cc
index ab3d63c2e2e18332acf9b1a5fab2a6764124debe..1598458cc5902c91d6fae55f63365d25e01e1198 100644
--- a/src/maglev/maglev-ir.cc
+++ b/src/maglev/maglev-ir.cc
@@ -109,7 +109,7 @@ void NodeBase::CheckCanOverwriteWith(Opcode new_opcode,
#define CASE(op) \
case Opcode::k##op: { \
DCHECK_EQ(old_input_count, StaticInputCount(static_cast<op*>(this))); \
- DCHECK_EQ(sizeof(op), old_sizeof); \
+ DCHECK_LE(sizeof(op), old_sizeof); \
break; \
}
NODE_BASE_LIST(CASE)
@@ -3387,6 +3387,19 @@ void CheckNotHole::GenerateCode(MaglevAssembler* masm,
__ EmitEagerDeoptIf(kEqual, DeoptimizeReason::kHole, this);
}
+void CheckHoleyFloat64NotHole::SetValueLocationConstraints() {
+ UseRegister(float64_input());
+ set_temporaries_needed(1);
+}
+void CheckHoleyFloat64NotHole::GenerateCode(MaglevAssembler* masm,
+ const ProcessingState& state) {
+ MaglevAssembler::TemporaryRegisterScope temps(masm);
+ Register scratch = temps.AcquireScratch();
+ __ JumpIfHoleNan(ToDoubleRegister(float64_input()), scratch,
+ __ GetDeoptLabel(this, DeoptimizeReason::kHole),
+ Label::kFar);
+}
+
void ConvertHoleToUndefined::SetValueLocationConstraints() {
UseRegister(object_input());
DefineSameAsFirst(this);
diff --git a/src/maglev/maglev-ir.h b/src/maglev/maglev-ir.h
index 1464025ef38bef809ce2a7429c7ddc8dcbd73798..a82d5c8ef1a20b1cdb4aaad939b0c1ffb58fe402 100644
--- a/src/maglev/maglev-ir.h
+++ b/src/maglev/maglev-ir.h
@@ -310,6 +310,7 @@ class ExceptionHandlerInfo;
V(CheckMapsWithMigration) \
V(CheckDetectableCallable) \
V(CheckNotHole) \
+ V(CheckHoleyFloat64NotHole) \
V(CheckNumber) \
V(CheckSmi) \
V(CheckString) \
@@ -9262,6 +9263,24 @@ class CheckNotHole : public FixedInputNodeT<1, CheckNotHole> {
void PrintParams(std::ostream&, MaglevGraphLabeller*) const {}
};
+class CheckHoleyFloat64NotHole
+ : public FixedInputNodeT<1, CheckHoleyFloat64NotHole> {
+ using Base = FixedInputNodeT<1, CheckHoleyFloat64NotHole>;
+
+ public:
+ explicit CheckHoleyFloat64NotHole(uint64_t bitfield) : Base(bitfield) {}
+
+ static constexpr OpProperties kProperties = OpProperties::EagerDeopt();
+ static constexpr
+ typename Base::InputTypes kInputTypes{ValueRepresentation::kHoleyFloat64};
+
+ Input& float64_input() { return input(0); }
+
+ void SetValueLocationConstraints();
+ void GenerateCode(MaglevAssembler*, const ProcessingState&);
+ void PrintParams(std::ostream&, MaglevGraphLabeller*) const {}
+};
+
class ConvertHoleToUndefined
: public FixedInputValueNodeT<1, ConvertHoleToUndefined> {
using Base = FixedInputValueNodeT<1, ConvertHoleToUndefined>;
diff --git a/src/maglev/maglev-phi-representation-selector.cc b/src/maglev/maglev-phi-representation-selector.cc
index b4d913dd45a488d70c92436bdbf173425c39d4c5..05054ab5cf06f356de929f74acef9c429d89fdea 100644
--- a/src/maglev/maglev-phi-representation-selector.cc
+++ b/src/maglev/maglev-phi-representation-selector.cc
@@ -772,13 +772,23 @@ ProcessResult MaglevPhiRepresentationSelector::UpdateNodePhiInput(
ProcessResult MaglevPhiRepresentationSelector::UpdateNodePhiInput(
CheckNumber* node, Phi* phi, int input_index,
const ProcessingState& state) {
- if (phi->value_representation() != ValueRepresentation::kTagged) {
- // The phi was untagged, so we know that it's a number. We thus remove this
- // CheckNumber from the graph.
- return ProcessResult::kRemove;
+ switch (phi->value_representation()) {
+ case ValueRepresentation::kInt32:
+ case ValueRepresentation::kFloat64:
+ // The phi was untagged to a Int32 or Float64, so we know that it's a
+ // number. We thus remove this CheckNumber from the graph.
+ return ProcessResult::kRemove;
+ case ValueRepresentation::kHoleyFloat64:
+ // We need to check that the phi is not the hole nan.
+ node->OverwriteWith<CheckHoleyFloat64NotHole>();
+ return ProcessResult::kContinue;
+ case ValueRepresentation::kTagged:
+ // {phi} wasn't untagged, so we don't need to do anything.
+ return ProcessResult::kContinue;
+ case ValueRepresentation::kUint32:
+ case ValueRepresentation::kIntPtr:
+ UNREACHABLE();
}
- return UpdateNodePhiInput(static_cast<NodeBase*>(node), phi, input_index,
- state);
}
// If the input of a StoreTaggedFieldNoWriteBarrier was a Phi that got
diff --git a/test/mjsunit/maglev/regress-389330329.js b/test/mjsunit/maglev/regress-389330329.js
new file mode 100644
index 0000000000000000000000000000000000000000..fb7500ffdc7800276a4e8257e31c0044017f0402
--- /dev/null
+++ b/test/mjsunit/maglev/regress-389330329.js
@@ -0,0 +1,37 @@
+// Copyright 2025 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --no-maglev-loop-peeling
+// Flags: --maglev --no-always-turbofan
+
+let obj = { y: 19.5 };
+let arr = [, 2.5];
+function foo(limit) {
+ let val = arr[0];
+
+ for (let i = 0; i < limit; i += 1) {
+ i += val;
+ val = 40;
+ }
+
+ try { val.meh(); } catch (e) {}
+
+ obj.y = val;
+}
+
+%PrepareFunctionForOptimization(foo);
+foo(1);
+
+%OptimizeMaglevOnNextCall(foo);
+foo(0);
+// {foo} should have deopted right after being optimized.
+assertUnoptimized(foo);
+assertEquals(undefined, obj.y);
+
+
+%OptimizeMaglevOnNextCall(foo);
+foo(0);
+// {foo} should remain optimized now.
+assertOptimized(foo);
+assertEquals(undefined, obj.y);

View File

@@ -1,152 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Olivier=20Fl=C3=BCckiger?= <olivf@chromium.org>
Date: Mon, 3 Feb 2025 10:18:36 +0100
Subject: Merged: [runtime] Fix write barrier check in FastCloneJSObject
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Add missing check for page being marked.
Fixed: 392521083
(cherry picked from commit ce071a295e54b32bf7f03373da943678231cb1ee)
Change-Id: Iccfc1617862a6010ab34389aa4931f45e7389c05
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6221320
Auto-Submit: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Olivier Flückiger <olivf@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#74}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/codegen/code-stub-assembler-inl.h b/src/codegen/code-stub-assembler-inl.h
index e50133901daf151f50673355220f19c87c7b63ef..eab3ffbb73c1816e9cf62ae401eafe0ffc2133c6 100644
--- a/src/codegen/code-stub-assembler-inl.h
+++ b/src/codegen/code-stub-assembler-inl.h
@@ -215,9 +215,8 @@ TNode<Object> CodeStubAssembler::FastCloneJSObject(
Label if_no_write_barrier(this),
if_needs_write_barrier(this, Label::kDeferred);
- TNode<BoolT> needs_write_barrier = IsPageFlagReset(
- BitcastTaggedToWord(target), MemoryChunk::kIsInYoungGenerationMask);
- Branch(needs_write_barrier, &if_needs_write_barrier, &if_no_write_barrier);
+ TrySkipWriteBarrier(target, &if_needs_write_barrier);
+ Goto(&if_no_write_barrier);
BIND(&if_needs_write_barrier);
EmitCopyLoop(true);
diff --git a/src/codegen/code-stub-assembler.cc b/src/codegen/code-stub-assembler.cc
index bd2c37d3f10b019851a413d7577360735ba98311..c867c2d5c85cb235e970c642141e6cf27582adb0 100644
--- a/src/codegen/code-stub-assembler.cc
+++ b/src/codegen/code-stub-assembler.cc
@@ -5604,21 +5604,18 @@ void CodeStubAssembler::FillFixedDoubleArrayWithZero(
std::make_pair(MachineType::UintPtr(), byte_length));
}
-void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
- TNode<Object> object, Label* interesting) {
- Label finished(this);
- TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
- TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word);
- TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(
- Load(MachineType::IntPtr(), object_page_header,
- IntPtrConstant(MemoryChunkLayout::kFlagsOffset)));
- Branch(
- WordEqual(WordAnd(page_flags,
- IntPtrConstant(
- MemoryChunk::kPointersFromHereAreInterestingMask)),
- IntPtrConstant(0)),
- &finished, interesting);
- BIND(&finished);
+void CodeStubAssembler::TrySkipWriteBarrier(TNode<Object> object,
+ Label* if_needs_write_barrier) {
+ TNode<BoolT> may_need_write_barrier =
+ IsPageFlagSet(BitcastTaggedToWord(object),
+ MemoryChunk::kPointersFromHereAreInterestingMask);
+ // TODO(olivf): Also skip the WB with V8_ENABLE_STICKY_MARK_BITS if the mark
+ // bit is set.
+ GotoIf(may_need_write_barrier, if_needs_write_barrier);
+
+ CSA_DCHECK(this, TaggedEqual(CallRuntime(Runtime::kIsNoWriteBarrierNeeded,
+ NoContextConstant(), object),
+ TrueConstant()));
}
void CodeStubAssembler::MoveElements(ElementsKind kind,
@@ -5646,7 +5643,7 @@ void CodeStubAssembler::MoveElements(ElementsKind kind,
// The write barrier can be ignored if {dst_elements} is in new space, or if
// the elements pointer is FixedDoubleArray.
if (needs_barrier_check) {
- JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
+ TrySkipWriteBarrier(elements, &needs_barrier);
}
const TNode<IntPtrT> source_byte_length =
@@ -5740,7 +5737,7 @@ void CodeStubAssembler::CopyElements(ElementsKind kind,
// The write barrier can be ignored if {dst_elements} is in new space, or if
// the elements pointer is FixedDoubleArray.
if (needs_barrier_check) {
- JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
+ TrySkipWriteBarrier(dst_elements, &needs_barrier);
}
TNode<IntPtrT> source_byte_length =
diff --git a/src/codegen/code-stub-assembler.h b/src/codegen/code-stub-assembler.h
index 7902e08b163436ac4bca4ee129b0edd14c81698e..1bc6cce71567336e24313dd20f8f96609fd2aef7 100644
--- a/src/codegen/code-stub-assembler.h
+++ b/src/codegen/code-stub-assembler.h
@@ -2295,8 +2295,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
HoleConversionMode convert_holes = HoleConversionMode::kDontConvert,
TVariable<BoolT>* var_holes_converted = nullptr);
- void JumpIfPointersFromHereAreInteresting(TNode<Object> object,
- Label* interesting);
+ void TrySkipWriteBarrier(TNode<Object> object, Label* if_needs_write_barrier);
// Efficiently copy elements within a single array. The regions
// [src_index, src_index + length) and [dst_index, dst_index + length)
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index b76afdb9fe2acd2e9071e9998972319fff25a460..19b6a93b39f28d8a750770f7916c542940dc1541 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -2206,5 +2206,26 @@ RUNTIME_FUNCTION(Runtime_GetFeedback) {
#endif // OBJECT_PRINT
}
+RUNTIME_FUNCTION(Runtime_IsNoWriteBarrierNeeded) {
+ HandleScope scope(isolate);
+ DisallowGarbageCollection no_gc;
+ if (args.length() != 1) {
+ return CrashUnlessFuzzing(isolate);
+ }
+ DirectHandle<Object> object = args.at(0);
+ if (!(*object).IsHeapObject()) {
+ return CrashUnlessFuzzing(isolate);
+ }
+ auto heap_object = Cast<HeapObject>(object);
+ if (InReadOnlySpace(*heap_object)) {
+ return ReadOnlyRoots(isolate).true_value();
+ }
+ if (WriteBarrier::GetWriteBarrierModeForObject(*heap_object, no_gc) !=
+ WriteBarrierMode::SKIP_WRITE_BARRIER) {
+ return ReadOnlyRoots(isolate).false_value();
+ }
+ return ReadOnlyRoots(isolate).true_value();
+}
+
} // namespace internal
} // namespace v8
diff --git a/src/runtime/runtime.h b/src/runtime/runtime.h
index 6088fced3a07b63a008b1d8b20597c8f12e06aa8..bb3d12dcaab31947b04d0923dc81cebab6c0a321 100644
--- a/src/runtime/runtime.h
+++ b/src/runtime/runtime.h
@@ -578,6 +578,7 @@ namespace internal {
F(IsEfficiencyModeEnabled, 0, 1) \
F(IsInPlaceInternalizableString, 1, 1) \
F(IsInternalizedString, 1, 1) \
+ F(IsNoWriteBarrierNeeded, 1, 1) \
F(IsMaglevEnabled, 0, 1) \
F(IsSameHeapObject, 2, 1) \
F(IsSharedString, 1, 1) \

View File

@@ -1,48 +0,0 @@
From 91343bb45c78ac5cf3d214f68161d8150d81fa8c Mon Sep 17 00:00:00 2001
From: Darius Mercadier <dmercadier@chromium.org>
Date: Tue, 18 Feb 2025 09:32:04 +0100
Subject: [PATCH] [M132-LTS][turbofan] Disable escape analysis for TrustedHeapConstant
More precisely: prevent eliding objects that contain
TrustedHeapConstant, because it can lead to this constant flowing into
a Phis where other inputs are regular HeapConstant, which confuses
decompression optimization and leads to memory corruption.
(cherry picked from commit b75e527fb521dca5e7621928846c0c7c6becc8dd)
Fixed: chromium:390743124
Change-Id: Ic60e4d7dd156367f7d4bb385d422591384c3033c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6278358
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Commit-Queue: Nico Hartmann <nicohartmann@chromium.org>
Auto-Submit: Darius Mercadier <dmercadier@chromium.org>
Cr-Original-Commit-Position: refs/heads/main@{#98748}
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6317725
Reviewed-by: Darius Mercadier <dmercadier@chromium.org>
Commit-Queue: Gyuyoung Kim (xWF) <qkim@google.com>
Cr-Commit-Position: refs/branch-heads/13.2@{#82}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
---
diff --git a/src/compiler/escape-analysis.cc b/src/compiler/escape-analysis.cc
index eb223bc..c9a7bc9 100644
--- a/src/compiler/escape-analysis.cc
+++ b/src/compiler/escape-analysis.cc
@@ -622,6 +622,16 @@
Node* value = current->ValueInput(1);
const VirtualObject* vobject = current->GetVirtualObject(object);
Variable var;
+ if (value->opcode() == IrOpcode::kTrustedHeapConstant) {
+ // TODO(dmercadier): enable escaping objects containing
+ // TrustedHeapConstants. This is currently disabled because it leads to
+ // bugs when Trusted HeapConstant and regular HeapConstant flow into the
+ // same Phi, which can then be marked as Compressed, messing up the
+ // tagging of the Trusted HeapConstant.
+ current->SetEscaped(object);
+ current->SetEscaped(value);
+ break;
+ }
// BoundedSize fields cannot currently be materialized by the deoptimizer,
// so we must not dematerialze them.
if (vobject && !vobject->HasEscaped() &&

View File

@@ -1,74 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Darius Mercadier <dmercadier@chromium.org>
Date: Wed, 8 Jan 2025 14:37:24 +0100
Subject: Merged: [turbofan] Fix CallWithSpread bug when array prototype has
changed
Bug: chromium:385386138
(cherry picked from commit 1be46f0e2e71159a74d8614f9ac108d334797729)
Change-Id: I8b0c95c4072652e6b9ccab792b6685e0e0dcfbb9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6172284
Auto-Submit: Darius Mercadier <dmercadier@chromium.org>
Commit-Queue: Darius Mercadier <dmercadier@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#58}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/compiler/js-call-reducer.cc b/src/compiler/js-call-reducer.cc
index cc9ec63ce09cbd09e8484b9ab075545e351de7eb..7c248c80a519708eaa2a25e9289fd6094ed52424 100644
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -5223,6 +5223,18 @@ TNode<Object> JSCallReducerAssembler::ReduceJSCallWithArrayLikeOrSpreadOfEmpty(
DCHECK_EQ(static_cast<Node*>(arguments_list)->opcode(),
IrOpcode::kJSCreateEmptyLiteralArray);
+ // Check that arguments_list's prototype is still an array prototype.
+ TNode<Map> map = LoadMap(TNode<HeapObject>::UncheckedCast(arguments_list));
+ TNode<HeapObject> proto = TNode<HeapObject>::UncheckedCast(
+ LoadField(AccessBuilder::ForMapPrototype(), map));
+ TNode<HeapObject> initial_array_prototype =
+ HeapConstant(broker()
+ ->target_native_context()
+ .initial_array_prototype(broker())
+ .object());
+ TNode<Boolean> check = ReferenceEqual(proto, initial_array_prototype);
+ CheckIf(check, DeoptimizeReason::kWrongMap, p.feedback());
+
// Turn the JSCallWithArrayLike or JSCallWithSpread roughly into:
//
// "arguments_list array is still empty?"
diff --git a/test/mjsunit/compiler/regress-385386138.js b/test/mjsunit/compiler/regress-385386138.js
new file mode 100644
index 0000000000000000000000000000000000000000..72dadea5ff5b0732822e6a15a34bd3357d5ff7c2
--- /dev/null
+++ b/test/mjsunit/compiler/regress-385386138.js
@@ -0,0 +1,27 @@
+// Copyright 2024 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --turbofan --no-always-turbofan
+
+function foo() {
+ let val = [];
+ val.__proto__ = RegExp();
+ return Math.max(...val);
+}
+
+%PrepareFunctionForOptimization(foo);
+assertThrows(() => foo(), TypeError,
+ "Spread syntax requires ...iterable[Symbol.iterator] to be a function");
+
+%OptimizeFunctionOnNextCall(foo);
+assertThrows(() => foo(), TypeError,
+ "Spread syntax requires ...iterable[Symbol.iterator] to be a function");
+assertUnoptimized(foo);
+
+
+%OptimizeFunctionOnNextCall(foo);
+assertThrows(() => foo(), TypeError,
+ "Spread syntax requires ...iterable[Symbol.iterator] to be a function");
+// TF should not have speculatively optimized CallWithSpread.
+assertOptimized(foo);

View File

@@ -1,96 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Olivier=20Fl=C3=BCckiger?= <olivf@chromium.org>
Date: Wed, 8 Jan 2025 16:06:43 +0100
Subject: Merged: [maglev] regalloc: handle non-loop resumable_loops
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Resumable loops which are not loops can be either:
1. An unreachable loop with only a back-edge
2. A fall-through to a resumable loop with a dead back-edge
Only (1) starts with an empty register state.
Fixed: 386143468
(cherry picked from commit b44bd24761f1a2eae131bd90be15b5a68cc70f83)
Change-Id: I9ebb028fe17c6f1de00825837acec6f8169dbf67
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6172463
Auto-Submit: Olivier Flückiger <olivf@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#60}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/maglev/maglev-interpreter-frame-state.cc b/src/maglev/maglev-interpreter-frame-state.cc
index 562f88cabaf9a32b0197c7762bb1336541b95a29..21561ea57364d9fbcecb118596141ac3af081798 100644
--- a/src/maglev/maglev-interpreter-frame-state.cc
+++ b/src/maglev/maglev-interpreter-frame-state.cc
@@ -1338,6 +1338,23 @@ void MergePointInterpreterFrameState::ReducePhiPredecessorCount(unsigned num) {
}
}
+bool MergePointInterpreterFrameState::IsUnreachable() const {
+ DCHECK_EQ(predecessors_so_far_, predecessor_count_);
+ if (predecessor_count_ > 1) {
+ return false;
+ }
+ // This should actually only support predecessor_count == 1, but we
+ // currently don't eliminate resumable loop headers (and subsequent code
+ // until the next resume) that end up being unreachable from JumpLoop.
+ if (predecessor_count_ == 0) {
+ DCHECK(is_resumable_loop());
+ return true;
+ }
+ DCHECK_EQ(predecessor_count_, 1);
+ DCHECK_IMPLIES(is_loop(), predecessor_at(0)->control_node()->Is<JumpLoop>());
+ return is_loop();
+}
+
} // namespace maglev
} // namespace internal
} // namespace v8
diff --git a/src/maglev/maglev-interpreter-frame-state.h b/src/maglev/maglev-interpreter-frame-state.h
index 0f84499a4f605013d4a9328497ef9fa1825a53af..f3802780bc49aa10869e1dd725aa769474c7697a 100644
--- a/src/maglev/maglev-interpreter-frame-state.h
+++ b/src/maglev/maglev-interpreter-frame-state.h
@@ -935,6 +935,8 @@ class MergePointInterpreterFrameState {
predecessors_so_far_ == 0;
}
+ bool IsUnreachable() const;
+
BasicBlockType basic_block_type() const {
return kBasicBlockTypeBits::decode(bitfield_);
}
diff --git a/src/maglev/maglev-regalloc.cc b/src/maglev/maglev-regalloc.cc
index d21bc5128b9b8d70b86bf121a75846f57e0f113b..b11a8684e457f6a1abb6a7b1b24eab7296ae488c 100644
--- a/src/maglev/maglev-regalloc.cc
+++ b/src/maglev/maglev-regalloc.cc
@@ -155,7 +155,7 @@ bool IsLiveAtTarget(ValueNode* node, ControlNode* source, BasicBlock* target) {
}
// Drop all values on resumable loop headers.
- if (target->has_state() && target->state()->is_resumable_loop()) return false;
+ if (target->is_loop() && target->state()->is_resumable_loop()) return false;
// TODO(verwaest): This should be true but isn't because we don't yet
// eliminate dead code.
@@ -407,13 +407,9 @@ void StraightForwardRegisterAllocator::AllocateRegisters() {
if (block->state()->is_exception_handler()) {
// Exceptions start from a blank state of register values.
ClearRegisterValues();
- } else if (block->state()->is_resumable_loop() &&
- block->state()->predecessor_count() <= 1) {
+ } else if (block->state()->IsUnreachable()) {
// Loops that are only reachable through JumpLoop start from a blank
// state of register values.
- // This should actually only support predecessor_count == 1, but we
- // currently don't eliminate resumable loop headers (and subsequent code
- // until the next resume) that end up being unreachable from JumpLoop.
ClearRegisterValues();
} else {
InitializeRegisterValues(block->state()->register_state());

View File

@@ -1,43 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Toon Verwaest <verwaest@chromium.org>
Date: Tue, 7 Jan 2025 13:35:04 +0100
Subject: Merged: [compiler] Check max-args when calling bound functions
Bug: 385155406
(cherry picked from commit d35770876597b8c25de3c483b9368686f3a9fda8)
Change-Id: I98c7166588f4099c5665ff94dc65da5c25be7535
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6175036
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Auto-Submit: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#62}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/compiler/js-call-reducer.cc b/src/compiler/js-call-reducer.cc
index 7c248c80a519708eaa2a25e9289fd6094ed52424..c96910c0c12d8fc12fc21621227a6671896f23ab 100644
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -4651,6 +4651,9 @@ Reduction JSCallReducer::ReduceJSCall(Node* node) {
// succeed.
FixedArrayRef bound_arguments = function.bound_arguments(broker());
const int bound_arguments_length = bound_arguments.length();
+ if (arity + bound_arguments_length > Code::kMaxArguments) {
+ return NoChange();
+ }
static constexpr int kInlineSize = 16; // Arbitrary.
base::SmallVector<Node*, kInlineSize> args;
for (int i = 0; i < bound_arguments_length; ++i) {
diff --git a/src/compiler/js-inlining.cc b/src/compiler/js-inlining.cc
index ea49bb803ac043ad88853dace478c66b496f5735..04ae603717a39357bc4460cafb1713a6a4666544 100644
--- a/src/compiler/js-inlining.cc
+++ b/src/compiler/js-inlining.cc
@@ -250,6 +250,7 @@ FrameState JSInliner::CreateArtificialFrameState(
Node* context, Node* callee) {
const int parameter_count_with_receiver =
parameter_count + JSCallOrConstructNode::kReceiverOrNewTargetInputCount;
+ CHECK_LE(parameter_count_with_receiver, kMaxUInt16);
const FrameStateFunctionInfo* state_info =
common()->CreateFrameStateFunctionInfo(frame_state_type,
parameter_count_with_receiver, 0,

View File

@@ -1,44 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Peter Kasting <pkasting@chromium.org>
Date: Wed, 23 Oct 2024 09:38:17 -0700
Subject: Fix compiler failure on older Clang.
On crrev.com/c/5774729 someone reported that omitting `typename` in a
few places caused an error for them. This should be allowed in C++20,
but apparently was not accepted by (what the author believes is)
Clang 15. I don't know whether V8 officially supports this version.
Since it's harmless to explicitly add the `typename` here, go ahead and
do so to make life less painful.
Bug: none
Change-Id: I97a125a6ac9fa21fa15723888ca00790cc4fb4ee
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/5957255
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Auto-Submit: Peter Kasting <pkasting@chromium.org>
Cr-Commit-Position: refs/heads/main@{#96791}
diff --git a/include/v8-internal.h b/include/v8-internal.h
index a13db2bd74ad4b412cf8bd067c7b25c2acb1bcb8..99099fb1bb617014365b1ba3eaf4bd99d6eb6d4b 100644
--- a/include/v8-internal.h
+++ b/include/v8-internal.h
@@ -1430,7 +1430,7 @@ struct MaybeDefineIteratorConcept {};
template <typename Iterator>
struct MaybeDefineIteratorConcept<
Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
- using iterator_concept = Iterator::iterator_concept;
+ using iterator_concept = typename Iterator::iterator_concept;
};
// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
template <typename Iterator>
@@ -1443,7 +1443,8 @@ struct MaybeDefineIteratorConcept<
// TODO(pkasting): Add this unconditionally after dropping support for old
// libstdc++ versions.
#if __has_include(<ranges>)
- using iterator_concept = std::iterator_traits<Iterator>::iterator_concept;
+ using iterator_concept =
+ typename std::iterator_traits<Iterator>::iterator_concept;
#endif
};

View File

@@ -1,44 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Frank Tang <ftang@chromium.org>
Date: Tue, 4 Feb 2025 17:16:24 -0800
Subject: Merged: Fix out of bound string access
Bug: 386857213
(cherry picked from commit 0242cac4b20305b03b74c2e9588003378eebeb77)
Change-Id: I354e9a246ccfe36bf12fce13597f295093cde2a3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6245138
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Commit-Queue: Deepti Gandluri <gdeepti@chromium.org>
Reviewed-by: Deepti Gandluri <gdeepti@chromium.org>
Auto-Submit: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#78}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/objects/js-date-time-format.cc b/src/objects/js-date-time-format.cc
index 038929afeb0c8340b7e1b01e4e9d9810a10f1fee..5de38e23ea136711ace1e019cc2189a9a9bad57a 100644
--- a/src/objects/js-date-time-format.cc
+++ b/src/objects/js-date-time-format.cc
@@ -1685,6 +1685,10 @@ std::optional<std::string> GetOffsetTimeZone(Isolate* isolate,
if (m0 == ':') {
// Ignore ':'
p++;
+ if (len == p) {
+ // Error
+ return std::nullopt;
+ }
m0 = flat.Get(p);
}
if (len - p != 2) {
diff --git a/test/intl/regress-386857213.js b/test/intl/regress-386857213.js
new file mode 100644
index 0000000000000000000000000000000000000000..fec35fcc4bbc6c9920c56b7b7be1b5588254c460
--- /dev/null
+++ b/test/intl/regress-386857213.js
@@ -0,0 +1,5 @@
+// Copyright 2025 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+assertThrows(() => new Intl.DateTimeFormat("en", {timeZone: "+09:"}), RangeError);

View File

@@ -1,107 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Thibaud Michaud <thibaudm@chromium.org>
Date: Mon, 10 Feb 2025 14:31:16 +0100
Subject: Merged: "Reland "Lower the maximum JS parameter count""
This is a reland of commit 1827ed8345369ca50a55a10ab3e45bcc581c6339
Before the change, one of the nodes had more than 2^16 inputs
so optimization bailed out.
After the change, the function has fewer parameters and gets
optimized, and the register allocator struggles with that many
parameters and times out.
Just mark the test as slow for now.
Original change's description:
> Lower the maximum JS parameter count
>
> To allow extra implicit arguments on the call node without overflowing
> the uint16_t input count, in particular in the wasm-to-js wrapper where
> we don't have a bailout mechanism.
>
> R=verwaest@chromium.org
>
> Fixed: 394350433
> Change-Id: I61d2e2387539cafd6a0909c3ee035c93d0217be3
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6239302
> Reviewed-by: Toon Verwaest <verwaest@chromium.org>
> Commit-Queue: Thibaud Michaud <thibaudm@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#98556}
(cherry picked from commit 84a0e230dabc2c874a129c2280d6be4f45636225)
Change-Id: Ibdfbc0850ca709f0418efdb1ed89a82796a9c378
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6268260
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Thibaud Michaud <thibaudm@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#80}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/objects/code.h b/src/objects/code.h
index a9c1b5bc007eccad8f1731810d65a3f5c51f22f2..2e24c18407577084d3f523ede0663234420db8db 100644
--- a/src/objects/code.h
+++ b/src/objects/code.h
@@ -450,7 +450,9 @@ class Code : public ExposedTrustedObject {
// Reserve one argument count value as the "don't adapt arguments" sentinel.
static const int kArgumentsBits = 16;
- static const int kMaxArguments = (1 << kArgumentsBits) - 2;
+ // Slightly less than 2^kArgumentBits-1 to allow for extra implicit arguments
+ // on the call nodes without overflowing the uint16_t input_count.
+ static const int kMaxArguments = (1 << kArgumentsBits) - 10;
private:
inline void set_instruction_start(IsolateForSandbox isolate, Address value);
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index 2104677a3124e0f29c04b38155bad6dd5ef51c67..d1889ce528a284cef35613f426479304d68a4f2e 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -228,6 +228,10 @@
# TODO(v8:12783): Turboshaft instruction selection not ported to these platforms yet.
'wasm/turboshaft/instruction-selection': [PASS, ['arch in [riscv32]', SKIP]],
+
+ # TODO(thibaudm): Register allocation struggles with the function in this
+ # test, which has the maximum allowed number of parameters.
+ 'regress/regress-crbug-724153': [SLOW],
}], # ALWAYS
################################################################################
diff --git a/test/mjsunit/regress/regress-11491.js b/test/mjsunit/regress/regress-11491.js
index 795480a15db69b3ca30e97fc49d283546be3319e..4e188d44226341f5bba843dd10a46ff1fbaa4897 100644
--- a/test/mjsunit/regress/regress-11491.js
+++ b/test/mjsunit/regress/regress-11491.js
@@ -4,7 +4,7 @@
function test() {
// Create a generator constructor with the maximum number of allowed parameters.
- const args = new Array(65535);
+ const args = new Array(65526);
function* gen() {}
const c = gen.constructor.apply(null, args);
diff --git a/test/mjsunit/regress/regress-crbug-724153.js b/test/mjsunit/regress/regress-crbug-724153.js
index a571f8e0bf5e85accc53a926358e61aea6c3d981..282532e5026270334b2d2c40f77578e2596ab67c 100644
--- a/test/mjsunit/regress/regress-crbug-724153.js
+++ b/test/mjsunit/regress/regress-crbug-724153.js
@@ -6,7 +6,7 @@
(function TestParameterLimit() {
var src = '(function f(a,';
- for (var i = 0; i < 65534 - 2; i++) {
+ for (var i = 0; i < 65525 - 2; i++) {
src += 'b' + i + ',';
}
src += 'c) { return a + c })';
diff --git a/test/mjsunit/regress/regress-v8-6716.js b/test/mjsunit/regress/regress-v8-6716.js
index 87b72e148820e416ae698b9b414f3d5ce2b1bcb1..df8c06887720dd6694576e13d8423c8304da93f2 100644
--- a/test/mjsunit/regress/regress-v8-6716.js
+++ b/test/mjsunit/regress/regress-v8-6716.js
@@ -3,5 +3,5 @@
// found in the LICENSE file.
function f() {}
-var a = Array(2 ** 16 - 2); // Elements in large-object-space.
+var a = Array(2 ** 16 - 10); // Elements in large-object-space.
f.bind(...a);

View File

@@ -1,198 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jakob Kummerow <jkummerow@chromium.org>
Date: Thu, 6 Feb 2025 14:42:13 +0100
Subject: Merged: [wasm] Replace {dead_code_} set with {is_dying_} bit
This saves some memory, and fixes a bug.
Fixed: 391907159
(cherry picked from commit 33ca4f51e5dbba9817eba16fd3249e66a880cf33)
Change-Id: Iad93b3e7290c25ddcedf806cc85c4401a5fcb0fc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/6239106
Auto-Submit: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Matthias Liedtke <mliedtke@chromium.org>
Reviewed-by: Matthias Liedtke <mliedtke@chromium.org>
Cr-Commit-Position: refs/branch-heads/13.2@{#76}
Cr-Branched-From: 24068c59cedad9ee976ddc05431f5f497b1ebd71-refs/heads/13.2.152@{#1}
Cr-Branched-From: 6054ba94db0969220be4f94dc1677fc4696bdc4f-refs/heads/main@{#97085}
diff --git a/src/wasm/wasm-code-manager.h b/src/wasm/wasm-code-manager.h
index e0ec794fab33445b088220fbeb68ea02c1f35912..c04de89acf98209fe2dd32c5aa3868e4c2a234af 100644
--- a/src/wasm/wasm-code-manager.h
+++ b/src/wasm/wasm-code-manager.h
@@ -316,6 +316,9 @@ class V8_EXPORT_PRIVATE WasmCode final {
return ForDebuggingField::decode(flags_);
}
+ bool is_dying() const { return dying_; }
+ void mark_as_dying() { dying_ = true; }
+
// Returns {true} for Liftoff code that sets up a feedback vector slot in its
// stack frame.
// TODO(jkummerow): This can be dropped when we ship Wasm inlining.
@@ -450,6 +453,10 @@ class V8_EXPORT_PRIVATE WasmCode final {
using ForDebuggingField = ExecutionTierField::Next<ForDebugging, 2>;
using FrameHasFeedbackSlotField = ForDebuggingField::Next<bool, 1>;
+ // Will be set to {true} the first time this code object is considered
+ // "potentially dead" (to be confirmed by the next Wasm Code GC cycle).
+ std::atomic<bool> dying_{false};
+
// WasmCode is ref counted. Counters are held by:
// 1) The jump table / code table.
// 2) {WasmCodeRefScope}s.
diff --git a/src/wasm/wasm-engine.cc b/src/wasm/wasm-engine.cc
index 54ac5a1a4927d858577b9a4d984dd554999107a4..33e008c1678037e3ba7cce3a6cc16e3273e1b2ce 100644
--- a/src/wasm/wasm-engine.cc
+++ b/src/wasm/wasm-engine.cc
@@ -1700,7 +1700,6 @@ void WasmEngine::FreeNativeModule(NativeModule* native_module) {
}
// If any code objects are currently tracked as dead or near-dead, remove
// references belonging to the NativeModule that's being deleted.
- std::erase_if(dead_code_, part_of_native_module);
std::erase_if(potentially_dead_code_, part_of_native_module);
native_module_cache_.Erase(native_module);
@@ -1783,9 +1782,11 @@ void WasmEngine::ReportLiveCodeFromStackForGC(Isolate* isolate) {
bool WasmEngine::AddPotentiallyDeadCode(WasmCode* code) {
base::MutexGuard guard(&mutex_);
- if (dead_code_.contains(code)) return false; // Code is already dead.
+ if (code->is_dying()) return false;
auto added = potentially_dead_code_.insert(code);
- if (!added.second) return false; // An entry already existed.
+ DCHECK(added.second);
+ USE(added);
+ code->mark_as_dying();
new_potentially_dead_code_size_ += code->instructions().size();
if (v8_flags.wasm_code_gc) {
// Trigger a GC if 64kB plus 10% of committed code are potentially dead.
@@ -1831,19 +1832,17 @@ void WasmEngine::FreeDeadCodeLocked(const DeadCodeMap& dead_code,
const std::vector<WasmCode*>& code_vec = dead_code_entry.second;
TRACE_CODE_GC("Freeing %zu code object%s of module %p.\n", code_vec.size(),
code_vec.size() == 1 ? "" : "s", native_module);
- for (WasmCode* code : code_vec) {
- DCHECK(dead_code_.contains(code));
- dead_code_.erase(code);
- }
+#if DEBUG
+ for (WasmCode* code : code_vec) DCHECK(code->is_dying());
+#endif // DEBUG
native_module->FreeCode(base::VectorOf(code_vec));
}
if (dead_wrappers.size()) {
TRACE_CODE_GC("Freeing %zu wrapper%s.\n", dead_wrappers.size(),
dead_wrappers.size() == 1 ? "" : "s");
- for (WasmCode* code : dead_wrappers) {
- DCHECK(dead_code_.contains(code));
- dead_code_.erase(code);
- }
+#if DEBUG
+ for (WasmCode* code : dead_wrappers) DCHECK(code->is_dying());
+#endif // DEBUG
GetWasmImportWrapperCache()->Free(dead_wrappers);
}
}
@@ -1933,16 +1932,15 @@ void WasmEngine::PotentiallyFinishCurrentGC() {
if (!current_gc_info_->outstanding_isolates.empty()) return;
// All remaining code in {current_gc_info->dead_code} is really dead.
- // Move it from the set of potentially dead code to the set of dead code,
- // and decrement its ref count.
+ // Remove it from the set of potentially dead code, and decrement its
+ // ref count.
size_t num_freed = 0;
DeadCodeMap dead_code;
std::vector<WasmCode*> dead_wrappers;
for (WasmCode* code : current_gc_info_->dead_code) {
DCHECK(potentially_dead_code_.contains(code));
+ DCHECK(code->is_dying());
potentially_dead_code_.erase(code);
- DCHECK(!dead_code_.contains(code));
- dead_code_.insert(code);
if (code->DecRefOnDeadCode()) {
NativeModule* native_module = code->native_module();
if (native_module) {
@@ -1966,7 +1964,7 @@ void WasmEngine::PotentiallyFinishCurrentGC() {
}
size_t WasmEngine::EstimateCurrentMemoryConsumption() const {
- UPDATE_WHEN_CLASS_CHANGES(WasmEngine, 800);
+ UPDATE_WHEN_CLASS_CHANGES(WasmEngine, 760);
UPDATE_WHEN_CLASS_CHANGES(IsolateInfo, 184);
UPDATE_WHEN_CLASS_CHANGES(NativeModuleInfo, 56);
UPDATE_WHEN_CLASS_CHANGES(CurrentGCInfo, 96);
@@ -1977,7 +1975,6 @@ size_t WasmEngine::EstimateCurrentMemoryConsumption() const {
result += ContentSize(async_compile_jobs_);
result += async_compile_jobs_.size() * sizeof(AsyncCompileJob);
result += ContentSize(potentially_dead_code_);
- result += ContentSize(dead_code_);
// TODO(14106): Do we care about {compilation_stats_}?
// TODO(14106): Do we care about {code_tracer_}?
diff --git a/src/wasm/wasm-engine.h b/src/wasm/wasm-engine.h
index 1511753825110dedf8a6e9b8137e593a0fd2f618..50bf7c84996eef7f5ca7c160b1047050c2da30b9 100644
--- a/src/wasm/wasm-engine.h
+++ b/src/wasm/wasm-engine.h
@@ -483,12 +483,8 @@ class V8_EXPORT_PRIVATE WasmEngine {
size_t new_potentially_dead_code_size_ = 0;
// Set of potentially dead code. This set holds one ref for each code object,
// until code is detected to be really dead. At that point, the ref count is
- // decremented and code is moved to the {dead_code} set. If the code is
- // finally deleted, it is also removed from {dead_code}.
+ // decremented and code is removed from the set.
std::unordered_set<WasmCode*> potentially_dead_code_;
- // Code that is not being executed in any isolate any more, but the ref count
- // did not drop to zero yet.
- std::unordered_set<WasmCode*> dead_code_;
int8_t num_code_gcs_triggered_ = 0;
// If an engine-wide GC is currently running, this pointer stores information
diff --git a/src/wasm/wasm-import-wrapper-cache.cc b/src/wasm/wasm-import-wrapper-cache.cc
index 0554f3d50a288b34bd4729a33ac523c4902d93e0..56fa64302665fdbd036c95e29610748ccd541959 100644
--- a/src/wasm/wasm-import-wrapper-cache.cc
+++ b/src/wasm/wasm-import-wrapper-cache.cc
@@ -150,7 +150,10 @@ WasmCode* WasmImportWrapperCache::CompileWasmImportCallWrapper(
// Now that we have the lock (in the form of the cache_scope), check
// again whether another thread has just created the wrapper.
wasm_code = cache_scope[key];
- if (wasm_code) return wasm_code;
+ if (wasm_code) {
+ WasmCodeRefScope::AddRef(wasm_code);
+ if (!wasm_code->is_dying()) return wasm_code;
+ }
wasm_code = cache_scope.AddWrapper(key, std::move(result),
WasmCode::Kind::kWasmToJsWrapper);
@@ -216,6 +219,7 @@ WasmCode* WasmImportWrapperCache::MaybeGet(ImportCallKind kind,
entry_map_.find({kind, canonical_type_index, expected_arity, suspend});
if (it == entry_map_.end()) return nullptr;
WasmCodeRefScope::AddRef(it->second);
+ if (it->second->is_dying()) return nullptr;
return it->second;
}
@@ -230,6 +234,9 @@ WasmCode* WasmImportWrapperCache::Lookup(Address pc) const {
DCHECK_EQ(candidate->instruction_start(), iter->first);
if (!candidate->contains(pc)) return nullptr;
WasmCodeRefScope::AddRef(candidate);
+ // Note: this function is used for iterating the stack, where dying
+ // code objects can still have their last few activations, so we
+ // must return {candidate} even if {candidate->is_dying()}.
return candidate;
}
diff --git a/test/cctest/wasm/wasm-run-utils.cc b/test/cctest/wasm/wasm-run-utils.cc
index fb4f0c3c39aaba90729dfd1f6b9ed3f90d25ed1d..2063983ecb44eb3f50b33258e40cf7cc6d7e3089 100644
--- a/test/cctest/wasm/wasm-run-utils.cc
+++ b/test/cctest/wasm/wasm-run-utils.cc
@@ -294,6 +294,7 @@ void TestingModuleBuilder::AddIndirectFunctionTable(
WasmTableObject::AddUse(isolate_, table_obj, instance_object_, table_index);
if (function_indexes) {
+ WasmCodeRefScope code_ref_scope;
for (uint32_t i = 0; i < table_size; ++i) {
WasmFunction& function = test_module_->functions[function_indexes[i]];
int sig_id = test_module_->canonical_sig_id(function.sig_index);

View File

@@ -1,397 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jakob Kummerow <jkummerow@chromium.org>
Date: Wed, 18 Sep 2024 11:16:07 +0200
Subject: Fix freeing of identical shared wrappers
The early-return path of WasmImportWrapperCache::Free checked the
wrong map for emptiness: {entry_map_} can have key collisions, so
previous Free() calls can empty it even though {codes_} is still
non-empty.
Drive-by: the failing test case should not have created identical
wrappers, but could do so when running multi-threaded; while this
is harmless, it's cleaner to avoid that.
Fixed: 366007153
Bug: 42204526
Change-Id: I5e387c83ec9b45a650044b7243d6e68907bc5219
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/5869839
Reviewed-by: Matthias Liedtke <mliedtke@chromium.org>
Auto-Submit: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#96155}
diff --git a/src/runtime/runtime-wasm.cc b/src/runtime/runtime-wasm.cc
index 2fba518bb41e950da75012b95bccf240ac9ca166..bfd7408b78db6e8dd53e28d0e61085ee8e6e97f7 100644
--- a/src/runtime/runtime-wasm.cc
+++ b/src/runtime/runtime-wasm.cc
@@ -18,7 +18,6 @@
#include "src/objects/objects-inl.h"
#include "src/strings/unicode-inl.h"
#include "src/trap-handler/trap-handler.h"
-#include "src/wasm/compilation-environment-inl.h"
#include "src/wasm/module-compiler.h"
#include "src/wasm/serialized-signature-inl.h"
#include "src/wasm/value-type.h"
@@ -756,28 +755,9 @@ RUNTIME_FUNCTION(Runtime_TierUpWasmToJSWrapper) {
wasm::WasmCode* wasm_code =
cache->MaybeGet(kind, canonical_sig_index, expected_arity, suspend);
if (!wasm_code) {
- wasm::CompilationEnv env = wasm::CompilationEnv::ForModule(native_module);
- wasm::WasmCompilationResult result = compiler::CompileWasmImportCallWrapper(
- &env, kind, &sig, false, expected_arity, suspend);
- {
- wasm::WasmImportWrapperCache::ModificationScope cache_scope(cache);
- wasm::WasmImportWrapperCache::CacheKey key(kind, canonical_sig_index,
- expected_arity, suspend);
- wasm_code = cache_scope.AddWrapper(
- key, std::move(result), wasm::WasmCode::Kind::kWasmToJsWrapper);
- }
- // To avoid lock order inversion, code printing must happen after the
- // end of the {cache_scope}.
- wasm_code->MaybePrint();
- isolate->counters()->wasm_generated_code_size()->Increment(
- wasm_code->instructions().length());
- isolate->counters()->wasm_reloc_size()->Increment(
- wasm_code->reloc_info().length());
- if (V8_UNLIKELY(native_module->log_code())) {
- wasm::GetWasmEngine()->LogWrapperCode(base::VectorOf(&wasm_code, 1));
- // Log the code immediately in the current isolate.
- wasm::GetWasmEngine()->LogOutstandingCodesForIsolate(isolate);
- }
+ wasm_code = cache->CompileWasmImportCallWrapper(
+ isolate, native_module, kind, &sig, canonical_sig_index, false,
+ expected_arity, suspend);
}
// Note: we don't need to decrement any refcounts here, because tier-up
// doesn't overwrite an existing compiled wrapper, and the generic wrapper
diff --git a/src/wasm/module-compiler.cc b/src/wasm/module-compiler.cc
index 43f1215ea5a394f93d837603de50859eef78ce3f..d5a9e007258e1d7aab33c8b0a29a6fcdaf4452b4 100644
--- a/src/wasm/module-compiler.cc
+++ b/src/wasm/module-compiler.cc
@@ -4622,8 +4622,9 @@ void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module) {
}
}
-WasmCode* CompileImportWrapperForTest(NativeModule* native_module,
- Counters* counters, ImportCallKind kind,
+WasmCode* CompileImportWrapperForTest(Isolate* isolate,
+ NativeModule* native_module,
+ ImportCallKind kind,
const FunctionSig* sig,
uint32_t canonical_type_index,
int expected_arity, Suspend suspend) {
@@ -4637,35 +4638,9 @@ WasmCode* CompileImportWrapperForTest(NativeModule* native_module,
return nullptr;
}
- CompilationEnv env = CompilationEnv::ForModule(native_module);
- WasmCompilationResult result = compiler::CompileWasmImportCallWrapper(
- &env, kind, sig, source_positions, expected_arity, suspend);
-
- DCHECK(result.inlining_positions.empty());
- DCHECK(result.deopt_data.empty());
- WasmCode* code;
- {
- // There was no cache entry when we called this function, but in the
- // meantime a different module could have created one. Simply discard the
- // new wrapper if so.
- WasmImportWrapperCache::ModificationScope cache_scope(
- GetWasmImportWrapperCache());
- WasmImportWrapperCache::CacheKey key(kind, canonical_type_index,
- expected_arity, suspend);
- if (V8_UNLIKELY(cache_scope[key] != nullptr)) return cache_scope[key];
- code = cache_scope.AddWrapper(key, std::move(result),
- WasmCode::Kind::kWasmToJsWrapper);
- }
- // To avoid lock order inversion, code printing must happen after the
- // end of the {cache_scope}.
- code->MaybePrint();
- counters->wasm_generated_code_size()->Increment(
- code->instructions().length());
- counters->wasm_reloc_size()->Increment(code->reloc_info().length());
- if (native_module->log_code()) {
- GetWasmEngine()->LogWrapperCode(base::VectorOf(&code, 1));
- }
- return code;
+ return GetWasmImportWrapperCache()->CompileWasmImportCallWrapper(
+ isolate, native_module, kind, sig, canonical_type_index, source_positions,
+ expected_arity, suspend);
}
} // namespace v8::internal::wasm
diff --git a/src/wasm/module-compiler.h b/src/wasm/module-compiler.h
index d6cf89381ceaaf38f299f4d3f75d474d54e82633..b757e6385301af9a2d3816d3d4d3fb552c4842b8 100644
--- a/src/wasm/module-compiler.h
+++ b/src/wasm/module-compiler.h
@@ -70,8 +70,9 @@ V8_EXPORT_PRIVATE WasmError ValidateAndSetBuiltinImports(
// cache entry. Assumes the key already exists in the cache but has not been
// compiled yet.
V8_EXPORT_PRIVATE
-WasmCode* CompileImportWrapperForTest(NativeModule* native_module,
- Counters* counters, ImportCallKind kind,
+WasmCode* CompileImportWrapperForTest(Isolate* isolate,
+ NativeModule* native_module,
+ ImportCallKind kind,
const FunctionSig* sig,
uint32_t canonical_type_index,
int expected_arity, Suspend suspend);
diff --git a/src/wasm/module-instantiate.cc b/src/wasm/module-instantiate.cc
index f4c2a75f7fd44d6ff2b2ab61d06d9d8b2f1965c2..8d9f81a7f1dad2f37f2b036fdf8b4da3c70e2c38 100644
--- a/src/wasm/module-instantiate.cc
+++ b/src/wasm/module-instantiate.cc
@@ -2007,38 +2007,9 @@ bool InstanceBuilder::ProcessImportedFunction(
// generic wrapper will be used (see above).
NativeModule* native_module = trusted_instance_data->native_module();
bool source_positions = is_asmjs_module(native_module->module());
- CompilationEnv env = CompilationEnv::ForModule(native_module);
- WasmCompilationResult result = compiler::CompileWasmImportCallWrapper(
- &env, kind, expected_sig, source_positions, expected_arity,
- resolved.suspend());
- bool code_is_new = false;
- {
- WasmImportWrapperCache::ModificationScope cache_scope(cache);
- WasmImportWrapperCache::CacheKey key(
- kind, canonical_sig_id, expected_arity, resolved.suspend());
- // Now that we have the lock (in the form of the cache_scope), check
- // again whether another thread has just created the wrapper.
- wasm_code = cache_scope[key];
- if (!wasm_code) {
- wasm_code = cache_scope.AddWrapper(
- key, std::move(result), WasmCode::Kind::kWasmToJsWrapper);
- code_is_new = true;
- }
- }
- if (code_is_new) {
- // To avoid lock order inversion, code printing must happen after the
- // end of the {cache_scope}.
- wasm_code->MaybePrint();
- isolate_->counters()->wasm_generated_code_size()->Increment(
- wasm_code->instructions().length());
- isolate_->counters()->wasm_reloc_size()->Increment(
- wasm_code->reloc_info().length());
- if (V8_UNLIKELY(native_module->log_code())) {
- GetWasmEngine()->LogWrapperCode(base::VectorOf(&wasm_code, 1));
- // Log the code immediately in the current isolate.
- GetWasmEngine()->LogOutstandingCodesForIsolate(isolate_);
- }
- }
+ wasm_code = cache->CompileWasmImportCallWrapper(
+ isolate_, native_module, kind, expected_sig, canonical_sig_id,
+ source_positions, expected_arity, resolved.suspend());
}
DCHECK_NOT_NULL(wasm_code);
diff --git a/src/wasm/wasm-code-pointer-table.cc b/src/wasm/wasm-code-pointer-table.cc
index c7b5ae3455bb1290651b5fcf7460a31772069d88..725cca9de5f59c7430c2c4f437c95342bb93d928 100644
--- a/src/wasm/wasm-code-pointer-table.cc
+++ b/src/wasm/wasm-code-pointer-table.cc
@@ -13,10 +13,7 @@ void WasmCodePointerTable::Initialize() { Base::Initialize(); }
void WasmCodePointerTable::TearDown() {
SweepSegments(0);
- // TODO(366007153): the WasmCode destructor sometimes doesn't get called and
- // we can have a leftover entry in the table. Re-enable the DCHECK once the
- // bug is fixed.
- // DCHECK(freelist_head_.load().is_empty());
+ DCHECK(freelist_head_.load().is_empty());
Base::TearDown();
}
diff --git a/src/wasm/wasm-import-wrapper-cache.cc b/src/wasm/wasm-import-wrapper-cache.cc
index 92b6470ce094a0c8b026ea4208a59c25384da3d4..0554f3d50a288b34bd4729a33ac523c4902d93e0 100644
--- a/src/wasm/wasm-import-wrapper-cache.cc
+++ b/src/wasm/wasm-import-wrapper-cache.cc
@@ -9,6 +9,8 @@
#include "src/codegen/assembler-inl.h"
#include "src/codegen/flush-instruction-cache.h"
#include "src/common/code-memory-access-inl.h"
+#include "src/compiler/wasm-compiler.h"
+#include "src/wasm/compilation-environment-inl.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/std-object-sizes.h"
#include "src/wasm/wasm-code-manager.h"
@@ -134,6 +136,41 @@ WasmCode* WasmImportWrapperCache::ModificationScope::AddWrapper(
return code;
}
+WasmCode* WasmImportWrapperCache::CompileWasmImportCallWrapper(
+ Isolate* isolate, NativeModule* native_module, ImportCallKind kind,
+ const FunctionSig* sig, uint32_t canonical_sig_index, bool source_positions,
+ int expected_arity, Suspend suspend) {
+ CompilationEnv env = CompilationEnv::ForModule(native_module);
+ WasmCompilationResult result = compiler::CompileWasmImportCallWrapper(
+ &env, kind, sig, source_positions, expected_arity, suspend);
+ WasmCode* wasm_code;
+ {
+ ModificationScope cache_scope(this);
+ CacheKey key(kind, canonical_sig_index, expected_arity, suspend);
+ // Now that we have the lock (in the form of the cache_scope), check
+ // again whether another thread has just created the wrapper.
+ wasm_code = cache_scope[key];
+ if (wasm_code) return wasm_code;
+
+ wasm_code = cache_scope.AddWrapper(key, std::move(result),
+ WasmCode::Kind::kWasmToJsWrapper);
+ }
+
+ // To avoid lock order inversion, code printing must happen after the
+ // end of the {cache_scope}.
+ wasm_code->MaybePrint();
+ isolate->counters()->wasm_generated_code_size()->Increment(
+ wasm_code->instructions().length());
+ isolate->counters()->wasm_reloc_size()->Increment(
+ wasm_code->reloc_info().length());
+ if (V8_UNLIKELY(native_module->log_code())) {
+ GetWasmEngine()->LogWrapperCode(base::VectorOf(&wasm_code, 1));
+ // Log the code immediately in the current isolate.
+ GetWasmEngine()->LogOutstandingCodesForIsolate(isolate);
+ }
+ return wasm_code;
+}
+
void WasmImportWrapperCache::LogForIsolate(Isolate* isolate) {
for (const auto& entry : codes_) {
entry.second->LogCode(isolate, "", -1); // No source URL, no ScriptId.
@@ -142,7 +179,7 @@ void WasmImportWrapperCache::LogForIsolate(Isolate* isolate) {
void WasmImportWrapperCache::Free(std::vector<WasmCode*>& wrappers) {
base::MutexGuard lock(&mutex_);
- if (entry_map_.empty() || wrappers.empty()) return;
+ if (codes_.empty() || wrappers.empty()) return;
// {WasmCodeAllocator::FreeCode()} wants code objects to be sorted.
std::sort(wrappers.begin(), wrappers.end(), [](WasmCode* a, WasmCode* b) {
return a->instruction_start() < b->instruction_start();
diff --git a/src/wasm/wasm-import-wrapper-cache.h b/src/wasm/wasm-import-wrapper-cache.h
index 4768e90d0d2332650eacf093932465262676d1c7..4124bd77059c9b59e691dcc9c2bda44efe9f4609 100644
--- a/src/wasm/wasm-import-wrapper-cache.h
+++ b/src/wasm/wasm-import-wrapper-cache.h
@@ -98,6 +98,11 @@ class WasmImportWrapperCache {
return iter->second;
}
+ WasmCode* CompileWasmImportCallWrapper(
+ Isolate* isolate, NativeModule* native_module, ImportCallKind kind,
+ const FunctionSig* sig, uint32_t canonical_sig_index,
+ bool source_positions, int expected_arity, Suspend suspend);
+
private:
std::unique_ptr<WasmCodeAllocator> code_allocator_;
mutable base::Mutex mutex_;
diff --git a/src/wasm/wasm-objects.cc b/src/wasm/wasm-objects.cc
index f6c100687a88d8fa269ce02506ab0a82b2c5ffbb..a34d48c24cb8cd478705ecb31664186bcd9ac4f3 100644
--- a/src/wasm/wasm-objects.cc
+++ b/src/wasm/wasm-objects.cc
@@ -19,7 +19,6 @@
#include "src/roots/roots-inl.h"
#include "src/utils/utils.h"
#include "src/wasm/code-space-access.h"
-#include "src/wasm/compilation-environment-inl.h"
#include "src/wasm/module-compiler.h"
#include "src/wasm/module-decoder.h"
#include "src/wasm/module-instantiate.h"
@@ -1919,28 +1918,9 @@ void WasmTrustedInstanceData::ImportWasmJSFunctionIntoTable(
} else if (UseGenericWasmToJSWrapper(kind, sig, resolved.suspend())) {
call_target = Builtins::EntryOf(Builtin::kWasmToJsWrapperAsm, isolate);
} else {
- wasm::CompilationEnv env = wasm::CompilationEnv::ForModule(native_module);
- wasm::WasmCompilationResult result = compiler::CompileWasmImportCallWrapper(
- &env, kind, sig, false, expected_arity, suspend);
- {
- wasm::WasmImportWrapperCache::ModificationScope cache_scope(cache);
- wasm::WasmImportWrapperCache::CacheKey key(kind, canonical_sig_id,
- expected_arity, suspend);
- wasm_code = cache_scope.AddWrapper(
- key, std::move(result), wasm::WasmCode::Kind::kWasmToJsWrapper);
- }
- // To avoid lock order inversion, code printing must happen after the
- // end of the {cache_scope}.
- wasm_code->MaybePrint();
- isolate->counters()->wasm_generated_code_size()->Increment(
- wasm_code->instructions().length());
- isolate->counters()->wasm_reloc_size()->Increment(
- wasm_code->reloc_info().length());
- if (V8_UNLIKELY(native_module->log_code())) {
- wasm::GetWasmEngine()->LogWrapperCode(base::VectorOf(&wasm_code, 1));
- // Log the code immediately in the current isolate.
- wasm::GetWasmEngine()->LogOutstandingCodesForIsolate(isolate);
- }
+ wasm_code = cache->CompileWasmImportCallWrapper(
+ isolate, native_module, kind, sig, canonical_sig_id, false,
+ expected_arity, suspend);
call_target = wasm_code->instruction_start();
}
diff --git a/test/cctest/wasm/test-wasm-import-wrapper-cache.cc b/test/cctest/wasm/test-wasm-import-wrapper-cache.cc
index e713648644e0d6dfc191f35d61a6e3be71ca6d38..57c5caa5eea3dd059348431270d58f48d8bb318e 100644
--- a/test/cctest/wasm/test-wasm-import-wrapper-cache.cc
+++ b/test/cctest/wasm/test-wasm-import-wrapper-cache.cc
@@ -42,9 +42,9 @@ TEST(CacheHit) {
int expected_arity = static_cast<int>(sig->parameter_count());
{
WasmCodeRefScope wasm_code_ref_scope;
- WasmCode* c1 = CompileImportWrapperForTest(
- module.get(), isolate->counters(), kind, sig, canonical_type_index,
- expected_arity, kNoSuspend);
+ WasmCode* c1 = CompileImportWrapperForTest(isolate, module.get(), kind, sig,
+ canonical_type_index,
+ expected_arity, kNoSuspend);
CHECK_NOT_NULL(c1);
CHECK_EQ(WasmCode::Kind::kWasmToJsWrapper, c1->kind());
@@ -79,8 +79,8 @@ TEST(CacheMissSig) {
uint32_t canonical_type_index2 =
GetTypeCanonicalizer()->AddRecursiveGroup(sig2);
- WasmCode* c1 = CompileImportWrapperForTest(module.get(), isolate->counters(),
- kind, sig1, canonical_type_index1,
+ WasmCode* c1 = CompileImportWrapperForTest(isolate, module.get(), kind, sig1,
+ canonical_type_index1,
expected_arity1, kNoSuspend);
CHECK_NOT_NULL(c1);
@@ -105,8 +105,8 @@ TEST(CacheMissKind) {
uint32_t canonical_type_index =
GetTypeCanonicalizer()->AddRecursiveGroup(sig);
- WasmCode* c1 = CompileImportWrapperForTest(module.get(), isolate->counters(),
- kind1, sig, canonical_type_index,
+ WasmCode* c1 = CompileImportWrapperForTest(isolate, module.get(), kind1, sig,
+ canonical_type_index,
expected_arity, kNoSuspend);
CHECK_NOT_NULL(c1);
@@ -134,8 +134,8 @@ TEST(CacheHitMissSig) {
uint32_t canonical_type_index2 =
GetTypeCanonicalizer()->AddRecursiveGroup(sig2);
- WasmCode* c1 = CompileImportWrapperForTest(module.get(), isolate->counters(),
- kind, sig1, canonical_type_index1,
+ WasmCode* c1 = CompileImportWrapperForTest(isolate, module.get(), kind, sig1,
+ canonical_type_index1,
expected_arity1, kNoSuspend);
CHECK_NOT_NULL(c1);
@@ -146,8 +146,8 @@ TEST(CacheHitMissSig) {
CHECK_NULL(c2);
- c2 = CompileImportWrapperForTest(module.get(), isolate->counters(), kind,
- sig2, canonical_type_index2, expected_arity2,
+ c2 = CompileImportWrapperForTest(isolate, module.get(), kind, sig2,
+ canonical_type_index2, expected_arity2,
kNoSuspend);
CHECK_NE(c1, c2);
diff --git a/test/cctest/wasm/wasm-run-utils.cc b/test/cctest/wasm/wasm-run-utils.cc
index a8b93c9ebd53cf10b0c833139358254127d5deca..fb4f0c3c39aaba90729dfd1f6b9ed3f90d25ed1d 100644
--- a/test/cctest/wasm/wasm-run-utils.cc
+++ b/test/cctest/wasm/wasm-run-utils.cc
@@ -99,7 +99,7 @@ TestingModuleBuilder::TestingModuleBuilder(
kNoSuspend);
if (import_wrapper == nullptr) {
import_wrapper = CompileImportWrapperForTest(
- native_module_, isolate_->counters(), kind, sig, canonical_type_index,
+ isolate_, native_module_, kind, sig, canonical_type_index,
static_cast<int>(sig->parameter_count()), kNoSuspend);
}

View File

@@ -1,8 +1,6 @@
#!/bin/bash
if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
BUILD_TYPE="win"
elif [ "`uname`" == "Darwin" ]; then
if [ "`uname`" == "Darwin" ]; then
if [ -z "$MAS_BUILD" ]; then
BUILD_TYPE="darwin"
else
@@ -48,47 +46,23 @@ cp_if_exist() {
move_src_dirs_if_exist() {
mkdir src_artifacts
dirs=("src/out/Default/gen/node_headers" \
"src/out/Default/overlapped-checker" \
"src/out/Default/ffmpeg" \
"src/out/Default/hunspell_dictionaries" \
"src/third_party/electron_node" \
"src/third_party/nan" \
"src/cross-arch-snapshots" \
"src/buildtools/mac" \
"src/buildtools/third_party/libc++" \
"src/buildtools/third_party/libc++abi" \
"src/third_party/libc++" \
"src/third_party/libc++abi" \
"src/out/Default/obj/buildtools/third_party" \
"src/v8/tools/builtins-pgo")
# Only do this for linux build type, this folder
# exists for windows builds on linux hosts but we do
# not need it
if [ "$BUILD_TYPE" == "linux" ]; then
dirs+=('src/build/linux')
fi
# llvm-build is the host toolchain, for windows we need
# a different toolchain so no point copying this one
if [ "$BUILD_TYPE" != "win" ]; then
dirs+=('src/third_party/llvm-build')
fi
# On windows we should clean up two symlinks that aren't
# compatible with the windows test runner
if [ "$BUILD_TYPE" == "win" ]; then
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/eslint
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/.bin/eslint
rm -f src/third_party/electron_node/out/tools/bin/python
# Also need to copy electron.lib to node.lib for native module testing purposes
mkdir -p src/out/Default/gen/node_headers/Release
cp src/out/Default/electron.lib src/out/Default/gen/node_headers/Release/node.lib
fi
for dir in "${dirs[@]}"
for dir in \
src/out/Default/gen/node_headers \
src/out/Default/overlapped-checker \
src/out/Default/ffmpeg \
src/out/Default/hunspell_dictionaries \
src/third_party/electron_node \
src/third_party/nan \
src/cross-arch-snapshots \
src/third_party/llvm-build \
src/build/linux \
src/buildtools/mac \
src/buildtools/third_party/libc++ \
src/buildtools/third_party/libc++abi \
src/third_party/libc++ \
src/third_party/libc++abi \
src/out/Default/obj/buildtools/third_party \
src/v8/tools/builtins-pgo
do
if [ -d "$dir" ]; then
mkdir -p src_artifacts/$(dirname $dir)
@@ -96,7 +70,7 @@ move_src_dirs_if_exist() {
fi
done
tar -C src_artifacts -cf src_artifacts.tar .
tar -C src_artifacts -cf src_artifacts.tar ./
echo Storing src_artifacts.tar
mv src_artifacts.tar $SRC_ARTIFACTS

Some files were not shown because too many files have changed in this diff Show More