Compare commits


18 Commits

Author SHA1 Message Date

Samuel Attard f05f1ea820 build: the win32 tests should build ia32 native modules 2019-11-29 23:48:24 -08:00
Samuel Attard 7b26847157 build: windows zip manifest is ia32 not x86 2019-11-29 18:23:46 -08:00
Samuel Attard 466967c686 build: do not run electron hooks on the checkout-and-save-cache step group 2019-11-29 12:10:09 -08:00
Samuel Attard 2be256c5fd build: ensure we use the bash find on windows 2019-11-29 12:04:20 -08:00
Samuel Attard cdbf50973e build: ensure the Release directory exists before copying the lib file 2019-11-29 11:21:00 -08:00
Samuel Attard 2b848eb781 build: copy the node.lib file for the tests 2019-11-29 01:54:57 -08:00
Samuel Attard 69585bc75e build: wipe large files on windows two 2019-11-29 01:38:49 -08:00
Samuel Attard f6ace9c345 build: do not try and start xfvb on windows 2019-11-29 01:21:58 -08:00
Samuel Attard 149768e3dd build: windows 32 bit on circle 2019-11-29 00:34:02 -08:00
Samuel Attard 0f0103d8ad build: run windows tests 2019-11-29 00:30:57 -08:00
Samuel Attard 99f837b841 build: re-enable sccache 2019-11-28 18:23:08 -08:00
Samuel Attard 5a61a3e2ee build: support checking windows manifest on circle 2019-11-28 16:10:48 -08:00
Samuel Attard 0cf46dfb46 build: disable sccache 2019-11-28 14:46:52 -08:00
Samuel Attard 2ed95fa5d8 build: use vpython for the ninja summary 2019-11-28 14:29:38 -08:00
Samuel Attard 812b9ccc44 build: enable sccache on windows circle 2019-11-28 02:34:48 -08:00
Samuel Attard 96026e1b74 Force the .exe suffix for ninja calls 2019-11-28 01:11:38 -08:00
Samuel Attard 52998c6b61 build: put depot_tools first on windows 2019-11-28 00:51:22 -08:00
Samuel Attard ddf5d7f5b7 build: experiment with circleci windows support 2019-11-27 18:37:18 -08:00
2827 changed files with 125951 additions and 188701 deletions

View File

@@ -1 +0,0 @@
config-staging

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,34 +0,0 @@
const cp = require('child_process');
const fs = require('fs-extra');
const path = require('path');
const yaml = require('js-yaml');

const STAGING_DIR = path.resolve(__dirname, '..', 'config-staging');

// Recursively copy every .yml file into the staging directory, expanding
// YAML anchors/aliases (noRefs) so the CircleCI CLI can pack plain documents.
function copyAndExpand (dir = './') {
  const absDir = path.resolve(__dirname, dir);
  const targetDir = path.resolve(STAGING_DIR, dir);
  if (!fs.existsSync(targetDir)) {
    fs.mkdirSync(targetDir);
  }
  for (const file of fs.readdirSync(absDir)) {
    if (!file.endsWith('.yml')) {
      if (fs.statSync(path.resolve(absDir, file)).isDirectory()) {
        copyAndExpand(path.join(dir, file));
      }
      continue;
    }
    fs.writeFileSync(path.resolve(targetDir, file), yaml.dump(yaml.load(fs.readFileSync(path.resolve(absDir, file), 'utf8')), {
      noRefs: true,
    }));
  }
}

// pathExistsSync is the synchronous check; the promise returned by the async
// pathExists would always be truthy here.
if (fs.pathExistsSync(STAGING_DIR)) fs.removeSync(STAGING_DIR);
copyAndExpand();

const output = cp.spawnSync(process.env.CIRCLECI_BINARY || 'circleci', ['config', 'pack', STAGING_DIR]);
fs.writeFileSync(path.resolve(STAGING_DIR, 'built.yml'), output.stdout.toString());
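For orientation, a hedged sketch of how a packing script like this might be run locally. The script filename below is hypothetical; only the `CIRCLECI_BINARY` override and the output path come from the code above.

```bash
# Hypothetical local invocation (adjust the script path to where the file actually lives)
export CIRCLECI_BINARY="$(command -v circleci)"   # optional override read by the script
node build-config.js
head ../config-staging/built.yml                  # packed output, relative to the script's directory
```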

View File

@@ -1,51 +0,0 @@
executor:
  name: linux-docker
  size: medium
steps:
  - checkout:
      path: src/electron
  - run:
      name: Setup third_party Depot Tools
      command: |
        # "depot_tools" has to be checked out into "//third_party/depot_tools" so pylint.py can find a "pylintrc" file.
        git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git src/third_party/depot_tools
        echo 'export PATH="$PATH:'"$PWD"'/src/third_party/depot_tools"' >> $BASH_ENV
  - run:
      name: Download GN Binary
      command: |
        chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
        gn_version="$(curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/DEPS?format=TEXT" | base64 -d | grep gn_version | head -n1 | cut -d\' -f4)"
        cipd ensure -ensure-file - -root . \<<-CIPD
        \$ServiceURL https://chrome-infra-packages.appspot.com/
        @Subdir src/buildtools/linux64
        gn/gn/linux-amd64 $gn_version
        CIPD
        echo 'export CHROMIUM_BUILDTOOLS_PATH="'"$PWD"'/src/buildtools"' >> $BASH_ENV
  - run:
      name: Download clang-format Binary
      command: |
        chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
        sha1_path='buildtools/linux64/clang-format.sha1'
        curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/${sha1_path}?format=TEXT" | base64 -d > "src/${sha1_path}"
        download_from_google_storage.py --no_resume --no_auth --bucket chromium-clang-format -s "src/${sha1_path}"
  - run:
      name: Run Lint
      command: |
        # gn.py tries to find a gclient root folder starting from the current dir.
        # When it fails and returns "None" path, the whole script fails. Let's "fix" it.
        touch .gclient
        # Another option would be to checkout "buildtools" inside the Electron checkout,
        # but then we would lint its contents (at least gn format), and it doesn't pass it.
        cd src/electron
        node script/yarn install --frozen-lockfile
        node script/yarn lint
  - run:
      name: Run Script Typechecker
      command: |
        cd src/electron
        node script/yarn tsc -p tsconfig.script.json

View File

@@ -1,10 +0,0 @@
{
  "name": "@electron/circleci-config",
  "version": "0.0.0",
  "private": true,
  "license": "MIT",
  "dependencies": {
    "fs-extra": "^10.1.0",
    "js-yaml": "^4.1.0"
  }
}

View File

@@ -1,43 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

argparse@^2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
  integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==

fs-extra@^10.1.0:
  version "10.1.0"
  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
  integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==
  dependencies:
    graceful-fs "^4.2.0"
    jsonfile "^6.0.1"
    universalify "^2.0.0"

graceful-fs@^4.1.6, graceful-fs@^4.2.0:
  version "4.2.10"
  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
  integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==

js-yaml@^4.1.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
  integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
  dependencies:
    argparse "^2.0.1"

jsonfile@^6.0.1:
  version "6.1.0"
  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
  integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
  dependencies:
    universalify "^2.0.0"
  optionalDependencies:
    graceful-fs "^4.1.6"

universalify@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
  integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==

View File

@@ -3,6 +3,5 @@
set -e
mkdir -p ~/.ssh
echo "github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl
github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=
github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=" >> ~/.ssh/known_hosts
echo "|1|B3r+7aO0/x90IdefihIjxIoJrrk=|OJddGDfhbuLFc1bUyy84hhIw57M= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
|1|rGlEvW55DtzNZp+pzw9gvyOyKi4=|LLWr+7qlkAlw3YGGVfLHHxB/kR0= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==" >> ~/.ssh/known_hosts

View File

@@ -1,4 +0,0 @@
---
Checks: '-modernize-use-nullptr'
InheritParentConfig: true
...

View File

@@ -1,58 +0,0 @@
# Electron Dev on Codespaces
Welcome to the Codespaces Electron Developer Environment.
## Quick Start
Upon creation of your codespace you should have [build tools](https://github.com/electron/build-tools) installed and an initialized gclient checkout of Electron. To build Electron, run the following command.
```bash
e build
```
The initial build will take ~8 minutes. Incremental builds are substantially quicker. If you pull changes from upstream that touch either the `patches` folder or the `DEPS` file, you will have to run `e sync` to keep your checkout up to date, as shown below.
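For example (a quick sketch using only the commands mentioned above):

```bash
# After pulling upstream changes that touch patches/ or DEPS
e sync    # re-sync dependencies and re-apply patches
e build   # incremental rebuild
```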
## Directory Structure
Because Codespaces doesn't map cleanly onto gclient-based checkouts, the directory structure is slightly unusual: there are two locations for the `electron` checkout that both map to the same files under the hood.
```graphql
# Primary gclient checkout container
/workspaces/gclient/*
src/* - # Chromium checkout
electron - # Electron checkout
# Symlinked Electron checkout (identical to the above)
/workspaces/electron
```
## Goma
If you are a maintainer [with Goma access](../docs/development/goma.md), it should be automatically configured and authenticated when you spin up a new Codespaces instance. You can validate this by running `e d goma_auth info` or by checking that your build-tools configuration has a goma mode of `cluster`, for example as sketched below.
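A quick way to check both (a sketch; the config path matches what `.devcontainer/on-create-command.sh` elsewhere in this diff writes and may differ on other setups):

```bash
# Confirm Goma authentication
e d goma_auth info
# Inspect the goma mode recorded in the build-tools config ("cluster" vs "cache-only")
grep '"goma"' ~/.electron_build_tools/configs/evm.testing.json
```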
## Running Electron
You can run Electron in a few ways. If you just want to see if it launches:
```bash
# Enter an interactive JS prompt headlessly
xvfb-run e start -i
```
But if you want to actually see Electron you will need to use the built-in VNC capability. If you click "Ports" in Codespaces and then open the `VNC web client` forwarded port, you should see a web-based VNC portal in your browser. When you are asked for a password, use `builduser`.
Once in the VNC UI you can open `Applications -> System -> XTerm` which will open a VNC based terminal app and then you can run `e start` like normal and Electron will open in your VNC session.
## Running Tests
You run tests via build-tools and `xvfb`.
```bash
# Run all tests
xvfb-run e test
# Run the main process tests
xvfb-run e test --runners=main
# Run the old remote tests
xvfb-run e test --runners=remote
```

View File

@@ -1,66 +0,0 @@
{
  "dockerComposeFile": "docker-compose.yml",
  "service": "buildtools",
  "onCreateCommand": ".devcontainer/on-create-command.sh",
  "updateContentCommand": ".devcontainer/update-content-command.sh",
  "workspaceFolder": "/workspaces/gclient/src/electron",
  "forwardPorts": [8088, 6080, 5901],
  "portsAttributes": {
    "8088": {
      "label": "Goma Control Panel",
      "onAutoForward": "silent"
    },
    "6080": {
      "label": "VNC web client (noVNC)",
      "onAutoForward": "silent"
    },
    "5901": {
      "label": "VNC TCP port",
      "onAutoForward": "silent"
    }
  },
  "hostRequirements": {
    "storage": "128gb",
    "cpus": 16
  },
  "remoteUser": "builduser",
  "customizations": {
    "codespaces": {
      "openFiles": [
        ".devcontainer/README.md"
      ]
    },
    "vscode": {
      "extensions": [
        "joeleinbinder.mojom-language",
        "rafaelmaiolla.diff",
        "surajbarkale.ninja",
        "ms-vscode.cpptools",
        "mutantdino.resourcemonitor",
        "dbaeumer.vscode-eslint",
        "shakram02.bash-beautify",
        "marshallofsound.gnls-electron",
        "CircleCI.circleci"
      ],
      "settings": {
        "editor.tabSize": 2,
        "bashBeautify.tabSize": 2,
        "typescript.tsdk": "node_modules/typescript/lib",
        "[gn]": {
          "editor.formatOnSave": true
        },
        "[javascript]": {
          "editor.codeActionsOnSave": {
            "source.fixAll.eslint": true
          }
        },
        "[typescript]": {
          "editor.codeActionsOnSave": {
            "source.fixAll.eslint": true
          }
        },
        "javascript.preferences.quoteStyle": "single",
        "typescript.preferences.quoteStyle": "single"
      }
    }
  }
}

View File

@@ -1,19 +0,0 @@
version: '3'
services:
  buildtools:
    image: ghcr.io/electron/devcontainer:3d8d44d0f15b05bef6149e448f9cc522111847e9
    volumes:
      - ..:/workspaces/gclient/src/electron:cached
      - /var/run/docker.sock:/var/run/docker.sock
    command: /bin/sh -c "while sleep 1000; do :; done"
    user: builduser
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp:unconfined

View File

@@ -1,82 +0,0 @@
#!/bin/bash
set -eo pipefail
buildtools=$HOME/.electron_build_tools
gclient_root=/workspaces/gclient
buildtools_configs=/workspaces/buildtools-configs
export PATH="$PATH:$buildtools/src"
# Create the persisted buildtools config folder
mkdir -p $buildtools_configs
mkdir -p $gclient_root/.git-cache
rm -f $buildtools/configs
ln -s $buildtools_configs $buildtools/configs
# Write the gclient config if it does not already exist
if [ ! -f $gclient_root/.gclient ]; then
echo "Creating gclient config"
echo "solutions = [
{ \"name\" : \"src/electron\",
\"url\" : \"https://github.com/electron/electron\",
\"deps_file\" : \"DEPS\",
\"managed\" : False,
\"custom_deps\" : {
},
\"custom_vars\": {},
},
]
" >$gclient_root/.gclient
fi
# Write the default buildtools config file if it does
# not already exist
if [ ! -f $buildtools/configs/evm.testing.json ]; then
echo "Creating build-tools testing config"
write_config() {
echo "
{
\"goma\": \"$1\",
\"root\": \"/workspaces/gclient\",
\"remotes\": {
\"electron\": {
\"origin\": \"https://github.com/electron/electron.git\"
}
},
\"gen\": {
\"args\": [
\"import(\\\"//electron/build/args/testing.gn\\\")\",
\"import(\\\"/home/builduser/.electron_build_tools/third_party/goma.gn\\\")\"
],
\"out\": \"Testing\"
},
\"env\": {
\"CHROMIUM_BUILDTOOLS_PATH\": \"/workspaces/gclient/src/buildtools\",
\"GIT_CACHE_PATH\": \"/workspaces/gclient/.git-cache\"
},
\"\$schema\": \"file:///home/builduser/.electron_build_tools/evm-config.schema.json\"
}
" >$buildtools/configs/evm.testing.json
}
# Start out as cache only
write_config cache-only
e use testing
# Attempt to auth to the goma service via codespaces tokens
# if it works we can use the goma cluster
export NOTGOMA_CODESPACES_TOKEN=$GITHUB_TOKEN
if e d goma_auth login; then
echo "$GITHUB_USER has GOMA access - switching to cluster mode"
write_config cluster
fi
else
echo "build-tools testing config already exists"
# Re-auth with the goma cluster regardless.
NOTGOMA_CODESPACES_TOKEN=$GITHUB_TOKEN e d goma_auth login || true
fi

View File

@@ -1,10 +0,0 @@
#!/bin/bash
set -eo pipefail
buildtools=$HOME/.electron_build_tools
export PATH="$PATH:$buildtools/src"
# Sync latest
e d gclient sync --with_branch_heads --with_tags

View File

@@ -1,2 +1,3 @@
*
!tools/xvfb-init.sh
!build/install-build-deps.sh

View File

@@ -4,3 +4,4 @@
APPVEYOR_CLOUD_TOKEN=
CIRCLE_TOKEN=
ELECTRON_GITHUB_TOKEN=
VSTS_TOKEN=

View File

@@ -1,5 +1,4 @@
{
"root": true,
"extends": "standard",
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
@@ -7,39 +6,39 @@
"browser": true
},
"rules": {
"semi": ["error", "always"],
"no-var": "error",
"no-unused-vars": "off",
"guard-for-in": "error",
"no-unused-vars": 0,
"no-global-assign": 0,
"@typescript-eslint/no-unused-vars": ["error", {
"vars": "all",
"args": "after-used",
"ignoreRestSiblings": true
"ignoreRestSiblings": false
}],
"prefer-const": ["error", {
"destructuring": "all"
}],
"standard/no-callback-literal": "off"
"standard/no-callback-literal": "off",
"node/no-deprecated-api": 0
},
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module"
},
"globals": {
"standardScheme": "readonly"
},
"overrides": [
{
"files": "*.ts",
"files": "*.js",
"rules": {
"no-undef": "off",
"no-redeclare": "off",
"@typescript-eslint/no-redeclare": ["error"],
"no-use-before-define": "off"
"@typescript-eslint/no-unused-vars": "off"
}
},
{
"files": "*.d.ts",
"rules": {
"no-useless-constructor": "off",
"@typescript-eslint/no-unused-vars": "off"
"no-useless-constructor": "off",
"@typescript-eslint/no-unused-vars": "off"
}
}
]

View File

@@ -1,5 +0,0 @@
# Atom --> Electron rename
d9321f4df751fa32813fab1b6387bbd61bd681d0
34c4c8d5088fa183f56baea28809de6f2a427e02
# Enable JS Semicolons
5d657dece4102e5e5304d42e8004b6ad64c0fcda

27
.gitattributes vendored
View File

@@ -1,30 +1,3 @@
# `git apply` and friends don't understand CRLF, even on windows. Force those
# files to be checked out with LF endings even if core.autocrlf is true.
*.patch text eol=lf
patches/**/.patches merge=union
# Source code and markdown files should always use LF as line ending.
*.c text eol=lf
*.cc text eol=lf
*.cpp text eol=lf
*.csv text eol=lf
*.grd text eol=lf
*.grdp text eol=lf
*.gn text eol=lf
*.gni text eol=lf
*.h text eol=lf
*.html text eol=lf
*.idl text eol=lf
*.in text eol=lf
*.js text eol=lf
*.json text eol=lf
*.json5 text eol=lf
*.md text eol=lf
*.mm text eol=lf
*.mojom text eol=lf
*.proto text eol=lf
*.py text eol=lf
*.ps1 text eol=lf
*.sh text eol=lf
*.ts text eol=lf
*.txt text eol=lf

23
.github/CODEOWNERS vendored
View File

@@ -3,17 +3,20 @@
# https://help.github.com/articles/about-codeowners
# https://git-scm.com/docs/gitignore
# Most stuff in here is owned by the Community & Safety WG...
/.github/* @electron/wg-community
# ...except the Admin WG maintains this file.
/.github/CODEOWNERS @electron/wg-admin
# Upgrades WG
/patches/ @electron/patch-owners
/patches/ @electron/wg-upgrades
DEPS @electron/wg-upgrades
# Releases WG
/docs/breaking-changes.md @electron/wg-releases
/npm/ @electron/wg-releases
/script/release @electron/wg-releases
# Docs & Tooling WG
/default_app/ @electron/wg-docs-tools
/docs/ @electron/wg-docs-tools
# Security WG
/lib/browser/devtools.ts @electron/wg-security
/lib/browser/guest-view-manager.ts @electron/wg-security
/lib/browser/rpc-server.ts @electron/wg-security
/lib/renderer/security-warnings.ts @electron/wg-security
# Releases WG
/npm/ @electron/wg-releases
/script/release @electron/wg-releases

58
.github/ISSUE_TEMPLATE/Bug_report.md vendored Normal file
View File

@@ -0,0 +1,58 @@
---
name: Bug report
about: Create a report to help us improve Electron
---
<!-- As an open source project with a dedicated but small maintainer team, it can sometimes take a long time for issues to be addressed so please be patient and we will get back to you as soon as we can.
-->
### Preflight Checklist
<!-- Please ensure you've completed the following steps by replacing [ ] with [x]-->
* [ ] I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/master/CONTRIBUTING.md) for this project.
* [ ] I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/master/CODE_OF_CONDUCT.md) that this project adheres to.
* [ ] I have searched the issue tracker for an issue that matches the one I want to file, without success.
### Issue Details
* **Electron Version:**
* <!-- (output of `node_modules/.bin/electron --version`) e.g. 4.0.3 -->
* **Operating System:**
* <!-- (Platform and Version) e.g. macOS 10.13.6 / Windows 10 (1803) / Ubuntu 18.04 x64 -->
* **Last Known Working Electron version:**
* <!-- (if applicable) e.g. 3.1.0 -->
### Expected Behavior
<!-- A clear and concise description of what you expected to happen. -->
### Actual Behavior
<!-- A clear and concise description of what actually happened. -->
### To Reproduce
<!--
Your best chance of getting this bug looked at quickly is to provide an example.
-->
<!--
For bugs that can be encapsulated in a small experiment, you can use Electron Fiddle (https://github.com/electron/fiddle) to publish your example to a GitHub Gist and link it in your bug report.
-->
<!--
If Fiddle is insufficient to produce an example, please provide an example REPOSITORY that can be cloned and run. You can fork electron-quick-start (https://github.com/electron/electron-quick-start) and include a link to the branch with your changes.
-->
<!--
If you provide a URL, please list the commands required to clone/setup/run your repo e.g.
```sh
$ git clone $YOUR_URL -b $BRANCH
$ npm install
$ npm start || electron .
```
-->
### Screenshots
<!-- If applicable, add screenshots to help explain your problem. -->
### Additional Information
<!-- Add any other context about the problem here. -->

View File

@@ -0,0 +1,27 @@
---
name: Feature request
about: Suggest an idea for Electron
---
<!-- As an open source project with a dedicated but small maintainer team, it can sometimes take a long time for issues to be addressed so please be patient and we will get back to you as soon as we can.
-->
### Preflight Checklist
<!-- Please ensure you've completed the following steps by replacing [ ] with [x]-->
* [ ] I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/master/CONTRIBUTING.md) for this project.
* [ ] I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/master/CODE_OF_CONDUCT.md) that this project adheres to.
* [ ] I have searched the issue tracker for a feature request that matches the one I want to file, without success.
### Problem Description
<!-- Is your feature request related to a problem? Please add a clear and concise description of what the problem is. -->
### Proposed Solution
<!-- Describe the solution you'd like in a clear and concise manner -->
### Alternatives Considered
<!-- A clear and concise description of any alternative solutions or features you've considered. -->
### Additional Information
<!-- Add any other context about the problem here. -->

View File

@@ -1,81 +0,0 @@
name: Bug Report
description: Report an Electron bug
title: "[Bug]: "
labels: "bug :beetle:"
projects: ["electron/90"]
body:
- type: checkboxes
attributes:
label: Preflight Checklist
description: Please ensure you've completed all of the following.
options:
- label: I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/main/CONTRIBUTING.md) for this project.
required: true
- label: I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/main/CODE_OF_CONDUCT.md) that this project adheres to.
required: true
- label: I have searched the [issue tracker](https://www.github.com/electron/electron/issues) for a bug report that matches the one I want to file, without success.
required: true
- type: input
attributes:
label: Electron Version
description: |
What version of Electron are you using?
Note: Please only report issues for [currently supported versions of Electron](https://www.electronjs.org/docs/latest/tutorial/support#currently-supported-versions).
placeholder: 17.0.0
validations:
required: true
- type: dropdown
attributes:
label: What operating system are you using?
options:
- Windows
- macOS
- Ubuntu
- Other Linux
- Other (specify below)
validations:
required: true
- type: input
attributes:
label: Operating System Version
description: What operating system version are you using? On Windows, click Start button > Settings > System > About. On macOS, click the Apple Menu > About This Mac. On Linux, use lsb_release or uname -a.
placeholder: "e.g. Windows 10 version 1909, macOS Catalina 10.15.7, or Ubuntu 20.04"
validations:
required: true
- type: dropdown
attributes:
label: What arch are you using?
options:
- x64
- ia32
- arm64 (including Apple Silicon)
- Other (specify below)
validations:
required: true
- type: input
attributes:
label: Last Known Working Electron version
description: What is the last version of Electron this worked in, if applicable?
placeholder: 16.0.0
- type: textarea
attributes:
label: Expected Behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: textarea
attributes:
label: Actual Behavior
description: A clear description of what actually happens.
validations:
required: true
- type: input
attributes:
label: Testcase Gist URL
description: If you can reproduce the issue in a standalone test case, please use [Electron Fiddle](https://github.com/electron/fiddle) to create one and to publish it as a [GitHub gist](https://gist.github.com) and put the gist URL here. This is **the best way** to ensure this issue is triaged quickly.
placeholder: https://gist.github.com/...
- type: textarea
attributes:
label: Additional Information
description: If your problem needs further explanation, or if the issue you're seeing cannot be reproduced in a gist, please add more information here.

View File

@@ -1,40 +0,0 @@
name: Feature Request
description: Suggest an idea for Electron
title: "[Feature Request]: "
labels: "enhancement :sparkles:"
body:
- type: checkboxes
attributes:
label: Preflight Checklist
description: Please ensure you've completed all of the following.
options:
- label: I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/main/CONTRIBUTING.md) for this project.
required: true
- label: I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/main/CODE_OF_CONDUCT.md) that this project adheres to.
required: true
- label: I have searched the [issue tracker](https://www.github.com/electron/electron/issues) for a feature request that matches the one I want to file, without success.
required: true
- type: textarea
attributes:
label: Problem Description
description: Please add a clear and concise description of the problem you are seeking to solve with this feature request.
validations:
required: true
- type: textarea
attributes:
label: Proposed Solution
description: Describe the solution you'd like in a clear and concise manner.
validations:
required: true
- type: textarea
attributes:
label: Alternatives Considered
description: A clear and concise description of any alternative solutions or features you've considered.
validations:
required: true
- type: textarea
attributes:
label: Additional Information
description: Add any other context about the problem here.
validations:
required: false

View File

@@ -0,0 +1,25 @@
---
name: Mac App Store Private API Rejection
about: Your app was rejected from the Mac App Store for using private APIs
---
<!-- As an open source project with a dedicated but small maintainer team, it can sometimes take a long time for issues to be addressed so please be patient and we will get back to you as soon as we can.
-->
### Preflight Checklist
<!-- Please ensure you've completed the following steps by replacing [ ] with [x]-->
* [ ] I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/master/CONTRIBUTING.md) for this project.
* [ ] I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/master/CODE_OF_CONDUCT.md) that this project adheres to.
### Issue Details
* **Electron Version:**
* <!-- (output of `node_modules/.bin/electron --version`) e.g. 4.0.3 -->
### Rejection Email
<!-- Paste the contents of your rejection email here, censoring any private information such as app names.-->
### Additional Information
<!-- Add any other context about the problem here. -->

View File

@@ -1,30 +0,0 @@
name: Report Mac App Store Private API Rejection
description: Your app was rejected from the Mac App Store for using private APIs
title: "[MAS Rejection]: "
body:
- type: checkboxes
attributes:
label: Preflight Checklist
description: Please ensure you've completed all of the following.
options:
- label: I have read the [Contributing Guidelines](https://github.com/electron/electron/blob/main/CONTRIBUTING.md) for this project.
required: true
- label: I agree to follow the [Code of Conduct](https://github.com/electron/electron/blob/main/CODE_OF_CONDUCT.md) that this project adheres to.
required: true
- type: input
attributes:
label: Electron Version
description: What version of Electron are you using?
placeholder: 12.0.0
validations:
required: true
- type: textarea
attributes:
label: Rejection Email
description: Paste the contents of your rejection email here, censoring any private information such as app names.
validations:
required: true
- type: textarea
attributes:
label: Additional Information
description: Add any other context about the problem here.

View File

@@ -0,0 +1,10 @@
---
name: Security report
about: Do not create an issue for security reports, send an email to security@electronjs.org
---
### Notice
**DO NOT** create an issue for security reports.
Send an email to: **security@electronjs.org**.

View File

@@ -1,10 +1,9 @@
#### Description of Change
<!--
Thank you for your Pull Request. Please provide a description above and review
the requirements below.
Contributors guide: https://github.com/electron/electron/blob/main/CONTRIBUTING.md
Contributors guide: https://github.com/electron/electron/blob/master/CONTRIBUTING.md
-->
#### Checklist
@@ -12,10 +11,11 @@ Contributors guide: https://github.com/electron/electron/blob/main/CONTRIBUTING.
- [ ] PR description included and stakeholders cc'd
- [ ] `npm test` passes
- [ ] tests are [changed or added](https://github.com/electron/electron/blob/main/docs/development/testing.md)
- [ ] relevant documentation, tutorials, templates and examples are changed or added
- [ ] [PR release notes](https://github.com/electron/clerk/blob/main/README.md) describe the change in a way relevant to app developers, and are [capitalized, punctuated, and past tense](https://github.com/electron/clerk/blob/main/README.md#examples).
- [ ] tests are [changed or added](https://github.com/electron/electron/blob/master/docs/development/testing.md)
- [ ] relevant documentation is changed or added
- [ ] PR title follows semantic [commit guidelines](https://github.com/electron/electron/blob/master/docs/development/pull-requests.md#commit-message-guidelines)
- [ ] [PR release notes](https://github.com/electron/clerk/blob/master/README.md) describe the change in a way relevant to app developers, and are [capitalized, punctuated, and past tense](https://github.com/electron/clerk/blob/master/README.md#examples).
#### Release Notes
Notes: <!-- Please add a one-line description for app developers to read in the release notes, or 'none' if no notes relevant to app developers. Examples and help on special cases: https://github.com/electron/clerk/blob/main/README.md#examples -->
Notes: <!-- Please add a one-line description for app developers to read in the release notes, or `no-notes` if no notes relevant to app developers. Examples and help on special cases: https://github.com/electron/clerk/blob/master/README.md#examples -->

18
.github/config.yml vendored
View File

@@ -2,7 +2,7 @@
newPRWelcomeComment: |
💖 Thanks for opening this pull request! 💖
We use [semantic commit messages](https://github.com/electron/electron/blob/main/docs/development/pull-requests.md#commit-message-guidelines) to streamline the release process. Before your pull request can be merged, you should **update your pull request title** to start with a semantic prefix.
We use [semantic commit messages](https://github.com/electron/electron/blob/master/docs/development/pull-requests.md#commit-message-guidelines) to streamline the release process. Before your pull request can be merged, you should **update your pull request title** to start with a semantic prefix.
Examples of commit messages with semantic prefixes:
@@ -12,9 +12,9 @@ newPRWelcomeComment: |
Things that will help get your PR across the finish line:
- Follow the JavaScript, C++, and Python [coding style](https://github.com/electron/electron/blob/main/docs/development/coding-style.md).
- Follow the JavaScript, C++, and Python [coding style](https://github.com/electron/electron/blob/master/docs/development/coding-style.md).
- Run `npm run lint` locally to catch formatting errors earlier.
- Document any user-facing changes you've made following the [documentation styleguide](https://github.com/electron/electron/blob/main/docs/styleguide.md).
- Document any user-facing changes you've made following the [documentation styleguide](https://github.com/electron/electron/blob/master/docs/styleguide.md).
- Include tests when adding/changing behavior.
- Include screenshots and animated GIFs whenever possible.
@@ -25,3 +25,15 @@ newPRWelcomeComment: |
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
Congrats on merging your first pull request! 🎉🎉🎉
# Users authorized to run manual trop backports
authorizedUsers:
- alexeykuzmin
- ckerr
- codebytere
- deepak1556
- jkleinsc
- MarshallOfSound
- miniak
- nornagon
- zcbenz

View File

@@ -1,165 +0,0 @@
name: Branch Created
on:
create:
permissions: {}
jobs:
release-branch-created:
name: Release Branch Created
if: ${{ github.event.ref_type == 'branch' && endsWith(github.event.ref, '-x-y') }}
permissions:
contents: read
pull-requests: write
repository-projects: write # Required for labels
runs-on: ubuntu-latest
steps:
- name: Determine Major Version
id: check-major-version
run: |
if [[ ${{ github.event.ref }} =~ ^([0-9]+)-x-y$ ]]; then
echo "MAJOR=${BASH_REMATCH[1]}" >> "$GITHUB_OUTPUT"
else
echo "Not a release branch: ${{ github.event.ref }}"
fi
- name: New Release Branch Tasks
if: ${{ steps.check-major-version.outputs.MAJOR }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: electron/electron
MAJOR: ${{ steps.check-major-version.outputs.MAJOR }}
NUM_SUPPORTED_VERSIONS: 3
run: |
PREVIOUS_MAJOR=$((MAJOR - 1))
UNSUPPORTED_MAJOR=$((MAJOR - NUM_SUPPORTED_VERSIONS - 1))
# Create new labels
gh label create $MAJOR-x-y --color 8d9ee8 || true
gh label create target/$MAJOR-x-y --color ad244f --description "PR should also be added to the \"${MAJOR}-x-y\" branch." || true
gh label create merged/$MAJOR-x-y --color 61a3c6 --description "PR was merged to the \"${MAJOR}-x-y\" branch." || true
gh label create in-flight/$MAJOR-x-y --color db69a6 || true
gh label create needs-manual-bp/$MAJOR-x-y --color 8b5dba || true
# Change color of old labels
gh label edit $UNSUPPORTED_MAJOR-x-y --color ededed || true
gh label edit target/$UNSUPPORTED_MAJOR-x-y --color ededed || true
gh label edit merged/$UNSUPPORTED_MAJOR-x-y --color ededed || true
gh label edit in-flight/$UNSUPPORTED_MAJOR-x-y --color ededed || true
gh label edit needs-manual-bp/$UNSUPPORTED_MAJOR-x-y --color ededed || true
# Add the new target label to any PRs which:
# * target the previous major
# * are in-flight for the previous major
# * need manual backport for the previous major
for PREVIOUS_MAJOR_LABEL in target/$PREVIOUS_MAJOR-x-y in-flight/$PREVIOUS_MAJOR-x-y needs-manual-bp/$PREVIOUS_MAJOR-x-y; do
PULL_REQUESTS=$(gh pr list --label $PREVIOUS_MAJOR_LABEL --jq .[].number --json number --limit 500)
if [[ $PULL_REQUESTS ]]; then
echo $PULL_REQUESTS | xargs -n 1 gh pr edit --add-label target/$MAJOR-x-y || true
fi
done
- name: Generate GitHub App token
if: ${{ steps.check-major-version.outputs.MAJOR }}
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.RELEASE_BOARD_GH_APP_CREDS }}
org: electron
- name: Create Release Project Board
if: ${{ steps.check-major-version.outputs.MAJOR }}
env:
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
MAJOR: ${{ steps.check-major-version.outputs.MAJOR }}
ELECTRON_ORG_ID: "O_kgDOAMybxg"
ELECTRON_REPO_ID: "R_kgDOAI8xSw"
TEMPLATE_PROJECT_ID: "PVT_kwDOAMybxs4AQvib"
run: |
# Copy template to create new project board
PROJECT_ID=$(gh api graphql -f query='mutation ($ownerId: ID!, $projectId: ID!, $title: String!) {
copyProjectV2(input: {
includeDraftIssues: true,
ownerId: $ownerId,
projectId: $projectId,
title: $title
}) {
projectV2 {
id
}
}
}' -f ownerId=$ELECTRON_ORG_ID -f projectId=$TEMPLATE_PROJECT_ID -f title="${MAJOR}-x-y" | jq -r '.data.copyProjectV2.projectV2.id')
# Make the new project public
gh api graphql -f query='mutation ($projectId: ID!) {
updateProjectV2(input: {
projectId: $projectId,
public: true,
}) {
projectV2 {
id
}
}
}' -f projectId=$PROJECT_ID
# Link the new project to the Electron repository
gh api graphql -f query='mutation ($projectId: ID!, $repositoryId: ID!) {
linkProjectV2ToRepository(input: {
projectId: $projectId,
repositoryId: $repositoryId
}) {
clientMutationId
}
}' -f projectId=$PROJECT_ID -f repositoryId=$ELECTRON_REPO_ID
# Get all draft issues on the new project board
gh api graphql -f query='query ($id: ID!) {
node(id: $id) {
... on ProjectV2 {
items(first: 100) {
nodes {
... on ProjectV2Item {
id
content {
... on DraftIssue { id title
body
}
}
}
}
}
}
}
}' -f id=$PROJECT_ID > issues.json
PROJECT_ITEMS=$(jq '.data.node.items.nodes[] | select(.content.id != null) | .id' issues.json)
#
# Do template replacement for draft issues
#
echo "{\"major\": $MAJOR, \"next-major\": $((MAJOR + 1)), \"prev-major\": $((MAJOR - 1))}" > variables.json
# npx mustache is annoyingly slow, so install mustache directly
yarn add -D mustache
for PROJECT_ITEM_ID in $PROJECT_ITEMS; do
# These are done with the raw output flag and sent to file to better retain formatting
jq -r ".data.node.items.nodes[] | select(.id == $PROJECT_ITEM_ID) | .content.title" issues.json > title.txt
jq -r ".data.node.items.nodes[] | select(.id == $PROJECT_ITEM_ID) | .content.body" issues.json > body.txt
./node_modules/.bin/mustache variables.json title.txt new_title.txt
./node_modules/.bin/mustache variables.json body.txt new_body.txt
# Only update draft issues which had content change when interpolated
if ! cmp --silent -- new_title.txt title.txt || ! cmp --silent -- new_body.txt body.txt; then
DRAFT_ISSUE_ID=$(jq ".data.node.items.nodes[] | select(.id == $PROJECT_ITEM_ID) | .content.id" issues.json)
gh api graphql -f query='mutation ($draftIssueId: ID!, $title: String!, $body: String!) {
updateProjectV2DraftIssue(input: {
draftIssueId: $draftIssueId,
title: $title,
body: $body
}) {
draftIssue {
id
}
}
}' -f draftIssueId=$DRAFT_ISSUE_ID -f title="$(cat new_title.txt)" -f body="$(cat new_body.txt)"
fi
done

View File

@@ -1,26 +0,0 @@
name: Issue Commented
on:
issue_comment:
types:
- created
permissions: {}
jobs:
issue-commented:
name: Remove blocked/need-repro on comment
if: ${{ contains(github.event.issue.labels.*.name, 'blocked/need-repro') && !contains(fromJSON('["MEMBER", "OWNER"]'), github.event.comment.author_association) }}
runs-on: ubuntu-latest
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
- name: Remove label
env:
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
ISSUE_URL: ${{ github.event.issue.html_url }}
run: |
gh issue edit $ISSUE_URL --remove-label 'blocked/need-repro'

View File

@@ -1,70 +0,0 @@
name: Issue Labeled
on:
issues:
types: [labeled]
permissions: # added using https://github.com/step-security/secure-workflows
contents: read
jobs:
issue-labeled-blocked:
name: blocked/* label added
if: startsWith(github.event.label.name, 'blocked/')
runs-on: ubuntu-latest
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
org: electron
- name: Set status
uses: github/update-project-action@2d475e08804f11f4022df7e21f5816531e97cb64 # v2
with:
github_token: ${{ steps.generate-token.outputs.token }}
organization: electron
project_number: 90
content_id: ${{ github.event.issue.node_id }}
field: Status
value: 🛑 Blocked
issue-labeled-blocked-need-repro:
name: blocked/need-repro label added
if: github.event.label.name == 'blocked/need-repro'
permissions:
issues: write # for actions-cool/issues-helper to update issues
runs-on: ubuntu-latest
steps:
- name: Check if comment needed
id: check-for-comment
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: electron/electron
run: |
set -eo pipefail
COMMENT_COUNT=$(gh issue view ${{ github.event.issue.number }} --comments --json comments | jq '[ .comments[] | select(.author.login == "github-actions" or .authorAssociation == "OWNER" or .authorAssociation == "MEMBER") | select(.body | startswith("<!-- blocked/need-repro -->")) ] | length')
if [[ $COMMENT_COUNT -eq 0 ]]; then
echo "SHOULD_COMMENT=1" >> "$GITHUB_OUTPUT"
fi
- name: Generate GitHub App token
if: ${{ steps.check-for-comment.outputs.SHOULD_COMMENT }}
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
- name: Create comment
if: ${{ steps.check-for-comment.outputs.SHOULD_COMMENT }}
uses: actions-cool/issues-helper@275328970dbc3bfc3bc43f5fe741bf3638300c0a # v3.3.3
with:
actions: 'create-comment'
token: ${{ steps.generate-token.outputs.token }}
body: |
<!-- blocked/need-repro -->
Hello @${{ github.event.issue.user.login }}. Thanks for reporting this and helping to make Electron better!
Would it be possible for you to make a standalone testcase with only the code necessary to reproduce the issue? For example, [Electron Fiddle](https://www.electronjs.org/fiddle) is a great tool for making small test cases and makes it easy to publish your test case to a [gist](https://gist.github.com) that Electron maintainers can use.
Stand-alone test cases make fixing issues go more smoothly: they ensure everyone's looking at the same issue, they remove all unnecessary variables from the equation, and they can also provide the basis for automated regression tests.
Now adding the https://github.com/electron/electron/labels/blocked%2Fneed-repro label for this reason. After you make a test case, please link to it in a followup comment. This issue will be closed in 10 days if the above is not addressed.

View File

@@ -1,40 +0,0 @@
name: Issue Unlabeled
on:
issues:
types: [unlabeled]
permissions:
contents: read
jobs:
issue-unlabeled-blocked:
name: All blocked/* labels removed
if: startsWith(github.event.label.name, 'blocked/')
runs-on: ubuntu-latest
steps:
- name: Check for any blocked labels
id: check-for-blocked-labels
run: |
set -eo pipefail
BLOCKED_LABEL_COUNT=$(echo '${{ toJSON(github.event.issue.labels.*.name) }}' | jq '[ .[] | select(startswith("blocked/")) ] | length')
if [[ $BLOCKED_LABEL_COUNT -eq 0 ]]; then
echo "NOT_BLOCKED=1" >> "$GITHUB_OUTPUT"
fi
- name: Generate GitHub App token
if: ${{ steps.check-for-blocked-labels.outputs.NOT_BLOCKED }}
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
org: electron
- name: Set status
if: ${{ steps.check-for-blocked-labels.outputs.NOT_BLOCKED }}
uses: github/update-project-action@2d475e08804f11f4022df7e21f5816531e97cb64 # v2
with:
github_token: ${{ steps.generate-token.outputs.token }}
organization: electron
project_number: 90
content_id: ${{ github.event.issue.node_id }}
field: Status
value: 📥 Was Blocked

View File

@@ -1,30 +0,0 @@
name: Pull Request Labeled
on:
pull_request:
types: [labeled]
permissions:
contents: read
jobs:
pull-request-labeled-deprecation-review-complete:
name: deprecation-review/complete label added
if: github.event.label.name == 'deprecation-review/complete ✅'
runs-on: ubuntu-latest
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.RELEASE_BOARD_GH_APP_CREDS }}
org: electron
- name: Set status
uses: dsanders11/update-project-action@7ade91760df70df76770a238abee7a4869e01cf8
with:
github_token: ${{ steps.generate-token.outputs.token }}
organization: electron
project_number: 94
content_id: ${{ github.event.pull_request.node_id }}
field: Status
value: ✅ Reviewed

View File

@@ -1,54 +0,0 @@
name: Scorecards supply-chain security
on:
# Only the default branch is supported.
branch_protection_rule:
schedule:
- cron: '44 17 * * 0'
push:
branches: [ "main" ]
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecards analysis
runs-on: ubuntu-latest
permissions:
# Needed to upload the results to code-scanning dashboard.
security-events: write
# Used to receive a badge.
id-token: write
steps:
- name: "Checkout code"
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # tag=v3.1.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # tag=v2.1.2
with:
results_file: results.sarif
results_format: sarif
# Publish the results for public repositories to enable scorecard badges. For more details, see
# https://github.com/ossf/scorecard-action#publishing-results.
# For private repositories, `publish_results` will automatically be set to `false`, regardless
# of the value entered here.
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # tag=v3.1.2
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@959cbb7472c4d4ad70cdfe6f4976053fe48ab394 # tag=v2.1.27
with:
sarif_file: results.sarif

View File

@@ -1,26 +0,0 @@
name: "Check Semantic Commit"
on:
pull_request:
types:
- opened
- edited
- synchronize
permissions:
contents: read
jobs:
main:
permissions:
pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs
statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR
name: Validate PR Title
runs-on: ubuntu-latest
steps:
- name: semantic-pull-request
uses: amannn/action-semantic-pull-request@01d5fd8a8ebb9aafe902c40c53f0f4744f7381eb # tag: v5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: false

View File

@@ -1,52 +0,0 @@
name: 'Close stale issues'
on:
workflow_dispatch:
schedule:
# 1:30am every day
- cron: '30 1 * * *'
permissions: {}
jobs:
stale:
runs-on: ubuntu-latest
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
- uses: actions/stale@5ebf00ea0e4c1561e9b43a292ed34424fb1d4578 # tag: v6.0.1
with:
repo-token: ${{ steps.generate-token.outputs.token }}
days-before-stale: 90
days-before-close: 30
stale-issue-label: stale
operations-per-run: 1750
stale-issue-message: >
This issue has been automatically marked as stale. **If this issue is still affecting you, please leave any comment** (for example, "bump"), and we'll keep it open. If you have any new additional information—in particular, if this is still reproducible in the [latest version of Electron](https://www.electronjs.org/releases/stable) or in the [beta](https://www.electronjs.org/releases/beta)—please include it with your comment!
close-issue-message: >
This issue has been closed due to inactivity, and will not be monitored. If this is a bug and you can reproduce this issue on a [supported version of Electron](https://www.electronjs.org/docs/latest/tutorial/electron-timelines#timeline) please open a new issue and include instructions for reproducing the issue.
exempt-issue-labels: "discussion,security \U0001F512,enhancement :sparkles:,status/confirmed"
only-pr-labels: not-a-real-label
pending-repro:
runs-on: ubuntu-latest
if: ${{ always() }}
needs: stale
steps:
- name: Generate GitHub App token
uses: electron/github-app-auth-action@cc6751b3b5e4edc5b9a4ad0a021ac455653b6dc8 # v1.0.0
id: generate-token
with:
creds: ${{ secrets.ISSUE_TRIAGE_GH_APP_CREDS }}
- uses: actions/stale@5ebf00ea0e4c1561e9b43a292ed34424fb1d4578 # tag: v6.0.1
with:
repo-token: ${{ steps.generate-token.outputs.token }}
days-before-stale: -1
days-before-close: 10
remove-stale-when-updated: false
stale-issue-label: blocked/need-repro
stale-pr-label: not-a-real-label
operations-per-run: 1750
close-issue-message: >
Unfortunately, without a way to reproduce this issue, we're unable to continue investigation. This issue has been closed and will not be monitored further. If you're able to provide a minimal test case that reproduces this issue on a [supported version of Electron](https://www.electronjs.org/docs/latest/tutorial/electron-timelines#timeline) please open a new issue and include instructions for reproducing the issue.

View File

@@ -1,74 +0,0 @@
name: Update AppVeyor Image
# Run cron daily Mon-Fri
on:
workflow_dispatch:
schedule:
- cron: '0 8 * * 1-5' # runs 8:00 every business day (see https://crontab.guru)
permissions:
contents: write
pull-requests: write
jobs:
bake-appveyor-image:
name: Bake AppVeyor Image
permissions:
contents: write
pull-requests: write # to create a new PR with updated Appveyor images
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
with:
fetch-depth: 0
- name: Yarn install
run: |
node script/yarn.js install --frozen-lockfile
- name: Set Repo for Commit
run: git config --global --add safe.directory $GITHUB_WORKSPACE
- name: Check AppVeyor Image
env:
APPVEYOR_TOKEN: ${{ secrets.APPVEYOR_TOKEN }}
run: |
node ./script/prepare-appveyor
if [ -f ./image_version.txt ]; then
echo "APPVEYOR_IMAGE_VERSION="$(cat image_version.txt)"" >> $GITHUB_ENV
rm image_version.txt
fi
- name: (Optionally) Update Appveyor Image
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
uses: mikefarah/yq@1c7dc0e88aad311c89889bc5ce5d8f96931a1bd0 # v4.27.2
with:
cmd: |
yq '.image = "${{ env.APPVEYOR_IMAGE_VERSION }}"' "appveyor.yml" > "appveyor2.yml"
yq '.image = "${{ env.APPVEYOR_IMAGE_VERSION }}"' "appveyor-woa.yml" > "appveyor-woa2.yml"
- name: (Optionally) Generate Commit Diff
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
run: |
diff -w -B appveyor.yml appveyor2.yml > appveyor.diff || true
patch -f appveyor.yml < appveyor.diff
rm appveyor2.yml appveyor.diff
- name: (Optionally) Generate Commit Diff for WOA
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
run: |
diff -w -B appveyor-woa.yml appveyor-woa2.yml > appveyor-woa.diff || true
patch -f appveyor-woa.yml < appveyor-woa.diff
rm appveyor-woa2.yml appveyor-woa.diff
- name: (Optionally) Commit and Pull Request
if: ${{ env.APPVEYOR_IMAGE_VERSION }}
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: 'build: update appveyor image to latest version'
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
signoff: false
branch: bump-appveyor-image
delete-branch: true
reviewers: electron/wg-releases
title: 'build: update appveyor image to latest version'
labels: semver/none,no-backport
body: |
This PR updates appveyor.yml to the latest baked image, ${{ env.APPVEYOR_IMAGE_VERSION }}.
Notes: none

30
.gitignore vendored
View File

@@ -17,6 +17,24 @@
*.xcodeproj
/.idea/
/dist/
/external_binaries/
/out/
/vendor/.gclient
/vendor/debian_jessie_mips64-sysroot/
/vendor/debian_stretch_amd64-sysroot/
/vendor/debian_stretch_arm-sysroot/
/vendor/debian_stretch_arm64-sysroot/
/vendor/debian_stretch_i386-sysroot/
/vendor/gcc-4.8.3-d197-n64-loongson/
/vendor/readme-gcc483-loongson.txt
/vendor/download/
/vendor/llvm-build/
/vendor/llvm/
/vendor/npm/
/vendor/python_26/
/vendor/native_mksnapshot
/vendor/LICENSES.chromium.html
/vendor/pyyaml
node_modules/
SHASUMS256.txt
**/package-lock.json
@@ -26,7 +44,6 @@ compile_commands.json
# npm package
/npm/dist
/npm/path.txt
/npm/checksums.json
.npmrc
@@ -38,19 +55,14 @@ electron.d.ts
spec/.hash
# Eslint Cache
.eslintcache*
.eslintcache
# Generated native addon files
/spec/fixtures/native-addon/echo/build/
/spec-main/fixtures/native-addon/echo/build/
# If someone runs tsc this is where stuff will end up
ts-gen
# Used to accelerate CI builds
.depshash
.depshash-target
# Used to accelerate builds after sync
patches/mtime-cache.json
spec/fixtures/logo.png
.depshash-target

6
.gitmodules vendored Normal file
View File

@@ -0,0 +1,6 @@
[submodule "vendor/requests"]
path = vendor/requests
url = https://github.com/kennethreitz/requests
[submodule "vendor/boto"]
path = vendor/boto
url = https://github.com/boto/boto.git

View File

@@ -1,4 +0,0 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
npm run precommit

View File

@@ -1,4 +0,0 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
npm run prepack

View File

@@ -1,6 +0,0 @@
{
  "default": false,
  "no-trailing-spaces": {
    "br_spaces": 0
  }
}

View File

@@ -1,3 +0,0 @@
{
"extends": "@electron/lint-roller/configs/markdownlint.json"
}

1
.nvmrc
View File

@@ -1 +0,0 @@
16

1420
BUILD.gn

File diff suppressed because it is too large

View File

@@ -1,135 +1,46 @@
# Code of Conduct
# Contributor Covenant Code of Conduct:
As a member project of the OpenJS Foundation, Electron uses [Contributor Covenant v2.0](https://contributor-covenant.org/version/2/0/code_of_conduct) as its code of conduct. The full text is included [below](#contributor-covenant-code-of-conduct) in English, and translations are available from the Contributor Covenant organisation:
## Our Pledge
* [contributor-covenant.org/translations](https://www.contributor-covenant.org/translations)
* [github.com/ContributorCovenant](https://github.com/ContributorCovenant/contributor_covenant/tree/release/content/version/2/0)
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Contributor Covenant Code of Conduct
## Our Standards
### Our Pledge
Examples of behavior that contributes to creating a positive environment include:
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
Examples of unacceptable behavior by participants include:
### Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
### Enforcement Responsibilities
## Our Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
### Scope
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
### Enforcement
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[coc@electronjs.org](mailto:coc@electronjs.org).
All complaints will be reviewed and investigated promptly and fairly.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [coc@electronjs.org](mailto:coc@electronjs.org). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
### Enforcement Guidelines
## Attribution
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
This Code of Conduct is adapted from the [Contributor-Covenant][homepage], version 1.4, available at [https://contributor-covenant.org/version/1/4][version]
#### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
#### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
#### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
#### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
### Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

View File

@@ -22,21 +22,21 @@ Issues are created [here](https://github.com/electron/electron/issues/new).
### Issue Closure
Bug reports will be closed if the issue has been inactive and the latest affected version no longer receives support. At the moment, Electron maintains its three latest major versions, with a new major version being released every 8 weeks. (For more information on Electron's release cadence, see [this blog post](https://electronjs.org/blog/8-week-cadence).)
Bug reports will be closed if the issue has been inactive and the latest affected version no longer receives support. At the moment, Electron maintains its three latest major versions, with a new major version being released every 12 weeks. (For more information on Electron's release cadence, see [this blog post](https://electronjs.org/blog/12-week-cadence).)
_If an issue has been closed and you still feel it's relevant, feel free to ping a maintainer or add a comment!_
### Languages
We accept issues in _any_ language.
When an issue is posted in a language besides English, it is acceptable and encouraged to post an English-translated copy as a reply.
We accept issues in *any* language.
When an issue is posted in a language besides English, it is acceptable and encouraged to post an English-translated copy as a reply.
Anyone may post the translated reply.
In most cases, a quick pass through translation software is sufficient.
Having the original text _as well as_ the translation can help mitigate translation errors.
Responses to posted issues may or may not be in the original language.
**Please note** that using non-English as an attempt to circumvent our [Code of Conduct](https://github.com/electron/electron/blob/main/CODE_OF_CONDUCT.md) will be an immediate, and possibly indefinite, ban from the project.
**Please note** that using non-English as an attempt to circumvent our [Code of Conduct](https://github.com/electron/electron/blob/master/CODE_OF_CONDUCT.md) will be an immediate, and possibly indefinite, ban from the project.
## [Pull Requests](https://electronjs.org/docs/development/pull-requests)
@@ -47,28 +47,24 @@ dependencies, and tools contained in the `electron/electron` repository.
* [Step 1: Fork](https://electronjs.org/docs/development/pull-requests#step-1-fork)
* [Step 2: Build](https://electronjs.org/docs/development/pull-requests#step-2-build)
* [Step 3: Branch](https://electronjs.org/docs/development/pull-requests#step-3-branch)
* [Making Changes](https://electronjs.org/docs/development/pull-requests#making-changes)
* [The Process of Making Changes](https://electronjs.org/docs/development/pull-requests#the-process-of-making-changes)
* [Step 4: Code](https://electronjs.org/docs/development/pull-requests#step-4-code)
* [Step 5: Commit](https://electronjs.org/docs/development/pull-requests#step-5-commit)
* [Commit message guidelines](https://electronjs.org/docs/development/pull-requests#commit-message-guidelines)
* [Step 6: Rebase](https://electronjs.org/docs/development/pull-requests#step-6-rebase)
* [Step 7: Test](https://electronjs.org/docs/development/pull-requests#step-7-test)
* [Step 8: Push](https://electronjs.org/docs/development/pull-requests#step-8-push)
* [Step 9: Opening the Pull Request](https://electronjs.org/docs/development/pull-requests#step-9-opening-the-pull-request)
* [Step 10: Discuss and Update](https://electronjs.org/docs/development/pull-requests#step-10-discuss-and-update)
* [Step 8: Opening the Pull Request](https://electronjs.org/docs/development/pull-requests#step-8-opening-the-pull-request)
* [Step 9: Discuss and Update](#step-9-discuss-and-update)
* [Approval and Request Changes Workflow](https://electronjs.org/docs/development/pull-requests#approval-and-request-changes-workflow)
* [Step 11: Landing](https://electronjs.org/docs/development/pull-requests#step-11-landing)
* [Step 10: Landing](https://electronjs.org/docs/development/pull-requests#step-10-landing)
* [Continuous Integration Testing](https://electronjs.org/docs/development/pull-requests#continuous-integration-testing)
### Dependencies Upgrades Policy
Dependencies in Electron's `package.json` or `yarn.lock` files should only be altered by maintainers. For security reasons, we will not accept PRs that alter our `package.json` or `yarn.lock` files. We invite contributors to make requests updating these files in our issue tracker. If the change is significantly complicated, draft PRs are welcome, with the understanding that these PRs will be closed in favor of a duplicate PR submitted by an Electron maintainer.
## Style Guides
See [Coding Style](https://electronjs.org/docs/development/coding-style) for information about which standards Electron adheres to in different parts of its codebase.
## Further Reading
For more in-depth guides on developing Electron, see
[/docs/development](/docs/development/README.md)

125
DEPS
View File

@@ -1,31 +1,32 @@
gclient_gn_args_from = 'src'
gclient_gn_args_file = 'src/build/config/gclient_args.gni'
gclient_gn_args = [
'build_with_chromium',
'checkout_android',
'checkout_android_native_support',
'checkout_libaom',
'checkout_nacl',
'checkout_oculus_sdk',
'checkout_openxr'
]
vars = {
'chromium_version':
'118.0.5991.0',
'dc9525d251bf30828899e4cd7161f6dc6507023f',
'node_version':
'v18.17.1',
'v12.13.0',
'nan_version':
'16fa32231e2ccd89d2804b3f765319128b20c4ac',
'squirrel.mac_version':
'0e5d146ba13101a1302d59ea6e6e0b3cace4ae38',
'reactiveobjc_version':
'74ab5baccc6f7202c8ac69a8d1e152c29dc1ea76',
'mantle_version':
'78d3966b3c331292ea29ec38661b25df0a245948',
'2ee313aaca52e2b478965ac50eb5082520380d1b',
'boto_version': 'f7574aa6cc2c819430c1f05e9a1a1a666ef8169b',
'pyyaml_version': '3.12',
'requests_version': 'e4d59bedfd3c7f4f254f4f5d036587bcd8152458',
'boto_git': 'https://github.com/boto',
'chromium_git': 'https://chromium.googlesource.com',
'electron_git': 'https://github.com/electron',
'nodejs_git': 'https://github.com/nodejs',
'requests_git': 'https://github.com/kennethreitz',
'yaml_git': 'https://github.com/yaml',
'squirrel_git': 'https://github.com/Squirrel',
'reactiveobjc_git': 'https://github.com/ReactiveCocoa',
'mantle_git': 'https://github.com/Mantle',
# The path of the sysroots.json file.
'sysroots_json_path': 'electron/script/sysroots.json',
# KEEP IN SYNC WITH utils.js FILE
'yarn_version': '1.15.2',
@@ -33,27 +34,27 @@ vars = {
# To be able to build clean Chromium from sources.
'apply_patches': True,
# To use an mtime cache for patched files to speed up builds.
'use_mtime_cache': True,
# Python interface to Amazon Web Services. Is used for releases only.
'checkout_boto': False,
# To allow in-house builds to checkout those manually.
'checkout_chromium': True,
'checkout_node': True,
'checkout_nan': True,
'checkout_pgo_profiles': True,
# It's only needed to parse the native tests configurations.
'checkout_pyyaml': False,
'use_rts': False,
'mac_xcode_version': 'default',
'generate_location_tags': False,
# Python "requests" module is used for releases only.
'checkout_requests': False,
# To allow running hooks without parsing the DEPS tree
'process_deps': True,
# It is always needed for normal Electron builds,
# but might be impossible for custom in-house builds.
'download_external_binaries': True,
'checkout_nacl':
False,
'checkout_libaom':
@@ -68,10 +69,6 @@ vars = {
False,
'checkout_android_native_support':
False,
'checkout_google_benchmark':
False,
'checkout_clang_tidy':
True,
}
deps = {
@@ -87,71 +84,67 @@ deps = {
'url': (Var("nodejs_git")) + '/node.git@' + (Var("node_version")),
'condition': 'checkout_node and process_deps',
},
'src/third_party/pyyaml': {
'src/electron/vendor/pyyaml': {
'url': (Var("yaml_git")) + '/pyyaml.git@' + (Var("pyyaml_version")),
'condition': 'checkout_pyyaml and process_deps',
},
'src/third_party/squirrel.mac': {
'url': Var("squirrel_git") + '/Squirrel.Mac.git@' + Var("squirrel.mac_version"),
'condition': 'process_deps',
'src/electron/vendor/boto': {
'url': Var('boto_git') + '/boto.git' + '@' + Var('boto_version'),
'condition': 'checkout_boto and process_deps',
},
'src/third_party/squirrel.mac/vendor/ReactiveObjC': {
'url': Var("reactiveobjc_git") + '/ReactiveObjC.git@' + Var("reactiveobjc_version"),
'condition': 'process_deps'
'src/electron/vendor/requests': {
'url': Var('requests_git') + '/requests.git' + '@' + Var('requests_version'),
'condition': 'checkout_requests and process_deps',
},
'src/third_party/squirrel.mac/vendor/Mantle': {
'url': Var("mantle_git") + '/Mantle.git@' + Var("mantle_version"),
'condition': 'process_deps',
}
}
pre_deps_hooks = [
{
'name': 'generate_mtime_cache',
'condition': '(checkout_chromium and apply_patches and use_mtime_cache) and process_deps',
'pattern': 'src/electron',
'action': [
'python3',
'src/electron/script/patches-mtime-cache.py',
'generate',
'--cache-file',
'src/electron/patches/mtime-cache.json',
'--patches-config',
'src/electron/patches/config.json',
],
},
]
hooks = [
{
'name': 'patch_chromium',
'condition': '(checkout_chromium and apply_patches) and process_deps',
'pattern': 'src/electron',
'action': [
'python3',
'python',
'src/electron/script/apply_all_patches.py',
'src/electron/patches/config.json',
],
},
{
'name': 'apply_mtime_cache',
'condition': '(checkout_chromium and apply_patches and use_mtime_cache) and process_deps',
'pattern': 'src/electron',
'name': 'electron_external_binaries',
'pattern': 'src/electron/script/update-external-binaries.py',
'condition': 'download_external_binaries',
'action': [
'python3',
'src/electron/script/patches-mtime-cache.py',
'apply',
'--cache-file',
'src/electron/patches/mtime-cache.json',
'src/electron/script/update-external-binaries.py',
],
},
{
'name': 'electron_npm_deps',
'pattern': 'src/electron/package.json',
'action': [
'python3',
'python',
'-c',
'import os, subprocess; os.chdir(os.path.join("src", "electron")); subprocess.check_call(["python3", "script/lib/npx.py", "yarn@' + (Var("yarn_version")) + '", "install", "--frozen-lockfile"]);',
'import os, subprocess; os.chdir(os.path.join("src", "electron")); subprocess.check_call(["python", "script/lib/npx.py", "yarn@' + (Var("yarn_version")) + '", "install", "--frozen-lockfile"]);',
],
},
{
'name': 'setup_boto',
'pattern': 'src/electron',
'condition': 'checkout_boto and process_deps',
'action': [
'python',
'-c',
'import os, subprocess; os.chdir(os.path.join("src", "electron", "vendor", "boto")); subprocess.check_call(["python", "setup.py", "build"]);',
],
},
{
'name': 'setup_requests',
'pattern': 'src/electron',
'condition': 'checkout_requests and process_deps',
'action': [
'python',
'-c',
'import os, subprocess; os.chdir(os.path.join("src", "electron", "vendor", "requests")); subprocess.check_call(["python", "setup.py", "build"]);',
],
},
]

1
ELECTRON_VERSION Normal file
View File

@@ -0,0 +1 @@
9.0.0-nightly.20191127

View File

@@ -1,5 +1,4 @@
Copyright (c) Electron contributors
Copyright (c) 2013-2020 GitHub Inc.
Copyright (c) 2013-2019 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

View File

@@ -1,22 +1,23 @@
[![Electron Logo](https://electronjs.org/images/electron-logo.svg)](https://electronjs.org)
[![CircleCI Build Status](https://circleci.com/gh/electron/electron/tree/main.svg?style=shield)](https://circleci.com/gh/electron/electron/tree/main)
[![AppVeyor Build Status](https://ci.appveyor.com/api/projects/status/4lggi9dpjc1qob7k/branch/main?svg=true)](https://ci.appveyor.com/project/electron-bot/electron-ljo26/branch/main)
[![Electron Discord Invite](https://img.shields.io/discord/745037351163527189?color=%237289DA&label=chat&logo=discord&logoColor=white)](https://discord.gg/electronjs)
:memo: Available Translations: 🇨🇳 🇧🇷 🇪🇸 🇯🇵 🇷🇺 🇫🇷 🇺🇸 🇩🇪.
View these docs in other languages on our [Crowdin](https://crowdin.com/project/electron) project.
[![CircleCI Build Status](https://circleci.com/gh/electron/electron/tree/master.svg?style=shield)](https://circleci.com/gh/electron/electron/tree/master)
[![AppVeyor Build Status](https://ci.appveyor.com/api/projects/status/4lggi9dpjc1qob7k/branch/master?svg=true)](https://ci.appveyor.com/project/electron-bot/electron-ljo26/branch/master)
[![devDependency Status](https://david-dm.org/electron/electron/dev-status.svg)](https://david-dm.org/electron/electron?type=dev)
:memo: Available Translations: 🇨🇳 🇹🇼 🇧🇷 🇪🇸 🇰🇷 🇯🇵 🇷🇺 🇫🇷 🇹🇭 🇳🇱 🇹🇷 🇮🇩 🇺🇦 🇨🇿 🇮🇹 🇵🇱.
View these docs in other languages at [electron/i18n](https://github.com/electron/i18n/tree/master/content/).
The Electron framework lets you write cross-platform desktop applications
using JavaScript, HTML and CSS. It is based on [Node.js](https://nodejs.org/) and
[Chromium](https://www.chromium.org) and is used by the [Atom
editor](https://github.com/atom/atom) and many other [apps](https://electronjs.org/apps).
Follow [@electronjs](https://twitter.com/electronjs) on Twitter for important
Follow [@ElectronJS](https://twitter.com/electronjs) on Twitter for important
announcements.
This project adheres to the Contributor Covenant
[code of conduct](https://github.com/electron/electron/tree/main/CODE_OF_CONDUCT.md).
[code of conduct](https://github.com/electron/electron/tree/master/CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report unacceptable
behavior to [coc@electronjs.org](mailto:coc@electronjs.org).
@@ -27,23 +28,15 @@ The preferred method is to install Electron as a development dependency in your
app:
```sh
npm install electron --save-dev
npm install electron --save-dev [--save-exact]
```
For more installation options and troubleshooting tips, see
[installation](docs/tutorial/installation.md). For info on how to manage Electron versions in your apps, see
The `--save-exact` flag is recommended for Electron prior to version 2, as it does not follow semantic
versioning. As of version 2.0.0, Electron follows semver, so you don't need the `--save-exact` flag. For info on how to manage Electron versions in your apps, see
[Electron versioning](docs/tutorial/electron-versioning.md).
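
As a quick, hypothetical illustration of what managing Electron versions means in practice (this sketch assumes it runs as an Electron app's main-process entry point; it is not taken from the docs above), the versions the running binary actually ships with can be read from `process.versions`:

```javascript
// Minimal sketch: log the Electron, Chromium and Node.js versions the running
// binary provides. `process.versions` is populated by Electron at runtime.
const { app } = require('electron')

app.whenReady().then(() => {
  console.log(`Electron: ${process.versions.electron}`)
  console.log(`Chromium: ${process.versions.chrome}`)
  console.log(`Node.js:  ${process.versions.node}`)
  app.quit()
})
```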
## Platform support
Each Electron release provides binaries for macOS, Windows, and Linux.
* macOS (Catalina and up): Electron provides 64-bit Intel and ARM binaries for macOS. Apple Silicon support was added in Electron 11.
* Windows (Windows 10 and up): Electron provides `ia32` (`x86`), `x64` (`amd64`), and `arm64` binaries for Windows. Windows on ARM support was added in Electron 5.0.8. Support for Windows 7, 8 and 8.1 was [removed in Electron 23, in line with Chromium's Windows deprecation policy](https://www.electronjs.org/blog/windows-7-to-8-1-deprecation-notice).
* Linux: The prebuilt binaries of Electron are built on Ubuntu 20.04. They have also been verified to work on:
* Ubuntu 14.04 and newer
* Fedora 24 and newer
* Debian 8 and newer
For more installation options and troubleshooting tips, see
[installation](docs/tutorial/installation.md).
## Quick start & Electron Fiddle
@@ -65,10 +58,13 @@ npm start
## Resources for learning Electron
* [electronjs.org/docs](https://electronjs.org/docs) - All of Electron's documentation
* [electron/fiddle](https://github.com/electron/fiddle) - A tool to build, run, and package small Electron experiments
* [electron/electron-quick-start](https://github.com/electron/electron-quick-start) - A very basic starter Electron app
* [electronjs.org/community#boilerplates](https://electronjs.org/community#boilerplates) - Sample starter apps created by the community
- [electronjs.org/docs](https://electronjs.org/docs) - All of Electron's documentation
- [electron/fiddle](https://github.com/electron/fiddle) - A tool to build, run, and package small Electron experiments
- [electron/electron-quick-start](https://github.com/electron/electron-quick-start) - A very basic starter Electron app
- [electronjs.org/community#boilerplates](https://electronjs.org/community#boilerplates) - Sample starter apps created by the community
- [electron/simple-samples](https://github.com/electron/simple-samples) - Small applications with ideas for taking them further
- [electron/electron-api-demos](https://github.com/electron/electron-api-demos) - An Electron app that teaches you how to use Electron
- [hokein/electron-sample-apps](https://github.com/hokein/electron-sample-apps) - Small demo apps for the various Electron APIs
## Programmatic usage
@@ -78,7 +74,7 @@ binary. Use this to spawn Electron from Node scripts:
```javascript
const electron = require('electron')
const proc = require('node:child_process')
const proc = require('child_process')
// will print something similar to /Users/maf/.../Electron
console.log(electron)
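// Illustrative continuation (an assumption, not taken from the README): the
// spawned binary is an ordinary child process, so its output and exit code can
// be observed with the usual child_process events. `--version` is used here as
// a hypothetical argument that makes the binary print its version and exit.
const versionCheck = proc.spawn(electron, ['--version'])
versionCheck.stdout.on('data', (data) => process.stdout.write(data))
versionCheck.on('close', (code) => console.log(`Electron exited with code ${code}`))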
@@ -89,15 +85,11 @@ const child = proc.spawn(electron)
### Mirrors
* [China](https://npmmirror.com/mirrors/electron/)
- [China](https://npm.taobao.org/mirrors/electron)
See the [Advanced Installation Instructions](https://www.electronjs.org/docs/latest/tutorial/installation#mirror) to learn how to use a custom mirror.
## Documentation Translations
## Documentation translations
We crowdsource translations for our documentation via [Crowdin](https://crowdin.com/project/electron).
We currently accept translations for Chinese (Simplified), French, German, Japanese, Portuguese,
Russian, and Spanish.
Find documentation translations in [electron/i18n](https://github.com/electron/i18n).
## Contributing
@@ -106,10 +98,10 @@ If you are interested in reporting/fixing issues and contributing directly to th
## Community
Info on reporting bugs, getting help, finding third-party tools and sample apps,
and more can be found on the [Community page](https://www.electronjs.org/community).
and more can be found in the [support document](docs/tutorial/support.md#finding-support).
## License
[MIT](https://github.com/electron/electron/blob/main/LICENSE)
[MIT](https://github.com/electron/electron/blob/master/LICENSE)
When using Electron logos, make sure to follow [OpenJS Foundation Trademark Policy](https://openjsf.org/wp-content/uploads/sites/84/2021/01/OpenJS-Foundation-Trademark-Policy-2021-01-12.docx.pdf).
When using the Electron or other GitHub logos, be sure to follow the [GitHub logo guidelines](https://github.com/logos).

View File

@@ -2,16 +2,11 @@
The Electron team and community take security bugs in Electron seriously. We appreciate your efforts to responsibly disclose your findings, and will make every effort to acknowledge your contributions.
To report a security issue, please use the GitHub Security Advisory ["Report a Vulnerability"](https://github.com/electron/electron/security/advisories/new) tab.
To report a security issue, email [security@electronjs.org](mailto:security@electronjs.org) and include the word "SECURITY" in the subject line.
The Electron team will send a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.
Report security bugs in third-party modules to the person or team maintaining the module. You can also report a vulnerability through the [npm contact form](https://www.npmjs.com/support) by selecting "I'm reporting a security vulnerability".
## The Electron Security Notification Process
For context on Electron's security notification process, please see the [Notifications](https://github.com/electron/governance/blob/main/wg-security/membership-and-notifications.md#notifications) section of the Security WG's [Membership and Notifications](https://github.com/electron/governance/blob/main/wg-security/membership-and-notifications.md) Governance document.
Report security bugs in third-party modules to the person or team maintaining the module. You can also report a vulnerability through the [Node Security Project](https://nodesecurity.io/report).
## Learning More About Security
To learn more about securing an Electron application, please see the [security tutorial](docs/tutorial/security.md).

View File

@@ -1,108 +0,0 @@
# The config is used to bake appveyor images, not for running CI jobs.
# The config expects the following environment variables to be set:
# - "APPVEYOR_BAKE_IMAGE" e.g. 'electron-99.0.4767.0'. Name of the image to be baked.
# Typically named after the Chromium version on which the image is built.
# This can be set dynamically in the prepare-appveyor script.
version: 1.0.{build}
build_cloud: electronhq-16-core
image: e-112.0.5607.0-vs2022
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: mocha-appveyor-reporter, tap
GOMA_FALLBACK_ON_AUTH_FAILURE: true
DEPOT_TOOLS_WIN_TOOLCHAIN: 0
PYTHONIOENCODING: UTF-8
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# init:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# - appveyor version
# - ps: $ErrorActionPreference = 'Stop'
# - ps: 'Write-Host "OS Build: $((Get-CimInstance Win32_OperatingSystem).BuildNumber)"'
# clone_folder: '%USERPROFILE%\image-bake-scripts'
# clone_script:
# - ps: Invoke-WebRequest "https://github.com/appveyor/build-images/archive/1f90d94e74c8243c909a09b994e527584dfcb838.zip" -OutFile "$env:temp\scripts.zip"
# - ps: Expand-Archive -Path "$env:temp\scripts.zip" -DestinationPath "$env:temp\scripts" -Force
# - ps: Copy-Item -Path "$env:temp\scripts\build-images-1f90d94e74c8243c909a09b994e527584dfcb838\scripts\Windows\*" -Destination $env:APPVEYOR_BUILD_FOLDER -Recurse
build_script:
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# - ps: .\init_server.ps1
# - ps: .\extend_system_volume.ps1
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# - appveyor version
# - ps: .\install_path_utils.ps1
# - ps: .\install_powershell_core.ps1
# - ps: .\install_powershell_get.ps1
# - ps: .\install_7zip.ps1
# - ps: .\install_chocolatey.ps1
# - ps: .\install_webpi.ps1
# - ps: .\install_nuget.ps1
# - ps: .\install_pstools.ps1
# - ps: .\install_git.ps1
# - ps: .\install_git_lfs.ps1
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# END LINES FOR COMPLETELY NEW IMAGE
- git config --global core.longpaths true
- ps: >-
if (-not (Test-Path -Path C:\projects\src)) {
New-Item -Path C:\projects\src -ItemType Directory
}
- cd C:\projects\
- git clone -q --branch=%APPVEYOR_REPO_BRANCH% https://github.com/electron/electron.git C:\projects\src\electron
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- update_depot_tools.bat
# Uncomment the following line if windows deps change
# - src\electron\script\setup-win-for-dev.bat
- >-
gclient config
--name "src\electron"
--unmanaged
%GCLIENT_EXTRA_ARGS%
"https://github.com/electron/electron"
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: Copy-Item -path .\.depshash -destination ..\.depshash
- ps: cd ..\..
- gclient sync --with_branch_heads --with_tags --nohooks
- ps: regsvr32 /s "C:\Program Files\Microsoft Visual Studio\2022\Community\DIA SDK\bin\amd64\msdia140.dll"
- ps: set vs2022_install="C:\Program Files\Microsoft Visual Studio\2022\Community"
# The following lines are needed when baking from a completely new image (eg MicrosoftWindowsServer:WindowsServer:2019-Datacenter:latest via image: base-windows-server2019)
# # Restart VM
# - ps: Start-Sleep -s 5; Restart-Computer
# - ps: Start-Sleep -s 5
# - cd %USERPROFILE%\image-bake-scripts
# - appveyor version
# - ps: .\optimize_dotnet_runtime.ps1
# - ps: .\disable_windows_background_services.ps1
# - ps: .\enforce_windows_firewall.ps1
# - ps: .\cleanup_windows.ps1
# END LINES FOR COMPLETELY NEW IMAGE
on_image_bake:
- ps: >-
echo "Baking image: $env:APPVEYOR_BAKE_IMAGE at dir $PWD"
- ps: Remove-Item -Recurse -Force C:\projects\depot_tools
- ps: Remove-Item -Recurse -Force C:\projects\src\electron
# Uncomment these lines and set APPVEYOR_RDP_PASSWORD in project settings to enable RDP after bake is done
# # on_finish:
# - ps: >-
# $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

View File

@@ -1,322 +0,0 @@
# NOTE IF CHANGING THIS FILE, ALSO APPLY THE CHANGE TO appveyor.yml
# IF APPLICABLE!!!!
#
#
# The config expects the following environment variables to be set:
# - "GN_CONFIG" Build type. One of {'testing', 'release'}.
# - "GN_EXTRA_ARGS" Additional gn arguments for a build config,
# e.g. 'target_cpu="x86"' to build for a 32bit platform.
# https://gn.googlesource.com/gn/+/main/docs/reference.md#var_target_cpu
# Don't forget to set up "NPM_CONFIG_ARCH" and "TARGET_ARCH" accordingly
# if you pass a custom value for 'target_cpu'.
# - "ELECTRON_RELEASE" Set it to '1' upload binaries on success.
# - "NPM_CONFIG_ARCH" E.g. 'x86'. Is used to build native Node.js modules.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "TARGET_ARCH" value.
# - "TARGET_ARCH" Choose from {'ia32', 'x64', 'arm', 'arm64'}.
# Is used in some publishing scripts, but does NOT affect the Electron binary.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "NPM_CONFIG_ARCH" value.
# - "UPLOAD_TO_STORAGE" Set it to '1' upload a release to the Azure bucket.
# Otherwise the release will be uploaded to the GitHub Releases.
# (The value is only checked if "ELECTRON_RELEASE" is defined.)
#
# The publishing scripts expect access tokens to be defined as env vars,
# but those are not covered here.
#
# AppVeyor docs on variables:
# https://www.appveyor.com/docs/environment-variables/
# https://www.appveyor.com/docs/build-configuration/#secure-variables
# https://www.appveyor.com/docs/build-configuration/#custom-environment-variables
version: 1.0.{build}
build_cloud: electronhq-16-core
image: e-118.0.5949.0
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
ELECTRON_ALSO_LOG_TO_STDERR: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: "@marshallofsound/mocha-appveyor-reporter, tap"
GOMA_FALLBACK_ON_AUTH_FAILURE: true
DEPOT_TOOLS_WIN_TOOLCHAIN: 1
DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL: "https://dev-cdn.electronjs.org/windows-toolchains/_"
GYP_MSVS_HASH_27370823e7: 28622d16b1
PYTHONIOENCODING: UTF-8
matrix:
- job_name: Build Arm on X64 Windows
- job_name: Test On Windows On Arm Hardware
job_depends_on: Build Arm on X64 Windows
APPVEYOR_BUILD_WORKER_IMAGE: base-woa
APPVEYOR_BUILD_WORKER_CLOUD: electronhq-woa
clone_script:
- ps: git clone -q $("--branch=" + $Env:APPVEYOR_REPO_BRANCH) $("https://github.com/" + $Env:APPVEYOR_REPO_NAME + ".git") $Env:APPVEYOR_BUILD_FOLDER
- ps: if (!$Env:APPVEYOR_PULL_REQUEST_NUMBER) {$("git checkout -qf " + $Env:APPVEYOR_REPO_COMMIT)}
- ps: if ($Env:APPVEYOR_PULL_REQUEST_NUMBER) {git fetch -q origin +refs/pull/$($Env:APPVEYOR_PULL_REQUEST_NUMBER)/head; git checkout -qf FETCH_HEAD}
clone_folder: C:\projects\src\electron
skip_branch_with_pr: true
# the first failed job cancels other jobs and fails entire build
matrix:
fast_finish: true
for:
- matrix:
only:
- job_name: Build Arm on X64 Windows
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "false"
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc-only change"
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "true"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- ps: Write-Host "Building $env:GN_CONFIG build"
- git config --global core.longpaths true
- ps: >-
if (Test-Path -Path "$pwd\depot_tools") {
Remove-Item -Recurse -Force $pwd\depot_tools
}
- ps: >-
if (Test-Path -Path "$pwd\build-tools") {
Remove-Item -Recurse -Force $pwd\build-tools
}
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: New-Item -Name depot_tools\.disable_auto_update -ItemType File
- depot_tools\bootstrap\win_tools.bat
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- ps: >-
if (Test-Path -Path "$pwd\src\electron") {
Remove-Item -Recurse -Force $pwd\src\electron
}
- ps: >-
if (Test-Path 'env:RAW_GOMA_AUTH') {
$env:GOMA_OAUTH2_CONFIG_FILE = "$pwd\.goma_oauth2_config"
$env:RAW_GOMA_AUTH | Set-Content $env:GOMA_OAUTH2_CONFIG_FILE
}
- git clone https://github.com/electron/build-tools.git
- cd build-tools
- npm install
- mkdir third_party
- ps: >-
node -e "require('./src/utils/goma.js').downloadAndPrepare({ gomaOneForAll: true })"
- ps: $env:GN_GOMA_FILE = node -e "console.log(require('./src/utils/goma.js').gnFilePath)"
- ps: $env:LOCAL_GOMA_DIR = node -e "console.log(require('./src/utils/goma.js').dir)"
- cd ..\..
- ps: .\src\electron\script\start-goma.ps1 -gomaDir $env:LOCAL_GOMA_DIR
- ps: >-
if (Test-Path 'env:RAW_GOMA_AUTH') {
$goma_login = python3 $env:LOCAL_GOMA_DIR\goma_auth.py info
if ($goma_login -eq 'Login as Fermi Planck') {
Write-warning "Goma authentication is correct";
} else {
Write-warning "WARNING!!!!!! Goma authentication is incorrect; please update Goma auth token.";
$host.SetShouldExit(1)
}
}
- ps: $env:CHROMIUM_BUILDTOOLS_PATH="$pwd\src\buildtools"
- ps: >-
if ($env:GN_CONFIG -ne 'release') {
$env:NINJA_STATUS="[%r processes, %f/%t @ %o/s : %es] "
}
- gclient config --name "src\electron" --unmanaged %GCLIENT_EXTRA_ARGS% "https://github.com/electron/electron"
# Patches are applied in the image bake. Check depshash to see if patches have changed.
- ps: $env:RUN_GCLIENT_SYNC="false"
- ps: $depshash_baked = Get-Content .\src\.depshash -Raw
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: cd ..\..
- ps: >-
if ($depshash_baked -ne $depshash) {
$env:RUN_GCLIENT_SYNC="true"
}
- if "%RUN_GCLIENT_SYNC%"=="true" ( gclient sync --with_branch_heads --with_tags ) else ( gclient runhooks )
- cd src
- ps: $env:PATH="$pwd\third_party\ninja;$env:PATH"
- set BUILD_CONFIG_PATH=//electron/build/args/%GN_CONFIG%.gn
- gn gen out/Default "--args=import(\"%BUILD_CONFIG_PATH%\") import(\"%GN_GOMA_FILE%\") %GN_EXTRA_ARGS% "
- gn check out/Default //electron:electron_lib
- gn check out/Default //electron:electron_app
- gn check out/Default //electron/shell/common/api:mojo
- if DEFINED GN_GOMA_FILE (ninja -j 300 -C out/Default electron:electron_app) else (ninja -C out/Default electron:electron_app)
- if "%GN_CONFIG%"=="testing" ( python C:\depot_tools\post_build_ninja_summary.py -C out\Default )
- gn gen out/ffmpeg "--args=import(\"//electron/build/args/ffmpeg.gn\") %GN_EXTRA_ARGS%"
- ninja -C out/ffmpeg electron:electron_ffmpeg_zip
- ninja -C out/Default electron:electron_dist_zip
- ninja -C out/Default shell_browser_ui_unittests
- gn desc out/Default v8:run_mksnapshot_default args > out/Default/default_mksnapshot_args
# Remove unused args from mksnapshot_args
- ps: >-
Get-Content out/Default/default_mksnapshot_args | Where-Object { -not $_.Contains('--turbo-profiling-input') -And -not $_.Contains('builtins-pgo') } | Set-Content out/Default/mksnapshot_args
- ninja -C out/Default electron:electron_mksnapshot_zip
- cd out\Default
- 7z a mksnapshot.zip mksnapshot_args gen\v8\embedded.S
- cd ..\..
- ninja -C out/Default electron:hunspell_dictionaries_zip
- ninja -C out/Default electron:electron_chromedriver_zip
- ninja -C out/Default electron:node_headers
- python3 %LOCAL_GOMA_DIR%\goma_ctl.py stat
- ps: >-
Get-CimInstance -Namespace root\cimv2 -Class Win32_product | Select vendor, description, @{l='install_location';e='InstallLocation'}, @{l='install_date';e='InstallDate'}, @{l='install_date_2';e='InstallDate2'}, caption, version, name, @{l='sku_number';e='SKUNumber'} | ConvertTo-Json | Out-File -Encoding utf8 -FilePath .\installed_software.json
- python3 electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- 7z a node_headers.zip out\Default\gen\node_headers
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
# Needed for msdia140.dll on 64-bit windows
$env:Path += ";$pwd\third_party\llvm-build\Release+Asserts\bin"
ninja -C out/Default electron:electron_symbols
}
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
python3 electron\script\zip-symbols.py
appveyor-retry appveyor PushArtifact out/Default/symbols.zip
} else {
# It's useful to have pdb files when debugging testing builds that are
# built on CI.
7z a pdb.zip out\Default\*.pdb
}
- python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.win.%TARGET_ARCH%.manifest
- ps: |
cd C:\projects\src
$missing_artifacts = $false
if ($env:SHOULD_SKIP_ARTIFACT_VALIDATION -eq 'true') {
Write-warning "Skipping artifact validation for doc-only $env:APPVEYOR_PROJECT_NAME"
} else {
$artifacts_to_validate = 'dist.zip','windows_toolchain_profile.json','shell_browser_ui_unittests.exe','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib','hunspell_dictionaries.zip'
foreach($artifact_name in $artifacts_to_validate) {
if ($artifact_name -eq 'ffmpeg.zip') {
$artifact_file = "out\ffmpeg\ffmpeg.zip"
} elseif (
$artifact_name -eq 'node_headers.zip') {
$artifact_file = $artifact_name
} else {
$artifact_file = "out\Default\$artifact_name"
}
if (-not(Test-Path $artifact_file)) {
Write-warning "$artifact_name is missing and cannot be added to artifacts"
$missing_artifacts = $true
}
}
}
if ($missing_artifacts) {
throw "Build failed due to missing artifacts"
}
deploy_script:
- cd electron
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\UPLOAD_TO_STORAGE) {
Write-Output "Uploading Electron release distribution to azure"
& python3 script\release\uploaders\upload.py --verbose --upload_to_storage
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python3 script\release\uploaders\upload.py --verbose
}
}
on_finish:
# Uncomment this line to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- cd C:\projects\src
- if exist out\Default\windows_toolchain_profile.json ( appveyor-retry appveyor PushArtifact out\Default\windows_toolchain_profile.json )
- if exist out\Default\dist.zip (appveyor-retry appveyor PushArtifact out\Default\dist.zip)
- if exist out\Default\shell_browser_ui_unittests.exe (appveyor-retry appveyor PushArtifact out\Default\shell_browser_ui_unittests.exe)
- if exist out\Default\chromedriver.zip (appveyor-retry appveyor PushArtifact out\Default\chromedriver.zip)
- if exist out\ffmpeg\ffmpeg.zip (appveyor-retry appveyor PushArtifact out\ffmpeg\ffmpeg.zip)
- if exist node_headers.zip (appveyor-retry appveyor PushArtifact node_headers.zip)
- if exist out\Default\mksnapshot.zip (appveyor-retry appveyor PushArtifact out\Default\mksnapshot.zip)
- if exist out\Default\hunspell_dictionaries.zip (appveyor-retry appveyor PushArtifact out\Default\hunspell_dictionaries.zip)
- if exist out\Default\electron.lib (appveyor-retry appveyor PushArtifact out\Default\electron.lib)
- ps: >-
if ((Test-Path "pdb.zip") -And ($env:GN_CONFIG -ne 'release')) {
appveyor-retry appveyor PushArtifact pdb.zip
}
- matrix:
only:
- job_name: Test On Windows On Arm Hardware
environment:
IGNORE_YARN_INSTALL_ERROR: 1
ELECTRON_TEST_RESULTS_DIR: junit
MOCHA_MULTI_REPORTERS: 'mocha-junit-reporter, tap'
MOCHA_REPORTER: mocha-multi-reporters
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc only change"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- mkdir out\Default
- cd ..
- ps: |
# Download build artifacts
$apiUrl = 'https://ci.appveyor.com/api'
$build_info = Invoke-RestMethod -Method Get -Uri "$apiUrl/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/builds/$env:APPVEYOR_BUILD_ID"
$artifacts_to_download = @('dist.zip','ffmpeg.zip','node_headers.zip','electron.lib')
foreach ($job in $build_info.build.jobs) {
if ($job.name -eq "Build Arm on X64 Windows") {
$jobId = $job.jobId
foreach($artifact_name in $artifacts_to_download) {
if ($artifact_name -eq 'shell_browser_ui_unittests.exe' -Or $artifact_name -eq 'electron.lib') {
$outfile = "src\out\Default\$artifact_name"
} else {
$outfile = $artifact_name
}
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/$artifact_name" -OutFile $outfile
}
# Uncomment the following lines to download the pdb.zip to show real stacktraces when crashes happen during testing
# Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/pdb.zip" -OutFile pdb.zip
# 7z x -y -osrc pdb.zip
}
}
- ps: |
$out_default_zips = @('dist.zip')
foreach($zip_name in $out_default_zips) {
7z x -y -osrc\out\Default $zip_name
}
- ps: 7z x -y -osrc\out\ffmpeg ffmpeg.zip
- ps: 7z x -y -osrc node_headers.zip
test_script:
# Workaround for https://github.com/appveyor/ci/issues/2420
- set "PATH=%PATH%;C:\Program Files\Git\mingw64\libexec\git-core"
- ps: |
cd src
New-Item .\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path .\out\Default\electron.lib -destination .\out\Default\gen\node_headers\Release\node.lib
- set npm_config_nodedir=%cd%\out\Default\gen\node_headers
- set npm_config_arch=arm64
- cd electron
# Explicitly set npm_config_arch because the .env doesn't persist
- ps: >-
if ($env:TARGET_ARCH -eq 'ia32') {
$env:npm_config_arch = "ia32"
}
- echo Running main test suite & node script/yarn test --runners=main --enable-logging --disable-features=CalculateNativeWinOcclusion
- cd ..
- echo Verifying non proprietary ffmpeg & python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg
on_finish:
# Uncomment these lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- if exist electron\electron.log ( appveyor-retry appveyor PushArtifact electron\electron.log )

View File

@@ -1,22 +1,18 @@
# NOTE IF CHANGING THIS FILE, ALSO APPLY THE CHANGE TO appveyor-woa.yml
# IF APPLICABLE!!!!
#
#
# The config expects the following environment variables to be set:
# - "GN_CONFIG" Build type. One of {'testing', 'release'}.
# - "GN_EXTRA_ARGS" Additional gn arguments for a build config,
# e.g. 'target_cpu="x86"' to build for a 32bit platform.
# https://gn.googlesource.com/gn/+/main/docs/reference.md#var_target_cpu
# Don't forget to set up "NPM_CONFIG_ARCH" and "TARGET_ARCH" accordingly
# https://gn.googlesource.com/gn/+/master/docs/reference.md#target_cpu
# Don't forget to set up "NPM_CONFIG_ARCH" and "TARGET_ARCH" accordingly
# if you pass a custom value for 'target_cpu'.
# - "ELECTRON_RELEASE" Set it to '1' upload binaries on success.
# - "NPM_CONFIG_ARCH" E.g. 'x86'. Is used to build native Node.js modules.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "TARGET_ARCH" value.
# - "TARGET_ARCH" Choose from {'ia32', 'x64', 'arm', 'arm64'}.
# - "TARGET_ARCH" Choose from {'ia32', 'x64', 'arm', 'arm64', 'mips64el'}.
# Is used in some publishing scripts, but does NOT affect the Electron binary.
# Must match 'target_cpu' passed to "GN_EXTRA_ARGS" and "NPM_CONFIG_ARCH" value.
# - "UPLOAD_TO_STORAGE" Set it to '1' upload a release to the Azure bucket.
# Otherwise the release will be uploaded to the GitHub Releases.
# - "UPLOAD_TO_S3" Set it to '1' upload a release to the S3 bucket.
# Otherwise the release will be uploaded to the Github Releases.
# (The value is only checked if "ELECTRON_RELEASE" is defined.)
#
# The publishing scripts expect access tokens to be defined as env vars,
@@ -27,296 +23,163 @@
# https://www.appveyor.com/docs/build-configuration/#secure-variables
# https://www.appveyor.com/docs/build-configuration/#custom-environment-variables
# Uncomment these lines to enable RDP
#on_finish:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
version: 1.0.{build}
build_cloud: electronhq-16-core
image: e-118.0.5949.0
build_cloud: libcc-20
image: vs2019-16.3-10.0.18362
environment:
GIT_CACHE_PATH: C:\Users\appveyor\libcc_cache
GIT_CACHE_PATH: C:\Users\electron\libcc_cache
ELECTRON_OUT_DIR: Default
ELECTRON_ENABLE_STACK_DUMPING: 1
ELECTRON_ALSO_LOG_TO_STDERR: 1
MOCHA_REPORTER: mocha-multi-reporters
MOCHA_MULTI_REPORTERS: "@marshallofsound/mocha-appveyor-reporter, tap"
GOMA_FALLBACK_ON_AUTH_FAILURE: true
DEPOT_TOOLS_WIN_TOOLCHAIN: 1
DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL: "https://dev-cdn.electronjs.org/windows-toolchains/_"
GYP_MSVS_HASH_27370823e7: 28622d16b1
PYTHONIOENCODING: UTF-8
MOCHA_MULTI_REPORTERS: mocha-appveyor-reporter, tap
notifications:
- provider: Webhook
url: https://electron-mission-control.herokuapp.com/rest/appveyor-hook
method: POST
headers:
x-mission-control-secret:
secure: 90BLVPcqhJPG7d24v0q/RRray6W3wDQ8uVQlQjOHaBWkw1i8FoA1lsjr2C/v1dVok+tS2Pi6KxDctPUkwIb4T27u4RhvmcPzQhVpfwVJAG9oNtq+yKN7vzHfg7k/pojEzVdJpQLzeJGcSrZu7VY39Q==
on_build_success: false
on_build_failure: true
on_build_status_changed: false
build_script:
- ps: >-
if(($env:APPVEYOR_PULL_REQUEST_HEAD_REPO_NAME -split "/")[0] -eq ($env:APPVEYOR_REPO_NAME -split "/")[0]) {
Write-warning "Skipping PR build for branch"; Exit-AppveyorBuild
} else {
node script/yarn.js install --frozen-lockfile
matrix:
- job_name: Build
- job_name: Test
job_depends_on: Build
clone_script:
- ps: git clone -q $("--branch=" + $Env:APPVEYOR_REPO_BRANCH) $("https://github.com/" + $Env:APPVEYOR_REPO_NAME + ".git") $Env:APPVEYOR_BUILD_FOLDER
- ps: if (!$Env:APPVEYOR_PULL_REQUEST_NUMBER) {$("git checkout -qf " + $Env:APPVEYOR_REPO_COMMIT)}
- ps: if ($Env:APPVEYOR_PULL_REQUEST_NUMBER) {git fetch -q origin +refs/pull/$($Env:APPVEYOR_PULL_REQUEST_NUMBER)/head; git checkout -qf FETCH_HEAD}
clone_folder: C:\projects\src\electron
skip_branch_with_pr: true
# the first failed job cancels other jobs and fails entire build
matrix:
fast_finish: true
for:
- matrix:
only:
- job_name: Build
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "false"
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc-only change"
$env:SHOULD_SKIP_ARTIFACT_VALIDATION = "true"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- ps: Write-Host "Building $env:GN_CONFIG build"
- git config --global core.longpaths true
- ps: >-
if (Test-Path -Path "$pwd\depot_tools") {
Remove-Item -Recurse -Force $pwd\depot_tools
}
- ps: >-
if (Test-Path -Path "$pwd\build-tools") {
Remove-Item -Recurse -Force $pwd\build-tools
}
- git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
- ps: New-Item -Name depot_tools\.disable_auto_update -ItemType File
- depot_tools\bootstrap\win_tools.bat
- ps: $env:PATH="$pwd\depot_tools;$env:PATH"
- ps: >-
if (Test-Path -Path "$pwd\src\electron") {
Remove-Item -Recurse -Force $pwd\src\electron
}
- ps: >-
if (Test-Path 'env:RAW_GOMA_AUTH') {
$env:GOMA_OAUTH2_CONFIG_FILE = "$pwd\.goma_oauth2_config"
$env:RAW_GOMA_AUTH | Set-Content $env:GOMA_OAUTH2_CONFIG_FILE
}
- git clone https://github.com/electron/build-tools.git
- cd build-tools
- npm install
- mkdir third_party
- ps: >-
node -e "require('./src/utils/goma.js').downloadAndPrepare({ gomaOneForAll: true })"
- ps: $env:GN_GOMA_FILE = node -e "console.log(require('./src/utils/goma.js').gnFilePath)"
- ps: $env:LOCAL_GOMA_DIR = node -e "console.log(require('./src/utils/goma.js').dir)"
- cd ..\..
- ps: .\src\electron\script\start-goma.ps1 -gomaDir $env:LOCAL_GOMA_DIR
- ps: >-
if (Test-Path 'env:RAW_GOMA_AUTH') {
$goma_login = python3 $env:LOCAL_GOMA_DIR\goma_auth.py info
if ($goma_login -eq 'Login as Fermi Planck') {
Write-warning "Goma authentication is correct";
} else {
Write-warning "WARNING!!!!!! Goma authentication is incorrect; please update Goma auth token.";
$host.SetShouldExit(1)
if ($(node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER --prBranch=$env:APPVEYOR_REPO_BRANCH;$LASTEXITCODE -eq 0)) {
Write-warning "Skipping build for doc only change"; Exit-AppveyorBuild
}
}
- echo "Building $env:GN_CONFIG build"
- git config --global core.longpaths true
- cd ..
- mkdir src
- ps: Move-Item $env:APPVEYOR_BUILD_FOLDER -Destination src\electron
- ps: $env:CHROMIUM_BUILDTOOLS_PATH="$pwd\src\buildtools"
- ps: $env:SCCACHE_PATH="$pwd\src\electron\external_binaries\sccache.exe"
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
$env:GCLIENT_EXTRA_ARGS="$env:GCLIENT_EXTRA_ARGS --custom-var=checkout_boto=True --custom-var=checkout_requests=True"
} else {
$env:NINJA_STATUS="[%r processes, %f/%t @ %o/s : %es] "
}
- >-
gclient config
--name "src\electron"
--unmanaged
%GCLIENT_EXTRA_ARGS%
"https://github.com/electron/electron"
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
gclient sync --with_branch_heads --with_tags --reset
} else {
cd src\electron
node script\generate-deps-hash.js
$depshash = Get-Content .\.depshash -Raw
$zipfile = "Z:\$depshash.7z"
cd ..\..
if (Test-Path -Path $zipfile) {
# file exists, unzip and then gclient sync
7z x -y $zipfile -mmt=30 -aoa
# update external binaries
python src/electron/script/update-external-binaries.py
} else {
# file does not exist, gclient sync, then zip
gclient sync --with_branch_heads --with_tags --reset
if ($env:TARGET_ARCH -ne 'ia32') {
# archive current source for future use
# only run on x64/woa to avoid contention saving
if ($(7z a $zipfile src -xr!android_webview -xr!electron -xr'!*\.git' -xr!third_party\WebKit\LayoutTests! -xr!third_party\blink\web_tests -xr!third_party\blink\perf_tests -slp -t7z -mmt=30;$LASTEXITCODE -ne 0)) {
Write-warning "Could not save source to shared drive; continuing anyway"
}
}
- ps: $env:CHROMIUM_BUILDTOOLS_PATH="$pwd\src\buildtools"
- ps: >-
if ($env:GN_CONFIG -ne 'release') {
$env:NINJA_STATUS="[%r processes, %f/%t @ %o/s : %es] "
}
- gclient config --name "src\electron" --unmanaged %GCLIENT_EXTRA_ARGS% "https://github.com/electron/electron"
# Patches are applied in the image bake. Check depshash to see if patches have changed.
- ps: $env:RUN_GCLIENT_SYNC="false"
- ps: $depshash_baked = Get-Content .\src\.depshash -Raw
- ps: cd src\electron
- ps: node script\generate-deps-hash.js
- ps: $depshash = Get-Content .\.depshash -Raw
- ps: cd ..\..
- ps: >-
if ($depshash_baked -ne $depshash) {
$env:RUN_GCLIENT_SYNC="true"
}
- if "%RUN_GCLIENT_SYNC%"=="true" ( gclient sync --with_branch_heads --with_tags ) else ( gclient runhooks )
- cd src
- ps: $env:PATH="$pwd\third_party\ninja;$env:PATH"
- set BUILD_CONFIG_PATH=//electron/build/args/%GN_CONFIG%.gn
- gn gen out/Default "--args=import(\"%BUILD_CONFIG_PATH%\") import(\"%GN_GOMA_FILE%\") %GN_EXTRA_ARGS% "
- gn check out/Default //electron:electron_lib
- gn check out/Default //electron:electron_app
- gn check out/Default //electron/shell/common/api:mojo
- if DEFINED GN_GOMA_FILE (ninja -j 300 -C out/Default electron:electron_app) else (ninja -C out/Default electron:electron_app)
- if "%GN_CONFIG%"=="testing" ( python C:\depot_tools\post_build_ninja_summary.py -C out\Default )
- gn gen out/ffmpeg "--args=import(\"//electron/build/args/ffmpeg.gn\") %GN_EXTRA_ARGS%"
- ninja -C out/ffmpeg electron:electron_ffmpeg_zip
- ninja -C out/Default electron:electron_dist_zip
- ninja -C out/Default shell_browser_ui_unittests
- gn desc out/Default v8:run_mksnapshot_default args > out/Default/default_mksnapshot_args
# Remove unused args from mksnapshot_args
- ps: >-
Get-Content out/Default/default_mksnapshot_args | Where-Object { -not $_.Contains('--turbo-profiling-input') -And -not $_.Contains('builtins-pgo') } | Set-Content out/Default/mksnapshot_args
- ninja -C out/Default electron:electron_mksnapshot_zip
- cd out\Default
- 7z a mksnapshot.zip mksnapshot_args gen\v8\embedded.S
- cd ..\..
- ninja -C out/Default electron:hunspell_dictionaries_zip
- ninja -C out/Default electron:electron_chromedriver_zip
- ninja -C out/Default electron:node_headers
- python3 %LOCAL_GOMA_DIR%\goma_ctl.py stat
- ps: >-
Get-CimInstance -Namespace root\cimv2 -Class Win32_product | Select vendor, description, @{l='install_location';e='InstallLocation'}, @{l='install_date';e='InstallDate'}, @{l='install_date_2';e='InstallDate2'}, caption, version, name, @{l='sku_number';e='SKUNumber'} | ConvertTo-Json | Out-File -Encoding utf8 -FilePath .\installed_software.json
- python3 electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- 7z a node_headers.zip out\Default\gen\node_headers
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
# Needed for msdia140.dll on 64-bit windows
$env:Path += ";$pwd\third_party\llvm-build\Release+Asserts\bin"
ninja -C out/Default electron:electron_symbols
}
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
python3 electron\script\zip-symbols.py
appveyor-retry appveyor PushArtifact out/Default/symbols.zip
} else {
# It's useful to have pdb files when debugging testing builds that are
# built on CI.
7z a pdb.zip out\Default\*.pdb
}
- python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.win.%TARGET_ARCH%.manifest
- ps: |
cd C:\projects\src
$missing_artifacts = $false
if ($env:SHOULD_SKIP_ARTIFACT_VALIDATION -eq 'true') {
Write-warning "Skipping artifact validation for doc-only $env:APPVEYOR_PROJECT_NAME"
} else {
$artifacts_to_validate = 'dist.zip','windows_toolchain_profile.json','shell_browser_ui_unittests.exe','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib','hunspell_dictionaries.zip'
foreach($artifact_name in $artifacts_to_validate) {
if ($artifact_name -eq 'ffmpeg.zip') {
$artifact_file = "out\ffmpeg\ffmpeg.zip"
} elseif (
$artifact_name -eq 'node_headers.zip') {
$artifact_file = $artifact_name
} else {
$artifact_file = "out\Default\$artifact_name"
}
if (-not(Test-Path $artifact_file)) {
Write-warning "$artifact_name is missing and cannot be added to artifacts"
$missing_artifacts = $true
}
}
}
if ($missing_artifacts) {
throw "Build failed due to missing artifacts"
}
deploy_script:
- cd electron
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\UPLOAD_TO_STORAGE) {
Write-Output "Uploading Electron release distribution to azure"
& python3 script\release\uploaders\upload.py --verbose --upload_to_storage
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python3 script\release\uploaders\upload.py --verbose
}
}
on_finish:
# Uncomment this line to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- cd C:\projects\src
- if exist out\Default\windows_toolchain_profile.json ( appveyor-retry appveyor PushArtifact out\Default\windows_toolchain_profile.json )
- if exist out\Default\dist.zip (appveyor-retry appveyor PushArtifact out\Default\dist.zip)
- if exist out\Default\shell_browser_ui_unittests.exe (appveyor-retry appveyor PushArtifact out\Default\shell_browser_ui_unittests.exe)
- if exist out\Default\chromedriver.zip (appveyor-retry appveyor PushArtifact out\Default\chromedriver.zip)
- if exist out\ffmpeg\ffmpeg.zip (appveyor-retry appveyor PushArtifact out\ffmpeg\ffmpeg.zip)
- if exist node_headers.zip (appveyor-retry appveyor PushArtifact node_headers.zip)
- if exist out\Default\mksnapshot.zip (appveyor-retry appveyor PushArtifact out\Default\mksnapshot.zip)
- if exist out\Default\hunspell_dictionaries.zip (appveyor-retry appveyor PushArtifact out\Default\hunspell_dictionaries.zip)
- if exist out\Default\electron.lib (appveyor-retry appveyor PushArtifact out\Default\electron.lib)
- ps: >-
if ((Test-Path "pdb.zip") -And ($env:GN_CONFIG -ne 'release')) {
appveyor-retry appveyor PushArtifact pdb.zip
}
- matrix:
only:
- job_name: Test
init:
- ps: |
if ($env:RUN_TESTS -ne 'true') {
Write-warning "Skipping tests for $env:APPVEYOR_PROJECT_NAME"; Exit-AppveyorBuild
}
build_script:
- ps: |
node script/yarn.js install --frozen-lockfile
node script/doc-only-change.js --prNumber=$env:APPVEYOR_PULL_REQUEST_NUMBER
if ($LASTEXITCODE -eq 0) {
Write-warning "Skipping build for doc only change"
Exit-AppveyorBuild
} else {
$global:LASTEXITCODE = 0
}
- cd ..
- mkdir out\Default
- cd ..
- ps: |
# Download build artifacts
$apiUrl = 'https://ci.appveyor.com/api'
$build_info = Invoke-RestMethod -Method Get -Uri "$apiUrl/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/builds/$env:APPVEYOR_BUILD_ID"
$artifacts_to_download = @('dist.zip','shell_browser_ui_unittests.exe','chromedriver.zip','ffmpeg.zip','node_headers.zip','mksnapshot.zip','electron.lib')
foreach ($job in $build_info.build.jobs) {
if ($job.name -eq "Build") {
$jobId = $job.jobId
foreach($artifact_name in $artifacts_to_download) {
if ($artifact_name -eq 'shell_browser_ui_unittests.exe' -Or $artifact_name -eq 'electron.lib') {
$outfile = "src\out\Default\$artifact_name"
} else {
$outfile = $artifact_name
}
Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/$artifact_name" -OutFile $outfile
}
# Uncomment the following lines to download the pdb.zip to show real stacktraces when crashes happen during testing
# Invoke-RestMethod -Method Get -Uri "$apiUrl/buildjobs/$jobId/artifacts/pdb.zip" -OutFile pdb.zip
# 7z x -y -osrc pdb.zip
}
}
- ps: |
$out_default_zips = @('dist.zip','chromedriver.zip','mksnapshot.zip')
foreach($zip_name in $out_default_zips) {
7z x -y -osrc\out\Default $zip_name
}
- ps: 7z x -y -osrc\out\ffmpeg ffmpeg.zip
- ps: 7z x -y -osrc node_headers.zip
test_script:
# Workaround for https://github.com/appveyor/ci/issues/2420
- set "PATH=%PATH%;C:\Program Files\Git\mingw64\libexec\git-core"
- ps: |
cd src
New-Item .\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path .\out\Default\electron.lib -destination .\out\Default\gen\node_headers\Release\node.lib
- cd electron
# Explicitly set npm_config_arch because the .env doesn't persist
- ps: >-
if ($env:TARGET_ARCH -eq 'ia32') {
$env:npm_config_arch = "ia32"
}
- echo Running main test suite & node script/yarn test -- --trace-uncaught --runners=main --enable-logging=file --log-file=%cd%\electron.log
- echo Running native test suite & node script/yarn test -- --trace-uncaught --runners=native --enable-logging=file --log-file=%cd%\electron.log
- cd ..
- echo Verifying non proprietary ffmpeg & python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg
- echo "About to verify mksnapshot"
- echo Verifying mksnapshot & python electron\script\verify-mksnapshot.py --build-dir out\Default --source-root %cd%
- echo "Done verifying mksnapshot"
- echo Verifying chromedriver & python electron\script\verify-chromedriver.py --build-dir out\Default --source-root %cd%
- echo "Done verifying chromedriver"
on_finish:
# Uncomment these lines to enable RDP
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- if exist electron\electron.log ( appveyor-retry appveyor PushArtifact electron\electron.log )
}
}
- cd src
- ps: $env:BUILD_CONFIG_PATH="//electron/build/args/%GN_CONFIG%.gn"
- gn gen out/Default "--args=import(\"%BUILD_CONFIG_PATH%\") %GN_EXTRA_ARGS% cc_wrapper=\"%SCCACHE_PATH%\""
- gn check out/Default //electron:electron_lib
- gn check out/Default //electron:electron_app
- gn check out/Default //electron:manifests
- gn check out/Default //electron/shell/common/api:mojo
- ninja -C out/Default electron:electron_app
- if "%GN_CONFIG%"=="testing" ( python C:\Users\electron\depot_tools\post_build_ninja_summary.py -C out\Default )
- gn gen out/ffmpeg "--args=import(\"//electron/build/args/ffmpeg.gn\") %GN_EXTRA_ARGS%"
- ninja -C out/ffmpeg electron:electron_ffmpeg_zip
- ninja -C out/Default electron:electron_dist_zip
- ninja -C out/Default shell_browser_ui_unittests
- ninja -C out/Default electron:electron_mksnapshot_zip
- ninja -C out/Default electron:hunspell_dictionaries_zip
- ninja -C out/Default electron:electron_chromedriver_zip
- ninja -C out/Default third_party/electron_node:headers
- cmd /C %SCCACHE_PATH% --show-stats
- python electron/build/profile_toolchain.py --output-json=out/Default/windows_toolchain_profile.json
- appveyor PushArtifact out/Default/windows_toolchain_profile.json
- appveyor PushArtifact out/Default/dist.zip
- appveyor PushArtifact out/Default/shell_browser_ui_unittests.exe
- appveyor PushArtifact out/Default/chromedriver.zip
- appveyor PushArtifact out/ffmpeg/ffmpeg.zip
- 7z a node_headers.zip out\Default\gen\node_headers
- appveyor PushArtifact node_headers.zip
- appveyor PushArtifact out/Default/mksnapshot.zip
- appveyor PushArtifact out/Default/hunspell_dictionaries.zip
- appveyor PushArtifact out/Default/electron.lib
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
# Needed for msdia140.dll on 64-bit windows
$env:Path += ";$pwd\third_party\llvm-build\Release+Asserts\bin"
ninja -C out/Default electron:electron_symbols
}
- ps: >-
if ($env:GN_CONFIG -eq 'release') {
python electron\script\zip-symbols.py
appveyor PushArtifact out/Default/symbols.zip
} else {
# It's useful to have pdb files when debugging testing builds that are
# built on CI.
7z a pdb.zip out\Default\*.pdb
appveyor PushArtifact pdb.zip
}
- python electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.win.%TARGET_ARCH%.manifest
test_script:
# Workaround for https://github.com/appveyor/ci/issues/2420
- set "PATH=%PATH%;C:\Program Files\Git\mingw64\libexec\git-core"
- ps: >-
if ((-Not (Test-Path Env:\TEST_WOA)) -And (-Not (Test-Path Env:\ELECTRON_RELEASE)) -And ($env:GN_CONFIG -in "testing", "release")) {
$env:RUN_TESTS="true"
}
- ps: >-
if ($env:RUN_TESTS -eq 'true') {
New-Item .\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path .\out\Default\electron.lib -destination .\out\Default\gen\node_headers\Release\node.lib
} else {
echo "Skipping tests for $env:GN_CONFIG build"
}
- cd electron
- if "%RUN_TESTS%"=="true" ( echo Running test suite & node script/yarn test -- --enable-logging)
- cd ..
- if "%RUN_TESTS%"=="true" ( echo Verifying non proprietary ffmpeg & python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg )
- echo "About to verify mksnapshot"
- if "%RUN_TESTS%"=="true" ( echo Verifying mksnapshot & python electron\script\verify-mksnapshot.py --build-dir out\Default --source-root %cd% )
- echo "Done verifying mksnapshot"
deploy_script:
- cd electron
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\UPLOAD_TO_S3) {
Write-Output "Uploading Electron release distribution to s3"
& python script\release\uploaders\upload.py --upload_to_s3
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python script\release\uploaders\upload.py
}
} elseif (Test-Path Env:\TEST_WOA) {
node script/release/ci-release-build.js --job=electron-woa-testing --ci=VSTS --armTest --appveyorJobId=$env:APPVEYOR_JOB_ID $env:APPVEYOR_REPO_BRANCH
}
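The build_script above treats a missing artifact as a hard failure: each name in $artifacts_to_validate is mapped to its on-disk location (ffmpeg.zip under out\ffmpeg, node_headers.zip at the source root, everything else under out\Default) and the job throws if any are absent. A minimal Python sketch of the same check follows; the file list and layout come from the YAML above, while validate_artifacts and artifact_path are made-up helper names used only for illustration.

import os
import sys

# Expected artifacts, mirroring $artifacts_to_validate in the build_script above.
ARTIFACTS = [
    'dist.zip', 'windows_toolchain_profile.json', 'shell_browser_ui_unittests.exe',
    'chromedriver.zip', 'ffmpeg.zip', 'node_headers.zip', 'mksnapshot.zip',
    'electron.lib', 'hunspell_dictionaries.zip',
]

def artifact_path(name):
    # Same mapping as the PowerShell loop: ffmpeg.zip lives in out/ffmpeg,
    # node_headers.zip sits at the source root, the rest in out/Default.
    if name == 'ffmpeg.zip':
        return os.path.join('out', 'ffmpeg', 'ffmpeg.zip')
    if name == 'node_headers.zip':
        return name
    return os.path.join('out', 'Default', name)

def validate_artifacts(root='.'):
    missing = [n for n in ARTIFACTS
               if not os.path.exists(os.path.join(root, artifact_path(n)))]
    for name in missing:
        print('warning: {} is missing and cannot be added to artifacts'.format(name))
    if missing:
        sys.exit('Build failed due to missing artifacts')

if __name__ == '__main__':
    validate_artifacts(sys.argv[1] if len(sys.argv) > 1 else '.')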

azure-pipelines-woa.yml (new file, 92 lines)

@@ -0,0 +1,92 @@
steps:
- task: CopyFiles@2
displayName: 'Copy Files to: src\electron'
inputs:
TargetFolder: src\electron
- script: |
cd src\electron
node script/yarn.js install --frozen-lockfile
displayName: 'Yarn install'
- powershell: |
$localArtifactPath = "$pwd\dist.zip"
$serverArtifactPath = "$env:APPVEYOR_URL/buildjobs/$env:APPVEYOR_JOB_ID/artifacts/dist.zip"
Invoke-RestMethod -Method Get -Uri $serverArtifactPath -OutFile $localArtifactPath -Headers @{ "Authorization" = "Bearer $env:APPVEYOR_TOKEN" }
& "${env:ProgramFiles(x86)}\7-Zip\7z.exe" x -osrc\out\Default -y $localArtifactPath
displayName: 'Download and extract dist.zip for test'
env:
APPVEYOR_TOKEN: $(APPVEYOR_TOKEN)
- powershell: |
$localArtifactPath = "$pwd\src\out\Default\shell_browser_ui_unittests.exe"
$serverArtifactPath = "$env:APPVEYOR_URL/buildjobs/$env:APPVEYOR_JOB_ID/artifacts/shell_browser_ui_unittests.exe"
Invoke-RestMethod -Method Get -Uri $serverArtifactPath -OutFile $localArtifactPath -Headers @{ "Authorization" = "Bearer $env:APPVEYOR_TOKEN" }
displayName: 'Download and extract native test executables for test'
env:
APPVEYOR_TOKEN: $(APPVEYOR_TOKEN)
- powershell: |
$localArtifactPath = "$pwd\ffmpeg.zip"
$serverArtifactPath = "$env:APPVEYOR_URL/buildjobs/$env:APPVEYOR_JOB_ID/artifacts/ffmpeg.zip"
Invoke-RestMethod -Method Get -Uri $serverArtifactPath -OutFile $localArtifactPath -Headers @{ "Authorization" = "Bearer $env:APPVEYOR_TOKEN" }
& "${env:ProgramFiles(x86)}\7-Zip\7z.exe" x -osrc\out\ffmpeg $localArtifactPath
displayName: 'Download and extract ffmpeg.zip for test'
env:
APPVEYOR_TOKEN: $(APPVEYOR_TOKEN)
- powershell: |
$localArtifactPath = "$pwd\src\node_headers.zip"
$serverArtifactPath = "$env:APPVEYOR_URL/buildjobs/$env:APPVEYOR_JOB_ID/artifacts/node_headers.zip"
Invoke-RestMethod -Method Get -Uri $serverArtifactPath -OutFile $localArtifactPath -Headers @{ "Authorization" = "Bearer $env:APPVEYOR_TOKEN" }
cd src
& "${env:ProgramFiles(x86)}\7-Zip\7z.exe" x -y node_headers.zip
displayName: 'Download node headers for test'
env:
APPVEYOR_TOKEN: $(APPVEYOR_TOKEN)
- powershell: |
$localArtifactPath = "$pwd\src\out\Default\electron.lib"
$serverArtifactPath = "$env:APPVEYOR_URL/buildjobs/$env:APPVEYOR_JOB_ID/artifacts/electron.lib"
Invoke-RestMethod -Method Get -Uri $serverArtifactPath -OutFile $localArtifactPath -Headers @{ "Authorization" = "Bearer $env:APPVEYOR_TOKEN" }
displayName: 'Download electron.lib for test'
env:
APPVEYOR_TOKEN: $(APPVEYOR_TOKEN)
- powershell: |
New-Item src\out\Default\gen\node_headers\Release -Type directory
Copy-Item -path src\out\Default\electron.lib -destination src\out\Default\gen\node_headers\Release\node.lib
displayName: 'Setup node headers'
- script: |
cd src
set npm_config_nodedir=%cd%\out\Default\gen\node_headers
set npm_config_arch=arm64
cd electron
node script/yarn test -- --enable-logging --verbose
displayName: 'Run Electron tests'
env:
ELECTRON_OUT_DIR: Default
IGNORE_YARN_INSTALL_ERROR: 1
ELECTRON_TEST_RESULTS_DIR: junit
MOCHA_MULTI_REPORTERS: 'mocha-junit-reporter, tap'
MOCHA_REPORTER: mocha-multi-reporters
- task: PublishTestResults@2
displayName: 'Publish Test Results'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(System.DefaultWorkingDirectory)/src/junit/'
condition: always()
- script: |
cd src
echo "Verifying non proprietary ffmpeg"
python electron\script\verify-ffmpeg.py --build-dir out\Default --source-root %cd% --ffmpeg-path out\ffmpeg
displayName: 'Verify ffmpeg'
- powershell: |
Get-Process | Where Name Like "electron*" | Stop-Process
Get-Process | Where Name Like "MicrosoftEdge*" | Stop-Process
displayName: 'Kill processes left running from last test run'
condition: always()
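Each download step in this pipeline issues a GET against the AppVeyor artifacts endpoint ($APPVEYOR_URL/buildjobs/<job id>/artifacts/<name>) with a bearer token and writes the response body to a local path before extracting it. A stdlib-only Python sketch of that request is below; fetch_artifact is a made-up name, and the endpoint shape and header come from the Invoke-RestMethod calls above.

import os
import urllib.request

def fetch_artifact(appveyor_url, job_id, artifact_name, out_file, token):
    # Same endpoint the PowerShell steps above hit.
    url = '{}/buildjobs/{}/artifacts/{}'.format(appveyor_url, job_id, artifact_name)
    req = urllib.request.Request(url, headers={'Authorization': 'Bearer ' + token})
    with urllib.request.urlopen(req) as resp, open(out_file, 'wb') as f:
        f.write(resp.read())

# Example, mirroring the dist.zip step:
# fetch_artifact(os.environ['APPVEYOR_URL'], os.environ['APPVEYOR_JOB_ID'],
#                'dist.zip', 'dist.zip', os.environ['APPVEYOR_TOKEN'])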


@@ -1,8 +0,0 @@
{
"plugins": [
"unicorn"
],
"rules": {
"unicorn/prefer-node-protocol": "error"
}
}


@@ -1,62 +1,24 @@
is_electron_build = true
root_extra_deps = [ "//electron" ]
# Registry of NMVs --> https://github.com/nodejs/node/blob/main/doc/abi_version_registry.json
node_module_version = 118
# Registry of NMVs --> https://github.com/nodejs/node/blob/master/doc/abi_version_registry.json
node_module_version = 76
v8_promise_internal_field_count = 1
v8_typed_array_max_size_in_heap = 0
v8_embedder_string = "-electron.0"
# TODO: this breaks mksnapshot
v8_enable_snapshot_native_code_counters = false
# we use this api
v8_enable_javascript_promise_hooks = true
enable_cdm_host_verification = false
proprietary_codecs = true
ffmpeg_branding = "Chrome"
enable_printing = true
# Removes DLLs from the build, which are only meant to be used for Chromium development.
# See https://github.com/electron/electron/pull/17985
enable_basic_printing = true
angle_enable_vulkan_validation_layers = false
dawn_enable_vulkan_validation_layers = false
# These are disabled because they cause the zip manifest to differ between
# testing and release builds.
# See https://chromium-review.googlesource.com/c/chromium/src/+/2774898.
enable_pseudolocales = false
# Make application name configurable at runtime for cookie crypto
allow_runtime_configurable_key_storage = true
# CET shadow stack is incompatible with v8, until v8 is CET compliant
# enabling this flag causes main process crashes where CET is enabled
# Ref: https://source.chromium.org/chromium/chromium/src/+/45fba672185aae233e75d6ddc81ea1e0b30db050:v8/BUILD.gn;l=357
enable_cet_shadow_stack = false
# For similar reasons, disable CFI, which is not well supported in V8.
# Chromium doesn't have any problems with this because they do not run
# V8 in the browser process.
# Ref: https://source.chromium.org/chromium/chromium/src/+/45fba672185aae233e75d6ddc81ea1e0b30db050:v8/BUILD.gn;l=281
is_cfi = false
# TODO: fix this once sysroots have been updated.
use_qt = false
# https://chromium-review.googlesource.com/c/chromium/src/+/4365718
# TODO(codebytere): fix perfetto incompatibility with Node.js.
use_perfetto_client_library = false
# Disables the builtins PGO for V8
v8_builtins_profiling_log_file = ""
# https://chromium.googlesource.com/chromium/src/+/main/docs/dangling_ptr.md
# TODO(vertedinde): hunt down dangling pointers on Linux
enable_dangling_raw_ptr_checks = false
# This flag speeds up the performance of fork/execve on linux systems.
# Ref: https://chromium-review.googlesource.com/c/v8/v8/+/4602858
v8_enable_private_mapping_fork_optimization = true
# TODO: disabled due to crashes. re-enable.
enable_osr = false


@@ -1,4 +1,4 @@
root_extra_deps = [ "//electron/spec-chromium:spec" ]
root_extra_deps = [ "//electron/spec" ]
dcheck_always_on = true
is_debug = false


@@ -1,22 +1,33 @@
template("node_action") {
assert(defined(invoker.script), "Need script path to run")
assert(defined(invoker.args), "Need script arguments")
import("node.gni")
# TODO(MarshallOfSound): Move to electron/node, this is the only place it is used now
# Run an action with a given working directory. Behaves identically to the
# action() target type, with the exception that it changes directory before
# running the script.
#
# Parameters:
# cwd [required]: Directory to change to before running the script.
template("chdir_action") {
action(target_name) {
forward_variables_from(invoker,
"*",
[
"deps",
"public_deps",
"sources",
"inputs",
"outputs",
"script",
"args",
])
if (!defined(inputs)) {
inputs = []
assert(defined(cwd), "Need cwd in $target_name")
script = "//electron/build/run-in-dir.py"
if (defined(sources)) {
sources += [ invoker.script ]
} else {
assert(defined(inputs))
inputs += [ invoker.script ]
}
inputs += [ invoker.script ]
script = "//electron/build/run-node.py"
args = [ rebase_path(invoker.script) ] + invoker.args
args = [
rebase_path(cwd),
rebase_path(invoker.script),
]
args += invoker.args
}
}
@@ -46,42 +57,4 @@ template("asar") {
rebase_path(outputs[0]),
]
}
node_action(target_name + "_header_hash") {
invoker_out = invoker.outputs
deps = [ ":" + invoker.target_name ]
sources = invoker.outputs
script = "//electron/script/gn-asar-hash.js"
outputs = [ "$target_gen_dir/asar_hashes/$target_name.hash" ]
args = [
rebase_path(invoker_out[0]),
rebase_path(outputs[0]),
]
}
}
template("asar_hashed_info_plist") {
node_action(target_name) {
assert(defined(invoker.plist_file),
"Need plist_file to add hashed assets to")
assert(defined(invoker.keys), "Need keys to replace with asset hash")
assert(defined(invoker.hash_targets), "Need hash_targets to read hash from")
deps = invoker.hash_targets
script = "//electron/script/gn-plist-but-with-hashes.js"
inputs = [ invoker.plist_file ]
outputs = [ "$target_gen_dir/hashed_plists/$target_name.plist" ]
hash_files = []
foreach(hash_target, invoker.hash_targets) {
hash_files += get_target_outputs(hash_target)
}
args = [
rebase_path(invoker.plist_file),
rebase_path(outputs[0]),
] + invoker.keys + rebase_path(hash_files)
}
}


@@ -1,8 +1,26 @@
config("build_time_executable") {
configs = []
if (is_electron_build && !is_component_build) {
# The executables which have this config applied are dependent on ffmpeg,
# which is always a shared library in an Electron build. However, in the
# non-component build, executables don't have rpath set to search for
# libraries in the executable's directory, so ffmpeg cannot be found. So
# let's make sure rpath is set here.
# See '//build/config/gcc/BUILD.gn' for details on the rpath setting.
if (is_linux) {
configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
}
if (is_mac) {
ldflags = [ "-Wl,-rpath,@loader_path/." ]
}
}
}
# For MAS build, we force defining "MAS_BUILD".
config("mas_build") {
if (is_mas_build) {
defines = [ "IS_MAS_BUILD()=1" ]
} else {
defines = [ "IS_MAS_BUILD()=0" ]
defines = [ "MAS_BUILD" ]
}
}


@@ -39,7 +39,7 @@ def main(dump_syms, binary, out_dir, stamp_file, dsym_file=None):
args += ["-g", dsym_file]
args += [binary]
symbol_data = subprocess.check_output(args).decode(sys.stdout.encoding)
symbol_data = subprocess.check_output(args)
symbol_path = os.path.join(out_dir, get_symbol_path(symbol_data))
mkdir_p(os.path.dirname(symbol_path))


@@ -24,11 +24,7 @@ template("extract_symbols") {
assert(defined(invoker.binary), "Need binary to dump")
assert(defined(invoker.symbol_dir), "Need directory for symbol output")
if (host_os == "win" && target_cpu == "x86") {
dump_syms_label = "//third_party/breakpad:dump_syms(//build/toolchain/win:win_clang_x64)"
} else {
dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
}
dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
dump_syms_binary = get_label_info(dump_syms_label, "root_out_dir") +
"/dump_syms$_host_executable_suffix"
@@ -38,7 +34,9 @@ template("extract_symbols") {
dump_syms_binary,
]
stamp_file = "${target_gen_dir}/${target_name}.stamp"
outputs = [ stamp_file ]
outputs = [
stamp_file,
]
args = [
"./" + rebase_path(dump_syms_binary, root_build_dir),
rebase_path(invoker.binary, root_build_dir),


@@ -1,94 +0,0 @@
#!/usr/bin/env python3
from collections import OrderedDict
import json
import os
import sys
dir_path = os.path.dirname(os.path.realpath(__file__))
SENTINEL = "dL7pKGdnNz796PbbjQWNKmHXBZaB9tsX"
TEMPLATE_H = """
#ifndef ELECTRON_FUSES_H_
#define ELECTRON_FUSES_H_
#if defined(WIN32)
#define FUSE_EXPORT __declspec(dllexport)
#else
#define FUSE_EXPORT __attribute__((visibility("default")))
#endif
namespace electron::fuses {
extern const volatile char kFuseWire[];
{getters}
} // namespace electron::fuses
#endif // ELECTRON_FUSES_H_
"""
TEMPLATE_CC = """
#include "electron/fuses.h"
namespace electron::fuses {
const volatile char kFuseWire[] = { /* sentinel */ {sentinel}, /* fuse_version */ {fuse_version}, /* fuse_wire_length */ {fuse_wire_length}, /* fuse_wire */ {initial_config}};
{getters}
} // namespace electron:fuses
"""
with open(os.path.join(dir_path, "fuses.json5"), 'r') as f:
fuse_defaults = json.loads(''.join(line for line in f.readlines() if not line.strip()[0] == "/"), object_pairs_hook=OrderedDict)
fuse_version = fuse_defaults['_version']
del fuse_defaults['_version']
del fuse_defaults['_schema']
del fuse_defaults['_comment']
if fuse_version >= pow(2, 8):
raise Exception("Fuse version can not exceed one byte in size")
fuses = fuse_defaults.keys()
initial_config = ""
getters_h = ""
getters_cc = ""
index = len(SENTINEL) + 1
for fuse in fuses:
index += 1
initial_config += fuse_defaults[fuse]
name = ''.join(word.title() for word in fuse.split('_'))
getters_h += "FUSE_EXPORT bool Is{name}Enabled();\n".replace("{name}", name)
getters_cc += """
bool Is{name}Enabled() {
return kFuseWire[{index}] == '1';
}
""".replace("{name}", name).replace("{index}", str(index))
def c_hex(n):
s = hex(n)[2:]
return "0x" + s.rjust(2, '0')
def hex_arr(s):
arr = []
for char in s:
arr.append(c_hex(ord(char)))
return ",".join(arr)
header = TEMPLATE_H.replace("{getters}", getters_h.strip())
impl = TEMPLATE_CC.replace("{sentinel}", hex_arr(SENTINEL))
impl = impl.replace("{fuse_version}", c_hex(fuse_version))
impl = impl.replace("{fuse_wire_length}", c_hex(len(fuses)))
impl = impl.replace("{initial_config}", hex_arr(initial_config))
impl = impl.replace("{getters}", getters_cc.strip())
with open(sys.argv[1], 'w') as f:
f.write(header)
with open(sys.argv[2], 'w') as f:
f.write(impl)
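Because the generated kFuseWire array always begins with the fixed sentinel, followed by one version byte, one length byte, and then one '0'/'1'/'r' character per fuse, external tooling can locate and read (or patch) the fuse state directly in a built binary. A minimal sketch of reading it back, assuming only that layout; read_fuse_wire is a made-up name.

SENTINEL = b"dL7pKGdnNz796PbbjQWNKmHXBZaB9tsX"

def read_fuse_wire(binary_path):
    with open(binary_path, 'rb') as f:
        data = f.read()
    index = data.index(SENTINEL) + len(SENTINEL)
    version = data[index]            # fuse schema version byte
    length = data[index + 1]         # number of fuses
    wire = data[index + 2:index + 2 + length].decode('ascii')
    return version, wire             # e.g. (1, '1011000') with the defaults listed in fuses.json5 below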


@@ -1,12 +0,0 @@
{
"_comment": "Modifying the fuse schema in any breaking way should result in the _version prop being incremented. NEVER remove a fuse or change its meaning, instead mark it as removed with 'r'",
"_schema": "0 == off, 1 == on, r == removed fuse",
"_version": 1,
"run_as_node": "1",
"cookie_encryption": "0",
"node_options": "1",
"node_cli_inspect": "1",
"embedded_asar_integrity_validation": "0",
"only_load_app_from_asar": "0",
"load_browser_process_specific_v8_snapshot": "0"
}


@@ -1,34 +0,0 @@
import os
import re
import sys
DEFINE_EXTRACT_REGEX = re.compile('^ *# *define (\w*)', re.MULTILINE)
def main(out_dir, headers):
defines = []
for filename in headers:
with open(filename, 'r') as f:
content = f.read()
defines += read_defines(content)
push_and_undef = ''
for define in defines:
push_and_undef += '#pragma push_macro("%s")\n' % define
push_and_undef += '#undef %s\n' % define
with open(os.path.join(out_dir, 'push_and_undef_node_defines.h'), 'w') as o:
o.write(push_and_undef)
pop = ''
for define in defines:
pop += '#pragma pop_macro("%s")\n' % define
with open(os.path.join(out_dir, 'pop_node_defines.h'), 'w') as o:
o.write(pop)
def read_defines(content):
defines = []
for match in DEFINE_EXTRACT_REGEX.finditer(content):
defines.append(match.group(1))
return defines
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2:])
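The script above scans Node headers for #define lines and emits a pair of generated headers: push_and_undef_node_defines.h (a #pragma push_macro plus #undef per macro) and pop_node_defines.h (the matching #pragma pop_macro), so the macros can be undefined and later restored around other includes. A tiny self-contained demo of the extraction step, reusing the same regex:

import re

DEFINE_EXTRACT_REGEX = re.compile(r'^ *# *define (\w*)', re.MULTILINE)

sample = '#define NODE_WANT_INTERNALS 1\n# define CHECK(expr) void(0)\n'
print(DEFINE_EXTRACT_REGEX.findall(sample))  # ['NODE_WANT_INTERNALS', 'CHECK']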

build/install-build-deps.sh (new executable file, 653 lines)

@@ -0,0 +1,653 @@
#!/bin/bash -e
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Script to install everything needed to build chromium (well, ideally, anyway)
# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
usage() {
echo "Usage: $0 [--options]"
echo "Options:"
echo "--[no-]syms: enable or disable installation of debugging symbols"
echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
echo "--[no-]arm: enable or disable installation of arm cross toolchain"
echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
"fonts"
echo "--[no-]nacl: enable or disable installation of prerequisites for"\
"building standalone NaCl and all its toolchains"
echo "--[no-]backwards-compatible: enable or disable installation of packages
that are no longer currently needed and have been removed from this
script. Useful for bisection."
echo "--no-prompt: silently select standard options/defaults"
echo "--quick-check: quickly try to determine if dependencies are installed"
echo " (this avoids interactive prompts and sudo commands,"
echo " so might not be 100% accurate)"
echo "--unsupported: attempt installation even on unsupported systems"
echo "Script will prompt interactively if options not given."
exit 1
}
# Checks whether a particular package is available in the repos.
# USAGE: $ package_exists <package name>
package_exists() {
# 'apt-cache search' takes a regex string, so eg. the +'s in packages like
# "libstdc++" need to be escaped.
local escaped="$(echo $1 | sed 's/[\~\+\.\:-]/\\&/g')"
[ ! -z "$(apt-cache search --names-only "${escaped}" | \
awk '$1 == "'$1'" { print $1; }')" ]
}
# These default to on because (some) bots need them and it keeps things
# simple for the bot setup if all bots just run the script in its default
# mode. Developers who don't want stuff they don't need installed on their
# own workstations can pass --no-arm --no-nacl when running the script.
do_inst_arm=1
do_inst_nacl=1
while [ "$1" != "" ]
do
case "$1" in
--syms) do_inst_syms=1;;
--no-syms) do_inst_syms=0;;
--lib32) do_inst_lib32=1;;
--arm) do_inst_arm=1;;
--no-arm) do_inst_arm=0;;
--chromeos-fonts) do_inst_chromeos_fonts=1;;
--no-chromeos-fonts) do_inst_chromeos_fonts=0;;
--nacl) do_inst_nacl=1;;
--no-nacl) do_inst_nacl=0;;
--backwards-compatible) do_inst_backwards_compatible=1;;
--no-backwards-compatible) do_inst_backwards_compatible=0;;
--add-cross-tool-repo) add_cross_tool_repo=1;;
--no-prompt) do_default=1
do_quietly="-qq --assume-yes"
;;
--quick-check) do_quick_check=1;;
--unsupported) do_unsupported=1;;
*) usage;;
esac
shift
done
if [ "$do_inst_arm" = "1" ]; then
do_inst_lib32=1
fi
# Check for lsb_release command in $PATH
if ! which lsb_release > /dev/null; then
echo "ERROR: lsb_release not found in \$PATH" >&2
exit 1;
fi
distro_codename=$(lsb_release --codename --short)
distro_id=$(lsb_release --id --short)
supported_codenames="(trusty|xenial|artful|bionic)"
supported_ids="(Debian)"
if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
if [[ ! $distro_codename =~ $supported_codenames &&
! $distro_id =~ $supported_ids ]]; then
echo -e "ERROR: The only supported distros are\n" \
"\tUbuntu 14.04 LTS (trusty)\n" \
"\tUbuntu 16.04 LTS (xenial)\n" \
"\tUbuntu 17.10 (artful)\n" \
"\tUbuntu 18.04 LTS (bionic)\n" \
"\tDebian 8 (jessie) or later" >&2
exit 1
fi
if ! uname -m | egrep -q "i686|x86_64"; then
echo "Only x86 architectures are currently supported" >&2
exit
fi
fi
if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
echo "Running as non-root user."
echo "You might have to enter your password one or more times for 'sudo'."
echo
fi
# Packages needed for chromeos only
chromeos_dev_list="libbluetooth-dev libxkbcommon-dev"
if package_exists realpath; then
chromeos_dev_list="${chromeos_dev_list} realpath"
fi
# Packages needed for development
dev_list="\
binutils
bison
bzip2
cdbs
curl
dbus-x11
dpkg-dev
elfutils
devscripts
fakeroot
flex
g++
git-core
git-svn
gperf
libappindicator3-dev
libasound2-dev
libatspi2.0-dev
libbrlapi-dev
libbz2-dev
libcairo2-dev
libcap-dev
libcups2-dev
libcurl4-gnutls-dev
libdrm-dev
libelf-dev
libffi-dev
libgbm-dev
libglib2.0-dev
libglu1-mesa-dev
libgnome-keyring-dev
libgtk-3-dev
libkrb5-dev
libnspr4-dev
libnss3-dev
libpam0g-dev
libpci-dev
libpulse-dev
libsctp-dev
libspeechd-dev
libsqlite3-dev
libssl-dev
libudev-dev
libwww-perl
libxslt1-dev
libxss-dev
libxt-dev
libxtst-dev
locales
openbox
p7zip
patch
perl
pkg-config
python
python-cherrypy3
python-crypto
python-dev
python-numpy
python-opencv
python-openssl
python-psutil
python-yaml
rpm
ruby
subversion
uuid-dev
wdiff
x11-utils
xcompmgr
xz-utils
zip
$chromeos_dev_list
"
# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
# NaCl binaries.
if file -L /sbin/init | grep -q 'ELF 64-bit'; then
dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
fi
# Run-time libraries required by chromeos only
chromeos_lib_list="libpulse0 libbz2-1.0"
# List of required run-time libraries
common_lib_list="\
libappindicator3-1
libasound2
libatk1.0-0
libatspi2.0-0
libc6
libcairo2
libcap2
libcups2
libexpat1
libffi6
libfontconfig1
libfreetype6
libglib2.0-0
libgnome-keyring0
libgtk-3-0
libpam0g
libpango1.0-0
libpci3
libpcre3
libpixman-1-0
libspeechd2
libstdc++6
libsqlite3-0
libuuid1
libwayland-egl1-mesa
libx11-6
libx11-xcb1
libxau6
libxcb1
libxcomposite1
libxcursor1
libxdamage1
libxdmcp6
libxext6
libxfixes3
libxi6
libxinerama1
libxrandr2
libxrender1
libxtst6
zlib1g
"
# Full list of required run-time libraries
lib_list="\
$common_lib_list
$chromeos_lib_list
"
# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
lib32_list="linux-libc-dev:i386 libpci3:i386"
# 32-bit libraries needed for a 32-bit build
lib32_list="$lib32_list libx11-xcb1:i386"
# Packages that have been removed from this script. Regardless of configuration
# or options passed to this script, whenever a package is removed, it should be
# added here.
backwards_compatible_list="\
7za
fonts-indic
fonts-ipafont
fonts-stix
fonts-thai-tlwg
fonts-tlwg-garuda
language-pack-da
language-pack-fr
language-pack-he
language-pack-zh-hant
libappindicator-dev
libappindicator1
libappindicator3-1:i386
libexif-dev
libexif12
libexif12:i386
libgbm-dev
libgl1-mesa-dev
libgl1-mesa-glx:i386
libgles2-mesa-dev
libgtk2.0-0
libgtk2.0-0:i386
libgtk2.0-dev
mesa-common-dev
msttcorefonts
ttf-dejavu-core
ttf-indic-fonts
ttf-kochi-gothic
ttf-kochi-mincho
ttf-mscorefonts-installer
xfonts-mathml
"
case $distro_codename in
trusty)
backwards_compatible_list+=" \
libgbm-dev-lts-trusty
libgl1-mesa-dev-lts-trusty
libgl1-mesa-glx-lts-trusty:i386
libgles2-mesa-dev-lts-trusty
mesa-common-dev-lts-trusty"
;;
xenial)
backwards_compatible_list+=" \
libgbm-dev-lts-xenial
libgl1-mesa-dev-lts-xenial
libgl1-mesa-glx-lts-xenial:i386
libgles2-mesa-dev-lts-xenial
mesa-common-dev-lts-xenial"
;;
esac
# arm cross toolchain packages needed to build chrome on armhf
EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
EM_SOURCE=$(cat <<EOF
# Repo added by Chromium $0
${EM_REPO}
# deb-src http://emdebian.org/tools/debian/ jessie main
EOF
)
EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
case $distro_codename in
jessie)
eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
arm_list="libc6-dev:armhf
linux-libc-dev:armhf"
if [ "$do_inst_arm" = "1" ]; then
if $(dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null); then
arm_list+=" ${GPP_ARM_PACKAGE}"
else
if [ "${add_cross_tool_repo}" = "1" ]; then
gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
fi
arm_list+=" ${GPP_ARM_PACKAGE}"
else
echo "The Debian Cross-toolchains repository is necessary to"
echo "cross-compile Chromium for arm."
echo "Rerun with --add-deb-cross-tool-repo to have it added for you."
fi
fi
fi
;;
# All necessary ARM packages are available on the default repos on
# Debian 9 and later.
*)
arm_list="libc6-dev-armhf-cross
linux-libc-dev-armhf-cross
${GPP_ARM_PACKAGE}"
;;
esac
# Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056
case $distro_codename in
trusty)
arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
gcc-4.8-multilib-arm-linux-gnueabihf"
;;
xenial|artful|bionic)
arm_list+=" g++-5-multilib-arm-linux-gnueabihf
gcc-5-multilib-arm-linux-gnueabihf
gcc-arm-linux-gnueabihf"
;;
esac
# Packages to build NaCl, its toolchains, and its ports.
naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
nacl_list="\
g++-mingw-w64-i686
lib32z1-dev
libasound2:i386
libcap2:i386
libelf-dev:i386
libfontconfig1:i386
libglib2.0-0:i386
libgpm2:i386
libgtk-3-0:i386
libncurses5:i386
lib32ncurses5-dev
libnss3:i386
libpango1.0-0:i386
libssl-dev:i386
libtinfo-dev
libtinfo-dev:i386
libtool
libuuid1:i386
libxcomposite1:i386
libxcursor1:i386
libxdamage1:i386
libxi6:i386
libxrandr2:i386
libxss1:i386
libxtst6:i386
texinfo
xvfb
${naclports_list}
"
if package_exists libssl1.1; then
nacl_list="${nacl_list} libssl1.1:i386"
elif package_exists libssl1.0.2; then
nacl_list="${nacl_list} libssl1.0.2:i386"
else
nacl_list="${nacl_list} libssl1.0.0:i386"
fi
# Some package names have changed over time
if package_exists libpng16-16; then
lib_list="${lib_list} libpng16-16"
else
lib_list="${lib_list} libpng12-0"
fi
if package_exists libnspr4; then
lib_list="${lib_list} libnspr4 libnss3"
else
lib_list="${lib_list} libnspr4-0d libnss3-1d"
fi
if package_exists libjpeg-dev; then
dev_list="${dev_list} libjpeg-dev"
else
dev_list="${dev_list} libjpeg62-dev"
fi
if package_exists libudev1; then
dev_list="${dev_list} libudev1"
nacl_list="${nacl_list} libudev1:i386"
else
dev_list="${dev_list} libudev0"
nacl_list="${nacl_list} libudev0:i386"
fi
if package_exists libbrlapi0.6; then
dev_list="${dev_list} libbrlapi0.6"
else
dev_list="${dev_list} libbrlapi0.5"
fi
if package_exists apache2.2-bin; then
dev_list="${dev_list} apache2.2-bin"
else
dev_list="${dev_list} apache2-bin"
fi
if package_exists libav-tools; then
dev_list="${dev_list} libav-tools"
fi
if package_exists php7.2-cgi; then
dev_list="${dev_list} php7.2-cgi libapache2-mod-php7.2"
elif package_exists php7.1-cgi; then
dev_list="${dev_list} php7.1-cgi libapache2-mod-php7.1"
elif package_exists php7.0-cgi; then
dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
else
dev_list="${dev_list} php5-cgi libapache2-mod-php5"
fi
# Some packages are only needed if the distribution actually supports
# installing them.
if package_exists appmenu-gtk; then
lib_list="$lib_list appmenu-gtk"
fi
# Cross-toolchain strip is needed for building the sysroots.
if package_exists binutils-arm-linux-gnueabihf; then
dev_list="${dev_list} binutils-arm-linux-gnueabihf"
fi
if package_exists binutils-aarch64-linux-gnu; then
dev_list="${dev_list} binutils-aarch64-linux-gnu"
fi
if package_exists binutils-mipsel-linux-gnu; then
dev_list="${dev_list} binutils-mipsel-linux-gnu"
fi
if package_exists binutils-mips64el-linux-gnuabi64; then
dev_list="${dev_list} binutils-mips64el-linux-gnuabi64"
fi
# When cross building for arm/Android on 64-bit systems the host binaries
# that are part of v8 need to be compiled with -m32 which means
# that basic multilib support is needed.
if file -L /sbin/init | grep -q 'ELF 64-bit'; then
# gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
# g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
# appropriate value of X and Y by seeing what version the current
# distribution's g++-multilib package depends on.
multilib_package=$(apt-cache depends g++-multilib --important | \
grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
lib32_list="$lib32_list $multilib_package"
fi
if [ "$do_inst_syms" = "1" ]; then
echo "Including debugging symbols."
# Debian is in the process of transitioning to automatic debug packages, which
# have the -dbgsym suffix (https://wiki.debian.org/AutomaticDebugPackages).
# Untransitioned packages have the -dbg suffix. And on some systems, neither
# will be available, so exclude the ones that are missing.
dbg_package_name() {
if package_exists "$1-dbgsym"; then
echo "$1-dbgsym"
elif package_exists "$1-dbg"; then
echo "$1-dbg"
fi
}
for package in "${common_lib_list}"; do
dbg_list="$dbg_list $(dbg_package_name ${package})"
done
# Debugging symbols packages not following common naming scheme
if [ "$(dbg_package_name libstdc++6)" == "" ]; then
if package_exists libstdc++6-8-dbg; then
dbg_list="${dbg_list} libstdc++6-8-dbg"
elif package_exists libstdc++6-7-dbg; then
dbg_list="${dbg_list} libstdc++6-7-dbg"
elif package_exists libstdc++6-6-dbg; then
dbg_list="${dbg_list} libstdc++6-6-dbg"
elif package_exists libstdc++6-5-dbg; then
dbg_list="${dbg_list} libstdc++6-5-dbg"
elif package_exists libstdc++6-4.9-dbg; then
dbg_list="${dbg_list} libstdc++6-4.9-dbg"
elif package_exists libstdc++6-4.8-dbg; then
dbg_list="${dbg_list} libstdc++6-4.8-dbg"
elif package_exists libstdc++6-4.7-dbg; then
dbg_list="${dbg_list} libstdc++6-4.7-dbg"
elif package_exists libstdc++6-4.6-dbg; then
dbg_list="${dbg_list} libstdc++6-4.6-dbg"
fi
fi
if [ "$(dbg_package_name libatk1.0-0)" == "" ]; then
dbg_list="$dbg_list $(dbg_package_name libatk1.0)"
fi
if [ "$(dbg_package_name libpango1.0-0)" == "" ]; then
dbg_list="$dbg_list $(dbg_package_name libpango1.0-dev)"
fi
else
echo "Skipping debugging symbols."
dbg_list=
fi
if [ "$do_inst_lib32" = "1" ]; then
echo "Including 32-bit libraries."
else
echo "Skipping 32-bit libraries."
lib32_list=
fi
if [ "$do_inst_arm" = "1" ]; then
echo "Including ARM cross toolchain."
else
echo "Skipping ARM cross toolchain."
arm_list=
fi
if [ "$do_inst_nacl" = "1" ]; then
echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
else
echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
nacl_list=
fi
filtered_backwards_compatible_list=
if [ "$do_inst_backwards_compatible" = "1" ]; then
echo "Including backwards compatible packages."
for package in ${backwards_compatible_list}; do
if package_exists ${package}; then
filtered_backwards_compatible_list+=" ${package}"
fi
done
fi
# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
# confusing dpkg-query (crbug.com/446172).
packages="$(
echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}" \
"${nacl_list}" ${filtered_backwards_compatible_list} | tr " " "\n" | \
sort -u | sort -r -s -t: -k2 | tr "\n" " "
)"
if [ 1 -eq "${do_quick_check-0}" ] ; then
if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
# Distinguish between packages that actually aren't available to the
# system (i.e. not in any repo) and packages that just aren't known to
# dpkg (i.e. managed by apt).
missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')"
not_installed=""
unknown=""
for p in ${missing_packages}; do
if apt-cache show ${p} > /dev/null 2>&1; then
not_installed="${p}\n${not_installed}"
else
unknown="${p}\n${unknown}"
fi
done
if [ -n "${not_installed}" ]; then
echo "WARNING: The following packages are not installed:"
echo -e "${not_installed}" | sed -e "s/^/ /"
fi
if [ -n "${unknown}" ]; then
echo "WARNING: The following packages are unknown to your system"
echo "(maybe missing a repo or need to 'sudo apt-get update'):"
echo -e "${unknown}" | sed -e "s/^/ /"
fi
exit 1
fi
exit 0
fi
if [ "$do_inst_lib32" = "1" ] || [ "$do_inst_nacl" = "1" ]; then
sudo dpkg --add-architecture i386
fi
sudo apt-get update
# We initially run "apt-get" with the --reinstall option and parse its output.
# This way, we can find all the packages that need to be newly installed
# without accidentally promoting any packages from "auto" to "manual".
# We then re-run "apt-get" with just the list of missing packages.
echo "Finding missing packages..."
# Intentionally leaving $packages unquoted so it's more readable.
echo "Packages required: " $packages
echo
new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
# We probably never hit this following line.
echo "No missing packages, and the packages are up to date."
elif [ $? -eq 1 ]; then
# We expect apt-get to have exit status of 1.
# This indicates that we cancelled the install with "yes n|".
new_list=$(echo "$new_list" |
sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d')
new_list=$(echo "$new_list" | sed 's/ *$//')
if [ -z "$new_list" ] ; then
echo "No missing packages, and the packages are up to date."
else
echo "Installing missing packages: $new_list."
sudo apt-get install ${do_quietly-} ${new_list}
fi
echo
else
# An apt-get exit status of 100 indicates that a real error has occurred.
# I am intentionally leaving out the '"'s around new_list_cmd,
# as this makes it easier to cut and paste the output
echo "The following command failed: " ${new_list_cmd}
echo
echo "It produces the following output:"
yes n | $new_list_cmd || true
echo
echo "You will have to install the above packages yourself."
echo
exit 100
fi
# Install the Chrome OS default fonts. This must go after running
# apt-get, since install-chromeos-fonts depends on curl.
if [ "$do_inst_chromeos_fonts" != "0" ]; then
echo
echo "Installing Chrome OS fonts."
dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
if ! sudo $dir/linux/install-chromeos-fonts.py; then
echo "ERROR: The installation of the Chrome OS default fonts failed."
if [ `stat -f -c %T $dir` == "nfs" ]; then
echo "The reason is that your repo is installed on a remote file system."
else
echo "This is expected if your repo is installed on a remote file system."
fi
echo "It is recommended to install your repo on a local file system."
echo "You can skip the installation of the Chrome OS default founts with"
echo "the command line option: --no-chromeos-fonts."
exit 1
fi
else
echo "Skipping installation of Chrome OS fonts."
fi
echo "Installing locales."
CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8"
LOCALE_GEN=/etc/locale.gen
if [ -e ${LOCALE_GEN} ]; then
OLD_LOCALE_GEN="$(cat /etc/locale.gen)"
for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN}
done
# Regenerating locales can take a while, so only do it if we need to.
if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then
echo "Locales already up-to-date."
else
sudo locale-gen
fi
else
for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
sudo locale-gen ${CHROMIUM_LOCALE}
done
fi


@@ -4,11 +4,10 @@
# Cocoa .app bundle. The presence of these empty directories is sufficient to
# convince Cocoa that the application supports the named localization, even if
# an InfoPlist.strings file is not provided. Chrome uses these empty locale
# directories for its helper executable bundles, which do not otherwise
# directoires for its helper executable bundles, which do not otherwise
# require any direct Cocoa locale support.
import os
import errno
import sys
@@ -17,7 +16,7 @@ def main(args):
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
if e.errno == os.errno.EEXIST:
# It's OK if it already exists
pass
else:
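# Note on the hunk above: the two if-lines differ only in errno.EEXIST versus
# os.errno.EEXIST. The os.errno alias was an accidental re-export and is gone in
# modern Python 3, so only the errno module spelling is reliable. On Python 3 the
# whole try/except can also collapse to a single call (path shown is illustrative):
import os
os.makedirs('Electron.app/Contents/Resources/en.lproj', exist_ok=True)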

build/node.gni (new file, 21 lines)

@@ -0,0 +1,21 @@
template("node_action") {
assert(defined(invoker.script), "Need script path to run")
assert(defined(invoker.args), "Need script argumets")
action(target_name) {
forward_variables_from(invoker,
[
"deps",
"public_deps",
"sources",
"inputs",
"outputs",
])
if (!defined(inputs)) {
inputs = []
}
inputs += [ invoker.script ]
script = "//electron/build/run-node.py"
args = [ rebase_path(invoker.script) ] + invoker.args
}
}


@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
from __future__ import print_function
import os
import subprocess
@@ -15,6 +15,5 @@ args = [cmd, "run",
try:
subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
error_msg = "NPM script '{}' failed with code '{}':\n".format(sys.argv[2], e.returncode)
print(error_msg + e.output.decode('utf8'))
print("NPM script '" + sys.argv[2] + "' failed with code '" + str(e.returncode) + "':\n" + e.output)
sys.exit(e.returncode)
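The error-handling difference above is about bytes versus text: subprocess.CalledProcessError.output is a bytes object on Python 3, so it must be decoded before being concatenated into a str error message. A quick illustration (it assumes a node binary on PATH; the command is only an example):

import subprocess

try:
    subprocess.check_output(['node', '-e', 'process.exit(2)'], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
    print(type(e.output))            # <class 'bytes'> on Python 3
    print(e.output.decode('utf8'))   # decode before mixing with str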


@@ -1,16 +1,18 @@
template("npm_action") {
assert(defined(invoker.script),
"Need script name to run (must be defined in package.json)")
assert(defined(invoker.args), "Need script arguments")
assert(defined(invoker.args), "Need script argumets")
action("npm_pre_flight_" + target_name) {
inputs = [
"//electron/package.json",
"//electron/yarn.lock",
"package.json",
"yarn.lock",
]
script = "//electron/build/npm-run.py"
outputs = [ "$target_gen_dir/npm_pre_stamps/" + target_name + ".stamp" ]
outputs = [
"$target_gen_dir/npm_pre_stamps/" + target_name + ".stamp",
]
args = [
"--silent",


@@ -1,5 +1,4 @@
from __future__ import unicode_literals
from __future__ import with_statement
import contextlib
import sys
import os
@@ -12,6 +11,7 @@ import find_depot_tools
from vs_toolchain import \
SetEnvironmentAndGetRuntimeDllDirs, \
SetEnvironmentAndGetSDKDir, \
GetVisualStudioVersion, \
NormalizePath
sys.path.append("%s/win_toolchain" % find_depot_tools.add_depot_tools_to_path())
@@ -20,10 +20,10 @@ from get_toolchain_if_necessary import CalculateHash
@contextlib.contextmanager
def cwd(directory):
def cwd(dir):
curdir = os.getcwd()
try:
os.chdir(directory)
os.chdir(dir)
yield
finally:
os.chdir(curdir)
@@ -34,10 +34,36 @@ def calculate_hash(root):
return CalculateHash('.', None)
def windows_installed_software():
# file_path = os.path.join(os.getcwd(), 'installed_software.json')
# return json.loads(open('installed_software.json').read().decode('utf-8'))
f = open('installed_software.json', encoding='utf-8-sig')
return json.load(f)
import win32com.client
strComputer = "."
objWMIService = win32com.client.Dispatch("WbemScripting.SWbemLocator")
objSWbemServices = objWMIService.ConnectServer(strComputer, "root\cimv2")
colItems = objSWbemServices.ExecQuery("Select * from Win32_Product")
items = []
for objItem in colItems:
item = {}
if objItem.Caption:
item['caption'] = objItem.Caption
if objItem.Caption:
item['description'] = objItem.Description
if objItem.InstallDate:
item['install_date'] = objItem.InstallDate
if objItem.InstallDate2:
item['install_date_2'] = objItem.InstallDate2
if objItem.InstallLocation:
item['install_location'] = objItem.InstallLocation
if objItem.Name:
item['name'] = objItem.Name
if objItem.SKUNumber:
item['sku_number'] = objItem.SKUNumber
if objItem.Vendor:
item['vendor'] = objItem.Vendor
if objItem.Version:
item['version'] = objItem.Version
items.append(item)
return items
def windows_profile():
@@ -45,18 +71,12 @@ def windows_profile():
win_sdk_dir = SetEnvironmentAndGetSDKDir()
path = NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])
# since current windows executable are symbols path dependant,
# profile the current directory too
return {
'pwd': os.getcwd(),
'pwd': os.getcwd(), # since current windows executable are symbols path dependant, profile the current directory too
'installed_software': windows_installed_software(),
'sdks': [
{'name': 'vs', 'path': path, 'hash': calculate_hash(path)},
{
'name': 'wsdk',
'path': win_sdk_dir,
'hash': calculate_hash(win_sdk_dir),
},
{'name': 'wsdk', 'path': win_sdk_dir, 'hash': calculate_hash(win_sdk_dir)}
],
'runtime_lib_dirs': runtime_dll_dirs,
}
@@ -64,7 +84,7 @@ def windows_profile():
def main(options):
if sys.platform == 'win32':
with open(options.output_json, 'w') as f:
with open(options.output_json, 'wb') as f:
json.dump(windows_profile(), f)
else:
raise OSError("Unsupported OS")
@@ -74,5 +94,5 @@ if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('--output-json', metavar='FILE', default='profile.json',
help='write information about toolchain to FILE')
opts, args = parser.parse_args()
sys.exit(main(opts))
options, args = parser.parse_args()
sys.exit(main(options))
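One version of windows_installed_software() above simply reloads installed_software.json, the file the AppVeyor step earlier in this change writes with Get-CimInstance ... | ConvertTo-Json (UTF-8 with a BOM, hence the utf-8-sig encoding); the other version queries WMI directly. A short sketch of consuming that JSON, using the lower-cased keys chosen by that PowerShell select:

import json

with open('installed_software.json', encoding='utf-8-sig') as f:
    software = json.load(f)

for item in software:
    print(item.get('vendor'), item.get('name'), item.get('version'))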


@@ -51,7 +51,7 @@ template("compile_ib_files") {
# Template to compile and package Mac XIB files as bundle data.
# Arguments
# sources:
# list of string, sources to compile
# list of string, sources to comiple
# output_path:
# (optional) string, the path to use for the outputs list in the
# bundle_data step. If unspecified, defaults to bundle_resources_dir.
@@ -81,7 +81,9 @@ template("mac_xib_bundle_data") {
"visibility",
])
public_deps = [ ":$_compile_target_name" ]
public_deps = [
":$_compile_target_name",
]
sources = get_target_outputs(":$_compile_target_name")
_output_path = "{{bundle_resources_dir}}"
@@ -89,6 +91,8 @@ template("mac_xib_bundle_data") {
_output_path = invoker.output_path
}
outputs = [ "$_output_path/{{source_file_part}}" ]
outputs = [
"$_output_path/{{source_file_part}}",
]
}
}


@@ -1,5 +1,6 @@
import sys
import os
import subprocess
def main(argv):
cwd = argv[1]


@@ -1,18 +0,0 @@
#!/usr/bin/env python3
import os
import subprocess
import sys
source = sys.argv[1]
dest = sys.argv[2]
# Ensure any existing framework is removed
subprocess.check_output(["rm", "-rf", dest])
subprocess.check_output(["cp", "-a", source, dest])
# Strip headers, we do not need to ship them
subprocess.check_output(["rm", "-r", os.path.join(dest, "Headers")])
subprocess.check_output(
["rm", "-r", os.path.join(dest, "Versions", "Current", "Headers")]
)


@@ -15,8 +15,12 @@ template("templated_file") {
"inputs",
"outputs",
])
inputs = [ invoker.template ]
outputs = [ invoker.output ]
inputs = [
invoker.template,
]
outputs = [
invoker.output,
]
script = "//electron/build/generate-template.py"
args = [
rebase_path(invoker.template),


@@ -1 +0,0 @@
$full_version


@@ -26,12 +26,19 @@ template("typescript_build") {
"//electron/typings/internal-electron.d.ts",
]
type_roots = "node_modules/@types,typings"
if (defined(invoker.type_root)) {
type_roots += "," + invoker.type_root
}
base_out_path = invoker.output_gen_dir + "/electron/"
args = [
"-p",
rebase_path(invoker.tsconfig),
"--outDir",
rebase_path("$base_out_path" + invoker.output_dir_name),
"--typeRoots",
type_roots,
]
outputs = []


@@ -0,0 +1,2 @@
process.env.PRINT_WEBPACK_GRAPH = true
require('./run-compiler')


@@ -0,0 +1,22 @@
const path = require('path')
const webpack = require('webpack')
const configPath = process.argv[2]
const outPath = path.resolve(process.argv[3])
const config = require(configPath)
config.output = {
path: path.dirname(outPath),
filename: path.basename(outPath)
}
webpack(config, (err, stats) => {
if (err) {
console.error(err)
process.exit(1)
} else if (stats.hasErrors()) {
console.error(stats.toString('normal'))
process.exit(1)
} else {
process.exit(0)
}
})


@@ -1,5 +0,0 @@
module.exports = require('./webpack.config.base')({
target: 'asar',
alwaysHasNode: true,
targetDeletesNodeGlobals: true
});


@@ -1,19 +1,22 @@
const fs = require('node:fs');
const path = require('node:path');
const webpack = require('webpack');
const TerserPlugin = require('terser-webpack-plugin');
const WrapperPlugin = require('wrapper-webpack-plugin');
const fs = require('fs')
const path = require('path')
const webpack = require('webpack')
const electronRoot = path.resolve(__dirname, '../..');
const electronRoot = path.resolve(__dirname, '../..')
const onlyPrintingGraph = !!process.env.PRINT_WEBPACK_GRAPH
class AccessDependenciesPlugin {
apply (compiler) {
apply(compiler) {
// Only hook into webpack when we are printing the dependency graph
if (!onlyPrintingGraph) return
compiler.hooks.compilation.tap('AccessDependenciesPlugin', compilation => {
compilation.hooks.finishModules.tap('AccessDependenciesPlugin', modules => {
const filePaths = modules.map(m => m.resource).filter(p => p).map(p => path.relative(electronRoot, p));
console.info(JSON.stringify(filePaths));
});
});
const filePaths = modules.map(m => m.resource).filter(p => p).map(p => path.relative(electronRoot, p))
console.info(JSON.stringify(filePaths))
})
})
}
}
@@ -21,154 +24,60 @@ module.exports = ({
alwaysHasNode,
loadElectronFromAlternateTarget,
targetDeletesNodeGlobals,
target,
wrapInitWithProfilingTimeout,
wrapInitWithTryCatch
target
}) => {
let entry = path.resolve(electronRoot, 'lib', target, 'init.ts');
let entry = path.resolve(electronRoot, 'lib', target, 'init.ts')
if (!fs.existsSync(entry)) {
entry = path.resolve(electronRoot, 'lib', target, 'init.js');
entry = path.resolve(electronRoot, 'lib', target, 'init.js')
}
const electronAPIFile = path.resolve(electronRoot, 'lib', loadElectronFromAlternateTarget || target, 'api', 'exports', 'electron.ts');
return (env = {}, argv = {}) => {
const onlyPrintingGraph = !!env.PRINT_WEBPACK_GRAPH;
const outputFilename = argv['output-filename'] || `${target}.bundle.js`;
const defines = {
BUILDFLAG: onlyPrintingGraph ? '(a => a)' : ''
};
if (env.buildflags) {
const flagFile = fs.readFileSync(env.buildflags, 'utf8');
for (const line of flagFile.split(/(\r\n|\r|\n)/g)) {
const flagMatch = line.match(/#define BUILDFLAG_INTERNAL_(.+?)\(\) \(([01])\)/);
if (flagMatch) {
const [, flagName, flagValue] = flagMatch;
defines[flagName] = JSON.stringify(Boolean(parseInt(flagValue, 10)));
return ({
mode: 'development',
devtool: 'inline-source-map',
entry,
target: alwaysHasNode ? 'node' : 'web',
output: {
filename: `${target}.bundle.js`
},
resolve: {
alias: {
'@electron/internal': path.resolve(electronRoot, 'lib'),
'electron': path.resolve(electronRoot, 'lib', loadElectronFromAlternateTarget || target, 'api', 'exports', 'electron.ts'),
// Force timers to resolve to our dependency that doens't use window.postMessage
'timers': path.resolve(electronRoot, 'node_modules', 'timers-browserify', 'main.js')
},
extensions: ['.ts', '.js']
},
module: {
rules: [{
test: /\.ts$/,
loader: 'ts-loader',
options: {
configFile: path.resolve(electronRoot, 'tsconfig.electron.json'),
transpileOnly: onlyPrintingGraph,
ignoreDiagnostics: [6059]
}
}
}
const ignoredModules = [];
if (defines.ENABLE_VIEWS_API === 'false') {
ignoredModules.push(
'@electron/internal/browser/api/views/image-view.js'
);
}
const plugins = [];
if (onlyPrintingGraph) {
plugins.push(new AccessDependenciesPlugin());
}
if (targetDeletesNodeGlobals) {
plugins.push(new webpack.ProvidePlugin({
Buffer: ['@electron/internal/common/webpack-provider', 'Buffer'],
global: ['@electron/internal/common/webpack-provider', '_global'],
process: ['@electron/internal/common/webpack-provider', 'process']
}));
}
// Webpack 5 no longer polyfills process or Buffer.
if (!alwaysHasNode) {
plugins.push(new webpack.ProvidePlugin({
Buffer: ['buffer', 'Buffer'],
process: 'process/browser'
}));
}
plugins.push(new webpack.ProvidePlugin({
Promise: ['@electron/internal/common/webpack-globals-provider', 'Promise']
}));
plugins.push(new webpack.DefinePlugin(defines));
if (wrapInitWithProfilingTimeout) {
plugins.push(new WrapperPlugin({
header: 'function ___electron_webpack_init__() {',
footer: `
};
if ((globalThis.process || binding.process).argv.includes("--profile-electron-init")) {
setTimeout(___electron_webpack_init__, 0);
} else {
___electron_webpack_init__();
}`
}));
}
if (wrapInitWithTryCatch) {
plugins.push(new WrapperPlugin({
header: 'try {',
footer: `
} catch (err) {
console.error('Electron ${outputFilename} script failed to run');
console.error(err);
}`
}));
}
return {
mode: 'development',
devtool: false,
entry,
target: alwaysHasNode ? 'node' : 'web',
output: {
filename: outputFilename
},
resolve: {
alias: {
'@electron/internal': path.resolve(electronRoot, 'lib'),
electron$: electronAPIFile,
'electron/main$': electronAPIFile,
'electron/renderer$': electronAPIFile,
'electron/common$': electronAPIFile,
// Force timers to resolve to our dependency that doesn't use window.postMessage
timers: path.resolve(electronRoot, 'node_modules', 'timers-browserify', 'main.js')
},
extensions: ['.ts', '.js'],
fallback: {
// We provide our own "timers" import above, any usage of setImmediate inside
// one of our renderer bundles should import it from the 'timers' package
setImmediate: false
}
},
module: {
rules: [{
test: (moduleName) => !onlyPrintingGraph && ignoredModules.includes(moduleName),
loader: 'null-loader'
}, {
test: /\.ts$/,
loader: 'ts-loader',
options: {
configFile: path.resolve(electronRoot, 'tsconfig.electron.json'),
transpileOnly: onlyPrintingGraph,
ignoreDiagnostics: [
// File '{0}' is not under 'rootDir' '{1}'.
6059
]
}
}]
},
node: {
__dirname: false,
__filename: false
},
optimization: {
minimize: env.mode === 'production',
minimizer: [
new TerserPlugin({
terserOptions: {
keep_classnames: true,
keep_fnames: true
}
})
]
},
plugins
};
};
};
}]
},
node: {
__dirname: false,
__filename: false,
// We provide our own "timers" import above, any usage of setImmediate inside
// one of our renderer bundles should import it from the 'timers' package
setImmediate: false,
},
plugins: [
new AccessDependenciesPlugin(),
...(targetDeletesNodeGlobals ? [
new webpack.ProvidePlugin({
process: ['@electron/internal/renderer/webpack-provider', 'process'],
global: ['@electron/internal/renderer/webpack-provider', '_global'],
Buffer: ['@electron/internal/renderer/webpack-provider', 'Buffer'],
})
] : []),
new webpack.ProvidePlugin({
Promise: ['@electron/internal/common/webpack-globals-provider', 'Promise'],
}),
]
})
}


@@ -1,4 +1,4 @@
module.exports = require('./webpack.config.base')({
target: 'browser',
alwaysHasNode: true
});
})


@@ -0,0 +1,4 @@
module.exports = require('./webpack.config.base')({
target: 'content_script',
alwaysHasNode: false
})


@@ -1,5 +1,4 @@
module.exports = require('./webpack.config.base')({
target: 'isolated_renderer',
alwaysHasNode: false,
wrapInitWithTryCatch: true
});
alwaysHasNode: false
})


@@ -1,7 +1,5 @@
module.exports = require('./webpack.config.base')({
target: 'renderer',
alwaysHasNode: true,
targetDeletesNodeGlobals: true,
wrapInitWithProfilingTimeout: true,
wrapInitWithTryCatch: true
});
targetDeletesNodeGlobals: true
})


@@ -1,6 +1,4 @@
module.exports = require('./webpack.config.base')({
target: 'sandboxed_renderer',
alwaysHasNode: false,
wrapInitWithProfilingTimeout: true,
wrapInitWithTryCatch: true
});
alwaysHasNode: false
})


@@ -1,4 +0,0 @@
module.exports = require('./webpack.config.base')({
target: 'utility',
alwaysHasNode: true
});


@@ -2,6 +2,5 @@ module.exports = require('./webpack.config.base')({
target: 'worker',
loadElectronFromAlternateTarget: 'renderer',
alwaysHasNode: true,
targetDeletesNodeGlobals: true,
wrapInitWithTryCatch: true
});
targetDeletesNodeGlobals: true
})


@@ -22,25 +22,13 @@ template("webpack_build") {
"//electron/typings/internal-electron.d.ts",
] + invoker.inputs
mode = "development"
if (is_official_build) {
mode = "production"
}
args = [
"--config",
rebase_path(invoker.config_file),
"--output-filename",
get_path_info(invoker.out_file, "file"),
"--output-path",
rebase_path(get_path_info(invoker.out_file, "dir")),
"--env",
"buildflags=" + rebase_path("$target_gen_dir/buildflags/buildflags.h"),
"--env",
"mode=" + mode,
rebase_path(invoker.out_file),
]
deps += [ "//electron/buildflags" ]
outputs = [ invoker.out_file ]
outputs = [
invoker.out_file,
]
}
}


@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
from __future__ import print_function
import os
import subprocess
@@ -9,38 +9,17 @@ EXTENSIONS_TO_SKIP = [
'.pdb',
'.mojom.js',
'.mojom-lite.js',
'.info',
'.m.js',
# These are only needed for Chromium tests we don't run. Listed in
# 'extensions' because the mksnapshot zip has these under a subdirectory, and
# the PATHS_TO_SKIP is checked with |startswith|.
'dbgcore.dll',
'dbghelp.dll',
]
PATHS_TO_SKIP = [
# Skip because it is an output of //ui/gl that we don't need.
'angledata',
# Skip because these are outputs that we don't need.
'./libVkICD_mock_',
# Skip because these are outputs that we don't need.
'./VkICD_mock_',
# Skip because it's an output of create_bundle from
# //build/config/mac/rules.gni that we don't need
'Electron.dSYM',
# Refs https://chromium-review.googlesource.com/c/angle/angle/+/2425197.
# Remove this when Angle themselves remove the file:
# https://issuetracker.google.com/issues/168736059
'gen/angle/angle_commit.h',
'angledata', # Skipping because it is an output of //ui/gl that we don't need
'./libVkICD_mock_', # Skipping because these are outputs that we don't need
'./VkICD_mock_', # Skipping because these are outputs that we don't need
# //chrome/browser:resources depends on this via
# //chrome/browser/resources/ssl/ssl_error_assistant, but we don't need to
# ship it.
'pyproto',
# Skip because these are outputs that we don't need.
'resources/inspector',
'gen/third_party/devtools-frontend/src',
'gen/ui/webui',
]
def skip_path(dep, dist_zip, target_cpu):
@@ -53,12 +32,7 @@ def skip_path(dep, dist_zip, target_cpu):
should_skip = (
any(dep.startswith(path) for path in PATHS_TO_SKIP) or
any(dep.endswith(ext) for ext in EXTENSIONS_TO_SKIP) or
(
"arm" in target_cpu
and dist_zip == "mksnapshot.zip"
and dep == "snapshot_blob.bin"
)
)
('arm' in target_cpu and dist_zip == 'mksnapshot.zip' and dep == 'snapshot_blob.bin'))
if should_skip:
print("Skipping {}".format(dep))
return should_skip
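As a quick check on the skip rules in this hunk, here is a minimal, standalone Python sketch of the same matching logic. The function name, the trimmed-down lists, and the sample dependency paths are invented for illustration; only the three matching conditions come from the code above.

# Hypothetical, trimmed-down versions of the real lists above.
PATHS_TO_SKIP_SAMPLE = ['angledata', './libVkICD_mock_']
EXTENSIONS_TO_SKIP_SAMPLE = ['.pdb', '.mojom.js']

def skip_path_sketch(dep, dist_zip, target_cpu):
    # A dep is dropped if it starts with a skipped path prefix, ends with a
    # skipped extension, or is the snapshot blob inside an ARM mksnapshot zip.
    return (any(dep.startswith(path) for path in PATHS_TO_SKIP_SAMPLE) or
            any(dep.endswith(ext) for ext in EXTENSIONS_TO_SKIP_SAMPLE) or
            ('arm' in target_cpu and dist_zip == 'mksnapshot.zip'
             and dep == 'snapshot_blob.bin'))

assert skip_path_sketch('angledata/extra.dat', 'dist.zip', 'x64')
assert skip_path_sketch('electron.pdb', 'dist.zip', 'x64')
assert skip_path_sketch('snapshot_blob.bin', 'mksnapshot.zip', 'arm64')
assert not skip_path_sketch('snapshot_blob.bin', 'mksnapshot.zip', 'x64')
assert not skip_path_sketch('electron', 'dist.zip', 'ia32')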
@@ -72,46 +46,29 @@ def execute(argv):
raise e
def main(argv):
dist_zip, runtime_deps, target_cpu, _, flatten_val, flatten_relative_to = argv
dist_zip, runtime_deps, target_cpu, target_os, flatten_val = argv
should_flatten = flatten_val == "true"
dist_files = set()
with open(runtime_deps) as f:
for dep in f.readlines():
dep = dep.strip()
if not skip_path(dep, dist_zip, target_cpu):
dist_files.add(dep)
dist_files.add(dep)
if sys.platform == 'darwin' and not should_flatten:
execute(['zip', '-r', '-y', dist_zip] + list(dist_files))
else:
with zipfile.ZipFile(
dist_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True
) as z:
with zipfile.ZipFile(dist_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as z:
for dep in dist_files:
if skip_path(dep, dist_zip, target_cpu):
continue
if os.path.isdir(dep):
for root, _, files in os.walk(dep):
for filename in files:
z.write(os.path.join(root, filename))
for root, dirs, files in os.walk(dep):
for file in files:
z.write(os.path.join(root, file))
else:
basename = os.path.basename(dep)
dirname = os.path.dirname(dep)
arcname = (
os.path.join(dirname, 'chrome-sandbox')
if basename == 'chrome_sandbox'
else dep
)
name_to_write = arcname
if should_flatten:
if flatten_relative_to:
if name_to_write.startswith(flatten_relative_to):
name_to_write = name_to_write[len(flatten_relative_to):]
else:
name_to_write = os.path.basename(arcname)
else:
name_to_write = os.path.basename(arcname)
z.write(
dep,
name_to_write,
)
arcname = os.path.join(dirname, 'chrome-sandbox') if basename == 'chrome_sandbox' else dep
z.write(dep, os.path.basename(arcname) if should_flatten else arcname)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
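Both sides of this hunk share the same archive-naming idea, which is easier to read in isolation: a chrome_sandbox entry is stored in the zip as chrome-sandbox, and flattening reduces an entry to its basename, optionally after stripping a flatten_relative_to prefix (the prefix-stripping branch appears on only one side of the hunk). A minimal sketch with an invented helper name and made-up sample paths:

import os

def arcname_sketch(dep, should_flatten, flatten_relative_to=''):
    # chrome_sandbox is renamed to chrome-sandbox inside the archive.
    basename = os.path.basename(dep)
    dirname = os.path.dirname(dep)
    arcname = os.path.join(dirname, 'chrome-sandbox') if basename == 'chrome_sandbox' else dep
    if not should_flatten:
        return arcname
    # When flattening, strip the given prefix if it matches; otherwise keep
    # only the basename.
    if flatten_relative_to and arcname.startswith(flatten_relative_to):
        return arcname[len(flatten_relative_to):]
    return os.path.basename(arcname)

# POSIX-style separators assumed in the expected values below.
print(arcname_sketch('out/Release/chrome_sandbox', False))           # out/Release/chrome-sandbox
print(arcname_sketch('out/Release/electron', True))                  # electron
print(arcname_sketch('out/Release/electron', True, 'out/Release/'))  # electron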


@@ -1,47 +0,0 @@
#!/usr/bin/env python3
from __future__ import print_function
import os
import subprocess
import sys
import zipfile
def execute(argv):
try:
output = subprocess.check_output(argv, stderr=subprocess.STDOUT)
return output
except subprocess.CalledProcessError as e:
print(e.output)
raise e
def get_object_files(base_path, archive_name):
archive_file = os.path.join(base_path, archive_name)
output = execute(['nm', '-g', archive_file]).decode('ascii')
object_files = set()
lines = output.split("\n")
for line in lines:
if line.startswith(base_path):
object_file = line.split(":")[0]
object_files.add(object_file)
if line.startswith('nm: '):
object_file = line.split(":")[1].lstrip()
object_files.add(object_file)
return list(object_files) + [archive_file]
def main(argv):
dist_zip, = argv
out_dir = os.path.dirname(dist_zip)
base_path_libcxx = os.path.join(out_dir, 'obj/buildtools/third_party/libc++')
base_path_libcxxabi = os.path.join(out_dir, 'obj/buildtools/third_party/libc++abi')
object_files_libcxx = get_object_files(base_path_libcxx, 'libc++.a')
object_files_libcxxabi = get_object_files(base_path_libcxxabi, 'libc++abi.a')
with zipfile.ZipFile(
dist_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True
) as z:
object_files_libcxx.sort()
for object_file in object_files_libcxx:
z.write(object_file, os.path.relpath(object_file, base_path_libcxx))
for object_file in object_files_libcxxabi:
z.write(object_file, os.path.relpath(object_file, base_path_libcxxabi))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
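The core of the script removed in this hunk is the nm-output parsing in get_object_files: a line that starts with the archive's base path names an object file before its first colon, and warnings of the form "nm: path: no symbols" are mined for the path after the first colon. Below is a standalone sketch of that parsing against a canned sample string; the sample output, symbol name, and member paths are invented, and no real nm invocation is needed.

SAMPLE_NM_OUTPUT = """\
obj/buildtools/third_party/libc++/libc++/algorithm.o:
0000000000000000 T _ZSt9terminatev
nm: obj/buildtools/third_party/libc++/libc++/new.o: no symbols
"""

def parse_object_files(base_path, nm_output):
    object_files = set()
    for line in nm_output.split("\n"):
        if line.startswith(base_path):
            # "path/to/member.o:" -> everything before the first colon.
            object_files.add(line.split(":")[0])
        if line.startswith('nm: '):
            # "nm: path/to/member.o: no symbols" -> second colon-separated field.
            object_files.add(line.split(":")[1].lstrip())
    return sorted(object_files)

print(parse_object_files('obj/buildtools/third_party/libc++', SAMPLE_NM_OUTPUT))
# ['obj/buildtools/third_party/libc++/libc++/algorithm.o',
#  'obj/buildtools/third_party/libc++/libc++/new.o']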


@@ -9,10 +9,18 @@ buildflag_header("buildflags") {
header = "buildflags.h"
flags = [
"ENABLE_VIEWS_API=$enable_views_api",
"ENABLE_DESKTOP_CAPTURER=$enable_desktop_capturer",
"ENABLE_RUN_AS_NODE=$enable_run_as_node",
"ENABLE_OSR=$enable_osr",
"ENABLE_REMOTE_MODULE=$enable_remote_module",
"ENABLE_VIEW_API=$enable_view_api",
"ENABLE_PEPPER_FLASH=$enable_pepper_flash",
"ENABLE_PDF_VIEWER=$enable_pdf_viewer",
"ENABLE_TTS=$enable_tts",
"ENABLE_COLOR_CHOOSER=$enable_color_chooser",
"ENABLE_ELECTRON_EXTENSIONS=$enable_electron_extensions",
"ENABLE_BUILTIN_SPELLCHECKER=$enable_builtin_spellchecker",
"ENABLE_PICTURE_IN_PICTURE=$enable_picture_in_picture",
"OVERRIDE_LOCATION_PROVIDER=$enable_fake_location_provider",
]
}


@@ -3,9 +3,24 @@
# found in the LICENSE file.
declare_args() {
enable_views_api = true
enable_desktop_capturer = true
enable_pdf_viewer = true
# Allow running Electron as a node binary.
enable_run_as_node = true
enable_osr = true
enable_remote_module = true
enable_view_api = false
enable_pdf_viewer = false
enable_tts = true
enable_color_chooser = true
enable_picture_in_picture = true
# Provide a fake location provider for mocking
# the geolocation responses. Disable it if you
@@ -13,8 +28,11 @@ declare_args() {
# Should not be enabled for release build.
enable_fake_location_provider = !is_official_build
# Enable flash plugin support.
enable_pepper_flash = true
# Enable Chrome extensions support.
enable_electron_extensions = true
enable_electron_extensions = false
# Enable Spellchecker support
enable_builtin_spellchecker = true


@@ -2,11 +2,9 @@
# Use of this source code is governed by the MIT license that can be
# found in the LICENSE file.
import("//build/config/ozone.gni")
import("//build/config/ui.gni")
import("//components/spellcheck/spellcheck_build_features.gni")
import("//electron/buildflags/buildflags.gni")
import("//ppapi/buildflags/buildflags.gni")
import("//printing/buildflags/buildflags.gni")
import("//third_party/widevine/cdm/widevine.gni")
@@ -14,51 +12,34 @@ import("//third_party/widevine/cdm/widevine.gni")
static_library("chrome") {
visibility = [ "//electron:electron_lib" ]
sources = [
"//chrome/browser/accessibility/accessibility_ui.cc",
"//chrome/browser/accessibility/accessibility_ui.h",
"//chrome/browser/app_mode/app_mode_utils.cc",
"//chrome/browser/app_mode/app_mode_utils.h",
"//chrome/browser/browser_features.cc",
"//chrome/browser/browser_features.h",
"//chrome/browser/browser_process.cc",
"//chrome/browser/browser_process.h",
"//chrome/browser/devtools/devtools_contents_resizing_strategy.cc",
"//chrome/browser/devtools/devtools_contents_resizing_strategy.h",
"//chrome/browser/devtools/devtools_embedder_message_dispatcher.cc",
"//chrome/browser/devtools/devtools_embedder_message_dispatcher.h",
"//chrome/browser/devtools/devtools_eye_dropper.cc",
"//chrome/browser/devtools/devtools_eye_dropper.h",
"//chrome/browser/devtools/devtools_file_system_indexer.cc",
"//chrome/browser/devtools/devtools_file_system_indexer.h",
"//chrome/browser/devtools/devtools_settings.h",
"//chrome/browser/extensions/global_shortcut_listener.cc",
"//chrome/browser/extensions/global_shortcut_listener.h",
"//chrome/browser/extensions/global_shortcut_listener_mac.h",
"//chrome/browser/extensions/global_shortcut_listener_mac.mm",
"//chrome/browser/extensions/global_shortcut_listener_win.cc",
"//chrome/browser/extensions/global_shortcut_listener_win.h",
"//chrome/browser/icon_loader.cc",
"//chrome/browser/icon_loader.h",
"//chrome/browser/icon_loader_mac.mm",
"//chrome/browser/icon_loader_win.cc",
"//chrome/browser/icon_manager.cc",
"//chrome/browser/icon_manager.h",
"//chrome/browser/media/webrtc/desktop_capturer_wrapper.cc",
"//chrome/browser/media/webrtc/desktop_capturer_wrapper.h",
"//chrome/browser/media/webrtc/desktop_media_list.cc",
"//chrome/browser/media/webrtc/desktop_media_list.h",
"//chrome/browser/media/webrtc/desktop_media_list_base.cc",
"//chrome/browser/media/webrtc/desktop_media_list_base.h",
"//chrome/browser/media/webrtc/desktop_media_list_observer.h",
"//chrome/browser/media/webrtc/native_desktop_media_list.cc",
"//chrome/browser/media/webrtc/native_desktop_media_list.h",
"//chrome/browser/media/webrtc/thumbnail_capturer.cc",
"//chrome/browser/media/webrtc/thumbnail_capturer.h",
"//chrome/browser/media/webrtc/window_icon_util.h",
"//chrome/browser/media/webrtc/system_media_capture_permissions_mac.h",
"//chrome/browser/media/webrtc/system_media_capture_permissions_mac.mm",
"//chrome/browser/net/chrome_mojo_proxy_resolver_factory.cc",
"//chrome/browser/net/chrome_mojo_proxy_resolver_factory.h",
"//chrome/browser/net/proxy_config_monitor.cc",
"//chrome/browser/net/proxy_config_monitor.h",
"//chrome/browser/net/proxy_service_factory.cc",
"//chrome/browser/net/proxy_service_factory.h",
"//chrome/browser/picture_in_picture/picture_in_picture_window_manager.cc",
"//chrome/browser/picture_in_picture/picture_in_picture_window_manager.h",
"//chrome/browser/platform_util.cc",
"//chrome/browser/platform_util.h",
"//chrome/browser/predictors/preconnect_manager.cc",
"//chrome/browser/predictors/preconnect_manager.h",
"//chrome/browser/predictors/predictors_features.cc",
@@ -67,136 +48,34 @@ static_library("chrome") {
"//chrome/browser/predictors/proxy_lookup_client_impl.h",
"//chrome/browser/predictors/resolve_host_client_impl.cc",
"//chrome/browser/predictors/resolve_host_client_impl.h",
"//chrome/browser/process_singleton.h",
"//chrome/browser/process_singleton_internal.cc",
"//chrome/browser/process_singleton_internal.h",
"//chrome/browser/themes/browser_theme_pack.cc",
"//chrome/browser/themes/browser_theme_pack.h",
"//chrome/browser/themes/custom_theme_supplier.cc",
"//chrome/browser/themes/custom_theme_supplier.h",
"//chrome/browser/themes/theme_properties.cc",
"//chrome/browser/themes/theme_properties.h",
"//chrome/browser/ui/color/chrome_color_mixers.cc",
"//chrome/browser/ui/color/chrome_color_mixers.h",
"//chrome/browser/ui/exclusive_access/exclusive_access_bubble_type.cc",
"//chrome/browser/ui/exclusive_access/exclusive_access_bubble_type.h",
"//chrome/browser/ui/exclusive_access/exclusive_access_controller_base.cc",
"//chrome/browser/ui/exclusive_access/exclusive_access_controller_base.h",
"//chrome/browser/ui/exclusive_access/exclusive_access_manager.cc",
"//chrome/browser/ui/exclusive_access/exclusive_access_manager.h",
"//chrome/browser/ui/exclusive_access/fullscreen_controller.cc",
"//chrome/browser/ui/exclusive_access/fullscreen_controller.h",
"//chrome/browser/ui/exclusive_access/fullscreen_within_tab_helper.cc",
"//chrome/browser/ui/exclusive_access/fullscreen_within_tab_helper.h",
"//chrome/browser/ui/exclusive_access/keyboard_lock_controller.cc",
"//chrome/browser/ui/exclusive_access/keyboard_lock_controller.h",
"//chrome/browser/ui/exclusive_access/mouse_lock_controller.cc",
"//chrome/browser/ui/exclusive_access/mouse_lock_controller.h",
"//chrome/browser/ui/frame/window_frame_util.cc",
"//chrome/browser/ui/frame/window_frame_util.h",
"//chrome/browser/ui/ui_features.cc",
"//chrome/browser/ui/ui_features.h",
"//chrome/browser/ui/views/eye_dropper/eye_dropper.cc",
"//chrome/browser/ui/views/eye_dropper/eye_dropper.h",
"//chrome/browser/ui/views/eye_dropper/eye_dropper_view.cc",
"//chrome/browser/ui/views/eye_dropper/eye_dropper_view.h",
"//chrome/browser/ui/views/overlay/back_to_tab_label_button.cc",
"//chrome/browser/ui/views/overlay/close_image_button.cc",
"//chrome/browser/ui/views/overlay/close_image_button.h",
"//chrome/browser/ui/views/overlay/constants.h",
"//chrome/browser/ui/views/overlay/hang_up_button.cc",
"//chrome/browser/ui/views/overlay/hang_up_button.h",
"//chrome/browser/ui/views/overlay/overlay_window_image_button.cc",
"//chrome/browser/ui/views/overlay/overlay_window_image_button.h",
"//chrome/browser/ui/views/overlay/playback_image_button.cc",
"//chrome/browser/ui/views/overlay/playback_image_button.h",
"//chrome/browser/ui/views/overlay/resize_handle_button.cc",
"//chrome/browser/ui/views/overlay/resize_handle_button.h",
"//chrome/browser/ui/views/overlay/simple_overlay_window_image_button.cc",
"//chrome/browser/ui/views/overlay/simple_overlay_window_image_button.h",
"//chrome/browser/ui/views/overlay/skip_ad_label_button.cc",
"//chrome/browser/ui/views/overlay/skip_ad_label_button.h",
"//chrome/browser/ui/views/overlay/toggle_camera_button.cc",
"//chrome/browser/ui/views/overlay/toggle_camera_button.h",
"//chrome/browser/ui/views/overlay/toggle_microphone_button.cc",
"//chrome/browser/ui/views/overlay/toggle_microphone_button.h",
"//chrome/browser/ui/views/overlay/video_overlay_window_views.cc",
"//chrome/browser/ui/views/overlay/video_overlay_window_views.h",
"//chrome/browser/ssl/security_state_tab_helper.cc",
"//chrome/browser/ssl/security_state_tab_helper.h",
"//chrome/browser/ssl/tls_deprecation_config.cc",
"//chrome/browser/ui/autofill/popup_view_common.cc",
"//chrome/browser/ui/autofill/popup_view_common.h",
"//chrome/browser/win/chrome_process_finder.cc",
"//chrome/browser/win/chrome_process_finder.h",
"//extensions/browser/app_window/size_constraints.cc",
"//extensions/browser/app_window/size_constraints.h",
"//ui/views/native_window_tracker.h",
]
if (is_posix) {
sources += [ "//chrome/browser/process_singleton_posix.cc" ]
}
if (is_win) {
sources += [
"//chrome/browser/extensions/global_shortcut_listener_win.cc",
"//chrome/browser/extensions/global_shortcut_listener_win.h",
"//chrome/browser/icon_loader_win.cc",
"//chrome/browser/media/webrtc/window_icon_util_win.cc",
"//chrome/browser/process_singleton_win.cc",
"//chrome/browser/ui/frame/window_frame_util.h",
"//chrome/browser/ui/view_ids.h",
"//chrome/browser/win/chrome_process_finder.cc",
"//chrome/browser/win/chrome_process_finder.h",
"//chrome/browser/win/titlebar_config.cc",
"//chrome/browser/win/titlebar_config.h",
"//chrome/browser/win/titlebar_config.h",
"//chrome/child/v8_crashpad_support_win.cc",
"//chrome/child/v8_crashpad_support_win.h",
]
}
if (is_linux) {
sources += [ "//chrome/browser/media/webrtc/window_icon_util_ozone.cc" ]
}
if (use_aura) {
sources += [
"//chrome/browser/platform_util_aura.cc",
"//chrome/browser/ui/views/eye_dropper/eye_dropper_view_aura.cc",
"//ui/views/native_window_tracker_aura.cc",
"//ui/views/native_window_tracker_aura.h",
]
}
public_deps = [
"//chrome/browser:dev_ui_browser_resources",
"//chrome/browser/resources/accessibility:resources",
"//chrome/browser/ui/color:mixers",
"//chrome/common",
"//chrome/common:version_header",
"//components/keyed_service/content",
"//components/paint_preview/buildflags",
"//components/proxy_config",
"//components/services/language_detection/public/mojom",
"//components/security_state/content",
"//content/public/browser",
"//services/strings",
]
deps = [
"//chrome/app/vector_icons",
"//chrome/browser:resource_prefetch_predictor_proto",
"//chrome/browser/resource_coordinator:mojo_bindings",
"//chrome/browser/web_applications/mojom:mojom_web_apps_enum",
"//components/vector_icons:vector_icons",
"//ui/snapshot",
"//ui/views/controls/webview",
"//components/feature_engagement:buildflags",
]
if (is_linux) {
sources += [ "//chrome/browser/icon_loader_auralinux.cc" ]
if (use_ozone) {
deps += [ "//ui/ozone" ]
sources += [
"//chrome/browser/extensions/global_shortcut_listener_ozone.cc",
"//chrome/browser/extensions/global_shortcut_listener_ozone.h",
]
}
sources += [
"//chrome/browser/extensions/global_shortcut_listener_x11.cc",
"//chrome/browser/extensions/global_shortcut_listener_x11.h",
"//chrome/browser/ui/views/status_icons/concat_menu_model.cc",
"//chrome/browser/ui/views/status_icons/concat_menu_model.h",
"//chrome/browser/ui/views/status_icons/status_icon_linux_dbus.cc",
@@ -208,28 +87,66 @@ static_library("chrome") {
]
}
if (is_win) {
if (enable_desktop_capturer) {
sources += [
"//chrome/browser/win/icon_reader_service.cc",
"//chrome/browser/win/icon_reader_service.h",
"//chrome/browser/media/webrtc/desktop_media_list.h",
"//chrome/browser/media/webrtc/desktop_media_list_base.cc",
"//chrome/browser/media/webrtc/desktop_media_list_base.h",
"//chrome/browser/media/webrtc/desktop_media_list_observer.h",
"//chrome/browser/media/webrtc/native_desktop_media_list.cc",
"//chrome/browser/media/webrtc/native_desktop_media_list.h",
"//chrome/browser/media/webrtc/window_icon_util.h",
]
public_deps += [ "//chrome/services/util_win:lib" ]
deps += [ "//ui/snapshot" ]
}
if (is_mac) {
if (enable_color_chooser) {
sources += [
"//chrome/browser/extensions/global_shortcut_listener_mac.h",
"//chrome/browser/extensions/global_shortcut_listener_mac.mm",
"//chrome/browser/icon_loader_mac.mm",
"//chrome/browser/media/webrtc/system_media_capture_permissions_mac.h",
"//chrome/browser/media/webrtc/system_media_capture_permissions_mac.mm",
"//chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.h",
"//chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.mm",
"//chrome/browser/media/webrtc/window_icon_util_mac.mm",
"//chrome/browser/platform_util_mac.mm",
"//chrome/browser/process_singleton_mac.mm",
"//chrome/browser/ui/views/eye_dropper/eye_dropper_view_mac.h",
"//chrome/browser/ui/views/eye_dropper/eye_dropper_view_mac.mm",
"//chrome/browser/platform_util.cc",
"//chrome/browser/platform_util.h",
"//chrome/browser/ui/browser_dialogs.h",
"//chrome/browser/ui/color_chooser.h",
]
if (use_aura) {
sources += [
"//chrome/browser/platform_util_aura.cc",
"//chrome/browser/ui/views/color_chooser_aura.cc",
"//chrome/browser/ui/views/color_chooser_aura.h",
]
deps += [ "//components/feature_engagement" ]
}
if (is_mac) {
sources += [
"//chrome/browser/media/webrtc/window_icon_util_mac.mm",
"//chrome/browser/ui/cocoa/color_chooser_mac.h",
"//chrome/browser/ui/cocoa/color_chooser_mac.mm",
]
deps += [
"//components/remote_cocoa/app_shim",
"//components/remote_cocoa/browser",
]
}
if (is_win) {
sources += [
"//chrome/browser/media/webrtc/window_icon_util_win.cc",
"//chrome/browser/ui/views/color_chooser_dialog.cc",
"//chrome/browser/ui/views/color_chooser_dialog.h",
"//chrome/browser/ui/views/color_chooser_win.cc",
]
}
if (is_linux) {
sources += [ "//chrome/browser/media/webrtc/window_icon_util_x11.cc" ]
}
}
if (enable_tts) {
sources += [
"//chrome/browser/speech/tts_controller_delegate_impl.cc",
"//chrome/browser/speech/tts_controller_delegate_impl.h",
]
}
@@ -243,49 +160,33 @@ static_library("chrome") {
deps += [ "//components/cdm/renderer" ]
}
if (enable_printing) {
if (enable_basic_printing) {
sources += [
"//chrome/browser/bad_message.cc",
"//chrome/browser/bad_message.h",
"//chrome/browser/printing/print_job.cc",
"//chrome/browser/printing/print_job.h",
"//chrome/browser/printing/print_job_manager.cc",
"//chrome/browser/printing/print_job_manager.h",
"//chrome/browser/printing/print_job_worker.cc",
"//chrome/browser/printing/print_job_worker.h",
"//chrome/browser/printing/print_job_worker_oop.cc",
"//chrome/browser/printing/print_job_worker_oop.h",
"//chrome/browser/printing/print_view_manager_base.cc",
"//chrome/browser/printing/print_view_manager_base.h",
"//chrome/browser/printing/print_view_manager_basic.cc",
"//chrome/browser/printing/print_view_manager_basic.h",
"//chrome/browser/printing/printer_query.cc",
"//chrome/browser/printing/printer_query.h",
"//chrome/browser/printing/printer_query_oop.cc",
"//chrome/browser/printing/printer_query_oop.h",
"//chrome/browser/printing/printing_message_filter.cc",
"//chrome/browser/printing/printing_message_filter.h",
"//chrome/browser/printing/printing_service.cc",
"//chrome/browser/printing/printing_service.h",
"//components/printing/browser/print_to_pdf/pdf_print_job.cc",
"//components/printing/browser/print_to_pdf/pdf_print_job.h",
"//components/printing/browser/print_to_pdf/pdf_print_result.cc",
"//components/printing/browser/print_to_pdf/pdf_print_result.h",
"//components/printing/browser/print_to_pdf/pdf_print_utils.cc",
"//components/printing/browser/print_to_pdf/pdf_print_utils.h",
]
if (enable_oop_printing) {
sources += [
"//chrome/browser/printing/print_backend_service_manager.cc",
"//chrome/browser/printing/print_backend_service_manager.h",
]
}
public_deps += [
"//chrome/services/printing:lib",
"//components/printing/browser",
"//components/printing/renderer",
"//components/services/print_compositor",
"//components/services/print_compositor/public/cpp",
"//components/services/print_compositor/public/mojom",
"//printing/backend",
"//components/services/pdf_compositor",
"//components/services/pdf_compositor/public/cpp",
"//components/services/pdf_compositor/public/mojom",
]
deps += [
@@ -297,67 +198,79 @@ static_library("chrome") {
sources += [
"//chrome/browser/printing/pdf_to_emf_converter.cc",
"//chrome/browser/printing/pdf_to_emf_converter.h",
"//chrome/browser/printing/printer_xml_parser_impl.cc",
"//chrome/browser/printing/printer_xml_parser_impl.h",
"//chrome/utility/printing_handler.cc",
"//chrome/utility/printing_handler.h",
]
deps += [ "//printing:printing_base" ]
}
}
if (enable_electron_extensions) {
if (enable_picture_in_picture) {
sources += [
"//chrome/browser/extensions/chrome_url_request_util.cc",
"//chrome/browser/extensions/chrome_url_request_util.h",
"//chrome/browser/plugins/plugin_response_interceptor_url_loader_throttle.cc",
"//chrome/browser/plugins/plugin_response_interceptor_url_loader_throttle.h",
"//chrome/renderer/extensions/api/extension_hooks_delegate.cc",
"//chrome/renderer/extensions/api/extension_hooks_delegate.h",
"//chrome/renderer/extensions/api/tabs_hooks_delegate.cc",
"//chrome/renderer/extensions/api/tabs_hooks_delegate.h",
"//chrome/browser/picture_in_picture/picture_in_picture_window_manager.cc",
"//chrome/browser/picture_in_picture/picture_in_picture_window_manager.h",
"//chrome/browser/ui/views/overlay/back_to_tab_image_button.cc",
"//chrome/browser/ui/views/overlay/back_to_tab_image_button.h",
"//chrome/browser/ui/views/overlay/close_image_button.cc",
"//chrome/browser/ui/views/overlay/close_image_button.h",
"//chrome/browser/ui/views/overlay/overlay_window_views.cc",
"//chrome/browser/ui/views/overlay/overlay_window_views.h",
"//chrome/browser/ui/views/overlay/playback_image_button.cc",
"//chrome/browser/ui/views/overlay/playback_image_button.h",
"//chrome/browser/ui/views/overlay/resize_handle_button.cc",
"//chrome/browser/ui/views/overlay/resize_handle_button.h",
"//chrome/browser/ui/views/overlay/skip_ad_label_button.cc",
"//chrome/browser/ui/views/overlay/skip_ad_label_button.h",
"//chrome/browser/ui/views/overlay/track_image_button.cc",
"//chrome/browser/ui/views/overlay/track_image_button.h",
]
if (enable_pdf_viewer) {
sources += [
"//chrome/browser/pdf/chrome_pdf_stream_delegate.cc",
"//chrome/browser/pdf/chrome_pdf_stream_delegate.h",
"//chrome/browser/pdf/pdf_extension_util.cc",
"//chrome/browser/pdf/pdf_extension_util.h",
"//chrome/browser/pdf/pdf_frame_util.cc",
"//chrome/browser/pdf/pdf_frame_util.h",
"//chrome/browser/plugins/pdf_iframe_navigation_throttle.cc",
"//chrome/browser/plugins/pdf_iframe_navigation_throttle.h",
]
deps += [
"//components/pdf/browser",
"//components/pdf/renderer",
]
}
}
if (!is_mas_build) {
sources += [ "//chrome/browser/hang_monitor/hang_crash_dump.h" ]
if (is_mac) {
sources += [ "//chrome/browser/hang_monitor/hang_crash_dump_mac.cc" ]
} else if (is_win) {
sources += [ "//chrome/browser/hang_monitor/hang_crash_dump_win.cc" ]
} else {
sources += [ "//chrome/browser/hang_monitor/hang_crash_dump.cc" ]
}
deps += [
"//chrome/app/vector_icons",
"//components/vector_icons:vector_icons",
]
}
}
source_set("plugins") {
sources = []
deps = []
frameworks = []
libs = []
# browser side
sources += [
"//chrome/browser/renderer_host/pepper/chrome_browser_pepper_host_factory.cc",
"//chrome/browser/renderer_host/pepper/chrome_browser_pepper_host_factory.h",
"//chrome/browser/renderer_host/pepper/pepper_broker_message_filter.cc",
"//chrome/browser/renderer_host/pepper/pepper_broker_message_filter.h",
"//chrome/browser/renderer_host/pepper/pepper_isolated_file_system_message_filter.cc",
"//chrome/browser/renderer_host/pepper/pepper_isolated_file_system_message_filter.h",
]
deps += [
"//media:media_buildflags",
"//ppapi/buildflags",
"//ppapi/proxy:ipc",
"//services/device/public/mojom",
]
if (enable_pepper_flash) {
sources += [
"//chrome/browser/renderer_host/pepper/pepper_flash_browser_host.cc",
"//chrome/browser/renderer_host/pepper/pepper_flash_browser_host.h",
"//chrome/browser/renderer_host/pepper/pepper_flash_clipboard_message_filter.cc",
"//chrome/browser/renderer_host/pepper/pepper_flash_clipboard_message_filter.h",
"//chrome/browser/renderer_host/pepper/pepper_flash_drm_host.cc",
"//chrome/browser/renderer_host/pepper/pepper_flash_drm_host.h",
]
if (is_mac) {
sources += [
"//chrome/browser/renderer_host/pepper/monitor_finder_mac.h",
"//chrome/browser/renderer_host/pepper/monitor_finder_mac.mm",
]
libs += [ "CoreGraphics.framework" ]
}
if (is_linux) {
deps += [ "//components/services/font/public/cpp" ]
}
}
# renderer side
sources += [
@@ -366,87 +279,75 @@ source_set("plugins") {
"//chrome/renderer/pepper/pepper_shared_memory_message_filter.cc",
"//chrome/renderer/pepper/pepper_shared_memory_message_filter.h",
]
if (enable_pepper_flash) {
sources += [
"//chrome/renderer/pepper/pepper_flash_drm_renderer_host.cc",
"//chrome/renderer/pepper/pepper_flash_drm_renderer_host.h",
"//chrome/renderer/pepper/pepper_flash_font_file_host.cc",
"//chrome/renderer/pepper/pepper_flash_font_file_host.h",
"//chrome/renderer/pepper/pepper_flash_fullscreen_host.cc",
"//chrome/renderer/pepper/pepper_flash_fullscreen_host.h",
"//chrome/renderer/pepper/pepper_flash_menu_host.cc",
"//chrome/renderer/pepper/pepper_flash_menu_host.h",
"//chrome/renderer/pepper/pepper_flash_renderer_host.cc",
"//chrome/renderer/pepper/pepper_flash_renderer_host.h",
]
}
deps += [
"//components/strings",
"//media:media_buildflags",
"//services/device/public/mojom",
"//ppapi/host",
"//ppapi/proxy",
"//ppapi/proxy:ipc",
"//ppapi/shared_impl",
"//skia",
"//storage/browser",
]
if (enable_ppapi) {
deps += [
"//ppapi/buildflags",
"//ppapi/host",
"//ppapi/proxy",
"//ppapi/proxy:ipc",
"//ppapi/shared_impl",
]
}
}
# This source set is just so we don't have to depend on all of //chrome/browser
# You may have to add new files here during the upgrade if //chrome/browser/spellchecker
# gets more files
source_set("chrome_spellchecker") {
sources = []
deps = []
libs = []
public_deps = []
sources = [
"//chrome/browser/spellchecker/spell_check_host_chrome_impl.cc",
"//chrome/browser/spellchecker/spell_check_host_chrome_impl.h",
"//chrome/browser/spellchecker/spellcheck_custom_dictionary.cc",
"//chrome/browser/spellchecker/spellcheck_custom_dictionary.h",
"//chrome/browser/spellchecker/spellcheck_factory.cc",
"//chrome/browser/spellchecker/spellcheck_factory.h",
"//chrome/browser/spellchecker/spellcheck_hunspell_dictionary.cc",
"//chrome/browser/spellchecker/spellcheck_hunspell_dictionary.h",
"//chrome/browser/spellchecker/spellcheck_language_blacklist_policy_handler.cc",
"//chrome/browser/spellchecker/spellcheck_language_blacklist_policy_handler.h",
"//chrome/browser/spellchecker/spellcheck_language_policy_handler.cc",
"//chrome/browser/spellchecker/spellcheck_language_policy_handler.h",
"//chrome/browser/spellchecker/spellcheck_service.cc",
"//chrome/browser/spellchecker/spellcheck_service.h",
"//chrome/common/pref_names.h",
]
if (enable_builtin_spellchecker) {
if (has_spellcheck_panel) {
sources += [
"//chrome/browser/profiles/profile_keyed_service_factory.cc",
"//chrome/browser/profiles/profile_keyed_service_factory.h",
"//chrome/browser/profiles/profile_selections.cc",
"//chrome/browser/profiles/profile_selections.h",
"//chrome/browser/spellchecker/spell_check_host_chrome_impl.cc",
"//chrome/browser/spellchecker/spell_check_host_chrome_impl.h",
"//chrome/browser/spellchecker/spellcheck_custom_dictionary.cc",
"//chrome/browser/spellchecker/spellcheck_custom_dictionary.h",
"//chrome/browser/spellchecker/spellcheck_factory.cc",
"//chrome/browser/spellchecker/spellcheck_factory.h",
"//chrome/browser/spellchecker/spellcheck_hunspell_dictionary.cc",
"//chrome/browser/spellchecker/spellcheck_hunspell_dictionary.h",
"//chrome/browser/spellchecker/spellcheck_service.cc",
"//chrome/browser/spellchecker/spellcheck_service.h",
"//chrome/browser/spellchecker/spell_check_panel_host_impl.cc",
"//chrome/browser/spellchecker/spell_check_panel_host_impl.h",
]
if (!is_mac) {
sources += [
"//chrome/browser/spellchecker/spellcheck_language_blocklist_policy_handler.cc",
"//chrome/browser/spellchecker/spellcheck_language_blocklist_policy_handler.h",
"//chrome/browser/spellchecker/spellcheck_language_policy_handler.cc",
"//chrome/browser/spellchecker/spellcheck_language_policy_handler.h",
]
}
if (has_spellcheck_panel) {
sources += [
"//chrome/browser/spellchecker/spell_check_panel_host_impl.cc",
"//chrome/browser/spellchecker/spell_check_panel_host_impl.h",
]
}
if (use_browser_spellchecker) {
sources += [
"//chrome/browser/spellchecker/spelling_request.cc",
"//chrome/browser/spellchecker/spelling_request.h",
]
}
deps += [
"//base:base_static",
"//components/language/core/browser",
"//components/spellcheck:buildflags",
"//components/sync",
]
public_deps += [ "//chrome/common:constants" ]
}
public_deps += [
if (use_browser_spellchecker) {
sources += [
"//chrome/browser/spellchecker/spelling_request.cc",
"//chrome/browser/spellchecker/spelling_request.h",
]
}
deps = [
"//base:base_static",
"//components/language/core/browser",
"//components/spellcheck:buildflags",
"//components/sync",
]
public_deps = [
"//components/spellcheck/browser",
"//components/spellcheck/common",
"//components/spellcheck/renderer",


@@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "shell/browser/certificate_manager_model.h"
#include "chrome/browser/certificate_manager_model.h"
#include <utility>
#include "base/functional/bind.h"
#include "base/bind.h"
#include "base/logging.h"
#include "base/strings/utf_string_conversions.h"
#include "base/task/post_task.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
@@ -35,7 +36,6 @@ net::NSSCertDatabase* GetNSSCertDatabaseForResourceContext(
// Linux has only a single persistent slot compared to ChromeOS's separate
// public and private slot.
// Redirect any slot usage to this persistent slot on Linux.
crypto::EnsureNSSInit();
g_nss_cert_database = new net::NSSCertDatabase(
crypto::ScopedPK11Slot(PK11_GetInternalKeySlot()) /* public slot */,
crypto::ScopedPK11Slot(PK11_GetInternalKeySlot()) /* private slot */);
@@ -69,12 +69,12 @@ net::NSSCertDatabase* GetNSSCertDatabaseForResourceContext(
// static
void CertificateManagerModel::Create(content::BrowserContext* browser_context,
CreationCallback callback) {
const CreationCallback& callback) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
content::GetIOThreadTaskRunner({})->PostTask(
FROM_HERE, base::BindOnce(&CertificateManagerModel::GetCertDBOnIOThread,
browser_context->GetResourceContext(),
std::move(callback)));
base::PostTask(
FROM_HERE, {BrowserThread::IO},
base::BindOnce(&CertificateManagerModel::GetCertDBOnIOThread,
browser_context->GetResourceContext(), callback));
}
CertificateManagerModel::CertificateManagerModel(
@@ -89,7 +89,7 @@ CertificateManagerModel::~CertificateManagerModel() = default;
int CertificateManagerModel::ImportFromPKCS12(
PK11SlotInfo* slot_info,
const std::string& data,
const std::u16string& password,
const base::string16& password,
bool is_extractable,
net::ScopedCERTCertificateList* imported_certs) {
return cert_db_->ImportFromPKCS12(slot_info, data, password, is_extractable,
@@ -129,41 +129,35 @@ bool CertificateManagerModel::Delete(CERTCertificate* cert) {
void CertificateManagerModel::DidGetCertDBOnUIThread(
net::NSSCertDatabase* cert_db,
bool is_user_db_available,
CreationCallback callback) {
const CreationCallback& callback) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
auto model = base::WrapUnique(
std::unique_ptr<CertificateManagerModel> model(
new CertificateManagerModel(cert_db, is_user_db_available));
std::move(callback).Run(std::move(model));
callback.Run(std::move(model));
}
// static
void CertificateManagerModel::DidGetCertDBOnIOThread(
CreationCallback callback,
const CreationCallback& callback,
net::NSSCertDatabase* cert_db) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
bool is_user_db_available = !!cert_db->GetPublicSlot();
content::GetUIThreadTaskRunner({})->PostTask(
FROM_HERE,
base::PostTask(
FROM_HERE, {BrowserThread::UI},
base::BindOnce(&CertificateManagerModel::DidGetCertDBOnUIThread, cert_db,
is_user_db_available, std::move(callback)));
is_user_db_available, callback));
}
// static
void CertificateManagerModel::GetCertDBOnIOThread(
content::ResourceContext* context,
CreationCallback callback) {
const CreationCallback& callback) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
auto split_callback = base::SplitOnceCallback(base::BindOnce(
&CertificateManagerModel::DidGetCertDBOnIOThread, std::move(callback)));
net::NSSCertDatabase* cert_db = GetNSSCertDatabaseForResourceContext(
context, std::move(split_callback.first));
// If the NSS database was already available, |cert_db| is non-null and
// |did_get_cert_db_callback| has not been called. Call it explicitly.
context, base::BindOnce(&CertificateManagerModel::DidGetCertDBOnIOThread,
callback));
if (cert_db)
std::move(split_callback.second).Run(cert_db);
DidGetCertDBOnIOThread(callback, cert_db);
}


@@ -2,15 +2,17 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ELECTRON_SHELL_BROWSER_CERTIFICATE_MANAGER_MODEL_H_
#define ELECTRON_SHELL_BROWSER_CERTIFICATE_MANAGER_MODEL_H_
#ifndef CHROME_BROWSER_CERTIFICATE_MANAGER_MODEL_H_
#define CHROME_BROWSER_CERTIFICATE_MANAGER_MODEL_H_
#include <map>
#include <memory>
#include <string>
#include "base/functional/callback.h"
#include "base/memory/raw_ptr.h"
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/strings/string16.h"
#include "net/cert/nss_cert_database.h"
namespace content {
@@ -22,18 +24,14 @@ class ResourceContext;
// manager dialog, and processes changes from the view.
class CertificateManagerModel {
public:
using CreationCallback =
base::OnceCallback<void(std::unique_ptr<CertificateManagerModel>)>;
typedef base::Callback<void(std::unique_ptr<CertificateManagerModel>)>
CreationCallback;
// Creates a CertificateManagerModel. The model will be passed to the callback
// when it is ready. The caller must ensure the model does not outlive the
// |browser_context|.
static void Create(content::BrowserContext* browser_context,
CreationCallback callback);
// disable copy
CertificateManagerModel(const CertificateManagerModel&) = delete;
CertificateManagerModel& operator=(const CertificateManagerModel&) = delete;
const CreationCallback& callback);
~CertificateManagerModel();
@@ -48,7 +46,7 @@ class CertificateManagerModel {
// Returns a net error code on failure.
int ImportFromPKCS12(PK11SlotInfo* slot_info,
const std::string& data,
const std::u16string& password,
const base::string16& password,
bool is_extractable,
net::ScopedCERTCertificateList* imported_certs);
@@ -102,16 +100,18 @@ class CertificateManagerModel {
// file for details.
static void DidGetCertDBOnUIThread(net::NSSCertDatabase* cert_db,
bool is_user_db_available,
CreationCallback callback);
static void DidGetCertDBOnIOThread(CreationCallback callback,
const CreationCallback& callback);
static void DidGetCertDBOnIOThread(const CreationCallback& callback,
net::NSSCertDatabase* cert_db);
static void GetCertDBOnIOThread(content::ResourceContext* context,
CreationCallback callback);
const CreationCallback& callback);
raw_ptr<net::NSSCertDatabase> cert_db_;
net::NSSCertDatabase* cert_db_;
// Whether the certificate database has a public slot associated with the
// profile. If not set, importing certificates is not allowed with this model.
bool is_user_db_available_;
DISALLOW_COPY_AND_ASSIGN(CertificateManagerModel);
};
#endif // ELECTRON_SHELL_BROWSER_CERTIFICATE_MANAGER_MODEL_H_
#endif // CHROME_BROWSER_CERTIFICATE_MANAGER_MODEL_H_


@@ -0,0 +1,187 @@
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_PROCESS_SINGLETON_H_
#define CHROME_BROWSER_PROCESS_SINGLETON_H_
#if defined(OS_WIN)
#include <windows.h>
#endif // defined(OS_WIN)
#include <set>
#include <vector>
#include "base/callback.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/process/process.h"
#include "base/sequence_checker.h"
#include "ui/gfx/native_widget_types.h"
#if defined(OS_POSIX) && !defined(OS_ANDROID)
#include "base/files/scoped_temp_dir.h"
#endif
#if defined(OS_WIN)
#include "base/win/message_window.h"
#endif // defined(OS_WIN)
namespace base {
class CommandLine;
}
// ProcessSingleton ----------------------------------------------------------
//
// This class allows different browser processes to communicate with
// each other. It is named according to the user data directory, so
// we can be sure that no more than one copy of the application can be
// running at once with a given data directory.
//
// Implementation notes:
// - the Windows implementation uses an invisible global message window;
// - the Linux implementation uses a Unix domain socket in the user data dir.
class ProcessSingleton {
public:
enum NotifyResult {
PROCESS_NONE,
PROCESS_NOTIFIED,
PROFILE_IN_USE,
LOCK_ERROR,
};
// Implement this callback to handle notifications from other processes. The
// callback will receive the command line and directory with which the other
// Chrome process was launched. Return true if the command line will be
// handled within the current browser instance or false if the remote process
// should handle it (i.e., because the current process is shutting down).
using NotificationCallback = base::RepeatingCallback<bool(
const base::CommandLine::StringVector& command_line,
const base::FilePath& current_directory)>;
ProcessSingleton(const base::FilePath& user_data_dir,
const NotificationCallback& notification_callback);
~ProcessSingleton();
// Notify another process, if available. Otherwise sets ourselves as the
// singleton instance. Returns PROCESS_NONE if we became the singleton
// instance. Callers are guaranteed to either have notified an existing
// process or have grabbed the singleton (unless the profile is locked by an
// unreachable process).
// TODO(brettw): Make the implementation of this method non-platform-specific
// by making Linux re-use the Windows implementation.
NotifyResult NotifyOtherProcessOrCreate();
void StartListeningOnSocket();
void OnBrowserReady();
// Sets ourself up as the singleton instance. Returns true on success. If
// false is returned, we are not the singleton instance and the caller must
// exit.
// NOTE: Most callers should generally prefer NotifyOtherProcessOrCreate() to
// this method, only callers for whom failure is preferred to notifying
// another process should call this directly.
bool Create();
// Clear any lock state during shutdown.
void Cleanup();
#if defined(OS_POSIX) && !defined(OS_ANDROID)
static void DisablePromptForTesting();
#endif
#if defined(OS_WIN)
// Called to query whether to kill a hung browser process that has visible
// windows. Return true to allow killing the hung process.
using ShouldKillRemoteProcessCallback = base::RepeatingCallback<bool()>;
void OverrideShouldKillRemoteProcessCallbackForTesting(
const ShouldKillRemoteProcessCallback& display_dialog_callback);
#endif
protected:
// Notify another process, if available.
// Returns true if another process was found and notified, false if we should
// continue with the current process.
// On Windows, Create() has to be called before this.
NotifyResult NotifyOtherProcess();
#if defined(OS_POSIX) && !defined(OS_ANDROID)
// Exposed for testing. We use a timeout on Linux, and in tests we want
// this timeout to be short.
NotifyResult NotifyOtherProcessWithTimeout(
const base::CommandLine& command_line,
int retry_attempts,
const base::TimeDelta& timeout,
bool kill_unresponsive);
NotifyResult NotifyOtherProcessWithTimeoutOrCreate(
const base::CommandLine& command_line,
int retry_attempts,
const base::TimeDelta& timeout);
void OverrideCurrentPidForTesting(base::ProcessId pid);
void OverrideKillCallbackForTesting(
const base::RepeatingCallback<void(int)>& callback);
#endif
private:
NotificationCallback notification_callback_; // Handler for notifications.
#if defined(OS_WIN)
HWND remote_window_; // The HWND_MESSAGE of another browser.
base::win::MessageWindow window_; // The message-only window.
bool is_virtualized_; // Stuck inside Microsoft Softricity VM environment.
HANDLE lock_file_;
base::FilePath user_data_dir_;
ShouldKillRemoteProcessCallback should_kill_remote_process_callback_;
#elif defined(OS_POSIX) && !defined(OS_ANDROID)
// Start listening to the socket.
void StartListening(int sock);
// Return true if the given pid is one of our child processes.
// Assumes that the current pid is the root of all pids of the current
// instance.
bool IsSameChromeInstance(pid_t pid);
// Extract the process's pid from a symbol link path and if it is on
// the same host, kill the process, unlink the lock file and return true.
// If the process is part of the same chrome instance, unlink the lock file
// and return true without killing it.
// If the process is on a different host, return false.
bool KillProcessByLockPath();
// Default function to kill a process, overridable by tests.
void KillProcess(int pid);
// Allow overriding for tests.
base::ProcessId current_pid_;
// Function to call when the other process is hung and needs to be killed.
// Allows overriding for tests.
base::Callback<void(int)> kill_callback_;
// Path in file system to the socket.
base::FilePath socket_path_;
// Path in file system to the lock.
base::FilePath lock_path_;
// Path in file system to the cookie file.
base::FilePath cookie_path_;
// Temporary directory to hold the socket.
base::ScopedTempDir socket_dir_;
// Helper class for linux specific messages. LinuxWatcher is ref counted
// because it posts messages between threads.
class LinuxWatcher;
scoped_refptr<LinuxWatcher> watcher_;
int sock_;
bool listen_on_ready_ = false;
#endif
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(ProcessSingleton);
};
#endif // CHROME_BROWSER_PROCESS_SINGLETON_H_

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.