Compare commits

...

126 Commits

Author SHA1 Message Date
Electron Bot
ac400e5cb9 Bump v2.0.10 2018-09-19 02:18:01 -07:00
Samuel Attard
333d5fb5d2 Revert "Bump v2.0.10"
This reverts commit f2c12ccbf5.
2018-09-19 19:10:43 +10:00
Electron Bot
f2c12ccbf5 Bump v2.0.10 2018-09-18 06:45:51 -07:00
trop[bot]
79af1ea6f1 chore: dont pass --stable through to bump-version.py (#14667)
I want to clean up this "stable" and "beta" code smell eventually but
for now this will unblock the 2.0.x releases.
2018-09-18 23:41:20 +10:00
Roller Bot
1ed7875b0b chore: bump libcc submodule to cbd04c0dccc7655cd42f02baee3a622d5170ac08 (#14654) 2018-09-18 17:37:00 +10:00
John Kleinschmidt
67a690e536 Merge pull request #14593 from electron/ignore-gn-debug-builds-2-0-x
ci: don't run gn debug build on older branches (2-0-x)
2018-09-13 09:24:02 -04:00
John Kleinschmidt
5f3bedd1e0 ci: don't run gn debug build on older branches (#14584)
* ci: don't run gn debug build on older branches

Older branches that build using gyp do not run both a debug and testing build.

* Actually skip the build if debug

(cherry picked from commit f924a16055)
2018-09-12 17:10:05 -04:00
Electron Bot
670a875792 Bump v2.0.9 2018-09-10 08:21:03 -07:00
trop[bot]
faf82245dc Fix documentation typo (#14477) 2018-09-06 10:59:35 -07:00
Shelley Vohr
066eeb5b5f chore: add release-artifact-cleanup script (#14448) 2018-09-04 11:36:57 -07:00
trop[bot]
77ec86b894 docs: added session-created event documentation (backport: 2-0-x) (#14439)
* Added session-created event documentation

Emitted at 1c0ea0286e/lib/browser/api/session.js (L21)

* Removed trailing spaces
2018-09-04 09:26:58 +10:00
trop[bot]
bf82dc7896 fix: crash when tray is destroyed (#14366)
Release the view of status item before destroying it,
gives chance to perform cleanup on the view.
2018-08-28 23:14:08 -07:00
Charles Kerr
b4c5a30469 fix: dont parse arguments after a -- in the inspector (#14297) (#14334) 2018-08-27 14:39:38 -05:00
trop[bot]
7ed6e2b909 fix: don't crash on tray.setContextMenu(null) (#14330) 2018-08-27 12:51:09 -05:00
Wenjun Che
8edd18be90 fix: issue 10962, crash when open devtool (#13808)
* fix: crash when opening devtool (#10962)

* fix: fixed linting issues
2018-08-24 14:36:47 -07:00
Shelley Vohr
3bc9ae7f5b deps: update node for two backports (#14298) 2018-08-24 14:50:55 -05:00
Cheng Zhao
760c2327b4 fix: do not bubble up resize event from webview (#14272) 2018-08-23 15:42:46 +09:00
Cheng Zhao
fcb5069d24 Revert "fix: do not bubble up resize event from webview"
This reverts commit 221ab080a1.

It was an accidental push.
2018-08-23 09:31:10 +09:00
Cheng Zhao
221ab080a1 fix: do not bubble up resize event from webview 2018-08-23 09:29:08 +09:00
Electron Bot
addf069f26 Bump v2.0.8 2018-08-22 11:10:51 -07:00
Samuel Attard
80221e52d9 fix: inheritance of webPreferences sub properties 2018-08-22 12:20:04 -05:00
trop[bot]
3403b4a4de ci: add better logging around request failures for releasing (#14244) 2018-08-21 13:31:29 -07:00
trop[bot]
a1ac930f4d chore: retry for the meta dumper a few times (#14242) 2018-08-21 13:30:53 -07:00
Electron Bot
ed8396c6f5 Bump v2.0.8-nightly.20180820 2018-08-20 16:54:02 -07:00
Samuel Attard
a457d8823a Revert "Bump v2.0.8-nightly.20180820"
This reverts commit 2d44dcb8be.
2018-08-20 16:44:17 -07:00
Electron Bot
2d44dcb8be Bump v2.0.8-nightly.20180820 2018-08-20 13:33:19 -07:00
Samuel Attard
85da731867 fix: support installing nightlies on <= 2.0.x (#14224) 2018-08-20 11:18:45 -07:00
trop[bot]
3af5fdb831 chore: remove autorelease check logic (#14221) 2018-08-20 10:56:40 -07:00
trop[bot]
f1b197acbb chore: add option to return next version from prepare-release (backport: 2-0-x) (#14216)
* chore: add option to return next version from prepare-release

* shuffle logic
2018-08-20 08:35:28 -07:00
Electron Bot
d483c81887 Bump v2.0.8-nightly.20180819 2018-08-19 16:42:46 -07:00
Samuel Attard
3ef980ae2b chore: use metadumper service to create index.json file (#14158) (#14210) 2018-08-19 16:40:49 -07:00
Samuel Attard
36e68b46df Revert "Bump v2.0.8-nightly.20180819"
This reverts commit 148d0d8d29.
2018-08-19 15:38:56 -07:00
Samuel Attard
f611dfdb5c chore: stop auto releasing from windows CI 2018-08-19 15:38:47 -07:00
Electron Bot
148d0d8d29 Bump v2.0.8-nightly.20180819 2018-08-19 14:48:54 -07:00
trop[bot]
79a1382126 chore: match the bump commit exactly, reverts should not count (#14208) 2018-08-19 14:45:49 -07:00
Samuel Attard
b0d5ba1996 Revert "Bump v2.0.8-nightly.20180819"
This reverts commit 48a4b58cc1.
2018-08-19 14:24:11 -07:00
Samuel Attard
088dc70dae chore: disable tests on release builds, this is already done on master/3-0-x (#14206) 2018-08-19 14:22:57 -07:00
trop[bot]
ad686cca68 fix: #14160 (#14204) 2018-08-19 14:13:56 -07:00
Electron Bot
48a4b58cc1 Bump v2.0.8-nightly.20180819 2018-08-19 13:18:10 -07:00
Samuel Attard
c1ef824e4c chore: fix upload script for nightly support (#14203) 2018-08-19 13:03:15 -07:00
Samuel Attard
a2eb532720 Revert "Bump v2.0.8-nightly.20180819"
This reverts commit 0cb53ceb9d.
2018-08-19 12:57:00 -07:00
Electron Bot
0cb53ceb9d Bump v2.0.8-nightly.20180819 2018-08-19 12:33:38 -07:00
Samuel Attard
afe4e610ae chore: always target master for nightly releases (#14190) (#14202) 2018-08-19 12:28:21 -07:00
Shelley Vohr
74d90fbb33 chore: backport release script updates to 2-0-x (#14191)
* chore: alter release scripts to enable sudowoodo

* add example .env file

* chore: only prepare release if last commit not a bump (#14193)
2018-08-18 19:39:06 -07:00
trop[bot]
f3bd8f6133 docs: fix electron.d.ts typings (#14137) 2018-08-16 11:11:31 -07:00
John Kleinschmidt
863c511fbf Merge pull request #13988 from K900/update-lld
fix: update clang/lld version to fix #13972
2018-08-16 10:29:21 -07:00
trop[bot]
03d8689ded fix: add a hidden option to disable remote dereferencing (#14112) 2018-08-15 08:43:41 -07:00
John Kleinschmidt
ff2bc1d705 Merge pull request #14084 from electron/appveyor-rename-2-0-x
ci: rename appveyor-override to appveyor (2-0-x)
2018-08-14 09:56:17 -07:00
John Kleinschmidt
a1df8befde Rename appveyor-override to appveyor 2018-08-14 09:44:15 -07:00
trop[bot]
75909685f7 Fix flaky sandbox tests (#14055)
Windows apparently has issues opening two windows at once, which is causing flakiness on the mixed sandbox tests.
2018-08-13 15:14:21 -07:00
trop[bot]
2e0a015168 fix: make asarStatsToFsStats inherit from fs.stats (#14040) 2018-08-12 21:13:43 -07:00
K900
740b2797c5 vendor: update clang/lld version to fix #13972
This seems like some sort of a weird linker bug, so just update the
toolchain a bit.
2018-08-10 14:55:13 +03:00
Charles Kerr
5f372f7ecc Bump v2.0.7 2018-08-08 16:23:52 -05:00
Jeremy Apthorp
e4f4a1d9f9 chore: roll libcc (#13974)
Picks up electron/libchromiumcontent#637
2018-08-08 09:59:16 -07:00
John Kleinschmidt
bdc255fa9e Merge pull request #13966 from electron/2-0-x-backport-12809
fix: Prevent menu update while it's open (backport: 2-0-x)
2018-08-07 10:05:44 -04:00
John Kleinschmidt
e8e542b3e8 Merge pull request #13960 from electron/miniak/fix-promisify-2.0
fix: some APIs modified for ASAR support cannot be util.promisify'ed (backport: 2-0-x)
2018-08-07 10:04:56 -04:00
Zhuo Lu
dd89aa6c77 Memory safety 2018-08-06 23:50:00 -07:00
Zhuo Lu
79caff69f5 Fix code style 2018-08-06 23:50:00 -07:00
Zhuo Lu
67b10135b6 Update application menu on default runloop
Menu change should be prevented while the menu is open
2018-08-06 23:49:57 -07:00
Milan Burda
6a86831b7c fix: some APIs modified for ASAR support cannot be util.promisify'ed (#13845) 2018-08-07 00:20:54 +02:00
Birunthan Mohanathas
dffd17ab70 fix: Improve --enable-features/--disable-features handling (2.0.x) (#13921)
As it turns out, we can reinitialize the feature list directly after the
user JS script has been executed instead of much later. This allows
modifications to `--enable-features`/`--disable-features` to work with a
greater set of features.

This backports #13920 to `2-0-x`.
2018-08-03 11:09:53 +10:00
Birunthan Mohanathas
cf98934dd9 build: Bump libcc to latest (2.0.x) (#13919)
This picks up electron/libchromiumcontent#627.
2018-08-03 10:54:39 +10:00
trop[bot]
2fff138740 fix: handle SIGINT and SIGTERM from the Electron CLI helper (#13888)
Fixes #12840
2018-08-01 13:25:37 +10:00
Charles Kerr
82bc83c1de Bump v2.0.6 2018-07-31 18:49:35 -05:00
Birunthan Mohanathas
3a699741da build: Bump libcc to latest (2.0.x) (#13896)
This picks up electron/libchromiumcontent#624.
2018-07-31 15:45:39 -05:00
trop[bot]
237ad9a49d fix: util.promisify(setTimeout) (#13859) 2018-07-30 15:02:28 +10:00
trop[bot]
45158bdad5 docs: fix electron.d.ts typings (#13856) 2018-07-30 11:20:02 +10:00
Birunthan Mohanathas
cf4861b575 build: Bump libcc to latest (2.0.x) (#13825)
This picks up electron/libchromiumcontent#619.
2018-07-27 23:05:49 +10:00
Zeke Sikelianos
a76adba2b3 Merge pull request #13782 from electron/fix-enable-disable-features-2-0-x
fix: Use --enable-features and --disable-features (2.0.x)
2018-07-25 11:09:15 -07:00
Birunthan Mohanathas
6689dce5ba fix: Use --enable-features and --disable-features (2.0.x)
Unlike Chrome, we were not using the --enable-features and
--disable-features command-line arguments to initialize
`base::FeatureList`.
2018-07-24 17:12:39 +02:00
Cheng Zhao
9667b835ca fix: create WebContents for webview on request (#13714) 2018-07-20 10:45:57 -07:00
John Kleinschmidt
58295a3198 Merge pull request #13735 from electron/backport-13727-2-0-x
fix: use context counter as contextId (backport 2-0-x)
2018-07-20 10:22:00 -04:00
Cheng Zhao
b60125f782 fix: use webContentsId with contextId together
After after using `processId-contextCounter` as contextId, it may happen
that contexts in different WebContents sharing the same renderer process
get the same contextId. Using webContentsId as part of key in
ObjectsRegistry can fix this.
2018-07-20 10:53:16 +09:00
Cheng Zhao
2af6d9ced7 fix: use context counter as contextId (backport 2-0-x)
For sandboxed renderers it may not have a node::Environment in the context,
so using an increasing counter as contextId works for all cases.
2018-07-19 11:35:49 -07:00
Charles Kerr
664c184fcb Bump v2.0.5 2018-07-13 13:55:07 -05:00
John Kleinschmidt
c125576d48 Merge pull request #13644 from electron/release-updates-2
chore: Updates for release process (2-0-x)
2018-07-12 10:31:19 -04:00
John Kleinschmidt
6030146b45 chore: Updates for release process (2-0-x)
* Fix Appveyor URL

* Update docs to reflect current process

Also added troubleshooting steps.

(cherry picked from commit 6b5ccec0c6)
2018-07-12 09:05:02 -04:00
Cheng Zhao
17297d85f8 Merge pull request #13625 from electron/proxy_resolver_oop
fix: use mojo v8 proxy resolver with a utility process
2018-07-12 21:59:12 +09:00
deepak1556
65bbf21c08 update libcc ref 2018-07-12 19:55:42 +09:00
deepak1556
00da6ab43b link mojo proxy resolver in component build 2018-07-12 19:55:42 +09:00
deepak1556
d6d2a46821 REVIEW: create proxy helper per browser context 2018-07-12 19:55:42 +09:00
deepak1556
423bdda1b1 REVIEW: remove pre early initialization of v8 2018-07-12 19:55:42 +09:00
deepak1556
849aeced8f build: use mojo v8 proxy resolver with a utility process 2018-07-12 19:55:42 +09:00
Cheng Zhao
05a7485880 Merge pull request #13604 from electron/fix-remote-2-0-x
fix: guard against double-freeing remote references (2-0-x)
2018-07-12 11:09:38 +09:00
Cheng Zhao
3db1b5a49a fix double-freeing remote references
After the page does navigations, garbage collection can still happen in
the old context. This commit changes to store references to remote objects
by _pages_, instead of by _WebContents_.
2018-07-12 10:45:00 +09:00
Cheng Zhao
1c272503c3 add API to return an unique ID for page 2018-07-12 10:45:00 +09:00
Cheng Zhao
1ebb8975a8 fix assertion caused by PrefServiceFactory::Create 2018-07-12 10:45:00 +09:00
John Kleinschmidt
fcf4c9cef1 build: Updates to the release process (2-0-x) (#13615)
* Update to run new AppVeyor jobs

AppVeyor builds got split into two jobs, `electron-x64` and `electron-ia32`

(cherry picked from commit 90339b7260)

* Move github release checking to node.js

Our upload script was relying on an old python script to find the GitHub release for uploading.

(cherry picked from commit 2040095b96)
2018-07-11 09:08:09 -05:00
John Kleinschmidt
3e5349bf39 Use Visual Studio 2015 for 2-0-x (#13607) 2018-07-10 09:40:22 -05:00
John Kleinschmidt
66988adb0e Bump v2.0.4 2018-07-03 14:39:36 -04:00
John Kleinschmidt
91be3f343f Merge pull request #13545 from electron/fix-core-bluetooth-linking
Fix crash on startup on macOS 10.9 due to linking with CoreBluetooth
2018-07-03 09:47:35 -04:00
Cheng Zhao
475006c10d fix: weak load CoreBluetooth when building with 10.10 SDK
The CoreBluetooth framework does not exist on OS X 10.9.
2018-07-03 16:37:07 +09:00
John Kleinschmidt
d71a7d89bb Merge pull request #13538 from electron/dwm-backport
fix: Disable chromium's redraw locking on Windows when DWM is disabled (#12501)
2018-07-02 16:10:22 -04:00
John Kleinschmidt
abcba2b509 Merge pull request #13516 from trop-bot/2-0-x-bp-fix--tray-setcontextmenu-crash-1530308984992
Backport (2-0-x) - fix: tray setContextMenu crash
2018-07-02 14:12:51 -04:00
John Kleinschmidt
d06c20f836 Merge branch 'trop-bot-2-0-x-bp-fix--tray-setcontextmenu-crash-1530308984992' into 2-0-x 2018-07-02 14:10:14 -04:00
John Kleinschmidt
3ab99663eb Merge branch '2-0-x-bp-fix--tray-setcontextmenu-crash-1530308984992' of git://github.com/trop-bot/electron into trop-bot-2-0-x-bp-fix--tray-setcontextmenu-crash-1530308984992 2018-07-02 14:08:34 -04:00
trop[bot]
a253323ea1 Do not capture cookies and credentials (#13537) 2018-07-03 02:03:48 +10:00
Heilig Benedek
4abe5a2963 Disable chromium's redraw locking on Windows when DWM is disabled (#12501)
* disable redraw locking on windows

* update libcc ref

(cherry picked from commit a14ebc80d2)
2018-07-02 08:54:37 -07:00
Maxine Whitely
aada5fb167 fix: tray setContextMenu crash
Co-authored-by: Zhuo Lu <sethlu@mintkit.net>
Co-authored-by: Kristine Zhang <kzhang@slack-corp.com>
2018-06-29 21:49:51 +00:00
Robo
4efed0f5ed fix: Revert "remove MarkHghMemoryUsage api" (#13421)
* fix: Revert "remove MarkHghMemoryUsage api"

This reverts commit 0de85fd49f.

* update native_mate ref
2018-06-26 10:44:59 -07:00
John Kleinschmidt
475a1e30d9 Add release build to VSTS 2018-06-21 09:33:06 -04:00
John Kleinschmidt
4aedc2c21a Bump v2.0.3 2018-06-21 09:25:33 -04:00
John Kleinschmidt
71668858db Merge pull request #13345 from electron/miniak/fix-libcc-2-0-x
fix: patches/086-backport-f0c82253.patch in libcc
2018-06-21 09:13:25 -04:00
Milan Burda
2573d52860 Bump libcc 2018-06-21 09:29:14 +02:00
John Kleinschmidt
989c2605b3 Merge pull request #13305 from electron/2-0-x_update-libcc-subreference
update submodule reference for libchromiumcontent
2018-06-20 09:36:17 -04:00
John Kleinschmidt
afcfd473d0 Merge pull request #13330 from trop-bot/2-0-x-bp-fix--pass-down-the-bool-value-of-enableautosize-to-setsize-1529484352071
Backport (2-0-x) - fix: pass down the bool value of enableAutoSize to setSize
2018-06-20 09:24:28 -04:00
Justin Guze
1499bafe59 creating new bool for autosize 2018-06-20 08:45:58 +00:00
Justin Guze
419fae76b4 fix: pass the boolean value of enableAutoSize to setSize
The webContents setSize API takes in an optional enableAutoSize boolean.
Looking in the code, if that property is set, regardless if you pass in
true or false, it will always set it to true. This change passes the
appropriate boolean value down properly.
2018-06-20 08:45:58 +00:00
trop[bot]
c7b85a104a Mark browser window affinity as experimental (#13295) 2018-06-20 17:19:46 +10:00
trop[bot]
c6ea424858 Disable node options in node config (#13324) 2018-06-20 17:06:29 +10:00
trop[bot]
ba23c0d600 set mac protocol to none (#13325) 2018-06-19 22:39:07 -07:00
Charles Kerr
d4e716477e update submodule reference for libchromiumcontent 2018-06-19 14:43:57 -05:00
Jeremy Apthorp
34dbe5f176 Enable WebFrame method forwarding in sandboxed renderers (#12538) (#13076)
* Enable WebFrame method forwarding in sandboxed renderers

Fixes #9073

* Non-change to kick CI
2018-06-19 09:36:14 +10:00
Milan Burda
a09fa53d80 Add FILE_DIALOG_TREAT_PACKAGE_APP_AS_DIRECTORY to web open file dialog (#13220) (#13277) 2018-06-18 10:34:21 -05:00
Milan Burda
c140c84b32 Backport "Allow lookup of videodecoder service to fix HW video decoding on macOS 10.13" (#13275)
https://chromium-review.googlesource.com/677290
2018-06-18 21:35:37 +10:00
Charles Kerr
55ab1d7370 update submodule reference for node (#13260) 2018-06-17 14:52:25 -07:00
trop[bot]
cfa247967f fix: tray title not inverting when highlighted (#13262) 2018-06-16 11:59:46 -05:00
John Kleinschmidt
311f7ac6bb Bump v2.0.2 2018-05-22 14:52:16 -04:00
trop[bot]
52a44facfd update locale documentation (#13027) 2018-05-21 15:07:46 -05:00
trop[bot]
5acb7a0226 Backport (2-0-x) - docs: Document missing quit events during shutdown (#13026)
* docs: Document missing events during shutdown

* docs: Better language
2018-05-21 12:52:18 -07:00
trop[bot]
afcf94a85c Add a tag to libchromiumcontent when a release is created. (#13025) 2018-05-21 13:35:56 -05:00
John Kleinschmidt
c42b468966 Merge pull request #13022 from trop-bot/2-0-x-bp-fix--menu.setapplicationmenu(null)-crash-on-linux-1526913387556
Backport (2-0-x) - fix: Menu.setApplicationMenu(null) crash on Linux
2018-05-21 11:06:36 -04:00
John Kleinschmidt
e0257acdf5 Merge pull request #13009 from ajmacd/ajm-libcc
Bump libcc ref to pick up patch file extension correction.
2018-05-21 11:05:34 -04:00
Jeremy Apthorp
b555434aa0 fix: Menu.setApplicationMenu(null) crash on Linux 2018-05-21 14:36:33 +00:00
trop[bot]
9574a4f472 Only expand maximum size constraint if there was a constraint originally (#13014) 2018-05-20 19:45:51 -07:00
Andrew MacDonald
2f167ce3d1 Bump libcc ref to pick up patch file extension correction. 2018-05-19 01:01:05 -07:00
101 changed files with 2649 additions and 688 deletions

View File

@@ -544,7 +544,6 @@ jobs:
TARGET_ARCH: x64
macos:
xcode: "9.0"
resource_class: xlarge
steps:
- checkout
- run:
@@ -643,7 +642,6 @@ jobs:
MAS_BUILD: 1
macos:
xcode: "9.0"
resource_class: xlarge
steps:
- checkout
- run:

7
.env.example Normal file
View File

@@ -0,0 +1,7 @@
# These env vars are only necessary for creating Electron releases.
# See docs/development/releasing.md
APPVEYOR_TOKEN=
CIRCLE_TOKEN=
ELECTRON_GITHUB_TOKEN=
VSTS_TOKEN=

69
appveyor.yml Normal file
View File

@@ -0,0 +1,69 @@
build_cloud: electron-16
image: electron-16-vs2015
build_script:
- ps: >-
if($env:SKIP_GYP_BUILD -eq "true") {
Write-warning "Skipping debug build for older branch"; Exit-AppveyorBuild
} elseif(($env:APPVEYOR_PULL_REQUEST_HEAD_REPO_NAME -split "/")[0] -eq ($env:APPVEYOR_REPO_NAME -split "/")[0]) {
Write-warning "Skipping PR build for branch"; Exit-AppveyorBuild
} else {
Add-Path "$env:ProgramFiles (x86)\Windows Kits\10\Debuggers\x64"
$env:path = "$env:ProgramFiles (x86)\Windows Kits\10\Debuggers\x64;$env:path"
if($env:APPVEYOR_SCHEDULED_BUILD -eq 'True') {
$env:RUN_RELEASE_BUILD = "1"
}
$Message = (git log --format=%B -n 1 HEAD) | Out-String
if ((Test-Path Env:\RUN_RELEASE_BUILD)) {
$env:ELECTRON_RELEASE = '1'
Write-Output "release build triggered from api"
}
if ((Test-Path Env:\ELECTRON_RELEASE)) {
Write-Output "Running release build"
python script\bootstrap.py --target_arch=$env:TARGET_ARCH
python script\build.py -c R
python script\create-dist.py
} else {
Write-Output "Running debug build"
python script\bootstrap.py --target_arch=$env:TARGET_ARCH --dev
python script\build.py -c D
}
if ($? -ne 'True') {
throw "Build failed with exit code $?"
} else {
"Build succeeded."
}
Push-AppveyorArtifact out
}
test_script:
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
Write-Output "Skipping tests for release build"
} else {
Write-Output "Running tests for debug build"
python script\test.py --ci --rebuild_native_modules
if ($LASTEXITCODE -ne '0') {
throw "Tests failed with exit code $LASTEXITCODE"
} else {
Write-Output "Tests succeeded."
}
python script\verify-ffmpeg.py
if ($LASTEXITCODE -ne '0') {
throw "Verify ffmpeg failed with exit code $LASTEXITCODE"
} else {
"Verify ffmpeg succeeded."
}
}
artifacts:
- path: test-results.xml
name: test-results.xml
deploy_script:
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
if (Test-Path Env:\RUN_RELEASE_BUILD) {
Write-Output "Uploading Electron release distribution to s3"
& python script\upload.py --upload_to_s3
} else {
Write-Output "Uploading Electron release distribution to github releases"
& python script\upload.py
}
}

View File

@@ -18,6 +18,12 @@
using content::BrowserThread;
namespace {
static scoped_nsobject<NSMenu> applicationMenu_;
} // namespace
namespace atom {
namespace api {
@@ -135,7 +141,18 @@ void Menu::SetApplicationMenu(Menu* base_menu) {
base::scoped_nsobject<AtomMenuController> menu_controller(
[[AtomMenuController alloc] initWithModel:menu->model_.get()
useDefaultAccelerator:YES]);
[NSApp setMainMenu:[menu_controller menu]];
NSRunLoop* currentRunLoop = [NSRunLoop currentRunLoop];
[currentRunLoop cancelPerformSelector:@selector(setMainMenu:)
target:NSApp
argument:applicationMenu_];
applicationMenu_.reset([[menu_controller menu] retain]);
[[NSRunLoop currentRunLoop]
performSelector:@selector(setMainMenu:)
target:NSApp
argument:applicationMenu_
order:0
modes:[NSArray arrayWithObject:NSDefaultRunLoopMode]];
// Ensure the menu_controller_ is destroyed after main menu is set.
menu_controller.swap(menu->menu_controller_);

View File

@@ -29,7 +29,6 @@
#include "base/guid.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "brightray/browser/media/media_device_id_salt.h"
#include "brightray/browser/net/devtools_network_conditions.h"
#include "brightray/browser/net/devtools_network_controller_handle.h"
@@ -47,7 +46,6 @@
#include "net/http/http_auth_handler_factory.h"
#include "net/http/http_auth_preferences.h"
#include "net/proxy/proxy_config_service_fixed.h"
#include "net/proxy/proxy_service.h"
#include "net/url_request/static_http_user_agent_settings.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_context_getter.h"
@@ -232,59 +230,6 @@ const char kPersistPrefix[] = "persist:";
// Referenced session objects.
std::map<uint32_t, v8::Global<v8::Object>> g_sessions;
class ResolveProxyHelper {
public:
ResolveProxyHelper(AtomBrowserContext* browser_context,
const GURL& url,
const Session::ResolveProxyCallback& callback)
: callback_(callback),
original_thread_(base::ThreadTaskRunnerHandle::Get()) {
scoped_refptr<net::URLRequestContextGetter> context_getter =
browser_context->url_request_context_getter();
context_getter->GetNetworkTaskRunner()->PostTask(
FROM_HERE,
base::Bind(&ResolveProxyHelper::ResolveProxy,
base::Unretained(this), context_getter, url));
}
void OnResolveProxyCompleted(int result) {
std::string proxy;
if (result == net::OK)
proxy = proxy_info_.ToPacString();
original_thread_->PostTask(FROM_HERE,
base::Bind(callback_, proxy));
delete this;
}
private:
void ResolveProxy(scoped_refptr<net::URLRequestContextGetter> context_getter,
const GURL& url) {
DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
net::ProxyService* proxy_service =
context_getter->GetURLRequestContext()->proxy_service();
net::CompletionCallback completion_callback =
base::Bind(&ResolveProxyHelper::OnResolveProxyCompleted,
base::Unretained(this));
// Start the request.
int result = proxy_service->ResolveProxy(
url, "GET", &proxy_info_, completion_callback, &pac_req_, nullptr,
net::NetLogWithSource());
// Completed synchronously.
if (result != net::ERR_IO_PENDING)
completion_callback.Run(result);
}
Session::ResolveProxyCallback callback_;
net::ProxyInfo proxy_info_;
net::ProxyService::PacRequest* pac_req_;
scoped_refptr<base::SingleThreadTaskRunner> original_thread_;
DISALLOW_COPY_AND_ASSIGN(ResolveProxyHelper);
};
// Runs the callback in UI thread.
void RunCallbackInUI(const base::Callback<void()>& callback) {
BrowserThread::PostTask(BrowserThread::UI, FROM_HERE, callback);
@@ -490,8 +435,10 @@ void Session::OnDownloadCreated(content::DownloadManager* manager,
}
}
void Session::ResolveProxy(const GURL& url, ResolveProxyCallback callback) {
new ResolveProxyHelper(browser_context(), url, callback);
void Session::ResolveProxy(
const GURL& url,
const ResolveProxyHelper::ResolveProxyCallback& callback) {
browser_context_->GetResolveProxyHelper()->ResolveProxy(url, callback);
}
template<Session::CacheAction action>

View File

@@ -10,6 +10,7 @@
#include "atom/browser/api/trackable_object.h"
#include "atom/browser/atom_blob_reader.h"
#include "atom/browser/net/resolve_proxy_helper.h"
#include "base/values.h"
#include "content/public/browser/download_manager.h"
#include "native_mate/handle.h"
@@ -39,8 +40,6 @@ namespace api {
class Session: public mate::TrackableObject<Session>,
public content::DownloadManager::Observer {
public:
using ResolveProxyCallback = base::Callback<void(std::string)>;
enum class CacheAction {
CLEAR,
STATS,
@@ -62,7 +61,8 @@ class Session: public mate::TrackableObject<Session>,
v8::Local<v8::FunctionTemplate> prototype);
// Methods.
void ResolveProxy(const GURL& url, ResolveProxyCallback callback);
void ResolveProxy(const GURL& url,
const ResolveProxyHelper::ResolveProxyCallback& callback);
template<CacheAction action>
void DoCacheAction(const net::CompletionCallback& callback);
void ClearStorageData(mate::Arguments* args);

View File

@@ -206,7 +206,7 @@ void Tray::PopUpContextMenu(mate::Arguments* args) {
void Tray::SetContextMenu(v8::Isolate* isolate, mate::Handle<Menu> menu) {
menu_.Reset(isolate, menu.ToV8());
tray_icon_->SetContextMenu(menu->model());
tray_icon_->SetContextMenu(menu.IsEmpty() ? nullptr : menu->model());
}
gfx::Rect Tray::GetBounds() {

View File

@@ -113,7 +113,7 @@ struct Converter<atom::SetSizeParams> {
return false;
bool autosize;
if (params.Get("enableAutoSize", &autosize))
out->enable_auto_size.reset(new bool(true));
out->enable_auto_size.reset(new bool(autosize));
gfx::Size size;
if (params.Get("min", &size))
out->min_size.reset(new gfx::Size(size));
@@ -331,6 +331,10 @@ WebContents::WebContents(v8::Isolate* isolate, const mate::Dictionary& options)
request_id_(0),
background_throttling_(true),
enable_devtools_(true) {
// WebContents may need to emit events when it is garbage collected, so it
// has to be deleted in the first gc callback.
MarkHighMemoryUsage();
// Read options.
options.Get("backgroundThrottling", &background_throttling_);

View File

@@ -448,7 +448,6 @@ void AtomBrowserClient::SiteInstanceDeleting(
brightray::BrowserMainParts* AtomBrowserClient::OverrideCreateBrowserMainParts(
const content::MainFunctionParams&) {
v8::V8::Initialize(); // Init V8 before creating main parts.
return new AtomBrowserMainParts;
}

View File

@@ -16,6 +16,7 @@
#include "atom/browser/net/atom_network_delegate.h"
#include "atom/browser/net/atom_url_request_job_factory.h"
#include "atom/browser/net/http_protocol_handler.h"
#include "atom/browser/net/resolve_proxy_helper.h"
#include "atom/browser/web_view_manager.h"
#include "atom/common/atom_version.h"
#include "atom/common/chrome_version.h"
@@ -226,6 +227,14 @@ AtomBlobReader* AtomBrowserContext::GetBlobReader() {
return blob_reader_.get();
}
ResolveProxyHelper* AtomBrowserContext::GetResolveProxyHelper() {
if (!resolve_proxy_helper_.get()) {
resolve_proxy_helper_ =
base::MakeRefCounted<ResolveProxyHelper>(url_request_context_getter());
}
return resolve_proxy_helper_.get();
}
// static
scoped_refptr<AtomBrowserContext> AtomBrowserContext::From(
const std::string& partition, bool in_memory,

View File

@@ -18,6 +18,7 @@ class AtomBlobReader;
class AtomDownloadManagerDelegate;
class AtomNetworkDelegate;
class AtomPermissionManager;
class ResolveProxyHelper;
class WebViewManager;
class AtomBrowserContext : public brightray::BrowserContext {
@@ -51,6 +52,8 @@ class AtomBrowserContext : public brightray::BrowserContext {
// brightray::BrowserContext:
void RegisterPrefs(PrefRegistrySimple* pref_registry) override;
ResolveProxyHelper* GetResolveProxyHelper();
AtomBlobReader* GetBlobReader();
AtomCookieDelegate* cookie_delegate() const {
return cookie_delegate_.get();
@@ -62,6 +65,7 @@ class AtomBrowserContext : public brightray::BrowserContext {
~AtomBrowserContext() override;
private:
scoped_refptr<ResolveProxyHelper> resolve_proxy_helper_;
std::unique_ptr<AtomDownloadManagerDelegate> download_manager_delegate_;
std::unique_ptr<WebViewManager> guest_manager_;
std::unique_ptr<AtomPermissionManager> permission_manager_;

View File

@@ -151,6 +151,14 @@ void AtomBrowserMainParts::PostEarlyInitialization() {
// Wrap the uv loop with global env.
node_bindings_->set_uv_env(env);
// We already initialized the feature list in
// brightray::BrowserMainParts::PreEarlyInitialization(), but
// the user JS script would not have had a chance to alter the command-line
// switches at that point. Lets reinitialize it here to pick up the
// command-line changes.
base::FeatureList::ClearInstanceForTesting();
brightray::BrowserMainParts::InitializeFeatureList();
}
int AtomBrowserMainParts::PreCreateThreads() {

View File

@@ -77,6 +77,11 @@ bool Browser::RemoveAsDefaultProtocolClient(const std::string& protocol,
}
}
// No other app was found set it to none instead of setting it back to itself.
if ([identifier isEqualToString:(__bridge NSString*)other]) {
other = base::mac::NSToCFCast(@"None");
}
OSStatus return_code = LSSetDefaultHandlerForURLScheme(protocol_cf, other);
return return_code == noErr;
}

View File

@@ -340,7 +340,9 @@ gfx::Size NativeWindow::GetContentMinimumSize() const {
gfx::Size NativeWindow::GetContentMaximumSize() const {
gfx::Size maximum_size = GetContentSizeConstraints().GetMaximumSize();
#if defined(OS_WIN)
return GetExpandedWindowSize(this, maximum_size);
return GetContentSizeConstraints().HasMaximumSize()
? GetExpandedWindowSize(this, maximum_size)
: maximum_size;
#else
return maximum_size;
#endif

View File

@@ -0,0 +1,85 @@
// Copyright (c) 2018 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#include "atom/browser/net/resolve_proxy_helper.h"
#include "base/threading/thread_task_runner_handle.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_context_getter.h"
namespace atom {
ResolveProxyHelper::ResolveProxyHelper(net::URLRequestContextGetter* getter)
: context_getter_(getter),
original_thread_(base::ThreadTaskRunnerHandle::Get()) {}
ResolveProxyHelper::~ResolveProxyHelper() {
// Clear all pending requests if the ProxyService is still alive.
pending_requests_.clear();
}
void ResolveProxyHelper::ResolveProxy(const GURL& url,
const ResolveProxyCallback& callback) {
// Enqueue the pending request.
pending_requests_.push_back(PendingRequest(url, callback));
// If nothing is in progress, start.
if (pending_requests_.size() == 1)
StartPendingRequest();
}
void ResolveProxyHelper::SendProxyResult(const std::string& proxy) {
CHECK(!pending_requests_.empty());
const auto& completed_request = pending_requests_.front();
if (!completed_request.callback.is_null())
completed_request.callback.Run(proxy);
// Clear the current (completed) request.
pending_requests_.pop_front();
// Start the next request.
if (!pending_requests_.empty())
StartPendingRequest();
}
void ResolveProxyHelper::StartPendingRequest() {
auto& request = pending_requests_.front();
context_getter_->GetNetworkTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&ResolveProxyHelper::StartPendingRequestInIO,
base::Unretained(this), request.url, request.pac_req));
}
void ResolveProxyHelper::OnResolveProxyCompleted(int result) {
std::string proxy;
if (result == net::OK)
proxy = proxy_info_.ToPacString();
original_thread_->PostTask(
FROM_HERE, base::BindOnce(&ResolveProxyHelper::SendProxyResult,
base::Unretained(this), proxy));
}
void ResolveProxyHelper::StartPendingRequestInIO(
const GURL& url,
net::ProxyService::PacRequest* pac_req) {
// Verify the request wasn't started yet.
DCHECK(nullptr == pac_req);
auto proxy_service = context_getter_->GetURLRequestContext()->proxy_service();
// Start the request.
int result = proxy_service->ResolveProxy(
url, std::string(), &proxy_info_,
base::Bind(&ResolveProxyHelper::OnResolveProxyCompleted,
base::Unretained(this)),
&pac_req, nullptr, net::NetLogWithSource());
// Completed synchronously.
if (result != net::ERR_IO_PENDING)
OnResolveProxyCompleted(result);
}
} // namespace atom

View File

@@ -0,0 +1,59 @@
// Copyright (c) 2018 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_NET_RESOLVE_PROXY_HELPER_H_
#define ATOM_BROWSER_NET_RESOLVE_PROXY_HELPER_H_
#include <deque>
#include <string>
#include "base/memory/ref_counted.h"
#include "net/proxy/proxy_service.h"
#include "url/gurl.h"
namespace net {
class URLRequestContextGetter;
}
namespace atom {
class ResolveProxyHelper
: public base::RefCountedThreadSafe<ResolveProxyHelper> {
public:
using ResolveProxyCallback = base::Callback<void(std::string)>;
explicit ResolveProxyHelper(net::URLRequestContextGetter* getter);
void ResolveProxy(const GURL& url, const ResolveProxyCallback& callback);
private:
friend class base::RefCountedThreadSafe<ResolveProxyHelper>;
struct PendingRequest {
public:
PendingRequest(const GURL& url, const ResolveProxyCallback& callback)
: url(url), callback(callback), pac_req(nullptr) {}
GURL url;
ResolveProxyCallback callback;
net::ProxyService::PacRequest* pac_req;
};
~ResolveProxyHelper();
void StartPendingRequest();
void StartPendingRequestInIO(const GURL& request,
net::ProxyService::PacRequest* pac_req);
void SendProxyResult(const std::string& proxy);
void OnResolveProxyCompleted(int result);
net::ProxyInfo proxy_info_;
std::deque<PendingRequest> pending_requests_;
scoped_refptr<net::URLRequestContextGetter> context_getter_;
scoped_refptr<base::SingleThreadTaskRunner> original_thread_;
DISALLOW_COPY_AND_ASSIGN(ResolveProxyHelper);
};
} // namespace atom
#endif // ATOM_BROWSER_NET_RESOLVE_PROXY_HELPER_H_

View File

@@ -4,6 +4,8 @@
#include "atom/browser/node_debugger.h"
#include <string>
#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "libplatform/libplatform.h"
@@ -28,10 +30,15 @@ void NodeDebugger::Start(node::NodePlatform* platform) {
node::DebugOptions options;
for (auto& arg : base::CommandLine::ForCurrentProcess()->argv()) {
#if defined(OS_WIN)
options.ParseOption("Electron", base::UTF16ToUTF8(arg));
const std::string nice_arg = base::UTF16ToUTF8(arg);
#else
options.ParseOption("Electron", arg);
const std::string& nice_arg = arg;
#endif
// Stop handling arguments after a "--" to be consistent with Chromium
if (nice_arg == "--")
break;
options.ParseOption("Electron", nice_arg);
}
if (options.inspector_enabled()) {

View File

@@ -17,9 +17,9 @@
<key>CFBundleIconFile</key>
<string>electron.icns</string>
<key>CFBundleVersion</key>
<string>2.0.1</string>
<string>2.0.10</string>
<key>CFBundleShortVersionString</key>
<string>2.0.1</string>
<string>2.0.10</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.developer-tools</string>
<key>LSMinimumSystemVersion</key>

View File

@@ -56,8 +56,8 @@ END
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,0,1,0
PRODUCTVERSION 2,0,1,0
FILEVERSION 2,0,10,0
PRODUCTVERSION 2,0,10,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -74,12 +74,12 @@ BEGIN
BEGIN
VALUE "CompanyName", "GitHub, Inc."
VALUE "FileDescription", "Electron"
VALUE "FileVersion", "2.0.1"
VALUE "FileVersion", "2.0.10"
VALUE "InternalName", "electron.exe"
VALUE "LegalCopyright", "Copyright (C) 2015 GitHub, Inc. All rights reserved."
VALUE "OriginalFilename", "electron.exe"
VALUE "ProductName", "Electron"
VALUE "ProductVersion", "2.0.1"
VALUE "ProductVersion", "2.0.10"
VALUE "SquirrelAwareVersion", "1"
END
END

View File

@@ -123,7 +123,9 @@ static base::scoped_nsobject<NSMenu> recentDocumentsMenuSwap_;
[menu_ cancelTracking];
isMenuOpen_ = NO;
model_->MenuWillClose();
closeCallback.Run();
if (!closeCallback.is_null()) {
BrowserThread::PostTask(BrowserThread::UI, FROM_HERE, closeCallback);
}
}
}

View File

@@ -96,11 +96,6 @@ NSAlert* CreateNSAlert(NativeWindow* parent_window,
NSArray* ns_buttons = [alert buttons];
int button_count = static_cast<int>([ns_buttons count]);
// Bind cancel id button to escape key if there is more than one button
if (button_count > 1 && cancel_id >= 0 && cancel_id < button_count) {
[[ns_buttons objectAtIndex:cancel_id] setKeyEquivalent:@"\e"];
}
if (default_id >= 0 && default_id < button_count) {
// Focus the button at default_id if the user opted to do so.
// The first button added gets set as the default selected.
@@ -109,6 +104,11 @@ NSAlert* CreateNSAlert(NativeWindow* parent_window,
[[ns_buttons objectAtIndex:default_id] setKeyEquivalent:@"\r"];
}
// Bind cancel id button to escape key if there is more than one button
if (button_count > 1 && cancel_id >= 0 && cancel_id < button_count) {
[[ns_buttons objectAtIndex:cancel_id] setKeyEquivalent:@"\e"];
}
if (!checkbox_label.empty()) {
alert.showsSuppressionButton = YES;
alert.suppressionButton.title = base::SysUTF8ToNSString(checkbox_label);

View File

@@ -40,9 +40,16 @@ const CGFloat kVerticalTitleMargin = 2;
@implementation StatusItemView
- (void)dealloc {
trayIcon_ = nil;
menuController_ = nil;
[super dealloc];
}
- (id)initWithImage:(NSImage*)image icon:(atom::TrayIconCocoa*)icon {
image_.reset([image copy]);
trayIcon_ = icon;
menuController_ = nil;
highlight_mode_ = atom::TrayIcon::HighlightMode::SELECTION;
forceHighlight_ = NO;
inMouseEventSequence_ = NO;
@@ -85,6 +92,7 @@ const CGFloat kVerticalTitleMargin = 2;
trackingArea_.reset();
}
[[NSStatusBar systemStatusBar] removeStatusItem:statusItem_];
[statusItem_ setView:nil];
statusItem_.reset();
}
@@ -389,6 +397,11 @@ const CGFloat kVerticalTitleMargin = 2;
return YES;
}
- (void)setNeedsDisplay:(BOOL)display {
[self updateAttributedTitle];
[super setNeedsDisplay:display];
}
- (BOOL)shouldHighlight {
switch (highlight_mode_) {
case atom::TrayIcon::HighlightMode::ALWAYS:
@@ -449,11 +462,18 @@ void TrayIconCocoa::SetContextMenu(AtomMenuModel* menu_model) {
// Substribe to MenuClosed event.
if (menu_model_)
menu_model_->RemoveObserver(this);
menu_model->AddObserver(this);
// Create native menu.
menu_.reset([[AtomMenuController alloc] initWithModel:menu_model
useDefaultAccelerator:NO]);
menu_model_ = menu_model;
if (menu_model) {
menu_model->AddObserver(this);
// Create native menu.
menu_.reset([[AtomMenuController alloc] initWithModel:menu_model
useDefaultAccelerator:NO]);
} else {
menu_.reset();
}
[status_item_view_ setMenuController:menu_.get()];
}

View File

@@ -209,7 +209,9 @@ void GlobalMenuBarX11::SetMenu(AtomMenuModel* menu_model) {
DbusmenuMenuitem* root_item = menuitem_new();
menuitem_property_set(root_item, kPropertyLabel, "Root");
menuitem_property_set_bool(root_item, kPropertyVisible, true);
BuildMenuFromModel(menu_model, root_item);
if (menu_model != nullptr) {
BuildMenuFromModel(menu_model, root_item);
}
server_set_root(server_, root_item);
g_object_unref(root_item);

View File

@@ -25,4 +25,11 @@ bool AtomDesktopWindowTreeHostWin::PreHandleMSG(
return delegate_->PreHandleMSG(message, w_param, l_param, result);
}
bool AtomDesktopWindowTreeHostWin::HasNativeFrame() const {
// Since we never use chromium's titlebar implementation, we can just say
// that we use a native titlebar. This will disable the repaint locking when
// DWM composition is disabled.
return true;
}
} // namespace atom

View File

@@ -27,6 +27,7 @@ class AtomDesktopWindowTreeHostWin : public views::DesktopWindowTreeHostWin {
protected:
bool PreHandleMSG(
UINT message, WPARAM w_param, LPARAM l_param, LRESULT* result) override;
bool HasNativeFrame() const override;
private:
MessageHandlerDelegate* delegate_; // weak ref

View File

@@ -227,6 +227,7 @@ void WebDialogHelper::RunFileChooser(
flags |= file_dialog::FILE_DIALOG_MULTI_SELECTIONS;
case content::FileChooserParams::Open:
flags |= file_dialog::FILE_DIALOG_OPEN_FILE;
flags |= file_dialog::FILE_DIALOG_TREAT_PACKAGE_APP_AS_DIRECTORY;
break;
case content::FileChooserParams::UploadFolder:
flags |= file_dialog::FILE_DIALOG_OPEN_DIRECTORY;

View File

@@ -223,6 +223,7 @@ NativeImage::NativeImage(v8::Isolate* isolate, const gfx::Image& image)
isolate->AdjustAmountOfExternalAllocatedMemory(
image_.ToImageSkia()->bitmap()->computeSize64());
}
MarkHighMemoryUsage();
}
#if defined(OS_WIN)
@@ -237,6 +238,7 @@ NativeImage::NativeImage(v8::Isolate* isolate, const base::FilePath& hicon_path)
isolate->AdjustAmountOfExternalAllocatedMemory(
image_.ToImageSkia()->bitmap()->computeSize64());
}
MarkHighMemoryUsage();
}
#endif

View File

@@ -22,7 +22,7 @@ namespace std {
template <typename Type1, typename Type2>
struct hash<std::pair<Type1, Type2>> {
std::size_t operator()(std::pair<Type1, Type2> value) const {
return base::HashInts<Type1, Type2>(value.first, value.second);
return base::HashInts(base::Hash(value.first), value.second);
}
};
@@ -114,8 +114,9 @@ void Initialize(v8::Local<v8::Object> exports, v8::Local<v8::Value> unused,
dict.SetMethod("setRemoteCallbackFreer", &atom::RemoteCallbackFreer::BindTo);
dict.SetMethod("setRemoteObjectFreer", &atom::RemoteObjectFreer::BindTo);
dict.SetMethod("createIDWeakMap", &atom::api::KeyWeakMap<int32_t>::Create);
dict.SetMethod("createDoubleIDWeakMap",
&atom::api::KeyWeakMap<std::pair<int64_t, int32_t>>::Create);
dict.SetMethod(
"createDoubleIDWeakMap",
&atom::api::KeyWeakMap<std::pair<std::string, int32_t>>::Create);
dict.SetMethod("requestGarbageCollectionForTesting",
&RequestGarbageCollectionForTesting);
dict.SetMethod("isSameOrigin", &IsSameOrigin);

View File

@@ -15,17 +15,20 @@ namespace atom {
// static
void RemoteCallbackFreer::BindTo(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id,
content::WebContents* web_contents) {
new RemoteCallbackFreer(isolate, target, object_id, web_contents);
new RemoteCallbackFreer(isolate, target, context_id, object_id, web_contents);
}
RemoteCallbackFreer::RemoteCallbackFreer(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id,
content::WebContents* web_contents)
: ObjectLifeMonitor(isolate, target),
content::WebContentsObserver(web_contents),
context_id_(context_id),
object_id_(object_id) {
}
@@ -36,6 +39,7 @@ void RemoteCallbackFreer::RunDestructor() {
base::string16 channel =
base::ASCIIToUTF16("ELECTRON_RENDERER_RELEASE_CALLBACK");
base::ListValue args;
args.AppendString(context_id_);
args.AppendInteger(object_id_);
auto frame_host = web_contents()->GetMainFrame();
if (frame_host) {

View File

@@ -4,6 +4,9 @@
#ifndef ATOM_COMMON_API_REMOTE_CALLBACK_FREER_H_
#define ATOM_COMMON_API_REMOTE_CALLBACK_FREER_H_
#include <string>
#include "atom/common/api/object_life_monitor.h"
#include "content/public/browser/web_contents_observer.h"
@@ -14,12 +17,14 @@ class RemoteCallbackFreer : public ObjectLifeMonitor,
public:
static void BindTo(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id,
content::WebContents* web_conents);
protected:
RemoteCallbackFreer(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id,
content::WebContents* web_conents);
~RemoteCallbackFreer() override;
@@ -30,6 +35,7 @@ class RemoteCallbackFreer : public ObjectLifeMonitor,
void RenderViewDeleted(content::RenderViewHost*) override;
private:
std::string context_id_;
int object_id_;
DISALLOW_COPY_AND_ASSIGN(RemoteCallbackFreer);

View File

@@ -27,14 +27,19 @@ content::RenderFrame* GetCurrentRenderFrame() {
} // namespace
// static
void RemoteObjectFreer::BindTo(
v8::Isolate* isolate, v8::Local<v8::Object> target, int object_id) {
new RemoteObjectFreer(isolate, target, object_id);
void RemoteObjectFreer::BindTo(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id) {
new RemoteObjectFreer(isolate, target, context_id, object_id);
}
RemoteObjectFreer::RemoteObjectFreer(
v8::Isolate* isolate, v8::Local<v8::Object> target, int object_id)
RemoteObjectFreer::RemoteObjectFreer(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id)
: ObjectLifeMonitor(isolate, target),
context_id_(context_id),
object_id_(object_id),
routing_id_(MSG_ROUTING_NONE) {
content::RenderFrame* render_frame = GetCurrentRenderFrame();
@@ -55,6 +60,7 @@ void RemoteObjectFreer::RunDestructor() {
base::string16 channel = base::ASCIIToUTF16("ipc-message");
base::ListValue args;
args.AppendString("ELECTRON_BROWSER_DEREFERENCE");
args.AppendString(context_id_);
args.AppendInteger(object_id_);
render_frame->Send(new AtomFrameHostMsg_Message(render_frame->GetRoutingID(),
channel, args));

View File

@@ -5,23 +5,30 @@
#ifndef ATOM_COMMON_API_REMOTE_OBJECT_FREER_H_
#define ATOM_COMMON_API_REMOTE_OBJECT_FREER_H_
#include <string>
#include "atom/common/api/object_life_monitor.h"
namespace atom {
class RemoteObjectFreer : public ObjectLifeMonitor {
public:
static void BindTo(
v8::Isolate* isolate, v8::Local<v8::Object> target, int object_id);
static void BindTo(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id);
protected:
RemoteObjectFreer(
v8::Isolate* isolate, v8::Local<v8::Object> target, int object_id);
RemoteObjectFreer(v8::Isolate* isolate,
v8::Local<v8::Object> target,
const std::string& context_id,
int object_id);
~RemoteObjectFreer() override;
void RunDestructor() override;
private:
std::string context_id_;
int object_id_;
int routing_id_;

View File

@@ -7,7 +7,7 @@
#define ATOM_MAJOR_VERSION 2
#define ATOM_MINOR_VERSION 0
#define ATOM_PATCH_VERSION 1
#define ATOM_PATCH_VERSION 10
// #define ATOM_PRE_RELEASE_VERSION
#ifndef ATOM_STRINGIFY

View File

@@ -168,6 +168,7 @@ void NodeBindings::Initialize() {
// Init node.
// (we assume node::Init would not modify the parameters under embedded mode).
// NOTE: If you change this line, please ping @codebytere or @MarshallOfSound
node::Init(nullptr, nullptr, nullptr, nullptr);
#if defined(OS_WIN)

View File

@@ -82,6 +82,8 @@ void AtomRendererClient::RunScriptsAtDocumentEnd(
void AtomRendererClient::DidCreateScriptContext(
v8::Handle<v8::Context> context, content::RenderFrame* render_frame) {
RendererClientBase::DidCreateScriptContext(context, render_frame);
// Only allow node integration for the main frame, unless it is a devtools
// extension page.
if (!render_frame->IsMainFrame() && !IsDevToolsExtension(render_frame))

View File

@@ -153,6 +153,7 @@ void AtomSandboxedRendererClient::RenderViewCreated(
void AtomSandboxedRendererClient::DidCreateScriptContext(
v8::Handle<v8::Context> context, content::RenderFrame* render_frame) {
RendererClientBase::DidCreateScriptContext(context, render_frame);
// Only allow preload for the main frame or
// For devtools we still want to run the preload_bundle script

View File

@@ -19,7 +19,9 @@
#include "atom/renderer/preferences_manager.h"
#include "base/command_line.h"
#include "base/memory/ptr_util.h"
#include "base/process/process_handle.h"
#include "base/strings/string_split.h"
#include "base/strings/stringprintf.h"
#include "chrome/renderer/media/chrome_key_systems.h"
#include "chrome/renderer/pepper/pepper_helper.h"
#include "chrome/renderer/printing/print_web_view_helper.h"
@@ -44,6 +46,14 @@
#include <shlobj.h>
#endif
// This is defined in later versions of Chromium, remove this if you see
// compiler complaining duplicate defines.
#if defined(OS_WIN) || defined(OS_FUCHSIA)
#define CrPRIdPid "ld"
#else
#define CrPRIdPid "d"
#endif
namespace atom {
namespace {
@@ -78,6 +88,19 @@ RendererClientBase::RendererClientBase() {
RendererClientBase::~RendererClientBase() {
}
void RendererClientBase::DidCreateScriptContext(
v8::Handle<v8::Context> context,
content::RenderFrame* render_frame) {
// global.setHidden("contextId", `${processId}-${++nextContextId}`)
std::string context_id = base::StringPrintf(
"%" CrPRIdPid "-%d", base::GetCurrentProcId(), ++next_context_id_);
v8::Isolate* isolate = context->GetIsolate();
v8::Local<v8::String> key = mate::StringToSymbol(isolate, "contextId");
v8::Local<v8::Private> private_key = v8::Private::ForApi(isolate, key);
v8::Local<v8::Value> value = mate::ConvertToV8(isolate, context_id);
context->Global()->SetPrivate(context, private_key, value);
}
void RendererClientBase::AddRenderBindings(
v8::Isolate* isolate,
v8::Local<v8::Object> binding_object) {

View File

@@ -21,7 +21,7 @@ class RendererClientBase : public content::ContentRendererClient {
virtual ~RendererClientBase();
virtual void DidCreateScriptContext(
v8::Handle<v8::Context> context, content::RenderFrame* render_frame) = 0;
v8::Handle<v8::Context> context, content::RenderFrame* render_frame);
virtual void WillReleaseScriptContext(
v8::Handle<v8::Context> context, content::RenderFrame* render_frame) = 0;
virtual void DidClearWindowObject(content::RenderFrame* render_frame);
@@ -57,6 +57,9 @@ class RendererClientBase : public content::ContentRendererClient {
private:
std::unique_ptr<PreferencesManager> preferences_manager_;
bool isolated_world_;
// An increasing ID used for indentifying an V8 context in this process.
int next_context_id_ = 0;
};
} // namespace atom

View File

@@ -4,6 +4,13 @@
#include "atom/utility/atom_content_utility_client.h"
#include "content/public/common/service_manager_connection.h"
#include "content/public/common/simple_connection_filter.h"
#include "content/public/utility/utility_thread.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "net/proxy/mojo_proxy_resolver_factory_impl.h"
#include "services/service_manager/public/cpp/binder_registry.h"
#if defined(OS_WIN)
#include "base/memory/ptr_util.h"
#include "chrome/utility/printing_handler_win.h"
@@ -11,6 +18,16 @@
namespace atom {
namespace {
void CreateProxyResolverFactory(
net::interfaces::ProxyResolverFactoryRequest request) {
mojo::MakeStrongBinding(base::MakeUnique<net::MojoProxyResolverFactoryImpl>(),
std::move(request));
}
} // namespace
AtomContentUtilityClient::AtomContentUtilityClient() {
#if defined(OS_WIN)
handlers_.push_back(base::MakeUnique<printing::PrintingHandlerWin>());
@@ -20,6 +37,23 @@ AtomContentUtilityClient::AtomContentUtilityClient() {
AtomContentUtilityClient::~AtomContentUtilityClient() {
}
void AtomContentUtilityClient::UtilityThreadStarted() {
content::ServiceManagerConnection* connection =
content::ChildThread::Get()->GetServiceManagerConnection();
// NOTE: Some utility process instances are not connected to the Service
// Manager. Nothing left to do in that case.
if (!connection)
return;
auto registry = base::MakeUnique<service_manager::BinderRegistry>();
registry->AddInterface<net::interfaces::ProxyResolverFactory>(
base::Bind(CreateProxyResolverFactory),
base::ThreadTaskRunnerHandle::Get());
connection->AddConnectionFilter(
base::MakeUnique<content::SimpleConnectionFilter>(std::move(registry)));
}
bool AtomContentUtilityClient::OnMessageReceived(
const IPC::Message& message) {
#if defined(OS_WIN)

View File

@@ -20,6 +20,7 @@ class AtomContentUtilityClient : public content::ContentUtilityClient {
AtomContentUtilityClient();
~AtomContentUtilityClient() override;
void UtilityThreadStarted() override;
bool OnMessageReceived(const IPC::Message& message) override;
private:

View File

@@ -116,6 +116,8 @@
'<(libchromiumcontent_dir)/libcommon.a',
# services/device/wake_lock/power_save_blocker/
'<(libchromiumcontent_dir)/libpower_save_blocker.a',
# net/proxy/mojo_*
'<(libchromiumcontent_dir)/libnet_proxy_service.a',
# Friends of libpdf.a:
# On Linux we have to use "--whole-archive" to include
# all symbols, otherwise there will be plenty of
@@ -200,6 +202,8 @@
'<(libchromiumcontent_dir)/libcommon.a',
# services/device/wake_lock/power_save_blocker/
'<(libchromiumcontent_dir)/libpower_save_blocker.a',
# net/proxy/mojo_*
'<(libchromiumcontent_dir)/libnet_proxy_service.a',
# Friends of libpdf.a:
'<(libchromiumcontent_dir)/libpdf.a',
'<(libchromiumcontent_dir)/libppapi_cpp_objects.a',
@@ -268,6 +272,20 @@
],
},
}],
# In the OSX 10.10 SDK, CoreBluetooth became a top level framework.
# Previously, it was nested in IOBluetooth. In order for Chrome to run on
# OSes older than OSX 10.10, the top level CoreBluetooth framework must be
# weakly linked.
['mac_sdk=="10.10" and libchromiumcontent_component==0', {
'direct_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-weak_framework',
'CoreBluetooth',
],
},
},
}],
]
}], # OS=="mac"
['OS=="win"', {
@@ -296,6 +314,8 @@
'<(libchromiumcontent_dir)/common.lib',
# services/device/wake_lock/power_save_blocker/
'<(libchromiumcontent_dir)/power_save_blocker.lib',
# net/proxy/mojo_*
'<(libchromiumcontent_dir)/net_proxy_service.lib',
# Friends of pdf.lib:
'<(libchromiumcontent_dir)/pdf.lib',
'<(libchromiumcontent_dir)/ppapi_cpp_objects.lib',

View File

@@ -100,14 +100,12 @@ BrowserContext::~BrowserContext() {
}
void BrowserContext::InitPrefs() {
base::ThreadRestrictions::ScopedAllowIO allow_io;
auto prefs_path = GetPath().Append(FILE_PATH_LITERAL("Preferences"));
PrefServiceFactory prefs_factory;
scoped_refptr<JsonPrefStore> pref_store =
base::MakeRefCounted<JsonPrefStore>(prefs_path);
{
base::ThreadRestrictions::ScopedAllowIO allow_io;
pref_store->ReadPrefs(); // Synchronous.
}
pref_store->ReadPrefs(); // Synchronous.
prefs_factory.set_user_prefs(pref_store);
auto registry = make_scoped_refptr(new PrefRegistrySimple);

View File

@@ -184,10 +184,20 @@ void OverrideAppLogsPath() {
}
#endif
void BrowserMainParts::PreEarlyInitialization() {
void BrowserMainParts::InitializeFeatureList() {
auto* cmd_line = base::CommandLine::ForCurrentProcess();
const auto enable_features =
cmd_line->GetSwitchValueASCII(switches::kEnableFeatures);
const auto disable_features =
cmd_line->GetSwitchValueASCII(switches::kDisableFeatures);
std::unique_ptr<base::FeatureList> feature_list(new base::FeatureList);
feature_list->InitializeFromCommandLine("", "");
feature_list->InitializeFromCommandLine(enable_features, disable_features);
base::FeatureList::SetInstance(std::move(feature_list));
}
void BrowserMainParts::PreEarlyInitialization() {
InitializeFeatureList();
OverrideAppLogsPath();
#if defined(USE_X11)
views::LinuxUI::SetInstance(BuildGtkUi());

View File

@@ -46,6 +46,8 @@ class BrowserMainParts : public content::BrowserMainParts {
int PreCreateThreads() override;
void PostDestroyThreads() override;
void InitializeFeatureList();
private:
#if defined(OS_MACOSX)
void InitializeMainNib();

View File

@@ -698,6 +698,8 @@ void InspectableWebContentsImpl::WebContentsDestroyed() {
for (const auto& pair : pending_requests_)
delete pair.first;
pending_requests_.clear();
if (view_ && view_->GetDelegate())
view_->GetDelegate()->DevToolsClosed();
}

View File

@@ -0,0 +1,117 @@
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "brightray/browser/net/chrome_mojo_proxy_resolver_factory.h"
#include <utility>
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/memory/singleton.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/utf_string_conversions.h"
#include "base/threading/thread_task_runner_handle.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/utility_process_host.h"
#include "content/public/browser/utility_process_host_client.h"
#include "services/service_manager/public/cpp/interface_provider.h"
namespace {
const int kUtilityProcessIdleTimeoutSeconds = 5;
}
// static
ChromeMojoProxyResolverFactory* ChromeMojoProxyResolverFactory::GetInstance() {
DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
return base::Singleton<
ChromeMojoProxyResolverFactory,
base::LeakySingletonTraits<ChromeMojoProxyResolverFactory>>::get();
}
ChromeMojoProxyResolverFactory::ChromeMojoProxyResolverFactory() {
DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
}
ChromeMojoProxyResolverFactory::~ChromeMojoProxyResolverFactory() {
DCHECK(thread_checker_.CalledOnValidThread());
}
std::unique_ptr<base::ScopedClosureRunner>
ChromeMojoProxyResolverFactory::CreateResolver(
const std::string& pac_script,
mojo::InterfaceRequest<net::interfaces::ProxyResolver> req,
net::interfaces::ProxyResolverFactoryRequestClientPtr client) {
DCHECK(thread_checker_.CalledOnValidThread());
if (!resolver_factory_)
CreateFactory();
if (!resolver_factory_) {
// If factory creation failed, close |req|'s message pipe, which should
// cause a connection error.
req = nullptr;
return nullptr;
}
idle_timer_.Stop();
num_proxy_resolvers_++;
resolver_factory_->CreateResolver(pac_script, std::move(req),
std::move(client));
return base::MakeUnique<base::ScopedClosureRunner>(
base::Bind(&ChromeMojoProxyResolverFactory::OnResolverDestroyed,
base::Unretained(this)));
}
void ChromeMojoProxyResolverFactory::CreateFactory() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!resolver_factory_);
DCHECK(!weak_utility_process_host_);
DVLOG(1) << "Attempting to create utility process for proxy resolver";
content::UtilityProcessHost* utility_process_host =
content::UtilityProcessHost::Create(
scoped_refptr<content::UtilityProcessHostClient>(),
base::ThreadTaskRunnerHandle::Get());
utility_process_host->SetName(base::ASCIIToUTF16("Electron Proxy Resolver"));
bool process_started = utility_process_host->Start();
if (process_started) {
BindInterface(utility_process_host, &resolver_factory_);
weak_utility_process_host_ = utility_process_host->AsWeakPtr();
} else {
LOG(ERROR) << "Unable to connect to utility process";
return;
}
resolver_factory_.set_connection_error_handler(base::Bind(
&ChromeMojoProxyResolverFactory::DestroyFactory, base::Unretained(this)));
}
void ChromeMojoProxyResolverFactory::DestroyFactory() {
resolver_factory_.reset();
delete weak_utility_process_host_.get();
weak_utility_process_host_.reset();
}
void ChromeMojoProxyResolverFactory::OnResolverDestroyed() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK_GT(num_proxy_resolvers_, 0u);
if (--num_proxy_resolvers_ == 0) {
// When all proxy resolvers have been destroyed, the proxy resolver factory
// is no longer needed. However, new proxy resolvers may be created
// shortly after being destroyed (e.g. due to a network change).
//
// On desktop, where a utility process is used, if the utility process is
// shut down immediately, this would cause unnecessary process churn, so
// wait for an idle timeout before shutting down the proxy resolver utility
// process.
idle_timer_.Start(
FROM_HERE,
base::TimeDelta::FromSeconds(kUtilityProcessIdleTimeoutSeconds), this,
&ChromeMojoProxyResolverFactory::OnIdleTimeout);
}
}
void ChromeMojoProxyResolverFactory::OnIdleTimeout() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK_EQ(num_proxy_resolvers_, 0u);
DestroyFactory();
}

View File

@@ -0,0 +1,73 @@
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BRIGHTRAY_BROWSER_NET_CHROME_MOJO_PROXY_RESOLVER_FACTORY_H_
#define BRIGHTRAY_BROWSER_NET_CHROME_MOJO_PROXY_RESOLVER_FACTORY_H_
#include <stddef.h>
#include <string>
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "base/timer/timer.h"
#include "net/proxy/mojo_proxy_resolver_factory.h"
namespace content {
class UtilityProcessHost;
}
namespace base {
template <typename Type>
struct DefaultSingletonTraits;
} // namespace base
// A factory used to create connections to Mojo proxy resolver services. On
// Android, the proxy resolvers will run in the browser process, and on other
// platforms, they'll all be run in the same utility process. Utility process
// crashes are detected and the utility process is automatically restarted.
class ChromeMojoProxyResolverFactory : public net::MojoProxyResolverFactory {
public:
static ChromeMojoProxyResolverFactory* GetInstance();
// Overridden from net::MojoProxyResolverFactory:
std::unique_ptr<base::ScopedClosureRunner> CreateResolver(
const std::string& pac_script,
mojo::InterfaceRequest<net::interfaces::ProxyResolver> req,
net::interfaces::ProxyResolverFactoryRequestClientPtr client) override;
private:
friend struct base::DefaultSingletonTraits<ChromeMojoProxyResolverFactory>;
ChromeMojoProxyResolverFactory();
~ChromeMojoProxyResolverFactory() override;
// Creates the proxy resolver factory. On desktop, creates a new utility
// process before creating it out of process. On Android, creates it on the
// current thread.
void CreateFactory();
// Destroys |resolver_factory_|.
void DestroyFactory();
// Invoked each time a proxy resolver is destroyed.
void OnResolverDestroyed();
// Invoked once an idle timeout has elapsed after all proxy resolvers are
// destroyed.
void OnIdleTimeout();
net::interfaces::ProxyResolverFactoryPtr resolver_factory_;
base::WeakPtr<content::UtilityProcessHost> weak_utility_process_host_;
size_t num_proxy_resolvers_ = 0;
base::OneShotTimer idle_timer_;
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(ChromeMojoProxyResolverFactory);
};
#endif // BRIGHTRAY_BROWSER_NET_CHROME_MOJO_PROXY_RESOLVER_FACTORY_H_

View File

@@ -52,8 +52,7 @@ void NetLog::StartLogging() {
base::FilePath log_path =
command_line->GetSwitchValuePath(switches::kLogNetLog);
std::unique_ptr<base::Value> constants(GetConstants());
net::NetLogCaptureMode capture_mode =
net::NetLogCaptureMode::IncludeCookiesAndCredentials();
net::NetLogCaptureMode capture_mode = net::NetLogCaptureMode::Default();
file_net_log_observer_ =
net::FileNetLogObserver::CreateUnbounded(log_path, std::move(constants));

View File

@@ -13,6 +13,7 @@
#include "base/threading/sequenced_worker_pool.h"
#include "base/threading/worker_pool.h"
#include "brightray/browser/browser_client.h"
#include "brightray/browser/net/chrome_mojo_proxy_resolver_factory.h"
#include "brightray/browser/net/devtools_network_controller_handle.h"
#include "brightray/browser/net/devtools_network_transaction_factory.h"
#include "brightray/browser/net/require_ct_delegate.h"
@@ -41,7 +42,7 @@
#include "net/proxy/proxy_config_service.h"
#include "net/proxy/proxy_script_fetcher_impl.h"
#include "net/proxy/proxy_service.h"
#include "net/proxy/proxy_service_v8.h"
#include "net/proxy/proxy_service_mojo.h"
#include "net/ssl/channel_id_service.h"
#include "net/ssl/default_channel_id_store.h"
#include "net/ssl/ssl_config_service_defaults.h"
@@ -229,14 +230,12 @@ net::URLRequestContext* URLRequestContextGetter::GetURLRequestContext() {
storage_->set_proxy_service(net::ProxyService::CreateFixed(
proxy_config));
} else {
storage_->set_proxy_service(
net::CreateProxyServiceUsingV8ProxyResolver(
std::move(proxy_config_service_),
new net::ProxyScriptFetcherImpl(url_request_context_.get()),
dhcp_factory.Create(url_request_context_.get()),
host_resolver.get(),
nullptr,
url_request_context_->network_delegate()));
storage_->set_proxy_service(net::CreateProxyServiceUsingMojoFactory(
ChromeMojoProxyResolverFactory::GetInstance(),
std::move(proxy_config_service_),
new net::ProxyScriptFetcherImpl(url_request_context_.get()),
dhcp_factory.Create(url_request_context_.get()), host_resolver.get(),
nullptr, url_request_context_->network_delegate()));
}
std::vector<std::string> schemes;

View File

@@ -63,6 +63,8 @@
'browser/net/devtools_network_transaction.h',
'browser/net/devtools_network_upload_data_stream.cc',
'browser/net/devtools_network_upload_data_stream.h',
'browser/net/chrome_mojo_proxy_resolver_factory.cc',
'browser/net/chrome_mojo_proxy_resolver_factory.h',
'browser/net/require_ct_delegate.cc',
'browser/net/require_ct_delegate.h',
'browser/net_log.cc',

View File

@@ -147,6 +147,7 @@
'BUILDING_V8_SHARED',
'BUILDING_V8_PLATFORM_SHARED',
'BUILDING_V8_BASE_SHARED',
'NODE_WITHOUT_NODE_OPTIONS',
],
'conditions': [
['OS=="mac" and libchromiumcontent_component==0', {

View File

@@ -64,6 +64,9 @@ terminating the application.
then `before-quit` is emitted *after* emitting `close` event on all windows and
closing them.
**Note:** On Windows, this event will not be emitted if the app is closed due
to a shutdown/restart of the system or a user logout.
### Event: 'will-quit'
Returns:
@@ -77,6 +80,9 @@ terminating the application.
See the description of the `window-all-closed` event for the differences between
the `will-quit` and `window-all-closed` events.
**Note:** On Windows, this event will not be emitted if the app is closed due
to a shutdown/restart of the system or a user logout.
### Event: 'quit'
Returns:
@@ -86,6 +92,9 @@ Returns:
Emitted when the application is quitting.
**Note:** On Windows, this event will not be emitted if the app is closed due
to a shutdown/restart of the system or a user logout.
### Event: 'open-file' _macOS_
Returns:
@@ -356,6 +365,23 @@ assistive technologies, such as screen readers, are enabled or disabled.
See https://www.chromium.org/developers/design-documents/accessibility for more
details.
### Event: 'session-created'
Returns:
* `event` Event
* `session` [Session](session.md)
Emitted when Electron has created a new `session`.
```javascript
const {app} = require('electron')
app.on('session-created', (event, session) => {
console.log(session)
})
```
## Methods
The `app` object has the following methods:
@@ -522,8 +548,9 @@ Overrides the current application's name.
### `app.getLocale()`
Returns `String` - The current application locale. Possible return values are documented
[here](locales.md).
Returns `String` - The current application locale. Possible return values are documented [here](locales.md).
To set the locale, you'll want to use a command line switch at app startup, which may be found [here](https://github.com/electron/electron/blob/master/docs/api/chrome-command-line-switches.md).
**Note:** When distributing your packaged app, you have to also ship the
`locales` folder.

View File

@@ -287,7 +287,7 @@ It creates a new `BrowserWindow` with native properties as set by the `options`.
between the web pages even when you specified different values for them,
including but not limited to `preload`, `sandbox` and `nodeIntegration`.
So it is suggested to use exact same `webPreferences` for web pages with
the same `affinity`.
the same `affinity`. _This property is experimental_
* `zoomFactor` Number (optional) - The default zoom factor of the page, `3.0` represents
`300%`. Default is `1.0`.
* `javascript` Boolean (optional) - Enables JavaScript support. Default is `true`.
@@ -361,7 +361,7 @@ It creates a new `BrowserWindow` with native properties as set by the `options`.
script. You can use the `will-attach-webview` event on [webContents](web-contents.md)
to strip away the `preload` script and to validate or alter the
`<webview>`'s initial settings.
* `additionArguments` String[] (optional) - A list of strings that will be appended
* `additionalArguments` String[] (optional) - A list of strings that will be appended
to `process.argv` in the renderer process of this app. Useful for passing small
bits of data down to renderer process preload scripts.
@@ -1216,7 +1216,7 @@ mode set (but with a value within the valid range), `normal` will be assumed.
#### `win.setOverlayIcon(overlay, description)` _Windows_
* `overlay` [NativeImage](native-image.md) - the icon to display on the bottom
* `overlay` [NativeImage](native-image.md) | null - the icon to display on the bottom
right corner of the taskbar icon. If this parameter is `null`, the overlay is
cleared
* `description` String - a description that will be provided to Accessibility

View File

@@ -194,7 +194,7 @@ Sets the `image` associated with this tray icon.
#### `tray.setPressedImage(image)` _macOS_
* `image` [NativeImage](native-image.md)
* `image` ([NativeImage](native-image.md) | String)
Sets the `image` associated with this tray icon when pressed on macOS.
@@ -262,7 +262,7 @@ The `position` is only available on Windows, and it is (0, 0) by default.
#### `tray.setContextMenu(menu)`
* `menu` Menu
* `menu` Menu | null
Sets the context menu for this icon.

View File

@@ -173,6 +173,7 @@ Set the security origin of the isolated world.
Returns `Object`:
* `images` [MemoryUsageDetails](structures/memory-usage-details.md)
* `scripts` [MemoryUsageDetails](structures/memory-usage-details.md)
* `cssStyleSheets` [MemoryUsageDetails](structures/memory-usage-details.md)
* `xslStyleSheets` [MemoryUsageDetails](structures/memory-usage-details.md)
* `fonts` [MemoryUsageDetails](structures/memory-usage-details.md)

View File

@@ -2,11 +2,31 @@
This document describes the process for releasing a new version of Electron.
## Set your tokens and environment variables
You'll need Electron S3 credentials in order to create and
upload an Electron release. Contact a team member for more
information.
There are a handful of `*_TOKEN` environment variables needed by the release
scripts. Once you've generated these per-user tokens, you may want to keep
them in a local file that you can `source` when starting a release.
* `ELECTRON_GITHUB_TOKEN`:
Create as described at https://github.com/settings/tokens/new,
giving the token repo access scope.
* `APPVEYOR_TOKEN`:
Create a token from https://windows-ci.electronjs.org/api-token
If you don't have an account, ask a team member to add you.
* `CIRCLE_TOKEN`:
Create a token from "Personal API Tokens" at https://circleci.com/account/api
* `VSTS_TOKEN`:
Create a Personal Access Token at https://github.visualstudio.com/_usersSettings/tokens
with the scope of `Build (read and execute)`.
## Determine which branch to release from
- **If releasing beta,** run the scripts below from `master`.
- **If releasing a stable version,** run the scripts below from `1-7-x` or
`1-6-x`, depending on which version you are releasing for.
- **If releasing a stable version,** run the scripts below from the branch
you're stabilizing.
## Find out what version change is needed
Run `npm run prepare-release -- --notesOnly` to view auto generated release
@@ -14,6 +34,11 @@ notes. The notes generated should help you determine if this is a major, minor,
patch, or beta version change. Read the
[Version Change Rules](../tutorial/electron-versioning.md#semver) for more information.
**NB:** If releasing from a branch, e.g. 1-8-x, check out the branch with
`git checkout 1-8-x` rather than `git checkout -b remotes/origin/1-8-x`.
The scripts need `git rev-parse --abbrev-ref HEAD` to return a short name,
e.g. no `remotes/origin/`
## Run the prepare-release script
The prepare release script will do the following:
1. Check if a release is already in process and if so it will halt.
@@ -55,10 +80,11 @@ npm run prepare-release -- --stable
The `prepare-release` script will trigger the builds via API calls.
To monitor the build progress, see the following pages:
- [mac-ci.electronjs.org/blue/organizations/jenkins/electron-mas-x64-release/activity](https://mac-ci.electronjs.org/blue/organizations/jenkins/electron-mas-x64-release/activity) for Mac App Store
- [mac-ci.electronjs.org/blue/organizations/jenkins/electron-osx-x64-release/activity](https://mac-ci.electronjs.org/blue/organizations/jenkins/electron-osx-x64-release/activity) for OS X
- [circleci.com/gh/electron/electron](https://circleci.com/gh/electron) for Linux
- [windows-ci.electronjs.org/project/AppVeyor/electron](https://windows-ci.electronjs.org/project/AppVeyor/electron) for Windows
- [electron-release-mas-x64](https://github.visualstudio.com/electron/_build/index?context=allDefinitions&path=%5C&definitionId=19&_a=completed) for MAS builds.
- [electron-release-osx-x64](https://github.visualstudio.com/electron/_build/index?context=allDefinitions&path=%5C&definitionId=18&_a=completed) for OSX builds.
- [circleci.com/gh/electron/electron](https://circleci.com/gh/electron) for Linux builds.
- [windows-ci.electronjs.org/project/AppVeyor/electron-39ng6](https://windows-ci.electronjs.org/project/AppVeyor/electron-39ng6) for Windows 32-bit builds.
- [windows-ci.electronjs.org/project/AppVeyor/electron](https://windows-ci.electronjs.org/project/AppVeyor/electron) for Windows 64-bit builds.
## Compile release notes
@@ -159,14 +185,15 @@ This release is published to [npm](https://www.npmjs.com/package/electron) under
## Edit the release draft
1. Visit [the releases page] and you'll see a new draft release with placeholder release notes.
1. Edit the release and add release notes.
1. Uncheck the `prerelease` checkbox if you're publishing a stable release; leave it checked for beta releases.
1. Click 'Save draft'. **Do not click 'Publish release'!**
1. Wait for all builds to pass before proceeding.
1. You can run `npm run release -- --validateRelease` to verify that all of the
required files have been created for the release.
1. Visit [the releases page] and you'll see a new draft release with placeholder
release notes.
2. Edit the release and add release notes.
3. Click 'Save draft'. **Do not click 'Publish release'!**
4. Wait for all builds to pass before proceeding.
5. In the branch, verify that the release's files have been created:
```sh
$ npm run release -- --validateRelease
```
## Publish the release
@@ -181,17 +208,68 @@ on Windows by node-gyp to build native modules.
5. Validate that all of the required files are present on GitHub and S3 and have
the correct checksums as specified in the SHASUMS files.
6. Publish the release on GitHub
7. Delete the `release` branch.
## Publish to npm
Once the publish is successful, run `npm run publish-to-npm` to publish to
release to npm.
Before publishing to npm, you'll need to log into npm as Electron. Optionally,
you may find [npmrc](https://www.npmjs.com/package/npmrc) to be a useful way
to keep Electron's profile side-by-side with your own:
```sh
$ sudo npm install -g npmrc
$ npmrc -c electron
Removing old .npmrc (default)
Activating .npmrc "electron"
```
The Electron account's credentials are kept by GitHub.
"Electron - NPM" for the URL "https://www.npmjs.com/login".
```sh
$ npm login
Username: electron
Password:
Email: (this IS public) electron@github.com
```
Publish the release to npm.
```sh
$ npm whoami
electron
$ npm run publish-to-npm
```
[the releases page]: https://github.com/electron/electron/releases
[this bump commit]: https://github.com/electron/electron/commit/78ec1b8f89b3886b856377a1756a51617bc33f5a
[versioning]: /docs/tutorial/electron-versioning.md
# Troubleshooting
## Rerun broken builds
If a release build fails for some reason, you can use `script/ci-release-build.js` to rerun a release build:
### Rerun all linux builds:
```sh
node script/ci-release-build.js --ci=CircleCI --ghRelease TARGET_BRANCH
(TARGET_BRANCH) is the branch you are releasing from.
```
### Rerun all macOS builds:
```sh
node script/ci-release-build.js --ci=VSTS --ghRelease TARGET_BRANCH
(TARGET_BRANCH) is the branch you are releasing from.
```
### Rerun all Windows builds:
```sh
node script/ci-release-build.js --ci=AppVeyor --ghRelease TARGET_BRANCH
(TARGET_BRANCH) is the branch you are releasing from.
```
Additionally you can pass a job name to the script to run an individual job, eg:
````sh
node script/ci-release-build.js --ci=AppVeyor --ghRelease --job=electron-x64 TARGET_BRANCH
```
## Fix missing binaries of a release manually
In the case of a corrupted release with broken CI machines, we might have to

View File

@@ -4,7 +4,7 @@
'product_name%': 'Electron',
'company_name%': 'GitHub, Inc',
'company_abbr%': 'github',
'version%': '2.0.1',
'version%': '2.0.10',
'js2c_input_dir': '<(SHARED_INTERMEDIATE_DIR)/js2c',
},
'includes': [

View File

@@ -64,6 +64,7 @@
'lib/renderer/inspector.js',
'lib/renderer/override.js',
'lib/renderer/security-warnings.js',
'lib/renderer/web-frame-init.js',
'lib/renderer/window-setup.js',
'lib/renderer/web-view/guest-view-internal.js',
'lib/renderer/web-view/web-view.js',
@@ -273,6 +274,8 @@
'atom/browser/net/http_protocol_handler.h',
'atom/browser/net/js_asker.cc',
'atom/browser/net/js_asker.h',
'atom/browser/net/resolve_proxy_helper.cc',
'atom/browser/net/resolve_proxy_helper.h',
'atom/browser/net/url_request_about_job.cc',
'atom/browser/net/url_request_about_job.h',
'atom/browser/net/url_request_async_asar_job.cc',

View File

@@ -331,6 +331,10 @@ ipcMain.on('ELECTRON_GUEST_VIEW_MANAGER_CREATE_GUEST', function (event, params,
event.sender.send(`ELECTRON_RESPONSE_${requestId}`, createGuest(event.sender, params))
})
ipcMain.on('ELECTRON_GUEST_VIEW_MANAGER_CREATE_GUEST_SYNC', function (event, params) {
event.returnValue = createGuest(event.sender, params)
})
ipcMain.on('ELECTRON_GUEST_VIEW_MANAGER_ATTACH_GUEST', function (event, elementInstanceId, guestInstanceId, params) {
attachGuest(event, elementInstanceId, guestInstanceId, params)
})

View File

@@ -27,11 +27,11 @@ const mergeOptions = function (child, parent, visited) {
for (const key in parent) {
if (key === 'isBrowserView') continue
if (!hasProp.call(parent, key)) continue
if (key in child) continue
if (key in child && key !== 'webPreferences') continue
const value = parent[key]
if (typeof value === 'object') {
child[key] = mergeOptions({}, value, visited)
child[key] = mergeOptions(child[key] || {}, value, visited)
} else {
child[key] = value
}

View File

@@ -11,22 +11,22 @@ class ObjectsRegistry {
this.storage = {}
// Stores the IDs of objects referenced by WebContents.
// (webContentsId) => [id]
// (webContentsContextId) => [id]
this.owners = {}
}
// Register a new object and return its assigned ID. If the object is already
// registered then the already assigned ID would be returned.
add (webContents, obj) {
add (webContents, contextId, obj) {
// Get or assign an ID to the object.
const id = this.saveToStorage(obj)
// Add object to the set of referenced objects.
const webContentsId = webContents.getId()
let owner = this.owners[webContentsId]
const webContentsContextId = `${webContents.id}-${contextId}`
let owner = this.owners[webContentsContextId]
if (!owner) {
owner = this.owners[webContentsId] = new Set()
this.registerDeleteListener(webContents, webContentsId)
owner = this.owners[webContentsContextId] = new Set()
this.registerDeleteListener(webContents, contextId)
}
if (!owner.has(id)) {
owner.add(id)
@@ -43,25 +43,28 @@ class ObjectsRegistry {
}
// Dereference an object according to its ID.
remove (webContentsId, id) {
// Dereference from the storage.
this.dereference(id)
// Also remove the reference in owner.
let owner = this.owners[webContentsId]
// Note that an object may be double-freed (cleared when page is reloaded, and
// then garbage collected in old page).
remove (webContents, contextId, id) {
const webContentsContextId = `${webContents.id}-${contextId}`
let owner = this.owners[webContentsContextId]
if (owner) {
// Remove the reference in owner.
owner.delete(id)
// Dereference from the storage.
this.dereference(id)
}
}
// Clear all references to objects refrenced by the WebContents.
clear (webContentsId) {
let owner = this.owners[webContentsId]
clear (webContents, contextId) {
const webContentsContextId = `${webContents.id}-${contextId}`
let owner = this.owners[webContentsContextId]
if (!owner) return
for (let id of owner) this.dereference(id)
delete this.owners[webContentsId]
delete this.owners[webContentsContextId]
}
// Private: Saves the object into storage and assigns an ID for it.
@@ -80,6 +83,8 @@ class ObjectsRegistry {
// Private: Dereference the object from store.
dereference (id) {
if (process.env.ELECTRON_DISABLE_REMOTE_DEREFERENCING) return
let pointer = this.storage[id]
if (pointer == null) {
return
@@ -91,13 +96,13 @@ class ObjectsRegistry {
}
}
// Private: Clear the storage when webContents is reloaded/navigated.
registerDeleteListener (webContents, webContentsId) {
// Private: Clear the storage when renderer process is destoryed.
registerDeleteListener (webContents, contextId) {
const processId = webContents.getProcessId()
const listener = (event, deletedProcessId) => {
if (deletedProcessId === processId) {
webContents.removeListener('render-view-deleted', listener)
this.clear(webContentsId)
this.clear(webContents, contextId)
}
}
webContents.on('render-view-deleted', listener)

View File

@@ -55,7 +55,7 @@ let getObjectPrototype = function (object) {
}
// Convert a real value into meta data.
let valueToMeta = function (sender, value, optimizeSimpleObject = false) {
let valueToMeta = function (sender, contextId, value, optimizeSimpleObject = false) {
// Determine the type of value.
const meta = { type: typeof value }
if (meta.type === 'object') {
@@ -83,14 +83,14 @@ let valueToMeta = function (sender, value, optimizeSimpleObject = false) {
// Fill the meta object according to value's type.
if (meta.type === 'array') {
meta.members = value.map((el) => valueToMeta(sender, el))
meta.members = value.map((el) => valueToMeta(sender, contextId, el))
} else if (meta.type === 'object' || meta.type === 'function') {
meta.name = value.constructor ? value.constructor.name : ''
// Reference the original value if it's an object, because when it's
// passed to renderer we would assume the renderer keeps a reference of
// it.
meta.id = objectsRegistry.add(sender, value)
meta.id = objectsRegistry.add(sender, contextId, value)
meta.members = getObjectMembers(value)
meta.proto = getObjectPrototype(value)
} else if (meta.type === 'buffer') {
@@ -100,7 +100,7 @@ let valueToMeta = function (sender, value, optimizeSimpleObject = false) {
// Instead they should appear in the renderer process
value.then(function () {}, function () {})
meta.then = valueToMeta(sender, function (onFulfilled, onRejected) {
meta.then = valueToMeta(sender, contextId, function (onFulfilled, onRejected) {
value.then(onFulfilled, onRejected)
})
} else if (meta.type === 'error') {
@@ -168,7 +168,7 @@ const removeRemoteListenersAndLogWarning = (meta, args, callIntoRenderer) => {
}
// Convert array of meta data from renderer into array of real values.
const unwrapArgs = function (sender, args) {
const unwrapArgs = function (sender, contextId, args) {
const metaToValue = function (meta) {
let i, len, member, ref, returnValue
switch (meta.type) {
@@ -177,7 +177,7 @@ const unwrapArgs = function (sender, args) {
case 'remote-object':
return objectsRegistry.get(meta.id)
case 'array':
return unwrapArgs(sender, meta.value)
return unwrapArgs(sender, contextId, meta.value)
case 'buffer':
return Buffer.from(meta.value)
case 'date':
@@ -203,26 +203,26 @@ const unwrapArgs = function (sender, args) {
return returnValue
}
case 'function': {
// Merge webContentsId and meta.id, since meta.id can be the same in
// Merge contextId and meta.id, since meta.id can be the same in
// different webContents.
const webContentsId = sender.getId()
const objectId = [webContentsId, meta.id]
const objectId = [contextId, meta.id]
// Cache the callbacks in renderer.
if (rendererFunctions.has(objectId)) {
return rendererFunctions.get(objectId)
}
const webContentsId = sender.getId()
let callIntoRenderer = function (...args) {
if (!sender.isDestroyed() && webContentsId === sender.getId()) {
sender.send('ELECTRON_RENDERER_CALLBACK', meta.id, valueToMeta(sender, args))
sender.send('ELECTRON_RENDERER_CALLBACK', contextId, meta.id, valueToMeta(sender, contextId, args))
} else {
removeRemoteListenersAndLogWarning(meta, args, callIntoRenderer)
}
}
Object.defineProperty(callIntoRenderer, 'length', { value: meta.length })
v8Util.setRemoteCallbackFreer(callIntoRenderer, meta.id, sender)
v8Util.setRemoteCallbackFreer(callIntoRenderer, contextId, meta.id, sender)
rendererFunctions.set(objectId, callIntoRenderer)
return callIntoRenderer
}
@@ -235,19 +235,19 @@ const unwrapArgs = function (sender, args) {
// Call a function and send reply asynchronously if it's a an asynchronous
// style function and the caller didn't pass a callback.
const callFunction = function (event, func, caller, args) {
const callFunction = function (event, contextId, func, caller, args) {
let funcMarkedAsync, funcName, funcPassedCallback, ref, ret
funcMarkedAsync = v8Util.getHiddenValue(func, 'asynchronous')
funcPassedCallback = typeof args[args.length - 1] === 'function'
try {
if (funcMarkedAsync && !funcPassedCallback) {
args.push(function (ret) {
event.returnValue = valueToMeta(event.sender, ret, true)
event.returnValue = valueToMeta(event.sender, contextId, ret, true)
})
func.apply(caller, args)
} else {
ret = func.apply(caller, args)
event.returnValue = valueToMeta(event.sender, ret, true)
event.returnValue = valueToMeta(event.sender, contextId, ret, true)
}
} catch (error) {
// Catch functions thrown further down in function invocation and wrap
@@ -258,45 +258,45 @@ const callFunction = function (event, func, caller, args) {
}
}
ipcMain.on('ELECTRON_BROWSER_REQUIRE', function (event, module) {
ipcMain.on('ELECTRON_BROWSER_REQUIRE', function (event, contextId, module) {
try {
event.returnValue = valueToMeta(event.sender, process.mainModule.require(module))
event.returnValue = valueToMeta(event.sender, contextId, process.mainModule.require(module))
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_GET_BUILTIN', function (event, module) {
ipcMain.on('ELECTRON_BROWSER_GET_BUILTIN', function (event, contextId, module) {
try {
event.returnValue = valueToMeta(event.sender, electron[module])
event.returnValue = valueToMeta(event.sender, contextId, electron[module])
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_GLOBAL', function (event, name) {
ipcMain.on('ELECTRON_BROWSER_GLOBAL', function (event, contextId, name) {
try {
event.returnValue = valueToMeta(event.sender, global[name])
event.returnValue = valueToMeta(event.sender, contextId, global[name])
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_CURRENT_WINDOW', function (event) {
ipcMain.on('ELECTRON_BROWSER_CURRENT_WINDOW', function (event, contextId) {
try {
event.returnValue = valueToMeta(event.sender, event.sender.getOwnerBrowserWindow())
event.returnValue = valueToMeta(event.sender, contextId, event.sender.getOwnerBrowserWindow())
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_CURRENT_WEB_CONTENTS', function (event) {
event.returnValue = valueToMeta(event.sender, event.sender)
ipcMain.on('ELECTRON_BROWSER_CURRENT_WEB_CONTENTS', function (event, contextId) {
event.returnValue = valueToMeta(event.sender, contextId, event.sender)
})
ipcMain.on('ELECTRON_BROWSER_CONSTRUCTOR', function (event, id, args) {
ipcMain.on('ELECTRON_BROWSER_CONSTRUCTOR', function (event, contextId, id, args) {
try {
args = unwrapArgs(event.sender, args)
args = unwrapArgs(event.sender, contextId, args)
let constructor = objectsRegistry.get(id)
if (constructor == null) {
@@ -306,30 +306,30 @@ ipcMain.on('ELECTRON_BROWSER_CONSTRUCTOR', function (event, id, args) {
// Call new with array of arguments.
// http://stackoverflow.com/questions/1606797/use-of-apply-with-new-operator-is-this-possible
let obj = new (Function.prototype.bind.apply(constructor, [null].concat(args)))()
event.returnValue = valueToMeta(event.sender, obj)
event.returnValue = valueToMeta(event.sender, contextId, obj)
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_FUNCTION_CALL', function (event, id, args) {
ipcMain.on('ELECTRON_BROWSER_FUNCTION_CALL', function (event, contextId, id, args) {
try {
args = unwrapArgs(event.sender, args)
args = unwrapArgs(event.sender, contextId, args)
let func = objectsRegistry.get(id)
if (func == null) {
throwRPCError(`Cannot call function on missing remote object ${id}`)
}
callFunction(event, func, global, args)
callFunction(event, contextId, func, global, args)
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_MEMBER_CONSTRUCTOR', function (event, id, method, args) {
ipcMain.on('ELECTRON_BROWSER_MEMBER_CONSTRUCTOR', function (event, contextId, id, method, args) {
try {
args = unwrapArgs(event.sender, args)
args = unwrapArgs(event.sender, contextId, args)
let object = objectsRegistry.get(id)
if (object == null) {
@@ -339,30 +339,30 @@ ipcMain.on('ELECTRON_BROWSER_MEMBER_CONSTRUCTOR', function (event, id, method, a
// Call new with array of arguments.
let constructor = object[method]
let obj = new (Function.prototype.bind.apply(constructor, [null].concat(args)))()
event.returnValue = valueToMeta(event.sender, obj)
event.returnValue = valueToMeta(event.sender, contextId, obj)
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_MEMBER_CALL', function (event, id, method, args) {
ipcMain.on('ELECTRON_BROWSER_MEMBER_CALL', function (event, contextId, id, method, args) {
try {
args = unwrapArgs(event.sender, args)
args = unwrapArgs(event.sender, contextId, args)
let obj = objectsRegistry.get(id)
if (obj == null) {
throwRPCError(`Cannot call function '${method}' on missing remote object ${id}`)
}
callFunction(event, obj[method], obj, args)
callFunction(event, contextId, obj[method], obj, args)
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_MEMBER_SET', function (event, id, name, args) {
ipcMain.on('ELECTRON_BROWSER_MEMBER_SET', function (event, contextId, id, name, args) {
try {
args = unwrapArgs(event.sender, args)
args = unwrapArgs(event.sender, contextId, args)
let obj = objectsRegistry.get(id)
if (obj == null) {
@@ -376,7 +376,7 @@ ipcMain.on('ELECTRON_BROWSER_MEMBER_SET', function (event, id, name, args) {
}
})
ipcMain.on('ELECTRON_BROWSER_MEMBER_GET', function (event, id, name) {
ipcMain.on('ELECTRON_BROWSER_MEMBER_GET', function (event, contextId, id, name) {
try {
let obj = objectsRegistry.get(id)
@@ -384,25 +384,25 @@ ipcMain.on('ELECTRON_BROWSER_MEMBER_GET', function (event, id, name) {
throwRPCError(`Cannot get property '${name}' on missing remote object ${id}`)
}
event.returnValue = valueToMeta(event.sender, obj[name])
event.returnValue = valueToMeta(event.sender, contextId, obj[name])
} catch (error) {
event.returnValue = exceptionToMeta(error)
}
})
ipcMain.on('ELECTRON_BROWSER_DEREFERENCE', function (event, id) {
objectsRegistry.remove(event.sender.getId(), id)
ipcMain.on('ELECTRON_BROWSER_DEREFERENCE', function (event, contextId, id) {
objectsRegistry.remove(event.sender, contextId, id)
})
ipcMain.on('ELECTRON_BROWSER_CONTEXT_RELEASE', (e, contextId) => {
objectsRegistry.clear(contextId)
e.returnValue = null
ipcMain.on('ELECTRON_BROWSER_CONTEXT_RELEASE', (event, contextId) => {
objectsRegistry.clear(event.sender, contextId)
event.returnValue = null
})
ipcMain.on('ELECTRON_BROWSER_GUEST_WEB_CONTENTS', function (event, guestInstanceId) {
ipcMain.on('ELECTRON_BROWSER_GUEST_WEB_CONTENTS', function (event, contextId, guestInstanceId) {
try {
let guestViewManager = require('./guest-view-manager')
event.returnValue = valueToMeta(event.sender, guestViewManager.getGuest(guestInstanceId))
event.returnValue = valueToMeta(event.sender, contextId, guestViewManager.getGuest(guestInstanceId))
} catch (error) {
event.returnValue = exceptionToMeta(error)
}

View File

@@ -3,6 +3,7 @@
const {Buffer} = require('buffer')
const childProcess = require('child_process')
const path = require('path')
const util = require('util')
const hasProp = {}.hasOwnProperty
@@ -66,47 +67,40 @@
let nextInode = 0
const uid = process.getuid != null ? process.getuid() : 0
const gid = process.getgid != null ? process.getgid() : 0
const fakeTime = new Date()
const msec = (date) => (date || fakeTime).getTime()
const asarStatsToFsStats = function (stats) {
return {
dev: 1,
ino: ++nextInode,
mode: 33188,
nlink: 1,
uid: uid,
gid: gid,
rdev: 0,
atime: stats.atime || fakeTime,
birthtime: stats.birthtime || fakeTime,
mtime: stats.mtime || fakeTime,
ctime: stats.ctime || fakeTime,
size: stats.size,
isFile: function () {
return stats.isFile
},
isDirectory: function () {
return stats.isDirectory
},
isSymbolicLink: function () {
return stats.isLink
},
isBlockDevice: function () {
return false
},
isCharacterDevice: function () {
return false
},
isFIFO: function () {
return false
},
isSocket: function () {
return false
}
const {Stats, constants} = require('fs')
let mode = constants.S_IROTH ^ constants.S_IRGRP ^ constants.S_IRUSR ^ constants.S_IWUSR
if (stats.isFile) {
mode ^= constants.S_IFREG
} else if (stats.isDirectory) {
mode ^= constants.S_IFDIR
} else if (stats.isLink) {
mode ^= constants.S_IFLNK
}
return new Stats(
1, // dev
mode, // mode
1, // nlink
uid,
gid,
0, // rdev
undefined, // blksize
++nextInode, // ino
stats.size,
undefined, // blocks,
msec(stats.atime), // atim_msec
msec(stats.mtime), // mtim_msec
msec(stats.ctime), // ctim_msec
msec(stats.birthtime) // birthtim_msec
)
}
// Create a ENOENT error.
@@ -217,6 +211,28 @@
arguments[arg] = newPath
return old.apply(this, arguments)
}
if (old[util.promisify.custom]) {
module[name][util.promisify.custom] = function () {
const p = arguments[arg]
const [isAsar, asarPath, filePath] = splitPath(p)
if (!isAsar) {
return old[util.promisify.custom].apply(this, arguments)
}
const archive = getOrCreateArchive(asarPath)
if (!archive) {
return new Promise(() => invalidArchiveError(asarPath))
}
const newPath = archive.copyFileOut(filePath)
if (!newPath) {
return new Promise(() => notFoundError(asarPath, filePath))
}
arguments[arg] = newPath
return old[util.promisify.custom].apply(this, arguments)
}
}
}
// Override fs APIs.
@@ -373,6 +389,18 @@
})
}
fs.exists[util.promisify.custom] = function (p) {
const [isAsar, asarPath, filePath] = splitPath(p)
if (!isAsar) {
return exists[util.promisify.custom](p)
}
const archive = getOrCreateArchive(asarPath)
if (!archive) {
return new Promise(() => invalidArchiveError(asarPath))
}
return Promise.resolve(archive.stat(filePath) !== false)
}
const {existsSync} = fs
fs.existsSync = function (p) {
const [isAsar, asarPath, filePath] = splitPath(p)
@@ -680,18 +708,22 @@
// called by `childProcess.{exec,execSync}`, causing
// Electron to consider the full command as a single path
// to an archive.
['exec', 'execSync'].forEach(function (functionName) {
const old = childProcess[functionName]
childProcess[functionName] = function () {
const {exec, execSync} = childProcess
childProcess.exec = invokeWithNoAsar(exec)
childProcess.exec[util.promisify.custom] = invokeWithNoAsar(exec[util.promisify.custom])
childProcess.execSync = invokeWithNoAsar(execSync)
function invokeWithNoAsar (func) {
return function () {
const processNoAsarOriginalValue = process.noAsar
process.noAsar = true
try {
return old.apply(this, arguments)
return func.apply(this, arguments)
} finally {
process.noAsar = processNoAsarOriginalValue
}
}
})
}
overrideAPI(fs, 'open')
overrideAPI(childProcess, 'execFile')

View File

@@ -1,4 +1,5 @@
const timers = require('timers')
const util = require('util')
process.atomBinding = require('./atom-binding-setup')(process.binding, process.type)
@@ -8,11 +9,21 @@ process.atomBinding = require('./atom-binding-setup')(process.binding, process.t
// which would delay the callbacks for arbitrary long time. So we should
// initiatively activate the uv loop once setImmediate and process.nextTick is
// called.
var wrapWithActivateUvLoop = function (func) {
return function () {
process.activateUvLoop()
return func.apply(this, arguments)
const wrapWithActivateUvLoop = function (func) {
return wrap(func, function (func) {
return function () {
process.activateUvLoop()
return func.apply(this, arguments)
}
})
}
function wrap (func, wrapper) {
const wrapped = wrapper(func)
if (func[util.promisify.custom]) {
wrapped[util.promisify.custom] = wrapper(func[util.promisify.custom])
}
return wrapped
}
process.nextTick = wrapWithActivateUvLoop(process.nextTick)

View File

@@ -11,6 +11,18 @@ const resolvePromise = Promise.resolve.bind(Promise)
const callbacksRegistry = new CallbacksRegistry()
const remoteObjectCache = v8Util.createIDWeakMap()
// An unique ID that can represent current context.
const contextId = v8Util.getHiddenValue(global, 'contextId')
// Notify the main process when current context is going to be released.
// Note that when the renderer process is destroyed, the message may not be
// sent, we also listen to the "render-view-deleted" event in the main process
// to guard that situation.
process.on('exit', () => {
const command = 'ELECTRON_BROWSER_CONTEXT_RELEASE'
ipcRenderer.sendSync(command, contextId)
})
// Convert the arguments object into an array of meta data.
function wrapArgs (args, visited = new Set()) {
const valueToMeta = (value) => {
@@ -109,7 +121,7 @@ function setObjectMembers (ref, object, metaId, members) {
} else {
command = 'ELECTRON_BROWSER_MEMBER_CALL'
}
const ret = ipcRenderer.sendSync(command, metaId, member.name, wrapArgs(args))
const ret = ipcRenderer.sendSync(command, contextId, metaId, member.name, wrapArgs(args))
return metaToValue(ret)
}
@@ -128,7 +140,7 @@ function setObjectMembers (ref, object, metaId, members) {
} else if (member.type === 'get') {
descriptor.get = () => {
const command = 'ELECTRON_BROWSER_MEMBER_GET'
const meta = ipcRenderer.sendSync(command, metaId, member.name)
const meta = ipcRenderer.sendSync(command, contextId, metaId, member.name)
return metaToValue(meta)
}
@@ -136,7 +148,7 @@ function setObjectMembers (ref, object, metaId, members) {
descriptor.set = (value) => {
const args = wrapArgs([value])
const command = 'ELECTRON_BROWSER_MEMBER_SET'
const meta = ipcRenderer.sendSync(command, metaId, member.name, args)
const meta = ipcRenderer.sendSync(command, contextId, metaId, member.name, args)
if (meta != null) metaToValue(meta)
return value
}
@@ -166,7 +178,7 @@ function proxyFunctionProperties (remoteMemberFunction, metaId, name) {
if (loaded) return
loaded = true
const command = 'ELECTRON_BROWSER_MEMBER_GET'
const meta = ipcRenderer.sendSync(command, metaId, name)
const meta = ipcRenderer.sendSync(command, contextId, metaId, name)
setObjectMembers(remoteMemberFunction, remoteMemberFunction, meta.id, meta.members)
}
@@ -226,7 +238,7 @@ function metaToValue (meta) {
} else {
command = 'ELECTRON_BROWSER_FUNCTION_CALL'
}
const obj = ipcRenderer.sendSync(command, meta.id, wrapArgs(args))
const obj = ipcRenderer.sendSync(command, contextId, meta.id, wrapArgs(args))
return metaToValue(obj)
}
ret = remoteFunction
@@ -239,7 +251,7 @@ function metaToValue (meta) {
Object.defineProperty(ret.constructor, 'name', { value: meta.name })
// Track delegate obj's lifetime & tell browser to clean up when object is GCed.
v8Util.setRemoteObjectFreer(ret, meta.id)
v8Util.setRemoteObjectFreer(ret, contextId, meta.id)
v8Util.setHiddenValue(ret, 'atomId', meta.id)
remoteObjectCache.set(meta.id, ret)
return ret
@@ -257,57 +269,51 @@ function metaToPlainObject (meta) {
}
// Browser calls a callback in renderer.
ipcRenderer.on('ELECTRON_RENDERER_CALLBACK', (event, id, args) => {
ipcRenderer.on('ELECTRON_RENDERER_CALLBACK', (event, passedContextId, id, args) => {
if (passedContextId !== contextId) {
// The invoked callback belongs to an old page in this renderer.
return
}
callbacksRegistry.apply(id, metaToValue(args))
})
// A callback in browser is released.
ipcRenderer.on('ELECTRON_RENDERER_RELEASE_CALLBACK', (event, id) => {
ipcRenderer.on('ELECTRON_RENDERER_RELEASE_CALLBACK', (event, passedContextId, id) => {
if (passedContextId !== contextId) {
// The freed callback belongs to an old page in this renderer.
return
}
callbacksRegistry.remove(id)
})
process.on('exit', () => {
const command = 'ELECTRON_BROWSER_CONTEXT_RELEASE'
ipcRenderer.sendSync(command, initialContext)
})
exports.require = (module) => {
const command = 'ELECTRON_BROWSER_REQUIRE'
const meta = ipcRenderer.sendSync(command, module)
const meta = ipcRenderer.sendSync(command, contextId, module)
return metaToValue(meta)
}
// Alias to remote.require('electron').xxx.
exports.getBuiltin = (module) => {
const command = 'ELECTRON_BROWSER_GET_BUILTIN'
const meta = ipcRenderer.sendSync(command, module)
const meta = ipcRenderer.sendSync(command, contextId, module)
return metaToValue(meta)
}
exports.getCurrentWindow = () => {
const command = 'ELECTRON_BROWSER_CURRENT_WINDOW'
const meta = ipcRenderer.sendSync(command)
const meta = ipcRenderer.sendSync(command, contextId)
return metaToValue(meta)
}
// Get current WebContents object.
exports.getCurrentWebContents = () => {
return metaToValue(ipcRenderer.sendSync('ELECTRON_BROWSER_CURRENT_WEB_CONTENTS'))
}
const CONTEXT_ARG = '--context-id='
let initialContext = process.argv.find(arg => arg.startsWith(CONTEXT_ARG))
if (initialContext) {
initialContext = parseInt(initialContext.substr(CONTEXT_ARG.length), 10)
} else {
// In sandbox we need to pull this from remote
initialContext = exports.getCurrentWebContents().getId()
return metaToValue(ipcRenderer.sendSync('ELECTRON_BROWSER_CURRENT_WEB_CONTENTS', contextId))
}
// Get a global object in browser.
exports.getGlobal = (name) => {
const command = 'ELECTRON_BROWSER_GLOBAL'
const meta = ipcRenderer.sendSync(command, name)
const meta = ipcRenderer.sendSync(command, contextId, name)
return metaToValue(meta)
}
@@ -324,7 +330,7 @@ exports.createFunctionWithReturnValue = (returnValue) => {
// Get the guest WebContents from guestInstanceId.
exports.getGuestWebContents = (guestInstanceId) => {
const command = 'ELECTRON_BROWSER_GUEST_WEB_CONTENTS'
const meta = ipcRenderer.sendSync(command, guestInstanceId)
const meta = ipcRenderer.sendSync(command, contextId, guestInstanceId)
return metaToValue(meta)
}

View File

@@ -3,7 +3,6 @@
const events = require('events')
const path = require('path')
const Module = require('module')
const resolvePromise = Promise.resolve.bind(Promise)
// We modified the original process.argv to let node.js load the
// init.js, we need to restore it here.
@@ -26,7 +25,6 @@ var v8Util = process.atomBinding('v8_util')
v8Util.setHiddenValue(global, 'ipc', new events.EventEmitter())
// Use electron module after everything is ready.
const electron = require('electron')
const {
warnAboutNodeWithRemoteContent,
@@ -40,40 +38,7 @@ const {
shouldLogSecurityWarnings
} = require('./security-warnings')
// Call webFrame method.
electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_WEB_FRAME_METHOD', (event, method, args) => {
electron.webFrame[method](...args)
})
electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_SYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {
const result = electron.webFrame[method](...args)
event.sender.send(`ELECTRON_INTERNAL_BROWSER_SYNC_WEB_FRAME_RESPONSE_${requestId}`, result)
})
electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_ASYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {
const responseCallback = function (result) {
resolvePromise(result)
.then((resolvedResult) => {
event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, null, resolvedResult)
})
.catch((resolvedError) => {
if (resolvedError instanceof Error) {
// Errors get lost, because: JSON.stringify(new Error('Message')) === {}
// Take the serializable properties and construct a generic object
resolvedError = {
message: resolvedError.message,
stack: resolvedError.stack,
name: resolvedError.name,
__ELECTRON_SERIALIZED_ERROR__: true
}
}
event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, resolvedError)
})
}
args.push(responseCallback)
electron.webFrame[method](...args)
})
require('./web-frame-init')()
// Process command line arguments.
let nodeIntegration = 'false'

View File

@@ -0,0 +1,38 @@
const electron = require('electron')
module.exports = () => {
  // Register the IPC channels the browser process uses to invoke
  // `webFrame` methods inside this renderer.

  // Fire-and-forget invocation: no result is reported back.
  electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_WEB_FRAME_METHOD', (event, method, args) => {
    electron.webFrame[method](...args)
  })

  // Synchronous invocation: the result is sent back on a per-request channel
  // keyed by `requestId`.
  electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_SYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {
    const result = electron.webFrame[method](...args)
    event.sender.send(`ELECTRON_INTERNAL_BROWSER_SYNC_WEB_FRAME_RESPONSE_${requestId}`, result)
  })

  // Asynchronous invocation: a response callback is appended to `args`; the
  // invoked method is expected to call it with its (possibly Promise-valued)
  // result, which is then relayed back as (error, result).
  electron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_ASYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {
    const responseCallback = function (result) {
      Promise.resolve(result)
        .then((resolvedResult) => {
          event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, null, resolvedResult)
        })
        .catch((resolvedError) => {
          if (resolvedError instanceof Error) {
            // Errors get lost, because: JSON.stringify(new Error('Message')) === {}
            // Take the serializable properties and construct a generic object
            resolvedError = {
              message: resolvedError.message,
              stack: resolvedError.stack,
              name: resolvedError.name,
              __ELECTRON_SERIALIZED_ERROR__: true
            }
          }
          event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, resolvedError)
        })
    }
    args.push(responseCallback)
    electron.webFrame[method](...args)
  })
}

View File

@@ -100,6 +100,9 @@ module.exports = {
ipcRenderer.send('ELECTRON_GUEST_VIEW_MANAGER_CREATE_GUEST', params, requestId)
ipcRenderer.once(`ELECTRON_RESPONSE_${requestId}`, callback)
},
// Synchronously ask the browser process to create a guest WebContents and
// return its result (blocks the renderer until the reply arrives).
createGuestSync: function (params) {
  return ipcRenderer.sendSync('ELECTRON_GUEST_VIEW_MANAGER_CREATE_GUEST_SYNC', params)
},
attachGuest: function (elementInstanceId, guestInstanceId, params) {
ipcRenderer.send('ELECTRON_GUEST_VIEW_MANAGER_ATTACH_GUEST', elementInstanceId, guestInstanceId, params)
webFrame.attachGuest(elementInstanceId)

View File

@@ -230,7 +230,7 @@ class SrcAttribute extends WebViewAttribute {
}
parse () {
if (!this.webViewImpl.elementAttached || !this.webViewImpl.attributes[webViewConstants.ATTRIBUTE_PARTITION].validPartitionId) {
if (!this.webViewImpl.elementAttached || !this.webViewImpl.attributes[webViewConstants.ATTRIBUTE_PARTITION].validPartitionId || !this.getValue()) {
return
}
if (this.webViewImpl.guestInstanceId == null) {
@@ -240,9 +240,6 @@ class SrcAttribute extends WebViewAttribute {
}
return
}
if (!this.getValue()) {
return
}
// Navigate to |this.src|.
const opts = {}

View File

@@ -145,9 +145,7 @@ class WebViewImpl {
onElementResize (newSize) {
// Dispatch the 'resize' event.
const resizeEvent = new Event('resize', {
bubbles: true
})
const resizeEvent = new Event('resize')
// Using client size values, because when a webview is transformed `newSize`
// is incorrect
@@ -171,6 +169,10 @@ class WebViewImpl {
})
}
// Create the guest instance synchronously and attach it to this webview
// right away — presumably used when the guest is needed before the normal
// async creation path has completed (see getWebContents); confirm with callers.
createGuestSync () {
  this.attachGuestInstance(guestViewInternal.createGuestSync(this.buildParams()))
}
dispatchEvent (webViewEvent) {
this.webviewNode.dispatchEvent(webViewEvent)
}
@@ -418,7 +420,11 @@ const registerWebViewElement = function () {
// WebContents associated with this webview.
proto.getWebContents = function () {
return v8Util.getHiddenValue(this, 'internal').webContents
const internal = v8Util.getHiddenValue(this, 'internal')
if (!internal.webContents) {
internal.createGuestSync()
}
return internal.webContents
}
window.WebView = webFrame.registerEmbedderCustomElement('webview', {

View File

@@ -32,6 +32,8 @@ const preloadModules = new Map([
['timers', require('timers')]
])
require('../renderer/web-frame-init')()
// Pass different process object to the preload script(which should not have
// access to things like `process.atomBinding`).
const preloadProcess = new events.EventEmitter()

View File

@@ -8,3 +8,14 @@ var child = proc.spawn(electron, process.argv.slice(2), {stdio: 'inherit'})
child.on('close', function (code) {
process.exit(code)
})
const handleTerminationSignal = function (signal) {
  // Relay `signal` (e.g. SIGINT/SIGTERM) to the spawned electron child so
  // it shuts down alongside this wrapper process.
  process.on(signal, function signalHandler () {
    if (child.killed) return
    child.kill(signal)
  })
}
handleTerminationSignal('SIGINT')
handleTerminationSignal('SIGTERM')

View File

@@ -21,6 +21,12 @@ if (installedVersion === version && fs.existsSync(path.join(__dirname, platformP
process.exit(0)
}
var mirror
if (version.indexOf('nightly') !== -1) {
mirror = 'https://github.com/electron/nightlies/releases/download/v'
}
// downloads if not cached
download({
cache: process.env.electron_config_cache,
@@ -29,7 +35,8 @@ download({
arch: process.env.npm_config_arch,
strictSSL: process.env.npm_config_strict_ssl === 'true',
force: process.env.force_no_cache === 'true',
quiet: process.env.npm_config_loglevel === 'silent' || process.env.CI
quiet: process.env.npm_config_loglevel === 'silent' || process.env.CI,
mirror
}, extractFile)
// unzips and makes path.txt point at the correct executable

View File

@@ -1,6 +1,6 @@
{
"name": "electron",
"version": "2.0.1",
"version": "2.0.10",
"repository": "https://github.com/electron/electron",
"description": "Build cross platform desktop apps with JavaScript, HTML, and CSS",
"devDependencies": {

View File

@@ -5,12 +5,12 @@ import re
import sys
import argparse
from lib.util import execute, get_electron_version, parse_version, scoped_cwd
from lib.util import execute, get_electron_version, parse_version, scoped_cwd, \
is_nightly, is_beta, is_stable, get_next_nightly, get_next_beta, \
get_next_stable_from_pre, get_next_stable_from_stable, clean_parse_version
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
parser = argparse.ArgumentParser(
@@ -34,14 +34,7 @@ def main():
action='store',
default=None,
dest='bump',
help='increment [major | minor | patch | beta]'
)
parser.add_argument(
'--stable',
action='store_true',
default= False,
dest='stable',
help='promote to stable (i.e. remove `-beta.x` suffix)'
help='increment [stable | beta | nightly]'
)
parser.add_argument(
'--dry-run',
@@ -52,36 +45,56 @@ def main():
)
args = parser.parse_args()
curr_version = get_electron_version()
if args.bump not in ['stable', 'beta', 'nightly']:
raise Exception('bump must be set to either stable, beta or nightly')
if is_nightly(curr_version):
if args.bump == 'nightly':
version = get_next_nightly(curr_version)
elif args.bump == 'beta':
version = get_next_beta(curr_version)
elif args.bump == 'stable':
version = get_next_stable_from_pre(curr_version)
else:
not_reached()
elif is_beta(curr_version):
if args.bump == 'nightly':
version = get_next_nightly(curr_version)
elif args.bump == 'beta':
version = get_next_beta(curr_version)
elif args.bump == 'stable':
version = get_next_stable_from_pre(curr_version)
else:
not_reached()
elif is_stable(curr_version):
if args.bump == 'nightly':
version = get_next_nightly(curr_version)
elif args.bump == 'beta':
raise Exception("You can\'t bump to a beta from stable")
elif args.bump == 'stable':
version = get_next_stable_from_stable(curr_version)
else:
not_reached()
else:
raise Exception("Invalid current version: " + curr_version)
if args.new_version == None and args.bump == None and args.stable == False:
parser.print_help()
return 1
increments = ['major', 'minor', 'patch', 'beta']
curr_version = get_electron_version()
versions = parse_version(re.sub('-beta', '', curr_version))
if args.bump in increments:
versions = increase_version(versions, increments.index(args.bump))
if versions[3] == '0':
# beta starts at 1
versions = increase_version(versions, increments.index('beta'))
if args.stable == True:
versions[3] = '0'
if args.new_version != None:
versions = parse_version(re.sub('-beta', '', args.new_version))
version = '.'.join(versions[:3])
suffix = '' if versions[3] == '0' else '-beta.' + versions[3]
versions = clean_parse_version(version)
suffix = ''
if '-' in version:
suffix = '-' + version.split('-')[1]
versions[3] = parse_version(version)[3]
version = version.split('-')[0]
if args.dry_run:
print 'new version number would be: {0}\n'.format(version + suffix)
return 0
with scoped_cwd(SOURCE_ROOT):
update_electron_gyp(version, suffix)
update_win_rc(version, versions)
@@ -92,6 +105,9 @@ def main():
print 'Bumped to version: {0}'.format(version + suffix)
def not_reached():
  # Sanity guard raised from branches that the version-bump logic should
  # make impossible to reach.
  raise Exception('Unreachable code was reached')
def increase_version(versions, index):
for i in range(index + 1, 4):
versions[i] = '0'
@@ -100,7 +116,8 @@ def increase_version(versions, index):
def update_electron_gyp(version, suffix):
pattern = re.compile(" *'version%' *: *'[0-9.]+(-beta[0-9.]*)?'")
pattern = re.compile(" *'version%' *: *'[0-9.]+(-beta[0-9.]*)?(-dev)?"
+ "(-nightly[0-9.]*)?'")
with open('electron.gyp', 'r') as f:
lines = f.readlines()
@@ -192,7 +209,14 @@ def update_package_json(version, suffix):
def tag_version(version, suffix):
execute(['git', 'commit', '-a', '-m', 'Bump v{0}'.format(version + suffix)])
execute([
'git',
'commit',
'-a',
'-m',
'Bump v{0}'.format(version + suffix),
'-n'
])
if __name__ == '__main__':

View File

@@ -1,6 +1,14 @@
require('dotenv-safe').load()
const assert = require('assert')
const request = require('request')
const buildAppVeyorURL = 'https://windows-ci.electronjs.org/api/builds'
const vstsURL = 'https://github.visualstudio.com/electron/_apis/build'
const appVeyorJobs = {
'electron-x64': 'electron',
'electron-ia32': 'electron-39ng6'
}
const circleCIJobs = [
'electron-linux-arm',
@@ -10,6 +18,11 @@ const circleCIJobs = [
'electron-linux-x64'
]
const vstsJobs = [
'electron-release-mas-x64',
'electron-release-osx-x64'
]
async function makeRequest (requestOptions, parseResponse) {
return new Promise((resolve, reject) => {
request(requestOptions, (err, res, body) => {
@@ -21,8 +34,13 @@ async function makeRequest (requestOptions, parseResponse) {
resolve(body)
}
} else {
console.error('Error occurred while requesting:', requestOptions.url)
if (parseResponse) {
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
try {
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
} catch (err) {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
}
} else {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
}
@@ -33,7 +51,6 @@ async function makeRequest (requestOptions, parseResponse) {
}
async function circleCIcall (buildUrl, targetBranch, job, options) {
assert(process.env.CIRCLE_TOKEN, 'CIRCLE_TOKEN not found in environment')
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
let buildRequest = {
'build_parameters': {
@@ -62,12 +79,21 @@ async function circleCIcall (buildUrl, targetBranch, job, options) {
}, true).catch(err => {
console.log('Error calling CircleCI:', err)
})
console.log(`Check ${circleResponse.build_url} for status. (${job})`)
console.log(`CircleCI release build request for ${job} successful. Check ${circleResponse.build_url} for status.`)
}
async function buildAppVeyor (targetBranch, options) {
console.log(`Triggering AppVeyor to run build on branch: ${targetBranch} with release flag.`)
assert(process.env.APPVEYOR_TOKEN, 'APPVEYOR_TOKEN not found in environment')
function buildAppVeyor (targetBranch, options) {
const validJobs = Object.keys(appVeyorJobs)
if (options.job) {
assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`)
callAppVeyor(targetBranch, options.job, options)
} else {
validJobs.forEach((job) => callAppVeyor(targetBranch, job, options))
}
}
async function callAppVeyor (targetBranch, job, options) {
console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`)
let environmentVariables = {}
if (options.ghRelease) {
@@ -90,7 +116,7 @@ async function buildAppVeyor (targetBranch, options) {
},
body: JSON.stringify({
accountName: 'AppVeyor',
projectSlug: 'electron',
projectSlug: appVeyorJobs[job],
branch: targetBranch,
environmentVariables
}),
@@ -99,20 +125,83 @@ async function buildAppVeyor (targetBranch, options) {
let appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling AppVeyor:', err)
})
const buildUrl = `https://windows-ci.electronjs.org/project/AppVeyor/electron/build/${appVeyorResponse.version}`
console.log(`AppVeyor release build request successful. Check build status at ${buildUrl}`)
const buildUrl = `https://windows-ci.electronjs.org/project/AppVeyor/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`
console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`)
}
function buildCircleCI (targetBranch, options) {
const circleBuildUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/tree/${targetBranch}?circle-token=${process.env.CIRCLE_TOKEN}`
if (options.job) {
assert(circleCIJobs.includes(options.job), `Unknown CI job name: ${options.job}.`)
assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`)
circleCIcall(circleBuildUrl, targetBranch, options.job, options)
} else {
circleCIJobs.forEach((job) => circleCIcall(circleBuildUrl, targetBranch, job, options))
}
}
async function buildVSTS (targetBranch, options) {
if (options.job) {
assert(vstsJobs.includes(options.job), `Unknown VSTS CI job name: ${options.job}. Valid values are: ${vstsJobs}.`)
}
console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`)
let environmentVariables = {}
if (!options.ghRelease) {
environmentVariables.UPLOAD_TO_S3 = 1
}
if (options.automaticRelease) {
environmentVariables.AUTO_RELEASE = 'true'
}
let requestOpts = {
url: `${vstsURL}/definitions?api-version=4.1`,
auth: {
user: '',
password: process.env.VSTS_TOKEN
},
headers: {
'Content-Type': 'application/json'
}
}
let vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling VSTS to get build definitions:', err)
})
let buildsToRun = []
if (options.job) {
buildsToRun = vstsResponse.value.filter(build => build.name === options.job)
} else {
buildsToRun = vstsResponse.value.filter(build => vstsJobs.includes(build.name))
}
buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables))
}
async function callVSTSBuild (build, targetBranch, environmentVariables) {
let buildBody = {
definition: build,
sourceBranch: targetBranch
}
if (Object.keys(environmentVariables).length !== 0) {
buildBody.parameters = JSON.stringify(environmentVariables)
}
let requestOpts = {
url: `${vstsURL}/builds?api-version=4.1`,
auth: {
user: '',
password: process.env.VSTS_TOKEN
},
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(buildBody),
method: 'POST'
}
let vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log(`Error calling VSTS for job ${build.name}`, err)
})
console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`)
}
function runRelease (targetBranch, options) {
if (options.ci) {
switch (options.ci) {
@@ -124,10 +213,19 @@ function runRelease (targetBranch, options) {
buildAppVeyor(targetBranch, options)
break
}
case 'VSTS': {
buildVSTS(targetBranch, options)
break
}
default: {
console.log(`Error! Unknown CI: ${options.ci}.`)
process.exit(1)
}
}
} else {
buildCircleCI(targetBranch, options)
buildAppVeyor(targetBranch, options)
buildVSTS(targetBranch, options)
}
}
@@ -140,7 +238,7 @@ if (require.main === module) {
const targetBranch = args._[0]
if (args._.length < 1) {
console.log(`Trigger CI to build release builds of electron.
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor] [--ghRelease] [--automaticRelease] TARGET_BRANCH
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|VSTS] [--ghRelease] [--automaticRelease] TARGET_BRANCH
`)
process.exit(0)
}

39
script/find-release.js Normal file
View File

@@ -0,0 +1,39 @@
if (!process.env.CI) require('dotenv-safe').load()
const GitHub = require('github')
const github = new GitHub()
if (process.argv.length < 3) {
console.log('Usage: find-release version')
process.exit(1)
}
const version = process.argv[2]
async function findRelease () {
  // Locate the GitHub release tagged with `version` and print a JSON
  // summary ({id, draft, exists} or {exists, draft}) to stdout for the
  // caller to parse.
  github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
  // Nightly builds are published to the separate `nightlies` repository.
  const repo = version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
  const releases = await github.repos.getReleases({ owner: 'electron', repo })
  const targetRelease = releases.data.find((release) => release.tag_name === version)
  const returnObject = targetRelease
    ? { id: targetRelease.id, draft: targetRelease.draft, exists: true }
    : { exists: false, draft: false }
  console.log(JSON.stringify(returnObject))
}
findRelease()

View File

@@ -0,0 +1,29 @@
const { GitProcess } = require('dugite')
const path = require('path')
const semver = require('semver')
const gitDir = path.resolve(__dirname, '..')
async function determineNextMajorForMaster () {
  // List remote release branches shaped like `origin/2-0-x` and derive the
  // next major version number from the highest one.
  const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]-[0-9]-x'], gitDir)
  if (result.exitCode !== 0) {
    throw new Error('Release branches could not be fetched.')
  }
  const branchNames = result.stdout.trim().split('\n')
  const withoutRemotePrefix = branchNames.map((b) => b.replace('origin/', ''))
  return getNextReleaseBranch(withoutRemotePrefix)
}
function getNextReleaseBranch (branches) {
  // Translate branch names like `2-0-x` into semver strings (`2.0.0`),
  // find the greatest, and return its major component as a number.
  const candidates = branches.map((b) => b.replace(/-/g, '.').replace('x', '0'))
  let highest = candidates[0]
  for (const candidate of candidates.slice(1)) {
    // Mirror the original reduce: keep the left value only when it is
    // strictly greater than the right.
    if (!semver.gt(highest, candidate)) {
      highest = candidate
    }
  }
  return parseInt(highest.split('.')[0], 10)
}
determineNextMajorForMaster().then(console.info).catch((err) => {
console.error(err)
process.exit(1)
})

View File

@@ -1,76 +0,0 @@
#!/usr/bin/env python
import json
import os
import re
import sys
REQUESTS_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..',
'vendor', 'requests'))
sys.path.append(os.path.join(REQUESTS_DIR, 'build', 'lib'))
sys.path.append(os.path.join(REQUESTS_DIR, 'build', 'lib.linux-x86_64-2.7'))
import requests
GITHUB_URL = 'https://api.github.com'
GITHUB_UPLOAD_ASSET_URL = 'https://uploads.github.com'
class GitHub:
  """Minimal GitHub REST client whose endpoints are built by attribute
  chaining (see _Callable/_Executable below)."""

  def __init__(self, access_token):
    self._authorization = 'token %s' % access_token
    # Matches the release-asset upload endpoint, which is served from a
    # different host than the rest of the API.
    pattern = '^/repos/{0}/{0}/releases/{1}/assets$'.format('[^/]+', '[0-9]+')
    self._releases_upload_api_pattern = re.compile(pattern)

  def __getattr__(self, attr):
    # Start a chained path: gh.repos -> _Callable for '/repos'.
    return _Callable(self, '/%s' % attr)

  def send(self, method, path, **kw):
    # Issue an HTTP request against the GitHub API and return the parsed
    # JSON body; raises when the response carries an error `message`.
    if not 'headers' in kw:
      kw['headers'] = dict()
    headers = kw['headers']
    headers['Authorization'] = self._authorization
    headers['Accept'] = 'application/vnd.github.manifold-preview'
    # Switch to a different domain for the releases uploading API.
    if self._releases_upload_api_pattern.match(path):
      url = '%s%s' % (GITHUB_UPLOAD_ASSET_URL, path)
    else:
      url = '%s%s' % (GITHUB_URL, path)
    # Data are sent in JSON format.
    if 'data' in kw:
      kw['data'] = json.dumps(kw['data'])
    r = getattr(requests, method)(url, **kw).json()
    if 'message' in r:
      raise Exception(json.dumps(r, indent=2, separators=(',', ': ')))
    return r
class _Executable:
  """Terminal of an attribute chain: a callable bound to one HTTP method
  and one API path that performs the request when invoked."""

  def __init__(self, gh, method, path):
    self._gh = gh
    self._method = method
    self._path = path

  def __call__(self, **kw):
    # Delegate to the owning GitHub client, forwarding keyword arguments
    # (headers, data, ...) straight through to the HTTP layer.
    return self._gh.send(self._method, self._path, **kw)
class _Callable(object):
  """Builds API paths by attribute access and calls, e.g.
  gh.repos('owner', 'repo').releases -> '/repos/owner/repo/releases'."""

  def __init__(self, gh, name):
    self._gh = gh
    self._name = name

  def __call__(self, *args):
    # Positional arguments are appended as path segments; calling with no
    # arguments is a no-op that returns self.
    if len(args) == 0:
      return self
    name = '%s/%s' % (self._name, '/'.join([str(arg) for arg in args]))
    return _Callable(self._gh, name)

  def __getattr__(self, attr):
    # HTTP verb attributes end the chain with an executable request; any
    # other attribute extends the path by one segment.
    if attr in ['get', 'put', 'post', 'patch', 'delete']:
      return _Executable(self._gh, attr, self._name)
    name = '%s/%s' % (self._name, attr)
    return _Callable(self._gh, name)

View File

@@ -2,6 +2,7 @@
import atexit
import contextlib
import datetime
import errno
import platform
import re
@@ -87,7 +88,7 @@ def download(text, url, path):
downloaded_size = 0
block_size = 128
ci = os.environ.get('CI') == '1'
ci = os.environ.get('CI') is not None
while True:
buf = web_file.read(block_size)
@@ -287,3 +288,67 @@ def update_node_modules(dirname, env=None):
pass
else:
execute_stdout(args, env)
def clean_parse_version(v):
  # Parse only the release portion of a version string, discarding any
  # prerelease suffix: '1.2.3-beta.4' is parsed as '1.2.3'.
  return parse_version(v.split("-")[0])
def is_stable(v):
  # A stable version is a bare `major.minor.patch` string — exactly two
  # dots and no prerelease suffix.
  return v.count(".") == 2
def is_beta(v):
  # A beta prerelease carries 'beta' somewhere in its version string.
  return v.find('beta') != -1
def is_nightly(v):
  # A nightly prerelease carries 'nightly' somewhere in its version string.
  return v.find('nightly') != -1
def get_nightly_date():
  # Today's date as the YYYYMMDD stamp used in nightly version suffixes.
  return '{0:%Y%m%d}'.format(datetime.datetime.today())
def get_last_major():
  # Ask the node helper for the highest major among remote release branches
  # (script/get-last-major-for-master.js prints that number).
  # NOTE(review): this returns whatever `execute` yields — likely a string —
  # yet callers add integers to it (see get_next_nightly); confirm the type.
  return execute(['node', 'script/get-last-major-for-master.js'])
def get_next_nightly(v):
  # Compute the next nightly version from the current version `v`,
  # shaped as `major.minor.patch-nightly.YYYYMMDD`.
  pv = clean_parse_version(v)
  major = pv[0]; minor = pv[1]; patch = pv[2]
  # A nightly following a stable release bumps the patch component first.
  if (is_stable(v)):
    patch = str(int(pv[2]) + 1)
  # On master, nightlies target the next unreleased major series.
  # NOTE(review): `get_last_major() + 1` assumes an int return from the
  # helper; confirm against `execute`'s return type.
  if execute(['git', 'rev-parse', '--abbrev-ref', 'HEAD']) == "master":
    major = str(get_last_major() + 1)
    minor = '0'
    patch = '0'
  pre = 'nightly.' + get_nightly_date()
  return make_version(major, minor, patch, pre)
def non_empty(thing):
  # True when the string contains anything besides whitespace.
  return bool(thing.strip())
def get_next_beta(v):
  # Compute the next beta for the release series of `v` by inspecting
  # existing `vX.Y.Z-beta.*` git tags.
  pv = clean_parse_version(v)
  tag_pattern = 'v' + pv[0] + '.' + pv[1] + '.' + pv[2] + '-beta.*'
  # NOTE(review): relies on Python 2 `filter` returning a list (len() and
  # indexing below); also assumes `git tag --list` output is sorted so the
  # last entry is the highest beta — confirm.
  tag_list = filter(
    non_empty,
    execute(['git', 'tag', '--list', '-l', tag_pattern]).strip().split('\n')
  )
  # No betas yet for this base version: start at beta.1.
  if len(tag_list) == 0:
    return make_version(pv[0] , pv[1], pv[2], 'beta.1')
  # Otherwise bump the last beta's counter by one.
  lv = parse_version(tag_list[-1])
  return make_version(pv[0] , pv[1], pv[2], 'beta.' + str(int(lv[3]) + 1))
def get_next_stable_from_pre(v):
  # Promote a prerelease to stable by dropping its suffix:
  # '1.2.3-beta.4' -> '1.2.3'.
  pv = clean_parse_version(v)
  return make_version(pv[0], pv[1], pv[2])
def get_next_stable_from_stable(v):
  # The stable release after a stable release bumps the patch component:
  # '1.2.3' -> '1.2.4'.
  pv = clean_parse_version(v)
  next_patch = str(int(pv[2]) + 1)
  return make_version(pv[0], pv[1], next_patch)
def make_version(major, minor, patch, pre = None):
  # Join version components into 'major.minor.patch', appending '-<pre>'
  # when a prerelease tag is supplied.
  base = '.'.join((major, minor, patch))
  if pre is None:
    return base
  return base + '-' + pre

View File

@@ -1,10 +1,10 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
require('colors')
const args = require('minimist')(process.argv.slice(2), {
boolean: ['automaticRelease', 'notesOnly', 'stable']
})
const assert = require('assert')
const ciReleaseBuild = require('./ci-release-build')
const { execSync } = require('child_process')
const fail = '\u2717'.red
@@ -15,13 +15,13 @@ const path = require('path')
const pkg = require('../package.json')
const readline = require('readline')
const versionType = args._[0]
const targetRepo = versionType === 'nightly' ? 'nightlies' : 'electron'
// TODO (future) automatically determine version based on conventional commits
// via conventional-recommended-bump
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
if (!versionType && !args.notesOnly) {
console.log(`Usage: prepare-release versionType [major | minor | patch | beta]` +
console.log(`Usage: prepare-release versionType [stable | beta | nightly]` +
` (--stable) (--notesOnly) (--automaticRelease) (--branch)`)
process.exit(1)
}
@@ -30,13 +30,12 @@ const github = new GitHub()
const gitDir = path.resolve(__dirname, '..')
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
function getNewVersion (dryRun) {
console.log(`Bumping for new "${versionType}" version.`)
let bumpScript = path.join(__dirname, 'bump-version.py')
let scriptArgs = [bumpScript, `--bump ${versionType}`]
if (args.stable) {
scriptArgs.push('--stable')
async function getNewVersion (dryRun) {
if (!dryRun) {
console.log(`Bumping for new "${versionType}" version.`)
}
let bumpScript = path.join(__dirname, 'bump-version.py')
let scriptArgs = [bumpScript, '--bump', versionType]
if (dryRun) {
scriptArgs.push('--dry-run')
}
@@ -50,6 +49,7 @@ function getNewVersion (dryRun) {
return newVersion
} catch (err) {
console.log(`${fail} Could not bump version, error was:`, err)
throw err
}
}
@@ -71,10 +71,13 @@ async function getCurrentBranch (gitDir) {
}
async function getReleaseNotes (currentBranch) {
if (versionType === 'nightly') {
return 'Nightlies do not get release notes, please compare tags for info'
}
console.log(`Generating release notes for ${currentBranch}.`)
let githubOpts = {
owner: 'electron',
repo: 'electron',
repo: targetRepo,
base: `v${pkg.version}`,
head: currentBranch
}
@@ -136,11 +139,11 @@ async function getReleaseNotes (currentBranch) {
async function createRelease (branchToTarget, isBeta) {
let releaseNotes = await getReleaseNotes(branchToTarget)
let newVersion = getNewVersion()
let newVersion = await getNewVersion()
await tagRelease(newVersion)
const githubOpts = {
owner: 'electron',
repo: 'electron'
repo: targetRepo
}
console.log(`Checking for existing draft release.`)
let releases = await github.repos.getReleases(githubOpts)
@@ -158,17 +161,24 @@ async function createRelease (branchToTarget, isBeta) {
githubOpts.draft = true
githubOpts.name = `electron ${newVersion}`
if (isBeta) {
githubOpts.body = `Note: This is a beta release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the beta tag and can be installed via npm install electron@beta, ` +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes}`
if (newVersion.indexOf('nightly') > 0) {
githubOpts.body = `Note: This is a nightly release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the nightly tag and can be installed via npm install electron@nightly, ` +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes}`
} else {
githubOpts.body = `Note: This is a beta release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the beta tag and can be installed via npm install electron@beta, ` +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes}`
}
githubOpts.name = `${githubOpts.name}`
githubOpts.prerelease = true
} else {
githubOpts.body = releaseNotes
}
githubOpts.tag_name = newVersion
githubOpts.target_commitish = branchToTarget
githubOpts.target_commitish = newVersion.indexOf('nightly') !== -1 ? 'master' : branchToTarget
await github.repos.createRelease(githubOpts)
.catch(err => {
console.log(`${fail} Error creating new release: `, err)
@@ -209,7 +219,7 @@ async function tagRelease (version) {
}
async function verifyNewVersion () {
let newVersion = getNewVersion(true)
let newVersion = await getNewVersion(true)
let response
if (args.automaticRelease) {
response = 'y'
@@ -237,26 +247,34 @@ async function promptForVersion (version) {
})
}
// function to determine if there have been commits to master since the last release
async function changesToRelease () {
let lastCommitWasRelease = new RegExp(`^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$`, 'g')
let lastCommit = await GitProcess.exec(['log', '-n', '1', `--pretty=format:'%s'`], gitDir)
return !lastCommitWasRelease.test(lastCommit.stdout)
}
async function prepareRelease (isBeta, notesOnly) {
if (args.automaticRelease && (pkg.version.indexOf('beta') === -1 ||
versionType !== 'beta')) {
console.log(`${fail} Automatic release is only supported for beta releases`)
process.exit(1)
}
let currentBranch
if (args.branch) {
currentBranch = args.branch
if (args.dryRun) {
let newVersion = await getNewVersion(true)
console.log(newVersion)
} else {
currentBranch = await getCurrentBranch(gitDir)
}
if (notesOnly) {
let releaseNotes = await getReleaseNotes(currentBranch)
console.log(`Draft release notes are: \n${releaseNotes}`)
} else {
await verifyNewVersion()
await createRelease(currentBranch, isBeta)
await pushRelease(currentBranch)
await runReleaseBuilds(currentBranch)
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(gitDir)
if (notesOnly) {
let releaseNotes = await getReleaseNotes(currentBranch)
console.log(`Draft release notes are: \n${releaseNotes}`)
} else {
const changes = await changesToRelease(currentBranch)
if (changes) {
await verifyNewVersion()
await createRelease(currentBranch, isBeta)
await pushRelease(currentBranch)
await runReleaseBuilds(currentBranch)
} else {
console.log(`There are no new changes to this branch since the last release, aborting release.`)
process.exit(1)
}
}
}
}

View File

@@ -3,10 +3,15 @@ const fs = require('fs')
const path = require('path')
const childProcess = require('child_process')
const GitHubApi = require('github')
const {GitProcess} = require('dugite')
const request = require('request')
const assert = require('assert')
const rootPackageJson = require('../package.json')
if (!process.env.ELECTRON_NPM_OTP) {
console.error('Please set ELECTRON_NPM_OTP')
process.exit(1)
}
const github = new GitHubApi({
// debug: true,
headers: { 'User-Agent': 'electron-npm-publisher' },
@@ -68,7 +73,7 @@ new Promise((resolve, reject) => {
return github.repos.getReleases({
owner: 'electron',
repo: 'electron'
repo: rootPackageJson.version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
})
.then((releases) => {
@@ -103,8 +108,17 @@ new Promise((resolve, reject) => {
})
})
})
.then((release) => {
npmTag = release.prerelease ? 'beta' : 'latest'
.then(async (release) => {
if (release.tag_name.indexOf('nightly') > 0) {
const currentBranch = await getCurrentBranch()
if (currentBranch === 'master') {
npmTag = 'nightly'
} else {
npmTag = `nightly-${currentBranch}`
}
} else {
npmTag = release.prerelease ? 'beta' : 'latest'
}
})
.then(() => childProcess.execSync('npm pack', { cwd: tempDir }))
.then(() => {
@@ -115,13 +129,29 @@ new Promise((resolve, reject) => {
env: Object.assign({}, process.env, { electron_config_cache: tempDir }),
cwd: tempDir
})
const checkVersion = childProcess.execSync(`${path.join(tempDir, 'node_modules', '.bin', 'electron')} -v`)
assert.ok((`v${rootPackageJson.version}`.indexOf(checkVersion.toString().trim()) === 0), `Version is correct`)
resolve(tarballPath)
})
})
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag}`))
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag} --otp=${process.env.ELECTRON_NPM_OTP}`))
.catch((err) => {
console.error(`Error: ${err}`)
process.exit(1)
})
// Determine the branch currently checked out in this repository.
// Logs progress; exits the process if git fails.
async function getCurrentBranch () {
  const repoDir = path.resolve(__dirname, '..')
  console.log(`Determining current git branch`)
  const result = await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], repoDir)
  if (result.exitCode !== 0) {
    const error = GitProcess.parseError(result.stderr)
    console.log(`Could not get details for the current branch,
      error was ${result.stderr}`, error)
    process.exit(1)
  }
  const branchName = result.stdout.trim()
  console.log(`Successfully determined current git branch is ` +
    `${branchName}`)
  return branchName
}

View File

@@ -0,0 +1,108 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
require('colors')
const args = require('minimist')(process.argv.slice(2), {
  // `--tag` carries a value (e.g. `--tag v2.0.10`). Declaring it as a boolean
  // made minimist set args.tag === true and push the tag string into the
  // positional args, so the cleanup functions received `true` as the tag.
  string: ['tag']
})
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
const GitHub = require('github')
const path = require('path')
const github = new GitHub()
const gitDir = path.resolve(__dirname, '..')
github.authenticate({
type: 'token',
token: process.env.ELECTRON_GITHUB_TOKEN
})
// Find the most recent "Bump <tag>" commit and return its hash and subject.
// The original piped `{hash: %H, message: '%s'}` into JSON.parse, which always
// throws (unquoted keys / single quotes are not valid JSON); instead emit
// "<hash> <subject>" and split on the first space.
function getLastBumpCommit (tag) {
  const data = execSync(`git log -n1 --grep "Bump ${tag}" --format="format:%H %s"`).toString().trim()
  const sep = data.indexOf(' ')
  if (sep === -1) {
    throw new Error(`Could not find a bump commit for tag "${tag}"`)
  }
  return { hash: data.slice(0, sep), message: data.slice(sep + 1) }
}
// Return the name of the branch checked out in `gitDir`; exit on failure.
async function getCurrentBranch (gitDir) {
  const result = await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], gitDir)
  if (result.exitCode !== 0) {
    const error = GitProcess.parseError(result.stderr)
    console.error(`Couldn't get current branch: `, error)
    process.exit(1)
  }
  return result.stdout.trim()
}
// Revert the last "Bump <tag>" commit and push the revert to origin.
async function revertBumpCommit (tag) {
  // BUG FIX: getCurrentBranch is async and takes the git dir; without `await`
  // the push target below was `HEAD:[object Promise]`.
  const branch = await getCurrentBranch(gitDir)
  const commitToRevert = getLastBumpCommit(tag).hash
  await GitProcess.exec(['revert', commitToRevert], gitDir)
  const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], gitDir)
  if (pushDetails.exitCode === 0) {
    console.log(`Successfully reverted release commit.`)
  } else {
    const error = GitProcess.parseError(pushDetails.stderr)
    console.error(`Failed to push release commit: `, error)
    process.exit(1)
  }
}
// Delete the GitHub draft release matching `tag` from `targetRepo`.
// Refuses to touch a release that has already been published.
async function deleteDraft (tag, targetRepo) {
  try {
    const result = await github.repos.getReleaseByTag({
      owner: 'electron',
      repo: targetRepo,
      tag
    })
    // This github client wraps API payloads in `.data` (see `pr.data` /
    // `comments.data` usage elsewhere in these scripts); `result.draft` and
    // `result.id` were always undefined.
    if (!result.data.draft) {
      console.log(`Published releases cannot be deleted.`)
      process.exit(1)
    } else {
      await github.repos.deleteRelease({
        owner: 'electron',
        repo: targetRepo,
        release_id: result.data.id
      })
    }
    console.log(`Successfully deleted draft with tag ${tag} from ${targetRepo}`)
  } catch (err) {
    console.error(`Couldn't delete draft with tag ${tag} from ${targetRepo}: `, err)
    process.exit(1)
  }
}
// Delete the git tag `tag` from `targetRepo` via the GitHub refs API.
async function deleteTag (tag, targetRepo) {
  try {
    await github.gitdata.deleteReference({
      owner: 'electron',
      repo: targetRepo,
      // The Git references API expects a qualified ref such as
      // `tags/v2.0.10`; a bare tag name is rejected with a 404/422.
      ref: `tags/${tag}`
    })
    console.log(`Successfully deleted tag ${tag} from ${targetRepo}`)
  } catch (err) {
    console.log(`Couldn't delete tag ${tag} from ${targetRepo}: `, err)
    process.exit(1)
  }
}
// Clean up after a failed release: delete the draft release and tag (from
// the nightlies repo too, for nightly builds) and revert the bump commit.
async function cleanReleaseArtifacts () {
  const tag = args.tag
  // BUG FIX: pass the tag through so getLastBumpCommit greps for
  // "Bump <tag>" rather than "Bump undefined".
  const lastBumpCommit = getLastBumpCommit(tag).message
  // BUG FIX: the original tested `indexOf('nightly' > 0)`, i.e.
  // `indexOf(false)` === -1, which is truthy — every release was treated
  // as a nightly.
  if (lastBumpCommit.indexOf('nightly') > 0) {
    await deleteDraft(tag, 'nightlies')
    await deleteTag(tag, 'nightlies')
  } else {
    await deleteDraft(tag, 'electron')
  }
  // The tag itself always lives on the main electron repo as well.
  await deleteTag(tag, 'electron')
  await revertBumpCommit(tag)
  console.log('Failed release artifact cleanup complete')
}
// Surface rejections instead of leaving a floating promise (an error in any
// await above would otherwise be an unhandled rejection and a 0 exit code).
cleanReleaseArtifacts().catch((err) => {
  console.error('Release artifact cleanup failed: ', err)
  process.exit(1)
})

View File

@@ -0,0 +1,478 @@
const { GitProcess } = require('dugite')
const Entities = require('html-entities').AllHtmlEntities
const fetch = require('node-fetch')
const fs = require('fs')
const GitHub = require('github')
const path = require('path')
const semver = require('semver')
const CACHE_DIR = path.resolve(__dirname, '.cache')
// Fill this with tags to ignore if you are generating release notes for older
// versions
//
// E.g. ['v3.0.0-beta.1'] to generate the release notes for 3.0.0-beta.1 :) from
// the current 3-0-x branch
const EXCLUDE_TAGS = []
const entities = new Entities()
const github = new GitHub()
const gitDir = path.resolve(__dirname, '..', '..')
github.authenticate({ type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN })
let currentBranch
const semanticMap = new Map()
for (const line of fs.readFileSync(path.resolve(__dirname, 'legacy-pr-semantic-map.csv'), 'utf8').split('\n')) {
if (!line) continue
const bits = line.split(',')
if (bits.length !== 2) continue
semanticMap.set(bits[0], bits[1])
}
// Name of the branch currently checked out; memoized in the module-level
// `currentBranch` since the branch cannot change mid-run.
const getCurrentBranch = async () => {
  if (currentBranch) return currentBranch
  const details = await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  currentBranch = details.stdout.trim()
  return currentBranch
}
// The commit at which `branchName` diverged from master (its merge-base).
const getBranchOffPoint = async (branchName) => {
  const details = await GitProcess.exec(['merge-base', branchName, 'master'], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  return details.stdout.trim()
}
// All tags reachable from `branchName`, minus any explicitly excluded ones.
const getTagsOnBranch = async (branchName) => {
  const details = await GitProcess.exec(['tag', '--merged', branchName], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  // `''.split('\n')` yields [''] — drop empty entries so a ref with no tags
  // produces [] instead of a phantom empty-string "tag".
  return details.stdout.trim().split('\n')
    .filter(tag => tag.length !== 0 && !EXCLUDE_TAGS.includes(tag))
}
const memLastKnownRelease = new Map()
// Latest release tag cut from `branchName` itself, or null when every tag
// on the branch was inherited from its branch-off point (no release yet).
const getLastKnownReleaseOnBranch = async (branchName) => {
  if (memLastKnownRelease.has(branchName)) {
    return memLastKnownRelease.get(branchName)
  }
  const tags = await getTagsOnBranch(branchName)
  if (tags.length === 0) {
    throw new Error(`Branch ${branchName} has no tags, we have no idea what the last release was`)
  }
  const offPoint = await getBranchOffPoint(branchName)
  const inheritedTags = await getTagsOnBranch(offPoint)
  if (inheritedTags.length >= tags.length) {
    // Every tag predates the branch — no release on this branch
    return null
  }
  // The newest tag is the latest release on this branch.
  const latest = tags[tags.length - 1]
  memLastKnownRelease.set(branchName, latest)
  return latest
}
// All remote branches, excluding the symbolic HEAD pointer entry.
const getBranches = async () => {
  const details = await GitProcess.exec(['branch', '--remote'], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  return details.stdout.trim()
    .split('\n')
    .map(line => line.trim())
    .filter(branch => branch !== 'origin/HEAD -> origin/master')
}
// Turn a release-branch name like "origin/3-0-x" into a comparable semver
// string ("3.0.0"): drop the remote prefix, zero the patch wildcard, and
// swap dashes for dots.
const semverify = (v) => {
  const bare = v.replace(/^origin\//, '')
  return bare.replace('x', '0').replace(/-/g, '.')
}
// Highest-versioned remote release branch (matching N-M-x) other than the
// branch currently checked out, or null when none exists.
const getLastReleaseBranch = async () => {
  const current = await getCurrentBranch()
  const candidates = (await getBranches())
    .filter(branch => /^origin\/[0-9]+-[0-9]+-x$/.test(branch))
    .filter(branch => branch !== current && branch !== `origin/${current}`)
  let latest = null
  for (const candidate of candidates) {
    if (latest === null || semver.gt(semverify(candidate), semverify(latest))) {
      latest = candidate
    }
  }
  return latest
}
// True when `commit` is already contained in the release tagged `tag`.
const commitBeforeTag = async (commit, tag) => {
  const details = await GitProcess.exec(['tag', '--contains', commit], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  return details.stdout.split('\n').includes(tag)
}
// Commits on the current branch since `point` (exclusive).
const getCommitsMergedIntoCurrentBranchSincePoint = (point) => getCommitsBetween(point, 'HEAD')
// Hashes reachable from `point2` but not from `point1` (git `point1..point2`).
const getCommitsBetween = async (point1, point2) => {
  const details = await GitProcess.exec(['rev-list', `${point1}..${point2}`], gitDir)
  if (details.exitCode !== 0) {
    throw GitProcess.parseError(details.stderr)
  }
  // Drop the empty string produced by splitting an empty rev-list, which
  // would otherwise be treated downstream as a (bogus) commit hash.
  return details.stdout.trim().split('\n').filter(hash => hash.length !== 0)
}
// Prefix GitHub puts on merge-commit titles surfaced by the branch_commits page.
const TITLE_PREFIX = 'Merged Pull Request: '
// Map a commit hash back to the PR it was merged from by scraping
// github.com's branch_commits page. Returns { mergedFrom: '#1234',
// prTitle: '...' } or null when the commit cannot be tied to a PR.
// NOTE(review): this parses undocumented HTML; it will silently break if
// GitHub changes that markup.
const getCommitDetails = async (commitHash) => {
  const commitInfo = await (await fetch(`https://github.com/electron/electron/branch_commits/${commitHash}`)).text()
  // The PR number is the text of the last anchor before the first '</a>)'.
  const bits = commitInfo.split('</a>)')[0].split('>')
  const prIdent = bits[bits.length - 1].trim()
  if (!prIdent || commitInfo.indexOf('href="/electron/electron/pull') === -1) {
    console.warn(`WARNING: Could not track commit "${commitHash}" to a pull request, it may have been committed directly to the branch`)
    return null
  }
  const title = commitInfo.split('title="')[1].split('"')[0]
  if (!title.startsWith(TITLE_PREFIX)) {
    console.warn(`WARNING: Unknown PR title for commit "${commitHash}" in PR "${prIdent}"`)
    return null
  }
  return {
    mergedFrom: prIdent,
    // Titles arrive HTML-escaped; decode entities before use.
    prTitle: entities.decode(title.substr(TITLE_PREFIX.length))
  }
}
// Run `fn` over every item with at most `concurrent` invocations in flight.
// Results come back in the same order as `items`: workers pop from the tail
// of the queue, and the queue length at pop time equals the popped item's
// original index, which selects the result slot.
const doWork = async (items, fn, concurrent = 5) => {
  const results = []
  const queue = [].concat(items)
  let progress = 1
  const worker = async () => {
    while (queue.length !== 0) {
      console.log(`Running ${progress}/${items.length}`)
      progress += 1
      const item = queue.pop()
      const slot = queue.length
      results[slot] = await fn(item)
    }
  }
  const workers = []
  for (let w = 0; w < concurrent; w += 1) {
    workers.push(worker())
  }
  await Promise.all(workers)
  return results
}
// Global registry of Note instances keyed by their normalized PR title.
const notes = new Map()
// Categories a release-note entry can fall into.
const NoteType = {
  FIX: 'fix',
  FEATURE: 'feature',
  BREAKING_CHANGE: 'breaking-change',
  DOCUMENTATION: 'doc',
  OTHER: 'other',
  UNKNOWN: 'unknown'
}
// A single release-note entry derived from a merged PR. Instances are
// memoized in the module-level `notes` map by their "true"
// (backport-prefix-stripped) title so a later "Revert ..." PR can find and
// mark the original note as reverted.
class Note {
  constructor (trueTitle, prNumber, ignoreIfInVersion) {
    // Self bindings
    this.guessType = this.guessType.bind(this)
    this.fetchPrInfo = this.fetchPrInfo.bind(this)
    this._getPr = this._getPr.bind(this)
    if (!trueTitle.trim()) console.error(prNumber)
    // Branch name; a backport already released on this branch is ignored.
    this._ignoreIfInVersion = ignoreIfInVersion
    this.reverted = false
    if (notes.has(trueTitle)) {
      console.warn(`Duplicate PR trueTitle: "${trueTitle}", "${prNumber}" this might cause weird reversions (this would be RARE)`)
    }
    // Memoize
    notes.set(trueTitle, this)
    this.originalTitle = trueTitle
    this.title = trueTitle
    this.prNumber = prNumber
    this.stripColon = true
    // For recognized conventional-commit prefixes, drop the "type:" prefix
    // from the display title. guessType() flips stripColon off for titles
    // categorized by label or by the legacy CSV map instead of by prefix.
    if (this.guessType() !== NoteType.UNKNOWN && this.stripColon) {
      this.title = trueTitle.split(':').slice(1).join(':').trim()
    }
  }
  // Categorize this note by title prefix, then PR labels, then the legacy
  // PR-number-to-type CSV map; NoteType.UNKNOWN when nothing matches.
  guessType () {
    if (this.originalTitle.startsWith('fix:') ||
      this.originalTitle.startsWith('Fix:')) return NoteType.FIX
    if (this.originalTitle.startsWith('feat:')) return NoteType.FEATURE
    // NOTE(review): 'ci' below has no trailing colon, so any title starting
    // with the letters "ci" matches — presumably intentional to catch
    // variants like "ci:" / "ci(...)"; confirm before tightening.
    if (this.originalTitle.startsWith('spec:') ||
      this.originalTitle.startsWith('build:') ||
      this.originalTitle.startsWith('test:') ||
      this.originalTitle.startsWith('chore:') ||
      this.originalTitle.startsWith('deps:') ||
      this.originalTitle.startsWith('refactor:') ||
      this.originalTitle.startsWith('tools:') ||
      this.originalTitle.startsWith('vendor:') ||
      this.originalTitle.startsWith('perf:') ||
      this.originalTitle.startsWith('style:') ||
      this.originalTitle.startsWith('ci')) return NoteType.OTHER
    if (this.originalTitle.startsWith('doc:') ||
      this.originalTitle.startsWith('docs:')) return NoteType.DOCUMENTATION
    // No prefix matched: keep the full title when categorizing by label/CSV.
    this.stripColon = false
    if (this.pr && this.pr.data.labels.find(label => label.name === 'semver/breaking-change')) {
      return NoteType.BREAKING_CHANGE
    }
    // FIXME: Backported features will not be picked up by this
    if (this.pr && this.pr.data.labels.find(label => label.name === 'semver/nonbreaking-feature')) {
      return NoteType.FEATURE
    }
    const n = this.prNumber.replace('#', '')
    if (semanticMap.has(n)) {
      switch (semanticMap.get(n)) {
        case 'feat':
          return NoteType.FEATURE
        case 'fix':
          return NoteType.FIX
        case 'breaking-change':
          return NoteType.BREAKING_CHANGE
        case 'doc':
          return NoteType.DOCUMENTATION
        case 'build':
        case 'vendor':
        case 'refactor':
        case 'spec':
          return NoteType.OTHER
        default:
          throw new Error(`Unknown semantic mapping: ${semanticMap.get(n)}`)
      }
    }
    return NoteType.UNKNOWN
  }
  // Fetch PR `n` from the GitHub API, caching the response on disk under
  // CACHE_DIR so repeated runs don't re-hit the API.
  async _getPr (n) {
    const cachePath = path.resolve(CACHE_DIR, n)
    if (fs.existsSync(cachePath)) {
      return JSON.parse(fs.readFileSync(cachePath, 'utf8'))
    } else {
      try {
        const pr = await github.pullRequests.get({
          number: n,
          owner: 'electron',
          repo: 'electron'
        })
        fs.writeFileSync(cachePath, JSON.stringify({ data: pr.data }))
        return pr
      } catch (err) {
        console.info('#### FAILED:', `#${n}`)
        throw err
      }
    }
  }
  // Load this note's PR, and mark the note reverted when trop already
  // backported the PR into a previous release on `_ignoreIfInVersion`.
  async fetchPrInfo () {
    if (this.pr) return
    const n = this.prNumber.replace('#', '')
    this.pr = await this._getPr(n)
    if (this.pr.data.labels.find(label => label.name === `merged/${this._ignoreIfInVersion.replace('origin/', '')}`)) {
      // This means we probably backported this PR, let's try figure out what
      // the corresponding backport PR would be by searching through comments
      // for trop
      let comments
      const cacheCommentsPath = path.resolve(CACHE_DIR, `${n}-comments`)
      if (fs.existsSync(cacheCommentsPath)) {
        comments = JSON.parse(fs.readFileSync(cacheCommentsPath, 'utf8'))
      } else {
        comments = await github.issues.getComments({
          number: n,
          owner: 'electron',
          repo: 'electron',
          per_page: 100
        })
        fs.writeFileSync(cacheCommentsPath, JSON.stringify({ data: comments.data }))
      }
      const tropComment = comments.data.find(
        c => (
          new RegExp(`We have automatically backported this PR to "${this._ignoreIfInVersion.replace('origin/', '')}", please check out #[0-9]+`)
        ).test(c.body)
      )
      if (tropComment) {
        // The backport PR number is the last "#<n>" in trop's comment.
        const commentBits = tropComment.body.split('#')
        const tropPrNumber = commentBits[commentBits.length - 1]
        const tropPr = await this._getPr(tropPrNumber)
        if (tropPr.data.merged && tropPr.data.merge_commit_sha) {
          if (await commitBeforeTag(tropPr.data.merge_commit_sha, await getLastKnownReleaseOnBranch(this._ignoreIfInVersion))) {
            this.reverted = true
            console.log('PR', this.prNumber, 'was backported to a previous version, ignoring from notes')
          }
        }
      }
    }
  }
}
// Look up a memoized Note by its normalized PR title.
Note.findByTrueTitle = function (trueTitle) {
  return notes.get(trueTitle)
}
// Accumulates categorized Notes for a set of commits and renders them as a
// markdown document. `ignoreIfInVersion` is the previous release branch whose
// already-shipped backports should be excluded from these notes.
class ReleaseNotes {
  constructor (ignoreIfInVersion) {
    this._ignoreIfInVersion = ignoreIfInVersion
    this._handledPrs = new Set()
    this._revertedPrs = new Set()
    this.other = []
    this.docs = []
    this.fixes = []
    this.features = []
    this.breakingChanges = []
    this.unknown = []
  }
  // Resolve each commit to its PR, build a Note, and bucket it by guessed
  // type. Lookups run through doWork with a concurrency of 20.
  async parseCommits (commitHashes) {
    await doWork(commitHashes, async (commit) => {
      const info = await getCommitDetails(commit)
      if (!info) return
      // Only handle each PR once
      if (this._handledPrs.has(info.mergedFrom)) return
      this._handledPrs.add(info.mergedFrom)
      // Strip the trop backport prefix
      const trueTitle = info.prTitle.replace(/^Backport \([0-9]+-[0-9]+-x\) - /, '')
      if (this._revertedPrs.has(trueTitle)) return
      // Handle PRs that revert other PRs
      if (trueTitle.startsWith('Revert "')) {
        // Extract the quoted original title from `Revert "<title>"`.
        const revertedTrueTitle = trueTitle.substr(8, trueTitle.length - 9)
        this._revertedPrs.add(revertedTrueTitle)
        const existingNote = Note.findByTrueTitle(revertedTrueTitle)
        if (existingNote) {
          existingNote.reverted = true
        }
        return
      }
      // Add a note for this PR
      const note = new Note(trueTitle, info.mergedFrom, this._ignoreIfInVersion)
      try {
        await note.fetchPrInfo()
      } catch (err) {
        console.error(commit, info)
        throw err
      }
      switch (note.guessType()) {
        case NoteType.FIX:
          this.fixes.push(note)
          break
        case NoteType.FEATURE:
          this.features.push(note)
          break
        case NoteType.BREAKING_CHANGE:
          this.breakingChanges.push(note)
          break
        case NoteType.OTHER:
          this.other.push(note)
          break
        case NoteType.DOCUMENTATION:
          this.docs.push(note)
          break
        case NoteType.UNKNOWN:
        default:
          this.unknown.push(note)
          break
      }
    }, 20)
  }
  // Render one section: non-reverted notes, alphabetized case-insensitively
  // by title, as markdown bullets; placeholder text when the section is empty.
  list (notes) {
    if (notes.length === 0) {
      return '_There are no items in this section this release_'
    }
    return notes
      .filter(note => !note.reverted)
      .sort((a, b) => a.title.toLowerCase().localeCompare(b.title.toLowerCase()))
      .map((note) => `* ${note.title.trim()} ${note.prNumber}`).join('\n')
  }
  // The full markdown document. The "Unknown" section appears only when
  // there are uncategorized notes left to triage before publishing.
  render () {
    return `
# Release Notes
## Breaking Changes
${this.list(this.breakingChanges)}
## Features
${this.list(this.features)}
## Fixes
${this.list(this.fixes)}
## Other Changes (E.g. Internal refactors or build system updates)
${this.list(this.other)}
## Documentation Updates
Some documentation updates, fixes and reworks: ${
  this.docs.length === 0
    ? '_None in this release_'
    : this.docs.sort((a, b) => a.prNumber.localeCompare(b.prNumber)).map(note => note.prNumber).join(', ')
}
${this.unknown.filter(n => !n.reverted).length > 0
    ? `## Unknown (fix these before publishing release)
${this.list(this.unknown)}
` : ''}`
  }
}
// Entry point: collect the commits for the upcoming release, build the
// release notes, print them, and fail if any note is uncategorized.
async function main () {
  // Local on-disk cache for PR / comment lookups.
  if (!fs.existsSync(CACHE_DIR)) {
    fs.mkdirSync(CACHE_DIR)
  }
  const lastReleaseBranch = await getLastReleaseBranch()
  const notes = new ReleaseNotes(lastReleaseBranch)
  const lastKnownReleaseInCurrentStream = await getLastKnownReleaseOnBranch(await getCurrentBranch())
  const currentBranchOff = await getBranchOffPoint(await getCurrentBranch())
  const commits = await getCommitsMergedIntoCurrentBranchSincePoint(
    lastKnownReleaseInCurrentStream || currentBranchOff
  )
  if (!lastKnownReleaseInCurrentStream) {
    // This means we are the first release in our stream
    // FIXME: This will not work for minor releases!!!!
    // (reuse lastReleaseBranch from above — re-fetching it was redundant)
    const lastBranchOff = await getBranchOffPoint(lastReleaseBranch)
    commits.push(...await getCommitsBetween(lastBranchOff, currentBranchOff))
  }
  await notes.parseCommits(commits)
  console.log(notes.render())
  const badNotes = notes.unknown.filter(n => !n.reverted).length
  if (badNotes > 0) {
    // BUG FIX: `badNotes` is already a count; `badNotes.length` rendered
    // "undefined" in the error message.
    throw new Error(`You have ${badNotes} unknown release notes, please fix them before releasing`)
  }
}
// Run only when executed directly (`node <script>`), not when required as a
// module; any failure is logged and converted to a non-zero exit code.
if (process.mainModule === module) {
  main().catch((err) => {
    console.error('Error Occurred:', err)
    process.exit(1)
  })
}

View File

@@ -0,0 +1,193 @@
12884,fix
12093,feat
12595,doc
12674,doc
12577,doc
12084,doc
12103,doc
12948,build
12496,feat
13133,build
12651,build
12767,doc
12238,build
12646,build
12373,doc
12723,feat
12202,doc
12504,doc
12669,doc
13044,feat
12746,spec
12617,doc
12532,feat
12619,feat
12118,build
12921,build
13281,doc
12059,feat
12131,doc
12123,doc
12080,build
12904,fix
12562,fix
12122,spec
12817,spec
12254,fix
12999,vendor
13248,vendor
12104,build
12477,feat
12648,refactor
12649,refactor
12650,refactor
12673,refactor
12305,refactor
12168,refactor
12627,refactor
12446,doc
12304,refactor
12615,breaking-change
12135,feat
12155,doc
12975,fix
12501,fix
13065,fix
13089,build
12786,doc
12736,doc
11966,doc
12885,fix
12984,refactor
12187,build
12535,refactor
12538,feat
12190,fix
12139,fix
11328,fix
12828,feat
12614,feat
12546,feat
12647,refactor
12987,build
12900,doc
12389,doc
12387,doc
12232,doc
12742,build
12043,fix
12741,fix
12995,fix
12395,fix
12003,build
12216,fix
12132,fix
12062,fix
12968,doc
12422,doc
12149,doc
13339,build
12044,fix
12327,fix
12180,fix
12263,spec
12153,spec
13055,feat
12113,doc
12067,doc
12882,build
13029,build
13067,doc
12196,build
12797,doc
12013,fix
12507,fix
11607,feat
12837,build
11613,feat
12015,spec
12058,doc
12403,spec
12192,feat
12204,doc
13294,doc
12542,doc
12826,refactor
12781,doc
12157,fix
12319,fix
12188,build
12399,doc
12145,doc
12661,refactor
8953,fix
12037,fix
12186,spec
12397,fix
12040,doc
12886,refactor
12008,refactor
12716,refactor
12750,refactor
12787,refactor
12858,refactor
12140,refactor
12503,refactor
12514,refactor
12584,refactor
12596,refactor
12637,refactor
12660,refactor
12696,refactor
12877,refactor
13030,refactor
12916,build
12896,build
13039,breaking-change
11927,build
12847,doc
12852,doc
12194,fix
12870,doc
12924,fix
12682,doc
12004,refactor
12601,refactor
12998,fix
13105,vendor
12452,doc
12738,fix
12536,refactor
12189,spec
13122,spec
12662,fix
12665,doc
12419,feat
12756,doc
12616,refactor
12679,breaking-change
12000,doc
12372,build
12805,build
12348,fix
12315,doc
12072,doc
12912,doc
12982,fix
12105,doc
12917,spec
12400,doc
12101,feat
12642,build
13058,fix
12913,vendor
13298,vendor
13042,build
11230,feat
11459,feat
12476,vendor
11937,doc
12328,build
12539,refactor
12127,build
12537,build
1 12884 fix
2 12093 feat
3 12595 doc
4 12674 doc
5 12577 doc
6 12084 doc
7 12103 doc
8 12948 build
9 12496 feat
10 13133 build
11 12651 build
12 12767 doc
13 12238 build
14 12646 build
15 12373 doc
16 12723 feat
17 12202 doc
18 12504 doc
19 12669 doc
20 13044 feat
21 12746 spec
22 12617 doc
23 12532 feat
24 12619 feat
25 12118 build
26 12921 build
27 13281 doc
28 12059 feat
29 12131 doc
30 12123 doc
31 12080 build
32 12904 fix
33 12562 fix
34 12122 spec
35 12817 spec
36 12254 fix
37 12999 vendor
38 13248 vendor
39 12104 build
40 12477 feat
41 12648 refactor
42 12649 refactor
43 12650 refactor
44 12673 refactor
45 12305 refactor
46 12168 refactor
47 12627 refactor
48 12446 doc
49 12304 refactor
50 12615 breaking-change
51 12135 feat
52 12155 doc
53 12975 fix
54 12501 fix
55 13065 fix
56 13089 build
57 12786 doc
58 12736 doc
59 11966 doc
60 12885 fix
61 12984 refactor
62 12187 build
63 12535 refactor
64 12538 feat
65 12190 fix
66 12139 fix
67 11328 fix
68 12828 feat
69 12614 feat
70 12546 feat
71 12647 refactor
72 12987 build
73 12900 doc
74 12389 doc
75 12387 doc
76 12232 doc
77 12742 build
78 12043 fix
79 12741 fix
80 12995 fix
81 12395 fix
82 12003 build
83 12216 fix
84 12132 fix
85 12062 fix
86 12968 doc
87 12422 doc
88 12149 doc
89 13339 build
90 12044 fix
91 12327 fix
92 12180 fix
93 12263 spec
94 12153 spec
95 13055 feat
96 12113 doc
97 12067 doc
98 12882 build
99 13029 build
100 13067 doc
101 12196 build
102 12797 doc
103 12013 fix
104 12507 fix
105 11607 feat
106 12837 build
107 11613 feat
108 12015 spec
109 12058 doc
110 12403 spec
111 12192 feat
112 12204 doc
113 13294 doc
114 12542 doc
115 12826 refactor
116 12781 doc
117 12157 fix
118 12319 fix
119 12188 build
120 12399 doc
121 12145 doc
122 12661 refactor
123 8953 fix
124 12037 fix
125 12186 spec
126 12397 fix
127 12040 doc
128 12886 refactor
129 12008 refactor
130 12716 refactor
131 12750 refactor
132 12787 refactor
133 12858 refactor
134 12140 refactor
135 12503 refactor
136 12514 refactor
137 12584 refactor
138 12596 refactor
139 12637 refactor
140 12660 refactor
141 12696 refactor
142 12877 refactor
143 13030 refactor
144 12916 build
145 12896 build
146 13039 breaking-change
147 11927 build
148 12847 doc
149 12852 doc
150 12194 fix
151 12870 doc
152 12924 fix
153 12682 doc
154 12004 refactor
155 12601 refactor
156 12998 fix
157 13105 vendor
158 12452 doc
159 12738 fix
160 12536 refactor
161 12189 spec
162 13122 spec
163 12662 fix
164 12665 doc
165 12419 feat
166 12756 doc
167 12616 refactor
168 12679 breaking-change
169 12000 doc
170 12372 build
171 12805 build
172 12348 fix
173 12315 doc
174 12072 doc
175 12912 doc
176 12982 fix
177 12105 doc
178 12917 spec
179 12400 doc
180 12101 feat
181 12642 build
182 13058 fix
183 12913 vendor
184 13298 vendor
185 13042 build
186 11230 feat
187 11459 feat
188 12476 vendor
189 11937 doc
190 12328 build
191 12539 refactor
192 12127 build
193 12537 build

View File

@@ -1,11 +1,12 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
require('colors')
const args = require('minimist')(process.argv.slice(2))
const assert = require('assert')
const fs = require('fs')
const { execSync } = require('child_process')
const GitHub = require('github')
const { GitProcess } = require('dugite')
const nugget = require('nugget')
const pkg = require('../package.json')
const pkgVersion = `v${pkg.version}`
@@ -15,17 +16,16 @@ const fail = '\u2717'.red
const sumchecker = require('sumchecker')
const temp = require('temp').track()
const { URL } = require('url')
const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
let failureCount = 0
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
const github = new GitHub({
followRedirects: false
})
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
async function getDraftRelease (version, skipValidation) {
let releaseInfo = await github.repos.getReleases({owner: 'electron', repo: 'electron'})
let releaseInfo = await github.repos.getReleases({owner: 'electron', repo: targetRepo})
let drafts
let versionToCheck
if (version) {
@@ -89,15 +89,12 @@ function assetsForVersion (version, validatingRelease) {
`electron-${version}-darwin-x64-dsym.zip`,
`electron-${version}-darwin-x64-symbols.zip`,
`electron-${version}-darwin-x64.zip`,
`electron-${version}-linux-arm-symbols.zip`,
`electron-${version}-linux-arm.zip`,
`electron-${version}-linux-arm64-symbols.zip`,
`electron-${version}-linux-arm64.zip`,
`electron-${version}-linux-armv7l-symbols.zip`,
`electron-${version}-linux-armv7l.zip`,
`electron-${version}-linux-ia32-symbols.zip`,
`electron-${version}-linux-ia32.zip`,
// `electron-${version}-linux-mips64el.zip`,
`electron-${version}-linux-x64-symbols.zip`,
`electron-${version}-linux-x64.zip`,
`electron-${version}-mas-x64-dsym.zip`,
@@ -112,11 +109,9 @@ function assetsForVersion (version, validatingRelease) {
`electron-api.json`,
`electron.d.ts`,
`ffmpeg-${version}-darwin-x64.zip`,
`ffmpeg-${version}-linux-arm.zip`,
`ffmpeg-${version}-linux-arm64.zip`,
`ffmpeg-${version}-linux-armv7l.zip`,
`ffmpeg-${version}-linux-ia32.zip`,
// `ffmpeg-${version}-linux-mips64el.zip`,
`ffmpeg-${version}-linux-x64.zip`,
`ffmpeg-${version}-mas-x64.zip`,
`ffmpeg-${version}-win32-ia32.zip`,
@@ -146,6 +141,8 @@ function s3UrlsForVersion (version) {
}
function checkVersion () {
if (args.skipVersionCheck) return
console.log(`Verifying that app version matches package version ${pkgVersion}.`)
let startScript = path.join(__dirname, 'start.py')
let scriptArgs = ['--version']
@@ -183,11 +180,7 @@ function uploadNodeShasums () {
function uploadIndexJson () {
console.log('Uploading index.json to S3.')
let scriptPath = path.join(__dirname, 'upload-index-json.py')
let scriptArgs = []
if (args.automaticRelease) {
scriptArgs.push('-R')
}
runScript(scriptPath, scriptArgs)
runScript(scriptPath, [pkgVersion])
console.log(`${pass} Done uploading index.json to S3.`)
}
@@ -198,7 +191,7 @@ async function createReleaseShasums (release) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`)
await github.repos.deleteAsset({
owner: 'electron',
repo: 'electron',
repo: targetRepo,
id: existingAssets[0].id
}).catch(err => {
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err)
@@ -217,7 +210,7 @@ async function createReleaseShasums (release) {
async function uploadShasumFile (filePath, fileName, release) {
let githubOpts = {
owner: 'electron',
repo: 'electron',
repo: targetRepo,
id: release.id,
filePath,
name: fileName
@@ -252,7 +245,7 @@ function saveShaSumFile (checksums, fileName) {
async function publishRelease (release) {
let githubOpts = {
owner: 'electron',
repo: 'electron',
repo: targetRepo,
id: release.id,
tag_name: release.tag_name,
draft: false
@@ -279,10 +272,12 @@ async function makeRelease (releaseToValidate) {
let draftRelease = await getDraftRelease()
uploadNodeShasums()
uploadIndexJson()
await createReleaseShasums(draftRelease)
// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true)
await validateReleaseAssets(draftRelease)
await tagLibCC()
await publishRelease(draftRelease)
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
`"npm run publish-to-npm" to publish release to npm.`)
@@ -305,7 +300,7 @@ async function verifyAssets (release) {
let downloadDir = await makeTempDir()
let githubOpts = {
owner: 'electron',
repo: 'electron',
repo: targetRepo,
headers: {
Accept: 'application/octet-stream'
}
@@ -451,4 +446,23 @@ async function validateChecksums (validationArgs) {
`shasums defined in ${validationArgs.shaSumFile}.`)
}
// Create an annotated `electron-<version>` tag in the local
// libchromiumcontent submodule checkout and push it to its remote.
// NOTE(review): this reads `pkg.version` while the rest of the script uses
// `pkgVersion` — presumably the same value; confirm `pkg` is in scope.
async function tagLibCC () {
  const tag = `electron-${pkg.version}`
  const libccDir = path.join(path.resolve(__dirname, '..'), 'vendor', 'libchromiumcontent')
  console.log(`Tagging release ${tag}.`)
  const tagDetails = await GitProcess.exec([ 'tag', '-a', '-m', tag, tag ], libccDir)
  if (tagDetails.exitCode !== 0) {
    console.log(`${fail} Error tagging libchromiumcontent with ${tag}: ` +
      `${tagDetails.stderr}`)
    return
  }
  const pushDetails = await GitProcess.exec(['push', '--tags'], libccDir)
  if (pushDetails.exitCode === 0) {
    console.log(`${pass} Successfully tagged libchromiumcontent with ${tag}.`)
  } else {
    console.log(`${fail} Error pushing libchromiumcontent tag ${tag}: ` +
      `${pushDetails.stderr}`)
  }
}
makeRelease(args.validateRelease)

View File

@@ -8,10 +8,10 @@
# Do NOT CHANGE this if you don't know what you're doing -- see
# https://code.google.com/p/chromium/wiki/UpdatingClang
# Reverting problematic clang rolls is safe, though.
CLANG_REVISION=307486
CLANG_REVISION=308728
# This is incremented when pushing a new build of Clang at the same revision.
CLANG_SUB_REVISION=1
CLANG_SUB_REVISION=3
PACKAGE_VERSION="${CLANG_REVISION}-${CLANG_SUB_REVISION}"

View File

@@ -2,39 +2,49 @@
import os
import sys
import urllib2
from lib.config import PLATFORM, s3_config
from lib.util import electron_gyp, execute, s3put, scoped_cwd
from lib.config import s3_config
from lib.util import s3put, scoped_cwd, safe_mkdir
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'D')
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
# Metadata service that renders the index.json payload for a given version.
BASE_URL = 'https://electron-metadumper.herokuapp.com/?version='

# The version to dump is the sole CLI argument; the auth header comes from
# the environment (validated later in main()).
version = sys.argv[1]
authToken = os.getenv('META_DUMPER_AUTH_HEADER')
def get_content(retry_count=5):
  """Fetch the version metadata for ``version`` from the metadumper service.

  Retries up to ``retry_count`` more times on any failure; once retries are
  exhausted the last exception propagates to the caller.
  """
  try:
    request = urllib2.Request(
      BASE_URL + version,
      headers={"Authorization" : authToken}
    )
    return urllib2.urlopen(
      request
    ).read()
  except Exception:
    if retry_count == 0:
      # Bare `raise` re-raises the active exception with its original
      # traceback; `raise e` would discard that traceback in Python 2.
      raise
    return get_content(retry_count - 1)
def main():
if not authToken or authToken == "":
raise Exception("Please set META_DUMPER_AUTH_HEADER")
# Upload the index.json.
with scoped_cwd(SOURCE_ROOT):
if len(sys.argv) == 2 and sys.argv[1] == '-R':
config = 'R'
else:
config = 'D'
out_dir = os.path.join(SOURCE_ROOT, 'out', config)
if sys.platform == 'darwin':
electron = os.path.join(out_dir, '{0}.app'.format(PRODUCT_NAME),
'Contents', 'MacOS', PRODUCT_NAME)
elif sys.platform == 'win32':
electron = os.path.join(out_dir, '{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(out_dir, PROJECT_NAME)
index_json = os.path.relpath(os.path.join(out_dir, 'index.json'))
execute([electron,
os.path.join('tools', 'dump-version-info.js'),
index_json])
safe_mkdir(OUT_DIR)
index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
new_content = get_content()
with open(index_json, "w") as f:
f.write(new_content)
bucket, access_key, secret_key = s3_config()
s3put(bucket, access_key, secret_key, out_dir, 'atom-shell/dist',
s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
[index_json])

View File

@@ -1,18 +1,23 @@
if (!process.env.CI) require('dotenv-safe').load()
const GitHub = require('github')
const github = new GitHub()
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
if (process.argv.length < 5) {
// Require all four positional arguments; the length check demands
// releaseVersion (argv[5]), so the usage text must list it too.
if (process.argv.length < 6) {
  console.log('Usage: upload-to-github filePath fileName releaseId releaseVersion')
  process.exit(1)
}
let filePath = process.argv[2]
let fileName = process.argv[3]
let releaseId = process.argv[4]
let releaseVersion = process.argv[5]
const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
let githubOpts = {
owner: 'electron',
repo: 'electron',
repo: targetRepo,
id: releaseId,
filePath: filePath,
name: fileName
@@ -34,7 +39,7 @@ function uploadToGitHub () {
console.log(`${fileName} already exists; will delete before retrying upload.`)
github.repos.deleteAsset({
owner: 'electron',
repo: 'electron',
repo: targetRepo,
id: existingAssets[0].id
}).then(uploadToGitHub).catch(uploadToGitHub)
} else {

View File

@@ -1,8 +1,10 @@
#!/usr/bin/env python
import argparse
import datetime
import errno
import hashlib
import json
import os
import shutil
import subprocess
@@ -14,7 +16,6 @@ from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
@@ -35,6 +36,9 @@ PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if args.upload_to_s3:
utcnow = datetime.datetime.utcnow()
args.upload_timestamp = utcnow.strftime('%Y-%m-%d_%H:%M:%S')
if not dist_newer_than_head():
run_python_script('create-dist.py')
@@ -47,56 +51,40 @@ def main():
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for r in releases:
if not r['draft'] and r['tag_name'] == args.version:
release = r
tag_exists = True
break
release = get_release(args.version)
if not release['draft']:
tag_exists = True
if not args.upload_to_s3:
assert release['exists'], 'Release does not exist; cannot upload to GitHub!'
assert tag_exists == args.overwrite, \
'You have to pass --overwrite to overwrite a published release'
if not args.overwrite:
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME),
args.upload_to_s3)
# Upload Electron files.
upload_electron(release, os.path.join(DIST_DIR, DIST_NAME), args)
if get_target_arch() != 'mips64el':
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, SYMBOLS_NAME), args)
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'), args.upload_to_s3)
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'),
args.upload_to_s3)
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, 'electron-api.json'), args)
upload_electron(release, os.path.join(DIST_DIR, 'electron.d.ts'), args)
upload_electron(release, os.path.join(DIST_DIR, DSYM_NAME), args)
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, PDB_NAME), args)
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, ffmpeg), args)
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, chromedriver), args)
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, mksnapshot), args)
if get_target_arch().startswith('arm'):
# Upload the x64 binary for arm/arm64 mksnapshot
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64')
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
args.upload_to_s3)
upload_electron(release, os.path.join(DIST_DIR, mksnapshot), args)
if PLATFORM == 'win32' and not tag_exists and not args.upload_to_s3:
# Upload PDBs to Windows symbol server.
@@ -162,76 +150,26 @@ def dist_newer_than_head():
return dist_time > int(head_time)
def get_text_with_editor(name):
  """Open $EDITOR (default: nano) on a seeded temp file and return whatever
  the user wrote, with '#'-prefixed comment lines stripped out."""
  editor = os.environ.get('EDITOR', 'nano')
  prompt = '\n# Please enter the body of your release note for %s.' \
      % name
  tmp = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
  tmp.write(prompt)
  tmp.close()
  subprocess.call([editor, tmp.name])
  kept = []
  for line in open(tmp.name, 'r'):
    # Keep everything except comment lines the template seeded.
    if len(line) == 0 or line[0] != '#':
      kept.append(line)
  os.unlink(tmp.name)
  return ''.join(kept)
def create_or_get_release_draft(github, releases, tag, tag_exists):
  """Return the existing draft release for ``tag``, or create a new draft.

  If the tag already belongs to a published release, the new draft is parked
  under a placeholder tag so the published release is not clobbered.
  """
  existing = next(
      (r for r in releases if r['draft'] and r['tag_name'] == tag), None)
  if existing is not None:
    return existing
  draft_tag = 'do-not-publish-me' if tag_exists else tag
  return create_release_draft(github, draft_tag)
def create_release_draft(github, tag):
  """Create a draft prerelease on GitHub for ``tag`` and return it.

  On CI the release body is a placeholder; otherwise it is collected
  interactively via $EDITOR. Exits cleanly if the note comes back empty.
  """
  name = '{0} {1} beta'.format(PROJECT_NAME, tag)
  # `dict.has_key` was removed in Python 3; the `in` operator is the
  # equivalent membership test and works identically in Python 2.
  if 'CI' in os.environ:
    body = '(placeholder)'
  else:
    body = get_text_with_editor(name)
  if body == '':
    sys.stderr.write('Quit due to empty release note.\n')
    sys.exit(0)
  data = dict(tag_name=tag, name=name, body=body, draft=True, prerelease=True)
  r = github.repos(ELECTRON_REPO).releases.post(data=data)
  return r
def upload_electron(github, release, file_path, upload_to_s3):
def upload_electron(release, file_path, args):
filename = os.path.basename(file_path)
# if upload_to_s3 is set, skip github upload.
if upload_to_s3:
if args.upload_to_s3:
bucket, access_key, secret_key = s3_config()
key_prefix = 'electron-artifacts/{0}'.format(release['tag_name'])
key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
args.upload_timestamp)
s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
key_prefix, [file_path])
upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
upload_sha256_checksum(args.version, file_path, key_prefix)
s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
print '{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix)
return
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
upload_io_to_github(release, filename, file_path)
upload_io_to_github(release, filename, file_path, args.version)
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
upload_sha256_checksum(args.version, file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
@@ -239,14 +177,15 @@ def upload_electron(github, release, file_path, upload_to_s3):
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path, upload_to_s3)
upload_electron(release, arm_file_path, args)
def upload_io_to_github(release, filename, filepath):
def upload_io_to_github(release, filename, filepath, version):
print 'Uploading %s to Github' % \
(filename)
script_path = os.path.join(SOURCE_ROOT, 'script', 'upload-to-github.js')
execute(['node', script_path, filepath, filename, str(release['id'])])
execute(['node', script_path, filepath, filename, str(release['id']),
version])
def upload_sha256_checksum(version, file_path, key_prefix=None):
@@ -273,6 +212,12 @@ def auth_token():
return token
def get_release(version):
  """Look up the GitHub release for ``version`` via the find-release.js
  helper and return it decoded from the helper's JSON output."""
  finder = os.path.join(SOURCE_ROOT, 'script', 'find-release.js')
  raw = execute(['node', finder, version])
  return json.loads(raw)
if __name__ == '__main__':
import sys
sys.exit(main())

25
spec/api-tray-spec.js Normal file
View File

@@ -0,0 +1,25 @@
// Specs for the tray module's context-menu handling. A fresh Tray backed by
// an empty image is created per test and destroyed afterwards.
const {remote} = require('electron')
const {Menu, Tray, nativeImage} = remote

describe('tray module', function () {
  describe('tray.setContextMenu', function () {
    let tray

    beforeEach(function () {
      tray = new Tray(nativeImage.createEmpty())
    })

    afterEach(function () {
      tray.destroy()
      tray = null
    })

    it('accepts menu instance', function () {
      tray.setContextMenu(new Menu())
    })

    it('accepts null', function () {
      tray.setContextMenu(null)
    })
  })
})

View File

@@ -92,7 +92,7 @@ describe('webContents module', () => {
})
})
describe('setDevToolsWebCotnents() API', () => {
describe('setDevToolsWebContents() API', () => {
it('sets arbitry webContents as devtools', (done) => {
let devtools = new BrowserWindow({show: false})
devtools.webContents.once('dom-ready', () => {
@@ -708,4 +708,23 @@ describe('webContents module', () => {
w.loadURL(`file://${fixtures}/pages/a.html`)
})
})
// executeJavaScript must still round-trip a result when the renderer
// runs sandboxed.
describe('webframe messages in sandboxed contents', function () {
  it('responds to executeJavaScript', function (done) {
    w.destroy()
    w = new BrowserWindow({
      show: false,
      webPreferences: {sandbox: true}
    })
    const contents = w.webContents
    contents.once('did-finish-load', function () {
      contents.executeJavaScript('37 + 5', function (result) {
        assert.equal(result, 42)
        done()
      })
    })
    w.loadURL('about:blank')
  })
})
})

View File

@@ -2,6 +2,7 @@ const assert = require('assert')
const ChildProcess = require('child_process')
const fs = require('fs')
const path = require('path')
const util = require('util')
const {closeWindow} = require('./window-helpers')
const nativeImage = require('electron').nativeImage
@@ -549,6 +550,60 @@ describe('asar package', function () {
})
})
// fs.exists (deprecated, hence the lint suppressions) must see files
// inside asar archives, both directly and through util.promisify.
describe('fs.exists', () => {
  it('handles an existing file', (done) => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'file1')
    // eslint-disable-next-line
    fs.exists(p, (exists) => {
      assert.equal(exists, true)
      done()
    })
  })

  it('handles a non-existent file', (done) => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'not-exist')
    // eslint-disable-next-line
    fs.exists(p, (exists) => {
      assert.equal(exists, false)
      done()
    })
  })

  it('promisified version handles an existing file', (done) => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'file1')
    // eslint-disable-next-line
    util.promisify(fs.exists)(p).then((exists) => {
      assert.equal(exists, true)
      done()
    })
  })

  it('promisified version handles a non-existent file', (done) => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'not-exist')
    // eslint-disable-next-line
    util.promisify(fs.exists)(p).then((exists) => {
      assert.equal(exists, false)
      done()
    })
  })
})
// fs.existsSync must answer without throwing for paths inside asar archives.
describe('fs.existsSync', () => {
  it('handles an existing file', () => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'file1')
    assert.doesNotThrow(() => {
      assert.equal(fs.existsSync(p), true)
    })
  })

  it('handles a non-existent file', () => {
    const p = path.join(fixtures, 'asar', 'a.asar', 'not-exist')
    assert.doesNotThrow(() => {
      assert.equal(fs.existsSync(p), false)
    })
  })
})
describe('fs.access', function () {
it('accesses a normal file', function (done) {
var p = path.join(fixtures, 'asar', 'a.asar', 'file1')
@@ -644,6 +699,12 @@ describe('asar package', function () {
done()
})
})
// child_process.exec carries a util.promisify custom implementation;
// verify it resolves with the command's stdout.
it('can be promisified', () => {
  const execAsync = util.promisify(ChildProcess.exec)
  return execAsync('echo ' + echo + ' foo bar').then(({ stdout }) => {
    assert.equal(stdout.toString().replace(/\r/g, ''), echo + ' foo bar\n')
  })
})
})
describe('child_process.execSync', function () {
@@ -680,6 +741,12 @@ describe('asar package', function () {
var output = execFileSync(echo, ['test'])
assert.equal(String(output), 'test\n')
})
// child_process.execFile also has a util.promisify custom implementation.
it('can be promisified', () => {
  const execFileAsync = util.promisify(ChildProcess.execFile)
  return execFileAsync(echo, ['test']).then(({ stdout }) => {
    assert.equal(stdout, 'test\n')
  })
})
})
describe('internalModuleReadFile', function () {
@@ -700,6 +767,18 @@ describe('asar package', function () {
})
})
// Every promisify customization present on original-fs must have been
// carried over onto the asar-patched fs module.
describe('util.promisify', () => {
  it('can promisify all fs functions', () => {
    const originalFs = require('original-fs')
    for (const key in originalFs) {
      const custom = originalFs[key][util.promisify.custom]
      if (custom && !fs[key][util.promisify.custom]) {
        assert(false, `fs.${key}[util.promisify.custom] missing`)
      }
    }
  })
})
describe('process.noAsar', function () {
var errorName = process.platform === 'win32' ? 'ENOENT' : 'ENOTDIR'

View File

@@ -10,27 +10,32 @@ if (!process.argv.includes('--enable-mixed-sandbox')) {
app.enableMixedSandbox()
}
let sandboxWindow
let noSandboxWindow
let currentWindowSandboxed = false
app.once('ready', () => {
sandboxWindow = new BrowserWindow({
show: false,
webPreferences: {
preload: path.join(__dirname, 'electron-app-mixed-sandbox-preload.js'),
sandbox: true
}
})
sandboxWindow.loadURL('about:blank')
noSandboxWindow = new BrowserWindow({
show: false,
webPreferences: {
preload: path.join(__dirname, 'electron-app-mixed-sandbox-preload.js'),
sandbox: false
}
})
noSandboxWindow.loadURL('about:blank')
function testWindow (isSandboxed, callback) {
currentWindowSandboxed = isSandboxed
let currentWindow = new BrowserWindow({
show: false,
webPreferences: {
preload: path.join(__dirname, 'electron-app-mixed-sandbox-preload.js'),
sandbox: isSandboxed
}
})
currentWindow.loadURL('about:blank')
currentWindow.webContents.once('devtools-opened', () => {
if (isSandboxed) {
argv.sandboxDevtools = true
} else {
argv.noSandboxDevtools = true
}
if (callback) {
callback()
}
finish()
})
currentWindow.webContents.openDevTools()
}
const argv = {
sandbox: null,
@@ -41,6 +46,10 @@ app.once('ready', () => {
let connected = false
testWindow(true, () => {
testWindow()
})
function finish () {
if (connected && argv.sandbox != null && argv.noSandbox != null &&
argv.noSandboxDevtools != null && argv.sandboxDevtools != null) {
@@ -57,22 +66,10 @@ app.once('ready', () => {
finish()
})
noSandboxWindow.webContents.once('devtools-opened', () => {
argv.noSandboxDevtools = true
finish()
})
noSandboxWindow.webContents.openDevTools()
sandboxWindow.webContents.once('devtools-opened', () => {
argv.sandboxDevtools = true
finish()
})
sandboxWindow.webContents.openDevTools()
ipcMain.on('argv', (event, value) => {
if (event.sender === sandboxWindow.webContents) {
if (currentWindowSandboxed) {
argv.sandbox = value
} else if (event.sender === noSandboxWindow.webContents) {
} else {
argv.noSandbox = value
}
finish()

View File

@@ -179,6 +179,10 @@ describe('node feature', () => {
it('can be scheduled in time', (done) => {
remote.getGlobal('setTimeout')(done, 0)
})
// The browser process exposes a promisified setTimeout (see the spec app's
// main script); it must resolve when the delay elapses.
it('can be promisified', (done) => {
  const promisifiedSetTimeout = remote.getGlobal('setTimeoutPromisified')
  promisifiedSetTimeout(0).then(done)
})
})
describe('setInterval called under Chromium event loop in browser process', () => {

View File

@@ -76,6 +76,8 @@ ipcMain.on('echo', function (event, msg) {
event.returnValue = msg
})
// Promisified setTimeout exposed to renderer specs via remote.getGlobal().
global.setTimeoutPromisified = util.promisify(setTimeout)
const coverage = new Coverage({
outputPath: path.join(__dirname, '..', '..', 'out', 'coverage')
})

View File

@@ -646,24 +646,22 @@ describe('<webview> tag', function () {
describe('setDevToolsWebCotnents() API', () => {
it('sets webContents of webview as devtools', (done) => {
const webview2 = new WebView()
webview2.addEventListener('did-attach', () => {
webview2.addEventListener('dom-ready', () => {
const devtools = webview2.getWebContents()
assert.ok(devtools.getURL().startsWith('chrome-devtools://devtools'))
devtools.executeJavaScript('InspectorFrontendHost.constructor.name', (name) => {
assert.ok(name, 'InspectorFrontendHostImpl')
document.body.removeChild(webview2)
done()
})
})
webview.addEventListener('dom-ready', () => {
webview.getWebContents().setDevToolsWebContents(webview2.getWebContents())
webview.getWebContents().openDevTools()
})
webview.src = 'about:blank'
document.body.appendChild(webview)
})
document.body.appendChild(webview2)
webview2.addEventListener('dom-ready', () => {
const devtools = webview2.getWebContents()
assert.ok(devtools.getURL().startsWith('chrome-devtools://devtools'))
devtools.executeJavaScript('InspectorFrontendHost.constructor.name', (name) => {
assert.ok(name, 'InspectorFrontendHostImpl')
document.body.removeChild(webview2)
done()
})
})
webview.addEventListener('dom-ready', () => {
webview.getWebContents().setDevToolsWebContents(webview2.getWebContents())
webview.getWebContents().openDevTools()
})
webview.src = 'about:blank'
document.body.appendChild(webview)
})
})
@@ -1153,6 +1151,16 @@ describe('<webview> tag', function () {
})
describe('will-attach-webview event', () => {
it('does not emit when src is not changed', (done) => {
document.body.appendChild(webview)
setTimeout(() => {
assert.throws(() => {
webview.stop()
}, 'Cannot call stop because the webContents is unavailable. The WebView must be attached to the DOM and the dom-ready event emitted before this method can be called.')
done()
})
})
it('supports changing the web preferences', (done) => {
ipcRenderer.send('disable-node-on-next-will-attach-webview')
webview.addEventListener('console-message', (event) => {
@@ -1350,12 +1358,12 @@ describe('<webview> tag', function () {
const destroy1Listener = () => {
webview.removeEventListener('destroyed', destroy1Listener, false)
assert.equal(webContents, webview2.getWebContents())
assert.equal(null, webview.getWebContents())
assert.notStrictEqual(webContents, webview.getWebContents())
const destroy2Listener = () => {
webview2.removeEventListener('destroyed', destroy2Listener, false)
assert.equal(webContents, webview.getWebContents())
assert.equal(null, webview2.getWebContents())
assert.notStrictEqual(webContents, webview2.getWebContents())
// Make sure that events are hooked up to the right webview now
webview.addEventListener('console-message', (e) => {

2
vendor/node vendored

Some files were not shown because too many files have changed in this diff. Show More