Compare commits

...

48 Commits

Author SHA1 Message Date
John Kleinschmidt
378666668f v1.7.10 2017-12-18 15:23:57 -05:00
John Kleinschmidt
9623329cbd Merge pull request #11427 from electron/prepare-for-release
Preparation for 1.7.10 release
2017-12-18 13:20:31 -05:00
John Kleinschmidt
34b75e5e73 Update to use new release scripts 2017-12-13 11:54:23 -05:00
John Kleinschmidt
c339e576a8 Update libcc 2017-12-13 10:44:20 -05:00
Charles Kerr
c12d88fc30 Merge pull request #11327 from electron/cherry/1-7-x/10540
Cherry #10540
2017-12-04 10:43:49 -06:00
Benjamin Pasero
8f2064d9b7 set iconPosition as optional 2017-12-04 10:04:29 -06:00
Charles Kerr
75ff5d2a45 Merge pull request #11141 from electron/backport-scrubber-width-fix
Backport scrubber width fix to 1-7-x
2017-11-17 14:31:54 -06:00
John Kleinschmidt
87565e25eb Add Jenkinsfile for macOS CI 2017-11-17 10:58:26 -05:00
Samuel Attard
d0c9c4028e Forward declare the scrubber layout 2017-11-17 06:21:36 +11:00
Armagan Amcalar
37f23f6d4e Make ScrubberItem width dynamic
Depending on whether a ScrubberItem has text or an icon, this changeset
calculates the actual width and sizes the TouchBar items accordingly.
Previously, all ScrubberItems, regardless of their content, had a static
width of 50px.

This commit also fixes #10539.
2017-11-17 06:19:19 +11:00
Charles Kerr
4b8f3276b5 Merge pull request #11020 from pfrazee/fix-protocol-crash-error-17x
Fix crash in custom protocols caused by bad callback exec
2017-11-16 09:36:38 -06:00
Charles Kerr
b15c418c54 Merge pull request #10898 from electron/nvidia-high-sierra-17x-fix
Fix rendering issues with Nvidia GPU on High Sierra for 1.7.x
2017-11-13 17:49:30 -06:00
Paul Frazee
d964560990 Fix crash in custom protocols caused by bad callback exec 2017-11-05 12:20:46 -06:00
John Kleinschmidt
66e0e8fdcd Fix rendering issues with Nvidia GPU on High Sierra 2017-10-24 13:48:39 -04:00
John Kleinschmidt
c16498c274 Bump v1.7.9 2017-10-10 15:33:41 -04:00
John Kleinschmidt
62bd339b0d Merge pull request #10743 from electron/crankshaft-fix
Crankshaft RCE fix
2017-10-10 15:04:29 -04:00
John Kleinschmidt
0368460d74 Crankshaft RCE fix 2017-10-10 13:23:31 -04:00
John Kleinschmidt
9386de415a Merge pull request #10687 from electron/circleci-report-testing-results
CircleCI report testing results
2017-10-04 10:41:10 -04:00
John Kleinschmidt
4d3d7aa5fd Improve CircleCI test results 2017-10-04 10:01:09 -04:00
John Kleinschmidt
1d0dfd590b Release process updates for 1-7-x (#10643)
* Add prerelease script

* Update CircleCI for releases

* Rerun builds
2017-09-28 10:48:02 -04:00
Birunthan Mohanathas
5b679cb1eb Merge pull request #10630 from electron/squirrel-avoid-double-update-1-7
Use Squirrel.Windows --checkForUpdate (1.7.x)
2017-09-27 21:20:22 +03:00
J.M
8c28e3b6f1 Use Squirrel.Windows --checkForUpdate
This cherry-picks #10483 to fix #5057.
2017-09-27 20:32:58 +03:00
Birunthan Mohanathas
f786199d2c Merge pull request #10628 from electron/squirrel-mac-bump
Bump external binaries to 1.2.2
2017-09-27 16:06:03 +03:00
Josh Abernathy
acfc24e45e Bump external binaries to 1.2.2 2017-09-27 16:04:05 +03:00
John Kleinschmidt
01ca2252cd Bump v1.7.8 2017-09-24 10:34:18 +09:00
John Kleinschmidt
53db3862c0 Merge pull request #10591 from electron/disable-app-importcertificate-tests
disable flaky app.importCertificate and select-client-certificate tests
2017-09-24 10:25:57 +09:00
John Kleinschmidt
6099ab222e Change GitHub upload to use JS GitHub lib 2017-09-24 10:17:15 +09:00
Zeke Sikelianos
830cc7ecd1 disable select-client-certificate spec 2017-09-24 10:00:34 +09:00
Zeke Sikelianos
eef8ff09e2 disable flaky app.importCertificate tests 2017-09-24 09:49:48 +09:00
Samuel Attard
b15392e1c1 Backporting changes for 1.7.8 (#10586)
* Fix app.makeSingleInstance hanging on posix systems

Wait for the IO thread to be a thing before attempting to listen on the socket

Fixes #9880

* Move OnBrowserReady call to PreMainMessageLoopRun to account for timing issues on macOS

* Woops, how did that happen ;)

* Refactor as per @zcbenz comments

Also fix an issue where we run the single instance callback *not* on the UI thread;
this apparently results in a hung process.

* Appease the linting gods

* Create watcher when message loop is ready

* spec: Add test case for app.makeSingleInstance

* Fix missing extension when saving a file without filters

Previously, when triggering the save dialog through e.g. `<a download>`
links (e.g. http://jsfiddle.net/koldev/cW7W5/), the extension was only
saved if Finder was set to show all extensions by default. We now always
display the extension to make sure that it is saved.

If we wanted to keep the extension hidden, we could also populate the
allowed file types array with the extension from the default filename,
but that would interfere with how we set the filters.

* Try to make test less flaky

* Try simpler test

* Fix stdout detection

* Try longer timeout on test
2017-09-24 07:27:08 +09:00
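
For context, the first bullet above touches `app.makeSingleInstance`; a minimal usage sketch against the Electron 1.x API (illustrative only, not part of this changeset):

```js
const { app } = require('electron')

// Returns true when another instance already holds the single-instance lock;
// in that case this process should quit, and the first instance's callback
// receives our command line and working directory.
const shouldQuit = app.makeSingleInstance((argv, workingDirectory) => {
  // Runs in the first instance whenever a second instance is launched.
  console.log('Second instance started with', argv, 'in', workingDirectory)
})

if (shouldQuit) {
  app.quit()
}
```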
John Kleinschmidt
fd1bb3f95d Merge pull request #10585 from electron/turbofan-fix
Apply turbofan-fix
2017-09-23 16:06:34 +09:00
John Kleinschmidt
53eb4d68c5 Apply turbofan-fix 2017-09-23 15:05:15 +09:00
John Kleinschmidt
331a1759d2 Fix CircleCI test step 2017-09-13 11:22:33 -04:00
John Kleinschmidt
b69d76258a Remove arm64 build
Arm64 builds are not built for 1.7.x
2017-09-13 10:50:53 -04:00
John Kleinschmidt
680bf0076b Bump v1.7.7 2017-08-30 13:06:59 -04:00
John Kleinschmidt
d42d856b9a Rerun builds 2017-08-30 09:42:23 -04:00
John Kleinschmidt
e6f6862ae8 Merge pull request #10390 from electron/render_widget_compositor_patch
Backport fix for flickering web contents on large monitors
2017-08-29 21:52:35 -04:00
John Kleinschmidt
44b4cc374b Update to latest build 2017-08-29 21:27:59 -04:00
John Kleinschmidt
9824c88d2d Backport fix for flickering web contents on large monitors 2017-08-29 13:43:02 -04:00
John Kleinschmidt
96bc46c255 Merge pull request #10365 from electron/fix_exit_crash
Fixed crash on process exit on Windows
2017-08-29 11:55:12 -04:00
John Kleinschmidt
873a8902af Merge pull request #10375 from electron/backport-notification-sounds
Backport #10293 (notification-sounds)
2017-08-29 10:52:26 -04:00
John Kleinschmidt
7dac300305 Merge pull request #10377 from electron/backfort-libuv-1419
Backport libuv/libuv#1419
2017-08-29 10:50:32 -04:00
John Kleinschmidt
2a536d2aa2 Merge pull request #10374 from electron/backport-drag-browser-view
Backport #10232 (drag-browser-view)
2017-08-29 09:34:58 -04:00
Cheng Zhao
ccd03c6675 Backport https://github.com/libuv/libuv/pull/1419
win, fs: support unusual reparse points

Allow running uv_fs_stat and uv_fs_lstat on all reparse points. One such
reparse point is the new OneDrive drive with the "files on demand" feature
enabled.
2017-08-29 10:25:45 +09:00
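
Node's `fs.stat` and `fs.lstat` sit on top of `uv_fs_stat`/`uv_fs_lstat`, so after this backport they should succeed on such reparse points as well. A minimal sketch (the OneDrive path is hypothetical):

```js
const fs = require('fs')

// Hypothetical OneDrive "files on demand" placeholder; with the backported
// libuv fix, lstat no longer fails on this kind of reparse point.
fs.lstat('C:\\Users\\me\\OneDrive\\report.docx', (err, stats) => {
  if (err) return console.error('lstat failed:', err)
  console.log('size:', stats.size, 'isFile:', stats.isFile())
})
```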
Charlie Hess
5515092944 Merge pull request #10293 from electron/notification-sounds
Add support for soundName in main process notifications
2017-08-28 13:23:27 -07:00
Samuel Attard
fb7661d2d2 Merge pull request #10232 from electron/drag-browser-view
Add -webkit-app-region support to BrowserView
2017-08-28 13:19:09 -07:00
John Kleinschmidt
75b31f0bb6 Add CircleCI 2017-08-28 11:35:30 -04:00
Ales Pergl
cfee5ba8c8 Fixed crash on process exit on Windows 2017-08-28 16:28:07 +02:00
53 changed files with 1966 additions and 128 deletions

.circleci/config.yml Normal file (223 additions)
View File

@@ -0,0 +1,223 @@
version: 2
jobs:
electron-linux-arm:
docker:
- image: electronbuilds/electron:0.0.4
environment:
TARGET_ARCH: arm
resource_class: 2xlarge
steps:
- checkout
- run:
name: Check for release
command: |
if [ -n "${RUN_RELEASE_BUILD}" ]; then
echo 'release build triggered from api'
echo 'export ELECTRON_RELEASE=1 TRIGGERED_BY_API=1' >> $BASH_ENV
fi
- run:
name: Bootstrap
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Bootstrapping Electron for release build'
script/bootstrap.py --target_arch=$TARGET_ARCH
else
echo 'Bootstrapping Electron for debug build'
script/bootstrap.py --target_arch=$TARGET_ARCH --dev
fi
- run: npm run lint
- run:
name: Build
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
script/build.py -c R
else
echo 'Building Electron for debug'
script/build.py -c D
fi
- run:
name: Create distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Creating Electron release distribution'
script/create-dist.py
else
echo 'Skipping create distribution because build is not for release'
fi
- run:
name: Upload distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" != "1" ]; then
echo 'Uploading Electron release distribution to github releases'
script/upload.py
elif [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" == "1" ]; then
echo 'Uploading Electron release distribution to s3'
script/upload.py --upload_to_s3
else
echo 'Skipping upload distribution because build is not for release'
fi
electron-linux-ia32:
docker:
- image: electronbuilds/electron:0.0.4
environment:
TARGET_ARCH: ia32
resource_class: xlarge
steps:
- checkout
- run:
name: Check for release
command: |
if [ -n "${RUN_RELEASE_BUILD}" ]; then
echo 'release build triggered from api'
echo 'export ELECTRON_RELEASE=1 TRIGGERED_BY_API=1' >> $BASH_ENV
fi
- run:
name: Bootstrap
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Bootstrapping Electron for release build'
script/bootstrap.py --target_arch=$TARGET_ARCH
else
echo 'Bootstrapping Electron for debug build'
script/bootstrap.py --target_arch=$TARGET_ARCH --dev
fi
- run: npm run lint
- run:
name: Build
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
script/build.py -c R
else
echo 'Building Electron for debug'
script/build.py -c D
fi
- run:
name: Create distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Creating Electron release distribution'
script/create-dist.py
else
echo 'Skipping create distribution because build is not for release'
fi
- run:
name: Upload distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" != "1" ]; then
echo 'Uploading Electron release distribution to github releases'
script/upload.py
elif [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" == "1" ]; then
echo 'Uploading Electron release distribution to s3'
script/upload.py --upload_to_s3
else
echo 'Skipping upload distribution because build is not for release'
fi
electron-linux-x64:
docker:
- image: electronbuilds/electron:0.0.4
environment:
TARGET_ARCH: x64
DISPLAY: ':99.0'
resource_class: xlarge
steps:
- checkout
- run:
name: Setup for headless testing
command: sh -e /etc/init.d/xvfb start
- run:
name: Check for release
command: |
if [ -n "${RUN_RELEASE_BUILD}" ]; then
echo 'release build triggered from api'
echo 'export ELECTRON_RELEASE=1 TRIGGERED_BY_API=1' >> $BASH_ENV
fi
- run:
name: Bootstrap
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Bootstrapping Electron for release build'
script/bootstrap.py --target_arch=$TARGET_ARCH
else
echo 'Bootstrapping Electron for debug build'
script/bootstrap.py --target_arch=$TARGET_ARCH --dev
fi
- run: npm run lint
- run:
name: Build
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
script/build.py -c R
else
echo 'Building Electron for debug'
script/build.py -c D
fi
- run:
name: Create distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Creating Electron release distribution'
script/create-dist.py
else
echo 'Skipping create distribution because build is not for release'
fi
- run:
name: Upload distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" != "1" ]; then
echo 'Uploading Electron release distribution to github releases'
script/upload.py
elif [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" == "1" ]; then
echo 'Uploading Electron release distribution to s3'
script/upload.py --upload_to_s3
else
echo 'Skipping upload distribution because build is not for release'
fi
- run:
name: Test
environment:
MOCHA_FILE: junit/test-results.xml
MOCHA_REPORTER: mocha-junit-reporter
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Testing Electron debug build'
mkdir junit
script/test.py --ci --rebuild_native_modules
else
echo 'Skipping testing on release build'
fi
- run:
name: Verify FFmpeg
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Verifying ffmpeg on debug build'
script/verify-ffmpeg.py
else
echo 'Skipping verify ffmpeg on release build'
fi
- run:
name: Generate Typescript Definitions
command: npm run create-typescript-definitions
- store_test_results:
path: junit
- store_artifacts:
path: junit
- store_artifacts:
path: out/electron.d.ts
- store_artifacts:
path: out/electron-api.json
workflows:
version: 2
build-arm:
jobs:
- electron-linux-arm
build-ia32:
jobs:
- electron-linux-ia32
build-x64:
jobs:
- electron-linux-x64

Jenkinsfile vendored Normal file (44 additions)
View File

@@ -0,0 +1,44 @@
pipeline {
agent none
stages {
stage('Build') {
parallel {
stage('electron-osx-x64') {
agent {
label 'osx'
}
steps {
sh 'script/bootstrap.py --target_arch=x64 --dev'
sh 'npm run lint'
sh 'script/build.py -c D'
sh 'script/test.py --ci --rebuild_native_modules'
}
post {
always {
cleanWs()
}
}
}
stage('electron-mas-x64') {
agent {
label 'osx'
}
environment {
MAS_BUILD = '1'
}
steps {
sh 'script/bootstrap.py --target_arch=x64 --dev'
sh 'npm run lint'
sh 'script/build.py -c D'
sh 'script/test.py --ci --rebuild_native_modules'
}
post {
always {
cleanWs()
}
}
}
}
}
}
}

View File

@@ -95,6 +95,25 @@ int APIENTRY wWinMain(HINSTANCE instance, HINSTANCE, wchar_t* cmd, int) {
}
}
#ifndef DEBUG
// Chromium has its own TLS subsystem which supports automatic destruction
// of thread-local data, and also depends on memory allocation routines
// provided by the CRT. The problem is that the auto-destruction mechanism
// uses a hidden feature of the OS loader which calls a callback on thread
// exit, but only after all loaded DLLs have been detached. Since the CRT is
// also a DLL, it happens that by the time Chromium's `OnThreadExit` function
// is called, the heap functions, though still in memory, no longer perform
// their duties, and when Chromium calls `free` on its buffer, it triggers
// an access violation error.
// We work around this problem by invoking Chromium's `OnThreadExit` in time
// from within the CRT's atexit facility, ensuring the heap functions are
// still active. The second invocation from the OS loader will be a no-op.
extern void NTAPI OnThreadExit(PVOID module, DWORD reason, PVOID reserved);
atexit([]() {
OnThreadExit(nullptr, DLL_THREAD_DETACH, nullptr);
});
#endif
if (run_as_node) {
// Now that argv conversion is done, we can finally start.
base::AtExitManager atexit_manager;

View File

@@ -575,6 +575,12 @@ void App::OnFinishLaunching(const base::DictionaryValue& launch_info) {
Emit("ready", launch_info);
}
void App::OnPreMainMessageLoopRun() {
if (process_singleton_) {
process_singleton_->OnBrowserReady();
}
}
void App::OnAccessibilitySupportChanged() {
Emit("accessibility-support-changed", IsAccessibilitySupportEnabled());
}

View File

@@ -94,6 +94,7 @@ class App : public AtomBrowserClient::Delegate,
base::FilePath GetAppPath() const;
void RenderProcessReady(content::RenderProcessHost* host);
void RenderProcessDisconnected(base::ProcessId host_pid);
void PreMainMessageLoopRun();
protected:
explicit App(v8::Isolate* isolate);
@@ -112,6 +113,7 @@ class App : public AtomBrowserClient::Delegate,
void OnLogin(LoginHandler* login_handler,
const base::DictionaryValue& request_details) override;
void OnAccessibilitySupportChanged() override;
void OnPreMainMessageLoopRun() override;
#if defined(OS_MACOSX)
void OnContinueUserActivity(
bool* prevent_default,

View File

@@ -67,6 +67,7 @@ Notification::Notification(v8::Isolate* isolate,
opts.Get("replyPlaceholder", &reply_placeholder_);
opts.Get("hasReply", &has_reply_);
opts.Get("actions", &actions_);
opts.Get("sound", &sound_);
}
}
@@ -113,6 +114,10 @@ std::vector<brightray::NotificationAction> Notification::GetActions() const {
return actions_;
}
base::string16 Notification::GetSound() const {
return sound_;
}
// Setters
void Notification::SetTitle(const base::string16& new_title) {
title_ = new_title;
@@ -143,6 +148,10 @@ void Notification::SetActions(
actions_ = actions;
}
void Notification::SetSound(const base::string16& new_sound) {
sound_ = new_sound;
}
void Notification::NotificationAction(int index) {
Emit("action", index);
}
@@ -181,6 +190,7 @@ void Notification::Show() {
options.has_reply = has_reply_;
options.reply_placeholder = reply_placeholder_;
options.actions = actions_;
options.sound = sound_;
notification_->Show(options);
}
}
@@ -207,7 +217,9 @@ void Notification::BuildPrototype(v8::Isolate* isolate,
.SetProperty("hasReply", &Notification::GetHasReply,
&Notification::SetHasReply)
.SetProperty("actions", &Notification::GetActions,
&Notification::SetActions);
&Notification::SetActions)
.SetProperty("sound", &Notification::GetSound,
&Notification::SetSound);
}
} // namespace api

View File

@@ -54,6 +54,7 @@ class Notification : public mate::TrackableObject<Notification>,
base::string16 GetReplyPlaceholder() const;
bool GetHasReply() const;
std::vector<brightray::NotificationAction> GetActions() const;
base::string16 GetSound() const;
// Prop Setters
void SetTitle(const base::string16& new_title);
@@ -63,6 +64,7 @@ class Notification : public mate::TrackableObject<Notification>,
void SetReplyPlaceholder(const base::string16& new_reply_placeholder);
void SetHasReply(bool new_has_reply);
void SetActions(const std::vector<brightray::NotificationAction>& actions);
void SetSound(const base::string16& sound);
private:
base::string16 title_;
@@ -75,6 +77,7 @@ class Notification : public mate::TrackableObject<Notification>,
base::string16 reply_placeholder_;
bool has_reply_ = false;
std::vector<brightray::NotificationAction> actions_;
base::string16 sound_;
brightray::NotificationPresenter* presenter_;

View File

@@ -4,6 +4,7 @@
#include "atom/browser/atom_browser_main_parts.h"
#include "atom/browser/api/atom_api_app.h"
#include "atom/browser/api/trackable_object.h"
#include "atom/browser/atom_access_token_store.h"
#include "atom/browser/atom_browser_client.h"
@@ -183,6 +184,8 @@ void AtomBrowserMainParts::PreMainMessageLoopRun() {
std::unique_ptr<base::DictionaryValue> empty_info(new base::DictionaryValue);
Browser::Get()->DidFinishLaunching(*empty_info);
#endif
Browser::Get()->PreMainMessageLoopRun();
}
bool AtomBrowserMainParts::MainMessageLoopRun(int* result_code) {

View File

@@ -171,6 +171,12 @@ void Browser::RequestLogin(
observer.OnLogin(login_handler, *(request_details.get()));
}
void Browser::PreMainMessageLoopRun() {
for (BrowserObserver& observer : observers_) {
observer.OnPreMainMessageLoopRun();
}
}
void Browser::NotifyAndShutdown() {
if (is_shutdown_)
return;

View File

@@ -202,6 +202,8 @@ class Browser : public WindowListObserver {
void RequestLogin(LoginHandler* login_handler,
std::unique_ptr<base::DictionaryValue> request_details);
void PreMainMessageLoopRun();
void AddObserver(BrowserObserver* obs) {
observers_.AddObserver(obs);
}

View File

@@ -55,6 +55,9 @@ class BrowserObserver {
// The browser's accessibility suppport has changed.
virtual void OnAccessibilitySupportChanged() {}
// The app message loop is ready
virtual void OnPreMainMessageLoopRun() {}
#if defined(OS_MACOSX)
// The browser wants to resume a user activity via handoff. (macOS only)
virtual void OnContinueUserActivity(

View File

@@ -2,6 +2,8 @@
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#include <vector>
#include "atom/browser/native_browser_view.h"
#include "atom/browser/api/atom_api_web_contents.h"

View File

@@ -5,6 +5,9 @@
#ifndef ATOM_BROWSER_NATIVE_BROWSER_VIEW_H_
#define ATOM_BROWSER_NATIVE_BROWSER_VIEW_H_
#include <vector>
#include "atom/common/draggable_region.h"
#include "base/macros.h"
#include "third_party/skia/include/core/SkColor.h"
@@ -38,6 +41,10 @@ class NativeBrowserView {
virtual void SetBounds(const gfx::Rect& bounds) = 0;
virtual void SetBackgroundColor(SkColor color) = 0;
// Called when the window needs to update its draggable region.
virtual void UpdateDraggableRegions(
const std::vector<gfx::Rect>& system_drag_exclude_areas) {}
protected:
explicit NativeBrowserView(
brightray::InspectableWebContentsView* web_contents_view);

View File

@@ -6,8 +6,11 @@
#define ATOM_BROWSER_NATIVE_BROWSER_VIEW_MAC_H_
#import <Cocoa/Cocoa.h>
#include <vector>
#include "atom/browser/native_browser_view.h"
#include "atom/common/draggable_region.h"
#include "base/mac/scoped_nsobject.h"
namespace atom {
@@ -20,6 +23,8 @@ class NativeBrowserViewMac : public NativeBrowserView {
void SetAutoResizeFlags(uint8_t flags) override;
void SetBounds(const gfx::Rect& bounds) override;
void SetBackgroundColor(SkColor color) override;
void UpdateDraggableRegions(
const std::vector<gfx::Rect>& system_drag_exclude_areas) override;
private:
DISALLOW_COPY_AND_ASSIGN(NativeBrowserViewMac);

View File

@@ -12,6 +12,101 @@
const NSAutoresizingMaskOptions kDefaultAutoResizingMask =
NSViewMaxXMargin | NSViewMinYMargin;
@interface DragRegionView : NSView
@property (assign) NSPoint initialLocation;
@end
@interface NSWindow ()
- (void)performWindowDragWithEvent:(NSEvent *)event;
@end
@implementation DragRegionView
- (BOOL)mouseDownCanMoveWindow
{
return NO;
}
- (NSView *)hitTest:(NSPoint)aPoint
{
// Pass-through events that don't hit one of the exclusion zones
for (NSView *exlusion_zones in [self subviews]) {
if ([exlusion_zones hitTest:aPoint])
return nil;
}
return self;
}
- (void)mouseDown:(NSEvent *)event
{
if ([self.window respondsToSelector:@selector(performWindowDragWithEvent)]) {
[self.window performWindowDragWithEvent:event];
return;
}
self.initialLocation = [event locationInWindow];
}
- (void)mouseDragged:(NSEvent *)theEvent
{
if ([self.window respondsToSelector:@selector(performWindowDragWithEvent)]) {
return;
}
NSPoint currentLocation = [NSEvent mouseLocation];
NSPoint newOrigin;
NSRect screenFrame = [[NSScreen mainScreen] frame];
NSRect windowFrame = [self.window frame];
newOrigin.x = currentLocation.x - self.initialLocation.x;
newOrigin.y = currentLocation.y - self.initialLocation.y;
// Don't let window get dragged up under the menu bar
if ((newOrigin.y + windowFrame.size.height) > (screenFrame.origin.y + screenFrame.size.height)) {
newOrigin.y = screenFrame.origin.y + (screenFrame.size.height - windowFrame.size.height);
}
// Move the window to the new location
[self.window setFrameOrigin:newOrigin];
}
// Debugging tips:
// Uncomment the following four lines to color DragRegionView bright red
// #ifdef DEBUG_DRAG_REGIONS
// - (void)drawRect:(NSRect)aRect
// {
// [[NSColor redColor] set];
// NSRectFill([self bounds]);
// }
// #endif
@end
@interface ExcludeDragRegionView : NSView
@end
@implementation ExcludeDragRegionView
- (BOOL)mouseDownCanMoveWindow {
return NO;
}
// Debugging tips:
// Uncomment the following four lines to color ExcludeDragRegionView bright red
// #ifdef DEBUG_DRAG_REGIONS
// - (void)drawRect:(NSRect)aRect
// {
// [[NSColor greenColor] set];
// NSRectFill([self bounds]);
// }
// #endif
@end
namespace atom {
NativeBrowserViewMac::NativeBrowserViewMac(
@@ -51,6 +146,59 @@ void NativeBrowserViewMac::SetBackgroundColor(SkColor color) {
view.layer.backgroundColor = skia::CGColorCreateFromSkColor(color);
}
void NativeBrowserViewMac::UpdateDraggableRegions(
const std::vector<gfx::Rect>& system_drag_exclude_areas) {
NSView* webView = GetInspectableWebContentsView()->GetNativeView();
NSInteger superViewHeight = NSHeight([webView.superview bounds]);
NSInteger webViewHeight = NSHeight([webView bounds]);
NSInteger webViewWidth = NSWidth([webView bounds]);
NSInteger webViewX = NSMinX([webView frame]);
NSInteger webViewY = 0;
// Apple's NSViews have their coordinate system originate at the bottom left,
// meaning that we need to be a bit smarter when it comes to calculating our
// current top offset
if (webViewHeight > superViewHeight) {
webViewY = std::abs(webViewHeight - superViewHeight - (std::abs(NSMinY([webView frame]))));
} else {
webViewY = superViewHeight - NSMaxY([webView frame]);
}
// Remove all DraggableRegionViews that are added last time.
// Note that [webView subviews] returns the view's mutable internal array and
// it should be copied to avoid mutating the original array while enumerating
// it.
base::scoped_nsobject<NSArray> subviews([[webView subviews] copy]);
for (NSView* subview in subviews.get())
if ([subview isKindOfClass:[DragRegionView class]])
[subview removeFromSuperview];
// Create one giant NSView that is draggable.
base::scoped_nsobject<NSView> dragRegion(
[[DragRegionView alloc] initWithFrame:NSZeroRect]);
[dragRegion setFrame:NSMakeRect(0,
0,
webViewWidth,
webViewHeight)];
// Then, on top of that, add "exclusion zones"
for (auto iter = system_drag_exclude_areas.begin();
iter != system_drag_exclude_areas.end();
++iter) {
base::scoped_nsobject<NSView> controlRegion(
[[ExcludeDragRegionView alloc] initWithFrame:NSZeroRect]);
[controlRegion setFrame:NSMakeRect(iter->x() - webViewX,
webViewHeight - iter->bottom() + webViewY,
iter->width(),
iter->height())];
[dragRegion addSubview:controlRegion];
}
// Add the DragRegion to the WebView
[webView addSubview:dragRegion];
}
// static
NativeBrowserView* NativeBrowserView::Create(
brightray::InspectableWebContentsView* web_contents_view) {

View File

@@ -154,7 +154,7 @@ class NativeWindowMac : public NativeWindow,
void UninstallView();
// Install the drag view, which will cover the whole window and decides
// whehter we can drag.
// whether we can drag.
void UpdateDraggableRegionViews(const std::vector<DraggableRegion>& regions);
void RegisterInputEventObserver(content::RenderViewHost* host);

View File

@@ -1767,6 +1767,10 @@ void NativeWindowMac::UpdateDraggableRegionViews(
std::vector<gfx::Rect> system_drag_exclude_areas =
CalculateNonDraggableRegions(regions, webViewWidth, webViewHeight);
if (browser_view_) {
browser_view_->UpdateDraggableRegions(system_drag_exclude_areas);
}
// Create and add a ControlRegionView for each region that needs to be
// excluded from the dragging.
for (std::vector<gfx::Rect>::const_iterator iter =

View File

@@ -258,7 +258,9 @@ void URLRequestFetchJob::OnURLFetchComplete(const net::URLFetcher* source) {
HeadersCompleted();
return;
}
ReadRawDataComplete(0);
if (request_->status().is_io_pending()) {
ReadRawDataComplete(0);
}
} else {
NotifyStartError(fetcher_->GetStatus());
}

View File

@@ -17,9 +17,9 @@
<key>CFBundleIconFile</key>
<string>electron.icns</string>
<key>CFBundleVersion</key>
<string>1.7.6</string>
<string>1.7.10</string>
<key>CFBundleShortVersionString</key>
<string>1.7.6</string>
<string>1.7.10</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.developer-tools</string>
<key>LSMinimumSystemVersion</key>

View File

@@ -56,8 +56,8 @@ END
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 1,7,6,0
PRODUCTVERSION 1,7,6,0
FILEVERSION 1,7,10,0
PRODUCTVERSION 1,7,10,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -74,12 +74,12 @@ BEGIN
BEGIN
VALUE "CompanyName", "GitHub, Inc."
VALUE "FileDescription", "Electron"
VALUE "FileVersion", "1.7.6"
VALUE "FileVersion", "1.7.10"
VALUE "InternalName", "electron.exe"
VALUE "LegalCopyright", "Copyright (C) 2015 GitHub, Inc. All rights reserved."
VALUE "OriginalFilename", "electron.exe"
VALUE "ProductName", "Electron"
VALUE "ProductVersion", "1.7.6"
VALUE "ProductVersion", "1.7.10"
VALUE "SquirrelAwareVersion", "1"
END
END

View File

@@ -17,7 +17,7 @@
#include "native_mate/constructor.h"
#include "native_mate/persistent_dictionary.h"
@interface AtomTouchBar : NSObject<NSScrubberDelegate, NSScrubberDataSource> {
@interface AtomTouchBar : NSObject<NSScrubberDelegate, NSScrubberDataSource, NSScrubberFlowLayoutDelegate> {
@protected
std::vector<mate::PersistentDictionary> ordered_settings_;
std::map<std::string, mate::PersistentDictionary> settings_;

View File

@@ -666,4 +666,40 @@ static NSString* const ImageScrubberItemIdentifier = @"scrubber.image.item";
return itemView;
}
- (NSSize)scrubber:(NSScrubber *)scrubber layout:(NSScrubberFlowLayout *)layout sizeForItemAtIndex:(NSInteger)itemIndex
{
NSInteger width = 50;
NSInteger height = 30;
NSInteger margin = 15;
NSSize defaultSize = NSMakeSize(width, height);
std::string s_id([[scrubber identifier] UTF8String]);
if (![self hasItemWithID:s_id]) return defaultSize;
mate::PersistentDictionary settings = settings_[s_id];
std::vector<mate::PersistentDictionary> items;
if (!settings.Get("items", &items)) return defaultSize;
if (itemIndex >= static_cast<NSInteger>(items.size())) return defaultSize;
mate::PersistentDictionary item = items[itemIndex];
std::string title;
if (item.Get("label", &title)) {
NSSize size = NSMakeSize(CGFLOAT_MAX, CGFLOAT_MAX);
NSRect textRect = [base::SysUTF8ToNSString(title) boundingRectWithSize:size
options:NSStringDrawingUsesLineFragmentOrigin | NSStringDrawingUsesFontLeading
attributes:@{ NSFontAttributeName: [NSFont systemFontOfSize:0]}];
width = textRect.size.width + margin;
} else {
gfx::Image image;
if (item.Get("icon", &image)) {
width = image.AsNSImage().size.width;
}
}
return NSMakeSize(width, height);
}
@end

View File

@@ -15,7 +15,7 @@
@class NSTouchBar, NSTouchBarItem;
@class NSScrubber, NSScrubberItemView, NSScrubberArrangedView, NSScrubberTextItemView, NSScrubberImageItemView, NSScrubberSelectionStyle;
@protocol NSTouchBarDelegate, NSScrubberDelegate, NSScrubberDataSource;
@protocol NSTouchBarDelegate, NSScrubberDelegate, NSScrubberDataSource, NSScrubberFlowLayoutDelegate, NSScrubberFlowLayout;
typedef float NSTouchBarItemPriority;
static const NSTouchBarItemPriority NSTouchBarItemPriorityHigh = 1000;
@@ -149,6 +149,9 @@ static const NSTouchBarItemIdentifier NSTouchBarItemIdentifierOtherItemsProxy =
@end
@interface NSScrubberFlowLayout: NSObject
@end
@interface NSScrubberSelectionStyle : NSObject<NSCoding>
@property(class, strong, readonly) NSScrubberSelectionStyle* outlineOverlayStyle;
@@ -229,6 +232,12 @@ static const NSTouchBarItemIdentifier NSTouchBarItemIdentifierOtherItemsProxy =
@end
@protocol NSScrubberFlowLayoutDelegate<NSObject>
- (NSSize)scrubber:(NSScrubber *)scrubber layout:(NSScrubberFlowLayout *)layout sizeForItemAtIndex:(NSInteger)itemIndex;
@end
#pragma clang assume_nonnull end
#elif MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_12_1

View File

@@ -40,7 +40,6 @@ void SetAllowedFileTypes(NSSavePanel* dialog, const Filters& filters) {
if ([file_type_set count])
file_types = [file_type_set allObjects];
[dialog setExtensionHidden:NO];
[dialog setAllowedFileTypes:file_types];
}
@@ -84,11 +83,14 @@ void SetupDialog(NSSavePanel* dialog,
SetAllowedFileTypes(dialog, settings.filters);
}
// Make sure the extension is always visible. Without this, the extension in
// the default filename will not be used in the saved file.
[dialog setExtensionHidden:NO];
if (default_dir)
[dialog setDirectoryURL:[NSURL fileURLWithPath:default_dir]];
if (default_filename)
[dialog setNameFieldStringValue:default_filename];
}
void SetupDialogForProperties(NSOpenPanel* dialog, int properties) {

View File

@@ -7,8 +7,8 @@
#define ATOM_MAJOR_VERSION 1
#define ATOM_MINOR_VERSION 7
#define ATOM_PATCH_VERSION 6
#define ATOM_PATCH_VERSION 10
// #define ATOM_PRE_RELEASE_VERSION
#define ATOM_VERSION_IS_RELEASE 1
#ifndef ATOM_TAG

View File

@@ -39,6 +39,8 @@ void CocoaNotification::Show(const NotificationOptions& options) {
if (options.silent) {
[notification_ setSoundName:nil];
} else if (options.sound != nil) {
[notification_ setSoundName:base::SysUTF16ToNSString(options.sound)];
} else {
[notification_ setSoundName:NSUserNotificationDefaultSoundName];
}

View File

@@ -33,6 +33,7 @@ struct NotificationOptions {
bool silent;
bool has_reply;
base::string16 reply_placeholder;
base::string16 sound;
std::vector<NotificationAction> actions;
};

View File

@@ -74,6 +74,8 @@ class ProcessSingleton : public base::NonThreadSafe {
// TODO(brettw): Make the implementation of this method non-platform-specific
// by making Linux re-use the Windows implementation.
NotifyResult NotifyOtherProcessOrCreate();
void StartListeningOnSocket();
void OnBrowserReady();
// Sets ourself up as the singleton instance. Returns true on success. If
// false is returned, we are not the singleton instance and the caller must
@@ -173,6 +175,8 @@ class ProcessSingleton : public base::NonThreadSafe {
// because it posts messages between threads.
class LinuxWatcher;
scoped_refptr<LinuxWatcher> watcher_;
int sock_;
bool listen_on_ready_ = false;
#endif
DISALLOW_COPY_AND_ASSIGN(ProcessSingleton);

View File

@@ -55,6 +55,7 @@
#include <stddef.h>
#include "atom/browser/browser.h"
#include "atom/common/atom_command_line.h"
#include "base/base_paths.h"
@@ -719,8 +720,7 @@ ProcessSingleton::ProcessSingleton(
const base::FilePath& user_data_dir,
const NotificationCallback& notification_callback)
: notification_callback_(notification_callback),
current_pid_(base::GetCurrentProcId()),
watcher_(new LinuxWatcher(this)) {
current_pid_(base::GetCurrentProcId()) {
// The user_data_dir may have not been created yet.
base::CreateDirectoryAndGetError(user_data_dir, nullptr);
@@ -881,6 +881,23 @@ ProcessSingleton::NotifyResult ProcessSingleton::NotifyOtherProcessOrCreate() {
base::TimeDelta::FromSeconds(kTimeoutInSeconds));
}
void ProcessSingleton::StartListeningOnSocket() {
watcher_ = new LinuxWatcher(this);
BrowserThread::PostTask(
BrowserThread::IO,
FROM_HERE,
base::Bind(&ProcessSingleton::LinuxWatcher::StartListening,
watcher_,
sock_));
}
void ProcessSingleton::OnBrowserReady() {
if (listen_on_ready_) {
StartListeningOnSocket();
listen_on_ready_ = false;
}
}
ProcessSingleton::NotifyResult
ProcessSingleton::NotifyOtherProcessWithTimeoutOrCreate(
const base::CommandLine& command_line,
@@ -1031,13 +1048,13 @@ bool ProcessSingleton::Create() {
if (listen(sock, 5) < 0)
NOTREACHED() << "listen failed: " << base::safe_strerror(errno);
DCHECK(BrowserThread::IsMessageLoopValid(BrowserThread::IO));
BrowserThread::PostTask(
BrowserThread::IO,
FROM_HERE,
base::Bind(&ProcessSingleton::LinuxWatcher::StartListening,
watcher_,
sock));
sock_ = sock;
if (BrowserThread::IsMessageLoopValid(BrowserThread::IO)) {
StartListeningOnSocket();
} else {
listen_on_ready_ = true;
}
return true;
}

View File

@@ -258,6 +258,9 @@ ProcessSingleton::NotifyOtherProcessOrCreate() {
return result;
}
void ProcessSingleton::StartListeningOnSocket() {}
void ProcessSingleton::OnBrowserReady() {}
// Look for a Chrome instance that uses the same profile directory. If there
// isn't one, create a message window with its title set to the profile
// directory path.

View File

@@ -37,6 +37,7 @@ Returns `Boolean` - Whether or not desktop notifications are supported on the cu
* `icon` [NativeImage](native-image.md) - (optional) An icon to use in the notification
* `hasReply` Boolean - (optional) Whether or not to add an inline reply option to the notification. _macOS_
* `replyPlaceholder` String - (optional) The placeholder to write in the inline reply input field. _macOS_
* `sound` String - (optional) The name of the sound file to play when the notification is shown. _macOS_
* `actions` [NotificationAction[]](structures/notification-action.md) - (optional) Actions to add to the notification. Please read the available actions and limitations in the `NotificationAction` documentation _macOS_
@@ -102,3 +103,18 @@ Immediately shows the notification to the user, please note this means unlike th
HTML5 Notification implementation, simply instantiating a `new Notification` does
not immediately show it to the user, you need to call this method before the OS
will display it.
### Playing Sounds
On macOS, you can specify the name of the sound you'd like to play when the
notification is shown. Any of the default sounds (under System Preferences >
Sound) can be used, in addition to custom sound files. Be sure that the sound
file is copied under the app bundle (e.g., `YourApp.app/Contents/Resources`),
or one of the following locations:
* `~/Library/Sounds`
* `/Library/Sounds`
* `/Network/Library/Sounds`
* `/System/Library/Sounds`
See the [`NSSound`](https://developer.apple.com/documentation/appkit/nssound) docs for more information.
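
A minimal sketch of the new `sound` option from the main process (assumes the app is ready; 'Basso' is one of the built-in macOS sounds):

```js
const { app, Notification } = require('electron')

app.on('ready', () => {
  // A bundled custom sound file name could be used instead of 'Basso'.
  const notification = new Notification({
    title: 'Build finished',
    body: 'All tests passed.',
    sound: 'Basso'
  })
  notification.show()
})
```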

View File

@@ -11,7 +11,7 @@ Process: [Main](../tutorial/quick-start.md#main-process)
* `backgroundColor` String (optional) - Button background color in hex format,
i.e `#ABCDEF`.
* `icon` [NativeImage](native-image.md) (optional) - Button icon.
* `iconPosition` String - Can be `left`, `right` or `overlay`.
* `iconPosition` String (optional) - Can be `left`, `right` or `overlay`.
* `click` Function (optional) - Function to call when the button is clicked.
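
To illustrate the `iconPosition` option documented above, a minimal sketch against the Electron 1.7 TouchBar API (the icon path is hypothetical):

```js
const { app, BrowserWindow, TouchBar, nativeImage } = require('electron')
const { TouchBarButton } = TouchBar

app.on('ready', () => {
  // iconPosition can be 'left', 'right' or 'overlay'.
  const playButton = new TouchBarButton({
    label: 'Play',
    icon: nativeImage.createFromPath('/path/to/play.png'),
    iconPosition: 'left',
    click: () => console.log('play clicked')
  })

  const win = new BrowserWindow({ width: 600, height: 400 })
  win.setTouchBar(new TouchBar([playButton]))
})
```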
### Instance Properties

View File

@@ -4,7 +4,7 @@
'product_name%': 'Electron',
'company_name%': 'GitHub, Inc',
'company_abbr%': 'github',
'version%': '1.7.6',
'version%': '1.7.10',
'js2c_input_dir': '<(SHARED_INTERMEDIATE_DIR)/js2c',
},
'includes': [

View File

@@ -29,7 +29,7 @@ class AutoUpdater extends EventEmitter {
return this.emitError('Can not find Squirrel')
}
this.emit('checking-for-update')
squirrelUpdate.download(this.updateURL, (error, update) => {
squirrelUpdate.checkForUpdate(this.updateURL, (error, update) => {
if (error != null) {
return this.emitError(error)
}

View File

@@ -82,8 +82,8 @@ exports.processStart = function () {
}
// Download the releases specified by the URL and write new results to stdout.
exports.download = function (updateURL, callback) {
return spawnUpdate(['--download', updateURL], false, function (error, stdout) {
exports.checkForUpdate = function (updateURL, callback) {
return spawnUpdate(['--checkForUpdate', updateURL], false, function (error, stdout) {
var json, ref, ref1, update
if (error != null) {
return callback(error)
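
For reference, the public API driven by these two hunks is the main-process `autoUpdater`; with this change, checking for updates on Windows spawns Squirrel with `--checkForUpdate` instead of `--download`. A minimal sketch with a hypothetical feed URL:

```js
const { autoUpdater } = require('electron')

// Hypothetical feed URL.
autoUpdater.setFeedURL('https://updates.example.com/win32/x64')

autoUpdater.on('update-available', () => console.log('Update available.'))
autoUpdater.on('update-not-available', () => console.log('No update available.'))
autoUpdater.on('error', (err) => console.error('Update error:', err))

autoUpdater.checkForUpdates()
```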

View File

@@ -1,21 +1,27 @@
{
"name": "electron",
"version": "1.7.6",
"version": "1.7.10",
"repository": "https://github.com/electron/electron",
"description": "Build cross platform desktop apps with JavaScript, HTML, and CSS",
"devDependencies": {
"asar": "^0.11.0",
"browserify": "^13.1.0",
"check-for-leaks": "^1.0.2",
"colors": "^1.1.2",
"dotenv-safe": "^4.0.4",
"dugite": "^1.45.0",
"electabul": "~0.0.4",
"electron-docs-linter": "^2.3.3",
"electron-typescript-definitions": "^1.2.7",
"github": "^9.2.0",
"heads": "^1.3.0",
"husky": "^0.14.3",
"minimist": "^1.2.0",
"nugget": "^2.0.1",
"request": "^2.68.0",
"standard": "^8.4.0",
"standard-markdown": "^4.0.0",
"sumchecker": "^2.0.2",
"temp": "^0.8.3"
},
"standard": {
@@ -47,11 +53,15 @@
"lint-api-docs-js": "standard-markdown docs && standard-markdown docs-translations",
"create-api-json": "electron-docs-linter docs --outfile=out/electron-api.json --version=$npm_package_version",
"create-typescript-definitions": "npm run create-api-json && electron-typescript-definitions --in=out/electron-api.json --out=out/electron.d.ts",
"merge-release": "node ./script/merge-release.js",
"mock-release": "node ./script/ci-release-build.js",
"preinstall": "node -e 'process.exit(0)'",
"publish-to-npm": "node ./script/publish-to-npm.js",
"prepack": "check-for-leaks",
"prepush": "check-for-leaks",
"release": "./script/upload.py -p",
"prepare-release": "node ./script/prepare-release.js",
"prerelease": "python ./script/bootstrap.py -v --dev && npm run build",
"release": "node ./script/release.js",
"repl": "python ./script/start.py --interactive",
"start": "python ./script/start.py",
"test": "python ./script/test.py"

View File

@@ -3,6 +3,7 @@
import os
import re
import sys
import argparse
from lib.util import execute, get_electron_version, parse_version, scoped_cwd
@@ -11,29 +12,85 @@ SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
if len(sys.argv) != 2 or sys.argv[1] == '-h':
print 'Usage: bump-version.py [<version> | major | minor | patch]'
parser = argparse.ArgumentParser(
description='Bump version numbers. Must specify at least one of the three'
+' options:\n'
+' --bump=patch to increment patch version, or\n'
+' --stable to promote current beta to stable, or\n'
+' --version={version} to set version number directly\n'
+'Note that you can use both --bump and --stable '
+'simultaneously.',
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
'--version',
default=None,
dest='new_version',
help='new version number'
)
parser.add_argument(
'--bump',
action='store',
default=None,
dest='bump',
help='increment [major | minor | patch | beta]'
)
parser.add_argument(
'--stable',
action='store_true',
default= False,
dest='stable',
help='promote to stable (i.e. remove `-beta.x` suffix)'
)
parser.add_argument(
'--dry-run',
action='store_true',
default= False,
dest='dry_run',
help='just to check that version number is correct'
)
args = parser.parse_args()
if args.new_version == None and args.bump == None and args.stable == False:
parser.print_help()
return 1
option = sys.argv[1]
increments = ['major', 'minor', 'patch', 'build']
if option in increments:
version = get_electron_version()
versions = parse_version(version.split('-')[0])
versions = increase_version(versions, increments.index(option))
else:
versions = parse_version(option)
increments = ['major', 'minor', 'patch', 'beta']
curr_version = get_electron_version()
versions = parse_version(re.sub('-beta', '', curr_version))
if args.bump in increments:
versions = increase_version(versions, increments.index(args.bump))
if versions[3] == '0':
# beta starts at 1
versions = increase_version(versions, increments.index('beta'))
if args.stable == True:
versions[3] = '0'
if args.new_version != None:
versions = parse_version(re.sub('-beta', '', args.new_version))
version = '.'.join(versions[:3])
suffix = '' if versions[3] == '0' else '-beta.' + versions[3]
if args.dry_run:
print 'new version number would be: {0}\n'.format(version + suffix)
return 0
with scoped_cwd(SOURCE_ROOT):
update_electron_gyp(version)
update_electron_gyp(version, suffix)
update_win_rc(version, versions)
update_version_h(versions)
update_version_h(versions, suffix)
update_info_plist(version)
update_package_json(version)
tag_version(version)
update_package_json(version, suffix)
tag_version(version, suffix)
print 'Bumped to version: {0}'.format(version + suffix)
def increase_version(versions, index):
for i in range(index + 1, 4):
@@ -42,14 +99,14 @@ def increase_version(versions, index):
return versions
def update_electron_gyp(version):
pattern = re.compile(" *'version%' *: *'[0-9.]+'")
def update_electron_gyp(version, suffix):
pattern = re.compile(" *'version%' *: *'[0-9.]+(-beta[0-9.]*)?'")
with open('electron.gyp', 'r') as f:
lines = f.readlines()
for i in range(0, len(lines)):
if pattern.match(lines[i]):
lines[i] = " 'version%': '{0}',\n".format(version)
lines[i] = " 'version%': '{0}',\n".format(version + suffix)
with open('electron.gyp', 'w') as f:
f.write(''.join(lines))
return
@@ -81,7 +138,7 @@ def update_win_rc(version, versions):
f.write(''.join(lines))
def update_version_h(versions):
def update_version_h(versions, suffix):
version_h = os.path.join('atom', 'common', 'atom_version.h')
with open(version_h, 'r') as f:
lines = f.readlines()
@@ -93,6 +150,11 @@ def update_version_h(versions):
lines[i + 1] = '#define ATOM_MINOR_VERSION {0}\n'.format(versions[1])
lines[i + 2] = '#define ATOM_PATCH_VERSION {0}\n'.format(versions[2])
if (suffix):
lines[i + 3] = '#define ATOM_PRE_RELEASE_VERSION {0}\n'.format(suffix)
else:
lines[i + 3] = '// #define ATOM_PRE_RELEASE_VERSION\n'
with open(version_h, 'w') as f:
f.write(''.join(lines))
return
@@ -114,7 +176,7 @@ def update_info_plist(version):
f.write(''.join(lines))
def update_package_json(version):
def update_package_json(version, suffix):
package_json = 'package.json'
with open(package_json, 'r') as f:
lines = f.readlines()
@@ -122,15 +184,15 @@ def update_package_json(version):
for i in range(0, len(lines)):
line = lines[i];
if 'version' in line:
lines[i] = ' "version": "{0}",\n'.format(version)
lines[i] = ' "version": "{0}",\n'.format(version + suffix)
break
with open(package_json, 'w') as f:
f.write(''.join(lines))
def tag_version(version):
execute(['git', 'commit', '-a', '-m', 'Bump v{0}'.format(version)])
def tag_version(version, suffix):
execute(['git', 'commit', '-a', '-m', 'Bump v{0}'.format(version + suffix)])
if __name__ == '__main__':

script/ci-release-build.js Normal file (209 additions)
View File

@@ -0,0 +1,209 @@
const assert = require('assert')
const request = require('request')
const buildAppVeyorURL = 'https://windows-ci.electronjs.org/api/builds'
const jenkinsServer = 'https://mac-ci.electronjs.org'
const circleCIJobs = [
'electron-linux-arm',
'electron-linux-ia32',
'electron-linux-x64'
]
const jenkinsJobs = [
'electron-mas-x64-release',
'electron-osx-x64-release'
]
async function makeRequest (requestOptions, parseResponse) {
return new Promise((resolve, reject) => {
request(requestOptions, (err, res, body) => {
if (!err && res.statusCode >= 200 && res.statusCode < 300) {
if (parseResponse) {
const build = JSON.parse(body)
resolve(build)
} else {
resolve(body)
}
} else {
if (parseResponse) {
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
} else {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
}
reject(err)
}
})
})
}
async function circleCIcall (buildUrl, targetBranch, job, ghRelease) {
assert(process.env.CIRCLE_TOKEN, 'CIRCLE_TOKEN not found in environment')
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
let buildRequest = {
'build_parameters': {
'CIRCLE_JOB': job
}
}
if (ghRelease) {
buildRequest.build_parameters.ELECTRON_RELEASE = 1
} else {
buildRequest.build_parameters.RUN_RELEASE_BUILD = 'true'
}
let circleResponse = await makeRequest({
method: 'POST',
url: buildUrl,
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(buildRequest)
}, true).catch(err => {
console.log('Error calling CircleCI:', err)
})
console.log(`Check ${circleResponse.build_url} for status. (${job})`)
}
async function buildAppVeyor (targetBranch, ghRelease) {
console.log(`Triggering AppVeyor to run build on branch: ${targetBranch} with release flag.`)
assert(process.env.APPVEYOR_TOKEN, 'APPVEYOR_TOKEN not found in environment')
let environmentVariables = {}
if (ghRelease) {
environmentVariables.ELECTRON_RELEASE = 1
} else {
environmentVariables.RUN_RELEASE_BUILD = 'true'
}
const requestOpts = {
url: buildAppVeyorURL,
auth: {
bearer: process.env.APPVEYOR_TOKEN
},
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
accountName: 'AppVeyor',
projectSlug: 'electron',
branch: targetBranch,
environmentVariables
}),
method: 'POST'
}
let appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling AppVeyor:', err)
})
const buildUrl = `https://windows-ci.electronjs.org/project/AppVeyor/electron/build/${appVeyorResponse.version}`
console.log(`AppVeyor release build request successful. Check build status at ${buildUrl}`)
}
function buildCircleCI (targetBranch, ghRelease, job) {
const circleBuildUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/tree/${targetBranch}?circle-token=${process.env.CIRCLE_TOKEN}`
if (job) {
assert(circleCIJobs.includes(job), `Unknown CI job name: ${job}.`)
circleCIcall(circleBuildUrl, targetBranch, job, ghRelease)
} else {
circleCIJobs.forEach((job) => circleCIcall(circleBuildUrl, targetBranch, job, ghRelease))
}
}
async function buildJenkins (targetBranch, ghRelease, job) {
assert(process.env.JENKINS_AUTH_TOKEN, 'JENKINS_AUTH_TOKEN not found in environment')
assert(process.env.JENKINS_BUILD_TOKEN, 'JENKINS_BUILD_TOKEN not found in environment')
let jenkinsCrumb = await getJenkinsCrumb()
if (job) {
assert(jenkinsJobs.includes(job), `Unknown CI job name: ${job}.`)
callJenkinsBuild(job, jenkinsCrumb, targetBranch, ghRelease)
} else {
jenkinsJobs.forEach((job) => {
callJenkinsBuild(job, jenkinsCrumb, targetBranch, ghRelease)
})
}
}
async function callJenkins (path, requestParameters, requestHeaders) {
let requestOptions = {
url: `${jenkinsServer}/${path}`,
auth: {
user: 'build',
pass: process.env.JENKINS_AUTH_TOKEN
},
qs: requestParameters
}
if (requestHeaders) {
requestOptions.headers = requestHeaders
}
let jenkinsResponse = await makeRequest(requestOptions).catch(err => {
console.log(`Error calling Jenkins:`, err)
})
return jenkinsResponse
}
async function callJenkinsBuild (job, jenkinsCrumb, targetBranch, ghRelease) {
console.log(`Triggering Jenkins to run build job: ${job} on branch: ${targetBranch} with release flag.`)
let jenkinsParams = {
token: process.env.JENKINS_BUILD_TOKEN,
BRANCH: targetBranch
}
if (!ghRelease) {
jenkinsParams.RUN_RELEASE_BUILD = 1
}
await callJenkins(`job/${job}/buildWithParameters`, jenkinsParams, jenkinsCrumb)
.catch(err => {
console.log(`Error calling Jenkins build`, err)
})
let buildUrl = `${jenkinsServer}/job/${job}/lastBuild/`
console.log(`Jenkins build request successful. Check build status at ${buildUrl}.`)
}
async function getJenkinsCrumb () {
let crumbResponse = await callJenkins('crumbIssuer/api/xml', {
xpath: 'concat(//crumbRequestField,":",//crumb)'
}).catch(err => {
console.log(`Error getting jenkins crumb:`, err)
})
let crumbDetails = crumbResponse.split(':')
let crumbHeader = {}
crumbHeader[crumbDetails[0]] = crumbDetails[1]
return crumbHeader
}
function runRelease (targetBranch, options) {
if (options.ci) {
switch (options.ci) {
case 'CircleCI': {
buildCircleCI(targetBranch, options.ghRelease, options.job)
break
}
case 'AppVeyor': {
buildAppVeyor(targetBranch, options.ghRelease)
break
}
case 'Jenkins': {
buildJenkins(targetBranch, options.ghRelease, options.job)
break
}
}
} else {
buildCircleCI(targetBranch, options.ghRelease, options.job)
buildAppVeyor(targetBranch, options.ghRelease)
buildJenkins(targetBranch, options.ghRelease, options.job)
}
}
module.exports = runRelease
if (require.main === module) {
const args = require('minimist')(process.argv.slice(2))
const targetBranch = args._[0]
if (args._.length < 1) {
console.log(`Trigger CI to build release builds of electron.
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|Jenkins] [--ghRelease] TARGET_BRANCH
`)
process.exit(0)
}
runRelease(targetBranch, args)
}

View File

@@ -63,10 +63,9 @@ def main():
deps += LINUX_DEPS_NO_ARM
execute(['sudo', 'apt-get', 'install'] + deps)
execute(['sh', '-e', '/etc/init.d/xvfb', 'start'])
if PLATFORM == 'linux':
if PLATFORM == 'linux' and target_arch == 'x64':
os.environ['DISPLAY'] = ':99.0'
execute(['sh', '-e', '/etc/init.d/xvfb', 'start'])
# CI's npm is not reliable.
npm = 'npm.cmd' if PLATFORM == 'win32' else 'npm'

script/merge-release.js Executable file (116 additions)
View File

@@ -0,0 +1,116 @@
#!/usr/bin/env node
require('colors')
const assert = require('assert')
const branchToRelease = process.argv[2]
const fail = '\u2717'.red
const { GitProcess, GitError } = require('dugite')
const pass = '\u2713'.green
const path = require('path')
const pkg = require('../package.json')
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
if (!branchToRelease) {
console.log(`Usage: merge-release branch`)
process.exit(1)
}
const gitDir = path.resolve(__dirname, '..')
async function callGit (args, errorMessage, successMessage) {
let gitResult = await GitProcess.exec(args, gitDir)
if (gitResult.exitCode === 0) {
console.log(`${pass} ${successMessage}`)
return true
} else {
console.log(`${fail} ${errorMessage} ${gitResult.stderr}`)
process.exit(1)
}
}
async function checkoutBranch (branchName) {
console.log(`Checking out ${branchName}.`)
let errorMessage = `Error checking out branch ${branchName}:`
let successMessage = `Successfully checked out branch ${branchName}.`
return callGit(['checkout', branchName], errorMessage, successMessage)
}
async function commitMerge () {
console.log(`Committing the merge for v${pkg.version}`)
let errorMessage = `Error committing merge:`
let successMessage = `Successfully committed the merge for v${pkg.version}`
let gitArgs = ['commit', '-m', `v${pkg.version}`]
return callGit(gitArgs, errorMessage, successMessage)
}
async function mergeReleaseIntoBranch (branchName) {
console.log(`Merging release branch into ${branchName}.`)
let mergeArgs = ['merge', 'release', '--squash']
let mergeDetails = await GitProcess.exec(mergeArgs, gitDir)
if (mergeDetails.exitCode === 0) {
return true
} else {
const error = GitProcess.parseError(mergeDetails.stderr)
if (error === GitError.MergeConflicts) {
console.log(`${fail} Could not merge release branch into ${branchName} ` +
`due to merge conflicts.`)
return false
} else {
console.log(`${fail} Could not merge release branch into ${branchName} ` +
`due to an error: ${mergeDetails.stderr}.`)
process.exit(1)
}
}
}
async function pushBranch (branchName) {
console.log(`Pushing branch ${branchName}.`)
let pushArgs = ['push', 'origin', branchName]
let errorMessage = `Could not push branch ${branchName} due to an error:`
let successMessage = `Successfully pushed branch ${branchName}.`
return callGit(pushArgs, errorMessage, successMessage)
}
async function pull () {
console.log(`Performing a git pull`)
let errorMessage = `Could not pull due to an error:`
let successMessage = `Successfully performed a git pull`
return callGit(['pull'], errorMessage, successMessage)
}
async function rebase (targetBranch) {
console.log(`Rebasing release branch from ${targetBranch}`)
let errorMessage = `Could not rebase due to an error:`
let successMessage = `Successfully rebased release branch from ` +
`${targetBranch}`
return callGit(['rebase', targetBranch], errorMessage, successMessage)
}
async function mergeRelease () {
await checkoutBranch(branchToRelease)
let mergeSuccess = await mergeReleaseIntoBranch(branchToRelease)
if (mergeSuccess) {
console.log(`${pass} Successfully merged release branch into ` +
`${branchToRelease}.`)
await commitMerge()
let pushSuccess = await pushBranch(branchToRelease)
if (pushSuccess) {
console.log(`${pass} Success!!! ${branchToRelease} now has the latest release!`)
}
} else {
console.log(`Trying rebase of ${branchToRelease} into release branch.`)
await pull()
await checkoutBranch('release')
let rebaseResult = await rebase(branchToRelease)
if (rebaseResult) {
let pushResult = pushBranch('HEAD')
if (pushResult) {
console.log(`Rebase of ${branchToRelease} into release branch was ` +
`successful. Let release builds run and then try this step again.`)
}
// Exit as failure so release doesn't continue
process.exit(1)
}
}
}
mergeRelease()

script/prepare-release.js Executable file (181 additions)
View File

@@ -0,0 +1,181 @@
#!/usr/bin/env node
require('colors')
const args = require('minimist')(process.argv.slice(2))
const assert = require('assert')
const ciReleaseBuild = require('./ci-release-build')
const { execSync } = require('child_process')
const fail = '\u2717'.red
const { GitProcess, GitError } = require('dugite')
const GitHub = require('github')
const pass = '\u2713'.green
const path = require('path')
const pkg = require('../package.json')
const versionType = args._[0]
// TODO (future) automatically determine version based on conventional commits
// via conventional-recommended-bump
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
if (!versionType && !args.notesOnly) {
console.log(`Usage: prepare-release versionType [major | minor | patch | beta]` +
` (--stable) (--notesOnly)`)
process.exit(1)
}
const github = new GitHub()
const gitDir = path.resolve(__dirname, '..')
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
async function createReleaseBranch () {
console.log(`Creating release branch.`)
let checkoutDetails = await GitProcess.exec([ 'checkout', '-b', 'release' ], gitDir)
if (checkoutDetails.exitCode === 0) {
console.log(`${pass} Successfully created the release branch.`)
} else {
const error = GitProcess.parseError(checkoutDetails.stderr)
if (error === GitError.BranchAlreadyExists) {
console.log(`${fail} Release branch already exists, aborting prepare ` +
`release process.`)
} else {
console.log(`${fail} Error creating release branch: ` +
`${checkoutDetails.stderr}`)
}
process.exit(1)
}
}
function getNewVersion () {
console.log(`Bumping for new "${versionType}" version.`)
let bumpScript = path.join(__dirname, 'bump-version.py')
let scriptArgs = [bumpScript, `--bump ${versionType}`]
if (args.stable) {
scriptArgs.push('--stable')
}
try {
let bumpVersion = execSync(scriptArgs.join(' '), {encoding: 'UTF-8'})
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim()
let newVersion = `v${bumpVersion}`
console.log(`${pass} Successfully bumped version to ${newVersion}`)
return newVersion
} catch (err) {
console.log(`${fail} Could not bump version, error was:`, err)
}
}
async function getCurrentBranch (gitDir) {
console.log(`Determining current git branch`)
let gitArgs = ['rev-parse', '--abbrev-ref', 'HEAD']
let branchDetails = await GitProcess.exec(gitArgs, gitDir)
if (branchDetails.exitCode === 0) {
let currentBranch = branchDetails.stdout.trim()
console.log(`${pass} Successfully determined current git branch is ` +
`${currentBranch}`)
return currentBranch
} else {
let error = GitProcess.parseError(branchDetails.stderr)
console.log(`${fail} Could not get details for the current branch,
error was ${branchDetails.stderr}`, error)
process.exit(1)
}
}
async function getReleaseNotes (currentBranch) {
console.log(`Generating release notes for ${currentBranch}.`)
let githubOpts = {
owner: 'electron',
repo: 'electron',
base: `v${pkg.version}`,
head: currentBranch
}
let releaseNotes = '(placeholder)\n'
console.log(`Checking for commits from ${pkg.version} to ${currentBranch}`)
let commitComparison = await github.repos.compareCommits(githubOpts)
.catch(err => {
console.log(`${fail} Error checking for commits from ${pkg.version} to ` +
`${currentBranch}`, err)
process.exit(1)
})
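// Only merge commits are collected, so the draft notes list the pull requests
// merged since the last released version.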
commitComparison.data.commits.forEach(commitEntry => {
let commitMessage = commitEntry.commit.message
if (commitMessage.toLowerCase().indexOf('merge') > -1) {
releaseNotes += `${commitMessage} \n`
}
})
console.log(`${pass} Done generating release notes for ${currentBranch}.`)
return releaseNotes
}
async function createRelease (branchToTarget, isBeta) {
let releaseNotes = await getReleaseNotes(branchToTarget)
let newVersion = getNewVersion()
const githubOpts = {
owner: 'electron',
repo: 'electron'
}
console.log(`Checking for existing draft release.`)
let releases = await github.repos.getReleases(githubOpts)
.catch(err => {
console.log(`${fail} Could not get releases. Error was`, err)
})
let drafts = releases.data.filter(release => release.draft)
if (drafts.length > 0) {
console.log(`${fail} Aborting because draft release for
${drafts[0].tag_name} already exists.`)
process.exit(1)
}
console.log(`${pass} A draft release does not exist; creating one.`)
githubOpts.body = releaseNotes
githubOpts.draft = true
githubOpts.name = `electron ${newVersion}`
if (isBeta) {
githubOpts.body = `Note: This is a beta release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the beta tag and can be installed via npm install electron@beta, ` +
`or npm i electron@${newVersion.substr(1)}.`
githubOpts.name = `${githubOpts.name}`
githubOpts.prerelease = true
}
githubOpts.tag_name = newVersion
githubOpts.target_commitish = branchToTarget
await github.repos.createRelease(githubOpts)
.catch(err => {
console.log(`${fail} Error creating new release: `, err)
process.exit(1)
})
console.log(`${pass} Draft release for ${newVersion} has been created.`)
}
async function pushRelease () {
let pushDetails = await GitProcess.exec(['push', 'origin', 'HEAD'], gitDir)
if (pushDetails.exitCode === 0) {
console.log(`${pass} Successfully pushed the release branch. Wait for ` +
`release builds to finish before running "npm run release".`)
} else {
console.log(`${fail} Error pushing the release branch: ` +
`${pushDetails.stderr}`)
process.exit(1)
}
}
async function runReleaseBuilds () {
await ciReleaseBuild('release', {
ghRelease: true
})
}
async function prepareRelease (isBeta, notesOnly) {
let currentBranch = await getCurrentBranch(gitDir)
if (notesOnly) {
let releaseNotes = await getReleaseNotes(currentBranch)
console.log(`Draft release notes are: ${releaseNotes}`)
} else {
await createReleaseBranch()
await createRelease(currentBranch, isBeta)
await pushRelease()
await runReleaseBuilds()
}
}
prepareRelease(!args.stable, args.notesOnly)

script/prerelease.js Executable file

@@ -0,0 +1,112 @@
#!/usr/bin/env node
require('colors')
const assert = require('assert')
const GitHub = require('github')
const heads = require('heads')
const pkg = require('../package.json')
const pass = '\u2713'.green
const fail = '\u2717'.red
let failureCount = 0
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
const github = new GitHub()
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
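// Verifies that the draft release created by prepare-release.js is complete:
// exactly one draft exists, its tag matches package.json, it is flagged as a
// prerelease, it has real release notes, every required asset is attached,
// and the supporting files on S3 answer HEAD requests with 200.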
github.repos.getReleases({owner: 'electron', repo: 'electron'})
.then(res => {
const releases = res.data
const drafts = releases
.filter(release => release.draft) // comment out for testing
// .filter(release => release.tag_name === 'v1.7.5') // uncomment for testing
check(drafts.length === 1, 'one draft exists', true)
const draft = drafts[0]
check(draft.tag_name === `v${pkg.version}`, `draft release version matches local package.json (v${pkg.version})`)
check(draft.prerelease, 'draft is a prerelease')
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes')
const requiredAssets = assetsForVersion(draft.tag_name).sort()
const extantAssets = draft.assets.map(asset => asset.name).sort()
requiredAssets.forEach(asset => {
check(extantAssets.includes(asset), asset)
})
const s3Urls = s3UrlsForVersion(draft.tag_name)
heads(s3Urls)
.then(results => {
results.forEach((result, i) => {
check(result === 200, s3Urls[i])
})
process.exit(failureCount > 0 ? 1 : 0)
})
.catch(err => {
console.error('Error making HEAD requests for S3 assets')
console.error(err)
process.exit(1)
})
})
function check (condition, statement, exitIfFail = false) {
if (condition) {
console.log(`${pass} ${statement}`)
} else {
failureCount++
console.log(`${fail} ${statement}`)
if (exitIfFail) process.exit(1)
}
}
function assetsForVersion (version) {
const patterns = [
'electron-{{VERSION}}-darwin-x64-dsym.zip',
'electron-{{VERSION}}-darwin-x64-symbols.zip',
'electron-{{VERSION}}-darwin-x64.zip',
'electron-{{VERSION}}-linux-arm-symbols.zip',
'electron-{{VERSION}}-linux-arm.zip',
'electron-{{VERSION}}-linux-armv7l-symbols.zip',
'electron-{{VERSION}}-linux-armv7l.zip',
'electron-{{VERSION}}-linux-ia32-symbols.zip',
'electron-{{VERSION}}-linux-ia32.zip',
'electron-{{VERSION}}-linux-x64-symbols.zip',
'electron-{{VERSION}}-linux-x64.zip',
'electron-{{VERSION}}-mas-x64-dsym.zip',
'electron-{{VERSION}}-mas-x64-symbols.zip',
'electron-{{VERSION}}-mas-x64.zip',
'electron-{{VERSION}}-win32-ia32-pdb.zip',
'electron-{{VERSION}}-win32-ia32-symbols.zip',
'electron-{{VERSION}}-win32-ia32.zip',
'electron-{{VERSION}}-win32-x64-pdb.zip',
'electron-{{VERSION}}-win32-x64-symbols.zip',
'electron-{{VERSION}}-win32-x64.zip',
'electron-api.json',
'electron.d.ts',
'ffmpeg-{{VERSION}}-darwin-x64.zip',
'ffmpeg-{{VERSION}}-linux-arm.zip',
'ffmpeg-{{VERSION}}-linux-armv7l.zip',
'ffmpeg-{{VERSION}}-linux-ia32.zip',
'ffmpeg-{{VERSION}}-linux-x64.zip',
'ffmpeg-{{VERSION}}-mas-x64.zip',
'ffmpeg-{{VERSION}}-win32-ia32.zip',
'ffmpeg-{{VERSION}}-win32-x64.zip'
]
return patterns.map(pattern => pattern.replace(/{{VERSION}}/g, version))
}
function s3UrlsForVersion (version) {
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/'
const patterns = [
'atom-shell/dist/{{VERSION}}/iojs-{{VERSION}}-headers.tar.gz',
'atom-shell/dist/{{VERSION}}/iojs-{{VERSION}}.tar.gz',
'atom-shell/dist/{{VERSION}}/node-{{VERSION}}.tar.gz',
'atom-shell/dist/{{VERSION}}/node.lib',
'atom-shell/dist/{{VERSION}}/win-x64/iojs.lib',
'atom-shell/dist/{{VERSION}}/win-x86/iojs.lib',
'atom-shell/dist/{{VERSION}}/x64/node.lib',
'atom-shell/dist/index.json'
]
return patterns.map(pattern => bucket + pattern.replace(/{{VERSION}}/g, version))
}

script/release.js Executable file

@@ -0,0 +1,459 @@
#!/usr/bin/env node
require('colors')
const args = require('minimist')(process.argv.slice(2))
const assert = require('assert')
const fs = require('fs')
const { execSync } = require('child_process')
const GitHub = require('github')
const { GitProcess } = require('dugite')
const nugget = require('nugget')
const pkg = require('../package.json')
const pkgVersion = `v${pkg.version}`
const pass = '\u2713'.green
const path = require('path')
const fail = '\u2717'.red
const sumchecker = require('sumchecker')
const temp = require('temp').track()
const { URL } = require('url')
let failureCount = 0
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
const github = new GitHub({
followRedirects: false
})
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
const gitDir = path.resolve(__dirname, '..')
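// With --validateRelease the script only checks an existing release's assets.
// Otherwise it verifies the local version, finds the draft release, uploads the
// Node SHASUMS files and index.json to S3, generates and uploads SHASUMS256.txt,
// re-validates every asset, publishes the draft, and deletes the release branch.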
async function getDraftRelease (version, skipValidation) {
let releaseInfo = await github.repos.getReleases({owner: 'electron', repo: 'electron'})
let drafts
let versionToCheck
if (version) {
drafts = releaseInfo.data
.filter(release => release.tag_name === version)
versionToCheck = version
} else {
drafts = releaseInfo.data
.filter(release => release.draft)
versionToCheck = pkgVersion
}
const draft = drafts[0]
if (!skipValidation) {
failureCount = 0
check(drafts.length === 1, 'one draft exists', true)
check(draft.tag_name === versionToCheck, `draft release version matches local package.json (${versionToCheck})`)
if (versionToCheck.indexOf('beta') > -1) {
check(draft.prerelease, 'draft is a prerelease')
}
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes')
check((failureCount === 0), `Draft release looks good to go.`, true)
}
return draft
}
async function validateReleaseAssets (release) {
const requiredAssets = assetsForVersion(release.tag_name).sort()
const extantAssets = release.assets.map(asset => asset.name).sort()
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort()
failureCount = 0
requiredAssets.forEach(asset => {
check(extantAssets.includes(asset), asset)
})
check((failureCount === 0), `All required GitHub assets exist for release`, true)
if (release.draft) {
await verifyAssets(release)
} else {
await verifyShasums(downloadUrls)
.catch(err => {
console.log(`${fail} Error verifying shasums`, err)
})
}
const s3Urls = s3UrlsForVersion(release.tag_name)
await verifyShasums(s3Urls, true)
}
function check (condition, statement, exitIfFail = false) {
if (condition) {
console.log(`${pass} ${statement}`)
} else {
failureCount++
console.log(`${fail} ${statement}`)
if (exitIfFail) process.exit(1)
}
}
function assetsForVersion (version) {
const patterns = [
`electron-${version}-darwin-x64-dsym.zip`,
`electron-${version}-darwin-x64-symbols.zip`,
`electron-${version}-darwin-x64.zip`,
`electron-${version}-linux-arm-symbols.zip`,
`electron-${version}-linux-arm.zip`,
`electron-${version}-linux-armv7l-symbols.zip`,
`electron-${version}-linux-armv7l.zip`,
`electron-${version}-linux-ia32-symbols.zip`,
`electron-${version}-linux-ia32.zip`,
`electron-${version}-linux-x64-symbols.zip`,
`electron-${version}-linux-x64.zip`,
`electron-${version}-mas-x64-dsym.zip`,
`electron-${version}-mas-x64-symbols.zip`,
`electron-${version}-mas-x64.zip`,
`electron-${version}-win32-ia32-pdb.zip`,
`electron-${version}-win32-ia32-symbols.zip`,
`electron-${version}-win32-ia32.zip`,
`electron-${version}-win32-x64-pdb.zip`,
`electron-${version}-win32-x64-symbols.zip`,
`electron-${version}-win32-x64.zip`,
`electron-api.json`,
`electron.d.ts`,
`ffmpeg-${version}-darwin-x64.zip`,
`ffmpeg-${version}-linux-arm.zip`,
`ffmpeg-${version}-linux-armv7l.zip`,
`ffmpeg-${version}-linux-ia32.zip`,
`ffmpeg-${version}-linux-x64.zip`,
`ffmpeg-${version}-mas-x64.zip`,
`ffmpeg-${version}-win32-ia32.zip`,
`ffmpeg-${version}-win32-x64.zip`,
`SHASUMS256.txt`
]
return patterns
}
function s3UrlsForVersion (version) {
const bucket = `https://gh-contractor-zcbenz.s3.amazonaws.com/`
const patterns = [
`${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
`${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,
`${bucket}atom-shell/dist/${version}/node-${version}.tar.gz`,
`${bucket}atom-shell/dist/${version}/node.lib`,
`${bucket}atom-shell/dist/${version}/win-x64/iojs.lib`,
`${bucket}atom-shell/dist/${version}/win-x86/iojs.lib`,
`${bucket}atom-shell/dist/${version}/x64/node.lib`,
`${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
`${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
`${bucket}atom-shell/dist/index.json`
]
return patterns
}
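// Sanity check: the version reported by script/start.py --version must be a
// prefix of the package.json version before anything is uploaded or published.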
function checkVersion () {
console.log(`Verifying that app version matches package version ${pkgVersion}.`)
let startScript = path.join(__dirname, 'start.py')
let appVersion = runScript(startScript, ['--version']).trim()
check((pkgVersion.indexOf(appVersion) === 0), `App version ${appVersion} matches ` +
`package version ${pkgVersion}.`, true)
}
function runScript (scriptName, scriptArgs, cwd) {
let scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`
let scriptOptions = {
encoding: 'UTF-8'
}
if (cwd) {
scriptOptions.cwd = cwd
}
try {
return execSync(scriptCommand, scriptOptions)
} catch (err) {
console.log(`${fail} Error running ${scriptName}`, err)
process.exit(1)
}
}
function uploadNodeShasums () {
console.log('Uploading Node SHASUMS file to S3.')
let scriptPath = path.join(__dirname, 'upload-node-checksums.py')
runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Done uploading Node SHASUMS file to S3.`)
}
function uploadIndexJson () {
console.log('Uploading index.json to S3.')
let scriptPath = path.join(__dirname, 'upload-index-json.py')
runScript(scriptPath, [])
console.log(`${pass} Done uploading index.json to S3.`)
}
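// SHASUMS256.txt is rebuilt from the per-file checksums merged by
// merge-electron-checksums.py; any stale copy already attached to the draft
// release is deleted before the new file is uploaded.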
async function createReleaseShasums (release) {
let fileName = 'SHASUMS256.txt'
let existingAssets = release.assets.filter(asset => asset.name === fileName)
if (existingAssets.length > 0) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`)
await github.repos.deleteAsset({
owner: 'electron',
repo: 'electron',
id: existingAssets[0].id
}).catch(err => {
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err)
})
}
console.log(`Creating and uploading the release ${fileName}.`)
let scriptPath = path.join(__dirname, 'merge-electron-checksums.py')
let checksums = runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Generated release SHASUMS.`)
let filePath = await saveShaSumFile(checksums, fileName)
console.log(`${pass} Created ${fileName} file.`)
await uploadShasumFile(filePath, fileName, release)
console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`)
}
async function uploadShasumFile (filePath, fileName, release) {
let githubOpts = {
owner: 'electron',
repo: 'electron',
id: release.id,
filePath,
name: fileName
}
return github.repos.uploadAsset(githubOpts)
.catch(err => {
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err)
process.exit(1)
})
}
function saveShaSumFile (checksums, fileName) {
return new Promise((resolve, reject) => {
temp.open(fileName, (err, info) => {
if (err) {
console.log(`${fail} Could not create ${fileName} file`)
process.exit(1)
} else {
fs.writeFileSync(info.fd, checksums)
fs.close(info.fd, (err) => {
if (err) {
console.log(`${fail} Could not close ${fileName} file`)
process.exit(1)
}
resolve(info.path)
})
}
})
})
}
async function publishRelease (release) {
let githubOpts = {
owner: 'electron',
repo: 'electron',
id: release.id,
tag_name: release.tag_name,
draft: false
}
return github.repos.editRelease(githubOpts)
.catch(err => {
console.log(`${fail} Error publishing release:`, err)
process.exit(1)
})
}
async function makeRelease (releaseToValidate) {
if (releaseToValidate) {
console.log(`Validating release ${args.validateRelease}`)
let release = await getDraftRelease(args.validateRelease)
await validateReleaseAssets(release)
} else {
checkVersion()
let draftRelease = await getDraftRelease()
uploadNodeShasums()
uploadIndexJson()
await createReleaseShasums(draftRelease)
// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true)
await validateReleaseAssets(draftRelease)
await publishRelease(draftRelease)
await cleanupReleaseBranch()
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
`"npm run publish-to-npm" to publish release to npm.`)
}
}
async function makeTempDir () {
return new Promise((resolve, reject) => {
temp.mkdir('electron-publish', (err, dirPath) => {
if (err) {
reject(err)
} else {
resolve(dirPath)
}
})
})
}
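// For a draft release the assets are downloaded straight from the GitHub API
// (Accept: application/octet-stream) into a temp directory and then checked
// against the SHASUMS256.txt attached to the release.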
async function verifyAssets (release) {
let downloadDir = await makeTempDir()
let githubOpts = {
owner: 'electron',
repo: 'electron',
headers: {
Accept: 'application/octet-stream'
}
}
console.log(`Downloading files from GitHub to verify shasums`)
let shaSumFile = 'SHASUMS256.txt'
let filesToCheck = await Promise.all(release.assets.map(async (asset) => {
githubOpts.id = asset.id
let assetDetails = await github.repos.getAsset(githubOpts)
await downloadFiles(assetDetails.meta.location, downloadDir, false, asset.name)
return asset.name
})).catch(err => {
console.log(`${fail} Error downloading files from GitHub`, err)
process.exit(1)
})
filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile)
let checkerOpts
await validateChecksums({
algorithm: 'sha256',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile,
checkerOpts,
fileSource: 'GitHub'
})
}
function downloadFiles (urls, directory, quiet, targetName) {
return new Promise((resolve, reject) => {
let nuggetOpts = {
dir: directory
}
if (quiet) {
nuggetOpts.quiet = quiet
}
if (targetName) {
nuggetOpts.target = targetName
}
nugget(urls, nuggetOpts, (err) => {
if (err) {
reject(err)
} else {
resolve()
}
})
})
}
async function verifyShasums (urls, isS3) {
let fileSource = isS3 ? 'S3' : 'GitHub'
console.log(`Downloading files from ${fileSource} to verify shasums`)
let downloadDir = await makeTempDir()
let filesToCheck = []
try {
if (!isS3) {
await downloadFiles(urls, downloadDir)
filesToCheck = urls.map(url => {
let currentUrl = new URL(url)
return path.basename(currentUrl.pathname)
}).filter(file => file.indexOf('SHASUMS') === -1)
} else {
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`
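// Files sitting directly under the version directory are downloaded into the
// top-level temp dir; files in subdirectories (e.g. win-x64, win-x86, x64) are
// downloaded into matching subdirectories so their paths line up with the
// entries in the SHASUMS files.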
await Promise.all(urls.map(async (url) => {
let currentUrl = new URL(url)
let dirname = path.dirname(currentUrl.pathname)
let filename = path.basename(currentUrl.pathname)
let s3VersionPathIdx = dirname.indexOf(s3VersionPath)
if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
if (s3VersionPathIdx !== -1 && filename.indexOf('SHASUMS') === -1) {
filesToCheck.push(filename)
}
await downloadFiles(url, downloadDir, true)
} else {
let subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length)
let fileDirectory = path.join(downloadDir, subDirectory)
try {
fs.statSync(fileDirectory)
} catch (err) {
fs.mkdirSync(fileDirectory)
}
filesToCheck.push(path.join(subDirectory, filename))
await downloadFiles(url, fileDirectory, true)
}
}))
}
} catch (err) {
console.log(`${fail} Error downloading files from ${fileSource}`, err)
process.exit(1)
}
console.log(`${pass} Successfully downloaded the files from ${fileSource}.`)
let checkerOpts
if (isS3) {
checkerOpts = { defaultTextEncoding: 'binary' }
}
await validateChecksums({
algorithm: 'sha256',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile: 'SHASUMS256.txt',
checkerOpts,
fileSource
})
if (isS3) {
await validateChecksums({
algorithm: 'sha1',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile: 'SHASUMS.txt',
checkerOpts,
fileSource
})
}
}
async function validateChecksums (validationArgs) {
console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
`against ${validationArgs.shaSumFile}.`)
let shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile)
let checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
shaSumFilePath, validationArgs.checkerOpts)
await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
.catch(err => {
if (err instanceof sumchecker.ChecksumMismatchError) {
console.error(`${fail} The checksum of ${err.filename} from ` +
`${validationArgs.fileSource} did not match the shasum in ` +
`${validationArgs.shaSumFile}`)
} else if (err instanceof sumchecker.ChecksumParseError) {
console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
`from ${validationArgs.fileSource} could not be parsed.`, err)
} else if (err instanceof sumchecker.NoChecksumFoundError) {
console.error(`${fail} The file ${err.filename} from ` +
`${validationArgs.fileSource} was not in the shasum file ` +
`${validationArgs.shaSumFile}.`)
} else {
console.error(`${fail} Error matching files from ` +
`${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err)
}
process.exit(1)
})
console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
`shasums defined in ${validationArgs.shaSumFile}.`)
}
async function cleanupReleaseBranch () {
console.log(`Cleaning up release branch.`)
let errorMessage = `Could not delete local release branch.`
let successMessage = `Successfully deleted local release branch.`
await callGit(['branch', '-D', 'release'], errorMessage, successMessage)
errorMessage = `Could not delete remote release branch.`
successMessage = `Successfully deleted remote release branch.`
return callGit(['push', 'origin', ':release'], errorMessage, successMessage)
}
async function callGit (args, errorMessage, successMessage) {
let gitResult = await GitProcess.exec(args, gitDir)
if (gitResult.exitCode === 0) {
console.log(`${pass} ${successMessage}`)
return true
} else {
console.log(`${fail} ${errorMessage} ${gitResult.stderr}`)
process.exit(1)
}
}
makeRelease(args.validateRelease)


@@ -8,7 +8,7 @@ from lib.config import get_target_arch
from lib.util import safe_mkdir, rm_rf, extract_zip, tempdir, download
VERSION = 'v1.2.1'
VERSION = 'v1.2.2'
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'http://github.com/electron/electron-frameworks/releases' \
'/download/' + VERSION


@@ -0,0 +1,51 @@
const GitHub = require('github')
const github = new GitHub()
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
if (process.argv.length < 5) {
console.log('Usage: upload-to-github filePath fileName releaseId')
process.exit(1)
}
let filePath = process.argv[2]
let fileName = process.argv[3]
let releaseId = process.argv[4]
let githubOpts = {
owner: 'electron',
repo: 'electron',
id: releaseId,
filePath: filePath,
name: fileName
}
let retry = 0
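// Retry the upload up to four times. If an asset with the same name is already
// attached (e.g. from a partially failed upload), delete it before retrying.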
function uploadToGitHub () {
github.repos.uploadAsset(githubOpts).then(() => {
console.log(`Successfully uploaded ${fileName} to GitHub.`)
process.exit()
}).catch((err) => {
if (retry < 4) {
console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err)
retry++
github.repos.getRelease(githubOpts).then(release => {
let existingAssets = release.data.assets.filter(asset => asset.name === fileName)
if (existingAssets.length > 0) {
console.log(`${fileName} already exists; will delete before retrying upload.`)
github.repos.deleteAsset({
owner: 'electron',
repo: 'electron',
id: existingAssets[0].id
}).then(uploadToGitHub).catch(uploadToGitHub)
} else {
uploadToGitHub()
}
})
} else {
console.log(`Failed to upload ${fileName} to GitHub after retries:`, err)
process.exitCode = 1
}
})
}
uploadToGitHub()


@@ -36,70 +36,65 @@ PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
for r in releases:
if not r['draft'] and r['tag_name'] == args.version:
release = r
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# TODO: run publish-to-npm script here
# Do not upload other files when passed "-p".
return
if not args.upload_to_s3:
assert tag_exists == args.overwrite, \
'You have to pass --overwrite to overwrite a published release'
if not args.overwrite:
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME),
args.upload_to_s3)
if get_target_arch() != 'mips64el':
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME),
args.upload_to_s3)
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
'electron-api.json'), args.upload_to_s3)
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'),
args.upload_to_s3)
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME),
args.upload_to_s3)
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME),
args.upload_to_s3)
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg),
args.upload_to_s3)
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver),
args.upload_to_s3)
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
args.upload_to_s3)
if PLATFORM == 'win32' and not tag_exists:
if PLATFORM == 'win32' and not tag_exists and not args.upload_to_s3:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
@@ -112,9 +107,18 @@ def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-o', '--overwrite',
help='Overwrite a published release',
action='store_true')
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
parser.add_argument('-s', '--upload_to_s3',
help='Upload assets to s3 bucket',
dest='upload_to_s3',
action='store_true',
default=False,
required=False)
return parser.parse_args()
@@ -124,7 +128,7 @@ def run_python_script(script, *args):
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
if get_target_arch().startswith('arm') or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
@@ -198,14 +202,17 @@ def create_release_draft(github, tag):
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path, upload_to_s3):
# if upload_to_s3 is set, skip github upload.
if upload_to_s3:
bucket, access_key, secret_key = s3_config()
key_prefix = 'electron-artifacts/{0}'.format(release['tag_name'])
s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
key_prefix, [file_path])
upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
return
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
@@ -217,8 +224,7 @@ def upload_electron(github, release, file_path):
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
upload_io_to_github(release, filename, file_path)
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
@@ -229,19 +235,21 @@ def upload_electron(github, release, file_path):
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
upload_electron(github, release, arm_file_path, upload_to_s3)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_io_to_github(release, filename, filepath):
print 'Uploading %s to Github' % \
(filename)
script_path = os.path.join(SOURCE_ROOT, 'script', 'upload-to-github.js')
execute(['node', script_path, filepath, filename, str(release['id'])])
def upload_sha256_checksum(version, file_path):
def upload_sha256_checksum(version, file_path, key_prefix=None):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
if key_prefix is None:
key_prefix = 'atom-shell/tmp/{0}'.format(version)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
@@ -250,12 +258,7 @@ def upload_sha256_checksum(version, file_path):
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
key_prefix, [checksum_path])
def auth_token():


@@ -149,6 +149,34 @@ describe('app module', function () {
})
})
describe('app.makeSingleInstance', function () {
it('prevents the second launch of app', function (done) {
this.timeout(120000)
const appPath = path.join(__dirname, 'fixtures', 'api', 'singleton')
// First launch should exit with 0.
let secondLaunched = false
const first = ChildProcess.spawn(remote.process.execPath, [appPath])
let launchOnce = true
first.stdout.on('data', (data) => {
if (data.toString().trim() === 'launched' && launchOnce) {
launchOnce = false
// Second launch should exit with 1.
const second = ChildProcess.spawn(remote.process.execPath, [appPath])
second.once('exit', (code) => {
assert.ok(!secondLaunched)
assert.equal(code, 1)
secondLaunched = true
})
}
})
first.once('exit', (code) => {
assert.ok(secondLaunched)
assert.equal(code, 0)
done()
})
})
})
describe('app.relaunch', function () {
let server = null
const socketPath = process.platform === 'win32' ? '\\\\.\\pipe\\electron-app-relaunch' : '/tmp/electron-app-relaunch'
@@ -208,9 +236,10 @@ describe('app module', function () {
})
})
describe('app.importCertificate', function () {
xdescribe('app.importCertificate', function () {
if (process.platform !== 'linux') return
this.timeout(120000)
var w = null
afterEach(function () {
@@ -405,7 +434,7 @@ describe('app module', function () {
})
})
describe('select-client-certificate event', function () {
xdescribe('select-client-certificate event', function () {
let w = null
beforeEach(function () {

spec/fixtures/api/singleton/main.js vendored Normal file

@@ -0,0 +1,15 @@
const {app} = require('electron')
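// Exit codes used by the spec above: the second instance sees a truthy
// makeSingleInstance() result and exits 1; the first instance exits 0 once its
// single-instance callback fires; any uncaught exception exits 2.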
console.log('launched')
process.on('uncaughtException', () => {
app.exit(2)
})
const shouldExit = app.makeSingleInstance(() => {
process.nextTick(() => app.exit(0))
})
if (shouldExit) {
app.exit(1)
}


@@ -0,0 +1,5 @@
{
"name": "electron-app-singleton",
"main": "main.js"
}


@@ -9,6 +9,7 @@
"graceful-fs": "^4.1.9",
"mkdirp": "^0.5.1",
"mocha": "^3.1.0",
"mocha-junit-reporter": "^1.14.0",
"multiparty": "^4.1.3",
"q": "^1.4.1",
"send": "^0.14.1",


@@ -51,10 +51,15 @@
var Coverage = require('electabul').Coverage;
var Mocha = require('mocha');
var mochaOpts = {};
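// Allow CI to pick the reporter (e.g. mocha-junit-reporter) via MOCHA_REPORTER;
// when it is unset, the previous defaults below still apply ('tap' on CI, 'html' locally).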
if (process.env.MOCHA_REPORTER) {
mochaOpts.reporter = process.env.MOCHA_REPORTER;
}
var mocha = new Mocha(mochaOpts);
var mocha = new Mocha();
mocha.ui('bdd').reporter(isCi ? 'tap' : 'html');
if (!process.env.MOCHA_REPORTER) {
mocha.ui('bdd').reporter(isCi ? 'tap' : 'html');
}
mocha.timeout(isCi ? 30000 : 10000)
var query = Mocha.utils.parseQuery(window.location.search || '');

vendor/node vendored