Mirror of https://github.com/electron/electron.git (synced 2026-02-19 03:14:51 -05:00)

Compare commits: try-fix-ap ... 1-8-x (159 commits)
Commits in this comparison (abbreviated SHA1s):

f1a1197e69 90575997e2 08615b52ed e52b080fa7 6e1917c01d 1ee5fabcf2
cb70ec0cca c3f9251808 d2f7570e86 1ee0a41177 2b10d24829 7780d45814
23f698a4e1 b0a70d4aae c5252c872d cf2ab209e8 7299943cd9 6b8eba48f5
c2412d56fe 320e026eab 0602ca8e9c 519a02d8d4 8ebd0ce1b1 61d9470b7a
16e1b2340f f91aa3f37d a1a921325d 65533961e2 77093d540f 86438632d7
df69c6de2c e68bdbff65 caa889e270 700ea8731f e15e152177 8e9bdfc59d
c96916ef94 959ffff316 66bd8c669d 2bb22f776a baa12df67b d8b41482a2
43650d4053 8c50e14eee 41eda591c2 2a90b5cbfa 9bb3547809 9827185d60
ed3206a0f5 7189fb5bd8 44d9720629 1607708c33 cc7aa41303 acb9a47d50
8d7b348a70 adcddb8b32 c94eb28451 dccc98677b 295a4f81dd 6993ee352d
9dfb85dd21 cf5e77c39f 9a9b4910e8 8200d6cdbf c26372c0f3 ac75e00aee
f95e5c71c0 ffe47c3f50 73c2652935 6c7e64cceb e2c7b43f25 8144e53bfd
891ab8e256 fe2dfdc24d 8efd31ab4d d2adedadd3 1ddf6188f4 2695a68869
ba016f3949 c7a5c079f7 c1dad25f33 120bc964c6 c3da3ae3f0 d1f1210044
c9febc49d1 87d2571663 d94970e81a acbf1c9f94 e14b7d9bc6 712b5fe0fd
eedb0b778f ed0676e167 f39763d7f5 5889bbe27c ae1a54d33e 71a93aec5d
ad1929c4b6 6aa0c44f5d 5e0111abc6 1860930b7e c285bdb2bc 9b44c764dc
75ce746885 d0aa740720 5e8735c968 13cb27f7ab 43bc858c0b 6bc2894bf5
a784804349 b3e6597317 6413a7f0f3 0e9b0fff46 0b7fd96629 03add24cd7
36f0a74b17 92f4e5ea7d 5aee1cefff 7bfb3f4141 cd3f36a968 8681dc4a26
95c7499775 470b31d1af e912091aca 281e1748ef f15ce53dca d45b8dfe83
ba933e2998 cb1ee8982d 290f985571 08f36c1383 7659c16b09 daf6dd99a8
8d220141a5 a1f23064e1 64ede04002 dab7f7f18d 9ceef5f1d3 a250089a40
6efa33043a 445781ce33 ec088c7940 71034f8008 c9d2b071ab d0ca62b173
6982fb6dd0 be46ba1849 5eefde63b9 2a97f2636a ab3811e6dd 686dd664de
1e5ba47731 7ef262cab3 19d70e5f1f d0a9379b37 0ac7106e6c f19f125f68
79385dcb74 a71a20ee32 99a1161fe4
@@ -3,10 +3,10 @@ version: 2
jobs:
electron-linux-arm:
docker:
- image: electronbuilds/electron:0.0.3
- image: electronbuilds/electron:0.0.7
environment:
TARGET_ARCH: arm
resource_class: xlarge
resource_class: 2xlarge
steps:
- checkout
- run:
@@ -60,10 +60,10 @@ jobs:
fi
electron-linux-arm64:
docker:
- image: electronbuilds/electron:0.0.3
- image: electronbuilds/electron:0.0.7
environment:
TARGET_ARCH: arm64
resource_class: xlarge
resource_class: 2xlarge
steps:
- checkout
- run:
@@ -117,7 +117,7 @@ jobs:
fi
electron-linux-ia32:
docker:
- image: electronbuilds/electron:0.0.3
- image: electronbuilds/electron:0.0.7
environment:
TARGET_ARCH: ia32
resource_class: xlarge
@@ -174,7 +174,7 @@ jobs:
fi
electron-linux-mips64el:
docker:
- image: electronbuilds/electron:0.0.3
- image: electronbuilds/electron:0.0.7
environment:
TARGET_ARCH: mips64el
resource_class: xlarge
@@ -232,11 +232,11 @@ jobs:

electron-linux-x64:
docker:
- image: electronbuilds/electron:0.0.3
- image: electronbuilds/electron:0.0.7
environment:
TARGET_ARCH: x64
DISPLAY: ':99.0'
resource_class: xlarge
resource_class: 2xlarge
steps:
- checkout
- run:
@@ -262,6 +262,206 @@ jobs:
- run: npm run lint
- run:
name: Build
no_output_timeout: 30m
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
script/build.py -c R
else
echo 'Building Electron for debug'
script/build.py -c D
fi
- run:
name: Create distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Creating Electron release distribution'
script/create-dist.py
else
echo 'Skipping create distribution because build is not for release'
fi
- run:
name: Upload distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" != "1" ]; then
echo 'Uploading Electron release distribution to github releases'
script/upload.py
elif [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" == "1" ]; then
echo 'Uploading Electron release distribution to s3'
script/upload.py --upload_to_s3
else
echo 'Skipping upload distribution because build is not for release'
fi
- run:
name: Test
environment:
MOCHA_FILE: junit/test-results.xml
MOCHA_REPORTER: mocha-junit-reporter
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Testing Electron debug build'
mkdir junit
script/test.py --ci --rebuild_native_modules
else
echo 'Skipping testing on release build'
fi
- run:
name: Verify FFmpeg
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Verifying ffmpeg on debug build'
script/verify-ffmpeg.py
else
echo 'Skipping verify ffmpeg on release build'
fi
- run:
name: Generate Typescript Definitions
command: npm run create-typescript-definitions
- store_test_results:
path: junit
- store_artifacts:
path: junit
- store_artifacts:
path: out/electron.d.ts
- store_artifacts:
path: out/electron-api.json

electron-osx-x64:
environment:
TARGET_ARCH: x64
macos:
xcode: "9.0"
resource_class: 2xlarge
steps:
- checkout
- run:
name: Reclaim disk space
command: |
df -h
sudo rm -rf /Library/Developer/CoreSimulator
df -h
sysctl -n hw.ncpu
- run:
name: Check for release
command: |
if [ -n "${RUN_RELEASE_BUILD}" ]; then
echo 'release build triggered from api'
echo 'export ELECTRON_RELEASE=1 TRIGGERED_BY_API=1' >> $BASH_ENV
fi
- run:
name: Bootstrap
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Bootstrapping Electron for release build'
script/bootstrap.py --target_arch=$TARGET_ARCH
else
echo 'Bootstrapping Electron for debug build'
script/bootstrap.py --target_arch=$TARGET_ARCH --dev
fi
- run: npm run lint
- run:
name: Build
no_output_timeout: 30m
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
script/build.py -c R
else
echo 'Building Electron for debug'
script/build.py -c D
fi
- run:
name: Create distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Creating Electron release distribution'
script/create-dist.py
else
echo 'Skipping create distribution because build is not for release'
fi
- run:
name: Upload distribution
command: |
if [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" != "1" ]; then
echo 'Uploading Electron release distribution to github releases'
script/upload.py
elif [ "$ELECTRON_RELEASE" == "1" ] && [ "$TRIGGERED_BY_API" == "1" ]; then
echo 'Uploading Electron release distribution to s3'
script/upload.py --upload_to_s3
else
echo 'Skipping upload distribution because build is not for release'
fi
- run:
name: Test
environment:
MOCHA_FILE: junit/test-results.xml
MOCHA_REPORTER: mocha-junit-reporter
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Testing Electron debug build'
mkdir junit
script/test.py --ci --rebuild_native_modules
else
echo 'Skipping testing on release build'
fi
- run:
name: Verify FFmpeg
command: |
if [ "$ELECTRON_RELEASE" != "1" ]; then
echo 'Verifying ffmpeg on debug build'
script/verify-ffmpeg.py
else
echo 'Skipping verify ffmpeg on release build'
fi
- run:
name: Generate Typescript Definitions
command: npm run create-typescript-definitions
- store_test_results:
path: junit
- store_artifacts:
path: junit
- store_artifacts:
path: out/electron.d.ts
- store_artifacts:
path: out/electron-api.json

electron-mas-x64:
environment:
TARGET_ARCH: x64
MAS_BUILD: 1
macos:
xcode: "9.0"
resource_class: 2xlarge
steps:
- checkout
- run:
name: Reclaim disk space
command: |
df -h
sudo rm -rf /Library/Developer/CoreSimulator
df -h
sysctl -n hw.ncpu
- run:
name: Check for release
command: |
if [ -n "${RUN_RELEASE_BUILD}" ]; then
echo 'release build triggered from api'
echo 'export ELECTRON_RELEASE=1 TRIGGERED_BY_API=1' >> $BASH_ENV
fi
- run:
name: Bootstrap
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Bootstrapping Electron for release build'
script/bootstrap.py --target_arch=$TARGET_ARCH
else
echo 'Bootstrapping Electron for debug build'
script/bootstrap.py --target_arch=$TARGET_ARCH --dev
fi
- run: npm run lint
- run:
name: Build
no_output_timeout: 30m
command: |
if [ "$ELECTRON_RELEASE" == "1" ]; then
echo 'Building Electron for release'
@@ -339,3 +539,12 @@ workflows:
build-x64:
jobs:
- electron-linux-x64
build-mips64el:
jobs:
- electron-linux-mips64el
build-osx-x64:
jobs:
- electron-osx-x64
build-mas-x64:
jobs:
- electron-mas-x64
.gitignore (vendored, 5 changes)

@@ -28,6 +28,7 @@
/external_binaries/
/out/
/vendor/.gclient
/vendor/cross-gcc-4.9.3-n64-loongson-rc5.4
/vendor/debian_jessie_amd64-sysroot/
/vendor/debian_jessie_arm-sysroot/
/vendor/debian_jessie_arm64-sysroot/
@@ -36,13 +37,13 @@
/vendor/debian_wheezy_amd64-sysroot/
/vendor/debian_wheezy_arm-sysroot/
/vendor/debian_wheezy_i386-sysroot/
/vendor/gcc-4.8.3-d197-n64-loongson/
/vendor/readme-gcc483-loongson.txt
/vendor/download/
/vendor/llvm-build/
/vendor/llvm/
/vendor/node/deps/node-inspect/.npmrc
/vendor/npm/
/vendor/python_26/
/vendor/native_mksnapshot
/vendor/LICENSES.chromium.html
node_modules/
SHASUMS256.txt
@@ -34,7 +34,7 @@ This Code of Conduct applies both within project spaces and in public spaces whe

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [electron@github.com](mailto:electron@github.com). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [coc@electronjs.org](mailto:coc@electronjs.org). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
@@ -4,7 +4,7 @@

This project adheres to the Contributor Covenant [code of conduct](CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report unacceptable
behavior to electron@github.com.
behavior to coc@electronjs.org.

The following is a set of guidelines for contributing to Electron.
These are just guidelines, not rules, use your best judgment and feel free to
@@ -7,7 +7,7 @@ ENV HOME=/home
RUN chmod a+rwx /home

# Install node.js
RUN curl -sL https://deb.nodesource.com/setup_6.x | bash -
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash -
RUN apt-get update && apt-get install -y --force-yes nodejs

# Install wget used by crash reporter
Dockerfile.arm64v8 (new file, 64 lines)

@@ -0,0 +1,64 @@
FROM arm64v8/ubuntu:16.04

RUN groupadd --gid 1000 builduser \
&& useradd --uid 1000 --gid builduser --shell /bin/bash --create-home builduser

RUN groupadd --gid 114 jenkins \
&& useradd --uid 110 --gid jenkins --shell /bin/bash --create-home jenkins

# Set up TEMP directory
ENV TEMP=/tmp
RUN chmod a+rwx /tmp

RUN apt-get update && apt-get install -y\
bison \
build-essential \
clang \
curl \
gperf \
git \
libasound2 \
libasound2-dev \
libcap-dev \
libcups2-dev \
libdbus-1-dev \
libgconf-2-4 \
libgconf2-dev \
libgnome-keyring-dev \
libgtk2.0-0 \
libgtk2.0-dev \
libgtk-3-0 \
libgtk-3-dev \
libnotify-dev \
libnss3 \
libnss3-dev \
libx11-xcb-dev \
libxss1 \
libxtst-dev \
libxtst6 \
lsb-release \
locales \
ninja \
python-setuptools \
python-pip \
python-dbusmock \
unzip \
wget \
xvfb

# Install node.js
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash -
RUN apt-get update && apt-get install -y nodejs

# Install crcmod
RUN pip install -U crcmod

ADD tools/xvfb-init.sh /etc/init.d/xvfb
RUN chmod a+x /etc/init.d/xvfb

# Install ninja in /usr/local
RUN cd /usr/local && git clone https://github.com/martine/ninja.git -b v1.5.3
RUN cd /usr/local/ninja && ./configure.py --bootstrap

USER builduser
WORKDIR /home/builduser
@@ -3,7 +3,7 @@ FROM electronbuilds/libchromiumcontent:0.0.4
USER root

# Install node.js
RUN curl -sL https://deb.nodesource.com/setup_6.x | bash -
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash -
RUN apt-get update && apt-get install -y --force-yes nodejs

# Install wget used by crash reporter
Jenkinsfile.arm64 (new file, 36 lines)

@@ -0,0 +1,36 @@
pipeline {
agent {
docker {
image 'electronbuilds/arm64v8:0.0.3'
args '--privileged'
}
}
environment {
TARGET_ARCH='arm64'
DISPLAY=':99.0'
MOCHA_TIMEOUT='60000'
}
stages {
stage('Bootstrap') {
steps {
sh 'script/bootstrap.py -v --dev --target_arch=$TARGET_ARCH'
}
}
stage('Build') {
steps {
sh 'script/build.py -c D --ninja-path /usr/local/ninja/ninja'
}
}
stage('Test') {
steps {
sh '/etc/init.d/xvfb start'
sh 'script/test.py --ci'
}
}
}
post {
always {
cleanWs()
}
}
}
@@ -19,7 +19,7 @@ announcements.
This project adheres to the Contributor Covenant
[code of conduct](https://github.com/electron/electron/tree/master/CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report unacceptable
behavior to [electron@github.com](mailto:electron@github.com).
behavior to [coc@electronjs.org](mailto:coc@electronjs.org).

## Installation
@@ -2,7 +2,7 @@

The Electron team and community take security bugs in Electron seriously. We appreciate your efforts to responsibly disclose your findings, and will make every effort to acknowledge your contributions.

To report a security issue, email [electron@github.com](mailto:electron@github.com) and include the word "SECURITY" in the subject line.
To report a security issue, email [security@electronjs.org](mailto:security@electronjs.org) and include the word "SECURITY" in the subject line.

The Electron team will send a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.
appveyor.yml (85 changes)

@@ -1,25 +1,60 @@
# appveyor file
# http://www.appveyor.com/docs/appveyor-yml
version: "{build}"

os: Visual Studio 2015

init:
- git config --global core.autocrlf input

platform:
- x86
- x64

install:
- cmd: SET PATH=C:\Program Files (x86)\MSBuild\14.0\bin\;%PATH%
- cmd: SET PATH=C:\python27;%PATH%
- cmd: python script/cibuild

branches:
only:
- master

# disable build and test phases
build: off
test: off
build_cloud: electron-16
image: electron-16-vs2015
build_script:
- ps: >-
if($env:SKIP_GYP_BUILD -eq "true") {
Write-warning "Skipping debug build for older branch"; Exit-AppveyorBuild
} elseif(($env:APPVEYOR_PULL_REQUEST_HEAD_REPO_NAME -split "/")[0] -eq ($env:APPVEYOR_REPO_NAME -split "/")[0]) {
Write-warning "Skipping PR build for branch"; Exit-AppveyorBuild
} else {
Add-Path "$env:ProgramFiles (x86)\Windows Kits\10\Debuggers\x64"
$env:path = "$env:ProgramFiles (x86)\Windows Kits\10\Debuggers\x64;$env:path"
if($env:APPVEYOR_SCHEDULED_BUILD -eq 'True') {
$env:RUN_RELEASE_BUILD = "1"
}
$Message = (git log --format=%B -n 1 HEAD) | Out-String
if ((Test-Path Env:\RUN_RELEASE_BUILD)) {
$env:ELECTRON_RELEASE = '1'
Write-Output "release build triggered from api"
}
if ((Test-Path Env:\ELECTRON_RELEASE)) {
Write-Output "Running release build"
python script\bootstrap.py --target_arch=$env:TARGET_ARCH
python script\build.py -c R
python script\create-dist.py
} else {
Write-Output "Running debug build"
python script\bootstrap.py --target_arch=$env:TARGET_ARCH --dev
python script\build.py -c D
}
if ($? -ne 'True') {
throw "Build failed with exit code $?"
} else {
"Build succeeded."
}
Push-AppveyorArtifact out
}
test_script:
- ps: >-
if (Test-Path Env:\ELECTRON_RELEASE) {
Write-Output "Skipping tests for release build"
} else {
Write-Output "Running tests for debug build"
python script\test.py --ci --rebuild_native_modules
if ($LASTEXITCODE -ne '0') {
throw "Tests failed with exit code $LASTEXITCODE"
} else {
Write-Output "Tests succeeded."
}
python script\verify-ffmpeg.py
if ($LASTEXITCODE -ne '0') {
throw "Verify ffmpeg failed with exit code $LASTEXITCODE"
} else {
"Verify ffmpeg succeeded."
}
}
artifacts:
- path: test-results.xml
name: test-results.xml
deploy_script:
- ps: "if (Test-Path Env:\\ELECTRON_RELEASE) {\n if (Test-Path Env:\\RUN_RELEASE_BUILD) {\n Write-Output \"Uploading Electron release distribution to s3\"\n & python script\\upload.py --upload_to_s3\n } else {\n Write-Output \"Uploading Electron release distribution to github releases\"\n & python script\\upload.py\n }\n} else {\n Write-Output \"Skipping upload distribution because build is not for release\"\n}"
@@ -10,7 +10,7 @@
#if defined(OS_MACOSX)
extern "C" {
__attribute__((visibility("default")))
int AtomMain(int argc, const char* argv[]);
int AtomMain(int argc, char* argv[]);

__attribute__((visibility("default")))
int AtomInitializeICUandStartNode(int argc, char *argv[]);

@@ -15,11 +15,11 @@
#include "content/public/app/content_main.h"

#if defined(OS_MACOSX)
int AtomMain(int argc, const char* argv[]) {
int AtomMain(int argc, char* argv[]) {
atom::AtomMainDelegate delegate;
content::ContentMainParams params(&delegate);
params.argc = argc;
params.argv = argv;
params.argv = const_cast<const char**>(argv);
atom::AtomCommandLine::Init(argc, argv);
return content::ContentMain(params);
}
@@ -4,7 +4,8 @@

#include "atom/app/atom_main.h"

#include <stdlib.h>
#include <cstdlib>
#include <vector>

#if defined(OS_WIN)
#include <windows.h>  // windows.h must be included first
@@ -15,9 +16,11 @@
#include <tchar.h>

#include "atom/app/atom_main_delegate.h"
#include "atom/app/command_line_args.h"
#include "atom/common/crash_reporter/win/crash_service_main.h"
#include "base/environment.h"
#include "base/process/launch.h"
#include "base/strings/utf_string_conversions.h"
#include "base/win/windows_version.h"
#include "content/public/app/sandbox_helper_win.h"
#include "sandbox/win/src/sandbox_types.h"
@@ -52,18 +55,23 @@ bool IsEnvSet(const char* name) {

#if defined(OS_WIN)
int APIENTRY wWinMain(HINSTANCE instance, HINSTANCE, wchar_t* cmd, int) {
int argc = 0;
wchar_t** wargv = ::CommandLineToArgvW(::GetCommandLineW(), &argc);
struct Arguments {
int argc = 0;
wchar_t** argv = ::CommandLineToArgvW(::GetCommandLineW(), &argc);

bool run_as_node = IsEnvSet(kRunAsNode);
~Arguments() { LocalFree(argv); }
} arguments;

if (!arguments.argv)
return -1;

#ifdef _DEBUG
// Don't display assert dialog boxes in CI test runs
static const auto kCI = "ELECTRON_CI";
bool is_ci = IsEnvSet(kCI);
if (!is_ci) {
for (int i = 0; i < argc; ++i) {
if (!_wcsicmp(wargv[i], L"--ci")) {
for (int i = 0; i < arguments.argc; ++i) {
if (!_wcsicmp(arguments.argv[i], L"--ci")) {
is_ci = true;
_putenv_s(kCI, "1"); // set flag for child processes
break;
@@ -81,44 +89,12 @@ int APIENTRY wWinMain(HINSTANCE instance, HINSTANCE, wchar_t* cmd, int) {
}
#endif

bool run_as_node = IsEnvSet(kRunAsNode);

// Make sure the output is printed to console.
if (run_as_node || !IsEnvSet("ELECTRON_NO_ATTACH_CONSOLE"))
base::RouteStdioToConsole(false);

// Convert argv to to UTF8
char** argv = new char*[argc];
for (int i = 0; i < argc; i++) {
// Compute the size of the required buffer
DWORD size = WideCharToMultiByte(CP_UTF8,
0,
wargv[i],
-1,
NULL,
0,
NULL,
NULL);
if (size == 0) {
// This should never happen.
fprintf(stderr, "Could not convert arguments to utf8.");
exit(1);
}
// Do the actual conversion
argv[i] = new char[size];
DWORD result = WideCharToMultiByte(CP_UTF8,
0,
wargv[i],
-1,
argv[i],
size,
NULL,
NULL);
if (result == 0) {
// This should never happen.
fprintf(stderr, "Could not convert arguments to utf8.");
exit(1);
}
}

#ifndef DEBUG
// Chromium has its own TLS subsystem which supports automatic destruction
// of thread-local data, and also depends on memory allocation routines
@@ -139,14 +115,23 @@ int APIENTRY wWinMain(HINSTANCE instance, HINSTANCE, wchar_t* cmd, int) {
#endif

if (run_as_node) {
// Now that argv conversion is done, we can finally start.
std::vector<char*> argv(arguments.argc);
std::transform(
arguments.argv, arguments.argv + arguments.argc, argv.begin(),
[](auto& a) { return _strdup(base::WideToUTF8(a).c_str()); });

base::AtExitManager atexit_manager;
base::i18n::InitializeICU();
return atom::NodeMain(argc, argv);
auto ret = atom::NodeMain(argv.size(), argv.data());
std::for_each(argv.begin(), argv.end(), free);
return ret;
} else if (IsEnvSet("ELECTRON_INTERNAL_CRASH_SERVICE")) {
return crash_service::Main(cmd);
}

if (!atom::CheckCommandLineArguments(arguments.argc, arguments.argv))
return -1;

sandbox::SandboxInterfaceInfo sandbox_info = {0};
content::InitializeSandboxInfo(&sandbox_info);
atom::AtomMainDelegate delegate;
@@ -154,33 +139,32 @@ int APIENTRY wWinMain(HINSTANCE instance, HINSTANCE, wchar_t* cmd, int) {
content::ContentMainParams params(&delegate);
params.instance = instance;
params.sandbox_info = &sandbox_info;
atom::AtomCommandLine::Init(argc, argv);
atom::AtomCommandLine::InitW(argc, wargv);
atom::AtomCommandLine::Init(arguments.argc, arguments.argv);
return content::ContentMain(params);
}

#elif defined(OS_LINUX) // defined(OS_WIN)

int main(int argc, const char* argv[]) {
int main(int argc, char* argv[]) {
if (IsEnvSet(kRunAsNode)) {
base::i18n::InitializeICU();
base::AtExitManager atexit_manager;
return atom::NodeMain(argc, const_cast<char**>(argv));
return atom::NodeMain(argc, argv);
}

atom::AtomMainDelegate delegate;
content::ContentMainParams params(&delegate);
params.argc = argc;
params.argv = argv;
params.argv = const_cast<const char**>(argv);
atom::AtomCommandLine::Init(argc, argv);
return content::ContentMain(params);
}

#else // defined(OS_LINUX)

int main(int argc, const char* argv[]) {
int main(int argc, char* argv[]) {
if (IsEnvSet(kRunAsNode)) {
return AtomInitializeICUandStartNode(argc, const_cast<char**>(argv));
return AtomInitializeICUandStartNode(argc, argv);
}

return AtomMain(argc, argv);
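The run-as-node branch in the hunk above converts the wide-character argv to UTF-8 with std::transform and frees the duplicated strings after NodeMain returns. Below is a minimal, self-contained sketch of that conversion pattern on Windows; WideToUtf8 and the entry callback are stand-ins, not Electron's base::WideToUTF8 or atom::NodeMain.

#include <windows.h>
#include <algorithm>
#include <cstdlib>
#include <cstring>
#include <string>
#include <vector>

// Stand-in for base::WideToUTF8 (assumption, not Electron's helper).
static std::string WideToUtf8(const wchar_t* wide) {
  int size = ::WideCharToMultiByte(CP_UTF8, 0, wide, -1, nullptr, 0, nullptr, nullptr);
  if (size <= 0)
    return std::string();
  std::string out(static_cast<size_t>(size), '\0');
  ::WideCharToMultiByte(CP_UTF8, 0, wide, -1, &out[0], size, nullptr, nullptr);
  out.resize(static_cast<size_t>(size) - 1);  // drop the embedded terminator
  return out;
}

// Converts argv, runs the supplied entry point, then releases the copies,
// mirroring the _strdup / free pairing used in the patch.
int RunWithUtf8Args(int argc, wchar_t** wargv, int (*entry)(int, char**)) {
  std::vector<char*> argv(argc);
  std::transform(wargv, wargv + argc, argv.begin(),
                 [](wchar_t* arg) { return _strdup(WideToUtf8(arg).c_str()); });
  int ret = entry(static_cast<int>(argv.size()), argv.data());
  std::for_each(argv.begin(), argv.end(), free);
  return ret;
}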
atom/app/command_line_args.cc (new file, 1389 lines): file diff suppressed because it is too large
atom/app/command_line_args.h (new file, 17 lines)

@@ -0,0 +1,17 @@
// Copyright (c) 2018 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.

#ifndef ATOM_APP_COMMAND_LINE_ARGS_H_
#define ATOM_APP_COMMAND_LINE_ARGS_H_

#include "base/command_line.h"

namespace atom {

bool CheckCommandLineArguments(int argc, base::CommandLine::CharType** argv);

}  // namespace atom

#endif  // ATOM_APP_COMMAND_LINE_ARGS_H_
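The implementation behind this header, atom/app/command_line_args.cc, is suppressed above, so the validator's actual rules are not visible here. Purely as a hypothetical illustration of the declared shape, a checker with the same signature could look like the sketch below; none of the policy in it is Electron's real logic.

#include <cwchar>

namespace sketch {

using CharType = wchar_t;  // matches base::CommandLine::CharType on Windows

// Hypothetical validator with the same signature as the declaration above.
bool CheckCommandLineArguments(int argc, CharType** argv) {
  for (int i = 1; i < argc; ++i) {
    const CharType* arg = argv[i];
    if (std::wcscmp(arg, L"--") == 0)
      break;  // arguments after "--" are passed through unchecked
    // Example rule only: reject a switch that embeds whitespace, which could
    // smuggle a second switch past later parsing.
    if (arg[0] == L'-' && std::wcspbrk(arg, L" \t") != nullptr)
      return false;
  }
  return true;
}

}  // namespace sketch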
@@ -861,11 +861,7 @@ bool App::Relaunch(mate::Arguments* js_args) {
}

if (!override_argv) {
#if defined(OS_WIN)
const relauncher::StringVector& argv = atom::AtomCommandLine::wargv();
#else
const relauncher::StringVector& argv = atom::AtomCommandLine::argv();
#endif
return relauncher::RelaunchApp(argv);
}
@@ -68,8 +68,8 @@ void BrowserView::Init(v8::Isolate* isolate,
web_contents_.Reset(isolate, web_contents.ToV8());
api_web_contents_ = web_contents.get();

view_.reset(NativeBrowserView::Create(
api_web_contents_->managed_web_contents()->GetView()));
view_.reset(
NativeBrowserView::Create(api_web_contents_->managed_web_contents()));

InitWith(isolate, wrapper);
}
@@ -60,6 +60,7 @@ void DesktopCapturer::StartHandling(bool capture_window,
// implemetation. This is a known and wontFix issue in webrtc (see:
// http://code.google.com/p/webrtc/issues/detail?id=3373)
options.set_disable_effects(false);
options.set_allow_directx_capturer(true);
#endif

std::unique_ptr<webrtc::DesktopCapturer> screen_capturer(
@@ -22,22 +22,16 @@ MenuViews::MenuViews(v8::Isolate* isolate, v8::Local<v8::Object> wrapper)

void MenuViews::PopupAt(
Window* window, int x, int y, int positioning_item, bool async) {
NativeWindow* native_window = static_cast<NativeWindow*>(window->window());
auto* native_window = static_cast<NativeWindowViews*>(window->window());
if (!native_window)
return;
content::WebContents* web_contents = native_window->web_contents();
if (!web_contents)
return;
content::RenderWidgetHostView* view = web_contents->GetRenderWidgetHostView();
if (!view)
return;

// (-1, -1) means showing on mouse location.
gfx::Point location;
if (x == -1 || y == -1) {
location = display::Screen::GetScreen()->GetCursorScreenPoint();
} else {
gfx::Point origin = view->GetViewBounds().origin();
gfx::Point origin = native_window->GetContentBounds().origin();
location = gfx::Point(origin.x() + x, origin.y() + y);
}

@@ -55,7 +49,7 @@ void MenuViews::PopupAt(
menu_runners_[window_id] = std::unique_ptr<MenuRunner>(new MenuRunner(
model(), flags, close_callback));
ignore_result(menu_runners_[window_id]->RunMenuAt(
static_cast<NativeWindowViews*>(window->window())->widget(),
native_window->widget(),
NULL,
gfx::Rect(location, gfx::Size()),
views::MENU_ANCHOR_TOPLEFT,
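The PopupAt change above keeps the convention that (-1, -1) means "show the menu at the cursor" but now offsets explicit coordinates from the window's content bounds rather than the render view. A tiny self-contained sketch of that coordinate rule, with Point as a simplified stand-in for gfx::Point:

struct Point { int x; int y; };

Point PopupLocation(Point content_origin, int x, int y, Point cursor) {
  // (-1, -1) means "show the menu at the current mouse location".
  if (x == -1 || y == -1)
    return cursor;
  // Otherwise the coordinates are relative to the window's content area.
  return Point{content_origin.x + x, content_origin.y + y};
}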
@@ -78,10 +78,6 @@ class Protocol : public mate::TrackableObject<Protocol> {
net::URLRequestJob* MaybeCreateJob(
net::URLRequest* request,
net::NetworkDelegate* network_delegate) const override {
if (!request->initiator().has_value()) {
// Don't intercept this request as it was created by `net.request`.
return nullptr;
}
RequestJob* request_job = new RequestJob(request, network_delegate);
request_job->SetHandlerInfo(isolate_, request_context_.get(), handler_);
return request_job;
@@ -208,7 +208,7 @@ void Tray::PopUpContextMenu(mate::Arguments* args) {

void Tray::SetContextMenu(v8::Isolate* isolate, mate::Handle<Menu> menu) {
menu_.Reset(isolate, menu.ToV8());
tray_icon_->SetContextMenu(menu->model());
tray_icon_->SetContextMenu(menu.IsEmpty() ? nullptr : menu->model());
}

gfx::Rect Tray::GetBounds() {
@@ -271,6 +271,9 @@ content::ServiceWorkerContext* GetServiceWorkerContext(
void OnCapturePageDone(const base::Callback<void(const gfx::Image&)>& callback,
const SkBitmap& bitmap,
content::ReadbackResponse response) {
// Hack to enable transparency in captured image
// TODO(nitsakh) Remove hack once fixed in chromium
const_cast<SkBitmap&>(bitmap).setAlphaType(kPremul_SkAlphaType);
callback.Run(gfx::Image::CreateFrom1xBitmap(bitmap));
}

@@ -377,8 +380,8 @@ WebContents::WebContents(v8::Isolate* isolate, const mate::Dictionary& options)
options.Get("transparent", &transparent);

content::WebContents::CreateParams params(session->browser_context());
auto* view = new OffScreenWebContentsView(
transparent, base::Bind(&WebContents::OnPaint, base::Unretained(this)));
auto* view = new OffScreenWebContentsView(transparent,
base::Bind(&WebContents::OnPaint, base::Unretained(this)));
params.view = view;
params.delegate_view = view;

@@ -1644,10 +1647,10 @@ void WebContents::StartPainting() {
return;

#if defined(ENABLE_OSR)
auto* osr_rwhv = static_cast<OffScreenRenderWidgetHostView*>(
web_contents()->GetRenderWidgetHostView());
if (osr_rwhv)
osr_rwhv->SetPainting(true);
const auto* wc_impl = static_cast<content::WebContentsImpl*>(web_contents());
auto* osr_wcv = static_cast<OffScreenWebContentsView*>(wc_impl->GetView());
if (osr_wcv)
osr_wcv->SetPainting(true);
#endif
}

@@ -1656,10 +1659,10 @@ void WebContents::StopPainting() {
return;

#if defined(ENABLE_OSR)
auto* osr_rwhv = static_cast<OffScreenRenderWidgetHostView*>(
web_contents()->GetRenderWidgetHostView());
if (osr_rwhv)
osr_rwhv->SetPainting(false);
const auto* wc_impl = static_cast<content::WebContentsImpl*>(web_contents());
auto* osr_wcv = static_cast<OffScreenWebContentsView*>(wc_impl->GetView());
if (osr_wcv)
osr_wcv->SetPainting(false);
#endif
}

@@ -1668,9 +1671,10 @@ bool WebContents::IsPainting() const {
return false;

#if defined(ENABLE_OSR)
const auto* osr_rwhv = static_cast<OffScreenRenderWidgetHostView*>(
web_contents()->GetRenderWidgetHostView());
return osr_rwhv && osr_rwhv->IsPainting();
const auto* wc_impl = static_cast<content::WebContentsImpl*>(web_contents());
auto* osr_wcv = static_cast<OffScreenWebContentsView*>(wc_impl->GetView());

return osr_wcv && osr_wcv->IsPainting();
#else
return false;
#endif
@@ -1681,10 +1685,11 @@ void WebContents::SetFrameRate(int frame_rate) {
return;

#if defined(ENABLE_OSR)
auto* osr_rwhv = static_cast<OffScreenRenderWidgetHostView*>(
web_contents()->GetRenderWidgetHostView());
if (osr_rwhv)
osr_rwhv->SetFrameRate(frame_rate);
const auto* wc_impl = static_cast<content::WebContentsImpl*>(web_contents());
auto* osr_wcv = static_cast<OffScreenWebContentsView*>(wc_impl->GetView());

if (osr_wcv)
osr_wcv->SetFrameRate(frame_rate);
#endif
}

@@ -1693,9 +1698,10 @@ int WebContents::GetFrameRate() const {
return 0;

#if defined(ENABLE_OSR)
const auto* osr_rwhv = static_cast<OffScreenRenderWidgetHostView*>(
web_contents()->GetRenderWidgetHostView());
return osr_rwhv ? osr_rwhv->GetFrameRate() : 0;
const auto* wc_impl = static_cast<content::WebContentsImpl*>(web_contents());
auto* osr_wcv = static_cast<OffScreenWebContentsView*>(wc_impl->GetView());

return osr_wcv ? osr_wcv->GetFrameRate() : 0;
#else
return 0;
#endif
@@ -1765,6 +1771,14 @@ v8::Local<v8::Value> WebContents::GetWebPreferences(v8::Isolate* isolate) {
return mate::ConvertToV8(isolate, *web_preferences->web_preferences());
}

v8::Local<v8::Value> WebContents::GetLastWebPreferences(v8::Isolate* isolate) {
WebContentsPreferences* web_preferences =
WebContentsPreferences::FromWebContents(web_contents());
if (!web_preferences)
return v8::Null(isolate);
return mate::ConvertToV8(isolate, *web_preferences->last_web_preferences());
}

v8::Local<v8::Value> WebContents::GetOwnerBrowserWindow() {
if (owner_window())
return Window::From(isolate(), owner_window());
@@ -1911,6 +1925,7 @@ void WebContents::BuildPrototype(v8::Isolate* isolate,
.SetMethod("_getZoomFactor", &WebContents::GetZoomFactor)
.SetMethod("getType", &WebContents::GetType)
.SetMethod("getWebPreferences", &WebContents::GetWebPreferences)
.SetMethod("getLastWebPreferences", &WebContents::GetLastWebPreferences)
.SetMethod("getOwnerBrowserWindow", &WebContents::GetOwnerBrowserWindow)
.SetMethod("hasServiceWorker", &WebContents::HasServiceWorker)
.SetMethod("unregisterServiceWorker",

@@ -212,6 +212,7 @@ class WebContents : public mate::TrackableObject<WebContents>,

// Returns the web preferences of current WebContents.
v8::Local<v8::Value> GetWebPreferences(v8::Isolate* isolate);
v8::Local<v8::Value> GetLastWebPreferences(v8::Isolate* isolate);

// Returns the owner window.
v8::Local<v8::Value> GetOwnerBrowserWindow();
@@ -889,6 +889,8 @@ void Window::SetBrowserView(v8::Local<v8::Value> value) {
window_->SetBrowserView(browser_view->view());
browser_view->web_contents()->SetOwnerWindow(window_.get());
browser_view_.Reset(isolate(), value);

window_->UpdateDraggableRegionViews();
}
}
@@ -38,6 +38,7 @@
#include "content/public/browser/resource_dispatcher_host.h"
#include "content/public/browser/site_instance.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/content_paths.h"
#include "content/public/common/content_switches.h"
#include "content/public/common/url_constants.h"
#include "content/public/common/web_preferences.h"
@@ -243,6 +244,11 @@ void AtomBrowserClient::OverrideSiteInstanceForNavigation(
void AtomBrowserClient::AppendExtraCommandLineSwitches(
base::CommandLine* command_line,
int process_id) {
// Make sure we're about to launch a known executable
base::FilePath child_path;
PathService::Get(content::CHILD_PROCESS_EXE, &child_path);
CHECK(base::MakeAbsoluteFilePath(command_line->GetProgram()) == child_path);

std::string process_type =
command_line->GetSwitchValueASCII(::switches::kProcessType);
if (process_type != ::switches::kRendererProcess)
@@ -7,14 +7,23 @@
#include "atom/browser/native_browser_view.h"

#include "atom/browser/api/atom_api_web_contents.h"
#include "brightray/browser/inspectable_web_contents_view.h"
#include "brightray/browser/inspectable_web_contents.h"

namespace atom {

NativeBrowserView::NativeBrowserView(
brightray::InspectableWebContentsView* web_contents_view)
: web_contents_view_(web_contents_view) {}
brightray::InspectableWebContents* inspectable_web_contents)
: inspectable_web_contents_(inspectable_web_contents) {}

NativeBrowserView::~NativeBrowserView() {}

brightray::InspectableWebContentsView*
NativeBrowserView::GetInspectableWebContentsView() {
return inspectable_web_contents_->GetView();
}

content::WebContents* NativeBrowserView::GetWebContents() {
return inspectable_web_contents_->GetWebContents();
}

} // namespace atom
@@ -9,9 +9,11 @@

#include "atom/common/draggable_region.h"
#include "base/macros.h"
#include "content/public/browser/web_contents.h"
#include "third_party/skia/include/core/SkColor.h"

namespace brightray {
class InspectableWebContents;
class InspectableWebContentsView;
}

@@ -31,12 +33,15 @@ class NativeBrowserView {
virtual ~NativeBrowserView();

static NativeBrowserView* Create(
brightray::InspectableWebContentsView* web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents);

brightray::InspectableWebContentsView* GetInspectableWebContentsView() {
return web_contents_view_;
brightray::InspectableWebContents* GetInspectableWebContents() {
return inspectable_web_contents_;
}

brightray::InspectableWebContentsView* GetInspectableWebContentsView();
content::WebContents* GetWebContents();

virtual void SetAutoResizeFlags(uint8_t flags) = 0;
virtual void SetBounds(const gfx::Rect& bounds) = 0;
virtual void SetBackgroundColor(SkColor color) = 0;
@@ -47,9 +52,9 @@ class NativeBrowserView {

protected:
explicit NativeBrowserView(
brightray::InspectableWebContentsView* web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents);

brightray::InspectableWebContentsView* web_contents_view_;
brightray::InspectableWebContents* inspectable_web_contents_;

private:
DISALLOW_COPY_AND_ASSIGN(NativeBrowserView);
@@ -17,12 +17,13 @@ namespace atom {
class NativeBrowserViewMac : public NativeBrowserView {
public:
explicit NativeBrowserViewMac(
brightray::InspectableWebContentsView* web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents);
~NativeBrowserViewMac() override;

void SetAutoResizeFlags(uint8_t flags) override;
void SetBounds(const gfx::Rect& bounds) override;
void SetBackgroundColor(SkColor color) override;

void UpdateDraggableRegions(
const std::vector<gfx::Rect>& system_drag_exclude_areas) override;
@@ -4,6 +4,7 @@

#include "atom/browser/native_browser_view_mac.h"

#include "brightray/browser/inspectable_web_contents.h"
#include "brightray/browser/inspectable_web_contents_view.h"
#include "skia/ext/skia_utils_mac.h"
#include "ui/gfx/geometry/rect.h"
@@ -156,8 +157,8 @@ const NSAutoresizingMaskOptions kDefaultAutoResizingMask =
namespace atom {

NativeBrowserViewMac::NativeBrowserViewMac(
brightray::InspectableWebContentsView* web_contents_view)
: NativeBrowserView(web_contents_view) {
brightray::InspectableWebContents* inspectable_web_contents)
: NativeBrowserView(inspectable_web_contents) {
auto* view = GetInspectableWebContentsView()->GetNativeView();
view.autoresizingMask = kDefaultAutoResizingMask;
}
@@ -193,62 +194,46 @@ void NativeBrowserViewMac::SetBackgroundColor(SkColor color) {
}

void NativeBrowserViewMac::UpdateDraggableRegions(
const std::vector<gfx::Rect>& system_drag_exclude_areas) {
NSView* webView = GetInspectableWebContentsView()->GetNativeView();
const std::vector<gfx::Rect>& drag_exclude_rects) {
NSView* web_view = GetWebContents()->GetNativeView();
NSView* inspectable_view = GetInspectableWebContentsView()->GetNativeView();
NSView* window_content_view = inspectable_view.superview;
const auto window_content_view_height = NSHeight(window_content_view.bounds);

NSInteger superViewHeight = NSHeight([webView.superview bounds]);
NSInteger webViewHeight = NSHeight([webView bounds]);
NSInteger webViewWidth = NSWidth([webView bounds]);
NSInteger webViewX = NSMinX([webView frame]);
NSInteger webViewY = 0;

// Apple's NSViews have their coordinate system originate at the bottom left,
// meaning that we need to be a bit smarter when it comes to calculating our
// current top offset
if (webViewHeight > superViewHeight) {
webViewY = std::abs(webViewHeight - superViewHeight - (std::abs(NSMinY([webView frame]))));
} else {
webViewY = superViewHeight - NSMaxY([webView frame]);
}

// Remove all DraggableRegionViews that are added last time.
// Note that [webView subviews] returns the view's mutable internal array and
// it should be copied to avoid mutating the original array while enumerating
// it.
base::scoped_nsobject<NSArray> subviews([[webView subviews] copy]);
for (NSView* subview in subviews.get())
if ([subview isKindOfClass:[DragRegionView class]])
// Remove all DragRegionViews that were added last time. Note that we need
// to copy the `subviews` array to avoid mutation during iteration.
base::scoped_nsobject<NSArray> subviews([[web_view subviews] copy]);
for (NSView* subview in subviews.get()) {
if ([subview isKindOfClass:[DragRegionView class]]) {
[subview removeFromSuperview];
}
}

// Create one giant NSView that is draggable.
base::scoped_nsobject<NSView> dragRegion(
[[DragRegionView alloc] initWithFrame:NSZeroRect]);
[dragRegion setFrame:NSMakeRect(0,
0,
webViewWidth,
webViewHeight)];
base::scoped_nsobject<NSView> drag_region_view(
[[DragRegionView alloc] initWithFrame:web_view.bounds]);
[web_view addSubview:drag_region_view];

// Then, on top of that, add "exclusion zones"
for (auto iter = system_drag_exclude_areas.begin();
iter != system_drag_exclude_areas.end();
++iter) {
base::scoped_nsobject<NSView> controlRegion(
[[ExcludeDragRegionView alloc] initWithFrame:NSZeroRect]);
[controlRegion setFrame:NSMakeRect(iter->x() - webViewX,
webViewHeight - iter->bottom() + webViewY,
iter->width(),
iter->height())];
[dragRegion addSubview:controlRegion];
}
for (const auto& rect : drag_exclude_rects) {
const auto window_content_view_exclude_rect =
NSMakeRect(rect.x(), window_content_view_height - rect.bottom(),
rect.width(), rect.height());
const auto drag_region_view_exclude_rect =
[window_content_view convertRect:window_content_view_exclude_rect
toView:drag_region_view];

// Add the DragRegion to the WebView
[webView addSubview:dragRegion];
base::scoped_nsobject<NSView> exclude_drag_region_view(
[[ExcludeDragRegionView alloc]
initWithFrame:drag_region_view_exclude_rect]);
[drag_region_view addSubview:exclude_drag_region_view];
}
}

// static
NativeBrowserView* NativeBrowserView::Create(
brightray::InspectableWebContentsView* web_contents_view) {
return new NativeBrowserViewMac(web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents) {
return new NativeBrowserViewMac(inspectable_web_contents);
}

} // namespace atom
@@ -12,8 +12,8 @@
namespace atom {

NativeBrowserViewViews::NativeBrowserViewViews(
brightray::InspectableWebContentsView* web_contents_view)
: NativeBrowserView(web_contents_view) {}
brightray::InspectableWebContents* inspectable_web_contents)
: NativeBrowserView(inspectable_web_contents) {}

NativeBrowserViewViews::~NativeBrowserViewViews() {}

@@ -29,8 +29,8 @@ void NativeBrowserViewViews::SetBackgroundColor(SkColor color) {

// static
NativeBrowserView* NativeBrowserView::Create(
brightray::InspectableWebContentsView* web_contents_view) {
return new NativeBrowserViewViews(web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents) {
return new NativeBrowserViewViews(inspectable_web_contents);
}

} // namespace atom
@@ -12,7 +12,7 @@ namespace atom {
class NativeBrowserViewViews : public NativeBrowserView {
public:
explicit NativeBrowserViewViews(
brightray::InspectableWebContentsView* web_contents_view);
brightray::InspectableWebContents* inspectable_web_contents);
~NativeBrowserViewViews() override;

uint8_t GetAutoResizeFlags() { return auto_resize_flags_; }
@@ -237,6 +237,8 @@ class NativeWindow : public base::SupportsUserData,
const std::vector<base::string16>& labels) {}
virtual void HideAutofillPopup(content::RenderFrameHost* frame_host) {}

virtual void UpdateDraggableRegionViews() {}

// Public API used by platform-dependent delegates and observers to send UI
// related notifications.
void NotifyWindowClosed();

@@ -126,7 +126,7 @@ class NativeWindowMac : public NativeWindow,
content::RenderViewHost* new_host) override;

// Refresh the DraggableRegion views.
void UpdateDraggableRegionViews() {
void UpdateDraggableRegionViews() override {
UpdateDraggableRegionViews(draggable_regions_);
}
@@ -1932,25 +1932,20 @@ void NativeWindowMac::UpdateDraggableRegionViews(

// Draggable regions is implemented by having the whole web view draggable
// (mouseDownCanMoveWindow) and overlaying regions that are not draggable.
std::vector<gfx::Rect> system_drag_exclude_areas =
std::vector<gfx::Rect> drag_exclude_rects =
CalculateNonDraggableRegions(regions, webViewWidth, webViewHeight);

if (browser_view_) {
browser_view_->UpdateDraggableRegions(system_drag_exclude_areas);
browser_view_->UpdateDraggableRegions(drag_exclude_rects);
}

// Create and add a ControlRegionView for each region that needs to be
// excluded from the dragging.
for (std::vector<gfx::Rect>::const_iterator iter =
system_drag_exclude_areas.begin();
iter != system_drag_exclude_areas.end();
++iter) {
for (const auto& rect : drag_exclude_rects) {
base::scoped_nsobject<NSView> controlRegion(
[[ControlRegionView alloc] initWithFrame:NSZeroRect]);
[controlRegion setFrame:NSMakeRect(iter->x(),
webViewHeight - iter->bottom(),
iter->width(),
iter->height())];
[controlRegion setFrame:NSMakeRect(rect.x(), webViewHeight - rect.bottom(),
rect.width(), rect.height())];
[webView addSubview:controlRegion];
}
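AppKit view coordinates originate at the bottom-left corner, which is why the hunks above flip each exclusion rectangle with "height - rect.bottom()". A self-contained sketch of that flip; Rect below is a simplified stand-in for gfx::Rect / NSRect, not the types used in the patch.

#include <cassert>

// Simplified stand-in for gfx::Rect (illustration only), top-left origin.
struct Rect {
  int x, y, width, height;
  int bottom() const { return y + height; }
};

// Convert a top-left-origin rect into a bottom-left-origin frame, the same
// flip the patch performs with `webViewHeight - rect.bottom()`.
Rect FlipToBottomLeftOrigin(const Rect& r, int container_height) {
  return Rect{r.x, container_height - r.bottom(), r.width, r.height};
}

int main() {
  // A 10x20 rect whose top edge sits 30px below the top of a 100px-tall
  // container ends up with its bottom edge 50px above the container's bottom.
  Rect flipped = FlipToBottomLeftOrigin(Rect{5, 30, 10, 20}, 100);
  assert(flipped.y == 50);
  return 0;
}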
@@ -53,6 +53,7 @@
#include "atom/browser/ui/views/win_frame_view.h"
#include "atom/browser/ui/win/atom_desktop_native_widget_aura.h"
#include "atom/browser/ui/win/atom_desktop_window_tree_host_win.h"
#include "content/public/browser/gpu_data_manager.h"
#include "skia/ext/skia_utils_win.h"
#include "ui/base/win/shell.h"
#include "ui/display/display.h"
@@ -297,9 +298,12 @@ NativeWindowViews::NativeWindowViews(
::SetWindowLong(GetAcceleratedWidget(), GWL_STYLE, frame_style);
}

bool hardware_accelerated =
content::GpuDataManager::GetInstance()->HardwareAccelerationEnabled();
LONG ex_style = ::GetWindowLong(GetAcceleratedWidget(), GWL_EXSTYLE);
// Window without thick frame has to have WS_EX_COMPOSITED style.
if (!thick_frame_)
// Window without thick frame has to have WS_EX_COMPOSITED style when GPU
// acceleration is enabled.
if (!thick_frame_ && hardware_accelerated)
ex_style |= WS_EX_COMPOSITED;
if (window_type == "toolbar")
ex_style |= WS_EX_TOOLWINDOW;
@@ -1363,22 +1367,26 @@ void NativeWindowViews::ShowAutofillPopup(
const gfx::RectF& bounds,
const std::vector<base::string16>& values,
const std::vector<base::string16>& labels) {
const auto* web_preferences =
WebContentsPreferences::FromWebContents(web_contents)->web_preferences();

bool is_offsceen = false;
web_preferences->GetBoolean("offscreen", &is_offsceen);
int guest_instance_id = 0;
web_preferences->GetInteger(options::kGuestInstanceID, &guest_instance_id);

bool is_embedder_offscreen = false;
if (guest_instance_id) {
auto manager = WebViewManager::GetWebViewManager(web_contents);
if (manager) {
auto embedder = manager->GetEmbedder(guest_instance_id);
if (embedder) {
is_embedder_offscreen = WebContentsPreferences::IsPreferenceEnabled(
"offscreen", embedder);

auto* web_contents_preferences =
WebContentsPreferences::FromWebContents(web_contents);
if (web_contents_preferences) {
const auto* web_preferences = web_contents_preferences->web_preferences();

web_preferences->GetBoolean("offscreen", &is_offsceen);
int guest_instance_id = 0;
web_preferences->GetInteger(options::kGuestInstanceID, &guest_instance_id);

if (guest_instance_id) {
auto manager = WebViewManager::GetWebViewManager(web_contents);
if (manager) {
auto embedder = manager->GetEmbedder(guest_instance_id);
if (embedder) {
is_embedder_offscreen = WebContentsPreferences::IsPreferenceEnabled(
"offscreen", embedder);
}
}
}
}
@@ -245,10 +245,21 @@ void URLRequestAsarJob::FetchMetaInfo(const base::FilePath& file_path,
meta_info->file_size = file_info.size;
meta_info->is_directory = file_info.is_directory;
}
// On Windows GetMimeTypeFromFile() goes to the registry. Thus it should be
// done in WorkerPool.
meta_info->mime_type_result =
net::GetMimeTypeFromFile(file_path, &meta_info->mime_type);

// We use GetWellKnownMimeTypeFromExtension() to ensure that configurations
// that may have been set by other programs on a user's machine don't affect
// the mime type returned (in particular, JS should always be
// (application/javascript). See https://crbug.com/797712. Using an accurate
// mime type is necessary at least for modules and sw, which enforce strict
// mime type requirements.
// TODO(deepak1556): Revert this when sw support is removed for file scheme.
base::FilePath::StringType file_extension = file_path.Extension();
if (file_extension.empty()) {
meta_info->mime_type_result = false;
} else {
meta_info->mime_type_result = net::GetWellKnownMimeTypeFromExtension(
file_extension.substr(1), &meta_info->mime_type);
}
}

void URLRequestAsarJob::DidFetchMetaInfo(const FileMetaInfo* meta_info) {
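The hunk above swaps a registry-backed MIME lookup for one keyed purely on the file extension, so a .js file always maps to application/javascript regardless of what other software has configured on the machine. A self-contained sketch of the idea; the table and helpers below are illustrative stand-ins, not Chromium's net::GetWellKnownMimeTypeFromExtension.

#include <map>
#include <string>

// Illustrative extension -> MIME table; the real code defers to net/'s
// built-in well-known list instead of the OS registry.
bool WellKnownMimeTypeFromExtension(const std::string& ext, std::string* mime) {
  static const std::map<std::string, std::string> kTable = {
      {"js", "application/javascript"},
      {"html", "text/html"},
      {"json", "application/json"},
      {"wasm", "application/wasm"},
  };
  auto it = kTable.find(ext);
  if (it == kTable.end())
    return false;
  *mime = it->second;
  return true;
}

bool MimeTypeForPath(const std::string& path, std::string* mime) {
  // Mirror the patch's behavior: no extension means no MIME type result.
  auto dot = path.find_last_of('.');
  if (dot == std::string::npos || dot + 1 == path.size())
    return false;
  return WellKnownMimeTypeFromExtension(path.substr(dot + 1), mime);
}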
@@ -7,6 +7,7 @@
#include <string>
#include "atom/browser/api/atom_api_url_request.h"
#include "atom/browser/atom_browser_context.h"
#include "atom/browser/net/atom_url_request_job_factory.h"
#include "base/callback.h"
#include "content/public/browser/browser_thread.h"
#include "net/base/elements_upload_data_stream.h"
@@ -120,6 +121,9 @@ void AtomURLRequest::DoInitialize(
request_->set_method(method);
// Do not send cookies from the cookie store.
DoSetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES);
// Set a flag to stop custom protocol from intercepting this request.
request_->SetUserData(DisableProtocolInterceptFlagKey(),
base::WrapUnique(new base::SupportsUserData::Data()));
}

void AtomURLRequest::DoTerminate() {
@@ -15,8 +15,18 @@ using content::BrowserThread;
|
||||
|
||||
namespace atom {
|
||||
|
||||
namespace {
|
||||
|
||||
int disable_protocol_intercept_flag_key = 0;
|
||||
|
||||
} // namespace
|
||||
|
||||
typedef net::URLRequestJobFactory::ProtocolHandler ProtocolHandler;
|
||||
|
||||
const void* DisableProtocolInterceptFlagKey() {
|
||||
return &disable_protocol_intercept_flag_key;
|
||||
}
|
||||
|
||||
AtomURLRequestJobFactory::AtomURLRequestJobFactory() {}
|
||||
|
||||
AtomURLRequestJobFactory::~AtomURLRequestJobFactory() {
|
||||
@@ -93,6 +103,8 @@ net::URLRequestJob* AtomURLRequestJobFactory::MaybeCreateJobWithProtocolHandler(
|
||||
auto it = protocol_handler_map_.find(scheme);
|
||||
if (it == protocol_handler_map_.end())
|
||||
return nullptr;
|
||||
if (request->GetUserData(DisableProtocolInterceptFlagKey()))
|
||||
return nullptr;
|
||||
return it->second->MaybeCreateJob(request, network_delegate);
|
||||
}
|
||||
|
||||
|
||||
@@ -16,6 +16,8 @@

namespace atom {

const void* DisableProtocolInterceptFlagKey();

class AtomURLRequestJobFactory : public net::URLRequestJobFactory {
public:
AtomURLRequestJobFactory();

@@ -4,6 +4,8 @@

#include "atom/browser/node_debugger.h"

#include <string>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "libplatform/libplatform.h"
@@ -27,10 +29,15 @@ void NodeDebugger::Start() {
node::DebugOptions options;
for (auto& arg : base::CommandLine::ForCurrentProcess()->argv()) {
#if defined(OS_WIN)
options.ParseOption("Electron", base::UTF16ToUTF8(arg));
const std::string nice_arg = base::UTF16ToUTF8(arg);
#else
options.ParseOption("Electron", arg);
const std::string& nice_arg = arg;
#endif
// Stop handling arguments after a "--" to be consistent with Chromium
if (nice_arg == "--")
break;

options.ParseOption("Electron", nice_arg);
}

if (options.inspector_enabled()) {
@@ -42,7 +49,7 @@ void NodeDebugger::Start() {
// the debugger on the first line
if (options.wait_for_connect()) {
mate::Dictionary process(env_->isolate(), env_->process_object());
process.Set("_debugWaitConnect", true);
process.Set("_breakFirstLine", true);
}

inspector->Start(platform_.get(), nullptr, options);
@@ -124,7 +124,7 @@ class AtomCopyFrameGenerator {
}

void GenerateCopyFrame(const gfx::Rect& damage_rect) {
if (!view_->render_widget_host())
if (!view_->render_widget_host() || !view_->IsPainting())
return;

std::unique_ptr<cc::CopyOutputRequest> request =
@@ -255,6 +255,8 @@ class AtomBeginFrameTimer : public cc::DelayBasedTimeSourceClient {

OffScreenRenderWidgetHostView::OffScreenRenderWidgetHostView(
bool transparent,
bool painting,
int frame_rate,
const OnPaintCallback& callback,
content::RenderWidgetHost* host,
OffScreenRenderWidgetHostView* parent_host_view,
@@ -268,17 +270,18 @@ OffScreenRenderWidgetHostView::OffScreenRenderWidgetHostView(
transparent_(transparent),
callback_(callback),
parent_callback_(nullptr),
frame_rate_(60),
frame_rate_(frame_rate),
frame_rate_threshold_us_(0),
last_time_(base::Time::Now()),
scale_factor_(kDefaultScaleFactor),
size_(native_window->GetSize()),
painting_(true),
painting_(painting),
is_showing_(!render_widget_host_->is_hidden()),
is_destroyed_(false),
popup_position_(gfx::Rect()),
hold_resize_(false),
pending_resize_(false),
paint_callback_running_(false),
renderer_compositor_frame_sink_(nullptr),
background_color_(SkColor()),
weak_ptr_factory_(this) {
@@ -303,7 +306,7 @@ OffScreenRenderWidgetHostView::OffScreenRenderWidgetHostView(
new ui::Compositor(context_factory_private->AllocateFrameSinkId(),
content::GetContextFactory(), context_factory_private,
base::ThreadTaskRunnerHandle::Get()));
compositor_->SetAcceleratedWidget(native_window_->GetAcceleratedWidget());
compositor_->SetAcceleratedWidget(gfx::kNullAcceleratedWidget);
compositor_->SetRootLayer(root_layer_.get());
#endif
GetCompositor()->SetDelegate(this);
@@ -738,6 +741,8 @@ content::RenderWidgetHostViewBase*

return new OffScreenRenderWidgetHostView(
transparent_,
true,
embedder_host_view->GetFrameRate(),
callback_,
render_widget_host,
embedder_host_view,
@@ -930,7 +935,7 @@ bool OffScreenRenderWidgetHostView::IsAutoResizeEnabled() const {

void OffScreenRenderWidgetHostView::SetNeedsBeginFrames(
bool needs_begin_frames) {
SetupFrameRate(false);
SetupFrameRate(true);

begin_frame_timer_->SetActive(needs_begin_frames);

@@ -1004,7 +1009,9 @@ void OffScreenRenderWidgetHostView::OnPaint(
}

damage.Intersect(GetViewBounds());
paint_callback_running_ = true;
callback_.Run(damage, bitmap);
paint_callback_running_ = false;

for (size_t i = 0; i < damages.size(); i++) {
CopyBitmapTo(bitmap, originals[i], damages[i]);
@@ -1151,7 +1158,7 @@ void OffScreenRenderWidgetHostView::SetPainting(bool painting) {
painting_ = painting;

if (software_output_device_) {
software_output_device_->SetActive(painting_, true);
software_output_device_->SetActive(painting_, !paint_callback_running_);
}
}

@@ -1168,16 +1175,16 @@ void OffScreenRenderWidgetHostView::SetFrameRate(int frame_rate) {
} else {
if (frame_rate <= 0)
frame_rate = 1;
if (frame_rate > 60)
frame_rate = 60;
if (frame_rate > 240)
frame_rate = 240;

frame_rate_ = frame_rate;
}

SetupFrameRate(true);

for (auto guest_host_view : guest_host_views_)
guest_host_view->SetFrameRate(frame_rate);

SetupFrameRate(true);
}

int OffScreenRenderWidgetHostView::GetFrameRate() const {
@@ -1205,7 +1212,7 @@ void OffScreenRenderWidgetHostView::SetupFrameRate(bool force) {

frame_rate_threshold_us_ = 1000000 / frame_rate_;

GetCompositor()->vsync_manager()->SetAuthoritativeVSyncInterval(
GetCompositor()->SetAuthoritativeVSyncInterval(
base::TimeDelta::FromMicroseconds(frame_rate_threshold_us_));

if (copy_frame_generator_.get()) {

@@ -74,6 +74,8 @@ class OffScreenRenderWidgetHostView
public OffscreenViewProxyObserver {
public:
OffScreenRenderWidgetHostView(bool transparent,
bool painting,
int frame_rate,
const OnPaintCallback& callback,
content::RenderWidgetHost* render_widget_host,
OffScreenRenderWidgetHostView* parent_host_view,
@@ -314,6 +316,8 @@ class OffScreenRenderWidgetHostView
bool hold_resize_;
bool pending_resize_;

bool paint_callback_running_;

std::unique_ptr<ui::Layer> root_layer_;
std::unique_ptr<ui::Compositor> compositor_;
std::unique_ptr<content::DelegatedFrameHost> delegated_frame_host_;
@@ -15,6 +15,8 @@ namespace atom {
OffScreenWebContentsView::OffScreenWebContentsView(
bool transparent, const OnPaintCallback& callback)
: transparent_(transparent),
painting_(true),
frame_rate_(60),
callback_(callback),
web_contents_(nullptr) {
#if defined(OS_MACOSX)
@@ -103,6 +105,8 @@ content::RenderWidgetHostViewBase*
auto relay = NativeWindowRelay::FromWebContents(web_contents_);
return new OffScreenRenderWidgetHostView(
transparent_,
painting_,
GetFrameRate(),
callback_,
render_widget_host,
nullptr,
@@ -125,6 +129,8 @@ content::RenderWidgetHostViewBase*

return new OffScreenRenderWidgetHostView(
transparent_,
true,
view->GetFrameRate(),
callback_,
render_widget_host,
view,
@@ -202,6 +208,42 @@ void OffScreenWebContentsView::UpdateDragCursor(
blink::WebDragOperation operation) {
}

void OffScreenWebContentsView::SetPainting(bool painting) {
auto* view = GetView();
if (view != nullptr) {
view->SetPainting(painting);
} else {
painting_ = painting;
}
}

bool OffScreenWebContentsView::IsPainting() const {
auto* view = GetView();
if (view != nullptr) {
return view->IsPainting();
} else {
return painting_;
}
}

void OffScreenWebContentsView::SetFrameRate(int frame_rate) {
auto* view = GetView();
if (view != nullptr) {
view->SetFrameRate(frame_rate);
} else {
frame_rate_ = frame_rate;
}
}

int OffScreenWebContentsView::GetFrameRate() const {
auto* view = GetView();
if (view != nullptr) {
return view->GetFrameRate();
} else {
return frame_rate_;
}
}

OffScreenRenderWidgetHostView* OffScreenWebContentsView::GetView() const {
if (web_contents_) {
return static_cast<OffScreenRenderWidgetHostView*>(

@@ -69,6 +69,11 @@ class OffScreenWebContentsView : public content::WebContentsView,
content::RenderWidgetHostImpl* source_rwh) override;
void UpdateDragCursor(blink::WebDragOperation operation) override;

void SetPainting(bool painting);
bool IsPainting() const;
void SetFrameRate(int frame_rate);
int GetFrameRate() const;

private:
#if defined(OS_MACOSX)
void PlatformCreate();
@@ -78,6 +83,8 @@ class OffScreenWebContentsView : public content::WebContentsView,
OffScreenRenderWidgetHostView* GetView() const;

const bool transparent_;
bool painting_;
int frame_rate_;
OnPaintCallback callback_;

// Weak refs.
@@ -140,11 +140,7 @@ bool RelaunchAppWithHelper(const base::FilePath& helper,
}

int RelauncherMain(const content::MainFunctionParams& main_parameters) {
#if defined(OS_WIN)
const StringVector& argv = atom::AtomCommandLine::wargv();
#else
const StringVector& argv = atom::AtomCommandLine::argv();
#endif

if (argv.size() < 4 || argv[1] != internal::kRelauncherTypeArg) {
LOG(ERROR) << "relauncher process invoked with unexpected arguments";

@@ -17,9 +17,9 @@
<key>CFBundleIconFile</key>
<string>electron.icns</string>
<key>CFBundleVersion</key>
<string>1.8.2</string>
<string>1.8.8</string>
<key>CFBundleShortVersionString</key>
<string>1.8.2</string>
<string>1.8.8</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.developer-tools</string>
<key>LSMinimumSystemVersion</key>

@@ -56,8 +56,8 @@ END
//

VS_VERSION_INFO VERSIONINFO
FILEVERSION 1,8,2,2
PRODUCTVERSION 1,8,2,2
FILEVERSION 1,8,8,0
PRODUCTVERSION 1,8,8,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -74,12 +74,12 @@ BEGIN
BEGIN
VALUE "CompanyName", "GitHub, Inc."
VALUE "FileDescription", "Electron"
VALUE "FileVersion", "1.8.2"
VALUE "FileVersion", "1.8.8"
VALUE "InternalName", "electron.exe"
VALUE "LegalCopyright", "Copyright (C) 2015 GitHub, Inc. All rights reserved."
VALUE "OriginalFilename", "electron.exe"
VALUE "ProductName", "Electron"
VALUE "ProductVersion", "1.8.2"
VALUE "ProductVersion", "1.8.8"
VALUE "SquirrelAwareVersion", "1"
END
END

@@ -91,7 +91,9 @@ bool TriggerAcceleratorTableCommand(AcceleratorTable* table,
if (base::ContainsKey(*table, accelerator)) {
const accelerator_util::MenuItem& item = (*table)[accelerator];
if (item.model->IsEnabledAt(item.position)) {
item.model->ActivatedAt(item.position);
const auto event_flags =
accelerator.MaskOutKeyEventFlags(accelerator.modifiers());
item.model->ActivatedAt(item.position, event_flags);
return true;
}
}
@@ -154,6 +154,18 @@ bool ShowOpenDialog(const DialogSettings& settings,
return true;
}

void OpenDialogCompletion(int chosen, NSOpenPanel* dialog,
const DialogSettings& settings,
const OpenDialogCallback& callback) {
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, std::vector<base::FilePath>());
} else {
std::vector<base::FilePath> paths;
ReadDialogPaths(dialog, &paths);
callback.Run(true, paths);
}
}

void ShowOpenDialog(const DialogSettings& settings,
const OpenDialogCallback& c) {
NSOpenPanel* dialog = [NSOpenPanel openPanel];
@@ -167,25 +179,14 @@ void ShowOpenDialog(const DialogSettings& settings,

if (!settings.parent_window || !settings.parent_window->GetNativeWindow() ||
settings.force_detached) {
int chosen = [dialog runModal];
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, std::vector<base::FilePath>());
} else {
std::vector<base::FilePath> paths;
ReadDialogPaths(dialog, &paths);
callback.Run(true, paths);
}
[dialog beginWithCompletionHandler:^(NSInteger chosen) {
OpenDialogCompletion(chosen, dialog, settings, callback);
}];
} else {
NSWindow* window = settings.parent_window->GetNativeWindow();
[dialog beginSheetModalForWindow:window
completionHandler:^(NSInteger chosen) {
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, std::vector<base::FilePath>());
} else {
std::vector<base::FilePath> paths;
ReadDialogPaths(dialog, &paths);
callback.Run(true, paths);
}
OpenDialogCompletion(chosen, dialog, settings, callback);
}];
}
}
@@ -205,6 +206,17 @@ bool ShowSaveDialog(const DialogSettings& settings,
return true;
}

void SaveDialogCompletion(int chosen, NSSavePanel* dialog,
const DialogSettings& settings,
const SaveDialogCallback& callback) {
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, base::FilePath());
} else {
std::string path = base::SysNSStringToUTF8([[dialog URL] path]);
callback.Run(true, base::FilePath(path));
}
}

void ShowSaveDialog(const DialogSettings& settings,
const SaveDialogCallback& c) {
NSSavePanel* dialog = [NSSavePanel savePanel];
@@ -216,23 +228,14 @@ void ShowSaveDialog(const DialogSettings& settings,

if (!settings.parent_window || !settings.parent_window->GetNativeWindow() ||
settings.force_detached) {
int chosen = [dialog runModal];
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, base::FilePath());
} else {
std::string path = base::SysNSStringToUTF8([[dialog URL] path]);
callback.Run(true, base::FilePath(path));
}
[dialog beginWithCompletionHandler:^(NSInteger chosen) {
SaveDialogCompletion(chosen, dialog, settings, callback);
}];
} else {
NSWindow* window = settings.parent_window->GetNativeWindow();
[dialog beginSheetModalForWindow:window
completionHandler:^(NSInteger chosen) {
if (chosen == NSFileHandlingPanelCancelButton) {
callback.Run(false, base::FilePath());
} else {
std::string path = base::SysNSStringToUTF8([[dialog URL] path]);
callback.Run(true, base::FilePath(path));
}
SaveDialogCompletion(chosen, dialog, settings, callback);
}];
}
}
@@ -188,7 +188,7 @@ void ShowMessageBox(NativeWindow* parent_window,
if (!parent_window || !parent_window->GetNativeWindow() ||
parent_window->is_offscreen_dummy()) {
int ret = [[alert autorelease] runModal];
callback.Run(ret, false);
callback.Run(ret, alert.suppressionButton.state == NSOnState);
} else {
ModalDelegate* delegate = [[ModalDelegate alloc] initWithCallback:callback
andAlert:alert

@@ -36,9 +36,16 @@ const CGFloat kVerticalTitleMargin = 2;

@implementation StatusItemView

- (void)dealloc {
trayIcon_ = nil;
menuController_ = nil;
[super dealloc];
}

- (id)initWithImage:(NSImage*)image icon:(atom::TrayIconCocoa*)icon {
image_.reset([image copy]);
trayIcon_ = icon;
menuController_ = nil;
highlight_mode_ = atom::TrayIcon::HighlightMode::SELECTION;
forceHighlight_ = NO;
inMouseEventSequence_ = NO;
@@ -76,6 +83,7 @@ const CGFloat kVerticalTitleMargin = 2;

- (void)removeItem {
[[NSStatusBar systemStatusBar] removeStatusItem:statusItem_];
[statusItem_ setView:nil];
statusItem_.reset();
}
@@ -414,11 +422,18 @@ void TrayIconCocoa::SetContextMenu(AtomMenuModel* menu_model) {
// Subscribe to MenuClosed event.
if (menu_model_)
menu_model_->RemoveObserver(this);
menu_model->AddObserver(this);

// Create native menu.
menu_.reset([[AtomMenuController alloc] initWithModel:menu_model
useDefaultAccelerator:NO]);
menu_model_ = menu_model;

if (menu_model) {
menu_model->AddObserver(this);
// Create native menu.
menu_.reset([[AtomMenuController alloc] initWithModel:menu_model
useDefaultAccelerator:NO]);
} else {
menu_.reset();
}

[status_item_view_ setMenuController:menu_.get()];
}
@@ -3,12 +3,16 @@
// found in the LICENSE file.

#include "atom/browser/ui/win/atom_desktop_native_widget_aura.h"
#include "ui/views/corewm/tooltip_controller.h"
#include "ui/wm/public/tooltip_client.h"

namespace atom {

AtomDesktopNativeWidgetAura::AtomDesktopNativeWidgetAura(
views::internal::NativeWidgetDelegate* delegate)
: views::DesktopNativeWidgetAura(delegate) {
// This is to enable the override of OnWindowActivated
aura::client::SetActivationChangeObserver(GetNativeWindow(), this);
}

void AtomDesktopNativeWidgetAura::Activate() {
@@ -19,4 +23,23 @@ void AtomDesktopNativeWidgetAura::Activate() {
views::DesktopNativeWidgetAura::Activate();
}

void AtomDesktopNativeWidgetAura::OnWindowActivated(
aura::client::ActivationChangeObserver::ActivationReason reason,
aura::Window* gained_active,
aura::Window* lost_active) {
views::DesktopNativeWidgetAura::OnWindowActivated(
reason, gained_active, lost_active);
if (lost_active != nullptr) {
auto* tooltip_controller = static_cast<views::corewm::TooltipController*>(
aura::client::GetTooltipClient(lost_active->GetRootWindow()));

// This will cause the tooltip to be hidden when a window is deactivated,
// as it should be.
// TODO(brenca): Remove this fix when the chromium issue is fixed.
// crbug.com/724538
if (tooltip_controller != nullptr)
tooltip_controller->OnCancelMode(nullptr);
}
}

}  // namespace atom

@@ -19,6 +19,10 @@ class AtomDesktopNativeWidgetAura : public views::DesktopNativeWidgetAura {
void Activate() override;

private:
void OnWindowActivated(
aura::client::ActivationChangeObserver::ActivationReason reason,
aura::Window* gained_active,
aura::Window* lost_active) override;
DISALLOW_COPY_AND_ASSIGN(AtomDesktopNativeWidgetAura);
};
@@ -47,6 +47,35 @@ WebContentsPreferences::WebContentsPreferences(
web_contents->SetUserData(UserDataKey(), this);

instances_.push_back(this);

// Set WebPreferences defaults onto the JS object
SetDefaultBoolIfUndefined("plugins", false);
SetDefaultBoolIfUndefined(options::kExperimentalFeatures, false);
SetDefaultBoolIfUndefined(options::kExperimentalCanvasFeatures, false);
bool node = SetDefaultBoolIfUndefined(options::kNodeIntegration, true);
SetDefaultBoolIfUndefined(options::kNodeIntegrationInWorker, false);
SetDefaultBoolIfUndefined(options::kWebviewTag, node);
SetDefaultBoolIfUndefined("sandbox", false);
SetDefaultBoolIfUndefined("nativeWindowOpen", false);
SetDefaultBoolIfUndefined(options::kContextIsolation, false);
SetDefaultBoolIfUndefined("javascript", true);
SetDefaultBoolIfUndefined("images", true);
SetDefaultBoolIfUndefined("textAreasAreResizable", true);
SetDefaultBoolIfUndefined("webgl", true);
bool webSecurity = true;
SetDefaultBoolIfUndefined("webSecurity", webSecurity);
// If webSecurity was explicitly set to false, let's inherit that into
// insecureContent
if (web_preferences.Get("webSecurity", &webSecurity) && !webSecurity) {
SetDefaultBoolIfUndefined("allowRunningInsecureContent", true);
} else {
SetDefaultBoolIfUndefined("allowRunningInsecureContent", false);
}
#if defined(OS_MACOSX)
SetDefaultBoolIfUndefined(options::kScrollBounce, false);
#endif
SetDefaultBoolIfUndefined("offscreen", false);
last_web_preferences_.MergeDictionary(&web_preferences_);
}

WebContentsPreferences::~WebContentsPreferences() {
@@ -55,6 +84,16 @@ WebContentsPreferences::~WebContentsPreferences() {
instances_.end());
}

bool WebContentsPreferences::SetDefaultBoolIfUndefined(const std::string key,
bool val) {
bool existing;
if (!web_preferences_.GetBoolean(key, &existing)) {
web_preferences_.SetBoolean(key, val);
return val;
}
return existing;
}

void WebContentsPreferences::Merge(const base::DictionaryValue& extend) {
web_preferences_.MergeDictionary(&extend);
}
@@ -79,6 +118,12 @@ void WebContentsPreferences::AppendExtraCommandLineSwitches(

base::DictionaryValue& web_preferences = self->web_preferences_;

// We are appending args to a webContents so let's save the current state
// of our preferences object so that during the lifetime of the WebContents
// we can fetch the options used to initially configure the WebContents
self->last_web_preferences_.Clear();
self->last_web_preferences_.MergeDictionary(&web_preferences);

bool b;
// Check if plugins are enabled.
if (web_preferences.GetBoolean("plugins", &b) && b)
@@ -57,6 +57,9 @@ class WebContentsPreferences

// Returns the web preferences.
base::DictionaryValue* web_preferences() { return &web_preferences_; }
base::DictionaryValue* last_web_preferences() {
return &last_web_preferences_;
}

private:
friend class content::WebContentsUserData<WebContentsPreferences>;
@@ -65,6 +68,10 @@ class WebContentsPreferences

content::WebContents* web_contents_;
base::DictionaryValue web_preferences_;
base::DictionaryValue last_web_preferences_;

// Set preference value to given bool if user did not provide value
bool SetDefaultBoolIfUndefined(const std::string key, bool val);

// Get preferences value as integer possibly coercing it from a string
bool GetInteger(const std::string& attributeName, int* intValue);

@@ -156,10 +156,10 @@ file_dialog::Filters GetFileTypesFromAcceptType(

filters.push_back(file_dialog::Filter());

if (valid_type_count > 1 ||
(valid_type_count == 1 && description.empty() && extensions.size() > 1))
if (valid_type_count > 1 || (valid_type_count == 1 && description.empty()))
description = "Custom Files";

DCHECK(!description.empty());
filters[0].first = description;

for (const auto& extension : extensions) {
@@ -215,6 +215,7 @@ void WebDialogHelper::RunFileChooser(
flags |= file_dialog::FILE_DIALOG_MULTI_SELECTIONS;
case content::FileChooserParams::Open:
flags |= file_dialog::FILE_DIALOG_OPEN_FILE;
flags |= file_dialog::FILE_DIALOG_TREAT_PACKAGE_APP_AS_DIRECTORY;
break;
case content::FileChooserParams::UploadFolder:
flags |= file_dialog::FILE_DIALOG_OPEN_DIRECTORY;
@@ -10,31 +10,22 @@
namespace atom {

// static
std::vector<std::string> AtomCommandLine::argv_;

#if defined(OS_WIN)
// static
std::vector<std::wstring> AtomCommandLine::wargv_;
#endif
base::CommandLine::StringVector AtomCommandLine::argv_;

// static
void AtomCommandLine::Init(int argc, const char* const* argv) {
void AtomCommandLine::Init(int argc, base::CommandLine::CharType** argv) {
DCHECK(argv_.empty());

// NOTE: uv_setup_args does nothing on Windows, so we don't need to call it.
// Otherwise we'd have to convert the arguments from UTF16.
#if !defined(OS_WIN)
// Hack around with the argv pointer. Used for process.title = "blah"
char** new_argv = uv_setup_args(argc, const_cast<char**>(argv));
for (int i = 0; i < argc; ++i) {
argv_.push_back(new_argv[i]);
}
}

#if defined(OS_WIN)
// static
void AtomCommandLine::InitW(int argc, const wchar_t* const* argv) {
for (int i = 0; i < argc; ++i) {
wargv_.push_back(argv[i]);
}
}
argv = uv_setup_args(argc, argv);
#endif

argv_.assign(argv, argv + argc);
}

#if defined(OS_LINUX)
// static
void AtomCommandLine::InitializeFromCommandLine() {

@@ -8,6 +8,7 @@
#include <string>
#include <vector>

#include "base/command_line.h"
#include "base/macros.h"
#include "build/build_config.h"

@@ -16,13 +17,9 @@ namespace atom {
// Singleton to remember the original "argc" and "argv".
class AtomCommandLine {
public:
static void Init(int argc, const char* const* argv);
static std::vector<std::string> argv() { return argv_; }
static const base::CommandLine::StringVector& argv() { return argv_; }

#if defined(OS_WIN)
static void InitW(int argc, const wchar_t* const* argv);
static std::vector<std::wstring> wargv() { return wargv_; }
#endif
static void Init(int argc, base::CommandLine::CharType** argv);

#if defined(OS_LINUX)
// On Linux the command line has to be read from base::CommandLine since
@@ -31,11 +28,7 @@ class AtomCommandLine {
#endif

private:
static std::vector<std::string> argv_;

#if defined(OS_WIN)
static std::vector<std::wstring> wargv_;
#endif
static base::CommandLine::StringVector argv_;

DISALLOW_IMPLICIT_CONSTRUCTORS(AtomCommandLine);
};

@@ -7,22 +7,24 @@

#define ATOM_MAJOR_VERSION 1
#define ATOM_MINOR_VERSION 8
#define ATOM_PATCH_VERSION 2
#define ATOM_PRE_RELEASE_VERSION -beta.2

#ifndef ATOM_PRE_RELEASE_VERSION
# define ATOM_PRE_RELEASE_VERSION ""
#endif
#define ATOM_PATCH_VERSION 8
// #define ATOM_PRE_RELEASE_VERSION

#ifndef ATOM_STRINGIFY
#define ATOM_STRINGIFY(n) ATOM_STRINGIFY_HELPER(n)
#define ATOM_STRINGIFY_HELPER(n) #n
#endif

# define ATOM_VERSION_STRING ATOM_STRINGIFY(ATOM_MAJOR_VERSION) "." \
#ifndef ATOM_PRE_RELEASE_VERSION
#define ATOM_VERSION_STRING ATOM_STRINGIFY(ATOM_MAJOR_VERSION) "." \
ATOM_STRINGIFY(ATOM_MINOR_VERSION) "." \
ATOM_STRINGIFY(ATOM_PATCH_VERSION)
#else
#define ATOM_VERSION_STRING ATOM_STRINGIFY(ATOM_MAJOR_VERSION) "." \
ATOM_STRINGIFY(ATOM_MINOR_VERSION) "." \
ATOM_STRINGIFY(ATOM_PATCH_VERSION) \
ATOM_STRINGIFY(ATOM_PRE_RELEASE_VERSION)
#endif

#define ATOM_VERSION "v" ATOM_VERSION_STRING
@@ -209,7 +209,10 @@ void CrashReporterWin::SetUploadParameters() {
int CrashReporterWin::CrashForException(EXCEPTION_POINTERS* info) {
if (breakpad_) {
breakpad_->WriteMinidumpForException(info);
TerminateProcessWithoutDump();
if (skip_system_crash_handler_)
TerminateProcessWithoutDump();
else
RaiseFailFastException(info->ExceptionRecord, info->ContextRecord, 0);
}
return EXCEPTION_CONTINUE_SEARCH;
}
@@ -229,7 +232,7 @@ bool CrashReporterWin::MinidumpCallback(const wchar_t* dump_path,
MDRawAssertionInfo* assertion,
bool succeeded) {
CrashReporterWin* self = static_cast<CrashReporterWin*>(context);
if (succeeded && !self->skip_system_crash_handler_)
if (succeeded && self->skip_system_crash_handler_)
return true;
else
return false;

@@ -4,6 +4,7 @@

#include "atom/common/node_bindings.h"

#include <algorithm>
#include <string>
#include <vector>

@@ -17,6 +18,7 @@
#include "base/files/file_path.h"
#include "base/path_service.h"
#include "base/run_loop.h"
#include "base/strings/utf_string_conversions.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "content/public/browser/browser_thread.h"
@@ -137,6 +139,7 @@ void NodeBindings::Initialize() {

// Init node.
// (we assume node::Init would not modify the parameters under embedded mode).
// NOTE: If you change this line, please ping @codebytere or @MarshallOfSound
node::Init(nullptr, nullptr, nullptr, nullptr);

#if defined(OS_WIN)
@@ -150,7 +153,14 @@ void NodeBindings::Initialize() {

node::Environment* NodeBindings::CreateEnvironment(
v8::Handle<v8::Context> context) {
#if defined(OS_WIN)
auto& atom_args = AtomCommandLine::argv();
std::vector<std::string> args(atom_args.size());
std::transform(atom_args.cbegin(), atom_args.cend(), args.begin(),
[](auto& a) { return base::WideToUTF8(a); });
#else
auto args = AtomCommandLine::argv();
#endif

// Feed node the path to initialization script.
base::FilePath::StringType process_type;
@@ -170,8 +180,7 @@ node::Environment* NodeBindings::CreateEnvironment(
resources_path.Append(FILE_PATH_LITERAL("electron.asar"))
.Append(process_type)
.Append(FILE_PATH_LITERAL("init.js"));
std::string script_path_str = script_path.AsUTF8Unsafe();
args.insert(args.begin() + 1, script_path_str.c_str());
args.insert(args.begin() + 1, script_path.AsUTF8Unsafe());

std::unique_ptr<const char*[]> c_argv = StringVectorToArgArray(args);
node::Environment* env = node::CreateEnvironment(
@@ -13,6 +13,7 @@
#include "atom/renderer/api/atom_api_spell_check_client.h"
#include "base/memory/memory_pressure_listener.h"
#include "content/public/renderer/render_frame.h"
#include "content/public/renderer/render_frame_visitor.h"
#include "content/public/renderer/render_view.h"
#include "native_mate/dictionary.h"
#include "native_mate/object_template_builder.h"
@@ -58,6 +59,30 @@ class ScriptExecutionCallback : public blink::WebScriptExecutionCallback {
DISALLOW_COPY_AND_ASSIGN(ScriptExecutionCallback);
};

class FrameSpellChecker : public content::RenderFrameVisitor {
public:
explicit FrameSpellChecker(SpellCheckClient* spell_check_client,
content::RenderFrame* main_frame)
: spell_check_client_(spell_check_client), main_frame_(main_frame) {}
~FrameSpellChecker() override {
spell_check_client_ = nullptr;
main_frame_ = nullptr;
}
bool Visit(content::RenderFrame* render_frame) override {
auto view = render_frame->GetRenderView();
if (view->GetMainRenderFrame() == main_frame_ ||
(render_frame->IsMainFrame() && render_frame == main_frame_)) {
render_frame->GetWebFrame()->SetTextCheckClient(spell_check_client_);
}
return true;
}

private:
SpellCheckClient* spell_check_client_;
content::RenderFrame* main_frame_;
DISALLOW_COPY_AND_ASSIGN(FrameSpellChecker);
};

}  // namespace

WebFrame::WebFrame(v8::Isolate* isolate)
@@ -101,6 +126,7 @@ double WebFrame::GetZoomFactor() const {

void WebFrame::SetVisualZoomLevelLimits(double min_level, double max_level) {
web_frame_->View()->SetDefaultPageScaleLimits(min_level, max_level);
web_frame_->View()->SetIgnoreViewportTagScaleLimits(true);
}

void WebFrame::SetLayoutZoomLevelLimits(double min_level, double max_level) {
@@ -139,10 +165,15 @@ void WebFrame::SetSpellCheckProvider(mate::Arguments* args,
return;
}

spell_check_client_.reset(new SpellCheckClient(
std::unique_ptr<SpellCheckClient> client(new SpellCheckClient(
language, auto_spell_correct_turned_on, args->isolate(), provider));
// Set spellchecker for all live frames in the same process or
// in the sandbox mode for all live sub frames to this WebFrame.
FrameSpellChecker spell_checker(
client.get(), content::RenderFrame::FromWebFrame(web_frame_));
content::RenderFrame::ForEach(&spell_checker);
spell_check_client_.swap(client);
web_frame_->View()->SetSpellCheckClient(spell_check_client_.get());
web_frame_->SetTextCheckClient(spell_check_client_.get());
}

void WebFrame::RegisterURLSchemeAsSecure(const std::string& scheme) {
@@ -38,7 +38,6 @@ namespace {
const std::string kIpcKey = "ipcNative";
const std::string kModuleCacheKey = "native-module-cache";

v8::Local<v8::Object> GetModuleCache(v8::Isolate* isolate) {
mate::Dictionary global(isolate, isolate->GetCurrentContext()->Global());
v8::Local<v8::Value> cache;
@@ -158,8 +157,6 @@ void AtomSandboxedRendererClient::DidCreateScriptContext(
base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
std::string preload_script = command_line->GetSwitchValueASCII(
switches::kPreloadScript);
if (preload_script.empty())
return;

auto isolate = context->GetIsolate();
v8::HandleScope handle_scope(isolate);

@@ -228,10 +228,8 @@
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': '2',  # /MD (nondebug DLL)
# 1, optimizeMinSpace, Minimize Size (/O1)
'Optimization': '1',
# 2, favorSize - Favor small code (/Os)
'FavorSizeOrSpeed': '2',
'Optimization': '2',  # /O2
'WholeProgramOptimization': 'true',  # /GL
# See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
'InlineFunctionExpansion': '2',  # 2 = max
# See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
@@ -241,11 +239,24 @@
# perform FPO regardless, so we must explicitly disable.
# We still want the false setting above to avoid having
# "/Oy /Oy-" and warnings about overriding.
'AdditionalOptions': ['/Oy-'],
'AdditionalOptions': ['/Oy-', '/d2guard4'],
},
'VCLibrarianTool': {
'LinkTimeCodeGeneration': 'true',  # /LTCG
},
'VCLinkerTool': {
# Control Flow Guard is a security feature in Windows
# 8.1 and higher designed to prevent exploitation of
# indirect calls in executables.
# Control Flow Guard is enabled using the /d2guard4
# compiler setting in combination with the /guard:cf
# linker setting.
'AdditionalOptions': ['/guard:cf'],
# Turn off incremental linking to save binary size.
'LinkIncremental': '1',  # /INCREMENTAL:NO
'LinkTimeCodeGeneration': '1',  # /LTCG
'OptimizeReferences': 2,  # /OPT:REF
'EnableCOMDATFolding': 2,  # /OPT:ICF
},
},
'conditions': [
@@ -270,6 +281,18 @@
'-Wl,--gc-sections',
],
}],  # OS=="linux"
['OS=="linux" and target_arch in ["ia32", "x64", "arm64"]', {
'cflags': [
'-flto',
],
'ldflags': [
'-flto',
'-fuse-ld=gold',
'-Wl,-plugin-opt,O1',
'-Wl,-plugin-opt,-function-sections',
'-Wl,--icf=all',
],
}],
],
}, # Release_Base
'conditions': [
@@ -174,7 +174,7 @@ void DesktopNotificationController::AnimateAll() {
if (SystemParametersInfo(SPI_GETWORKAREA, 0, &work_area, 0)) {
ScreenMetrics metrics;
POINT origin = { work_area.right,
work_area.bottom - metrics.Y(toast_margin_<int>) };
work_area.bottom - metrics.Y(toast_margin_) };

auto hdwp =
BeginDeferWindowPos(static_cast<int>(instances_.size()));
@@ -231,7 +231,7 @@ void DesktopNotificationController::AnimateAll() {
// Set new toast positions
if (!instances_.empty()) {
ScreenMetrics metrics;
auto margin = metrics.Y(toast_margin_<int>);
auto margin = metrics.Y(toast_margin_);

int target_pos = 0;
for (auto&& inst : instances_) {
@@ -305,7 +305,7 @@ void DesktopNotificationController::CreateToast(NotificationLink&& data) {
auto toast = Toast::Get(item.hwnd);
toast_pos = toast->GetVerticalPosition() +
toast->GetHeight() +
scr.Y(toast_margin_<int>);
scr.Y(toast_margin_);
}

instances_.push_back({ hwnd, move(data) });

@@ -36,8 +36,7 @@ class DesktopNotificationController {
TimerID_Animate = 1
};

template<typename T>
static constexpr T toast_margin_ = 20;
static constexpr int toast_margin_ = 20;

// Wrapper around `NotificationData` which makes sure that
// the `controller` member is cleared when the controller object

@@ -140,6 +140,12 @@
'<(libchromiumcontent_src_dir)/v8',
'<(libchromiumcontent_src_dir)/v8/include',
],
'defines': [
# Export V8 symbols from node.dll / libnode.so
'BUILDING_V8_SHARED',
'BUILDING_V8_PLATFORM_SHARED',
'BUILDING_V8_BASE_SHARED',
],
'conditions': [
['OS=="mac" and libchromiumcontent_component==0', {
# -all_load is the "whole-archive" on macOS.
@@ -73,6 +73,16 @@ A `Integer` representing the unique ID of the view.

Objects created with `new BrowserView` have the following instance methods:

#### `view.destroy()`

Force closing the view, the `unload` and `beforeunload` events won't be emitted
for the web page. After you're done with a view, call this function in order to
free memory and other resources as soon as possible.

#### `view.isDestroyed()`

Returns `Boolean` - Whether the view is destroyed.

#### `view.setAutoResize(options)` _Experimental_

* `options` Object
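
A minimal usage sketch of the lifecycle documented in the hunk above; the window size, bounds, and URL are illustrative and not taken from the diff:

```javascript
const { BrowserWindow, BrowserView } = require('electron')

// Attach a view to a window, then release it explicitly when done.
const win = new BrowserWindow({ width: 800, height: 600 })
const view = new BrowserView()
win.setBrowserView(view)
view.setBounds({ x: 0, y: 0, width: 800, height: 300 })
view.webContents.loadURL('https://electronjs.org')

// Later: free the view's resources; unload/beforeunload will not fire.
view.destroy()
console.log(view.isDestroyed()) // true
```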

@@ -1174,7 +1174,7 @@ menu bar.

* `progress` Double
* `options` Object (optional)
* `mode` String _Windows_ - Mode for the progress bar. Can be `none`, `normal`, `indeterminate`, `error`, or `paused`.
* `mode` String _Windows_ - Mode for the progress bar. Can be `none`, `normal`, `indeterminate`, `error` or `paused`.

Sets progress value in progress bar. Valid range is [0, 1.0].
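
A short sketch of the call documented above (window creation is assumed and not part of the diff):

```javascript
const { BrowserWindow } = require('electron')
const win = new BrowserWindow()

// Report 40% progress on the taskbar/dock icon.
win.setProgressBar(0.4)

// Windows only: switch to an indeterminate progress state.
win.setProgressBar(0.4, { mode: 'indeterminate' })

// A value outside [0, 1.0] removes the progress bar again.
win.setProgressBar(-1)
```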

@@ -1191,7 +1191,7 @@ mode set (but with a value within the valid range), `normal` will be assumed.

#### `win.setOverlayIcon(overlay, description)` _Windows_

* `overlay` [NativeImage](native-image.md) - the icon to display on the bottom
* `overlay` [NativeImage](native-image.md) | null - the icon to display on the bottom
right corner of the taskbar icon. If this parameter is `null`, the overlay is
cleared
* `description` String - a description that will be provided to Accessibility
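
A sketch of the `null` behaviour the change above documents; the icon path is illustrative:

```javascript
const { BrowserWindow, nativeImage } = require('electron')
const win = new BrowserWindow()

// Show a small overlay badge on the Windows taskbar button.
const overlay = nativeImage.createFromPath('unread.png')
win.setOverlayIcon(overlay, 'You have unread messages')

// Passing null clears the overlay again.
win.setOverlayIcon(null, '')
```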

@@ -194,7 +194,7 @@ Sets the `image` associated with this tray icon.

#### `tray.setPressedImage(image)` _macOS_

* `image` [NativeImage](native-image.md)
* `image` ([NativeImage](native-image.md) | String)

Sets the `image` associated with this tray icon when pressed on macOS.

@@ -262,7 +262,7 @@ The `position` is only available on Windows, and it is (0, 0) by default.

#### `tray.setContextMenu(menu)`

* `menu` Menu
* `menu` Menu | null

Sets the context menu for this icon.
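
A sketch of setting and clearing the context menu per the `Menu | null` signature above; the icon path is illustrative:

```javascript
const { Tray, Menu } = require('electron')

const tray = new Tray('icon.png')
const menu = Menu.buildFromTemplate([{ label: 'Quit', role: 'quit' }])
tray.setContextMenu(menu)

// Passing null removes the context menu again.
tray.setContextMenu(null)
```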

@@ -1291,11 +1291,17 @@ Shows pop-up dictionary that searches the selected word on the page.
Set the size of the page. This is only supported for `<webview>` guest contents.

* `options` Object
* `normal` Object (optional) - Normal size of the page. This can be used in
combination with the [`disableguestresize`](web-view-tag.md#disableguestresize)
* `enableAutoSize` Boolean (optional) - true to make the webview container automatically
resize within the bounds specified by the attributes normal, min and max.
* `normal` [Size](structures/size.md) (optional) - Normal size of the page. This can be used in
combination with the [`disableguestresize`](webview-tag.md#disableguestresize)
attribute to manually resize the webview guest contents.
* `min` [Size](structures/size.md) (optional) - Minimum size of the page. This can be used in
combination with the [`disableguestresize`](webview-tag.md#disableguestresize)
attribute to manually resize the webview guest contents.
* `max` [Size](structures/size.md) (optional) - Maximum size of the page. This can be used in
combination with the [`disableguestresize`](webview-tag.md#disableguestresize)
attribute to manually resize the webview guest contents.
* `width` Integer
* `height` Integer
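
A hedged sketch of the call with the `Size` shape documented above; it only applies to `<webview>` guest contents, and the embedding page markup is assumed:

```javascript
// In a page that hosts a <webview> with the disableguestresize attribute set.
const webview = document.querySelector('webview')
webview.addEventListener('dom-ready', () => {
  // Resize the guest contents manually to a fixed normal size.
  webview.getWebContents().setSize({
    normal: { width: 800, height: 600 }
  })
})
```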

#### `contents.isOffscreen()`

@@ -150,6 +150,7 @@ this limitation.
Returns `Object`:

* `images` [MemoryUsageDetails](structures/memory-usage-details.md)
* `scripts` [MemoryUsageDetails](structures/memory-usage-details.md)
* `cssStyleSheets` [MemoryUsageDetails](structures/memory-usage-details.md)
* `xslStyleSheets` [MemoryUsageDetails](structures/memory-usage-details.md)
* `fonts` [MemoryUsageDetails](structures/memory-usage-details.md)
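
A brief sketch of reading the structure listed above; the logging is illustrative:

```javascript
const { webContents } = require('electron')

for (const contents of webContents.getAllWebContents()) {
  const usage = contents.getResourceUsage()
  // Each field is a MemoryUsageDetails object with count/size members.
  console.log('images:', usage.images.count, usage.images.size)
  console.log('scripts:', usage.scripts.count, usage.scripts.size)
}
```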

@@ -17,15 +17,27 @@ Please note, the `ARM` version of Windows is not supported for now.

### Linux

The prebuilt `ia32` (`i686`) and `x64` (`amd64`) binaries of Electron are built on
Ubuntu 12.04, the `arm` binary is built against ARM v7 with hard-float ABI and
NEON for Debian Wheezy.

Whether the prebuilt binary can run on a distribution depends on whether the
distribution includes the libraries that Electron is linked to on the building
platform, so only Ubuntu 12.04 is guaranteed to work, but the following platforms
are also verified to be able to run the prebuilt binaries of Electron:
The prebuilt binaries of Electron are built for Debian Jessie, but whether the
prebuilt binary can run on a distribution depends on whether the distribution
includes the libraries that Electron is linked to on the building platform, so
only Debian Jessie is guaranteed to work, but the following platforms are also
verified to be able to run the prebuilt binaries of Electron:

* Ubuntu 12.04 and later
* Fedora 21
* Debian 8
* Debian 8 and later

Electron provides prebuilt binaries for the following CPU architectures:

* `ia32` (`i686`)
* `x64` (`amd64`)
* `armv7l`
* `arm64`
* `mips64el`

The `arm` binary is built against ARM v7 with hard-float ABI and NEON, and it is
not guaranteed to run on all ARM platforms.

The `mips64el` binary is built with toolchains provided by Loongson, and it is
not guaranteed to run on all MIPS64 platforms. And currently all certificate
related APIs are not working on `mips64el` builds.
@@ -4,7 +4,7 @@
'product_name%': 'Electron',
'company_name%': 'GitHub, Inc',
'company_abbr%': 'github',
'version%': '1.8.2-beta.2',
'version%': '1.8.8',
'js2c_input_dir': '<(SHARED_INTERMEDIATE_DIR)/js2c',
},
'includes': [
@@ -217,6 +217,11 @@
],
},
],
'link_settings': {
'ldflags': [
'-Wl,-z,noexecstack',
],
},
}],  # OS=="linux"
],
},  # target <(project_name)

@@ -98,6 +98,8 @@
'atom/app/atom_main_delegate.cc',
'atom/app/atom_main_delegate.h',
'atom/app/atom_main_delegate_mac.mm',
'atom/app/command_line_args.cc',
'atom/app/command_line_args.h',
'atom/app/node_main.cc',
'atom/app/node_main.h',
'atom/app/uv_task_runner.cc',
@@ -24,7 +24,7 @@ Menu.prototype._init = function () {
getAcceleratorForCommandId: (id, useDefaultAccelerator) => {
const command = this.commandsMap[id]
if (!command) return
if (command.accelerator) return command.accelerator
if (command.accelerator != null) return command.accelerator
if (useDefaultAccelerator) return command.getDefaultRoleAccelerator()
},
getIconForCommandId: id => this.commandsMap[id] ? this.commandsMap[id].icon : undefined,
@@ -44,20 +44,23 @@ Menu.prototype._init = function () {
}

Menu.prototype.popup = function (window, x, y, positioningItem) {
let asyncPopup
let asyncPopup, opts
let [newX, newY, newPosition, newWindow] = [x, y, positioningItem, window]

// menu.popup(x, y, positioningItem)
if (!window) {
// shift over values
if (typeof window !== 'object' || window.constructor !== BrowserWindow) {
[newPosition, newY, newX, newWindow] = [y, x, window, null]
}
if (window != null && !(window instanceof BrowserWindow)) {
[newPosition, newY, newX, newWindow] = [y, x, window, null]
}

// menu.popup({})
if (window != null && window.constructor === Object) {
opts = window
// menu.popup(window, {})
if (x && typeof x === 'object') {
const opts = x
} else if (x && typeof x === 'object') {
opts = x
}

if (opts) {
newX = opts.x
newY = opts.y
newPosition = opts.positioningItem
@@ -65,13 +68,28 @@ Menu.prototype.popup = function (window, x, y, positioningItem) {
}

// set defaults
if (typeof x !== 'number') newX = -1
if (typeof y !== 'number') newY = -1
if (typeof positioningItem !== 'number') newPosition = -1
if (!window) newWindow = BrowserWindow.getFocusedWindow()
if (typeof newX !== 'number') newX = -1
if (typeof newY !== 'number') newY = -1
if (typeof newPosition !== 'number') newPosition = -1
if (typeof asyncPopup !== 'boolean') asyncPopup = false
if (!newWindow || (newWindow && newWindow.constructor !== BrowserWindow)) {
newWindow = BrowserWindow.getFocusedWindow()

// No window focused?
if (!newWindow) {
const browserWindows = BrowserWindow.getAllWindows()

if (browserWindows && browserWindows.length > 0) {
newWindow = browserWindows[0]
} else {
throw new Error(`Cannot open Menu without a BrowserWindow present`)
}
}
}

this.popupAt(newWindow, newX, newY, newPosition, asyncPopup)

return { browserWindow: newWindow, x: newX, y: newY, position: newPosition, async: asyncPopup }
}

Menu.prototype.closePopup = function (window) {
@@ -149,7 +167,7 @@ Menu.setApplicationMenu = function (menu) {
}

Menu.buildFromTemplate = function (template) {
if (!(template instanceof Array)) {
if (!Array.isArray(template)) {
throw new TypeError('Invalid template for Menu')
}

@@ -118,6 +118,18 @@ class TouchBar extends EventEmitter {
window.removeListener('closed', removeListeners)
window._touchBar = null
delete this.windowListeners[id]
const unregisterItems = (items) => {
for (const item of items) {
item.removeListener('change', this.changeListener)
if (item.child instanceof TouchBar) {
unregisterItems(item.child.ordereredItems)
}
}
}
unregisterItems(this.ordereredItems)
if (this.escapeItem) {
this.escapeItem.removeListener('change', this.changeListener)
}
}
window.once('closed', removeListeners)
this.windowListeners[id] = removeListeners

@@ -156,7 +156,7 @@ const createGuest = function (embedder, params) {
// Forward internal web contents event to embedder to handle
// native window.open setup
guest.on('-add-new-contents', (...args) => {
if (guest.getWebPreferences().nativeWindowOpen === true) {
if (guest.getLastWebPreferences().nativeWindowOpen === true) {
const embedder = getEmbedder(guestInstanceId)
if (embedder != null) {
embedder.emit('-add-new-contents', ...args)
@@ -164,7 +164,7 @@ const createGuest = function (embedder, params) {
}
})
guest.on('-web-contents-created', (...args) => {
if (guest.getWebPreferences().nativeWindowOpen === true) {
if (guest.getLastWebPreferences().nativeWindowOpen === true) {
const embedder = getEmbedder(guestInstanceId)
if (embedder != null) {
embedder.emit('-web-contents-created', ...args)

@@ -26,11 +26,11 @@ const mergeOptions = function (child, parent, visited) {
visited.add(parent)
for (const key in parent) {
if (!hasProp.call(parent, key)) continue
if (key in child) continue
if (key in child && key !== 'webPreferences') continue

const value = parent[key]
if (typeof value === 'object') {
child[key] = mergeOptions({}, value, visited)
child[key] = mergeOptions(child[key] || {}, value, visited)
} else {
child[key] = value
}
@@ -46,16 +46,24 @@ const mergeBrowserWindowOptions = function (embedder, options) {
options.webPreferences = {}
}
if (embedder.browserWindowOptions != null) {
let parentOptions = embedder.browserWindowOptions

// if parent's visibility is available, that overrides 'show' flag (#12125)
const win = BrowserWindow.fromWebContents(embedder.webContents)
if (win != null) {
parentOptions = Object.assign({}, embedder.browserWindowOptions, {show: win.isVisible()})
}

// Inherit the original options if it is a BrowserWindow.
mergeOptions(options, embedder.browserWindowOptions)
mergeOptions(options, parentOptions)
} else {
// Or only inherit webPreferences if it is a webview.
mergeOptions(options.webPreferences, embedder.getWebPreferences())
mergeOptions(options.webPreferences, embedder.getLastWebPreferences())
}

// Inherit certain option values from parent window
for (const [name, value] of inheritedWebPreferences) {
if (embedder.getWebPreferences()[name] === value) {
if (embedder.getLastWebPreferences()[name] === value) {
options.webPreferences[name] = value
}
}
@@ -168,8 +176,8 @@ const getGuestWindow = function (guestContents) {
// The W3C does not have anything on this, but from my understanding of the
// security model of |window.opener|, this should be fine.
const canAccessWindow = function (sender, target) {
return (target.getWebPreferences().openerId === sender.id) ||
(sender.getWebPreferences().nodeIntegration === true) ||
return (target.getLastWebPreferences().openerId === sender.id) ||
(sender.getLastWebPreferences().nodeIntegration === true) ||
isSameOrigin(sender.getURL(), target.getURL())
}
@@ -1,4 +1,5 @@
const timers = require('timers')
const util = require('util')

process.atomBinding = require('./atom-binding-setup')(process.binding, process.type)

@@ -8,11 +9,21 @@ process.atomBinding = require('./atom-binding-setup')(process.binding, process.t
// which would delay the callbacks for arbitrary long time. So we should
// initiatively activate the uv loop once setImmediate and process.nextTick is
// called.
var wrapWithActivateUvLoop = function (func) {
return function () {
process.activateUvLoop()
return func.apply(this, arguments)
const wrapWithActivateUvLoop = function (func) {
return wrap(func, function (func) {
return function () {
process.activateUvLoop()
return func.apply(this, arguments)
}
})
}

function wrap (func, wrapper) {
const wrapped = wrapper(func)
if (func[util.promisify.custom]) {
wrapped[util.promisify.custom] = wrapper(func[util.promisify.custom])
}
return wrapped
}

process.nextTick = wrapWithActivateUvLoop(process.nextTick)
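
A small sketch of why the `wrap` helper above forwards `util.promisify.custom`: a wrapped timer keeps its promisified form. This is a plain Node.js illustration under the same pattern, not Electron-specific code:

```javascript
const util = require('util')

// Same shape as the wrap helper above: wrap a function but keep its promisify hook.
function wrap (func, wrapper) {
  const wrapped = wrapper(func)
  if (func[util.promisify.custom]) {
    wrapped[util.promisify.custom] = wrapper(func[util.promisify.custom])
  }
  return wrapped
}

const wrappedSetTimeout = wrap(setTimeout, (fn) => function (...args) {
  // Bookkeeping could happen here, like process.activateUvLoop() above.
  return fn.apply(this, args)
})

// Still promisifiable because the custom symbol was carried over.
util.promisify(wrappedSetTimeout)(10).then(() => console.log('timer fired'))
```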
@@ -121,7 +121,18 @@ if (nodeIntegration === 'true') {
|
||||
|
||||
// Set the __filename to the path of html file if it is file: protocol.
|
||||
if (window.location.protocol === 'file:') {
|
||||
var pathname = process.platform === 'win32' && window.location.pathname[0] === '/' ? window.location.pathname.substr(1) : window.location.pathname
|
||||
const location = window.location
|
||||
let pathname = location.pathname
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
if (pathname[0] === '/') pathname = pathname.substr(1)
|
||||
|
||||
const isWindowsNetworkSharePath = location.hostname.length > 0 && globalPaths[0].startsWith('\\')
|
||||
if (isWindowsNetworkSharePath) {
|
||||
pathname = `//${location.host}/${pathname}`
|
||||
}
|
||||
}
|
||||
|
||||
global.__filename = path.normalize(decodeURIComponent(pathname))
|
||||
global.__dirname = path.dirname(global.__filename)
|
||||
|
||||
|
||||
@@ -32,8 +32,6 @@ const preloadModules = new Map([
['timers', require('timers')]
])

const preloadSrc = fs.readFileSync(preloadPath).toString()

// Pass different process object to the preload script(which should not have
// access to things like `process.atomBinding`).
const preloadProcess = new events.EventEmitter()
@@ -54,6 +52,11 @@ function preloadRequire (module) {
throw new Error('module not found')
}

if (window.location.protocol === 'chrome-devtools:') {
// Override some inspector APIs.
require('../renderer/inspector')
}

// Wrap the script into a function executed in global scope. It won't have
// access to the current scope, so we'll expose a few objects as arguments:
//
@@ -73,13 +76,16 @@ function preloadRequire (module) {
// and any `require('electron')` calls in `preload.js` will work as expected
// since browserify won't try to include `electron` in the bundle, falling back
// to the `preloadRequire` function above.
const preloadWrapperSrc = `(function(require, process, Buffer, global, setImmediate) {
${preloadSrc}
})`
if (preloadPath) {
const preloadSrc = fs.readFileSync(preloadPath).toString()
const preloadWrapperSrc = `(function(require, process, Buffer, global, setImmediate) {
${preloadSrc}
})`

// eval in window scope:
// http://www.ecma-international.org/ecma-262/5.1/#sec-10.4.2
const geval = eval
const preloadFn = geval(preloadWrapperSrc)
const {setImmediate} = require('timers')
preloadFn(preloadRequire, preloadProcess, Buffer, global, setImmediate)
// eval in window scope:
// http://www.ecma-international.org/ecma-262/5.1/#sec-10.4.2
const geval = eval
const preloadFn = geval(preloadWrapperSrc)
const {setImmediate} = require('timers')
preloadFn(preloadRequire, preloadProcess, Buffer, global, setImmediate)
}

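The new if (preloadPath) block keeps the earlier approach: the preload source is wrapped in a function literal and evaluated through an indirect eval so it runs in global scope with only the arguments the sandbox chooses to expose. A self-contained sketch of that pattern (the preload source and the stand-in arguments are assumptions, not Electron's real objects):

// Illustrative only: wrap a source string and run it with a controlled argument set.
const preloadSrc = `console.log(typeof require, typeof process)`
const preloadWrapperSrc = `(function(require, process, Buffer, global, setImmediate) {
${preloadSrc}
})`

const geval = eval                         // indirect eval evaluates in global scope
const preloadFn = geval(preloadWrapperSrc) // yields the wrapper function
preloadFn(
  name => { throw new Error('module not found') }, // stand-in for preloadRequire
  { platform: process.platform },                  // restricted process object
  Buffer, global, setImmediate
)
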
@@ -4,7 +4,7 @@ var electron = require('./')

var proc = require('child_process')

var child = proc.spawn(electron, process.argv.slice(2), {stdio: 'inherit'})
var child = proc.spawn(electron, process.argv.slice(2), {stdio: 'inherit', windowsHide: false})
child.on('close', function (code) {
process.exit(code)
})

package-lock.json (generated, 10156 lines): file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
{
"name": "electron",
"version": "1.8.2-beta.2",
"version": "1.8.8",
"repository": "https://github.com/electron/electron",
"description": "Build cross platform desktop apps with JavaScript, HTML, and CSS",
"devDependencies": {
@@ -12,7 +12,7 @@
"dugite": "^1.45.0",
"electabul": "~0.0.4",
"electron-docs-linter": "^2.3.4",
"electron-typescript-definitions": "^1.2.11",
"electron-typescript-definitions": "~1.2.11",
"github": "^9.2.0",
"husky": "^0.14.3",
"minimist": "^1.2.0",

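The devDependency change above swaps a caret range for a tilde range: under npm's semver rules, ^1.2.11 accepts any 1.x.y at or above 1.2.11, while ~1.2.11 only accepts patch releases within 1.2.x, so the typescript definitions generator is pinned more tightly for this release line.
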
@@ -40,6 +40,9 @@ def main():
|
||||
if args.target_arch == 'mips64el':
|
||||
download_mips64el_toolchain()
|
||||
|
||||
if args.target_arch.startswith('arm'):
|
||||
download_native_mksnapshot(args.target_arch)
|
||||
|
||||
# Redirect to use local libchromiumcontent build.
|
||||
if args.build_release_libcc or args.build_debug_libcc:
|
||||
build_libchromiumcontent(args.verbose, args.target_arch, defines,
|
||||
@@ -218,6 +221,15 @@ def download_mips64el_toolchain():
|
||||
subprocess.check_call(['tar', '-xf', tar_name, '-C', VENDOR_DIR])
|
||||
os.remove(tar_name)
|
||||
|
||||
def download_native_mksnapshot(arch):
|
||||
if not os.path.exists(os.path.join(VENDOR_DIR,
|
||||
'native_mksnapshot')):
|
||||
zip_name = 'native-mksnapshot.zip'
|
||||
url = '{0}/linux/{1}/{2}/{3}'.format(BASE_URL, arch,
|
||||
get_libchromiumcontent_commit(), zip_name)
|
||||
download(zip_name, url, os.path.join(SOURCE_ROOT, zip_name))
|
||||
subprocess.call(['unzip', zip_name, '-d', VENDOR_DIR])
|
||||
os.remove(zip_name)
|
||||
|
||||
def create_chrome_version_h():
|
||||
version_file = os.path.join(VENDOR_DIR, 'libchromiumcontent', 'VERSION')
|
||||
|
||||
@@ -28,6 +28,8 @@ def main():
|
||||
ninja += '.exe'
|
||||
|
||||
args = parse_args()
|
||||
if args.ninja_path:
|
||||
ninja = args.ninja_path
|
||||
if args.libcc:
|
||||
if ('D' not in args.configuration
|
||||
or not os.path.exists(GCLIENT_DONE)
|
||||
@@ -67,6 +69,9 @@ def parse_args():
|
||||
'-d --debug_libchromiumcontent.'
|
||||
),
|
||||
action='store_true', default=False)
|
||||
parser.add_argument('--ninja-path',
|
||||
help='Path of ninja command to use.',
|
||||
required=False)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
|
||||
@@ -5,12 +5,12 @@ import re
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from lib.util import execute, get_electron_version, parse_version, scoped_cwd
|
||||
|
||||
from lib.util import execute, get_electron_version, parse_version, scoped_cwd, \
|
||||
is_nightly, is_beta, is_stable, get_next_nightly, get_next_beta, \
|
||||
get_next_stable_from_pre, get_next_stable_from_stable, clean_parse_version
|
||||
|
||||
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
@@ -34,14 +34,7 @@ def main():
|
||||
action='store',
|
||||
default=None,
|
||||
dest='bump',
|
||||
help='increment [major | minor | patch | beta]'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--stable',
|
||||
action='store_true',
|
||||
default= False,
|
||||
dest='stable',
|
||||
help='promote to stable (i.e. remove `-beta.x` suffix)'
|
||||
help='increment [stable | beta | nightly]'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
@@ -52,36 +45,56 @@ def main():
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
curr_version = get_electron_version()
|
||||
|
||||
if args.bump not in ['stable', 'beta', 'nightly']:
|
||||
raise Exception('bump must be set to either stable, beta or nightly')
|
||||
|
||||
if is_nightly(curr_version):
|
||||
if args.bump == 'nightly':
|
||||
version = get_next_nightly(curr_version)
|
||||
elif args.bump == 'beta':
|
||||
version = get_next_beta(curr_version)
|
||||
elif args.bump == 'stable':
|
||||
version = get_next_stable_from_pre(curr_version)
|
||||
else:
|
||||
not_reached()
|
||||
elif is_beta(curr_version):
|
||||
if args.bump == 'nightly':
|
||||
version = get_next_nightly(curr_version)
|
||||
elif args.bump == 'beta':
|
||||
version = get_next_beta(curr_version)
|
||||
elif args.bump == 'stable':
|
||||
version = get_next_stable_from_pre(curr_version)
|
||||
else:
|
||||
not_reached()
|
||||
elif is_stable(curr_version):
|
||||
if args.bump == 'nightly':
|
||||
version = get_next_nightly(curr_version)
|
||||
elif args.bump == 'beta':
|
||||
raise Exception("You can\'t bump to a beta from stable")
|
||||
elif args.bump == 'stable':
|
||||
version = get_next_stable_from_stable(curr_version)
|
||||
else:
|
||||
not_reached()
|
||||
else:
|
||||
raise Exception("Invalid current version: " + curr_version)
|
||||
|
||||
if args.new_version == None and args.bump == None and args.stable == False:
|
||||
parser.print_help()
|
||||
return 1
|
||||
|
||||
increments = ['major', 'minor', 'patch', 'beta']
|
||||
|
||||
curr_version = get_electron_version()
|
||||
versions = parse_version(re.sub('-beta', '', curr_version))
|
||||
|
||||
if args.bump in increments:
|
||||
versions = increase_version(versions, increments.index(args.bump))
|
||||
if versions[3] == '0':
|
||||
# beta starts at 1
|
||||
versions = increase_version(versions, increments.index('beta'))
|
||||
|
||||
if args.stable == True:
|
||||
versions[3] = '0'
|
||||
|
||||
if args.new_version != None:
|
||||
versions = parse_version(re.sub('-beta', '', args.new_version))
|
||||
|
||||
version = '.'.join(versions[:3])
|
||||
suffix = '' if versions[3] == '0' else '-beta.' + versions[3]
|
||||
versions = clean_parse_version(version)
|
||||
suffix = ''
|
||||
if '-' in version:
|
||||
suffix = '-' + version.split('-')[1]
|
||||
versions[3] = parse_version(version)[3]
|
||||
version = version.split('-')[0]
|
||||
|
||||
if args.dry_run:
|
||||
print 'new version number would be: {0}\n'.format(version + suffix)
|
||||
return 0
|
||||
|
||||
|
||||
with scoped_cwd(SOURCE_ROOT):
|
||||
update_electron_gyp(version, suffix)
|
||||
update_win_rc(version, versions)
|
||||
@@ -92,6 +105,9 @@ def main():
|
||||
|
||||
print 'Bumped to version: {0}'.format(version + suffix)
|
||||
|
||||
def not_reached():
|
||||
raise Exception('Unreachable code was reached')
|
||||
|
||||
def increase_version(versions, index):
|
||||
for i in range(index + 1, 4):
|
||||
versions[i] = '0'
|
||||
@@ -100,7 +116,8 @@ def increase_version(versions, index):
|
||||
|
||||
|
||||
def update_electron_gyp(version, suffix):
|
||||
pattern = re.compile(" *'version%' *: *'[0-9.]+(-beta[0-9.]*)?'")
|
||||
pattern = re.compile(" *'version%' *: *'[0-9.]+(-beta[0-9.]*)?(-dev)?"
|
||||
+ "(-nightly[0-9.]*)?'")
|
||||
with open('electron.gyp', 'r') as f:
|
||||
lines = f.readlines()
|
||||
|
||||
@@ -192,7 +209,14 @@ def update_package_json(version, suffix):
|
||||
|
||||
|
||||
def tag_version(version, suffix):
|
||||
execute(['git', 'commit', '-a', '-m', 'Bump v{0}'.format(version + suffix)])
|
||||
execute([
|
||||
'git',
|
||||
'commit',
|
||||
'-a',
|
||||
'-m',
|
||||
'Bump v{0}'.format(version + suffix),
|
||||
'-n'
|
||||
])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
require('dotenv-safe').load()
|
||||
|
||||
const assert = require('assert')
|
||||
const request = require('request')
|
||||
const buildAppVeyorURL = 'https://windows-ci.electronjs.org/api/builds'
|
||||
const jenkinsServer = 'https://mac-ci.electronjs.org'
|
||||
const vstsURL = 'https://github.visualstudio.com/electron/_apis/build'
|
||||
|
||||
const appVeyorJobs = {
|
||||
'electron-x64': 'electron',
|
||||
'electron-ia32': 'electron-39ng6'
|
||||
}
|
||||
|
||||
const circleCIJobs = [
|
||||
'electron-linux-arm',
|
||||
@@ -11,9 +18,9 @@ const circleCIJobs = [
|
||||
'electron-linux-x64'
|
||||
]
|
||||
|
||||
const jenkinsJobs = [
|
||||
'electron-mas-x64-release',
|
||||
'electron-osx-x64-release'
|
||||
const vstsJobs = [
|
||||
'electron-release-mas-x64',
|
||||
'electron-release-osx-x64'
|
||||
]
|
||||
|
||||
async function makeRequest (requestOptions, parseResponse) {
|
||||
@@ -27,19 +34,23 @@ async function makeRequest (requestOptions, parseResponse) {
|
||||
resolve(body)
|
||||
}
|
||||
} else {
|
||||
console.error('Error occurred while requesting:', requestOptions.url)
|
||||
if (parseResponse) {
|
||||
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
|
||||
try {
|
||||
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
|
||||
} catch (err) {
|
||||
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
|
||||
}
|
||||
} else {
|
||||
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
|
||||
}
|
||||
reject()
|
||||
reject(err)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function circleCIcall (buildUrl, targetBranch, job, ghRelease) {
|
||||
assert(process.env.CIRCLE_TOKEN, 'CIRCLE_TOKEN not found in environment')
|
||||
async function circleCIcall (buildUrl, targetBranch, job, options) {
|
||||
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
|
||||
let buildRequest = {
|
||||
'build_parameters': {
|
||||
@@ -47,12 +58,16 @@ async function circleCIcall (buildUrl, targetBranch, job, ghRelease) {
|
||||
}
|
||||
}
|
||||
|
||||
if (ghRelease) {
|
||||
if (options.ghRelease) {
|
||||
buildRequest.build_parameters.ELECTRON_RELEASE = 1
|
||||
} else {
|
||||
buildRequest.build_parameters.RUN_RELEASE_BUILD = 'true'
|
||||
}
|
||||
|
||||
if (options.automaticRelease) {
|
||||
buildRequest.build_parameters.AUTO_RELEASE = 'true'
|
||||
}
|
||||
|
||||
let circleResponse = await makeRequest({
|
||||
method: 'POST',
|
||||
url: buildUrl,
|
||||
@@ -64,20 +79,33 @@ async function circleCIcall (buildUrl, targetBranch, job, ghRelease) {
|
||||
}, true).catch(err => {
|
||||
console.log('Error calling CircleCI:', err)
|
||||
})
|
||||
console.log(`Check ${circleResponse.build_url} for status. (${job})`)
|
||||
console.log(`CircleCI release build request for ${job} successful. Check ${circleResponse.build_url} for status.`)
|
||||
}
|
||||
|
||||
async function buildAppVeyor (targetBranch, ghRelease) {
|
||||
console.log(`Triggering AppVeyor to run build on branch: ${targetBranch} with release flag.`)
|
||||
assert(process.env.APPVEYOR_TOKEN, 'APPVEYOR_TOKEN not found in environment')
|
||||
function buildAppVeyor (targetBranch, options) {
|
||||
const validJobs = Object.keys(appVeyorJobs)
|
||||
if (options.job) {
|
||||
assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`)
|
||||
callAppVeyor(targetBranch, options.job, options)
|
||||
} else {
|
||||
validJobs.forEach((job) => callAppVeyor(targetBranch, job, options))
|
||||
}
|
||||
}
|
||||
|
||||
async function callAppVeyor (targetBranch, job, options) {
|
||||
console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`)
|
||||
let environmentVariables = {}
|
||||
|
||||
if (ghRelease) {
|
||||
if (options.ghRelease) {
|
||||
environmentVariables.ELECTRON_RELEASE = 1
|
||||
} else {
|
||||
environmentVariables.RUN_RELEASE_BUILD = 'true'
|
||||
}
|
||||
|
||||
if (options.automaticRelease) {
|
||||
environmentVariables.AUTO_RELEASE = 'true'
|
||||
}
|
||||
|
||||
const requestOpts = {
|
||||
url: buildAppVeyorURL,
|
||||
auth: {
|
||||
@@ -88,7 +116,7 @@ async function buildAppVeyor (targetBranch, ghRelease) {
|
||||
},
|
||||
body: JSON.stringify({
|
||||
accountName: 'AppVeyor',
|
||||
projectSlug: 'electron',
|
||||
projectSlug: appVeyorJobs[job],
|
||||
branch: targetBranch,
|
||||
environmentVariables
|
||||
}),
|
||||
@@ -97,113 +125,121 @@ async function buildAppVeyor (targetBranch, ghRelease) {
|
||||
let appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
|
||||
console.log('Error calling AppVeyor:', err)
|
||||
})
|
||||
const buildUrl = `https://windows-ci.electronjs.org/project/AppVeyor/electron/build/${appVeyorResponse.version}`
|
||||
console.log(`AppVeyor release build request successful. Check build status at ${buildUrl}`)
|
||||
const buildUrl = `https://windows-ci.electronjs.org/project/AppVeyor/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`
|
||||
console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`)
|
||||
}
|
||||
|
||||
function buildCircleCI (targetBranch, ghRelease, job) {
|
||||
function buildCircleCI (targetBranch, options) {
|
||||
const circleBuildUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/tree/${targetBranch}?circle-token=${process.env.CIRCLE_TOKEN}`
|
||||
if (job) {
|
||||
assert(circleCIJobs.includes(job), `Unknown CI job name: ${job}.`)
|
||||
circleCIcall(circleBuildUrl, targetBranch, job, ghRelease)
|
||||
if (options.job) {
|
||||
assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`)
|
||||
circleCIcall(circleBuildUrl, targetBranch, options.job, options)
|
||||
} else {
|
||||
circleCIJobs.forEach((job) => circleCIcall(circleBuildUrl, targetBranch, job, ghRelease))
|
||||
circleCIJobs.forEach((job) => circleCIcall(circleBuildUrl, targetBranch, job, options))
|
||||
}
|
||||
}
|
||||
|
||||
async function buildJenkins (targetBranch, ghRelease, job) {
|
||||
assert(process.env.JENKINS_AUTH_TOKEN, 'JENKINS_AUTH_TOKEN not found in environment')
|
||||
assert(process.env.JENKINS_BUILD_TOKEN, 'JENKINS_BUILD_TOKEN not found in environment')
|
||||
let jenkinsCrumb = await getJenkinsCrumb()
|
||||
|
||||
if (job) {
|
||||
assert(jenkinsJobs.includes(job), `Unknown CI job name: ${job}.`)
|
||||
callJenkinsBuild(job, jenkinsCrumb, targetBranch, ghRelease)
|
||||
} else {
|
||||
jenkinsJobs.forEach((job) => {
|
||||
callJenkinsBuild(job, jenkinsCrumb, targetBranch, ghRelease)
|
||||
})
|
||||
async function buildVSTS (targetBranch, options) {
|
||||
if (options.job) {
|
||||
assert(vstsJobs.includes(options.job), `Unknown VSTS CI job name: ${options.job}. Valid values are: ${vstsJobs}.`)
|
||||
}
|
||||
}
|
||||
console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`)
|
||||
let environmentVariables = {}
|
||||
|
||||
async function callJenkins (path, requestParameters, requestHeaders) {
|
||||
let requestOptions = {
|
||||
url: `${jenkinsServer}/${path}`,
|
||||
if (!options.ghRelease) {
|
||||
environmentVariables.UPLOAD_TO_S3 = 1
|
||||
}
|
||||
|
||||
if (options.automaticRelease) {
|
||||
environmentVariables.AUTO_RELEASE = 'true'
|
||||
}
|
||||
|
||||
let requestOpts = {
|
||||
url: `${vstsURL}/definitions?api-version=4.1`,
|
||||
auth: {
|
||||
user: 'build',
|
||||
pass: process.env.JENKINS_AUTH_TOKEN
|
||||
user: '',
|
||||
password: process.env.VSTS_TOKEN
|
||||
},
|
||||
qs: requestParameters
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}
|
||||
if (requestHeaders) {
|
||||
requestOptions.headers = requestHeaders
|
||||
}
|
||||
let jenkinsResponse = await makeRequest(requestOptions).catch(err => {
|
||||
console.log(`Error calling Jenkins:`, err)
|
||||
let vstsResponse = await makeRequest(requestOpts, true).catch(err => {
|
||||
console.log('Error calling VSTS to get build definitions:', err)
|
||||
})
|
||||
return jenkinsResponse
|
||||
let buildsToRun = []
|
||||
if (options.job) {
|
||||
buildsToRun = vstsResponse.value.filter(build => build.name === options.job)
|
||||
} else {
|
||||
buildsToRun = vstsResponse.value.filter(build => vstsJobs.includes(build.name))
|
||||
}
|
||||
buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables))
|
||||
}
|
||||
|
||||
async function callJenkinsBuild (job, jenkinsCrumb, targetBranch, ghRelease) {
|
||||
console.log(`Triggering Jenkins to run build job: ${job} on branch: ${targetBranch} with release flag.`)
|
||||
let jenkinsParams = {
|
||||
token: process.env.JENKINS_BUILD_TOKEN,
|
||||
BRANCH: targetBranch
|
||||
async function callVSTSBuild (build, targetBranch, environmentVariables) {
|
||||
let buildBody = {
|
||||
definition: build,
|
||||
sourceBranch: targetBranch,
|
||||
priority: 'high'
|
||||
}
|
||||
if (!ghRelease) {
|
||||
jenkinsParams.RUN_RELEASE_BUILD = 1
|
||||
if (Object.keys(environmentVariables).length !== 0) {
|
||||
buildBody.parameters = JSON.stringify(environmentVariables)
|
||||
}
|
||||
await callJenkins(`job/${job}/buildWithParameters`, jenkinsParams, jenkinsCrumb)
|
||||
.catch(err => {
|
||||
console.log(`Error calling Jenkins build`, err)
|
||||
})
|
||||
let buildUrl = `${jenkinsServer}/job/${job}/lastBuild/`
|
||||
console.log(`Jenkins build request successful. Check build status at ${buildUrl}.`)
|
||||
}
|
||||
|
||||
async function getJenkinsCrumb () {
|
||||
let crumbResponse = await callJenkins('crumbIssuer/api/xml', {
|
||||
xpath: 'concat(//crumbRequestField,":",//crumb)'
|
||||
}).catch(err => {
|
||||
console.log(`Error getting jenkins crumb:`, err)
|
||||
let requestOpts = {
|
||||
url: `${vstsURL}/builds?api-version=4.1`,
|
||||
auth: {
|
||||
user: '',
|
||||
password: process.env.VSTS_TOKEN
|
||||
},
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify(buildBody),
|
||||
method: 'POST'
|
||||
}
|
||||
let vstsResponse = await makeRequest(requestOpts, true).catch(err => {
|
||||
console.log(`Error calling VSTS for job ${build.name}`, err)
|
||||
})
|
||||
let crumbDetails = crumbResponse.split(':')
|
||||
let crumbHeader = {}
|
||||
crumbHeader[crumbDetails[0]] = crumbDetails[1]
|
||||
return crumbHeader
|
||||
console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`)
|
||||
}
|
||||
|
||||
function runRelease (targetBranch, options) {
|
||||
if (options.ci) {
|
||||
switch (options.ci) {
|
||||
case 'CircleCI': {
|
||||
buildCircleCI(targetBranch, options.ghRelease, options.job)
|
||||
buildCircleCI(targetBranch, options)
|
||||
break
|
||||
}
|
||||
case 'AppVeyor': {
|
||||
buildAppVeyor(targetBranch, options.ghRelease)
|
||||
buildAppVeyor(targetBranch, options)
|
||||
break
|
||||
}
|
||||
case 'Jenkins': {
|
||||
buildJenkins(targetBranch, options.ghRelease, options.job)
|
||||
case 'VSTS': {
|
||||
buildVSTS(targetBranch, options)
|
||||
break
|
||||
}
|
||||
default: {
|
||||
console.log(`Error! Unknown CI: ${options.ci}.`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
buildCircleCI(targetBranch, options.ghRelease, options.job)
|
||||
buildAppVeyor(targetBranch, options.ghRelease)
|
||||
buildJenkins(targetBranch, options.ghRelease, options.job)
|
||||
buildCircleCI(targetBranch, options)
|
||||
buildAppVeyor(targetBranch, options)
|
||||
buildVSTS(targetBranch, options)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = runRelease
|
||||
|
||||
if (require.main === module) {
|
||||
const args = require('minimist')(process.argv.slice(2))
|
||||
const args = require('minimist')(process.argv.slice(2), {
|
||||
boolean: ['ghRelease', 'automaticRelease']
|
||||
})
|
||||
const targetBranch = args._[0]
|
||||
if (args._.length < 1) {
|
||||
console.log(`Trigger CI to build release builds of electron.
|
||||
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|Jenkins] [--ghRelease] TARGET_BRANCH
|
||||
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|VSTS] [--ghRelease] [--automaticRelease] TARGET_BRANCH
|
||||
`)
|
||||
process.exit(0)
|
||||
}
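Because the script exports runRelease, the trigger shown in the usage string can also be invoked programmatically. A hedged example of that form (branch name and option values are illustrative):

const ciReleaseBuild = require('./ci-release-build')

// Roughly equivalent to: node script/ci-release-build.js --ci=VSTS --ghRelease 1-8-x
ciReleaseBuild('1-8-x', {
  ci: 'VSTS',            // one of CircleCI | AppVeyor | VSTS
  ghRelease: true,       // GitHub release flow; otherwise the S3/RUN_RELEASE_BUILD variables are set
  automaticRelease: false,
  job: undefined         // omit to trigger every job for the chosen CI
})
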
@@ -24,6 +24,7 @@ DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
|
||||
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
|
||||
CHROMIUM_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'download',
|
||||
'libchromiumcontent', 'static_library')
|
||||
NATIVE_MKSNAPSHOT_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'native_mksnapshot')
|
||||
|
||||
PROJECT_NAME = electron_gyp()['project_name%']
|
||||
PRODUCT_NAME = electron_gyp()['product_name%']
|
||||
@@ -138,7 +139,6 @@ def copy_chrome_binary(binary):
|
||||
shutil.copyfile(src, dest)
|
||||
os.chmod(dest, os.stat(dest).st_mode | stat.S_IEXEC)
|
||||
|
||||
|
||||
def copy_vcruntime_binaries():
|
||||
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
|
||||
r"SOFTWARE\Microsoft\VisualStudio\14.0\Setup\VC", 0,
|
||||
@@ -215,7 +215,7 @@ def strip_binary(binary_path):
|
||||
elif get_target_arch() == 'arm64':
|
||||
strip = 'aarch64-linux-gnu-strip'
|
||||
elif get_target_arch() == 'mips64el':
|
||||
strip = 'mips64el-redhat-linux-strip'
|
||||
strip = 'mips64el-loongson-linux-strip'
|
||||
else:
|
||||
strip = 'strip'
|
||||
execute([strip, binary_path], env=build_env())
|
||||
@@ -257,17 +257,39 @@ def create_dist_zip():
|
||||
|
||||
|
||||
def create_chrome_binary_zip(binary, version):
|
||||
dist_name = get_zip_name(binary, version)
|
||||
file_suffix = ''
|
||||
create_native_mksnapshot = False
|
||||
if binary == 'mksnapshot':
|
||||
arch = get_target_arch()
|
||||
if arch.startswith('arm'):
|
||||
# if the arch is arm/arm64 the mksnapshot executable is an x64 binary,
|
||||
# so name it as such.
|
||||
file_suffix = 'x64'
|
||||
create_native_mksnapshot = True
|
||||
dist_name = get_zip_name(binary, version, file_suffix)
|
||||
zip_file = os.path.join(SOURCE_ROOT, 'dist', dist_name)
|
||||
|
||||
files = ['LICENSE', 'LICENSES.chromium.html']
|
||||
if PLATFORM == 'win32':
|
||||
files += [binary + '.exe']
|
||||
else:
|
||||
files += [binary]
|
||||
|
||||
with scoped_cwd(DIST_DIR):
|
||||
files = ['LICENSE', 'LICENSES.chromium.html']
|
||||
if PLATFORM == 'win32':
|
||||
files += [binary + '.exe']
|
||||
else:
|
||||
files += [binary]
|
||||
make_zip(zip_file, files, [])
|
||||
|
||||
if create_native_mksnapshot == True:
|
||||
# Create a zip with the native version of the mksnapshot binary.
|
||||
src = os.path.join(NATIVE_MKSNAPSHOT_DIR, binary)
|
||||
dest = os.path.join(DIST_DIR, binary)
|
||||
# Copy file and keep the executable bit.
|
||||
shutil.copyfile(src, dest)
|
||||
os.chmod(dest, os.stat(dest).st_mode | stat.S_IEXEC)
|
||||
|
||||
dist_name = get_zip_name(binary, version)
|
||||
zip_file = os.path.join(SOURCE_ROOT, 'dist', dist_name)
|
||||
with scoped_cwd(DIST_DIR):
|
||||
make_zip(zip_file, files, [])
|
||||
|
||||
def create_ffmpeg_zip():
|
||||
dist_name = get_zip_name('ffmpeg', ELECTRON_VERSION)
|
||||
|
||||
script/get-last-major-for-master.js (new file, 29 lines)
@@ -0,0 +1,29 @@
const { GitProcess } = require('dugite')
const path = require('path')
const semver = require('semver')
const gitDir = path.resolve(__dirname, '..')

async function determineNextMajorForMaster () {
let branchNames
let result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]-[0-9]-x'], gitDir)
if (result.exitCode === 0) {
branchNames = result.stdout.trim().split('\n')
const filtered = branchNames.map(b => b.replace('origin/', ''))
return getNextReleaseBranch(filtered)
} else {
throw new Error('Release branches could not be fetched.')
}
}

function getNextReleaseBranch (branches) {
const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0'))
const next = converted.reduce((v1, v2) => {
return semver.gt(v1, v2) ? v1 : v2
})
return parseInt(next.split('.')[0], 10)
}

determineNextMajorForMaster().then(console.info).catch((err) => {
console.error(err)
process.exit(1)
})

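A worked example of getNextReleaseBranch as defined above (branch names assumed):

// ['1-7-x', '1-8-x', '2-0-x'] becomes ['1.7.0', '1.8.0', '2.0.0'];
// semver.gt keeps the highest, '2.0.0', and its leading component is returned.
getNextReleaseBranch(['1-7-x', '1-8-x', '2-0-x']) // -> 2
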
script/get-version.py (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import sys

from lib.util import get_electron_version

def main():
print get_electron_version()

if __name__ == '__main__':
sys.exit(main())

@@ -8,8 +8,8 @@ import sys
|
||||
# URL to the mips64el sysroot image.
|
||||
MIPS64EL_SYSROOT_URL = 'https://github.com/electron/debian-sysroot-image-creator/releases/download/v0.5.0/debian_jessie_mips64-sysroot.tar.bz2'
|
||||
# URL to the mips64el toolchain.
|
||||
MIPS64EL_GCC = 'gcc-4.8.3-d197-n64-loongson'
|
||||
MIPS64EL_GCC_URL = 'http://ftp.loongnix.org/toolchain/gcc/release/' + MIPS64EL_GCC + '.tar.gz'
|
||||
MIPS64EL_GCC = 'cross-gcc-4.9.3-n64-loongson-rc5.4'
|
||||
MIPS64EL_GCC_URL = 'https://github.com/electron/debian-sysroot-image-creator/releases/download/v0.5.0/' + MIPS64EL_GCC + '.tar.gz'
|
||||
|
||||
BASE_URL = os.getenv('LIBCHROMIUMCONTENT_MIRROR') or \
|
||||
'https://s3.amazonaws.com/github-janky-artifacts/libchromiumcontent'
|
||||
@@ -93,11 +93,11 @@ def build_env():
|
||||
VENDOR_DIR = os.path.join(SOURCE_ROOT, 'vendor')
|
||||
gcc_dir = os.path.join(VENDOR_DIR, MIPS64EL_GCC)
|
||||
ldlib_dirs = [
|
||||
gcc_dir + '/usr/x86_64-unknown-linux-gnu/mips64el-redhat-linux/lib',
|
||||
gcc_dir + '/usr/x86_64-unknown-linux-gnu/mips64el-loongson-linux/lib',
|
||||
gcc_dir + '/usr/lib64',
|
||||
gcc_dir + '/usr/mips64el-redhat-linux/lib64',
|
||||
gcc_dir + '/usr/mips64el-redhat-linux/sysroot/lib64',
|
||||
gcc_dir + '/usr/mips64el-redhat-linux/sysroot/usr/lib64',
|
||||
gcc_dir + '/usr/mips64el-loongson-linux/lib64',
|
||||
gcc_dir + '/usr/mips64el-loongson-linux/sysroot/lib64',
|
||||
gcc_dir + '/usr/mips64el-loongson-linux/sysroot/usr/lib64',
|
||||
]
|
||||
env['LD_LIBRARY_PATH'] = os.pathsep.join(ldlib_dirs)
|
||||
env['PATH'] = os.pathsep.join([gcc_dir + '/usr/bin', env['PATH']])
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import atexit
|
||||
import contextlib
|
||||
import datetime
|
||||
import errno
|
||||
import platform
|
||||
import re
|
||||
@@ -87,7 +88,7 @@ def download(text, url, path):
|
||||
downloaded_size = 0
|
||||
block_size = 128
|
||||
|
||||
ci = os.environ.get('CI') == '1'
|
||||
ci = os.environ.get('CI') is not None
|
||||
|
||||
while True:
|
||||
buf = web_file.read(block_size)
|
||||
@@ -287,3 +288,67 @@ def update_node_modules(dirname, env=None):
|
||||
pass
|
||||
else:
|
||||
execute_stdout(args, env)
|
||||
|
||||
def clean_parse_version(v):
|
||||
return parse_version(v.split("-")[0])
|
||||
|
||||
def is_stable(v):
|
||||
return len(v.split(".")) == 3
|
||||
|
||||
def is_beta(v):
|
||||
return 'beta' in v
|
||||
|
||||
def is_nightly(v):
|
||||
return 'nightly' in v
|
||||
|
||||
def get_nightly_date():
|
||||
return datetime.datetime.today().strftime('%Y%m%d')
|
||||
|
||||
def get_last_major():
|
||||
return execute(['node', 'script/get-last-major-for-master.js'])
|
||||
|
||||
def get_next_nightly(v):
|
||||
pv = clean_parse_version(v)
|
||||
major = pv[0]; minor = pv[1]; patch = pv[2]
|
||||
|
||||
if (is_stable(v)):
|
||||
patch = str(int(pv[2]) + 1)
|
||||
|
||||
if execute(['git', 'rev-parse', '--abbrev-ref', 'HEAD']) == "master":
|
||||
major = str(get_last_major() + 1)
|
||||
minor = '0'
|
||||
patch = '0'
|
||||
|
||||
pre = 'nightly.' + get_nightly_date()
|
||||
return make_version(major, minor, patch, pre)
|
||||
|
||||
def non_empty(thing):
|
||||
return thing.strip() != ''
|
||||
|
||||
def get_next_beta(v):
|
||||
pv = clean_parse_version(v)
|
||||
tag_pattern = 'v' + pv[0] + '.' + pv[1] + '.' + pv[2] + '-beta.*'
|
||||
tag_list = filter(
|
||||
non_empty,
|
||||
execute(['git', 'tag', '--list', '-l', tag_pattern]).strip().split('\n')
|
||||
)
|
||||
if len(tag_list) == 0:
|
||||
return make_version(pv[0] , pv[1], pv[2], 'beta.1')
|
||||
|
||||
lv = parse_version(tag_list[-1])
|
||||
return make_version(pv[0] , pv[1], pv[2], 'beta.' + str(int(lv[3]) + 1))
|
||||
|
||||
def get_next_stable_from_pre(v):
|
||||
pv = clean_parse_version(v)
|
||||
major = pv[0]; minor = pv[1]; patch = pv[2]
|
||||
return make_version(major, minor, patch)
|
||||
|
||||
def get_next_stable_from_stable(v):
|
||||
pv = clean_parse_version(v)
|
||||
major = pv[0]; minor = pv[1]; patch = pv[2]
|
||||
return make_version(major, minor, str(int(patch) + 1))
|
||||
|
||||
def make_version(major, minor, patch, pre = None):
|
||||
if pre is None:
|
||||
return major + '.' + minor + '.' + patch
|
||||
return major + "." + minor + "." + patch + '-' + pre
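Worked example of the version helpers above (dates and tags assumed): starting from a stable 1.8.7 on a release branch, is_stable() is true, so get_next_nightly bumps the patch and appends the date, giving 1.8.8-nightly.20180821; get_next_stable_from_stable('1.8.7') yields 1.8.8; and get_next_beta('2.0.0-beta.3') inspects the existing v2.0.0-beta.* tags and, if beta.3 is the newest, yields 2.0.0-beta.4.
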
@@ -1,116 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
require('colors')
|
||||
const assert = require('assert')
|
||||
const branchToRelease = process.argv[2]
|
||||
const fail = '\u2717'.red
|
||||
const { GitProcess, GitError } = require('dugite')
|
||||
const pass = '\u2713'.green
|
||||
const path = require('path')
|
||||
const pkg = require('../package.json')
|
||||
|
||||
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
|
||||
if (!branchToRelease) {
|
||||
console.log(`Usage: merge-release branch`)
|
||||
process.exit(1)
|
||||
}
|
||||
const gitDir = path.resolve(__dirname, '..')
|
||||
|
||||
async function callGit (args, errorMessage, successMessage) {
|
||||
let gitResult = await GitProcess.exec(args, gitDir)
|
||||
if (gitResult.exitCode === 0) {
|
||||
console.log(`${pass} ${successMessage}`)
|
||||
return true
|
||||
} else {
|
||||
console.log(`${fail} ${errorMessage} ${gitResult.stderr}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function checkoutBranch (branchName) {
|
||||
console.log(`Checking out ${branchName}.`)
|
||||
let errorMessage = `Error checking out branch ${branchName}:`
|
||||
let successMessage = `Successfully checked out branch ${branchName}.`
|
||||
return await callGit(['checkout', branchName], errorMessage, successMessage)
|
||||
}
|
||||
|
||||
async function commitMerge () {
|
||||
console.log(`Committing the merge for v${pkg.version}`)
|
||||
let errorMessage = `Error committing merge:`
|
||||
let successMessage = `Successfully committed the merge for v${pkg.version}`
|
||||
let gitArgs = ['commit', '-m', `v${pkg.version}`]
|
||||
return await callGit(gitArgs, errorMessage, successMessage)
|
||||
}
|
||||
|
||||
async function mergeReleaseIntoBranch (branchName) {
|
||||
console.log(`Merging release branch into ${branchName}.`)
|
||||
let mergeArgs = ['merge', 'release', '--squash']
|
||||
let mergeDetails = await GitProcess.exec(mergeArgs, gitDir)
|
||||
if (mergeDetails.exitCode === 0) {
|
||||
return true
|
||||
} else {
|
||||
const error = GitProcess.parseError(mergeDetails.stderr)
|
||||
if (error === GitError.MergeConflicts) {
|
||||
console.log(`${fail} Could not merge release branch into ${branchName} ` +
|
||||
`due to merge conflicts.`)
|
||||
return false
|
||||
} else {
|
||||
console.log(`${fail} Could not merge release branch into ${branchName} ` +
|
||||
`due to an error: ${mergeDetails.stderr}.`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function pushBranch (branchName) {
|
||||
console.log(`Pushing branch ${branchName}.`)
|
||||
let pushArgs = ['push', 'origin', branchName]
|
||||
let errorMessage = `Could not push branch ${branchName} due to an error:`
|
||||
let successMessage = `Successfully pushed branch ${branchName}.`
|
||||
return await callGit(pushArgs, errorMessage, successMessage)
|
||||
}
|
||||
|
||||
async function pull () {
|
||||
console.log(`Performing a git pull`)
|
||||
let errorMessage = `Could not pull due to an error:`
|
||||
let successMessage = `Successfully performed a git pull`
|
||||
return await callGit(['pull'], errorMessage, successMessage)
|
||||
}
|
||||
|
||||
async function rebase (targetBranch) {
|
||||
console.log(`Rebasing release branch from ${targetBranch}`)
|
||||
let errorMessage = `Could not rebase due to an error:`
|
||||
let successMessage = `Successfully rebased release branch from ` +
|
||||
`${targetBranch}`
|
||||
return await callGit(['rebase', targetBranch], errorMessage, successMessage)
|
||||
}
|
||||
|
||||
async function mergeRelease () {
|
||||
await checkoutBranch(branchToRelease)
|
||||
let mergeSuccess = await mergeReleaseIntoBranch(branchToRelease)
|
||||
if (mergeSuccess) {
|
||||
console.log(`${pass} Successfully merged release branch into ` +
|
||||
`${branchToRelease}.`)
|
||||
await commitMerge()
|
||||
let pushSuccess = await pushBranch(branchToRelease)
|
||||
if (pushSuccess) {
|
||||
console.log(`${pass} Success!!! ${branchToRelease} now has the latest release!`)
|
||||
}
|
||||
} else {
|
||||
console.log(`Trying rebase of ${branchToRelease} into release branch.`)
|
||||
await pull()
|
||||
await checkoutBranch('release')
|
||||
let rebaseResult = await rebase(branchToRelease)
|
||||
if (rebaseResult) {
|
||||
let pushResult = pushBranch('HEAD')
|
||||
if (pushResult) {
|
||||
console.log(`Rebase of ${branchToRelease} into release branch was ` +
|
||||
`successful. Let release builds run and then try this step again.`)
|
||||
}
|
||||
// Exit as failure so release doesn't continue
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mergeRelease()
|
||||
@@ -1,25 +1,28 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
if (!process.env.CI) require('dotenv-safe').load()
|
||||
require('colors')
|
||||
const args = require('minimist')(process.argv.slice(2))
|
||||
const assert = require('assert')
|
||||
const args = require('minimist')(process.argv.slice(2), {
|
||||
boolean: ['automaticRelease', 'notesOnly', 'stable']
|
||||
})
|
||||
const ciReleaseBuild = require('./ci-release-build')
|
||||
const { execSync } = require('child_process')
|
||||
const fail = '\u2717'.red
|
||||
const { GitProcess, GitError } = require('dugite')
|
||||
const { GitProcess } = require('dugite')
|
||||
const GitHub = require('github')
|
||||
const pass = '\u2713'.green
|
||||
const path = require('path')
|
||||
const pkg = require('../package.json')
|
||||
const readline = require('readline')
|
||||
const versionType = args._[0]
|
||||
const targetRepo = versionType === 'nightly' ? 'nightlies' : 'electron'
|
||||
|
||||
// TODO (future) automatically determine version based on conventional commits
|
||||
// via conventional-recommended-bump
|
||||
|
||||
assert(process.env.ELECTRON_GITHUB_TOKEN, 'ELECTRON_GITHUB_TOKEN not found in environment')
|
||||
if (!versionType && !args.notesOnly) {
|
||||
console.log(`Usage: prepare-release versionType [major | minor | patch | beta]` +
|
||||
` (--stable) (--notesOnly)`)
|
||||
console.log(`Usage: prepare-release versionType [stable | beta | nightly]` +
|
||||
` (--stable) (--notesOnly) (--automaticRelease) (--branch)`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
@@ -27,39 +30,26 @@ const github = new GitHub()
|
||||
const gitDir = path.resolve(__dirname, '..')
|
||||
github.authenticate({type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN})
|
||||
|
||||
async function createReleaseBranch () {
|
||||
console.log(`Creating release branch.`)
|
||||
let checkoutDetails = await GitProcess.exec([ 'checkout', '-b', 'release' ], gitDir)
|
||||
if (checkoutDetails.exitCode === 0) {
|
||||
console.log(`${pass} Successfully created the release branch.`)
|
||||
} else {
|
||||
const error = GitProcess.parseError(checkoutDetails.stderr)
|
||||
if (error === GitError.BranchAlreadyExists) {
|
||||
console.log(`${fail} Release branch already exists, aborting prepare ` +
|
||||
`release process.`)
|
||||
} else {
|
||||
console.log(`${fail} Error creating release branch: ` +
|
||||
`${checkoutDetails.stderr}`)
|
||||
}
|
||||
process.exit(1)
|
||||
async function getNewVersion (dryRun) {
|
||||
if (!dryRun) {
|
||||
console.log(`Bumping for new "${versionType}" version.`)
|
||||
}
|
||||
}
|
||||
|
||||
function getNewVersion () {
|
||||
console.log(`Bumping for new "${versionType}" version.`)
|
||||
let bumpScript = path.join(__dirname, 'bump-version.py')
|
||||
let scriptArgs = [bumpScript, `--bump ${versionType}`]
|
||||
if (args.stable) {
|
||||
scriptArgs.push('--stable')
|
||||
let scriptArgs = [bumpScript, '--bump', versionType]
|
||||
if (dryRun) {
|
||||
scriptArgs.push('--dry-run')
|
||||
}
|
||||
try {
|
||||
let bumpVersion = execSync(scriptArgs.join(' '), {encoding: 'UTF-8'})
|
||||
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim()
|
||||
let newVersion = `v${bumpVersion}`
|
||||
console.log(`${pass} Successfully bumped version to ${newVersion}`)
|
||||
if (!dryRun) {
|
||||
console.log(`${pass} Successfully bumped version to ${newVersion}`)
|
||||
}
|
||||
return newVersion
|
||||
} catch (err) {
|
||||
console.log(`${fail} Could not bump version, error was:`, err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,64 +71,114 @@ async function getCurrentBranch (gitDir) {
|
||||
}
|
||||
|
||||
async function getReleaseNotes (currentBranch) {
|
||||
if (versionType === 'nightly') {
|
||||
return 'Nightlies do not get release notes, please compare tags for info'
|
||||
}
|
||||
console.log(`Generating release notes for ${currentBranch}.`)
|
||||
let githubOpts = {
|
||||
owner: 'electron',
|
||||
repo: 'electron',
|
||||
repo: targetRepo,
|
||||
base: `v${pkg.version}`,
|
||||
head: currentBranch
|
||||
}
|
||||
let releaseNotes = '(placeholder)\n'
|
||||
let releaseNotes
|
||||
if (args.automaticRelease) {
|
||||
releaseNotes = '## Bug Fixes/Changes \n\n'
|
||||
} else {
|
||||
releaseNotes = '(placeholder)\n'
|
||||
}
|
||||
console.log(`Checking for commits from ${pkg.version} to ${currentBranch}`)
|
||||
let commitComparison = await github.repos.compareCommits(githubOpts)
|
||||
.catch(err => {
|
||||
console.log(`{$fail} Error checking for commits from ${pkg.version} to ` +
|
||||
console.log(`${fail} Error checking for commits from ${pkg.version} to ` +
|
||||
`${currentBranch}`, err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
if (commitComparison.data.commits.length === 0) {
|
||||
console.log(`${pass} There are no commits from ${pkg.version} to ` +
|
||||
`${currentBranch}, skipping release.`)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
let prCount = 0
|
||||
const mergeRE = /Merge pull request #(\d+) from .*\n/
|
||||
const newlineRE = /(.*)\n*.*/
|
||||
const prRE = /(.* )\(#(\d+)\)(?:.*)/
|
||||
commitComparison.data.commits.forEach(commitEntry => {
|
||||
let commitMessage = commitEntry.commit.message
|
||||
if (commitMessage.toLowerCase().indexOf('merge') > -1) {
|
||||
releaseNotes += `${commitMessage} \n`
|
||||
if (commitMessage.indexOf('#') > -1) {
|
||||
let prMatch = commitMessage.match(mergeRE)
|
||||
let prNumber
|
||||
if (prMatch) {
|
||||
commitMessage = commitMessage.replace(mergeRE, '').replace('\n', '')
|
||||
let newlineMatch = commitMessage.match(newlineRE)
|
||||
if (newlineMatch) {
|
||||
commitMessage = newlineMatch[1]
|
||||
}
|
||||
prNumber = prMatch[1]
|
||||
} else {
|
||||
prMatch = commitMessage.match(prRE)
|
||||
if (prMatch) {
|
||||
commitMessage = prMatch[1].trim()
|
||||
prNumber = prMatch[2]
|
||||
}
|
||||
}
|
||||
if (prMatch) {
|
||||
if (commitMessage.substr(commitMessage.length - 1, commitMessage.length) !== '.') {
|
||||
commitMessage += '.'
|
||||
}
|
||||
releaseNotes += `* ${commitMessage} #${prNumber} \n\n`
|
||||
prCount++
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(`${pass} Done generating release notes for ${currentBranch}.`)
|
||||
console.log(`${pass} Done generating release notes for ${currentBranch}. Found ${prCount} PRs.`)
|
||||
return releaseNotes
|
||||
}
|
||||
|
||||
async function createRelease (branchToTarget, isBeta) {
|
||||
let releaseNotes = await getReleaseNotes(branchToTarget)
|
||||
let newVersion = getNewVersion()
|
||||
let newVersion = await getNewVersion()
|
||||
await tagRelease(newVersion)
|
||||
const githubOpts = {
|
||||
owner: 'electron',
|
||||
repo: 'electron'
|
||||
repo: targetRepo
|
||||
}
|
||||
console.log(`Checking for existing draft release.`)
|
||||
let releases = await github.repos.getReleases(githubOpts)
|
||||
.catch(err => {
|
||||
console.log('$fail} Could not get releases. Error was', err)
|
||||
})
|
||||
let drafts = releases.data.filter(release => release.draft)
|
||||
let drafts = releases.data.filter(release => release.draft &&
|
||||
release.tag_name === newVersion)
|
||||
if (drafts.length > 0) {
|
||||
console.log(`${fail} Aborting because draft release for
|
||||
${drafts[0].tag_name} already exists.`)
|
||||
process.exit(1)
|
||||
}
|
||||
console.log(`${pass} A draft release does not exist; creating one.`)
|
||||
githubOpts.body = releaseNotes
|
||||
githubOpts.draft = true
|
||||
githubOpts.name = `electron ${newVersion}`
|
||||
if (isBeta) {
|
||||
githubOpts.body = `Note: This is a beta release. Please file new issues ` +
|
||||
`for any bugs you find in it.\n \n This release is published to npm ` +
|
||||
`under the beta tag and can be installed via npm install electron@beta, ` +
|
||||
`or npm i electron@${newVersion.substr(1)}.`
|
||||
if (newVersion.indexOf('nightly') > 0) {
|
||||
githubOpts.body = `Note: This is a nightly release. Please file new issues ` +
|
||||
`for any bugs you find in it.\n \n This release is published to npm ` +
|
||||
`under the nightly tag and can be installed via npm install electron@nightly, ` +
|
||||
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes}`
|
||||
} else {
|
||||
githubOpts.body = `Note: This is a beta release. Please file new issues ` +
|
||||
`for any bugs you find in it.\n \n This release is published to npm ` +
|
||||
`under the beta tag and can be installed via npm install electron@beta, ` +
|
||||
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes}`
|
||||
}
|
||||
githubOpts.name = `${githubOpts.name}`
|
||||
githubOpts.prerelease = true
|
||||
} else {
|
||||
githubOpts.body = releaseNotes
|
||||
}
|
||||
githubOpts.tag_name = newVersion
|
||||
githubOpts.target_commitish = branchToTarget
|
||||
githubOpts.target_commitish = newVersion.indexOf('nightly') !== -1 ? 'master' : branchToTarget
|
||||
await github.repos.createRelease(githubOpts)
|
||||
.catch(err => {
|
||||
console.log(`${fail} Error creating new release: `, err)
|
||||
@@ -147,34 +187,94 @@ async function createRelease (branchToTarget, isBeta) {
|
||||
console.log(`${pass} Draft release for ${newVersion} has been created.`)
|
||||
}
|
||||
|
||||
async function pushRelease () {
|
||||
let pushDetails = await GitProcess.exec(['push', 'origin', 'HEAD'], gitDir)
|
||||
async function pushRelease (branch) {
|
||||
let pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], gitDir)
|
||||
if (pushDetails.exitCode === 0) {
|
||||
console.log(`${pass} Successfully pushed the release branch. Wait for ` +
|
||||
console.log(`${pass} Successfully pushed the release. Wait for ` +
|
||||
`release builds to finish before running "npm run release".`)
|
||||
} else {
|
||||
console.log(`${fail} Error pushing the release branch: ` +
|
||||
console.log(`${fail} Error pushing the release: ` +
|
||||
`${pushDetails.stderr}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function runReleaseBuilds () {
|
||||
await ciReleaseBuild('release', {
|
||||
ghRelease: true
|
||||
async function runReleaseBuilds (branch) {
|
||||
await ciReleaseBuild(branch, {
|
||||
ghRelease: true,
|
||||
automaticRelease: args.automaticRelease
|
||||
})
|
||||
}
|
||||
|
||||
async function prepareRelease (isBeta, notesOnly) {
|
||||
let currentBranch = await getCurrentBranch(gitDir)
|
||||
if (notesOnly) {
|
||||
let releaseNotes = await getReleaseNotes(currentBranch)
|
||||
console.log(`Draft release notes are: ${releaseNotes}`)
|
||||
async function tagRelease (version) {
|
||||
console.log(`Tagging release ${version}.`)
|
||||
let checkoutDetails = await GitProcess.exec([ 'tag', '-a', '-m', version, version ], gitDir)
|
||||
if (checkoutDetails.exitCode === 0) {
|
||||
console.log(`${pass} Successfully tagged ${version}.`)
|
||||
} else {
|
||||
await createReleaseBranch()
|
||||
await createRelease(currentBranch, isBeta)
|
||||
await pushRelease()
|
||||
await runReleaseBuilds()
|
||||
console.log(`${fail} Error tagging ${version}: ` +
|
||||
`${checkoutDetails.stderr}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyNewVersion () {
|
||||
let newVersion = await getNewVersion(true)
|
||||
let response
|
||||
if (args.automaticRelease) {
|
||||
response = 'y'
|
||||
} else {
|
||||
response = await promptForVersion(newVersion)
|
||||
}
|
||||
if (response.match(/^y/i)) {
|
||||
console.log(`${pass} Starting release of ${newVersion}`)
|
||||
} else {
|
||||
console.log(`${fail} Aborting release of ${newVersion}`)
|
||||
process.exit()
|
||||
}
|
||||
}
|
||||
|
||||
async function promptForVersion (version) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
})
|
||||
rl.question(`Do you want to create the release ${version.green} (y/N)? `, (answer) => {
|
||||
rl.close()
|
||||
resolve(answer)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// function to determine if there have been commits to master since the last release
|
||||
async function changesToRelease () {
|
||||
let lastCommitWasRelease = new RegExp(`^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$`, 'g')
|
||||
let lastCommit = await GitProcess.exec(['log', '-n', '1', `--pretty=format:'%s'`], gitDir)
|
||||
return !lastCommitWasRelease.test(lastCommit.stdout)
|
||||
}
|
||||
|
||||
async function prepareRelease (isBeta, notesOnly) {
|
||||
if (args.dryRun) {
|
||||
let newVersion = await getNewVersion(true)
|
||||
console.log(newVersion)
|
||||
} else {
|
||||
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(gitDir)
|
||||
if (notesOnly) {
|
||||
let releaseNotes = await getReleaseNotes(currentBranch)
|
||||
console.log(`Draft release notes are: \n${releaseNotes}`)
|
||||
} else {
|
||||
const changes = await changesToRelease(currentBranch)
|
||||
if (changes) {
|
||||
await verifyNewVersion()
|
||||
await createRelease(currentBranch, isBeta)
|
||||
await pushRelease(currentBranch)
|
||||
await runReleaseBuilds(currentBranch)
|
||||
} else {
|
||||
console.log(`There are no new changes to this branch since the last release, aborting release.`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
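The changesToRelease check above keys off the commit-subject pattern used by the bump commits; a small illustration of what that regular expression accepts (example messages assumed, the g flag dropped here so .test() is stateless):

const lastCommitWasRelease = new RegExp('^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$')

lastCommitWasRelease.test('Bump v1.8.8')                  // true, nothing new to release
lastCommitWasRelease.test('Bump v3.0.0-nightly.20180821') // true
lastCommitWasRelease.test('fix: crash in webContents')    // false, there are changes to release
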
@@ -3,10 +3,16 @@ const fs = require('fs')
|
||||
const path = require('path')
|
||||
const childProcess = require('child_process')
|
||||
const GitHubApi = require('github')
|
||||
const {GitProcess} = require('dugite')
|
||||
const request = require('request')
|
||||
const assert = require('assert')
|
||||
const semver = require('semver')
|
||||
const rootPackageJson = require('../package.json')
|
||||
|
||||
if (!process.env.ELECTRON_NPM_OTP) {
|
||||
console.error('Please set ELECTRON_NPM_OTP')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const github = new GitHubApi({
|
||||
// debug: true,
|
||||
headers: { 'User-Agent': 'electron-npm-publisher' },
|
||||
@@ -21,7 +27,8 @@ const files = [
|
||||
'index.js',
|
||||
'install.js',
|
||||
'package.json',
|
||||
'README.md'
|
||||
'README.md',
|
||||
'LICENSE'
|
||||
]
|
||||
|
||||
const jsonFields = [
|
||||
@@ -49,9 +56,10 @@ new Promise((resolve, reject) => {
|
||||
tempDir = dirPath
|
||||
// copy files from `/npm` to temp directory
|
||||
files.forEach((name) => {
|
||||
const noThirdSegment = name === 'README.md' || name === 'LICENSE'
|
||||
fs.writeFileSync(
|
||||
path.join(tempDir, name),
|
||||
fs.readFileSync(path.join(__dirname, '..', name === 'README.md' ? '' : 'npm', name))
|
||||
fs.readFileSync(path.join(__dirname, '..', noThirdSegment ? '' : 'npm', name))
|
||||
)
|
||||
})
|
||||
// copy from root package.json to temp/package.json
|
||||
@@ -66,7 +74,7 @@ new Promise((resolve, reject) => {
|
||||
|
||||
return github.repos.getReleases({
|
||||
owner: 'electron',
|
||||
repo: 'electron'
|
||||
repo: rootPackageJson.version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
|
||||
})
|
||||
})
|
||||
.then((releases) => {
|
||||
@@ -101,8 +109,28 @@ new Promise((resolve, reject) => {
|
||||
})
|
||||
})
|
||||
})
|
||||
.then((release) => {
|
||||
npmTag = release.prerelease ? 'beta' : 'latest'
|
||||
.then(async (release) => {
|
||||
const currentBranch = await getCurrentBranch()
|
||||
|
||||
if (release.tag_name.indexOf('nightly') > 0) {
|
||||
if (currentBranch === 'master') {
|
||||
npmTag = 'nightly'
|
||||
} else {
|
||||
npmTag = `nightly-${currentBranch}`
|
||||
}
|
||||
} else {
|
||||
if (currentBranch === 'master') {
|
||||
// This should never happen, master releases should be nightly releases
|
||||
// this is here just-in-case
|
||||
npmTag = 'master'
|
||||
} else if (!release.prerelease) {
|
||||
// Tag the release with a `2-0-x` style tag
|
||||
npmTag = currentBranch
|
||||
} else {
|
||||
// Tag the release with a `beta-3-0-x` style tag
|
||||
npmTag = `beta-${currentBranch}`
|
||||
}
|
||||
}
|
||||
})
|
||||
.then(() => childProcess.execSync('npm pack', { cwd: tempDir }))
|
||||
.then(() => {
|
||||
@@ -113,13 +141,42 @@ new Promise((resolve, reject) => {
|
||||
env: Object.assign({}, process.env, { electron_config_cache: tempDir }),
|
||||
cwd: tempDir
|
||||
})
|
||||
const checkVersion = childProcess.execSync(`${path.join(tempDir, 'node_modules', '.bin', 'electron')} -v`)
|
||||
assert.ok((`v${rootPackageJson.version}`.indexOf(checkVersion.toString().trim()) === 0), `Version is correct`)
|
||||
resolve(tarballPath)
|
||||
})
|
||||
})
|
||||
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag}`))
|
||||
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag} --otp=${process.env.ELECTRON_NPM_OTP}`))
|
||||
.then(() => {
|
||||
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString())
|
||||
const localVersion = rootPackageJson.version
|
||||
const parsedLocalVersion = semver.parse(localVersion)
|
||||
if (parsedLocalVersion.prerelease.length === 0 &&
|
||||
semver.gt(localVersion, currentTags.latest)) {
|
||||
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`)
|
||||
}
|
||||
if (parsedLocalVersion.prerelease[0] === 'beta' &&
|
||||
semver.gt(localVersion, currentTags.beta)) {
|
||||
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`)
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(`Error: ${err}`)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
async function getCurrentBranch () {
|
||||
const gitDir = path.resolve(__dirname, '..')
|
||||
console.log(`Determining current git branch`)
|
||||
let gitArgs = ['rev-parse', '--abbrev-ref', 'HEAD']
|
||||
let branchDetails = await GitProcess.exec(gitArgs, gitDir)
|
||||
if (branchDetails.exitCode === 0) {
|
||||
let currentBranch = branchDetails.stdout.trim()
|
||||
console.log(`Successfully determined current git branch is ` +
|
||||
`${currentBranch}`)
|
||||
return currentBranch
|
||||
} else {
|
||||
let error = GitProcess.parseError(branchDetails.stderr)
|
||||
console.log(`Could not get details for the current branch,
|
||||
error was ${branchDetails.stderr}`, error)
|
||||
process.exit(1)
|
||||
}
|
||||
}
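The dist-tag step above only promotes the freshly published version when it is newer than what the registry already points at; the semver calls it relies on behave like this (version numbers assumed):

const semver = require('semver')

semver.parse('2.0.0-beta.3').prerelease // [ 'beta', 3 ]  -> beta channel
semver.parse('1.8.8').prerelease        // []             -> eligible for the latest tag
semver.gt('1.8.8', '1.8.7')             // true           -> npm dist-tag add electron@1.8.8 latest
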
script/release-artifact-cleanup.js (new executable file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
if (!process.env.CI) require('dotenv-safe').load()
|
||||
require('colors')
|
||||
const args = require('minimist')(process.argv.slice(2), {
|
||||
string: ['tag']
|
||||
})
|
||||
const { execSync } = require('child_process')
|
||||
const { GitProcess } = require('dugite')
|
||||
|
||||
const GitHub = require('github')
|
||||
const path = require('path')
|
||||
|
||||
const github = new GitHub()
|
||||
const gitDir = path.resolve(__dirname, '..')
|
||||
|
||||
github.authenticate({
|
||||
type: 'token',
|
||||
token: process.env.ELECTRON_GITHUB_TOKEN
|
||||
})
|
||||
|
||||
function getLastBumpCommit (tag) {
|
||||
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString()
|
||||
return JSON.parse(data)
|
||||
}
|
||||
|
||||
async function getCurrentBranch (gitDir) {
|
||||
const gitArgs = ['rev-parse', '--abbrev-ref', 'HEAD']
|
||||
const branchDetails = await GitProcess.exec(gitArgs, gitDir)
|
||||
if (branchDetails.exitCode === 0) {
|
||||
return branchDetails.stdout.trim()
|
||||
}
|
||||
|
||||
const error = GitProcess.parseError(branchDetails.stderr)
|
||||
console.error(`Couldn't get current branch: `, error)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
async function revertBumpCommit (tag) {
|
||||
const branch = await getCurrentBranch()
|
||||
  const commitToRevert = getLastBumpCommit(tag).hash
  await GitProcess.exec(['revert', commitToRevert], gitDir)
  const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], gitDir)
  if (pushDetails.exitCode === 0) {
    console.log(`Successfully reverted release commit.`)
  } else {
    const error = GitProcess.parseError(pushDetails.stderr)
    console.error(`Failed to push release commit: `, error)
    process.exit(1)
  }
}

async function deleteDraft (tag, targetRepo) {
  try {
    const result = await github.repos.getReleaseByTag({
      owner: 'electron',
      repo: targetRepo,
      tag
    })
    if (!result.draft) {
      console.log(`Published releases cannot be deleted.`)
      process.exit(1)
    } else {
      await github.repos.deleteRelease({
        owner: 'electron',
        repo: targetRepo,
        release_id: result.id
      })
    }
    console.log(`Successfully deleted draft with tag ${tag} from ${targetRepo}`)
  } catch (err) {
    console.error(`Couldn't delete draft with tag ${tag} from ${targetRepo}: `, err)
    process.exit(1)
  }
}

async function deleteTag (tag, targetRepo) {
  try {
    await github.gitdata.deleteReference({
      owner: 'electron',
      repo: targetRepo,
      ref: tag
    })
    console.log(`Successfully deleted tag ${tag} from ${targetRepo}`)
  } catch (err) {
    console.log(`Couldn't delete tag ${tag} from ${targetRepo}: `, err)
    process.exit(1)
  }
}

async function cleanReleaseArtifacts () {
  const tag = args.tag
  const isNightly = args.tag.includes('nightly')

  if (isNightly) {
    await deleteDraft(tag, 'nightlies')
    await deleteTag(tag, 'nightlies')
  } else {
    await deleteDraft(tag, 'electron')
  }

  await deleteTag(tag, 'electron')
  await revertBumpCommit(tag)

  console.log('Failed release artifact cleanup complete')
}

cleanReleaseArtifacts()
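For reviewers trying the script above locally: its only inputs are the --tag flag parsed by minimist and an ELECTRON_GITHUB_TOKEN in the environment (loaded via dotenv-safe when not on CI). A minimal sketch of how the tag argument is interpreted, using a hypothetical tag value that is not taken from this diff:

// Sketch only: mirrors the minimist options used by release-artifact-cleanup.js above.
// 'v1.8.2-beta.3' is a hypothetical example tag, not a value from this change.
const sketchArgs = require('minimist')(['--tag', 'v1.8.2-beta.3'], { string: ['tag'] })
console.log(sketchArgs.tag)                      // 'v1.8.2-beta.3'
console.log(sketchArgs.tag.includes('nightly'))  // false, so cleanReleaseArtifacts() would delete the
                                                 // draft and tag from 'electron' and revert the bump commit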
478
script/release-notes/index.js
Normal file
@@ -0,0 +1,478 @@
const { GitProcess } = require('dugite')
const Entities = require('html-entities').AllHtmlEntities
const fetch = require('node-fetch')
const fs = require('fs')
const GitHub = require('github')
const path = require('path')
const semver = require('semver')

const CACHE_DIR = path.resolve(__dirname, '.cache')
// Fill this with tags to ignore if you are generating release notes for older
// versions
//
// E.g. ['v3.0.0-beta.1'] to generate the release notes for 3.0.0-beta.1 :) from
// the current 3-0-x branch
const EXCLUDE_TAGS = []

const entities = new Entities()
const github = new GitHub()
const gitDir = path.resolve(__dirname, '..', '..')
github.authenticate({ type: 'token', token: process.env.ELECTRON_GITHUB_TOKEN })
let currentBranch

const semanticMap = new Map()
for (const line of fs.readFileSync(path.resolve(__dirname, 'legacy-pr-semantic-map.csv'), 'utf8').split('\n')) {
  if (!line) continue
  const bits = line.split(',')
  if (bits.length !== 2) continue
  semanticMap.set(bits[0], bits[1])
}

const getCurrentBranch = async () => {
  if (currentBranch) return currentBranch
  const gitArgs = ['rev-parse', '--abbrev-ref', 'HEAD']
  const branchDetails = await GitProcess.exec(gitArgs, gitDir)
  if (branchDetails.exitCode === 0) {
    currentBranch = branchDetails.stdout.trim()
    return currentBranch
  }
  throw GitProcess.parseError(branchDetails.stderr)
}

const getBranchOffPoint = async (branchName) => {
  const gitArgs = ['merge-base', branchName, 'master']
  const commitDetails = await GitProcess.exec(gitArgs, gitDir)
  if (commitDetails.exitCode === 0) {
    return commitDetails.stdout.trim()
  }
  throw GitProcess.parseError(commitDetails.stderr)
}

const getTagsOnBranch = async (branchName) => {
  const gitArgs = ['tag', '--merged', branchName]
  const tagDetails = await GitProcess.exec(gitArgs, gitDir)
  if (tagDetails.exitCode === 0) {
    return tagDetails.stdout.trim().split('\n').filter(tag => !EXCLUDE_TAGS.includes(tag))
  }
  throw GitProcess.parseError(tagDetails.stderr)
}

const memLastKnownRelease = new Map()

const getLastKnownReleaseOnBranch = async (branchName) => {
  if (memLastKnownRelease.has(branchName)) {
    return memLastKnownRelease.get(branchName)
  }
  const tags = await getTagsOnBranch(branchName)
  if (!tags.length) {
    throw new Error(`Branch ${branchName} has no tags, we have no idea what the last release was`)
  }
  const branchOffPointTags = await getTagsOnBranch(await getBranchOffPoint(branchName))
  if (branchOffPointTags.length >= tags.length) {
    // No release on this branch
    return null
  }
  memLastKnownRelease.set(branchName, tags[tags.length - 1])
  // Latest tag is the latest release
  return tags[tags.length - 1]
}

const getBranches = async () => {
  const gitArgs = ['branch', '--remote']
  const branchDetails = await GitProcess.exec(gitArgs, gitDir)
  if (branchDetails.exitCode === 0) {
    return branchDetails.stdout.trim().split('\n').map(b => b.trim()).filter(branch => branch !== 'origin/HEAD -> origin/master')
  }
  throw GitProcess.parseError(branchDetails.stderr)
}

const semverify = (v) => v.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.')

const getLastReleaseBranch = async () => {
  const current = await getCurrentBranch()
  const allBranches = await getBranches()
  const releaseBranches = allBranches
    .filter(branch => /^origin\/[0-9]+-[0-9]+-x$/.test(branch))
    .filter(branch => branch !== current && branch !== `origin/${current}`)
  let latest = null
  for (const b of releaseBranches) {
    if (latest === null) latest = b
    if (semver.gt(semverify(b), semverify(latest))) {
      latest = b
    }
  }
  return latest
}

const commitBeforeTag = async (commit, tag) => {
  const gitArgs = ['tag', '--contains', commit]
  const tagDetails = await GitProcess.exec(gitArgs, gitDir)
  if (tagDetails.exitCode === 0) {
    return tagDetails.stdout.split('\n').includes(tag)
  }
  throw GitProcess.parseError(tagDetails.stderr)
}

const getCommitsMergedIntoCurrentBranchSincePoint = async (point) => {
  return getCommitsBetween(point, 'HEAD')
}

const getCommitsBetween = async (point1, point2) => {
  const gitArgs = ['rev-list', `${point1}..${point2}`]
  const commitsDetails = await GitProcess.exec(gitArgs, gitDir)
  if (commitsDetails.exitCode !== 0) {
    throw GitProcess.parseError(commitsDetails.stderr)
  }
  return commitsDetails.stdout.trim().split('\n')
}

const TITLE_PREFIX = 'Merged Pull Request: '

const getCommitDetails = async (commitHash) => {
  const commitInfo = await (await fetch(`https://github.com/electron/electron/branch_commits/${commitHash}`)).text()
  const bits = commitInfo.split('</a>)')[0].split('>')
  const prIdent = bits[bits.length - 1].trim()
  if (!prIdent || commitInfo.indexOf('href="/electron/electron/pull') === -1) {
    console.warn(`WARNING: Could not track commit "${commitHash}" to a pull request, it may have been committed directly to the branch`)
    return null
  }
  const title = commitInfo.split('title="')[1].split('"')[0]
  if (!title.startsWith(TITLE_PREFIX)) {
    console.warn(`WARNING: Unknown PR title for commit "${commitHash}" in PR "${prIdent}"`)
    return null
  }
  return {
    mergedFrom: prIdent,
    prTitle: entities.decode(title.substr(TITLE_PREFIX.length))
  }
}

const doWork = async (items, fn, concurrent = 5) => {
  const results = []
  const toUse = [].concat(items)
  let i = 1
  const doBit = async () => {
    if (toUse.length === 0) return
    console.log(`Running ${i}/${items.length}`)
    i += 1

    const item = toUse.pop()
    const index = toUse.length
    results[index] = await fn(item)
    await doBit()
  }
  const bits = []
  for (let i = 0; i < concurrent; i += 1) {
    bits.push(doBit())
  }
  await Promise.all(bits)
  return results
}

const notes = new Map()

const NoteType = {
  FIX: 'fix',
  FEATURE: 'feature',
  BREAKING_CHANGE: 'breaking-change',
  DOCUMENTATION: 'doc',
  OTHER: 'other',
  UNKNOWN: 'unknown'
}

class Note {
  constructor (trueTitle, prNumber, ignoreIfInVersion) {
    // Self bindings
    this.guessType = this.guessType.bind(this)
    this.fetchPrInfo = this.fetchPrInfo.bind(this)
    this._getPr = this._getPr.bind(this)

    if (!trueTitle.trim()) console.error(prNumber)

    this._ignoreIfInVersion = ignoreIfInVersion
    this.reverted = false
    if (notes.has(trueTitle)) {
      console.warn(`Duplicate PR trueTitle: "${trueTitle}", "${prNumber}" this might cause weird reversions (this would be RARE)`)
    }

    // Memoize
    notes.set(trueTitle, this)

    this.originalTitle = trueTitle
    this.title = trueTitle
    this.prNumber = prNumber
    this.stripColon = true
    if (this.guessType() !== NoteType.UNKNOWN && this.stripColon) {
      this.title = trueTitle.split(':').slice(1).join(':').trim()
    }
  }

  guessType () {
    if (this.originalTitle.startsWith('fix:') ||
        this.originalTitle.startsWith('Fix:')) return NoteType.FIX
    if (this.originalTitle.startsWith('feat:')) return NoteType.FEATURE
    if (this.originalTitle.startsWith('spec:') ||
        this.originalTitle.startsWith('build:') ||
        this.originalTitle.startsWith('test:') ||
        this.originalTitle.startsWith('chore:') ||
        this.originalTitle.startsWith('deps:') ||
        this.originalTitle.startsWith('refactor:') ||
        this.originalTitle.startsWith('tools:') ||
        this.originalTitle.startsWith('vendor:') ||
        this.originalTitle.startsWith('perf:') ||
        this.originalTitle.startsWith('style:') ||
        this.originalTitle.startsWith('ci:')) return NoteType.OTHER
    if (this.originalTitle.startsWith('doc:') ||
        this.originalTitle.startsWith('docs:')) return NoteType.DOCUMENTATION

    this.stripColon = false

    if (this.pr && this.pr.data.labels.find(label => label.name === 'semver/breaking-change')) {
      return NoteType.BREAKING_CHANGE
    }
    // FIXME: Backported features will not be picked up by this
    if (this.pr && this.pr.data.labels.find(label => label.name === 'semver/nonbreaking-feature')) {
      return NoteType.FEATURE
    }

    const n = this.prNumber.replace('#', '')
    if (semanticMap.has(n)) {
      switch (semanticMap.get(n)) {
        case 'feat':
          return NoteType.FEATURE
        case 'fix':
          return NoteType.FIX
        case 'breaking-change':
          return NoteType.BREAKING_CHANGE
        case 'doc':
          return NoteType.DOCUMENTATION
        case 'build':
        case 'vendor':
        case 'refactor':
        case 'spec':
          return NoteType.OTHER
        default:
          throw new Error(`Unknown semantic mapping: ${semanticMap.get(n)}`)
      }
    }
    return NoteType.UNKNOWN
  }

  async _getPr (n) {
    const cachePath = path.resolve(CACHE_DIR, n)
    if (fs.existsSync(cachePath)) {
      return JSON.parse(fs.readFileSync(cachePath, 'utf8'))
    } else {
      try {
        const pr = await github.pullRequests.get({
          number: n,
          owner: 'electron',
          repo: 'electron'
        })
        fs.writeFileSync(cachePath, JSON.stringify({ data: pr.data }))
        return pr
      } catch (err) {
        console.info('#### FAILED:', `#${n}`)
        throw err
      }
    }
  }

  async fetchPrInfo () {
    if (this.pr) return
    const n = this.prNumber.replace('#', '')
    this.pr = await this._getPr(n)
    if (this.pr.data.labels.find(label => label.name === `merged/${this._ignoreIfInVersion.replace('origin/', '')}`)) {
      // This means we probably backported this PR, let's try figure out what
      // the corresponding backport PR would be by searching through comments
      // for trop
      let comments
      const cacheCommentsPath = path.resolve(CACHE_DIR, `${n}-comments`)
      if (fs.existsSync(cacheCommentsPath)) {
        comments = JSON.parse(fs.readFileSync(cacheCommentsPath, 'utf8'))
      } else {
        comments = await github.issues.getComments({
          number: n,
          owner: 'electron',
          repo: 'electron',
          per_page: 100
        })
        fs.writeFileSync(cacheCommentsPath, JSON.stringify({ data: comments.data }))
      }

      const tropComment = comments.data.find(
        c => (
          new RegExp(`We have automatically backported this PR to "${this._ignoreIfInVersion.replace('origin/', '')}", please check out #[0-9]+`)
        ).test(c.body)
      )

      if (tropComment) {
        const commentBits = tropComment.body.split('#')
        const tropPrNumber = commentBits[commentBits.length - 1]

        const tropPr = await this._getPr(tropPrNumber)
        if (tropPr.data.merged && tropPr.data.merge_commit_sha) {
          if (await commitBeforeTag(tropPr.data.merge_commit_sha, await getLastKnownReleaseOnBranch(this._ignoreIfInVersion))) {
            this.reverted = true
            console.log('PR', this.prNumber, 'was backported to a previous version, ignoring from notes')
          }
        }
      }
    }
  }
}

Note.findByTrueTitle = (trueTitle) => notes.get(trueTitle)

class ReleaseNotes {
  constructor (ignoreIfInVersion) {
    this._ignoreIfInVersion = ignoreIfInVersion
    this._handledPrs = new Set()
    this._revertedPrs = new Set()
    this.other = []
    this.docs = []
    this.fixes = []
    this.features = []
    this.breakingChanges = []
    this.unknown = []
  }

  async parseCommits (commitHashes) {
    await doWork(commitHashes, async (commit) => {
      const info = await getCommitDetails(commit)
      if (!info) return
      // Only handle each PR once
      if (this._handledPrs.has(info.mergedFrom)) return
      this._handledPrs.add(info.mergedFrom)

      // Strip the trop backport prefix
      const trueTitle = info.prTitle.replace(/^Backport \([0-9]+-[0-9]+-x\) - /, '')
      if (this._revertedPrs.has(trueTitle)) return

      // Handle PRs that revert other PRs
      if (trueTitle.startsWith('Revert "')) {
        const revertedTrueTitle = trueTitle.substr(8, trueTitle.length - 9)
        this._revertedPrs.add(revertedTrueTitle)
        const existingNote = Note.findByTrueTitle(revertedTrueTitle)
        if (existingNote) {
          existingNote.reverted = true
        }
        return
      }

      // Add a note for this PR
      const note = new Note(trueTitle, info.mergedFrom, this._ignoreIfInVersion)
      try {
        await note.fetchPrInfo()
      } catch (err) {
        console.error(commit, info)
        throw err
      }
      switch (note.guessType()) {
        case NoteType.FIX:
          this.fixes.push(note)
          break
        case NoteType.FEATURE:
          this.features.push(note)
          break
        case NoteType.BREAKING_CHANGE:
          this.breakingChanges.push(note)
          break
        case NoteType.OTHER:
          this.other.push(note)
          break
        case NoteType.DOCUMENTATION:
          this.docs.push(note)
          break
        case NoteType.UNKNOWN:
        default:
          this.unknown.push(note)
          break
      }
    }, 20)
  }

  list (notes) {
    if (notes.length === 0) {
      return '_There are no items in this section this release_'
    }
    return notes
      .filter(note => !note.reverted)
      .sort((a, b) => a.title.toLowerCase().localeCompare(b.title.toLowerCase()))
      .map((note) => `* ${note.title.trim()} ${note.prNumber}`).join('\n')
  }

  render () {
    return `
# Release Notes

## Breaking Changes

${this.list(this.breakingChanges)}

## Features

${this.list(this.features)}

## Fixes

${this.list(this.fixes)}

## Other Changes (E.g. Internal refactors or build system updates)

${this.list(this.other)}

## Documentation Updates

Some documentation updates, fixes and reworks: ${
  this.docs.length === 0
    ? '_None in this release_'
    : this.docs.sort((a, b) => a.prNumber.localeCompare(b.prNumber)).map(note => note.prNumber).join(', ')
}
${this.unknown.filter(n => !n.reverted).length > 0
    ? `## Unknown (fix these before publishing release)

${this.list(this.unknown)}
` : ''}`
  }
}

async function main () {
  if (!fs.existsSync(CACHE_DIR)) {
    fs.mkdirSync(CACHE_DIR)
  }
  const lastReleaseBranch = await getLastReleaseBranch()

  const notes = new ReleaseNotes(lastReleaseBranch)
  const lastKnownReleaseInCurrentStream = await getLastKnownReleaseOnBranch(await getCurrentBranch())
  const currentBranchOff = await getBranchOffPoint(await getCurrentBranch())

  const commits = await getCommitsMergedIntoCurrentBranchSincePoint(
    lastKnownReleaseInCurrentStream || currentBranchOff
  )

  if (!lastKnownReleaseInCurrentStream) {
    // This means we are the first release in our stream
    // FIXME: This will not work for minor releases!!!!

    const lastReleaseBranch = await getLastReleaseBranch()
    const lastBranchOff = await getBranchOffPoint(lastReleaseBranch)
    commits.push(...await getCommitsBetween(lastBranchOff, currentBranchOff))
  }

  await notes.parseCommits(commits)

  console.log(notes.render())

  const badNotes = notes.unknown.filter(n => !n.reverted).length
  if (badNotes > 0) {
    throw new Error(`You have ${badNotes} unknown release notes, please fix them before releasing`)
  }
}

if (process.mainModule === module) {
  main().catch((err) => {
    console.error('Error Occurred:', err)
    process.exit(1)
  })
}
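As an aid to reviewing getLastReleaseBranch() above: release branch names such as origin/1-8-x are normalised into comparable semver strings before semver.gt() picks the newest one. A small sketch with illustrative branch names (not values taken from this diff):

// Sketch only: semverify is copied from script/release-notes/index.js above;
// the branch names are illustrative examples.
const semver = require('semver')
const semverify = (v) => v.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.')
console.log(semverify('origin/1-8-x'))  // '1.8.0'
console.log(semver.gt(semverify('origin/2-0-x'), semverify('origin/1-8-x')))  // true, so 2-0-x is treated as the later release branch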
Some files were not shown because too many files have changed in this diff.