Mirror of https://github.com/atom/atom.git (synced 2026-01-26 07:19:06 -05:00)
Merge pull request #17925 from atom/mb-decaf-main-process-code
Convert the remaining coffee-script code in the main process to JavaScript
@@ -1,7 +1,5 @@
/** @babel */

export function beforeEach (fn) {
global.beforeEach(function () {
function beforeEach (fn) {
global.beforeEach(() => {
const result = fn()
if (result instanceof Promise) {
waitsForPromise(() => result)
@@ -9,8 +7,8 @@ export function beforeEach (fn) {
})
}

export function afterEach (fn) {
global.afterEach(function () {
function afterEach (fn) {
global.afterEach(() => {
const result = fn()
if (result instanceof Promise) {
waitsForPromise(() => result)
@@ -18,14 +16,14 @@ export function afterEach (fn) {
})
}

['it', 'fit', 'ffit', 'fffit'].forEach(function (name) {
module.exports[name] = function (description, fn) {
;['it', 'fit', 'ffit', 'fffit'].forEach(name => {
exports[name] = (description, fn) => {
if (fn === undefined) {
global[name](description)
return
}

global[name](description, function () {
global[name](description, () => {
const result = fn()
if (result instanceof Promise) {
waitsForPromise(() => result)
@@ -34,7 +32,7 @@ export function afterEach (fn) {
}
})

export async function conditionPromise (condition, description = 'anonymous condition') {
async function conditionPromise (condition, description = 'anonymous condition') {
const startTime = Date.now()

while (true) {
@@ -50,23 +48,23 @@ export async function conditionPromise (condition, description = 'anonymous cond
}
}

export function timeoutPromise (timeout) {
return new Promise(function (resolve) {
function timeoutPromise (timeout) {
return new Promise(resolve => {
global.setTimeout(resolve, timeout)
})
}

function waitsForPromise (fn) {
const promise = fn()
global.waitsFor('spec promise to resolve', function (done) {
promise.then(done, function (error) {
global.waitsFor('spec promise to resolve', done => {
promise.then(done, error => {
jasmine.getEnv().currentSpec.fail(error)
done()
})
})
}

export function emitterEventPromise (emitter, event, timeout = 15000) {
function emitterEventPromise (emitter, event, timeout = 15000) {
return new Promise((resolve, reject) => {
const timeoutHandle = setTimeout(() => {
reject(new Error(`Timed out waiting for '${event}' event`))
@@ -78,7 +76,7 @@ export function emitterEventPromise (emitter, event, timeout = 15000) {
})
}

export function promisify (original) {
function promisify (original) {
return function (...args) {
return new Promise((resolve, reject) => {
args.push((err, ...results) => {
@@ -94,10 +92,18 @@ export function promisify (original) {
}
}

export function promisifySome (obj, fnNames) {
function promisifySome (obj, fnNames) {
const result = {}
for (const fnName of fnNames) {
result[fnName] = promisify(obj[fnName])
}
return result
}

exports.afterEach = afterEach
exports.beforeEach = beforeEach
exports.conditionPromise = conditionPromise
exports.emitterEventPromise = emitterEventPromise
exports.promisify = promisify
exports.promisifySome = promisifySome
exports.timeoutPromise = timeoutPromise
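A minimal usage sketch of the converted helpers (illustrative only — the './async-spec-helpers' path and the spec body are assumptions, not part of this diff):

const {it, conditionPromise, timeoutPromise} = require('./async-spec-helpers')  // assumed path

describe('an async spec', () => {
  it('polls until a flag flips', async () => {
    let done = false
    timeoutPromise(50).then(() => { done = true })       // resolves after ~50 ms
    await conditionPromise(() => done, 'flag flipped')   // polls until the condition holds
  })
})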
@@ -1,10 +1,9 @@
|
||||
"use babel"
|
||||
const Mocha = require('mocha')
|
||||
const fs = require('fs-plus')
|
||||
const {assert} = require('chai')
|
||||
|
||||
import Mocha from 'mocha'
|
||||
import fs from 'fs-plus'
|
||||
import {assert} from 'chai'
|
||||
|
||||
export default function (testPaths) {
|
||||
module.exports =
|
||||
function (testPaths) {
|
||||
global.assert = assert
|
||||
|
||||
let reporterOptions = {
|
||||
@@ -24,6 +23,7 @@ export default function (testPaths) {
|
||||
reporter: 'mocha-multi-reporters',
|
||||
reporterOptions
|
||||
})
|
||||
|
||||
for (let testPath of testPaths) {
|
||||
if (fs.isDirectorySync(testPath)) {
|
||||
for (let testFilePath of fs.listTreeSync(testPath)) {
|
||||
@@ -36,7 +36,7 @@ export default function (testPaths) {
|
||||
}
|
||||
}
|
||||
|
||||
mocha.run(function (failures) {
|
||||
mocha.run(failures => {
|
||||
if (failures === 0) {
|
||||
process.exit(0)
|
||||
} else {
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
/** @babel */
|
||||
const parseCommandLine = require('../../src/main-process/parse-command-line')
|
||||
|
||||
import parseCommandLine from '../../src/main-process/parse-command-line'
|
||||
|
||||
describe('parseCommandLine', function () {
|
||||
describe('when --uri-handler is not passed', function () {
|
||||
it('parses arguments as normal', function () {
|
||||
describe('parseCommandLine', () => {
|
||||
describe('when --uri-handler is not passed', () => {
|
||||
it('parses arguments as normal', () => {
|
||||
const args = parseCommandLine(['-d', '--safe', '--test', '/some/path', 'atom://test/url', 'atom://other/url'])
|
||||
assert.isTrue(args.devMode)
|
||||
assert.isTrue(args.safeMode)
|
||||
@@ -14,8 +12,8 @@ describe('parseCommandLine', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('when --uri-handler is passed', function () {
|
||||
it('ignores other arguments and limits to one URL', function () {
|
||||
describe('when --uri-handler is passed', () => {
|
||||
it('ignores other arguments and limits to one URL', () => {
|
||||
const args = parseCommandLine(['-d', '--uri-handler', '--safe', '--test', '/some/path', 'atom://test/url', 'atom://other/url'])
|
||||
assert.isUndefined(args.devMode)
|
||||
assert.isUndefined(args.safeMode)
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
semver = require 'semver'
|
||||
|
||||
deprecatedPackages = require('../package.json')?._deprecatedPackages ? {}
|
||||
ranges = {}
|
||||
|
||||
exports.getDeprecatedPackageMetadata = (name) ->
|
||||
metadata = null
|
||||
if deprecatedPackages.hasOwnProperty(name)
|
||||
metadata = deprecatedPackages[name]
|
||||
Object.freeze(metadata) if metadata
|
||||
metadata
|
||||
|
||||
exports.isDeprecatedPackage = (name, version) ->
|
||||
return false unless deprecatedPackages.hasOwnProperty(name)
|
||||
|
||||
deprecatedVersionRange = deprecatedPackages[name].version
|
||||
return true unless deprecatedVersionRange
|
||||
|
||||
semver.valid(version) and satisfies(version, deprecatedVersionRange)
|
||||
|
||||
satisfies = (version, rawRange) ->
|
||||
unless parsedRange = ranges[rawRange]
|
||||
parsedRange = new Range(rawRange)
|
||||
ranges[rawRange] = parsedRange
|
||||
parsedRange.test(version)
|
||||
|
||||
# Extend semver.Range to memoize matched versions for speed
|
||||
class Range extends semver.Range
|
||||
constructor: ->
|
||||
super
|
||||
@matchedVersions = new Set()
|
||||
@unmatchedVersions = new Set()
|
||||
|
||||
test: (version) ->
|
||||
return true if @matchedVersions.has(version)
|
||||
return false if @unmatchedVersions.has(version)
|
||||
|
||||
matches = super
|
||||
if matches
|
||||
@matchedVersions.add(version)
|
||||
else
|
||||
@unmatchedVersions.add(version)
|
||||
matches
|
||||
@@ -1,143 +0,0 @@
|
||||
autoUpdater = null
|
||||
{EventEmitter} = require 'events'
|
||||
path = require 'path'
|
||||
|
||||
IdleState = 'idle'
|
||||
CheckingState = 'checking'
|
||||
DownloadingState = 'downloading'
|
||||
UpdateAvailableState = 'update-available'
|
||||
NoUpdateAvailableState = 'no-update-available'
|
||||
UnsupportedState = 'unsupported'
|
||||
ErrorState = 'error'
|
||||
|
||||
module.exports =
|
||||
class AutoUpdateManager
|
||||
Object.assign @prototype, EventEmitter.prototype
|
||||
|
||||
constructor: (@version, @testMode, @config) ->
|
||||
@state = IdleState
|
||||
@iconPath = path.resolve(__dirname, '..', '..', 'resources', 'atom.png')
|
||||
|
||||
initialize: ->
|
||||
if process.platform is 'win32'
|
||||
archSuffix = if process.arch is 'ia32' then '' else '-' + process.arch
|
||||
@feedUrl = "https://atom.io/api/updates#{archSuffix}?version=#{@version}"
|
||||
autoUpdater = require './auto-updater-win32'
|
||||
else
|
||||
@feedUrl = "https://atom.io/api/updates?version=#{@version}"
|
||||
{autoUpdater} = require 'electron'
|
||||
|
||||
autoUpdater.on 'error', (event, message) =>
|
||||
@setState(ErrorState, message)
|
||||
@emitWindowEvent('update-error')
|
||||
console.error "Error Downloading Update: #{message}"
|
||||
|
||||
autoUpdater.setFeedURL @feedUrl
|
||||
|
||||
autoUpdater.on 'checking-for-update', =>
|
||||
@setState(CheckingState)
|
||||
@emitWindowEvent('checking-for-update')
|
||||
|
||||
autoUpdater.on 'update-not-available', =>
|
||||
@setState(NoUpdateAvailableState)
|
||||
@emitWindowEvent('update-not-available')
|
||||
|
||||
autoUpdater.on 'update-available', =>
|
||||
@setState(DownloadingState)
|
||||
# We use sendMessage to send an event called 'update-available' in 'update-downloaded'
|
||||
# once the update download is complete. This mismatch between the electron
|
||||
# autoUpdater events is unfortunate but in the interest of not changing the
|
||||
# one existing event handled by applicationDelegate
|
||||
@emitWindowEvent('did-begin-downloading-update')
|
||||
@emit('did-begin-download')
|
||||
|
||||
autoUpdater.on 'update-downloaded', (event, releaseNotes, @releaseVersion) =>
|
||||
@setState(UpdateAvailableState)
|
||||
@emitUpdateAvailableEvent()
|
||||
|
||||
@config.onDidChange 'core.automaticallyUpdate', ({newValue}) =>
|
||||
if newValue
|
||||
@scheduleUpdateCheck()
|
||||
else
|
||||
@cancelScheduledUpdateCheck()
|
||||
|
||||
@scheduleUpdateCheck() if @config.get 'core.automaticallyUpdate'
|
||||
|
||||
switch process.platform
|
||||
when 'win32'
|
||||
@setState(UnsupportedState) unless autoUpdater.supportsUpdates()
|
||||
when 'linux'
|
||||
@setState(UnsupportedState)
|
||||
|
||||
emitUpdateAvailableEvent: ->
|
||||
return unless @releaseVersion?
|
||||
@emitWindowEvent('update-available', {@releaseVersion})
|
||||
return
|
||||
|
||||
emitWindowEvent: (eventName, payload) ->
|
||||
for atomWindow in @getWindows()
|
||||
atomWindow.sendMessage(eventName, payload)
|
||||
return
|
||||
|
||||
setState: (state, errorMessage) ->
|
||||
return if @state is state
|
||||
@state = state
|
||||
@errorMessage = errorMessage
|
||||
@emit 'state-changed', @state
|
||||
|
||||
getState: ->
|
||||
@state
|
||||
|
||||
getErrorMessage: ->
|
||||
@errorMessage
|
||||
|
||||
scheduleUpdateCheck: ->
|
||||
# Only schedule update check periodically if running in release version and
|
||||
# there is no existing scheduled update check.
|
||||
unless /-dev/.test(@version) or @checkForUpdatesIntervalID
|
||||
checkForUpdates = => @check(hidePopups: true)
|
||||
fourHours = 1000 * 60 * 60 * 4
|
||||
@checkForUpdatesIntervalID = setInterval(checkForUpdates, fourHours)
|
||||
checkForUpdates()
|
||||
|
||||
cancelScheduledUpdateCheck: ->
|
||||
if @checkForUpdatesIntervalID
|
||||
clearInterval(@checkForUpdatesIntervalID)
|
||||
@checkForUpdatesIntervalID = null
|
||||
|
||||
check: ({hidePopups}={}) ->
|
||||
unless hidePopups
|
||||
autoUpdater.once 'update-not-available', @onUpdateNotAvailable
|
||||
autoUpdater.once 'error', @onUpdateError
|
||||
|
||||
autoUpdater.checkForUpdates() unless @testMode
|
||||
|
||||
install: ->
|
||||
autoUpdater.quitAndInstall() unless @testMode
|
||||
|
||||
onUpdateNotAvailable: =>
|
||||
autoUpdater.removeListener 'error', @onUpdateError
|
||||
{dialog} = require 'electron'
|
||||
dialog.showMessageBox {
|
||||
type: 'info'
|
||||
buttons: ['OK']
|
||||
icon: @iconPath
|
||||
message: 'No update available.'
|
||||
title: 'No Update Available'
|
||||
detail: "Version #{@version} is the latest version."
|
||||
}, -> # noop callback to get async behavior
|
||||
|
||||
onUpdateError: (event, message) =>
|
||||
autoUpdater.removeListener 'update-not-available', @onUpdateNotAvailable
|
||||
{dialog} = require 'electron'
|
||||
dialog.showMessageBox {
|
||||
type: 'warning'
|
||||
buttons: ['OK']
|
||||
icon: @iconPath
|
||||
message: 'There was an error checking for updates.'
|
||||
title: 'Update Error'
|
||||
detail: message
|
||||
}, -> # noop callback to get async behavior
|
||||
|
||||
getWindows: ->
|
||||
global.atomApplication.getAllWindows()
|
||||
178 src/main-process/auto-update-manager.js (new file)
@@ -0,0 +1,178 @@
|
||||
const {EventEmitter} = require('events')
|
||||
const path = require('path')
|
||||
|
||||
const IdleState = 'idle'
|
||||
const CheckingState = 'checking'
|
||||
const DownloadingState = 'downloading'
|
||||
const UpdateAvailableState = 'update-available'
|
||||
const NoUpdateAvailableState = 'no-update-available'
|
||||
const UnsupportedState = 'unsupported'
|
||||
const ErrorState = 'error'
|
||||
|
||||
let autoUpdater = null
|
||||
|
||||
module.exports =
|
||||
class AutoUpdateManager extends EventEmitter {
|
||||
constructor (version, testMode, config) {
|
||||
super()
|
||||
this.onUpdateNotAvailable = this.onUpdateNotAvailable.bind(this)
|
||||
this.onUpdateError = this.onUpdateError.bind(this)
|
||||
this.version = version
|
||||
this.testMode = testMode
|
||||
this.config = config
|
||||
this.state = IdleState
|
||||
this.iconPath = path.resolve(__dirname, '..', '..', 'resources', 'atom.png')
|
||||
}
|
||||
|
||||
initialize () {
|
||||
if (process.platform === 'win32') {
|
||||
const archSuffix = process.arch === 'ia32' ? '' : `-${process.arch}`
|
||||
this.feedUrl = `https://atom.io/api/updates${archSuffix}?version=${this.version}`
|
||||
autoUpdater = require('./auto-updater-win32')
|
||||
} else {
|
||||
this.feedUrl = `https://atom.io/api/updates?version=${this.version}`;
|
||||
({autoUpdater} = require('electron'))
|
||||
}
|
||||
|
||||
autoUpdater.on('error', (event, message) => {
|
||||
this.setState(ErrorState, message)
|
||||
this.emitWindowEvent('update-error')
|
||||
console.error(`Error Downloading Update: ${message}`)
|
||||
})
|
||||
|
||||
autoUpdater.setFeedURL(this.feedUrl)
|
||||
|
||||
autoUpdater.on('checking-for-update', () => {
|
||||
this.setState(CheckingState)
|
||||
this.emitWindowEvent('checking-for-update')
|
||||
})
|
||||
|
||||
autoUpdater.on('update-not-available', () => {
|
||||
this.setState(NoUpdateAvailableState)
|
||||
this.emitWindowEvent('update-not-available')
|
||||
})
|
||||
|
||||
autoUpdater.on('update-available', () => {
|
||||
this.setState(DownloadingState)
|
||||
// We use sendMessage to send an event called 'update-available' in 'update-downloaded'
|
||||
// once the update download is complete. This mismatch between the electron
|
||||
// autoUpdater events is unfortunate but in the interest of not changing the
|
||||
// one existing event handled by applicationDelegate
|
||||
this.emitWindowEvent('did-begin-downloading-update')
|
||||
this.emit('did-begin-download')
|
||||
})
|
||||
|
||||
autoUpdater.on('update-downloaded', (event, releaseNotes, releaseVersion) => {
|
||||
this.releaseVersion = releaseVersion
|
||||
this.setState(UpdateAvailableState)
|
||||
this.emitUpdateAvailableEvent()
|
||||
})
|
||||
|
||||
this.config.onDidChange('core.automaticallyUpdate', ({newValue}) => {
|
||||
if (newValue) {
|
||||
this.scheduleUpdateCheck()
|
||||
} else {
|
||||
this.cancelScheduledUpdateCheck()
|
||||
}
|
||||
})
|
||||
|
||||
if (this.config.get('core.automaticallyUpdate')) this.scheduleUpdateCheck()
|
||||
|
||||
switch (process.platform) {
|
||||
case 'win32':
|
||||
if (!autoUpdater.supportsUpdates()) {
|
||||
this.setState(UnsupportedState)
|
||||
}
|
||||
break
|
||||
case 'linux':
|
||||
this.setState(UnsupportedState)
|
||||
}
|
||||
}
|
||||
|
||||
emitUpdateAvailableEvent () {
|
||||
if (this.releaseVersion == null) return
|
||||
this.emitWindowEvent('update-available', {releaseVersion: this.releaseVersion})
|
||||
}
|
||||
|
||||
emitWindowEvent (eventName, payload) {
|
||||
for (let atomWindow of this.getWindows()) {
|
||||
atomWindow.sendMessage(eventName, payload)
|
||||
}
|
||||
}
|
||||
|
||||
setState (state, errorMessage) {
|
||||
if (this.state === state) return
|
||||
this.state = state
|
||||
this.errorMessage = errorMessage
|
||||
this.emit('state-changed', this.state)
|
||||
}
|
||||
|
||||
getState () {
|
||||
return this.state
|
||||
}
|
||||
|
||||
getErrorMessage () {
|
||||
return this.errorMessage
|
||||
}
|
||||
|
||||
scheduleUpdateCheck () {
|
||||
// Only schedule update check periodically if running in release version and
|
||||
// there is no existing scheduled update check.
|
||||
if (!/-dev/.test(this.version) && !this.checkForUpdatesIntervalID) {
|
||||
const checkForUpdates = () => this.check({hidePopups: true})
|
||||
const fourHours = 1000 * 60 * 60 * 4
|
||||
this.checkForUpdatesIntervalID = setInterval(checkForUpdates, fourHours)
|
||||
checkForUpdates()
|
||||
}
|
||||
}
|
||||
|
||||
cancelScheduledUpdateCheck () {
|
||||
if (this.checkForUpdatesIntervalID) {
|
||||
clearInterval(this.checkForUpdatesIntervalID)
|
||||
this.checkForUpdatesIntervalID = null
|
||||
}
|
||||
}
|
||||
|
||||
check ({hidePopups} = {}) {
|
||||
if (!hidePopups) {
|
||||
autoUpdater.once('update-not-available', this.onUpdateNotAvailable)
|
||||
autoUpdater.once('error', this.onUpdateError)
|
||||
}
|
||||
|
||||
if (!this.testMode) autoUpdater.checkForUpdates()
|
||||
}
|
||||
|
||||
install () {
|
||||
if (!this.testMode) autoUpdater.quitAndInstall()
|
||||
}
|
||||
|
||||
onUpdateNotAvailable () {
|
||||
autoUpdater.removeListener('error', this.onUpdateError)
|
||||
const {dialog} = require('electron')
|
||||
dialog.showMessageBox({
|
||||
type: 'info',
|
||||
buttons: ['OK'],
|
||||
icon: this.iconPath,
|
||||
message: 'No update available.',
|
||||
title: 'No Update Available',
|
||||
detail: `Version ${this.version} is the latest version.`
|
||||
}, () => {}) // noop callback to get async behavior
|
||||
}
|
||||
|
||||
onUpdateError (event, message) {
|
||||
autoUpdater.removeListener('update-not-available', this.onUpdateNotAvailable)
|
||||
const {dialog} = require('electron')
|
||||
dialog.showMessageBox({
|
||||
type: 'warning',
|
||||
buttons: ['OK'],
|
||||
icon: this.iconPath,
|
||||
message: 'There was an error checking for updates.',
|
||||
title: 'Update Error',
|
||||
detail: message
|
||||
}, () => {}) // noop callback to get async behavior
|
||||
}
|
||||
|
||||
getWindows () {
|
||||
return global.atomApplication.getAllWindows()
|
||||
}
|
||||
}
|
||||
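A sketch of how the converted manager might be driven from Electron's main process (the config stub and version string are assumptions; inside Atom the real Config instance and version are passed in):

const AutoUpdateManager = require('./auto-update-manager')  // assumed relative path

// Hypothetical minimal config stub exposing the two members the manager uses
const config = {
  get: key => false,                  // core.automaticallyUpdate disabled for the sketch
  onDidChange: (key, callback) => {}  // no-op subscription
}

const manager = new AutoUpdateManager('1.30.0', true, config)  // testMode=true: never spawns a real install
manager.on('state-changed', state => console.log('updater state:', state))
manager.initialize()                 // wires autoUpdater events; needs Electron's main process
manager.check({hidePopups: true})    // no-op while testMode is true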
@@ -1,62 +0,0 @@
|
||||
{EventEmitter} = require 'events'
|
||||
SquirrelUpdate = require './squirrel-update'
|
||||
|
||||
class AutoUpdater
|
||||
Object.assign @prototype, EventEmitter.prototype
|
||||
|
||||
setFeedURL: (@updateUrl) ->
|
||||
|
||||
quitAndInstall: ->
|
||||
if SquirrelUpdate.existsSync()
|
||||
SquirrelUpdate.restartAtom(require('electron').app)
|
||||
else
|
||||
require('electron').autoUpdater.quitAndInstall()
|
||||
|
||||
downloadUpdate: (callback) ->
|
||||
SquirrelUpdate.spawn ['--download', @updateUrl], (error, stdout) ->
|
||||
return callback(error) if error?
|
||||
|
||||
try
|
||||
# Last line of output is the JSON details about the releases
|
||||
json = stdout.trim().split('\n').pop()
|
||||
update = JSON.parse(json)?.releasesToApply?.pop?()
|
||||
catch error
|
||||
error.stdout = stdout
|
||||
return callback(error)
|
||||
|
||||
callback(null, update)
|
||||
|
||||
installUpdate: (callback) ->
|
||||
SquirrelUpdate.spawn(['--update', @updateUrl], callback)
|
||||
|
||||
supportsUpdates: ->
|
||||
SquirrelUpdate.existsSync()
|
||||
|
||||
checkForUpdates: ->
|
||||
throw new Error('Update URL is not set') unless @updateUrl
|
||||
|
||||
@emit 'checking-for-update'
|
||||
|
||||
unless SquirrelUpdate.existsSync()
|
||||
@emit 'update-not-available'
|
||||
return
|
||||
|
||||
@downloadUpdate (error, update) =>
|
||||
if error?
|
||||
@emit 'update-not-available'
|
||||
return
|
||||
|
||||
unless update?
|
||||
@emit 'update-not-available'
|
||||
return
|
||||
|
||||
@emit 'update-available'
|
||||
|
||||
@installUpdate (error) =>
|
||||
if error?
|
||||
@emit 'update-not-available'
|
||||
return
|
||||
|
||||
@emit 'update-downloaded', {}, update.releaseNotes, update.version, new Date(), 'https://atom.io', => @quitAndInstall()
|
||||
|
||||
module.exports = new AutoUpdater()
|
||||
88 src/main-process/auto-updater-win32.js (new file)
@@ -0,0 +1,88 @@
|
||||
const {EventEmitter} = require('events')
|
||||
const SquirrelUpdate = require('./squirrel-update')
|
||||
|
||||
class AutoUpdater extends EventEmitter {
|
||||
setFeedURL (updateUrl) {
|
||||
this.updateUrl = updateUrl
|
||||
}
|
||||
|
||||
quitAndInstall () {
|
||||
if (SquirrelUpdate.existsSync()) {
|
||||
SquirrelUpdate.restartAtom(require('electron').app)
|
||||
} else {
|
||||
require('electron').autoUpdater.quitAndInstall()
|
||||
}
|
||||
}
|
||||
|
||||
downloadUpdate (callback) {
|
||||
SquirrelUpdate.spawn(['--download', this.updateUrl], function (error, stdout) {
|
||||
let update
|
||||
if (error != null) return callback(error)
|
||||
|
||||
try {
|
||||
// Last line of output is the JSON details about the releases
|
||||
const json = stdout.trim().split('\n').pop()
|
||||
const data = JSON.parse(json)
|
||||
const releasesToApply = data && data.releasesToApply
|
||||
if (releasesToApply.pop) update = releasesToApply.pop()
|
||||
} catch (error) {
|
||||
error.stdout = stdout
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
callback(null, update)
|
||||
})
|
||||
}
|
||||
|
||||
installUpdate (callback) {
|
||||
SquirrelUpdate.spawn(['--update', this.updateUrl], callback)
|
||||
}
|
||||
|
||||
supportsUpdates () {
|
||||
return SquirrelUpdate.existsSync()
|
||||
}
|
||||
|
||||
checkForUpdates () {
|
||||
if (!this.updateUrl) throw new Error('Update URL is not set')
|
||||
|
||||
this.emit('checking-for-update')
|
||||
|
||||
if (!SquirrelUpdate.existsSync()) {
|
||||
this.emit('update-not-available')
|
||||
return
|
||||
}
|
||||
|
||||
this.downloadUpdate((error, update) => {
|
||||
if (error != null) {
|
||||
this.emit('update-not-available')
|
||||
return
|
||||
}
|
||||
|
||||
if (update == null) {
|
||||
this.emit('update-not-available')
|
||||
return
|
||||
}
|
||||
|
||||
this.emit('update-available')
|
||||
|
||||
this.installUpdate(error => {
|
||||
if (error != null) {
|
||||
this.emit('update-not-available')
|
||||
return
|
||||
}
|
||||
|
||||
this.emit(
|
||||
'update-downloaded',
|
||||
{},
|
||||
update.releaseNotes,
|
||||
update.version,
|
||||
new Date(),
|
||||
'https://atom.io',
|
||||
() => this.quitAndInstall()
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new AutoUpdater()
|
||||
@@ -1,24 +0,0 @@
|
||||
{Menu} = require 'electron'
|
||||
|
||||
module.exports =
|
||||
class ContextMenu
|
||||
constructor: (template, @atomWindow) ->
|
||||
template = @createClickHandlers(template)
|
||||
menu = Menu.buildFromTemplate(template)
|
||||
menu.popup(@atomWindow.browserWindow, {async: true})
|
||||
|
||||
# It's necessary to build the event handlers in this process, otherwise
|
||||
# closures are dragged across processes and fail to be garbage collected
|
||||
# appropriately.
|
||||
createClickHandlers: (template) ->
|
||||
for item in template
|
||||
if item.command
|
||||
item.commandDetail ?= {}
|
||||
item.commandDetail.contextCommand = true
|
||||
item.commandDetail.atomWindow = @atomWindow
|
||||
do (item) =>
|
||||
item.click = =>
|
||||
global.atomApplication.sendCommandToWindow(item.command, @atomWindow, item.commandDetail)
|
||||
else if item.submenu
|
||||
@createClickHandlers(item.submenu)
|
||||
item
|
||||
33 src/main-process/context-menu.js (new file)
@@ -0,0 +1,33 @@
|
||||
const {Menu} = require('electron')
|
||||
|
||||
module.exports =
|
||||
class ContextMenu {
|
||||
constructor (template, atomWindow) {
|
||||
this.atomWindow = atomWindow
|
||||
this.createClickHandlers(template)
|
||||
const menu = Menu.buildFromTemplate(template)
|
||||
menu.popup(this.atomWindow.browserWindow, {async: true})
|
||||
}
|
||||
|
||||
// It's necessary to build the event handlers in this process, otherwise
|
||||
// closures are dragged across processes and fail to be garbage collected
|
||||
// appropriately.
|
||||
createClickHandlers (template) {
|
||||
template.forEach(item => {
|
||||
if (item.command) {
|
||||
if (!item.commandDetail) item.commandDetail = {}
|
||||
item.commandDetail.contextCommand = true
|
||||
item.commandDetail.atomWindow = this.atomWindow
|
||||
item.click = () => {
|
||||
global.atomApplication.sendCommandToWindow(
|
||||
item.command,
|
||||
this.atomWindow,
|
||||
item.commandDetail
|
||||
)
|
||||
}
|
||||
} else if (item.submenu) {
|
||||
this.createClickHandlers(item.submenu)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
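Illustrative template shape for the converted class (labels, commands, and the atomWindow stub are examples, not taken from the diff):

const ContextMenu = require('./context-menu')  // assumed relative path

const template = [
  {label: 'Cut', command: 'core:cut'},
  {label: 'Clipboard', submenu: [{label: 'Paste', command: 'core:paste'}]}
]
// atomWindow is assumed to be an AtomWindow exposing the browserWindow the menu pops up in
new ContextMenu(template, atomWindow)  // click handlers route through atomApplication.sendCommandToWindow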
@@ -1,36 +0,0 @@
|
||||
ChildProcess = require 'child_process'
|
||||
|
||||
# Spawn a command and invoke the callback when it completes with an error
|
||||
# and the output from standard out.
|
||||
#
|
||||
# * `command` The underlying OS command {String} to execute.
|
||||
# * `args` (optional) The {Array} with arguments to be passed to command.
|
||||
# * `callback` (optional) The {Function} to call after the command has run. It will be invoked with arguments:
|
||||
# * `error` (optional) An {Error} object returned by the command, `null` if no error was thrown.
|
||||
# * `code` Error code returned by the command.
|
||||
# * `stdout` The {String} output text generated by the command.
|
||||
#
|
||||
# Returns `undefined`.
|
||||
exports.spawn = (command, args, callback) ->
|
||||
stdout = ''
|
||||
|
||||
try
|
||||
spawnedProcess = ChildProcess.spawn(command, args)
|
||||
catch error
|
||||
# Spawn can throw an error
|
||||
process.nextTick -> callback?(error, stdout)
|
||||
return
|
||||
|
||||
spawnedProcess.stdout.on 'data', (data) -> stdout += data
|
||||
|
||||
error = null
|
||||
spawnedProcess.on 'error', (processError) -> error ?= processError
|
||||
spawnedProcess.on 'close', (code, signal) ->
|
||||
error ?= new Error("Command failed: #{signal ? code}") if code isnt 0
|
||||
error?.code ?= code
|
||||
error?.stdout ?= stdout
|
||||
callback?(error, stdout)
|
||||
# This is necessary if using Powershell 2 on Windows 7 to get the events to raise
|
||||
# http://stackoverflow.com/questions/9155289/calling-powershell-from-nodejs
|
||||
spawnedProcess.stdin.end()
|
||||
43 src/main-process/spawner.js (new file)
@@ -0,0 +1,43 @@
|
||||
const ChildProcess = require('child_process')
|
||||
|
||||
// Spawn a command and invoke the callback when it completes with an error
|
||||
// and the output from standard out.
|
||||
//
|
||||
// * `command` The underlying OS command {String} to execute.
|
||||
// * `args` (optional) The {Array} with arguments to be passed to command.
|
||||
// * `callback` (optional) The {Function} to call after the command has run. It will be invoked with arguments:
|
||||
// * `error` (optional) An {Error} object returned by the command, `null` if no error was thrown.
|
||||
// * `code` Error code returned by the command.
|
||||
// * `stdout` The {String} output text generated by the command.
|
||||
exports.spawn = function (command, args, callback) {
|
||||
let error
|
||||
let spawnedProcess
|
||||
let stdout = ''
|
||||
|
||||
try {
|
||||
spawnedProcess = ChildProcess.spawn(command, args)
|
||||
} catch (error) {
|
||||
process.nextTick(() => callback && callback(error, stdout))
|
||||
return
|
||||
}
|
||||
|
||||
spawnedProcess.stdout.on('data', data => { stdout += data })
|
||||
spawnedProcess.on('error', processError => { error = processError })
|
||||
spawnedProcess.on('close', (code, signal) => {
|
||||
if (!error && code !== 0) {
|
||||
error = new Error(`Command failed: ${signal != null ? signal : code}`)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
if (error.code == null) error.code = code
|
||||
if (error.stdout == null) error.stdout = stdout
|
||||
}
|
||||
|
||||
callback && callback(error, stdout)
|
||||
})
|
||||
|
||||
// This is necessary if using Powershell 2 on Windows 7 to get the events to raise
|
||||
// http://stackoverflow.com/questions/9155289/calling-powershell-from-nodejs
|
||||
return spawnedProcess.stdin.end()
|
||||
}
|
||||
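A usage sketch for the converted spawner (the command and its arguments are examples only):

const Spawner = require('./spawner')  // assumed relative path

Spawner.spawn('cmd.exe', ['/c', 'echo hello'], (error, stdout) => {
  if (error) {
    console.error('command failed:', error.code, error.stdout)
  } else {
    console.log('captured output:', stdout.trim())
  }
})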
@@ -1,162 +0,0 @@
|
||||
fs = require 'fs-plus'
|
||||
path = require 'path'
|
||||
Spawner = require './spawner'
|
||||
WinShell = require './win-shell'
|
||||
WinPowerShell = require './win-powershell'
|
||||
|
||||
appFolder = path.resolve(process.execPath, '..')
|
||||
rootAtomFolder = path.resolve(appFolder, '..')
|
||||
binFolder = path.join(rootAtomFolder, 'bin')
|
||||
updateDotExe = path.join(rootAtomFolder, 'Update.exe')
|
||||
exeName = path.basename(process.execPath)
|
||||
|
||||
if process.env.SystemRoot
|
||||
system32Path = path.join(process.env.SystemRoot, 'System32')
|
||||
setxPath = path.join(system32Path, 'setx.exe')
|
||||
else
|
||||
setxPath = 'setx.exe'
|
||||
|
||||
# Spawn setx.exe and callback when it completes
|
||||
spawnSetx = (args, callback) ->
|
||||
Spawner.spawn(setxPath, args, callback)
|
||||
|
||||
# Spawn the Update.exe with the given arguments and invoke the callback when
|
||||
# the command completes.
|
||||
spawnUpdate = (args, callback) ->
|
||||
Spawner.spawn(updateDotExe, args, callback)
|
||||
|
||||
# Add atom and apm to the PATH
|
||||
#
|
||||
# This is done by adding .cmd shims to the root bin folder in the Atom
|
||||
# install directory that point to the newly installed versions inside
|
||||
# the versioned app directories.
|
||||
addCommandsToPath = (callback) ->
|
||||
installCommands = (callback) ->
|
||||
atomCommandPath = path.join(binFolder, 'atom.cmd')
|
||||
relativeAtomPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'atom.cmd'))
|
||||
atomCommand = "@echo off\r\n\"%~dp0\\#{relativeAtomPath}\" %*"
|
||||
|
||||
atomShCommandPath = path.join(binFolder, 'atom')
|
||||
relativeAtomShPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'atom.sh'))
|
||||
atomShCommand = "#!/bin/sh\r\n\"$(dirname \"$0\")/#{relativeAtomShPath.replace(/\\/g, '/')}\" \"$@\"\r\necho"
|
||||
|
||||
apmCommandPath = path.join(binFolder, 'apm.cmd')
|
||||
relativeApmPath = path.relative(binFolder, path.join(process.resourcesPath, 'app', 'apm', 'bin', 'apm.cmd'))
|
||||
apmCommand = "@echo off\r\n\"%~dp0\\#{relativeApmPath}\" %*"
|
||||
|
||||
apmShCommandPath = path.join(binFolder, 'apm')
|
||||
relativeApmShPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'apm.sh'))
|
||||
apmShCommand = "#!/bin/sh\r\n\"$(dirname \"$0\")/#{relativeApmShPath.replace(/\\/g, '/')}\" \"$@\""
|
||||
|
||||
fs.writeFile atomCommandPath, atomCommand, ->
|
||||
fs.writeFile atomShCommandPath, atomShCommand, ->
|
||||
fs.writeFile apmCommandPath, apmCommand, ->
|
||||
fs.writeFile apmShCommandPath, apmShCommand, ->
|
||||
callback()
|
||||
|
||||
addBinToPath = (pathSegments, callback) ->
|
||||
pathSegments.push(binFolder)
|
||||
newPathEnv = pathSegments.join(';')
|
||||
spawnSetx(['Path', newPathEnv], callback)
|
||||
|
||||
installCommands (error) ->
|
||||
return callback(error) if error?
|
||||
|
||||
WinPowerShell.getPath (error, pathEnv) ->
|
||||
return callback(error) if error?
|
||||
|
||||
pathSegments = pathEnv.split(/;+/).filter (pathSegment) -> pathSegment
|
||||
if pathSegments.indexOf(binFolder) is -1
|
||||
addBinToPath(pathSegments, callback)
|
||||
else
|
||||
callback()
|
||||
|
||||
# Remove atom and apm from the PATH
|
||||
removeCommandsFromPath = (callback) ->
|
||||
WinPowerShell.getPath (error, pathEnv) ->
|
||||
return callback(error) if error?
|
||||
|
||||
pathSegments = pathEnv.split(/;+/).filter (pathSegment) ->
|
||||
pathSegment and pathSegment isnt binFolder
|
||||
newPathEnv = pathSegments.join(';')
|
||||
|
||||
if pathEnv isnt newPathEnv
|
||||
spawnSetx(['Path', newPathEnv], callback)
|
||||
else
|
||||
callback()
|
||||
|
||||
# Create a desktop and start menu shortcut by using the command line API
|
||||
# provided by Squirrel's Update.exe
|
||||
createShortcuts = (locations, callback) ->
|
||||
spawnUpdate(['--createShortcut', exeName, '-l', locations.join(',')], callback)
|
||||
|
||||
# Update the desktop and start menu shortcuts by using the command line API
|
||||
# provided by Squirrel's Update.exe
|
||||
updateShortcuts = (callback) ->
|
||||
if homeDirectory = fs.getHomeDirectory()
|
||||
desktopShortcutPath = path.join(homeDirectory, 'Desktop', 'Atom.lnk')
|
||||
# Check if the desktop shortcut has been previously deleted and
|
||||
# keep it deleted if it was
|
||||
fs.exists desktopShortcutPath, (desktopShortcutExists) ->
|
||||
locations = ['StartMenu']
|
||||
locations.push 'Desktop' if desktopShortcutExists
|
||||
|
||||
createShortcuts locations, callback
|
||||
else
|
||||
createShortcuts ['Desktop', 'StartMenu'], callback
|
||||
|
||||
# Remove the desktop and start menu shortcuts by using the command line API
|
||||
# provided by Squirrel's Update.exe
|
||||
removeShortcuts = (callback) ->
|
||||
spawnUpdate(['--removeShortcut', exeName], callback)
|
||||
|
||||
exports.spawn = spawnUpdate
|
||||
|
||||
# Is the Update.exe installed with Atom?
|
||||
exports.existsSync = ->
|
||||
fs.existsSync(updateDotExe)
|
||||
|
||||
# Restart Atom using the version pointed to by the atom.cmd shim
|
||||
exports.restartAtom = (app) ->
|
||||
if projectPath = global.atomApplication?.lastFocusedWindow?.projectPath
|
||||
args = [projectPath]
|
||||
app.once 'will-quit', -> Spawner.spawn(path.join(binFolder, 'atom.cmd'), args)
|
||||
app.quit()
|
||||
|
||||
updateContextMenus = (callback) ->
|
||||
WinShell.fileContextMenu.update ->
|
||||
WinShell.folderContextMenu.update ->
|
||||
WinShell.folderBackgroundContextMenu.update ->
|
||||
callback()
|
||||
|
||||
# Handle squirrel events denoted by --squirrel-* command line arguments.
|
||||
exports.handleStartupEvent = (app, squirrelCommand) ->
|
||||
switch squirrelCommand
|
||||
when '--squirrel-install'
|
||||
createShortcuts ['Desktop', 'StartMenu'], ->
|
||||
addCommandsToPath ->
|
||||
WinShell.fileHandler.register ->
|
||||
updateContextMenus ->
|
||||
app.quit()
|
||||
true
|
||||
when '--squirrel-updated'
|
||||
updateShortcuts ->
|
||||
addCommandsToPath ->
|
||||
WinShell.fileHandler.update ->
|
||||
updateContextMenus ->
|
||||
app.quit()
|
||||
true
|
||||
when '--squirrel-uninstall'
|
||||
removeShortcuts ->
|
||||
removeCommandsFromPath ->
|
||||
WinShell.fileHandler.deregister ->
|
||||
WinShell.fileContextMenu.deregister ->
|
||||
WinShell.folderContextMenu.deregister ->
|
||||
WinShell.folderBackgroundContextMenu.deregister ->
|
||||
app.quit()
|
||||
true
|
||||
when '--squirrel-obsolete'
|
||||
app.quit()
|
||||
true
|
||||
else
|
||||
false
|
||||
187 src/main-process/squirrel-update.js (new file)
@@ -0,0 +1,187 @@
|
||||
let setxPath
|
||||
const fs = require('fs-plus')
|
||||
const path = require('path')
|
||||
const Spawner = require('./spawner')
|
||||
const WinShell = require('./win-shell')
|
||||
const WinPowerShell = require('./win-powershell')
|
||||
|
||||
const appFolder = path.resolve(process.execPath, '..')
|
||||
const rootAtomFolder = path.resolve(appFolder, '..')
|
||||
const binFolder = path.join(rootAtomFolder, 'bin')
|
||||
const updateDotExe = path.join(rootAtomFolder, 'Update.exe')
|
||||
const exeName = path.basename(process.execPath)
|
||||
|
||||
if (process.env.SystemRoot) {
|
||||
const system32Path = path.join(process.env.SystemRoot, 'System32')
|
||||
setxPath = path.join(system32Path, 'setx.exe')
|
||||
} else {
|
||||
setxPath = 'setx.exe'
|
||||
}
|
||||
|
||||
// Spawn setx.exe and callback when it completes
|
||||
const spawnSetx = (args, callback) => Spawner.spawn(setxPath, args, callback)
|
||||
|
||||
// Spawn the Update.exe with the given arguments and invoke the callback when
|
||||
// the command completes.
|
||||
const spawnUpdate = (args, callback) => Spawner.spawn(updateDotExe, args, callback)
|
||||
|
||||
// Add atom and apm to the PATH
|
||||
//
|
||||
// This is done by adding .cmd shims to the root bin folder in the Atom
|
||||
// install directory that point to the newly installed versions inside
|
||||
// the versioned app directories.
|
||||
const addCommandsToPath = callback => {
|
||||
const installCommands = callback => {
|
||||
const atomCommandPath = path.join(binFolder, 'atom.cmd')
|
||||
const relativeAtomPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'atom.cmd'))
|
||||
const atomCommand = `@echo off\r\n\"%~dp0\\${relativeAtomPath}\" %*`
|
||||
|
||||
const atomShCommandPath = path.join(binFolder, 'atom')
|
||||
const relativeAtomShPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'atom.sh'))
|
||||
const atomShCommand = `#!/bin/sh\r\n\"$(dirname \"$0\")/${relativeAtomShPath.replace(/\\/g, '/')}\" \"$@\"\r\necho`
|
||||
|
||||
const apmCommandPath = path.join(binFolder, 'apm.cmd')
|
||||
const relativeApmPath = path.relative(binFolder, path.join(process.resourcesPath, 'app', 'apm', 'bin', 'apm.cmd'))
|
||||
const apmCommand = `@echo off\r\n\"%~dp0\\${relativeApmPath}\" %*`
|
||||
|
||||
const apmShCommandPath = path.join(binFolder, 'apm')
|
||||
const relativeApmShPath = path.relative(binFolder, path.join(appFolder, 'resources', 'cli', 'apm.sh'))
|
||||
const apmShCommand = `#!/bin/sh\r\n\"$(dirname \"$0\")/${relativeApmShPath.replace(/\\/g, '/')}\" \"$@\"`
|
||||
|
||||
fs.writeFile(atomCommandPath, atomCommand, () =>
|
||||
fs.writeFile(atomShCommandPath, atomShCommand, () =>
|
||||
fs.writeFile(apmCommandPath, apmCommand, () =>
|
||||
fs.writeFile(apmShCommandPath, apmShCommand, () => callback())
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const addBinToPath = (pathSegments, callback) => {
|
||||
pathSegments.push(binFolder)
|
||||
const newPathEnv = pathSegments.join(';')
|
||||
spawnSetx(['Path', newPathEnv], callback)
|
||||
}
|
||||
|
||||
installCommands(error => {
|
||||
if (error) return callback(error)
|
||||
|
||||
WinPowerShell.getPath((error, pathEnv) => {
|
||||
if (error) return callback(error)
|
||||
|
||||
const pathSegments = pathEnv.split(/;+/).filter(pathSegment => pathSegment)
|
||||
if (pathSegments.indexOf(binFolder) === -1) {
|
||||
addBinToPath(pathSegments, callback)
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Remove atom and apm from the PATH
|
||||
const removeCommandsFromPath = callback =>
|
||||
WinPowerShell.getPath((error, pathEnv) => {
|
||||
if (error != null) { return callback(error) }
|
||||
|
||||
const pathSegments = pathEnv.split(/;+/).filter(pathSegment => pathSegment && (pathSegment !== binFolder))
|
||||
const newPathEnv = pathSegments.join(';')
|
||||
|
||||
if (pathEnv !== newPathEnv) {
|
||||
return spawnSetx(['Path', newPathEnv], callback)
|
||||
} else {
|
||||
return callback()
|
||||
}
|
||||
})
|
||||
|
||||
// Create a desktop and start menu shortcut by using the command line API
|
||||
// provided by Squirrel's Update.exe
|
||||
const createShortcuts = (locations, callback) => spawnUpdate(['--createShortcut', exeName, '-l', locations.join(',')], callback)
|
||||
|
||||
// Update the desktop and start menu shortcuts by using the command line API
|
||||
// provided by Squirrel's Update.exe
|
||||
const updateShortcuts = (callback) => {
|
||||
const homeDirectory = fs.getHomeDirectory()
|
||||
if (homeDirectory) {
|
||||
const desktopShortcutPath = path.join(homeDirectory, 'Desktop', 'Atom.lnk')
|
||||
// Check if the desktop shortcut has been previously deleted and
|
||||
// keep it deleted if it was
|
||||
fs.exists(desktopShortcutPath, (desktopShortcutExists) => {
|
||||
const locations = ['StartMenu']
|
||||
if (desktopShortcutExists) { locations.push('Desktop') }
|
||||
|
||||
createShortcuts(locations, callback)
|
||||
})
|
||||
} else {
|
||||
createShortcuts(['Desktop', 'StartMenu'], callback)
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the desktop and start menu shortcuts by using the command line API
|
||||
// provided by Squirrel's Update.exe
|
||||
const removeShortcuts = callback => spawnUpdate(['--removeShortcut', exeName], callback)
|
||||
|
||||
exports.spawn = spawnUpdate
|
||||
|
||||
// Is the Update.exe installed with Atom?
|
||||
exports.existsSync = () => fs.existsSync(updateDotExe)
|
||||
|
||||
// Restart Atom using the version pointed to by the atom.cmd shim
|
||||
exports.restartAtom = (app) => {
|
||||
let args
|
||||
if (global.atomApplication && global.atomApplication.lastFocusedWindow) {
|
||||
const {projectPath} = global.atomApplication.lastFocusedWindow
|
||||
if (projectPath) args = [projectPath]
|
||||
}
|
||||
app.once('will-quit', () => Spawner.spawn(path.join(binFolder, 'atom.cmd'), args))
|
||||
app.quit()
|
||||
}
|
||||
|
||||
const updateContextMenus = callback =>
|
||||
WinShell.fileContextMenu.update(() =>
|
||||
WinShell.folderContextMenu.update(() =>
|
||||
WinShell.folderBackgroundContextMenu.update(() => callback())
|
||||
)
|
||||
)
|
||||
|
||||
// Handle squirrel events denoted by --squirrel-* command line arguments.
|
||||
exports.handleStartupEvent = (app, squirrelCommand) => {
|
||||
switch (squirrelCommand) {
|
||||
case '--squirrel-install':
|
||||
createShortcuts(['Desktop', 'StartMenu'], () =>
|
||||
addCommandsToPath(() =>
|
||||
WinShell.fileHandler.register(() =>
|
||||
updateContextMenus(() => app.quit())
|
||||
)
|
||||
)
|
||||
)
|
||||
return true
|
||||
case '--squirrel-updated':
|
||||
updateShortcuts(() =>
|
||||
addCommandsToPath(() =>
|
||||
WinShell.fileHandler.update(() =>
|
||||
updateContextMenus(() => app.quit())
|
||||
)
|
||||
)
|
||||
)
|
||||
return true
|
||||
case '--squirrel-uninstall':
|
||||
removeShortcuts(() =>
|
||||
removeCommandsFromPath(() =>
|
||||
WinShell.fileHandler.deregister(() =>
|
||||
WinShell.fileContextMenu.deregister(() =>
|
||||
WinShell.folderContextMenu.deregister(() =>
|
||||
WinShell.folderBackgroundContextMenu.deregister(() => app.quit())
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
return true
|
||||
case '--squirrel-obsolete':
|
||||
app.quit()
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
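A sketch of the startup hand-off (the argv position of the --squirrel-* flag is an assumption mirroring handleStartupEventWithSquirrel in start.js):

const {app} = require('electron')
const SquirrelUpdate = require('./squirrel-update')  // assumed relative path

const squirrelCommand = process.argv[1]  // e.g. '--squirrel-install' when launched by Squirrel
if (SquirrelUpdate.handleStartupEvent(app, squirrelCommand)) {
  // A Squirrel lifecycle event was handled; the handlers above quit the app themselves
} else {
  // Normal startup continues
}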
@@ -43,7 +43,6 @@ module.exports = function start (resourcePath, devResourcePath, startTime) {
|
||||
|
||||
atomPaths.setAtomHome(app.getPath('home'))
|
||||
atomPaths.setUserData(app)
|
||||
setupCompileCache()
|
||||
|
||||
const config = getConfig()
|
||||
const colorProfile = config.get('core.colorProfile')
|
||||
@@ -104,12 +103,6 @@ function handleStartupEventWithSquirrel () {
|
||||
return SquirrelUpdate.handleStartupEvent(app, squirrelCommand)
|
||||
}
|
||||
|
||||
function setupCompileCache () {
|
||||
const CompileCache = require('../compile-cache')
|
||||
CompileCache.setAtomHomeDirectory(process.env.ATOM_HOME)
|
||||
CompileCache.install(process.resourcesPath, require)
|
||||
}
|
||||
|
||||
function getConfig () {
|
||||
const config = new Config()
|
||||
|
||||
|
||||
44 src/main-process/win-powershell.js (new file)
@@ -0,0 +1,44 @@
|
||||
let powershellPath
|
||||
const path = require('path')
|
||||
const Spawner = require('./spawner')
|
||||
|
||||
if (process.env.SystemRoot) {
|
||||
const system32Path = path.join(process.env.SystemRoot, 'System32')
|
||||
powershellPath = path.join(system32Path, 'WindowsPowerShell', 'v1.0', 'powershell.exe')
|
||||
} else {
|
||||
powershellPath = 'powershell.exe'
|
||||
}
|
||||
|
||||
// Spawn powershell.exe and callback when it completes
|
||||
const spawnPowershell = function (args, callback) {
|
||||
// Set encoding and execute the command, capture the output, and return it
|
||||
// via .NET's console in order to have consistent UTF-8 encoding.
|
||||
// See http://stackoverflow.com/questions/22349139/utf-8-output-from-powershell
|
||||
// to address https://github.com/atom/atom/issues/5063
|
||||
args[0] = `\
|
||||
[Console]::OutputEncoding=[System.Text.Encoding]::UTF8
|
||||
$output=${args[0]}
|
||||
[Console]::WriteLine($output)\
|
||||
`
|
||||
args.unshift('-command')
|
||||
args.unshift('RemoteSigned')
|
||||
args.unshift('-ExecutionPolicy')
|
||||
args.unshift('-noprofile')
|
||||
Spawner.spawn(powershellPath, args, callback)
|
||||
}
|
||||
|
||||
// Get the user's PATH environment variable registry value.
|
||||
//
|
||||
// * `callback` The {Function} to call after registry operation is done.
|
||||
// It will be invoked with the same arguments provided by {Spawner.spawn}.
|
||||
//
|
||||
// Returns the user's path {String}.
|
||||
exports.getPath = callback =>
|
||||
spawnPowershell(['[environment]::GetEnvironmentVariable(\'Path\',\'User\')'], function (error, stdout) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
const pathOutput = stdout.replace(/^\s+|\s+$/g, '')
|
||||
return callback(null, pathOutput)
|
||||
})
|
||||
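Usage sketch matching how squirrel-update.js consumes getPath above:

const WinPowerShell = require('./win-powershell')  // assumed relative path

WinPowerShell.getPath((error, pathEnv) => {
  if (error) return console.error('could not read the user PATH:', error)
  console.log(pathEnv.split(/;+/).filter(segment => segment))
})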
@@ -1,285 +0,0 @@
|
||||
Module = require 'module'
|
||||
path = require 'path'
|
||||
semver = require 'semver'
|
||||
|
||||
# Extend semver.Range to memoize matched versions for speed
|
||||
class Range extends semver.Range
|
||||
constructor: ->
|
||||
super
|
||||
@matchedVersions = new Set()
|
||||
@unmatchedVersions = new Set()
|
||||
|
||||
test: (version) ->
|
||||
return true if @matchedVersions.has(version)
|
||||
return false if @unmatchedVersions.has(version)
|
||||
|
||||
matches = super
|
||||
if matches
|
||||
@matchedVersions.add(version)
|
||||
else
|
||||
@unmatchedVersions.add(version)
|
||||
matches
|
||||
|
||||
nativeModules = null
|
||||
|
||||
cache =
|
||||
builtins: {}
|
||||
debug: false
|
||||
dependencies: {}
|
||||
extensions: {}
|
||||
folders: {}
|
||||
ranges: {}
|
||||
registered: false
|
||||
resourcePath: null
|
||||
resourcePathWithTrailingSlash: null
|
||||
|
||||
# isAbsolute is inlined from fs-plus so that fs-plus itself can be required
|
||||
# from this cache.
|
||||
if process.platform is 'win32'
|
||||
isAbsolute = (pathToCheck) ->
|
||||
pathToCheck and (pathToCheck[1] is ':' or (pathToCheck[0] is '\\' and pathToCheck[1] is '\\'))
|
||||
else
|
||||
isAbsolute = (pathToCheck) ->
|
||||
pathToCheck and pathToCheck[0] is '/'
|
||||
|
||||
isCorePath = (pathToCheck) ->
|
||||
pathToCheck.startsWith(cache.resourcePathWithTrailingSlash)
|
||||
|
||||
loadDependencies = (modulePath, rootPath, rootMetadata, moduleCache) ->
|
||||
fs = require 'fs-plus'
|
||||
|
||||
for childPath in fs.listSync(path.join(modulePath, 'node_modules'))
|
||||
continue if path.basename(childPath) is '.bin'
|
||||
continue if rootPath is modulePath and rootMetadata.packageDependencies?.hasOwnProperty(path.basename(childPath))
|
||||
|
||||
childMetadataPath = path.join(childPath, 'package.json')
|
||||
continue unless fs.isFileSync(childMetadataPath)
|
||||
|
||||
childMetadata = JSON.parse(fs.readFileSync(childMetadataPath))
|
||||
if childMetadata?.version
|
||||
try
|
||||
mainPath = require.resolve(childPath)
|
||||
catch error
|
||||
mainPath = null
|
||||
|
||||
if mainPath
|
||||
moduleCache.dependencies.push
|
||||
name: childMetadata.name
|
||||
version: childMetadata.version
|
||||
path: path.relative(rootPath, mainPath)
|
||||
|
||||
loadDependencies(childPath, rootPath, rootMetadata, moduleCache)
|
||||
|
||||
return
|
||||
|
||||
loadFolderCompatibility = (modulePath, rootPath, rootMetadata, moduleCache) ->
|
||||
fs = require 'fs-plus'
|
||||
|
||||
metadataPath = path.join(modulePath, 'package.json')
|
||||
return unless fs.isFileSync(metadataPath)
|
||||
|
||||
dependencies = JSON.parse(fs.readFileSync(metadataPath))?.dependencies ? {}
|
||||
|
||||
for name, version of dependencies
|
||||
try
|
||||
new Range(version)
|
||||
catch error
|
||||
delete dependencies[name]
|
||||
|
||||
onDirectory = (childPath) ->
|
||||
path.basename(childPath) isnt 'node_modules'
|
||||
|
||||
extensions = ['.js', '.coffee', '.json', '.node']
|
||||
paths = {}
|
||||
onFile = (childPath) ->
|
||||
if path.extname(childPath) in extensions
|
||||
relativePath = path.relative(rootPath, path.dirname(childPath))
|
||||
paths[relativePath] = true
|
||||
fs.traverseTreeSync(modulePath, onFile, onDirectory)
|
||||
|
||||
paths = Object.keys(paths)
|
||||
if paths.length > 0 and Object.keys(dependencies).length > 0
|
||||
moduleCache.folders.push({paths, dependencies})
|
||||
|
||||
for childPath in fs.listSync(path.join(modulePath, 'node_modules'))
|
||||
continue if path.basename(childPath) is '.bin'
|
||||
continue if rootPath is modulePath and rootMetadata.packageDependencies?.hasOwnProperty(path.basename(childPath))
|
||||
|
||||
loadFolderCompatibility(childPath, rootPath, rootMetadata, moduleCache)
|
||||
|
||||
return
|
||||
|
||||
loadExtensions = (modulePath, rootPath, rootMetadata, moduleCache) ->
|
||||
fs = require 'fs-plus'
|
||||
extensions = ['.js', '.coffee', '.json', '.node']
|
||||
nodeModulesPath = path.join(rootPath, 'node_modules')
|
||||
|
||||
onFile = (filePath) ->
|
||||
filePath = path.relative(rootPath, filePath)
|
||||
segments = filePath.split(path.sep)
|
||||
return if 'test' in segments
|
||||
return if 'tests' in segments
|
||||
return if 'spec' in segments
|
||||
return if 'specs' in segments
|
||||
return if segments.length > 1 and not (segments[0] in ['exports', 'lib', 'node_modules', 'src', 'static', 'vendor'])
|
||||
|
||||
extension = path.extname(filePath)
|
||||
if extension in extensions
|
||||
moduleCache.extensions[extension] ?= []
|
||||
moduleCache.extensions[extension].push(filePath)
|
||||
|
||||
onDirectory = (childPath) ->
|
||||
# Don't include extensions from bundled packages
|
||||
# These are generated and stored in the package's own metadata cache
|
||||
if rootMetadata.name is 'atom'
|
||||
parentPath = path.dirname(childPath)
|
||||
if parentPath is nodeModulesPath
|
||||
packageName = path.basename(childPath)
|
||||
return false if rootMetadata.packageDependencies?.hasOwnProperty(packageName)
|
||||
|
||||
true
|
||||
|
||||
fs.traverseTreeSync(rootPath, onFile, onDirectory)
|
||||
|
||||
return
|
||||
|
||||
satisfies = (version, rawRange) ->
|
||||
unless parsedRange = cache.ranges[rawRange]
|
||||
parsedRange = new Range(rawRange)
|
||||
cache.ranges[rawRange] = parsedRange
|
||||
parsedRange.test(version)
|
||||
|
||||
resolveFilePath = (relativePath, parentModule) ->
|
||||
return unless relativePath
|
||||
return unless parentModule?.filename
|
||||
return unless relativePath[0] is '.' or isAbsolute(relativePath)
|
||||
|
||||
resolvedPath = path.resolve(path.dirname(parentModule.filename), relativePath)
|
||||
return unless isCorePath(resolvedPath)
|
||||
|
||||
extension = path.extname(resolvedPath)
|
||||
if extension
|
||||
return resolvedPath if cache.extensions[extension]?.has(resolvedPath)
|
||||
else
|
||||
for extension, paths of cache.extensions
|
||||
resolvedPathWithExtension = "#{resolvedPath}#{extension}"
|
||||
return resolvedPathWithExtension if paths.has(resolvedPathWithExtension)
|
||||
|
||||
return
|
||||
|
||||
resolveModulePath = (relativePath, parentModule) ->
|
||||
return unless relativePath
|
||||
return unless parentModule?.filename
|
||||
|
||||
nativeModules ?= process.binding('natives')
|
||||
return if nativeModules.hasOwnProperty(relativePath)
|
||||
return if relativePath[0] is '.'
|
||||
return if isAbsolute(relativePath)
|
||||
|
||||
folderPath = path.dirname(parentModule.filename)
|
||||
|
||||
range = cache.folders[folderPath]?[relativePath]
|
||||
unless range?
|
||||
if builtinPath = cache.builtins[relativePath]
|
||||
return builtinPath
|
||||
else
|
||||
return
|
||||
|
||||
candidates = cache.dependencies[relativePath]
|
||||
return unless candidates?
|
||||
|
||||
for version, resolvedPath of candidates
|
||||
if Module._cache[resolvedPath] or isCorePath(resolvedPath)
|
||||
return resolvedPath if satisfies(version, range)
|
||||
|
||||
return
|
||||
|
||||
registerBuiltins = (devMode) ->
|
||||
if devMode or not cache.resourcePath.startsWith("#{process.resourcesPath}#{path.sep}")
|
||||
fs = require 'fs-plus'
|
||||
atomJsPath = path.join(cache.resourcePath, 'exports', 'atom.js')
|
||||
cache.builtins.atom = atomJsPath if fs.isFileSync(atomJsPath)
|
||||
cache.builtins.atom ?= path.join(cache.resourcePath, 'exports', 'atom.js')
|
||||
|
||||
electronAsarRoot = path.join(process.resourcesPath, 'electron.asar')
|
||||
|
||||
commonRoot = path.join(electronAsarRoot, 'common', 'api')
|
||||
commonBuiltins = ['callbacks-registry', 'clipboard', 'crash-reporter', 'shell']
|
||||
for builtin in commonBuiltins
|
||||
cache.builtins[builtin] = path.join(commonRoot, "#{builtin}.js")
|
||||
|
||||
rendererRoot = path.join(electronAsarRoot, 'renderer', 'api')
|
||||
rendererBuiltins = ['ipc-renderer', 'remote', 'screen']
|
||||
for builtin in rendererBuiltins
|
||||
cache.builtins[builtin] = path.join(rendererRoot, "#{builtin}.js")
|
||||
|
||||
exports.create = (modulePath) ->
|
||||
fs = require 'fs-plus'
|
||||
|
||||
modulePath = fs.realpathSync(modulePath)
|
||||
metadataPath = path.join(modulePath, 'package.json')
|
||||
metadata = JSON.parse(fs.readFileSync(metadataPath))
|
||||
|
||||
moduleCache =
|
||||
version: 1
|
||||
dependencies: []
|
||||
extensions: {}
|
||||
folders: []
|
||||
|
||||
loadDependencies(modulePath, modulePath, metadata, moduleCache)
|
||||
loadFolderCompatibility(modulePath, modulePath, metadata, moduleCache)
|
||||
loadExtensions(modulePath, modulePath, metadata, moduleCache)
|
||||
|
||||
metadata._atomModuleCache = moduleCache
|
||||
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2))
|
||||
|
||||
return
|
||||
|
||||
exports.register = ({resourcePath, devMode}={}) ->
|
||||
return if cache.registered
|
||||
|
||||
originalResolveFilename = Module._resolveFilename
|
||||
Module._resolveFilename = (relativePath, parentModule) ->
|
||||
resolvedPath = resolveModulePath(relativePath, parentModule)
|
||||
resolvedPath ?= resolveFilePath(relativePath, parentModule)
|
||||
resolvedPath ? originalResolveFilename(relativePath, parentModule)
|
||||
|
||||
cache.registered = true
|
||||
cache.resourcePath = resourcePath
|
||||
cache.resourcePathWithTrailingSlash = "#{resourcePath}#{path.sep}"
|
||||
registerBuiltins(devMode)
|
||||
|
||||
return
|
||||
|
||||
exports.add = (directoryPath, metadata) ->
|
||||
# path.join isn't used in this function for speed since path.join calls
|
||||
# path.normalize and all the paths are already normalized here.
|
||||
|
||||
unless metadata?
|
||||
try
|
||||
metadata = require("#{directoryPath}#{path.sep}package.json")
|
||||
catch error
|
||||
return
|
||||
|
||||
cacheToAdd = metadata?._atomModuleCache
|
||||
return unless cacheToAdd?
|
||||
|
||||
for dependency in cacheToAdd.dependencies ? []
|
||||
cache.dependencies[dependency.name] ?= {}
|
||||
cache.dependencies[dependency.name][dependency.version] ?= "#{directoryPath}#{path.sep}#{dependency.path}"
|
||||
|
||||
for entry in cacheToAdd.folders ? []
|
||||
for folderPath in entry.paths
|
||||
if folderPath
|
||||
cache.folders["#{directoryPath}#{path.sep}#{folderPath}"] = entry.dependencies
|
||||
else
|
||||
cache.folders[directoryPath] = entry.dependencies
|
||||
|
||||
for extension, paths of cacheToAdd.extensions
|
||||
cache.extensions[extension] ?= new Set()
|
||||
for filePath in paths
|
||||
cache.extensions[extension].add("#{directoryPath}#{path.sep}#{filePath}")
|
||||
|
||||
return
|
||||
|
||||
exports.cache = cache
|
||||
339 src/module-cache.js (new file)
@@ -0,0 +1,339 @@
|
||||
const Module = require('module')
const path = require('path')
const semver = require('semver')

// Extend semver.Range to memoize matched versions for speed
class Range extends semver.Range {
  constructor () {
    super(...arguments)
    this.matchedVersions = new Set()
    this.unmatchedVersions = new Set()
  }

  test (version) {
    if (this.matchedVersions.has(version)) return true
    if (this.unmatchedVersions.has(version)) return false

    const matches = super.test(...arguments)
    if (matches) {
      this.matchedVersions.add(version)
    } else {
      this.unmatchedVersions.add(version)
    }
    return matches
  }
}
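
// Illustration only (hypothetical usage, not taken from the diff): after the
// first lookup, repeated test() calls for the same version string are answered
// from the memo sets instead of re-running semver's comparator logic.
//
//   const range = new Range('^1.2.0')
//   range.test('1.4.0')  // evaluated by semver.Range, result remembered
//   range.test('1.4.0')  // answered from matchedVersions
//   range.test('0.9.0')  // remembered in unmatchedVersions the same way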

let nativeModules = null

const cache = {
  builtins: {},
  debug: false,
  dependencies: {},
  extensions: {},
  folders: {},
  ranges: {},
  registered: false,
  resourcePath: null,
  resourcePathWithTrailingSlash: null
}

// isAbsolute is inlined from fs-plus so that fs-plus itself can be required
// from this cache.
let isAbsolute
if (process.platform === 'win32') {
  isAbsolute = pathToCheck => pathToCheck && ((pathToCheck[1] === ':') || ((pathToCheck[0] === '\\') && (pathToCheck[1] === '\\')))
} else {
  isAbsolute = pathToCheck => pathToCheck && (pathToCheck[0] === '/')
}

const isCorePath = pathToCheck => pathToCheck.startsWith(cache.resourcePathWithTrailingSlash)

function loadDependencies (modulePath, rootPath, rootMetadata, moduleCache) {
  const fs = require('fs-plus')

  for (let childPath of fs.listSync(path.join(modulePath, 'node_modules'))) {
    if (path.basename(childPath) === '.bin') continue
    if (rootPath === modulePath && (rootMetadata.packageDependencies && rootMetadata.packageDependencies.hasOwnProperty(path.basename(childPath)))) {
      continue
    }

    const childMetadataPath = path.join(childPath, 'package.json')
    if (!fs.isFileSync(childMetadataPath)) continue

    const childMetadata = JSON.parse(fs.readFileSync(childMetadataPath))
    if (childMetadata && childMetadata.version) {
      var mainPath
      try {
        mainPath = require.resolve(childPath)
      } catch (error) {
        mainPath = null
      }

      if (mainPath) {
        moduleCache.dependencies.push({
          name: childMetadata.name,
          version: childMetadata.version,
          path: path.relative(rootPath, mainPath)
        })
      }

      loadDependencies(childPath, rootPath, rootMetadata, moduleCache)
    }
  }
}

function loadFolderCompatibility (modulePath, rootPath, rootMetadata, moduleCache) {
  const fs = require('fs-plus')

  const metadataPath = path.join(modulePath, 'package.json')
  if (!fs.isFileSync(metadataPath)) return

  const metadata = JSON.parse(fs.readFileSync(metadataPath))
  const dependencies = metadata.dependencies || {}

  for (let name in dependencies) {
    if (!semver.validRange(dependencies[name])) {
      delete dependencies[name]
    }
  }

  const onDirectory = childPath => path.basename(childPath) !== 'node_modules'

  const extensions = ['.js', '.coffee', '.json', '.node']
  let paths = {}
  function onFile (childPath) {
    const needle = path.extname(childPath)
    if (extensions.includes(needle)) {
      const relativePath = path.relative(rootPath, path.dirname(childPath))
      paths[relativePath] = true
    }
  }
  fs.traverseTreeSync(modulePath, onFile, onDirectory)

  paths = Object.keys(paths)
  if (paths.length > 0 && Object.keys(dependencies).length > 0) {
    moduleCache.folders.push({paths, dependencies})
  }

  for (let childPath of fs.listSync(path.join(modulePath, 'node_modules'))) {
    if (path.basename(childPath) === '.bin') continue
    if (rootPath === modulePath && (rootMetadata.packageDependencies && rootMetadata.packageDependencies.hasOwnProperty(path.basename(childPath)))) {
      continue
    }
    loadFolderCompatibility(childPath, rootPath, rootMetadata, moduleCache)
  }
}

function loadExtensions (modulePath, rootPath, rootMetadata, moduleCache) {
  const fs = require('fs-plus')
  const extensions = ['.js', '.coffee', '.json', '.node']
  const nodeModulesPath = path.join(rootPath, 'node_modules')

  function onFile (filePath) {
    filePath = path.relative(rootPath, filePath)
    const segments = filePath.split(path.sep)
    if (segments.includes('test')) return
    if (segments.includes('tests')) return
    if (segments.includes('spec')) return
    if (segments.includes('specs')) return
    if (segments.length > 1 && !['exports', 'lib', 'node_modules', 'src', 'static', 'vendor'].includes(segments[0])) return

    const extension = path.extname(filePath)
    if (extensions.includes(extension)) {
      if (moduleCache.extensions[extension] == null) { moduleCache.extensions[extension] = [] }
      moduleCache.extensions[extension].push(filePath)
    }
  }

  function onDirectory (childPath) {
    // Don't include extensions from bundled packages
    // These are generated and stored in the package's own metadata cache
    if (rootMetadata.name === 'atom') {
      const parentPath = path.dirname(childPath)
      if (parentPath === nodeModulesPath) {
        const packageName = path.basename(childPath)
        if (rootMetadata.packageDependencies && rootMetadata.packageDependencies.hasOwnProperty(packageName)) return false
      }
    }

    return true
  }

  fs.traverseTreeSync(rootPath, onFile, onDirectory)
}

function satisfies (version, rawRange) {
  let parsedRange
  if (!(parsedRange = cache.ranges[rawRange])) {
    parsedRange = new Range(rawRange)
    cache.ranges[rawRange] = parsedRange
  }
  return parsedRange.test(version)
}

function resolveFilePath (relativePath, parentModule) {
  if (!relativePath) return
  if (!(parentModule && parentModule.filename)) return
  if (relativePath[0] !== '.' && !isAbsolute(relativePath)) return

  const resolvedPath = path.resolve(path.dirname(parentModule.filename), relativePath)
  if (!isCorePath(resolvedPath)) return

  let extension = path.extname(resolvedPath)
  if (extension) {
    if (cache.extensions[extension] && cache.extensions[extension].has(resolvedPath)) return resolvedPath
  } else {
    for (extension in cache.extensions) {
      const paths = cache.extensions[extension]
      const resolvedPathWithExtension = `${resolvedPath}${extension}`
      if (paths.has(resolvedPathWithExtension)) {
        return resolvedPathWithExtension
      }
    }
  }
}

function resolveModulePath (relativePath, parentModule) {
  if (!relativePath) return
  if (!(parentModule && parentModule.filename)) return

  if (!nativeModules) nativeModules = process.binding('natives')
  if (nativeModules.hasOwnProperty(relativePath)) return
  if (relativePath[0] === '.') return
  if (isAbsolute(relativePath)) return

  const folderPath = path.dirname(parentModule.filename)

  const range = cache.folders[folderPath] && cache.folders[folderPath][relativePath]
  if (!range) {
    const builtinPath = cache.builtins[relativePath]
    if (builtinPath) {
      return builtinPath
    } else {
      return
    }
  }

  const candidates = cache.dependencies[relativePath]
  if (candidates == null) return

  for (let version in candidates) {
    const resolvedPath = candidates[version]
    if (Module._cache[resolvedPath] || isCorePath(resolvedPath)) {
      if (satisfies(version, range)) return resolvedPath
    }
  }
}

function registerBuiltins (devMode) {
  if (devMode || !cache.resourcePath.startsWith(`${process.resourcesPath}${path.sep}`)) {
    const fs = require('fs-plus')
    const atomJsPath = path.join(cache.resourcePath, 'exports', 'atom.js')
    if (fs.isFileSync(atomJsPath)) { cache.builtins.atom = atomJsPath }
  }
  if (cache.builtins.atom == null) { cache.builtins.atom = path.join(cache.resourcePath, 'exports', 'atom.js') }

  const electronAsarRoot = path.join(process.resourcesPath, 'electron.asar')

  const commonRoot = path.join(electronAsarRoot, 'common', 'api')
  const commonBuiltins = ['callbacks-registry', 'clipboard', 'crash-reporter', 'shell']
  for (const builtin of commonBuiltins) {
    cache.builtins[builtin] = path.join(commonRoot, `${builtin}.js`)
  }

  const rendererRoot = path.join(electronAsarRoot, 'renderer', 'api')
  const rendererBuiltins = ['ipc-renderer', 'remote', 'screen']
  for (const builtin of rendererBuiltins) {
    cache.builtins[builtin] = path.join(rendererRoot, `${builtin}.js`)
  }
}

exports.create = function (modulePath) {
  const fs = require('fs-plus')

  modulePath = fs.realpathSync(modulePath)
  const metadataPath = path.join(modulePath, 'package.json')
  const metadata = JSON.parse(fs.readFileSync(metadataPath))

  const moduleCache = {
    version: 1,
    dependencies: [],
    extensions: {},
    folders: []
  }

  loadDependencies(modulePath, modulePath, metadata, moduleCache)
  loadFolderCompatibility(modulePath, modulePath, metadata, moduleCache)
  loadExtensions(modulePath, modulePath, metadata, moduleCache)

  metadata._atomModuleCache = moduleCache
  fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2))
}

exports.register = function ({resourcePath, devMode} = {}) {
  if (cache.registered) return

  const originalResolveFilename = Module._resolveFilename
  Module._resolveFilename = function (relativePath, parentModule) {
    let resolvedPath = resolveModulePath(relativePath, parentModule)
    if (!resolvedPath) {
      resolvedPath = resolveFilePath(relativePath, parentModule)
    }
    return resolvedPath || originalResolveFilename(relativePath, parentModule)
  }

  cache.registered = true
  cache.resourcePath = resourcePath
  cache.resourcePathWithTrailingSlash = `${resourcePath}${path.sep}`
  registerBuiltins(devMode)
}

exports.add = function (directoryPath, metadata) {
  // path.join isn't used in this function for speed since path.join calls
  // path.normalize and all the paths are already normalized here.

  if (metadata == null) {
    try {
      metadata = require(`${directoryPath}${path.sep}package.json`)
    } catch (error) {
      return
    }
  }

  const cacheToAdd = metadata && metadata._atomModuleCache
  if (!cacheToAdd) return

  for (const dependency of cacheToAdd.dependencies || []) {
    if (!cache.dependencies[dependency.name]) {
      cache.dependencies[dependency.name] = {}
    }
    if (!cache.dependencies[dependency.name][dependency.version]) {
      cache.dependencies[dependency.name][dependency.version] = `${directoryPath}${path.sep}${dependency.path}`
    }
  }

  for (const entry of cacheToAdd.folders || []) {
    for (const folderPath of entry.paths) {
      if (folderPath) {
        cache.folders[`${directoryPath}${path.sep}${folderPath}`] = entry.dependencies
      } else {
        cache.folders[directoryPath] = entry.dependencies
      }
    }
  }

  for (const extension in cacheToAdd.extensions) {
    const paths = cacheToAdd.extensions[extension]
    if (!cache.extensions[extension]) {
      cache.extensions[extension] = new Set()
    }
    for (let filePath of paths) {
      cache.extensions[extension].add(`${directoryPath}${path.sep}${filePath}`)
    }
  }
}

exports.cache = cache

exports.Range = Range
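
The rewritten module keeps the same three entry points as the CoffeeScript version: create() is run at build time to stamp a package's _atomModuleCache into its package.json, while register() and add() wire those caches into Node's resolver at startup. A minimal usage sketch, with hypothetical paths:

const ModuleCache = require('./module-cache')

// Build time: walk the package tree and write metadata._atomModuleCache
// back into /path/to/my-package/package.json.
ModuleCache.create('/path/to/my-package')

// Startup: install the Module._resolveFilename hook, then feed it each
// package's cached metadata (add() re-reads package.json when no metadata
// object is passed).
ModuleCache.register({resourcePath: '/path/to/atom/resources/app', devMode: false})
ModuleCache.add('/path/to/my-package')
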
@@ -9,7 +9,7 @@ const CSON = require('season')
const ServiceHub = require('service-hub')
const Package = require('./package')
const ThemePackage = require('./theme-package')
const {isDeprecatedPackage, getDeprecatedPackageMetadata} = require('./deprecated-packages')
const ModuleCache = require('./module-cache')
const packageJSON = require('../package.json')

// Extended: Package manager for coordinating the lifecycle of Atom packages.
@@ -42,6 +42,8 @@ module.exports = class PackageManager {
    this.triggeredActivationHooks = new Set()
    this.packagesCache = packageJSON._atomPackages != null ? packageJSON._atomPackages : {}
    this.packageDependencies = packageJSON.packageDependencies != null ? packageJSON.packageDependencies : {}
    this.deprecatedPackages = packageJSON._deprecatedPackages || {}
    this.deprecatedPackageRanges = {}
    this.initialPackagesLoaded = false
    this.initialPackagesActivated = false
    this.preloadedPackages = {}
@@ -220,11 +222,26 @@ module.exports = class PackageManager {
  }

  isDeprecatedPackage (name, version) {
    return isDeprecatedPackage(name, version)
    const metadata = this.deprecatedPackages[name]
    if (!metadata) return false
    if (!metadata.version) return true

    let range = this.deprecatedPackageRanges[metadata.version]
    if (!range) {
      try {
        range = new ModuleCache.Range(metadata.version)
      } catch (error) {
        range = NullVersionRange
      }
      this.deprecatedPackageRanges[metadata.version] = range
    }
    return range.test(version)
  }

  getDeprecatedPackageMetadata (name) {
    return getDeprecatedPackageMetadata(name)
    const metadata = this.deprecatedPackages[name]
    if (metadata) Object.freeze(metadata)
    return metadata
  }

  /*
@@ -871,3 +888,7 @@ module.exports = class PackageManager {
    }
  }
}

const NullVersionRange = {
  test () { return false }
}
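
PackageManager now answers isDeprecatedPackage() from the _deprecatedPackages table in package.json instead of delegating to ./deprecated-packages. A rough sketch of how the inlined check behaves, using made-up entries: an entry with no version is deprecated at every version, an entry whose version is not a valid semver range falls back to NullVersionRange and never matches, and a valid range is parsed once into a memoized ModuleCache.Range.

// Hypothetical data; atom.packages is the live PackageManager instance.
atom.packages.deprecatedPackages = {
  'legacy-pkg': {version: '<2.0.0'},
  'old-theme': {}
}
atom.packages.isDeprecatedPackage('legacy-pkg', '1.5.0')   // true, '<2.0.0' matches
atom.packages.isDeprecatedPackage('legacy-pkg', '2.1.0')   // false
atom.packages.isDeprecatedPackage('old-theme', '9.9.9')    // true, no version means always deprecated
atom.packages.isDeprecatedPackage('missing-pkg', '1.0.0')  // false, no entry at all
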
@@ -7,7 +7,6 @@ const dedent = require('dedent')

const CompileCache = require('./compile-cache')
const ModuleCache = require('./module-cache')
const ScopedProperties = require('./scoped-properties')
const BufferedProcess = require('./buffered-process')

// Extended: Loads and activates a package's main module and resources such as
@@ -103,7 +102,7 @@ class Package {
      this.activateKeymaps()
      this.activateMenus()
      for (let settings of this.settings) {
        settings.activate()
        settings.activate(this.config)
      }
      this.settingsActivated = true
    }
@@ -318,7 +317,7 @@ class Package {

    if (!this.settingsActivated) {
      for (let settings of this.settings) {
        settings.activate()
        settings.activate(this.config)
      }
      this.settingsActivated = true
    }
@@ -636,14 +635,14 @@ class Package {
    this.settings = []

    const loadSettingsFile = (settingsPath, callback) => {
      return ScopedProperties.load(settingsPath, this.config, (error, settings) => {
      return SettingsFile.load(settingsPath, (error, settingsFile) => {
        if (error) {
          const detail = `${error.message} in ${settingsPath}`
          const stack = `${error.stack}\n  at ${settingsPath}:1:1`
          this.notificationManager.addFatalError(`Failed to load the ${this.name} package settings`, {stack, detail, packageName: this.name, dismissable: true})
        } else {
          this.settings.push(settings)
          if (this.settingsActivated) { settings.activate() }
          this.settings.push(settingsFile)
          if (this.settingsActivated) settingsFile.activate(this.config)
        }
        return callback()
      })
@@ -652,10 +651,10 @@ class Package {
    return new Promise(resolve => {
      if (this.preloadedPackage && this.packageManager.packagesCache[this.name]) {
        for (let settingsPath in this.packageManager.packagesCache[this.name].settings) {
          const scopedProperties = this.packageManager.packagesCache[this.name].settings[settingsPath]
          const settings = new ScopedProperties(`core:${settingsPath}`, scopedProperties || {}, this.config)
          this.settings.push(settings)
          if (this.settingsActivated) { settings.activate() }
          const properties = this.packageManager.packagesCache[this.name].settings[settingsPath]
          const settingsFile = new SettingsFile(`core:${settingsPath}`, properties || {})
          this.settings.push(settingsFile)
          if (this.settingsActivated) settingsFile.activate(this.config)
        }
        return resolve()
      } else {
@@ -727,7 +726,7 @@ class Package {
      grammar.deactivate()
    }
    for (let settings of this.settings) {
      settings.deactivate()
      settings.deactivate(this.config)
    }

    if (this.stylesheetDisposables) this.stylesheetDisposables.dispose()
@@ -1105,3 +1104,32 @@ class Package {
    })
  }
}

class SettingsFile {
  static load (path, callback) {
    CSON.readFile(path, (error, properties = {}) => {
      if (error) {
        callback(error)
      } else {
        callback(null, new SettingsFile(path, properties))
      }
    })
  }

  constructor (path, properties) {
    this.path = path
    this.properties = properties
  }

  activate (config) {
    for (let selector in this.properties) {
      config.set(null, this.properties[selector], {scopeSelector: selector, source: this.path})
    }
  }

  deactivate (config) {
    for (let selector in this.properties) {
      config.unset(null, {scopeSelector: selector, source: this.path})
    }
  }
}
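
ScopedProperties is replaced by the file-private SettingsFile class above, which holds the parsed CSON properties and applies or removes them through whatever config it is handed. A small sketch of the round trip, assuming a hypothetical settings file and the global atom.config:

// settings/language-foo.cson (hypothetical) might contain:
//   '.source.foo':
//     'editor':
//       'commentStart': '# '
SettingsFile.load('/path/to/settings/language-foo.cson', (error, settingsFile) => {
  if (error) return console.error(error)
  settingsFile.activate(atom.config)    // config.set(null, props, {scopeSelector: '.source.foo', source: settingsFile.path})
  // ...later, on package deactivation:
  settingsFile.deactivate(atom.config)  // config.unset(null, {scopeSelector: '.source.foo', source: settingsFile.path})
})
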
@@ -1,22 +0,0 @@
CSON = require 'season'

module.exports =
class ScopedProperties
  @load: (scopedPropertiesPath, config, callback) ->
    CSON.readFile scopedPropertiesPath, (error, scopedProperties={}) ->
      if error?
        callback(error)
      else
        callback(null, new ScopedProperties(scopedPropertiesPath, scopedProperties, config))

  constructor: (@path, @scopedProperties, @config) ->

  activate: ->
    for selector, properties of @scopedProperties
      @config.set(null, properties, scopeSelector: selector, source: @path)
    return

  deactivate: ->
    for selector of @scopedProperties
      @config.unset(null, scopeSelector: selector, source: @path)
    return