#!/usr/bin/env ruby
require_relative '../lib/color'

# Disallow sudo
abort 'Chromebrew should not be run as root.'.lightred if Process.uid.zero?

require 'uri'
require 'digest/sha2'
require 'json'
require 'fileutils'
require 'tmpdir'
require_relative '../lib/const'
require_relative '../lib/util'
require_relative '../lib/convert_size'
require_relative '../lib/downloader'
require_relative '../lib/deb_utils'
require_relative '../lib/package'

# Add lib to LOAD_PATH
$LOAD_PATH.unshift "#{CREW_LIB_PATH}lib"

DOC = <<~DOCOPT
  Chromebrew - Package manager for Chrome OS https://chromebrew.github.io

  Usage:
    crew autoremove [options]
    crew build [options] [-k|--keep] <name> ...
    crew const [options] [<name> ...]
    crew deps [options] [-t|--tree] [-b|--include-build-deps] [--exclude-buildessential] <name> ...
    crew download [options] <name> ...
    crew files [options] <name> ...
    crew help [<command>]
    crew install [options] [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] <name> ...
    crew list [options] (available|installed|compatible|incompatible)
    crew postinstall [options] <name> ...
    crew reinstall [options] [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] <name> ...
    crew remove [options] <name> ...
    crew search [options] [<name> ...]
    crew sysinfo [options]
    crew update [options] [<compatible>]
    crew upgrade [options] [-k|--keep] [-s|--build-from-source] [<name> ...]
    crew whatprovides [options] <pattern> ...

  -b --include-build-deps    Include build dependencies in output.
  -t --tree                  Print dependencies in a tree-structure format.
  -c --color                 Use colors even if standard out is not a tty.
  -d --no-color              Disable colors even if standard out is a tty.
  -k --keep                  Keep the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory.
  -L --license               Display the crew license.
  -s --build-from-source     Build from source even if pre-compiled binary exists.
  -S --recursive-build       Build from source, including all dependencies, even if pre-compiled binaries exist.
  -v --verbose               Show extra information.
  -V --version               Display the crew version.
  -h --help                  Show this screen.

  version #{CREW_VERSION}
DOCOPT
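
# Illustrative example (not used by the code): with the usage patterns above,
# an invocation such as `crew install -s vim` is expected to parse into a hash
# along the lines of { 'install' => true, '--build-from-source' => true,
# '<name>' => ['vim'] }; the exact keys come from lib/docopt.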

CREW_LICENSE = <<~LICENSESTRING
  Copyright (C) 2013-2022 Chromebrew Authors

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.html.

  Chromebrew embeds lib/docopt.rb from another project under the MIT License.
  You should have received a copy of the license along with this program.
  If not, see https://github.com/docopt/docopt.rb/blob/master/LICENSE
LICENSESTRING

# All available crew commands.
@cmds = %w[
  autoremove
  build
  const
  deps
  download
  files
  help
  install
  list
  postinstall
  reinstall
  remove
  search
  sysinfo
  update
  upgrade
  whatprovides
]

# Parse arguments using docopt
require_relative '../lib/docopt'
begin
  args = Docopt.docopt(DOC)
  args['<name>'] = args['<name>'].map { |arg| arg.gsub('-', '_') } if args['<name>']
rescue Docopt::Exit => e
  if ARGV[0]
    case ARGV[0]
    when '-V', '--version'
      puts CREW_VERSION
      exit 0
    when '-L', '--license'
      puts CREW_LICENSE
      exit 0
    end
    if (ARGV[0] != '-h') && (ARGV[0] != '--help')
      puts "Could not understand \"crew #{ARGV.join(' ')}\".".lightred
      # Looking for similar commands
      unless @cmds.include?(ARGV[0])
        similar = @cmds.select { |word| edit_distance(ARGV[0], word) < 4 }
        unless similar.empty?
          puts 'Did you mean?'
          similar.each { |sug| puts " #{sug}" }
        end
      end
    end
  end
  puts e.message
  exit 1
end
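
# The remainder of this script assumes args looks roughly like the following
# (shape inferred from the DOC string above, not from docopt internals):
#   args['--verbose']  #=> true or false
#   args['<name>']     #=> e.g. ['openssh'], with '-' already mapped to '_'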

String.use_color = args['--color'] || !args['--no-color']
@opt_keep = args['--keep']
@opt_verbose = args['--verbose']

if @opt_verbose
  @fileutils_verbose = true
  @verbose = 'v'
  @short_verbose = '-v'
else
  @fileutils_verbose = false
  @verbose = ''
  @short_verbose = ''
end

@opt_src = args['--build-from-source']
@opt_recursive = args['--recursive-build']

@device = JSON.parse(File.read("#{CREW_CONFIG_PATH}device.json"), symbolize_names: true)
# Also symbolize the values where possible.
@device.each do |key, _elem|
  @device[key] = begin
    @device[key].to_sym
  rescue StandardError
    @device[key]
  end
end
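
# For reference, device.json is assumed to look roughly like this (field names
# inferred from the accesses elsewhere in this script):
#   {
#     "architecture": "x86_64",
#     "installed_packages": [{ "name": "vim", "version": "9.0.0" }],
#     "compatible_packages": [{ "name": "vim" }]
#   }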

def print_package(pkgPath, extra = false)
  pkgName = File.basename pkgPath, '.rb'
  begin
    set_package pkgName, pkgPath
  rescue StandardError => e
    puts "Error with #{pkgName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
  end
  print_current_package extra
end

def print_current_package(extra = false)
  status = ''
  status = 'installed' if @device[:installed_packages].any? { |elem| elem[:name] == @pkg.name }
  status = 'incompatible' unless @device[:compatible_packages].any? { |elem| elem[:name] == @pkg.name }
  case status
  when 'installed'
    print @pkg.name.lightgreen
  when 'incompatible'
    print @pkg.name.lightred
  else
    print @pkg.name.lightblue
  end
  print ": #{@pkg.description}".lightblue if @pkg.description
  if extra
    puts ''
    puts @pkg.homepage if @pkg.homepage
    puts "Version: #{@pkg.version}"
    print "License: #{@pkg.license}" if @pkg.license
  end
  puts ''
end
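
# Example output of print_current_package (colors omitted; hypothetical package):
#   vim: Vim is a highly configurable text editor
# With extra = true, homepage, version and license lines are also printed.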

def set_package(pkgName, pkgPath)
  begin
    @pkg = Package.load_package(pkgPath, pkgName)
  rescue SyntaxError => e
    warn "#{e.class}: #{e.message}".red
  end

  @pkg.build_from_source = true if @opt_recursive
end

def list_packages
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |filename|
    print_package filename
  end
end

def list_available
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |filename|
    notInstalled = true
    pkgName = File.basename filename, '.rb'
    notInstalled = false if File.exist? "#{CREW_META_PATH}#{pkgName}.filelist"
    next unless notInstalled

    begin
      set_package pkgName, filename
    rescue StandardError => e
      puts "Error with #{pkgName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
    end
    puts pkgName if (@pkg.compatibility&.include? 'all') || (@pkg.compatibility&.include? ARCH)
  end
end

def list_installed
  if @opt_verbose
    @installed_packages = []
    @device[:installed_packages].each do |package|
      search package[:name], true
      @installed_packages.append("#{package[:name]} #{package[:version]}")
    end
    @sorted_installed_packages = @installed_packages.sort
    @sorted_installed_packages.unshift('======= =======')
    @sorted_installed_packages.unshift('Package Version')
    @first_col_width = @sorted_installed_packages.map(&:split).map(&:first).max_by(&:size).size + 2
    @sorted_installed_packages.map(&:strip).each do |line|
      puts "%-#{@first_col_width}s%s".lightgreen % line.split
    end
    puts
  else
    Dir["#{CREW_META_PATH}*.directorylist"].map do |f|
      File.basename(f, '.directorylist').lightgreen
    end
  end
end
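
# Formatting sketch for the verbose branch above, assuming @first_col_width is 10:
#   format("%-10s%s", 'vim', '9.0.0') #=> "vim       9.0.0"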

def list_compatible(compat = true)
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |filename|
    pkgName = File.basename filename, '.rb'
    if @device[:compatible_packages].any? { |elem| elem[:name] == pkgName }
      if compat
        if File.exist? "#{CREW_META_PATH}#{pkgName}.filelist"
          puts pkgName.lightgreen
        else
          puts pkgName
        end
      end
    else
      puts pkgName.lightred unless compat
    end
  end
end

def generate_compatible
  puts 'Generating compatible packages...'.orange if @opt_verbose
  @device[:compatible_packages] = []
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |filename|
    pkgName = File.basename filename, '.rb'
    begin
      set_package pkgName, filename
    rescue StandardError => e
      puts "Error with #{pkgName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
    end
    puts "Checking #{pkgName} for compatibility.".orange if @opt_verbose
    # If the compatibility property does not exist, check if a binary package
    # exists, and if not, see if at least a source url exists.
    @compatibility = true
    @binary_url = ''
    @url = ''
    if @pkg.compatibility.nil?
      @binary_url = @pkg.get_binary_url(@device[:architecture])
      @url = @pkg.get_url(@device[:architecture])
      unless @binary_url
        puts "#{pkgName} is missing compatibility information".red
        # puts "url: #{@url}".green
        # If no source package is available, then the package is not compatible.
        @compatibility = false unless @url
        puts "#{pkgName} compatibility is #{@compatibility}" if @opt_verbose
      end
    end
    if ((@pkg.compatibility&.include? 'all') || (@pkg.compatibility&.include? ARCH) || @pkg.compatibility.nil?) && @compatibility
      # add to compatible packages
      puts "Adding #{pkgName} to compatible packages.".lightgreen if @opt_verbose
      @device[:compatible_packages].push(name: @pkg.name)
    elsif @opt_verbose
      puts "#{pkgName} is not a compatible package.".lightred
    end
  end
  File.open("#{CREW_CONFIG_PATH}device.json", 'w') do |file|
    output = JSON.parse @device.to_json
    file.write JSON.pretty_generate(output)
  end
  puts 'Generating compatible packages done.'.orange if @opt_verbose
end

def search(pkgName, silent = false)
  pkgPath = "#{CREW_PACKAGES_PATH}#{pkgName}.rb"
  begin
    return set_package(pkgName, pkgPath) if File.exist?(pkgPath)
  rescue StandardError => e
    puts "Error with #{pkgName}.rb: #{e}".lightred unless e.to_s.include?('uninitialized constant')
  end
  unless File.exist?(pkgPath) && silent
    @pkg = nil
    abort "Package #{pkgName} not found. 😞".lightred unless silent
    return
  end
end

def regexp_search(pkgPat)
  re = Regexp.new(pkgPat, true)
  results = Dir["#{CREW_PACKAGES_PATH}*.rb"] \
            .select { |f| File.basename(f, '.rb') =~ re } \
            .each { |f| print_package(f, @opt_verbose) }
  if results.empty?
    Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |packagePath|
      packageName = File.basename packagePath, '.rb'
      begin
        set_package packageName, packagePath
      rescue StandardError => e
        puts "Error with #{packageName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
      end
      if @pkg.description =~ /#{pkgPat}/i
        print_current_package @opt_verbose
        results.push(packageName)
      end
    end
  end
  abort "Package #{pkgPat} not found. :(".lightred if results.empty?
end

def help(pkgName)
  case pkgName
  when 'autoremove'
    puts <<~EOT
      Remove unused dependencies installed by removed packages
      Usage: crew autoremove
    EOT
  when 'build'
    puts <<~EOT
      Build package(s).
      Usage: crew build [-k|--keep] [-v|--verbose] <package1> [<package2> ...]
      Build package(s) from source and place the archive and checksum in the current working directory.
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'const'
    puts <<~EOT
      Display constant(s).
      Usage: crew const [<const1> <const2> ...]
      If no constants are provided, all constants will be displayed.
    EOT
  when 'deps'
    puts <<~EOT
      Display dependencies of package(s).
      Usage: crew deps [-t|--tree] [-b|--include-build-deps] [--exclude-buildessential] <package1> [<package2> ...]

      If `-t` or `--tree` is specified, dependencies will be printed in a tree-structure format.
      If `-b` or `--include-build-deps` is specified, build dependencies will be included in the output.
      If `--exclude-buildessential` is specified, `buildessential` and its dependencies will not be inserted automatically.
    EOT
  when 'download'
    puts <<~EOT
      Download package(s).
      Usage: crew download [-v|--verbose] <package1> [<package2> ...]
      Download package(s) to `CREW_BREW_DIR` (#{CREW_BREW_DIR}), but don't install.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'files'
    puts <<~EOT
      Display installed files of package(s).
      Usage: crew files <package1> [<package2> ...]
      The package(s) must be currently installed.
    EOT
  when 'install'
    puts <<~EOT
      Install package(s).
      Usage: crew install [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] [-v|--verbose] <package1> [<package2> ...]
      The package(s) must have a valid name. Use `crew search <pattern>` to search for packages to install.
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of installed via binary.
      If `-S` or `--recursive-build` is present, the package(s), including all dependencies, will be compiled instead of installed via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'list'
    puts <<~EOT
      List packages
      Usage: crew list [-v|--verbose] available|installed|compatible|incompatible
    EOT
  when 'postinstall'
    puts <<~EOT
      Display postinstall messages of package(s).
      Usage: crew postinstall <package1> [<package2> ...]
      The package(s) must be currently installed.
    EOT
  when 'reinstall'
    puts <<~EOT
      Remove and install package(s).
      Usage: crew reinstall [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] [-v|--verbose] <package1> [<package2> ...]
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of installed via binary.
      If `-S` or `--recursive-build` is present, the package(s), including all dependencies, will be compiled instead of installed via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'remove'
    puts <<~EOT
      Remove package(s).
      Usage: crew remove [-v|--verbose] <package1> [<package2> ...]
      The package(s) must be currently installed.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'search'
    puts <<~EOT
      Look for package(s).
      Usage: crew search [-v|--verbose] [<pattern> ...]
      If <pattern> is omitted, all packages will be returned.
      If the package color is #{'green'.lightgreen}, it means the package is installed.
      If the package color is #{'red'.lightred}, it means the architecture is not supported.
      The <pattern> string can also contain regular expressions.
      If `-v` or `--verbose` is present, homepage, version and license will be displayed.
      Examples:
        #{'crew search ^lib'.lightblue} will display all packages that start with `lib`.
        #{'crew search audio'.lightblue} will display all packages with `audio` in the name.
        #{'crew search | grep -i audio'.lightblue} will display all packages with `audio` in the name or description.
        #{'crew search git -v'.lightblue} will display packages with `git` in the name along with homepage, version and license.
    EOT
  when 'sysinfo'
    puts <<~EOT
      Show system information in Markdown style.

      Usage: crew sysinfo
    EOT
  when 'update'
    puts <<~EOT
      Update crew.
      Usage: crew update
      This only updates crew itself. Use `crew upgrade` to update packages.
      Usage: crew update compatible
      This updates the crew package compatibility list.
    EOT
  when 'upgrade'
    puts <<~EOT
      Update package(s).
      Usage: crew upgrade [-v|--verbose] [-s|--build-from-source] <package1> [<package2> ...]
      If package(s) are omitted, all packages will be updated. Otherwise, specific package(s) will be updated.
      Use `crew update` to update crew itself.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of upgraded via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'whatprovides'
    puts <<~EOT
      Determine which package(s) contain file(s).
      Usage: crew whatprovides <pattern> ...
      The <pattern> is a search string which can contain regular expressions.
    EOT
  else
    puts "Available commands: #{@cmds.join(', ')}"
  end
end

def const(var)
  if var
    value = eval(var)
    puts "#{var}=#{value}"
  else
    @ruby_default_constants = %w[
      ARGF
      ARGV
      CROSS_COMPILING
      DOC
      ENV
      GC
      IO
      JSON
      OpenSSL
      Q
      R
      RUBY_COPYRIGHT
      RUBY_DESCRIPTION
      RUBY_ENGINE
      RUBY_ENGINE_VERSION
      RUBYGEMS_ACTIVATION_MONITOR
      RUBY_PATCHLEVEL
      RUBY_PLATFORM
      RUBY_RELEASE_DATE
      RUBY_REVISION
      RUBY_VERSION
      RubyVM
      S
      STDERR
      STDIN
      STDOUT
      StringIO
      TOPLEVEL_BINDING
      URI
    ]
    # Get all constants
    @constants = Module.constants.grep(/[[:upper:]]$/)
    # Reject all constants which match the default list
    @constants = @constants.map(&:to_s).reject { |e| @ruby_default_constants.any? { |f| /\A#{e}\z/ =~ f } }
    # Print a sorted list of the remaining constants used by crew.
    @constants.sort.each do |var|
      value = eval(var.to_s)
      puts "#{var}=#{value}"
    end
  end
end

def files(pkgName)
  filelist = "#{CREW_META_PATH}#{pkgName}.filelist"
  if File.exist? filelist
    system "sort #{filelist}"
    lines = File.readlines(filelist).size
    size = 0
    File.readlines(filelist).each do |filename|
      size += File.size(filename.chomp) if File.exist? filename.chomp
    end
    humansize = human_size(size)
    puts "Total found: #{lines}".lightgreen
    puts "Disk usage: #{humansize}".lightgreen
  else
    puts "Package #{pkgName} is not installed. :(".lightred
  end
end

def whatprovides(regexPat)
  # Use grep version command to ascertain whether we have a working grep.
  abort 'Grep is not working. Please install it with \'crew install grep\''.lightred unless system('grep -V > /dev/null 2>&1')
  fileArray = []
  @grepresults = `grep "#{regexPat}" #{CREW_META_PATH}*.filelist`.chomp.gsub('.filelist', '').gsub(':', ': ').gsub(CREW_META_PATH, '').split(/$/).map(&:strip)
  @grepresults.each { |fileLine| fileArray.push(fileLine) }
  unless fileArray.empty?
    fileArray.sort.each do |item|
      puts item
    end
    puts "\nTotal found: #{fileArray.length}".lightgreen
  end
end
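
# Illustrative transformation in whatprovides, assuming a match in vim.filelist:
#   "#{CREW_META_PATH}vim.filelist:/usr/local/bin/vim"
# becomes "vim: /usr/local/bin/vim" after the gsub calls above.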

def update
  abort "'crew update' is used to update crew itself. Use 'crew upgrade <package1> [<package2> ...]' to update specific packages.".orange if @pkgName
  # update package lists
  Dir.chdir CREW_LIB_PATH do
    if CREW_TESTING == '1'
      puts 'Updating crew from testing repository...'
      system "git remote add testing #{CREW_TESTING_REPO} 2>/dev/null || \
        git remote set-url testing #{CREW_TESTING_REPO}"
      system "git fetch testing #{CREW_TESTING_BRANCH}"
      system "git reset --hard testing/#{CREW_TESTING_BRANCH}"
    else
      system 'git fetch origin master'
      system 'git reset --hard origin/master'
    end
  end

  puts 'Package lists, crew, and library updated.'

  # update compatible packages
  generate_compatible
  # check for outdated installed packages
  puts 'Checking for package updates...'

  canBeUpdated = 0
  @device[:installed_packages].each do |package|
    search package[:name], true
    if @pkg.nil?
      puts "Package file for #{package[:name]} not found. :(".lightred if @opt_verbose
      next
    end
    if package[:version].to_s != @pkg.version
      canBeUpdated += 1
      puts "#{@pkg.name} could be updated from #{package[:version]} to #{@pkg.version}"
    end
  end

  if canBeUpdated.positive?
    puts
    puts 'Run `crew upgrade` to update all packages or `crew upgrade <package1> [<package2> ...]` to update specific packages.'
  else
    puts 'Your software is up to date.'.lightgreen
  end
end

def upgrade
  if @pkgName
    currentVersion = nil
    @device[:installed_packages].each do |package|
      currentVersion = package[:version] if package[:name] == @pkg.name
    end

    if currentVersion == @pkg.version
      puts "#{@pkg.name} is already up to date.".lightgreen
    else
      puts "Updating #{@pkg.name}..."
      @pkg.in_upgrade = true
      resolve_dependencies_and_install
      @pkg.in_upgrade = false
    end
  else
    # Build a list of installed packages in dependency order.
    dependencies = []
    @device[:installed_packages].each do |package|
      unless File.exist?("#{CREW_PACKAGES_PATH}#{package[:name]}.rb")
        puts "Package file for installed package #{package[:name]} is missing.".lightred
        next
      end
      # Skip the package if it is a dependency of a package already checked.
      next if dependencies.include? package[:name]

      # add the package itself
      dependencies = [package[:name]].concat(dependencies)
      # expand dependencies and add them to the dependencies list
      search package[:name], true
      @dependencies = []
      exp_dep = expand_dependencies
      dependencies = exp_dep.concat(dependencies)
    end
    dependencies.uniq!

    # Check the version numbers of installed packages and build the list of targets.
    toBeUpdated = []
    dependencies.each do |dep|
      package = @device[:installed_packages].select { |pkg| pkg[:name] == dep }[0]
      next unless package

      search package[:name], true
      toBeUpdated.push(package[:name]) if package[:version] != @pkg.version
    end

    if toBeUpdated.empty?
      puts 'Your software is already up to date.'.lightgreen
    else
      puts 'Updating packages...'
      toBeUpdated.each do |package|
        search package
        print_current_package
        puts "Updating #{@pkg.name}..." if @opt_verbose
        @pkg.in_upgrade = true
        resolve_dependencies_and_install
        @pkg.in_upgrade = false
      end
      puts 'Packages have been updated.'.lightgreen
    end
  end
end

def download
  url = @pkg.get_url(@device[:architecture])
  source = @pkg.is_source?(@device[:architecture])

  uri = URI.parse url
  filename = File.basename(uri.path)
  sha256sum = @pkg.get_sha256(@device[:architecture])
  @extract_dir = @pkg.get_extract_dir

  if !url
    abort "No precompiled binary or source is available for #{@device[:architecture]}.".lightred
  elsif !source
    puts 'Precompiled binary available, downloading...'
  elsif @pkg.build_from_source
    puts 'Downloading source...'
  elsif uri =~ /^SKIP$/i
    puts 'Skipping source download...'
  else
    puts 'No precompiled binary available for your platform, downloading source...'
  end

  @git = uri.scheme =~ /git/ ? true : false

  Dir.chdir CREW_BREW_DIR do
    case File.basename(filename)
    # Sources that download with curl
    when /\.zip$/i, /\.(tar(\.(gz|bz2|xz|lzma|lz|zst))?|tgz|tbz|tpxz|txz)$/i, /\.deb$/i, /\.AppImage$/i
      # Recall file from cache if requested
      if CREW_CACHE_ENABLED
        puts "Looking for #{@pkg.name} archive in cache".orange if @opt_verbose
        cachefile = CREW_CACHE_DIR + filename
        if File.exist?(cachefile)
          puts "#{@pkg.name.capitalize} archive file exists in cache".lightgreen if @opt_verbose
          if Digest::SHA256.hexdigest(File.read(cachefile)) == sha256sum || sha256sum =~ /^SKIP$/i
            begin
              # Hard link cached file if possible.
              FileUtils.ln cachefile, CREW_BREW_DIR, force: true, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#{filename}")
              puts 'Archive hard linked from cache'.green if @opt_verbose
            rescue StandardError
              # Copy cached file if hard link fails.
              FileUtils.cp cachefile, CREW_BREW_DIR, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#{filename}")
              puts 'Archive copied from cache'.green if @opt_verbose
            end
            puts 'Archive found in cache'.lightgreen
            return { source: source, filename: filename }
          else
            puts 'Cached archive checksum mismatch. 😔 Will download.'.lightred
            cachefile = ''
          end
        else
          puts 'Cannot find cached archive. 😔 Will download.'.lightred
          cachefile = ''
        end
      end
      # Download file if not cached.
      downloader url, sha256sum, filename, @opt_verbose

      puts "#{@pkg.name.capitalize} archive downloaded.".lightgreen
      # Stow file in cache if requested, if file is not from cache,
      # and cache is writable.
      if CREW_CACHE_ENABLED && cachefile.to_s.empty? && File.writable?(CREW_CACHE_DIR)
        begin
          # Hard link to cache if possible.
          FileUtils.ln filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
          puts 'Archive hard linked to cache'.green if @opt_verbose
        rescue StandardError
          # Copy to cache if hard link fails.
          FileUtils.cp filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
          puts 'Archive copied to cache'.green if @opt_verbose
        end
      end
      return { source: source, filename: filename }

    when /^SKIP$/i
      Dir.mkdir @extract_dir
    when /\.git$/i # Source URLs which end with .git are git sources.
      @git = true
    else
      Dir.mkdir @extract_dir
      downloader url, sha256sum, filename, @opt_verbose

      puts "#{filename}: File downloaded.".lightgreen

      FileUtils.mv filename, "#{@extract_dir}/#{filename}"
    end

    # Handle git sources.
    if @git == true
      # Recall repository from cache if requested
      if CREW_CACHE_ENABLED
        # No git branch specified, just a git commit or tag
        if @pkg.git_branch.to_s.empty?
          abort('No Git branch, commit, or tag specified!'.lightred) if @pkg.git_hashtag.to_s.empty?
          cachefile = "#{CREW_CACHE_DIR}#{filename}#{@pkg.git_hashtag.gsub('/', '_')}.tar.xz"
        # Git branch and git commit specified
        elsif !@pkg.git_hashtag.to_s.empty?
          cachefile = "#{CREW_CACHE_DIR}#{filename}#{@pkg.git_branch.gsub(/[^0-9A-Za-z.\-]/, '_')}_#{@pkg.git_hashtag.gsub('/', '_')}.tar.xz"
        # Git branch specified, without a specific git commit.
        else
          # Use day granularity for the branch timestamp when no specific commit is specified.
          cachefile = "#{CREW_CACHE_DIR}#{filename}#{@pkg.git_branch.gsub(/[^0-9A-Za-z.\-]/, '_')}#{Time.now.strftime('%m%d%Y')}.tar.xz"
        end
        puts "cachefile is #{cachefile}".orange if @opt_verbose
        if File.file?(cachefile)
          if system "cd #{CREW_CACHE_DIR} && sha256sum -c #{cachefile}.sha256"
            FileUtils.mkdir @extract_dir
            system "tar x#{@verbose}f #{cachefile} -C #{@extract_dir}"
            return { source: source, filename: filename }
          else
            puts 'Cached git repository checksum mismatch. 😔 Will download.'.lightred
          end
        else
          puts 'Cannot find cached git repository. 😔 Will download.'.lightred
        end
      end
      # Download via git
      Dir.mkdir @extract_dir
      Dir.chdir @extract_dir do
        if @pkg.git_branch.to_s.empty?
          system 'git init'
          system 'git config advice.detachedHead false'
          system 'git config init.defaultBranch master'
          system "git remote add origin #{@pkg.source_url}", exception: true
          system "git fetch --depth 1 origin #{@pkg.git_hashtag}", exception: true
          system 'git checkout FETCH_HEAD'
        else
          # Leave a message because this step can be slow.
          puts 'Downloading src from a git branch. This may take a while...'
          system "git clone --branch #{@pkg.git_branch} --single-branch #{@pkg.source_url} tmpdir", exception: true
          system 'mv tmpdir/.git . && rm -rf tmpdir'
          system "git reset --hard #{@pkg.git_hashtag}", exception: true
        end
        system 'git submodule update --init --recursive' unless @pkg.no_git_submodules?
        system 'git fetch --tags', exception: true if @pkg.git_fetchtags?
        system "git fetch origin #{@pkg.git_hashtag}", exception: true if @pkg.git_clone_deep?
        puts 'Repository downloaded.'.lightgreen
      end
      # Stow file in cache if requested and cache is writable.
      if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
        puts 'Caching downloaded git repo...'
        Dir.chdir @extract_dir.to_s do
          # Do not use --exclude-vcs to exclude .git
          # because some builds will use that information.
          system "tar c#{@verbose}Jf #{cachefile} \
            $(find -mindepth 1 -maxdepth 1 -printf '%P\n')"
        end
        system "sha256sum #{cachefile} > #{cachefile}.sha256"
        puts 'Git repo cached.'.lightgreen
      end
    end
  end
  return { source: source, filename: filename }
end
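
# download returns a hash consumed by unpack, of the form
#   { source: true_or_false, filename: 'foo-1.0.tar.xz' } (hypothetical filename),
# where :source is false when a precompiled binary was fetched.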

def unpack(meta)
  target_dir = nil
  Dir.chdir CREW_BREW_DIR do
    FileUtils.mkdir_p @extract_dir, verbose: @fileutils_verbose
    case File.basename meta[:filename]
    when /\.zip$/i
      puts "Unpacking archive using 'unzip', this may take a while..."
      _verbopt = @opt_verbose ? '-v' : '-qq'
      system 'unzip', _verbopt, '-d', @extract_dir, meta[:filename], exception: true
    when /\.(tar(\.(gz|bz2|xz|lzma|lz))?|tgz|tbz|txz)$/i
      puts "Unpacking archive using 'tar', this may take a while..."
      system "tar x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
    when /\.tar\.zst$/i
      # Check to see that there is a working zstd
      if File.exist?("#{CREW_PREFIX}/bin/zstd")
        @crew_prefix_zstd_available = `#{CREW_PREFIX}/bin/zstd --version`.include?('zstd command line interface') ? true : nil
      end
      if File.exist?("#{CREW_MUSL_PREFIX}/bin/zstd")
        @crew_musl_prefix_zstd_available = `#{CREW_MUSL_PREFIX}/bin/zstd --version`.include?('zstd command line interface') ? true : nil
      end
      abort 'zstd is needed for this install. Please (re)install it first with \'crew remove musl_zstd zstd ; crew install musl_zstd ; crew install zstd\''.lightred unless @crew_prefix_zstd_available || @crew_musl_prefix_zstd_available
      puts "Unpacking archive using 'tar', this may take a while..."
      # Use the zstd found to be working. This should correct for
      # situations where the libzstd library is unavailable.
      if @crew_prefix_zstd_available
        puts 'Using standard zstd'.lightblue if @opt_verbose
        system "tar -Izstd -x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
      elsif @crew_musl_prefix_zstd_available
        puts 'Using musl zstd'.lightblue if @opt_verbose
        system "PATH=#{CREW_MUSL_PREFIX}/bin:\$PATH tar -Izstd -x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
      end
    when /\.deb$/i
      puts "Unpacking '.deb' archive, this may take a while..."
      DebUtils.extract_deb(meta[:filename], /data\..*/)
      system "tar x#{@verbose}f data.* -C #{@extract_dir}", exception: true
    when /\.AppImage$/i
      puts "Unpacking 'AppImage' archive, this may take a while..."
      FileUtils.chmod 0o755, meta[:filename], verbose: @fileutils_verbose
      Dir.chdir @extract_dir do
        system "../#{meta[:filename]} --appimage-extract", exception: true
      end
    when /\.tpxz$/i
      abort 'Pixz is needed for this install. Please install it with \'crew install pixz\''.lightred unless File.exist?("#{CREW_PREFIX}/bin/pixz")
      puts "Unpacking 'tpxz' archive using 'tar', this may take a while..."
      system "tar -Ipixz -x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
    end
    if meta[:source] == true
      # Check the number of directories in the archive
      entries = Dir["#{@extract_dir}/*"]
      entries = Dir[@extract_dir] if entries.empty?
      if entries.empty?
        abort "Empty archive: #{meta[:filename]}".lightred
      elsif entries.length == 1 && File.directory?(entries.first)
        # Use `extract_dir/dir_in_archive` if there is only one directory.
        target_dir = entries.first
      else
        # Use `extract_dir` otherwise
        target_dir = @extract_dir
      end
    else
      # Use `extract_dir` for binary distribution
      target_dir = @extract_dir
    end
    # Remove tarball to save space.
    FileUtils.rm_f meta[:filename], verbose: @fileutils_verbose
  end
  return CREW_BREW_DIR + target_dir
end
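
# Example of the target directory choice above: an archive with a single
# top-level directory 'foo-1.0/' yields "#{@extract_dir}/foo-1.0", while a
# flat archive or a binary package yields @extract_dir itself.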

def build_and_preconfigure(target_dir)
  Dir.chdir target_dir do
    unless @pkg.no_compile_needed?
      puts 'Building from source, this may take a while...'

      if CREW_LA_RENAME_ENABLED
        # Rename *.la files temporarily to *.la_tmp to avoid
        # libtool: link: '*.la' is not a valid libtool archive.
        # See https://gnunet.org/faq-la-files and
        # https://stackoverflow.com/questions/42963653/libquadmath-la-is-not-a-valid-libtool-archive-when-configuring-openmpi-with-g
        puts 'Rename all *.la files to *.la_tmp'.lightblue

        system "find #{CREW_LIB_PREFIX} -type f -name '*.la' -print0 | xargs --null -I{} mv #{@short_verbose} {} {}_tmp"
      end

      # Load musl options only if package is targeted at the musl toolchain
      load "#{CREW_LIB_PATH}lib/musl.rb" if @pkg.is_musl?
    end

    @pkg.in_build = true
    @pkg.patch
    @pkg.prebuild
    @pkg.build
    @pkg.in_build = false
    # wipe crew destdir
    FileUtils.rm_rf Dir["#{CREW_DEST_DIR}/*"], verbose: @fileutils_verbose
    puts 'Preconfiguring package...'
    @pkg.install

    unless @pkg.no_compile_needed? && !CREW_LA_RENAME_ENABLED
      # Rename all *.la_tmp back to *.la to avoid
      # cannot access '*.la': No such file or directory
      puts 'Rename all *.la_tmp files back to *.la'.lightblue
      system "find #{CREW_LIB_PREFIX} -type f -name '*.la_tmp' -exec bash -c 'mv #{@short_verbose} \"$1\" \"${1%.la_tmp}.la\"' _ {} \\;"
    end
  end
end

def pre_flight
  puts 'Performing pre-flight checks...'
  @pkg.preflight
end

def pre_install(dest_dir)
  Dir.chdir dest_dir do
    puts 'Performing pre-install...'
    @pkg.preinstall
  end
end

def post_install
  # return unless the postinstall function was defined by the package recipe
  return unless @pkg.method(:postinstall).source_location[0].include?(@pkg.name)

  Dir.mktmpdir do |post_install_tempdir|
    Dir.chdir post_install_tempdir do
      puts "Performing post-install for #{@pkg.name}...".lightblue
      @pkg.postinstall
      if @pkg.gnome?
        puts "Performing Gnome post-installs for #{@pkg.name}...".lightblue if @opt_verbose
        # generate schemas
        system "#{CREW_PREFIX}/bin/glib-compile-schemas #{CREW_PREFIX}/share/glib-2.0/schemas" if @device[:installed_packages].any? { |elem| elem[:name] == 'glib' }
        # update mime database
        system "#{CREW_PREFIX}/bin/update-mime-database #{CREW_PREFIX}/share/mime" if @device[:installed_packages].any? { |elem| elem[:name] == 'shared_mime_info' }
        # update icon cache, but only if gtk3 is already installed.
        system "#{CREW_PREFIX}/bin/gtk-update-icon-cache -ft #{CREW_PREFIX}/share/icons/* || true" if @device[:installed_packages].any? { |elem| elem[:name] == 'gtk3' }
      end
    end
  end
end

def compress_doc(dir)
  # check whether crew should compress
  return if CREW_NOT_COMPRESS || !File.exist?("#{CREW_PREFIX}/bin/compressdoc")

  if Dir.exist? dir
    system "find #{dir} -type f ! -perm -200 | xargs -r chmod u+w"
    system "compressdoc --gzip -9 #{@short_verbose} #{dir}"
  end
end

def prepare_package(destdir)
  Dir.chdir destdir do
    # Avoid /usr/local/share/info/dir{.gz} file conflict:
    # The install-info program maintains a directory of installed
    # info documents in /usr/share/info/dir for the use of info
    # readers. This file must not be included in packages other
    # than install-info.
    # https://www.debian.org/doc/debian-policy/ch-docs.html#info-documents
    FileUtils.rm_f "#{CREW_DEST_PREFIX}/share/info/dir"

    # Remove all perl module files which will conflict
    if @pkg.name =~ /^perl_/
      puts 'Removing .packlist and perllocal.pod files to avoid conflicts with other perl packages.'.orange
      system "find #{CREW_DEST_DIR} -type f \\( -name '.packlist' -o -name perllocal.pod \\) -delete"
    end

    # compress manual files
    compress_doc "#{CREW_DEST_PREFIX}/man"
    compress_doc "#{CREW_DEST_PREFIX}/info"
    compress_doc "#{CREW_DEST_PREFIX}/share/man"
    compress_doc "#{CREW_DEST_PREFIX}/share/info"

    # Allow postbuild to override the filelist contents
    @pkg.postbuild

    # create file list
    system 'find . -type f > ../filelist'
    system 'find . -type l >> ../filelist'
    system 'cut -c2- ../filelist > filelist'

    # check for FHS3 compliance
    puts 'Checking for FHS3 compliance...'
    @_errors = 0
    @fhs_compliant_prefix = %w[bin etc include lib libexec opt sbin share var]
    @fhs_compliant_prefix.append(ARCH_LIB) if ARCH_LIB == 'lib64'
    Dir.foreach(CREW_DEST_PREFIX) do |filename|
      next if (filename == '.') || (filename == '..')

      unless @fhs_compliant_prefix.include?(filename)
        if CREW_FHS_NONCOMPLIANCE_ONLY_ADVISORY || @pkg.no_fhs?
          puts "Warning: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".orange
        else
          puts "Error: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".lightred
          @_errors = 1
        end
      end
    end

    # check for conflicts with other installed files
    puts 'Checking for conflicts with files from installed packages...'
    conflicts = []
    conflictscmd = `grep --exclude #{CREW_META_PATH}#{@pkg.name}.filelist -Fxf filelist #{CREW_META_PATH}*.filelist`
    conflicts << conflictscmd.gsub(/(\.filelist|#{CREW_META_PATH})/, '').split("\n")
    conflicts.reject!(&:empty?)
    unless conflicts.empty?
      if CREW_CONFLICTS_ONLY_ADVISORY || @pkg.conflicts_ok?
        puts 'Warning: There is a conflict with the same file in another package.'.orange
      else
        puts 'Error: There is a conflict with the same file in another package.'.lightred
        @_errors = 1
      end
      conflicts.each { |conflict| puts conflict }
    end

    # abort if errors encountered
    abort 'Exiting due to above errors.'.lightred if @_errors == 1

    # create directory list
    system 'find . -type d > ../dlist'
    system 'cut -c2- ../dlist > dlistcut'
    system 'tail -n +2 dlistcut > dlist'

    # remove temporary files
    FileUtils.rm_rf ['dlistcut', '../dlist', '../filelist'], verbose: @fileutils_verbose

    strip_dir destdir

    # Use patchelf to set needed library paths for all binaries.
    patchelf_set_need_paths destdir

    # use upx on executables
    shrink_dir destdir
  end
end

def patchelf_set_need_paths(dir)
  return if @pkg.no_patchelf? || @pkg.no_compile_needed?

  Dir.chdir dir do
    puts 'Running patchelf'.lightblue
    abort('No Patchelf found!'.lightred) unless File.exist?("#{CREW_PREFIX}/bin/patchelf")
    @execfiles = `find . -executable -type f ! \\( -name \"*.a\" \\) -exec head -c4 {} 2>/dev/null \\; -printf ' %i %p\\n'| grep ^.ELF | sort -u -n -s -k2,2 | awk '{print $3}'`.chomp
    return if @execfiles.to_s.empty?

    @patchelf_lib_prefix = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib" : CREW_LIB_PREFIX
    puts "@patchelf_lib_prefix is #{@patchelf_lib_prefix}" if @opt_verbose
    @patchelf_interpreter = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib/libc.so" : "#{CREW_LIB_PREFIX}/libc.so.6"
    puts "@patchelf_interpreter is #{@patchelf_interpreter}" if @opt_verbose

    puts 'Running patchelf to patch binaries for library paths'.lightblue
    @execfiles.each_line(chomp: true) do |execfiletopatch|
      execfiletopatch = Dir.pwd + execfiletopatch.delete_prefix('.')
      @neededlibs = `patchelf --print-needed #{execfiletopatch}`
      next if @neededlibs.to_s.empty?

      @neededlibs.each_line(chomp: true) do |neededlibspatch|
        next if neededlibspatch.include?(@patchelf_lib_prefix.to_s)

        # Avoid segfaults from not using system versions of these files.
        patchelf_veto_files = %w[
          libdl.so
          ld-linux.so.2
          ld-linux-x86-64.so.2
          ld-linux-armhf.so.3
          libc.so.6
        ]
        next if !@pkg.is_musl? && patchelf_veto_files.any? { |i| neededlibspatch.include? i }

        @neededlib_basename = File.basename(neededlibspatch)
        @neededlibspatchednamepath = "#{@patchelf_lib_prefix}/" + @neededlib_basename
        # The first check here can be changed to just check the dest_dir
        # hierarchy for @neededlib_basename if the intent is to allow
        # using a different CREW_PREFIX during package installs.
        if File.exist?(@neededlibspatchednamepath) || File.exist?(Dir.pwd + @neededlibspatchednamepath)
          puts "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}" if @opt_verbose
          system "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}"
        else
          puts "#{execfiletopatch} needed library #{@neededlib_basename} not found in #{@patchelf_lib_prefix} or #{Dir.pwd + @neededlibspatchednamepath}.".lightred
        end
      end
      # Do not set interpreter for non-musl, as this can break apps if there
      # is an issue with the crew glibc.
      next unless @pkg.is_musl?

      puts 'Running patchelf to patch binary interpreter paths'.lightblue
      system "patchelf --set-interpreter #{@patchelf_interpreter} #{execfiletopatch}"
    end
  end
end
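
# Illustrative patchelf invocation generated by the loop above (paths assumed):
#   patchelf --replace-needed libfoo.so.1 /usr/local/lib64/libfoo.so.1 ./bin/foo
# i.e. bare DT_NEEDED entries are rewritten to absolute crew library paths.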

def strip_find_files(find_cmd, strip_option = '')
  # check whether crew should strip
  return if CREW_NOT_STRIP || !File.exist?("#{CREW_PREFIX}/bin/llvm-strip")

  # run find_cmd and strip only ar or ELF files
  system "#{find_cmd} | xargs -r chmod u+w"
  system "#{find_cmd} | xargs -r sh -c 'for i in \"$0\" \"$@\"; do case \"$(head -c 4 $i)\" in ?ELF|\!?ar) echo \"$i\";; esac ; done' | xargs -r llvm-strip #{strip_option}"
end
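
# The shell pipeline above keys off the first four bytes of each file:
# "\x7fELF" matches the ?ELF pattern and "!<ar" (static ar archives) matches
# \!?ar, so only ELF objects and ar archives are passed to llvm-strip.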

def strip_dir(dir)
  unless CREW_NOT_STRIP || @pkg.no_compile_needed?
    Dir.chdir dir do
      # Strip libraries with -S
      puts 'Stripping libraries...'
      strip_find_files "find . -type f \\( -name 'lib*.a' -o -name 'lib*.so*' \\) -print", '-S'

      # Strip binaries but not compressed archives
      puts 'Stripping binaries...'
      extensions = %w[bz2 gz lha lz lzh rar tar tbz tgz tpxz txz xz Z zip zst]
      inames = extensions.join(' ! -iname *\.')
      strip_find_files "find . -type f ! -iname *\.#{inames} -perm /111 -print | sed -e '/lib.*\.a$/d' -e '/lib.*\.so/d'"
    end
  end
end

def shrink_dir(dir)
  unless CREW_NOT_SHRINK_ARCHIVE
    Dir.chdir dir do
      if File.exist?("#{CREW_PREFIX}/bin/rdfind")
        puts 'Using rdfind to convert duplicate files to hard links.'
        system "#{CREW_PREFIX}/bin/rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false ."
      end
      # Issues with non-x86_64 in compressing libraries, so just compress
      # non-libraries. Also note that one needs to use "upx -d" on a
      # compressed file to use ldd.
      # sommelier also isn't happy when sommelier and xwayland are compressed,
      # so don't compress those packages.
      if File.exist?("#{CREW_PREFIX}/bin/upx")
        # 1. Find executable binaries but also check for hard linked
        #    files by making sure we have a unique set of
        #    inodes for the binaries found.
        # 2. Copy to a temp file.
        # 3. Compress using upx. (Uncompressible files are ignored.)
        # 4. Check compression by expanding the compressed file with
        #    upx.
        # 5. If the expansion doesn't error out then it is ok to copy
        #    over the original. (This also lets us avoid compressing
        #    hard linked files multiple times.)
        # Also disable for sommelier.elf and Xwayland.elf since upx
        # breaks those binaries.
        @execfiles = `find . -executable -type f ! \\( -name \"*.so*\" -o -name \"*.a\" -o -name \"Xwayland.elf\" -o -name \"sommelier.elf\" \\) -exec head -c4 {} 2>/dev/null \\; -printf ' %i %p\\n'| grep ^.ELF | sort -u -n -s -k2,2 | awk '{print $3}'`.chomp
        unless @execfiles.to_s.empty?
          puts 'Using upx to shrink binaries.'
          # Copying in the ThreadPoolExecutor loop fails non-deterministically
          @execfiles.each_line do |execfilecp|
            execfilecp.slice! '.'
            execfilecp = dir + execfilecp.chomp
            FileUtils.cp execfilecp.to_s, "#{execfilecp}-crewupxtmp"
          end
          begin
            gem 'concurrent-ruby'
          rescue Gem::LoadError
            puts ' -> install gem concurrent-ruby'
            Gem.install('concurrent-ruby')
            gem 'concurrent-ruby'
          end
          require 'concurrent'
          pool = Concurrent::ThreadPoolExecutor.new(
            min_threads: 1,
            max_threads: CREW_NPROC,
            max_queue: 0, # unbounded work queue
            fallback_policy: :caller_runs
          )
          @execfiles.each_line do |execfile|
            pool.post do
              execfile.slice! '.'
              execfile = dir + execfile.chomp
              puts "Attempting to compress #{execfile} ...".orange
              # Make a tmp file for compression
              unless system "upx --lzma #{execfile}-crewupxtmp"
                puts "Compression of #{execfile} failed...".orange if @opt_verbose
                FileUtils.rm_f "#{execfile}-crewupxtmp"
              end
              if File.exist?("#{execfile}-crewupxtmp")
                puts "Testing compressed #{execfile}...".lightblue if @opt_verbose
                if system "upx -t #{execfile}-crewupxtmp && cp #{execfile}-crewupxtmp #{execfile}"
                  puts "#{execfile} successfully compressed...".lightgreen
                else
                  FileUtils.rm_f "#{execfile}-crewupxtmp"
                end
              end
              FileUtils.rm_f "#{execfile}-crewupxtmp"
            end
          end
          pool.shutdown
          pool.wait_for_termination
          # Make sure temporary compression copies are deleted.
          system 'find . -executable -type f -name "*-crewupxtmp" -delete'
        end
      end
    end
  end
end

def install_package(pkgdir)
  Dir.chdir pkgdir do
    # install filelist, dlist and binary files
    puts 'Performing install...'

    FileUtils.mv 'dlist', "#{CREW_META_PATH}#{@pkg.name}.directorylist", verbose: @fileutils_verbose
    FileUtils.mv 'filelist', "#{CREW_META_PATH}#{@pkg.name}.filelist", verbose: @fileutils_verbose

    @brokensymlinks = nil
    @brokensymlinks = `find . -type l -exec test ! -e {} \\; -print`.chomp
    unless @brokensymlinks.to_s.empty?
      puts 'There are broken symlinks. Will try to fix.'.orange if @opt_verbose
      @brokensymlinks.each_line do |fixlink|
        @brokentarget = nil
        @fixedtarget = nil
        @brokentarget = `readlink -n #{fixlink}`.chomp
        puts "Attempting fix of: #{fixlink.delete_prefix('.')} -> #{@brokentarget}".orange if @opt_verbose
        @fixedtarget = @brokentarget.delete_prefix(CREW_DEST_DIR).chomp
        @fixedlink_loc = pkgdir + fixlink.delete_prefix('.')
        @fixedlink_loc = @fixedlink_loc.chomp
        # If no changes were made, don't replace the symlink
        unless @fixedtarget == @brokentarget
          FileUtils.ln_sf @fixedtarget.to_s, @fixedlink_loc.to_s
          puts "Fixed: #{@fixedtarget} -> #{fixlink.delete_prefix('.')}".orange if @opt_verbose
        end
      end
    end
    if File.exist?("#{CREW_PREFIX}/bin/rdfind")
      puts 'Using rdfind to convert duplicate files to hard links.'
      system 'rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false .'
    end

    # Check whether a working rsync command is available.
    rsync_available = true if File.exist?("#{CREW_PREFIX}/bin/rsync") && `#{CREW_PREFIX}/bin/rsync --version`.include?('rsync version')
    puts 'rsync is not working. Please (re)install it with \'crew remove musl_zstd zstd ; crew install musl_zstd ; crew install rsync\''.lightred unless rsync_available || (@pkg.name == 'rsync')
    if Dir.exist? "#{pkgdir}/#{HOME}"
      if rsync_available
        system "rsync -ahHAXW --remove-source-files ./#{HOME.delete_prefix('/')}/ #{HOME}"
      else
        system "tar -c#{@verbose}f - ./usr/* | (cd /; tar xp --keep-directory-symlink -f -)"
      end
    end
    if Dir.exist? "#{pkgdir}/usr/local"
      if rsync_available
        # Adjust "./usr/local" if the build CREW_PREFIX ever changes.
        system "rsync -ahHAXWx --remove-source-files ./usr/local/ #{CREW_PREFIX}"
      else
        system "tar -c#{@verbose}f - ./usr/* | (cd /; tar xp --keep-directory-symlink -f -)"
      end
    end
    if Dir.exist? "#{pkgdir}/#{CREW_PREFIX}"
      if rsync_available
        # Adjust "./usr/local" if the build CREW_PREFIX ever changes.
        system "rsync -ahHAXWx --remove-source-files ./#{CREW_PREFIX}/ #{CREW_PREFIX}"
      else
        system "cp -a#{@verbose} ./#{CREW_PREFIX}/* #{CREW_PREFIX}"
      end
    end
  end
end

def resolve_dependencies_and_install
  @resolve_dependencies_and_install = 1
  preflight_fake_packages = %w[hunspell imagemagick jdk php]
  unless @pkg.is_fake? && !preflight_fake_packages.include?(@pkg.name)
    # Process preflight block to see if package should even
    # be downloaded or installed.
    pre_flight
  end
  begin
    origin = @pkg.name

    @to_postinstall = []
    resolve_dependencies

    search origin, true
    install
    @to_postinstall.append(@pkg.name)
    @to_postinstall.each do |dep|
      search dep
      post_install
    end
  rescue InstallError => e
    abort "#{@pkg.name} failed to install: #{e}".lightred
  ensure
    # cleanup
    unless @opt_keep
      FileUtils.rm_rf Dir.glob("#{CREW_BREW_DIR}/*")
      FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest" # this is a little ugly, feel free to find a better way
    end
  end
  puts "#{@pkg.name.capitalize} installed!".lightgreen
  @resolve_dependencies_and_install = 0
end

def expand_dependencies
  @dependencies = @pkg.get_deps_list.reject { |depName| @device[:installed_packages].any? { |pkg| pkg[:name] == depName } }
end
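
# Example (hypothetical packages): if @pkg depends on readline and ncurses and
# only ncurses is already installed, expand_dependencies sets @dependencies to
# ['readline'] and returns it.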

def resolve_dependencies
  abort "Package #{@pkg.name} is not compatible with your device architecture (#{ARCH}) :/".lightred unless @device[:compatible_packages].any? { |elem| elem[:name] == @pkg.name }

  @dependencies = []
  expand_dependencies

  # Keep only dependencies that are not already installed.
  @dependencies.select! { |name| @device[:installed_packages].none? { |pkg| pkg[:name] == name } }
  %w[jdk11 jdk15 jdk16 jdk17 jdk18].each do |jdk|
    @dependencies.delete('jdk8') if @dependencies.include?(jdk)
    @dependencies.delete('jdk8') if @pkg.name == jdk
  end

  return if @dependencies.empty?

  puts 'The following packages also need to be installed: '

  @dependencies.each do |dep|
    abort "Dependency #{dep} was not found.".lightred unless File.exist?("#{CREW_PACKAGES_PATH}#{dep}.rb")
  end

  puts @dependencies.join(' ')

  print 'Do you agree? [Y/n] '
  response = $stdin.getc
  case response
  when 'n'
    abort 'No changes made.'
  when "\n", 'y', 'Y'
    puts 'Proceeding...'
    proceed = true
  else
    puts "I don't understand `#{response}`. :(".lightred
    abort 'No changes made.'
  end

  if proceed
    @dependencies.each do |dep|
      search dep
      print_current_package
      @pkg.is_dep = true
      install
    end
    if (@resolve_dependencies_and_install == 1) || (@resolve_dependencies_and_build == 1)
      @to_postinstall = @dependencies
    else
      @dependencies.each do |dep|
        search dep
        post_install
      end
    end
  end
end

def install
  if !@pkg.in_upgrade && @device[:installed_packages].any? { |pkg| pkg[:name] == @pkg.name }
    puts "Package #{@pkg.name} already installed, skipping...".lightgreen
    return
  end

  unless @pkg.is_fake?
    meta = download
    target_dir = unpack meta
    if meta[:source] == true
      # build from source and place binaries at CREW_DEST_DIR
      # CREW_DEST_DIR contains usr/local/... hierarchy
      build_and_preconfigure target_dir

      # prepare filelist and dlist at CREW_DEST_DIR
      prepare_package CREW_DEST_DIR

      # use CREW_DEST_DIR
      dest_dir = CREW_DEST_DIR
    else
      # use extracted binary directory
      dest_dir = target_dir
    end
  end

  # Make backup of installed packages json file.
  # If this fails, the install should fail before we create any
  # damage, and we should roughly be at maximal disk space usage at this
  # point anyways.
  FileUtils.cp "#{CREW_CONFIG_PATH}device.json", "#{CREW_CONFIG_PATH}device.json.tmp"

  # remove it just before the file copy
  if @pkg.in_upgrade
    puts 'Removing since upgrade or reinstall...'
    remove @pkg.name
  end

  unless @pkg.is_fake?
    # perform pre-install process
    pre_install dest_dir

    # perform install process
    install_package dest_dir

    unless (@resolve_dependencies_and_install == 1) || (@resolve_dependencies_and_build == 1)
      # perform post-install process
      post_install
    end
  end

  # add to installed packages
  @device[:installed_packages].push(name: @pkg.name, version: @pkg.version, is_dep: @pkg.is_dep)
  File.open("#{CREW_CONFIG_PATH}device.json.tmp", 'w') do |file|
    output = JSON.parse @device.to_json
    file.write JSON.pretty_generate(output)
  end
  # Only copy over original if the write to the tmp file succeeds.
  FileUtils.cp "#{CREW_CONFIG_PATH}device.json.tmp", "#{CREW_CONFIG_PATH}device.json"
  # This may no longer be needed.
  # Update shared library cache after install is complete.
  # system "echo #{CREW_LIB_PREFIX} > #{CREW_PREFIX}/etc/ld.so.conf"
  # system "#{CREW_PREFIX}/sbin/ldconfig -f #{CREW_PREFIX}/etc/ld.so.conf -C #{CREW_PREFIX}/etc/ld.so.cache"
end

def resolve_dependencies_and_build
  @resolve_dependencies_and_build = 1
  @to_postinstall = []
  begin
    origin = @pkg.name

    # Mark the current package as requiring a build from source.
    @pkg.build_from_source = true
    resolve_dependencies
    @to_postinstall.each do |dep|
      search dep
      post_install
    end
    search origin, true
    build_package Dir.pwd
  rescue InstallError => e
    abort "#{@pkg.name} failed to build: #{e}".lightred
  ensure
    # cleanup
    unless @opt_keep
      FileUtils.rm_rf Dir.glob("#{CREW_BREW_DIR}/*"), verbose: @fileutils_verbose
      FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest", verbose: @fileutils_verbose # this is a little ugly, feel free to find a better way
    end
  end
  puts "#{@pkg.name} is built!".lightgreen
  @resolve_dependencies_and_build = 0
end

def build_package(pwd)
  abort 'It is not possible to build a fake package'.lightred if @pkg.is_fake?
  abort 'It is not possible to build without source'.lightred unless @pkg.is_source?(@device[:architecture])

  # download the source code and unpack it
  meta = download
  target_dir = unpack meta

  # build from source and place binaries at CREW_DEST_DIR
  build_and_preconfigure target_dir

  # Call the check method here. The check method is called by this function only,
  # so it is possible to place time-consuming tests in the check method.
  if Dir.exist? target_dir
    Dir.chdir target_dir do
      puts 'Running tests...'
      @pkg.check
    end
  end

  # prepare filelist and dlist at CREW_DEST_DIR
  prepare_package CREW_DEST_DIR

  # build the package from filelist, dlist and binary files in CREW_DEST_DIR
  puts 'Archiving...'
  archive_package pwd
end

def archive_package(pwd)
  # Check to see that there is a working zstd
  if File.exist?("#{CREW_PREFIX}/bin/zstd")
    @crew_prefix_zstd_available = `#{CREW_PREFIX}/bin/zstd --version`.include?('zstd command line interface') ? true : nil
  end
  if File.exist?("#{CREW_MUSL_PREFIX}/bin/zstd")
    @crew_musl_prefix_zstd_available = `#{CREW_MUSL_PREFIX}/bin/zstd --version`.include?('zstd command line interface') ? true : nil
  end
  if @pkg.no_zstd? || (!@crew_prefix_zstd_available && !@crew_musl_prefix_zstd_available)
    puts 'Using xz to compress package. This may take some time.'.lightblue
    pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.xz"
    Dir.chdir CREW_DEST_DIR do
      system "tar c#{@verbose}Jf #{pwd}/#{pkg_name} *"
    end
  else
    puts 'Using zstd to compress package. This may take some time.'.lightblue
    pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.zst"
    Dir.chdir CREW_DEST_DIR do
      # Using the same zstd compression options as Arch, which privilege
      # decompression speed over compression speed.
      # See https://lists.archlinux.org/pipermail/arch-dev-public/2019-March/029542.html
      # Use nice so that the user can (possibly) do other things during compression.
      if @crew_prefix_zstd_available
        puts 'Using standard zstd'.lightblue if @opt_verbose
        system "tar c#{@verbose} * | nice -n 20 #{CREW_PREFIX}/bin/zstd -c -T0 --ultra -20 - > #{pwd}/#{pkg_name}"
      elsif @crew_musl_prefix_zstd_available
        puts 'Using musl zstd'.lightblue if @opt_verbose
        system "tar c#{@verbose} * | nice -n 20 #{CREW_MUSL_PREFIX}/bin/zstd -c -T0 --ultra -20 - > #{pwd}/#{pkg_name}"
      end
    end
  end
  system "sha256sum #{pwd}/#{pkg_name} > #{pwd}/#{pkg_name}.sha256"
end

def remove(pkgName)
  # make sure the package is actually installed
  unless @device[:installed_packages].any? { |pkg| pkg[:name] == pkgName } || File.exist?("#{CREW_META_PATH}#{pkgName}.filelist")
    puts "Package #{pkgName} isn't installed.".lightred
    return
  end

  # Preserve CREW_ESSENTIAL_FILES and make sure they are real files
  # and not symlinks, because preserving symlinked libraries does not
  # prevent breakage.
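  # (Illustrative, hypothetical example: if an essential libfoo.so.1 is a
  # symlink to libfoo.so.1.2.3, the loop below swaps the symlink for a hard
  # link to the real file, so removing the package that owned the link target
  # cannot break crew itself.)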
  CREW_ESSENTIAL_FILES.each do |file|
    next unless File.symlink?("#{CREW_LIB_PREFIX}/#{file}")

    @canonicalized_file = `readlink -m #{CREW_LIB_PREFIX}/#{file}`.chomp
    if File.exist?(@canonicalized_file) && @canonicalized_file.include?(CREW_PREFIX)
      puts "Replacing symlinked essential #{file} with hard link to #{@canonicalized_file} to avoid breakage.".lightblue if @opt_verbose
      FileUtils.ln(@canonicalized_file, "#{CREW_LIB_PREFIX}/#{file}", force: true)
    end
  end
  # if the filelist exists, remove the files and directories installed by the package
  if File.file?("#{CREW_META_PATH}#{pkgName}.filelist")
    Dir.chdir CREW_CONFIG_PATH do
      # remove all files installed by the package
      File.foreach("meta/#{pkgName}.filelist", chomp: true) do |line|
        # Do not remove essential files which crew (and dependencies)
        # rely on, especially during package upgrades or reinstalls.
        # These essential files are enumerated in const.rb as
        # CREW_ESSENTIAL_FILES.
        if CREW_ESSENTIAL_FILES.include?(File.basename(line))
          puts "Removing #{line} will break crew. It was #{'NOT'.lightred} deleted." if @opt_verbose
        else
          puts "Removing file #{line}".lightred if @opt_verbose
          begin
            File.unlink line.chomp
          rescue StandardError => e # swallow exception
          end
        end
      end

      # remove all directories installed by the package
      File.foreach("meta/#{pkgName}.directorylist", chomp: true) do |line|

        puts "Removing directory #{line}#{''.lightred}" if @opt_verbose
        Dir.rmdir line
      rescue StandardError => e # swallow exception

      end

      # remove the file and directory list
      FileUtils.rm_f "meta/#{pkgName}.filelist"
      FileUtils.rm_f "meta/#{pkgName}.directorylist"
    end
  end

  # remove from installed packages
  puts "Removing package #{pkgName}#{''.lightred}" if @opt_verbose
  @device[:installed_packages].delete_if { |elem| elem[:name] == pkgName }

  # update the device manifest
  File.write "#{CREW_CONFIG_PATH}/device.json", JSON.pretty_generate(JSON.parse(@device.to_json))

  search pkgName, true
  @pkg.remove

  puts "#{pkgName.capitalize} removed!".lightgreen
end

def print_deps_tree(args)
  warn 'Walking through dependencies recursively, this may take a while...', ''

  # depHash: Hash object returned by @pkg.get_deps_list
  depHash = @pkg.get_deps_list(hash: true, include_build_deps: (args['--include-build-deps'] || 'auto'), exclude_buildessential: args['--exclude-buildessential'])

  # convert returned hash to json and format it
  jsonView = JSON.pretty_generate(depHash)

  # convert formatted json string to tree structure
  treeView = jsonView.gsub(/\{\s*/m, '└─────').gsub(/[\[\]{},":]/, '').gsub(/^\s*$\n/, '').gsub(/\s*$/, '')
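  # Illustrative sketch (not executed), assuming a hypothetical dependency hash
  # like { "foo" => ["*bar*"] }: JSON.pretty_generate yields
  #   {
  #     "foo": [
  #       "*bar*"
  #     ]
  #   }
  # and the gsub chain above reduces it to
  #   └─────foo
  #       *bar*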

  # add pipe chars to connect endpoints and starting points, improving readability
  # find the horizontal location of all arrow symbols
  index_with_pipe_char = treeView.lines.map { |line| line.index('└') }.compact.uniq

  # determine whether a pipe char should be added according to the horizontal location of the arrow symbols
  treeView = treeView.lines.each_with_index.map do |line, line_i|
    index_with_pipe_char.each do |char_i|
      # check if there is any non-space char (pkgNames) between the starting point ([line_i][char_i]) and the endpoint vertically ([next_arrow_line_offset][char_i])
      # (used to determine if the starting point and endpoint are in the same branch; connect them with a pipe char if true)
      next_arrow_line_offset = treeView.lines[line_i..].index { |l| l[char_i] == '└' }
      have_line_with_non_empty_char = treeView.lines[line_i + 1..line_i + next_arrow_line_offset.to_i - 1].any? { |l| l[char_i].nil? or l[char_i] =~ /\S/ }

      line[char_i] = '│' if next_arrow_line_offset && (line[char_i] == ' ') && !have_line_with_non_empty_char
    end
    next line
  end.join

  # replace arrow symbols with a tee symbol on branch intersections
  treeView = treeView.lines.each_with_index.map do |line, line_i|
    # orig_arrow_index_connecter: the horizontal location of the arrow symbol used to connect the parent branch
    #
    # example:
    #   └───┬─chrome
    #       └─────buildessential
    #       ^
    orig_arrow_index_connecter = line.index('└')
    # orig_arrow_index_newbranch: the horizontal location of the box-drawing char that MIGHT
    #                             need to be converted to a tee char in order to connect a child branch,
    #                             located 4 characters after orig_arrow_index_connecter
    #
    # example:
    #       v
    #   └─────chrome
    #       └─────buildessential
    #
    # which might need to be converted to:
    #   └───┬─chrome
    #       └─────buildessential
    orig_arrow_index_newbranch = orig_arrow_index_connecter + 4 if orig_arrow_index_connecter

    # if the char below the char being processed (at orig_arrow_index_connecter) is an arrow or a pipe, change the processed char to a tee symbol
    line[orig_arrow_index_connecter] = '├' if orig_arrow_index_connecter && treeView.lines[line_i + 1].to_s[orig_arrow_index_connecter] =~ (/[└│]/)
    # if the char below the char being processed (at orig_arrow_index_newbranch) is an arrow or a tee, change the processed char to a tee symbol
    line[orig_arrow_index_newbranch] = '┬' if orig_arrow_index_newbranch && treeView.lines[line_i + 1].to_s[orig_arrow_index_newbranch] =~ (/[└├]/)
    next line # return modified line
  end.join

  if String.use_color
    puts <<~EOT, ''
      \e[45m \e[0m: satisfied dependency
      \e[46m \e[0m: build dependency
      \e[47m \e[0m: runtime dependency
    EOT
    # (the first string in each #{} is used for commenting only, will not be included in output)

    # replace the special symbols returned by @pkg.get_deps_list with actual color codes
    treeView.gsub!(/\*(.+)\*/, '\1'.lightcyan)
    treeView.gsub!(/\+(.+)\+/, "\e[45m\\1\e[0m")
  end

  puts treeView
end

def autoremove_command(_args)
  deps_of_installed_pkgs = @device[:installed_packages].map do |pkg|
    # ignore deleted/non-existent package recipes
    next unless File.exist?("#{CREW_PACKAGES_PATH}/#{pkg[:name]}.rb")

    set_package pkg[:name], "#{CREW_PACKAGES_PATH}/#{pkg[:name]}.rb"
    next @pkg.dependencies
  end.flatten

  remove_pkg = @device[:installed_packages].select do |pkg|
    pkg[:is_dep] and !deps_of_installed_pkgs.include?(pkg[:name])
  end.map { |pkg| pkg[:name] }
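  # Illustrative example (hypothetical packages): "libbar" ends up in
  # remove_pkg if it was installed only as a dependency (is_dep: true) and no
  # remaining installed package still lists it as a dependency; an explicitly
  # installed package (is_dep: false) is never selected.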

  return if remove_pkg.empty?

  puts 'The following packages also need to be REMOVED: '
  remove_pkg.each do |pkg|
    print "#{pkg} "
  end
  print "\nDo you agree? [Y/n] "

  response = $stdin.getc
  case response
  when 'n'
    abort 'No changes made.'
  when "\n", 'y', 'Y'
    puts 'Proceeding...'
  else
    puts "I don't understand `#{response}`. :(".lightred
    abort 'No changes made.'
  end

  remove_pkg.each { |pkg| remove(pkg) }
end

def build_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package @opt_verbose
    resolve_dependencies_and_build
  end
end

def const_command(args)
  if args['<name>'].empty?
    const nil
  else
    args['<name>'].each do |name|
      const name
    end
  end
end

def deps_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName

    if args['--tree']
      # call `print_deps_tree` (print dependency tree) if --tree is specified
      print_deps_tree(args)
    else
      # print dependencies according to the install order if --tree is not specified
      puts @pkg.get_deps_list(include_build_deps: (args['--include-build-deps'] || 'auto'), exclude_buildessential: args['--exclude-buildessential'])
    end
  end
end

def download_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package @opt_verbose
    download
  end
end

def files_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package
    files name
  end
end

def help_command(args)
  if args['<command>']
    help args['<command>']
  else
    puts 'Usage: crew help <command>'
    help nil
  end
end

def install_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package true
    @pkg.build_from_source = true if @opt_src || @opt_recursive || (CREW_BUILD_FROM_SOURCE == '1')
    resolve_dependencies_and_install
  end
end

def list_command(args)
  if args['available']
    list_available
  elsif args['installed']
    puts list_installed
  elsif args['compatible']
    list_compatible true
  elsif args['incompatible']
    list_compatible false
  end
end

def postinstall_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName, true
    if @device[:installed_packages].any? { |elem| elem[:name] == @pkgName }
      @pkg.postinstall
    else
      puts "Package #{@pkgName} is not installed. :(".lightred
    end
  end
end

def reinstall_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package
    @pkg.build_from_source = true if @opt_src || @opt_recursive || (CREW_BUILD_FROM_SOURCE == '1')
    next unless @pkgName

    @pkg.in_upgrade = true
    resolve_dependencies_and_install
    @pkg.in_upgrade = false
  end
end

def remove_command(args)
  args['<name>'].each do |name|
    remove name
  end
end

def search_command(args)
  args['<name>'].each do |name|
    regexp_search name
  end.empty? and begin
    list_packages
  end
end

def sysinfo_command(_args)
  lsb_release = if File.exist?('/etc/lsb-release')
                  File.read('/etc/lsb-release').scan(/^(.+?)=(.+)$/).to_h
                else
                  # Newer versions of Chrome OS export this info to the environment by default.
                  ENV
                end
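  # Illustrative (hypothetical) /etc/lsb-release line and its parsed form:
  #   CHROMEOS_RELEASE_TRACK=stable-channel  =>  { 'CHROMEOS_RELEASE_TRACK' => 'stable-channel' }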

  git_commit_message_format = '%h `%s (%cr)`'

  puts <<~MD
    - Architecture: `#{ARCH_ACTUAL}` (`#{ARCH}`)
    - Kernel version: `#{`uname -r`.chomp}`

    - Chromebrew version: `#{CREW_VERSION}`
    - Chromebrew prefix: `#{CREW_PREFIX}`
    - Chromebrew libdir: `#{CREW_LIB_PREFIX}`

    - Last update in local repository: #{`cd '#{CREW_LIB_PATH}'; git show -s --format='#{git_commit_message_format}' '#{CREW_LIB_PATH}'`.chomp}

    - OS variant: `#{lsb_release['CHROMEOS_RELEASE_NAME']}`
    - OS version: `#{lsb_release['CHROMEOS_RELEASE_BUILDER_PATH']}`
    - OS channel: `#{lsb_release['CHROMEOS_RELEASE_TRACK']}`
  MD
end

def update_command(args)
  if args['<compatible>']
    generate_compatible
  else
    update
  end
end

def upgrade_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package
    @pkg.build_from_source = true if @opt_src || (CREW_BUILD_FROM_SOURCE == '1')
    upgrade
  end.empty? and begin
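    # No package names were given: the `each` above iterated over an empty
    # array, so fall through and upgrade every installed package.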
    upgrade
  end
end

def whatprovides_command(args)
  args['<pattern>'].each do |name|
    whatprovides name
  end
end

def is_command(name)
  return !!!name[/^[-<]/]
end
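# Illustrative behaviour (not executed):
#   is_command('install')   #=> true   (a docopt command key)
#   is_command('--verbose') #=> false  (an option)
#   is_command('<name>')    #=> false  (a positional argument)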

trap('INT') do
  abort 'Interrupted.'.lightred
end

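# Dispatch to the matching <command>_command method. Roughly (illustrative),
# `crew install vim` yields a docopt args hash like
#   { 'install' => true, '<name>' => ['vim'], ... }
# so command_name becomes 'install' and install_command(args) is called.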
command_name = args.select { |k, v| v and is_command(k) }.keys[0]
function = "#{command_name}_command"
send(function, args)