Files
chromebrew/bin/crew
Maximilian Downey Twiss 84792a5a5e Rework CREW_TESTING_* variables and crew update process (#8809)
* Rework CREW_TESTING_* variables and crew update process

* Remove unused CREW_GITHUB_* variables
2023-10-20 12:15:11 -04:00

2001 lines
75 KiB
Ruby
Executable File

#!/usr/bin/env ruby
require 'uri'
require 'digest/sha2'
require 'json'
require 'fileutils'
require 'tmpdir'
require_relative '../lib/docopt'
require_relative '../lib/color'
require_relative '../lib/const'
require_relative '../lib/util'
require_relative '../lib/convert_size'
require_relative '../lib/downloader'
require_relative '../lib/deb_utils'
require_relative '../lib/package'
# License text printed verbatim by `crew -L` / `crew --license`
# (handled in the Docopt rescue block further below).
CREW_LICENSE = <<~LICENSESTRING
  Copyright (C) 2013-2022 Chromebrew Authors
  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.
  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.
  You should have received a copy of the GNU General Public License
  along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.html.
  Chromebrew embeds lib/docopt.rb from another project under the MIT License.
  You should have received a copy of the license along with this program.
  If not, see https://github.com/docopt/docopt.rb/blob/master/LICENSE
LICENSESTRING
# Docopt grammar for the crew CLI. Docopt.docopt(DOC) below builds the parser
# from this text, and @cmds is scanned out of the "crew <command>" usage lines.
# CREW_BREW_DIR and CREW_VERSION are interpolated from lib/const.rb.
DOC = <<~DOCOPT
  Chromebrew - Package manager for Chrome OS https://chromebrew.github.io
  Usage:
  crew build [options] [-k|--keep] <name> ...
  crew const [options] [<name> ...]
  crew deps [options] [-t|--tree] [-b|--include-build-deps] [--exclude-buildessential] <name> ...
  crew download [options] <name> ...
  crew files [options] <name> ...
  crew help [<command>] [<subcommand>]
  crew install [options] [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] <name> ...
  crew list [options] (available|installed|compatible|incompatible)
  crew postinstall [options] <name> ...
  crew prop
  crew reinstall [options] [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] <name> ...
  crew remove [options] <name> ...
  crew search [options] [<name> ...]
  crew sysinfo [options]
  crew update [options] [<compatible>]
  crew upgrade [options] [-k|--keep] [-s|--build-from-source] [<name> ...]
  crew whatprovides [options] <pattern> ...
  -b --include-build-deps Include build dependencies in output.
  -t --tree Print dependencies in a tree-structure format.
  -c --color Use colors even if standard out is not a tty.
  -d --no-color Disable colors even if standard out is a tty.
  -k --keep Keep the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory.
  -L --license Display the crew license.
  -s --build-from-source Build from source even if pre-compiled binary exists.
  -S --recursive-build Build from source, including all dependencies, even if pre-compiled binaries exist.
  -v --verbose Show extra information.
  -V --version Display the crew version.
  -h --help Show this screen.
  version #{CREW_VERSION}
DOCOPT
# All available crew commands.
# Every token that follows "crew " in the usage section of DOC.
@cmds = DOC.scan(/crew ([^\s]+)/).flatten
# Disallow sudo
abort 'Chromebrew should not be run as root.'.lightred if Process.uid.zero?
# Add lib to LOAD_PATH
$LOAD_PATH << File.join(CREW_LIB_PATH, 'lib')
# Parse arguments using docopt
begin
  args = Docopt.docopt(DOC)
  # Package names use underscores internally; accept dashes on the CLI.
  args['<name>']&.map! { |arg| arg.tr('-', '_') }
rescue Docopt::Exit => e
  # Docopt raises on -V/-L/unknown input; handle version/license here and
  # suggest near-miss commands (edit distance < 4) for everything else.
  if ARGV[0]
    case ARGV[0]
    when '-V', '--version', 'version'
      puts CREW_VERSION
      exit 0
    when '-L', '--license', 'license'
      puts CREW_LICENSE
      exit 0
    end
    unless %w[-h --help].include?(ARGV[0])
      puts "Could not understand \"crew #{ARGV.join(' ')}\".".lightred
      # Looking for similar commands
      unless @cmds.include?(ARGV[0])
        similar = @cmds.select { |word| edit_distance(ARGV[0], word) < 4 }
        unless similar.empty?
          abort <<~EOT
            Did you mean?
              #{similar.join("\n  ")}
          EOT
        end
      end
    end
  end
  abort e.message
end
# override default color options if specified
String.use_color = args['--color'] || !args['--no-color']
# Cache frequently-checked CLI flags.
@opt_keep = args['--keep']
@opt_verbose = args['--verbose']
@opt_src = args['--build-from-source']
@opt_recursive = args['--recursive-build']
# Verbose options
@fileutils_verbose = @opt_verbose
@verbose = @opt_verbose ? 'v' : ''
@short_verbose = @opt_verbose ? '-v' : ''
def load_json
  # (Re)read the device configuration from CREW_CONFIG_PATH/device.json into
  # @device, with all hash keys symbolized.
  @device = JSON.load_file(File.join(CREW_CONFIG_PATH, 'device.json'), symbolize_names: true)
  # String values become symbols too, matching how crew compares them elsewhere.
  @device.transform_values! { |value| value.is_a?(String) ? value.to_sym : value }
end
def print_package(pkgPath, extra = false)
  # Load the package file at pkgPath and print its one-line summary
  # (or the extended summary when extra is true).
  name = File.basename pkgPath, '.rb'
  begin
    set_package name, pkgPath
  rescue StandardError => e
    # "uninitialized constant" errors are expected noise; report anything else.
    warn "Error with #{name}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
  end
  print_current_package extra
end
def print_current_package(extra = false)
  # Print @pkg's name colored by status (green=installed, red=incompatible,
  # blue=available), its description, and optionally homepage/version/license.
  colored_name =
    if @device[:installed_packages].any? { |entry| entry[:name] == @pkg.name }
      @pkg.name.lightgreen
    elsif @pkg.compatible?
      @pkg.name.lightblue
    else
      @pkg.name.lightred
    end
  print colored_name
  print ": #{@pkg.description}".lightblue if @pkg.description
  if extra
    puts ''
    puts @pkg.homepage if @pkg.homepage
    puts "Version: #{@pkg.version}"
    print "License: #{@pkg.license}" if @pkg.license
  end
  puts ''
end
def set_package(pkgName, pkgPath)
  # Load the package definition at pkgPath into @pkg.
  begin
    @pkg = Package.load_package(pkgPath, pkgName)
  rescue SyntaxError => e
    # A syntax error in a package file is reported but not fatal.
    # NOTE(review): @pkg then keeps its previous value (or nil on first call),
    # so the assignment below may act on a stale package object — confirm.
    warn "#{e.class}: #{e.message}".red
  end
  # -S/--recursive-build forces a source build for every loaded package.
  @pkg.build_from_source = true if @opt_recursive
end
def list_packages
  # Print a one-line summary for every package file in the repository.
  Dir[File.join(CREW_PACKAGES_PATH, '*.rb')].each { |path| print_package path }
end
def list_available
  # Print the names of all compatible packages that are not yet installed.
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |path|
    name = File.basename(path, '.rb')
    # An existing filelist under CREW_META_PATH means the package is installed.
    next if File.file?(File.join(CREW_META_PATH, "#{name}.filelist"))
    begin
      set_package name, path
    rescue StandardError => e
      puts "Error with #{name}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
    end
    puts name if @pkg.compatible?
  end
end
def list_installed
  # Verbose: print a sorted two-column "Package Version" table of installed
  # packages. Non-verbose: build an array of green-colored installed names.
  # NOTE(review): the non-verbose branch only returns the array; presumably
  # the caller prints the return value — confirm.
  if @opt_verbose
    @installed_packages = @device[:installed_packages].map do |package|
      search package[:name], true
      "#{package[:name]} #{package[:version]}"
    end
    @sorted_installed_packages = @installed_packages.sort.unshift('======= =======').unshift('Package Version')
    # Pad the first column to the widest package name plus two spaces.
    @first_col_width = @sorted_installed_packages.map { |row| row.split.first.size }.max + 2
    @sorted_installed_packages.each do |row|
      puts "%-#{@first_col_width}s%s".lightgreen % row.strip.split
    end
    print "\n"
  else
    Dir["#{CREW_META_PATH}*.directorylist"].map do |f|
      File.basename(f, '.directorylist').lightgreen
    end
  end
end
def list_compatible(compat = true)
  # compat=true: print compatible packages (installed ones in green).
  # compat=false: print incompatible packages in red.
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |path|
    name = File.basename path, '.rb'
    if @device[:compatible_packages].any? { |entry| entry[:name] == name }
      next unless compat
      installed = File.file? "#{CREW_META_PATH}#{name}.filelist"
      puts(installed ? name.lightgreen : name)
    elsif !compat
      puts name.lightred
    end
  end
end
def generate_compatible
  # Rebuild @device[:compatible_packages] by probing every package file for
  # compatibility with this device, then persist @device to device.json.
  puts 'Generating compatible packages...'.orange if @opt_verbose
  @device[:compatible_packages] = []
  Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |filename|
    pkgName = File.basename filename, '.rb'
    begin
      set_package pkgName, filename
    rescue StandardError => e
      puts "Error with #{pkgName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
    end
    puts "Checking #{pkgName} for compatibility.".orange if @opt_verbose
    # If the compatibility property is unset, fall back to checking for a
    # prebuilt binary and, failing that, for any source url.
    @compatibility = true
    @binary_url = ''
    @url = ''
    if @pkg.compatibility.nil?
      @binary_url = @pkg.get_binary_url(@device[:architecture])
      @url = @pkg.get_url(@device[:architecture])
      unless @binary_url
        puts "#{pkgName} is missing compatibility information".red
        # With neither a binary nor a source url, the package is incompatible.
        @compatibility = false unless @url
        puts "#{pkgName} compatibility is #{@compatibility}" if @opt_verbose
      end
    end
    if @pkg.compatible? && @compatibility
      # add to compatible packages
      puts "Adding #{pkgName} #{@pkg.version} to compatible packages.".lightgreen if @opt_verbose
      @device[:compatible_packages].push(name: @pkg.name)
    elsif @opt_verbose
      puts "#{pkgName} is not a compatible package.".lightred
    end
  end
  # Serialize @device directly: the former JSON.parse(@device.to_json) round
  # trip produced identical output and was unnecessary work.
  File.write("#{CREW_CONFIG_PATH}device.json", JSON.pretty_generate(@device))
  puts 'Generating compatible packages done.'.orange if @opt_verbose
end
def search(pkgName, silent = false)
  # Locate pkgName's package file and load it into @pkg via set_package.
  # With silent=true a missing package leaves @pkg nil and returns quietly;
  # otherwise a missing package aborts crew with an error.
  pkgPath = File.join(CREW_PACKAGES_PATH, "#{pkgName}.rb")
  begin
    return set_package(pkgName, pkgPath) if File.file?(pkgPath)
  rescue StandardError => e
    puts "Error with #{pkgName}.rb: #{e}".lightred unless e.to_s.include?('uninitialized constant')
  end
  # NOTE(review): this guard is also reached when the file exists but
  # set_package raised (non-silent), in which case the "not found" abort
  # message is misleading — confirm intent.
  unless File.file?(pkgPath) && silent
    @pkg = nil
    abort "Package #{pkgName} not found. 😞".lightred unless silent
    return
  end
end
def regexp_search(pkgPat)
  # Case-insensitively match package file names against pkgPat and print each
  # match; if no name matches, fall back to searching package descriptions.
  # Aborts when nothing matches at all.
  re = Regexp.new(pkgPat, true)
  results = Dir["#{CREW_PACKAGES_PATH}*.rb"] \
            .select { |f| File.basename(f, '.rb') =~ re } \
            .each { |f| print_package(f, @opt_verbose) }
  if results.empty?
    Dir["#{CREW_PACKAGES_PATH}*.rb"].each do |packagePath|
      packageName = File.basename packagePath, '.rb'
      begin
        set_package packageName, packagePath
      rescue StandardError => e
        # Fix: this previously referenced the undefined local `pkgName`, which
        # raised NameError whenever an error needed to be reported here.
        puts "Error with #{packageName}.rb: #{e}".red unless e.to_s.include?('uninitialized constant')
      end
      # &. guards the case where a failed load left @pkg unset.
      if @pkg&.description =~ /#{pkgPat}/i
        print_current_package @opt_verbose
        results.push(packageName)
      end
    end
  end
  abort "Package #{pkgPat} not found. :(".lightred if results.empty?
end
def help(pkgName = nil)
  # Print detailed usage for one crew command; an unknown or missing command
  # prints the full command list. `crew help prop <property>` documents a
  # single package property (validated against prop(true)).
  case pkgName
  when 'build'
    puts <<~EOT
      Build package(s).
      Usage: crew build [-k|--keep] [-v|--verbose] <package1> [<package2> ...]
      Build package(s) from source and place the archive and checksum in the current working directory.
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'const'
    puts <<~EOT
      Display constant(s).
      Usage: crew const [<const1> <const2> ...]
      If no constants are provided, all constants will be displayed.
    EOT
  when 'deps'
    # Fix: third line previously began with "It" instead of "If".
    puts <<~EOT
      Display dependencies of package(s).
      Usage: crew deps [-t|--tree] [-b|--include-build-deps] [--exclude-buildessential] <package1> [<package2> ...]
      If `-t` or `--tree` specified, dependencies will be printed in a tree-structure format
      If `-b` or `--include-build-deps` specified, build dependencies will be included in output
      If `--exclude-buildessential` specified, `buildessential` and its dependencies will not be inserted automatically
    EOT
  when 'download'
    puts <<~EOT
      Download package(s).
      Usage: crew download [-v|--verbose] <package1> [<package2> ...]
      Download package(s) to `CREW_BREW_DIR` (#{CREW_BREW_DIR}), but don't install.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'files'
    puts <<~EOT
      Display installed files of package(s).
      Usage: crew files <package1> [<package2> ...]
      The package(s) must be currently installed.
    EOT
  when 'install'
    puts <<~EOT
      Install package(s).
      Usage: crew install [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] [-v|--verbose] <package1> [<package2> ...]
      The package(s) must have a valid name. Use `crew search <pattern>` to search for packages to install.
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of installed via binary.
      If `-S` or `--recursive-build` is present, the package(s), including all dependencies, will be compiled instead of installed via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'list'
    puts <<~EOT
      List packages
      Usage: crew list [-v|--verbose] available|installed|compatible|incompatible
    EOT
  when 'postinstall'
    puts <<~EOT
      Display postinstall messages of package(s).
      Usage: crew postinstall <package1> [<package2> ...]
      The package(s) must be currently installed.
    EOT
  when 'prop'
    if ARGV[2]
      # Reject anything that is not a known package property.
      abort 'Invalid property.'.lightred unless prop(true).include?(ARGV[2])
      case ARGV[2]
      when 'conflicts_ok'
        puts "The 'conflicts_ok' property bypasses checks for other package file conflicts."
      when 'git_clone_deep'
        puts "The 'git_clone_deep' property clones the repository without a depth value."
        puts "Applicable only when 'source_url' is a git repository."
      when 'git_fetchtags'
        puts "The 'git_fetchtags' property gets the repository tags."
        puts "Applicable only when 'source_url' is a git repository."
      when 'is_fake'
        puts "Use the 'is_fake' property for packages that simply depend on other packages."
      when 'is_musl'
        puts "Use the 'is_musl' property for musl specific packages."
      when 'is_static'
        puts "Use the 'is_static' property for packages which do not require shared dependencies."
      when 'no_compile_needed'
        puts "Use the 'no_compile_needed' property for packages that do not require pre-built binaries."
      when 'no_compress'
        puts "Use the 'no_compress' property for packages that do not need compressed files."
      when 'no_env_options'
        puts "Use the 'no_env_options' property for packages that do not require"
        puts 'environment options or to override the default options.'
      when 'no_fhs'
        puts "The 'no_fhs' property bypasses FHS3 compliance checks."
      when 'no_git_submodules'
        puts "Use the 'no_git_submodules' property for repositories without git submodules."
        puts "Applicable only when 'source_url' is a git repository."
      when 'no_links'
        puts "Use the 'no_links' property to bypass checks for duplicate links."
      when 'no_patchelf'
        puts "Use the 'no_patchelf' property to bypass patchelf execution."
      when 'no_shrink'
        puts "Use the 'no_shrink' property to bypass upx binary compression."
      when 'no_strip'
        puts "Use the 'no_strip' property to bypass strip execution."
      when 'no_lto'
        puts "Use the 'no_lto' property to bypass lto usage."
      when 'no_zstd'
        puts "Use the 'no_zstd' property for the alternate xz compression algorithm."
      when 'patchelf'
        puts "Use the 'patchelf' property for patchelf execution."
      when 'run_tests'
        puts "Use the 'run_tests' property to execute make check tests."
      else
        abort "Help for the '#{ARGV[2]}' property is not available.".lightred
      end
    else
      puts <<~EOT
        Display all available package boolean properties.
        Usage: crew prop
      EOT
    end
  when 'reinstall'
    puts <<~EOT
      Remove and install package(s).
      Usage: crew reinstall [-k|--keep] [-s|--build-from-source] [-S|--recursive-build] [-v|--verbose] <package1> [<package2> ...]
      If `-k` or `--keep` is present, the `CREW_BREW_DIR` (#{CREW_BREW_DIR}) directory will remain.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of installed via binary.
      If `-S` or `--recursive-build` is present, the package(s), including all dependencies, will be compiled instead of installed via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'remove'
    puts <<~EOT
      Remove package(s).
      Usage: crew remove [-v|--verbose] <package1> [<package2> ...]
      The package(s) must be currently installed.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'search'
    # Fix: these lines previously contained literal `" + "green".lightgreen + "`
    # concatenation fragments inside the heredoc (leftovers from a conversion
    # to heredoc form); interpolation now emits the intended colored text.
    puts <<~EOT
      Look for package(s).
      Usage: crew search [-v|--verbose] [<pattern> ...]
      If <pattern> is omitted, all packages will be returned.
      If the package color is #{'green'.lightgreen}, it means the package is installed.
      If the package color is #{'red'.lightred}, it means the architecture is not supported.
      The <pattern> string can also contain regular expressions.
      If `-v` or `--verbose` is present, homepage, version and license will be displayed.
      Examples:
        #{'crew search ^lib'.lightblue} will display all packages that start with `lib`.
        #{'crew search audio'.lightblue} will display all packages with `audio` in the name.
        #{'crew search | grep -i audio'.lightblue} will display all packages with `audio` in the name or description.
        #{'crew search git -v'.lightblue} will display packages with `git` in the name along with homepage, version and license.
    EOT
  when 'sysinfo'
    puts <<~EOT
      Show system information.
      Usage: crew sysinfo
      If `-v` or `--verbose` is present, show system information with raw markdown.
    EOT
  when 'update'
    puts <<~EOT
      Update crew.
      Usage: crew update
      This only updates crew itself. Use `crew upgrade` to update packages.
      Usage: crew update compatible
      This updates the crew package compatibility list.
    EOT
  when 'upgrade'
    puts <<~EOT
      Update package(s).
      Usage: crew upgrade [-v|--verbose] [-s|--build-from-source] <package1> [<package2> ...]
      If package(s) are omitted, all packages will be updated. Otherwise, specific package(s) will be updated.
      Use `crew update` to update crew itself.
      If `-s` or `--build-from-source` is present, the package(s) will be compiled instead of upgraded via binary.
      If `-v` or `--verbose` is present, extra information will be displayed.
    EOT
  when 'whatprovides'
    puts <<~EOT
      Determine which package(s) contains file(s).
      Usage: crew whatprovides <pattern> ...
      The <pattern> is a search string which can contain regular expressions.
    EOT
  else
    puts "Available commands: #{@cmds.join(', ')}"
  end
end
def cache_build
  # Archive the current build directory into CREW_CACHE_DIR as a zstd tarball
  # plus a .sha256 checksum file, so later builds can be restored from cache.
  @build_cachefile = "#{CREW_CACHE_DIR}#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst"
  if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
    puts 'Caching build dir...'
    @pkg_build_dirname_absolute = File.join(CREW_BREW_DIR, @extract_dir)
    @pkg_build_dirname = File.basename(@pkg_build_dirname_absolute)
    Dir.chdir @pkg_build_dirname_absolute do
      # Do not use --exclude-vcs w/ tar to exclude .git
      # because some builds will use that information.
      # Backup build cachefile it if exists.
      FileUtils.mv @build_cachefile, "#{@build_cachefile}.bak", force: true if File.file?(@build_cachefile)
      FileUtils.mv "#{@build_cachefile}.sha256", "#{@build_cachefile}.sha256.bak", force: true if File.file?("#{@build_cachefile}.sha256")
      # Tar from CREW_BREW_DIR so the archive contains the top-level build dir.
      Dir.chdir(CREW_BREW_DIR) do
        system "tar c#{@verbose} #{@pkg_build_dirname} \
| nice -n 20 #{CREW_PREFIX}/bin/zstd -c --ultra --fast -f -o #{@build_cachefile} -"
      end
    end
    system "sha256sum #{@build_cachefile} > #{@build_cachefile}.sha256"
    puts "Build directory cached at #{@build_cachefile}".lightgreen
  else
    puts 'CREW_CACHE_ENABLED is not set.'.orange unless CREW_CACHE_ENABLED
    puts 'CREW_CACHE_DIR is not writable.'.lightred unless File.writable?(CREW_CACHE_DIR)
  end
end
def const(var = nil)
  # With an argument, print that one constant as NAME=value; otherwise print
  # every constant defined by lib/const.rb, sorted by name.
  if var
    puts "#{var}=#{Object.const_get(var.to_sym)}"
  else
    const_rb = File.join(CREW_LIB_PATH, 'lib/const.rb')
    Object.constants
          .select { |name| Object.const_source_location(name)[0] == const_rb }
          .sort
          .each { |name| puts "#{name}=#{Object.const_get(name)}" }
  end
end
def files(pkgName)
  # Print the sorted file list of pkgName plus a file count and disk usage.
  # Prefers the locally-installed filelist, falling back to the repo manifest;
  # returns false when neither exists.
  local_filelist = File.join(CREW_META_PATH, "#{pkgName}.filelist")
  manifest_filelist = File.join(CREW_LIB_PATH, "manifest/#{USER_SPACE_ARCH}/#{pkgName[0]}/#{pkgName}.filelist")
  filelist_path =
    if File.exist?(local_filelist)
      local_filelist
    elsif File.exist?(manifest_filelist)
      manifest_filelist
    end
  unless filelist_path
    warn "Package #{pkgName} is not installed. :(".lightred
    return false
  end
  filelist = File.readlines(filelist_path, chomp: true).sort
  # Symlinks are skipped so file sizes are not counted twice.
  total_size = filelist.sum do |entry|
    File.file?(entry) && !File.symlink?(entry) ? File.size(entry) : 0
  end
  puts filelist, <<~EOT.lightgreen
    Total found: #{filelist.size}
    Disk usage: #{human_size(total_size)}
  EOT
end
def prop(silent = false)
  # Collect the names (without the trailing '?') of Package's boolean property
  # methods, excluding generic Object/Module predicates that are not real
  # package properties. Returns the list when silent, otherwise prints it.
  excluded_methods = %w[compatible is_binary is_source json_creatable autoload include const_defined class_variable_defined singleton_class method_defined public_method_defined private_method_defined protected_method_defined instance_variable_defined instance_of kind_of is_a frozen nil eql respond_to equal]
  pkg = Package.new
  # Map each predicate method symbol to its bare name. This replaces the old
  # Array#to_s/gsub/split round trip, which produced the same list far less
  # directly.
  props = pkg.class.methods.grep(/\?$/)
               .map { |m| m.to_s.delete('?') }
               .reject { |m| excluded_methods.include?(m) }
  if silent
    return props
  else
    puts props.sort
    puts "For more information, type 'crew help prop <property>' where <property> is one of the above properties.".lightblue
  end
end
def whatprovides(regexPat)
  # Search every manifest filelist for files matching regexPat and print each
  # match as "<package>: <file>", coloring the package name by status
  # (green=installed, red=not in the compatible list).
  # The argv-array form of IO.popen passes regexPat straight to grep without
  # shell interpretation, so patterns containing quotes/metacharacters cannot
  # inject shell commands (the old backtick form interpolated user input).
  grep_output = IO.popen(['grep', '-R', regexPat, "#{CREW_LIB_PATH}/manifest/#{USER_SPACE_ARCH}"], &:read)
  matchedList = grep_output.lines(chomp: true).flat_map do |result|
    filelist, matchedFile = result.split(':', 2)
    pkgName = File.basename(filelist, '.filelist')
    pkgNameStatus = pkgName
    if @device[:compatible_packages].any? { |elem| elem[:name] == pkgName }
      # A local filelist means the package is installed.
      pkgNameStatus = pkgName.lightgreen if File.file? "#{CREW_META_PATH}/#{pkgName}.filelist"
    else
      pkgNameStatus = pkgName.lightred
    end
    "#{pkgNameStatus}: #{matchedFile}"
  end.sort
  puts matchedList, "\nTotal found: #{matchedList.length}".lightgreen if matchedList.any?
end
def update
  # Update crew itself: refresh the git sparse checkout and hard-reset to the
  # remote branch, run post-update fixups, regenerate the compatible-package
  # list, then report which installed packages have updates available.
  abort "'crew update' is used to update crew itself. Use 'crew upgrade <package1> [<package2> ...]' to update specific packages.".orange if @pkgName
  # update package lists
  Dir.chdir(CREW_LIB_PATH) do
    # Set sparse-checkout folders.
    system "git sparse-checkout set packages manifest/#{USER_SPACE_ARCH} lib bin crew tools"
    system 'git sparse-checkout reapply'
    system "git fetch #{CREW_REPO} #{CREW_BRANCH}"
    # Discard any local changes in favor of the fetched branch.
    system 'git reset --hard FETCH_HEAD'
  end
  puts 'Package lists, crew, and library updated.'
  # Do any fixups necessary after crew has updated from git.
  load "#{CREW_LOCAL_REPO_ROOT}/lib/fixup.rb"
  # update compatible packages
  generate_compatible
  # check for outdated installed packages
  puts 'Checking for package updates...'
  canBeUpdated = 0
  @device[:installed_packages].each do |package|
    search package[:name], true
    unless @pkg
      puts "Package file for #{package[:name]} not found. :(".lightred if @opt_verbose
      next
    end
    # A changed binary sha256 means the same version was rebuilt upstream.
    if package[:binary_sha256] && package[:binary_sha256] != @pkg.get_binary_sha256(@device[:architecture])
      canBeUpdated += 1
      puts "#{@pkg.name} could be updated (rebuild)"
    end
    if package[:version] != @pkg.version
      canBeUpdated += 1
      puts "#{@pkg.name} could be updated from #{package[:version]} to #{@pkg.version}"
    end
  end
  if canBeUpdated.positive?
    puts "\nRun `crew upgrade` to update all packages or `crew upgrade <package1> [<package2> ...]` to update specific packages."
  else
    puts 'Your software is up to date.'.lightgreen
  end
end
def upgrade(*pkgs, build_from_source: false)
  # Upgrade the named packages, or every installed package when none given.
  # check_update_avail: does the installed copy of pkgFile's package differ
  # from the repo copy (by binary sha256 when available, else by version)?
  check_update_avail = lambda do |pkgFile|
    pkgName = File.basename(pkgFile, '.rb')
    unless File.file?(pkgFile)
      warn "Package file for installed package #{pkgName} is missing.".lightred
      return false
    end
    # NOTE(review): the block variable of this loop is unused, so the same
    # installed-check runs once per requested package; presumably only the
    # pkgName check matters — confirm.
    pkgs.each do
      unless @device[:installed_packages].any? { |package| package[:name] == pkgName }
        puts 'Package '.lightred + pkgName.orange + ' is not installed. 😔 You may try this: '.lightred + "crew install #{pkgName}".lightblue
        return false
      end
    end
    pkgVer_latest = Package.load_package(pkgFile, pkgName).version
    pkgVer_installed = @device[:installed_packages].select { |pkg| pkg[:name] == pkgName } [0][:version]
    pkgHash_latest = Package.load_package(pkgFile, pkgName).get_binary_sha256(@device[:architecture])
    pkgHash_installed = @device[:installed_packages].select { |pkg| pkg[:name] == pkgName } [0][:binary_sha256]
    # Prefer the binary-hash comparison when both sides provide one;
    # otherwise fall back to comparing versions.
    return pkgHash_latest != pkgHash_installed unless !pkgHash_installed || pkgHash_latest == ''
    return pkgVer_latest != pkgVer_installed
  end
  to_be_upgraded = []
  if pkgs.any?
    # check for specific package(s)
    pkgs.each do |pkgName|
      pkgFile = File.join(CREW_PACKAGES_PATH, "#{pkgName}.rb")
      to_be_upgraded << pkgName if check_update_avail.call(pkgFile)
    end
  else
    # check for all packages if no package name provided
    @device[:installed_packages].each do |pkg|
      pkgFile = File.join(CREW_PACKAGES_PATH, "#{pkg[:name]}.rb")
      to_be_upgraded << pkg[:name] if check_update_avail.call(pkgFile)
    end
  end
  if to_be_upgraded.empty?
    puts 'Your software is already up to date.'.lightgreen
    return true
  end
  # Eventually, we should have the upgrade order generated based upon an
  # analysis of the dependency hierarchy, to make sure that earlier
  # dependencies get upgraded first.
  # Upgrade OpenSSL first if OpenSSL is in the upgrade list, as other
  # package upgrades, especially their postinstalls, may break until the
  # new version of OpenSSL is installed.
  to_be_upgraded.insert(0, to_be_upgraded.delete('openssl')) if to_be_upgraded.include?('openssl')
  # Only upgrade ruby if ruby is in the upgrade list, as other
  # package upgrades may break until crew is rerun with the new
  # version of ruby.
  if to_be_upgraded.include?('ruby')
    to_be_upgraded = ['ruby']
    @rerun_upgrade = true
  end
  # install new dependencies (if any)
  to_be_upgraded.each do |pkgName|
    search(pkgName)
    resolve_dependencies
  end
  puts 'Updating packages...'
  # upgrade packages
  to_be_upgraded.each do |pkgName|
    search(pkgName)
    print_current_package
    @pkg.build_from_source = (build_from_source or CREW_BUILD_FROM_SOURCE.eql?(1))
    puts "Updating #{@pkg.name}..." if @opt_verbose
    @pkg.in_upgrade = true
    resolve_dependencies_and_install
  end
  puts 'Packages have been updated.'.lightgreen unless @rerun_upgrade
  puts "Ruby was upgraded. Please run 'crew upgrade' again to make sure upgrades are complete.".lightblue if @rerun_upgrade
end
def download
  # Fetch the binary or source archive (or git repository) for @pkg into
  # CREW_BREW_DIR, using and refreshing CREW_CACHE_DIR when caching is
  # enabled. Returns { source:, filename: }.
  # NOTE(review): several strings below contain the literal text "#(unknown)".
  # This looks like an extraction/garbling artifact where an interpolation
  # (presumably "#{filename}" or "#{@pkg.name}") was lost — confirm against
  # the upstream repository before relying on these paths.
  url = @pkg.get_url(@device[:architecture])
  source = @pkg.is_source?(@device[:architecture])
  uri = URI.parse url
  filename = File.basename(uri.path)
  sha256sum = @pkg.get_sha256(@device[:architecture])
  @extract_dir = @pkg.get_extract_dir
  @build_cachefile = "#{CREW_CACHE_DIR}#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst"
  # A cached build directory makes any download unnecessary.
  return { source:, filename: } if CREW_CACHE_BUILD && File.file?(@build_cachefile)
  if !url
    abort "No precompiled binary or source is available for #{@device[:architecture]}.".lightred
  elsif !source
    puts 'Precompiled binary available, downloading...'
  elsif @pkg.build_from_source
    puts 'Downloading source...'
  elsif url.casecmp?('SKIP')
    puts 'Skipping source download...'
  else
    puts 'No precompiled binary available for your platform, downloading source...'
  end
  @git = uri.scheme.eql?('git')
  Dir.chdir CREW_BREW_DIR do
    case File.basename(filename)
    # Sources that download with our internal downloader
    when /\.zip$/i, /\.(tar(\.(gz|bz2|xz|lzma|lz|zst))?|tgz|tbz|tpxz|txz)$/i, /\.deb$/i, /\.AppImage$/i
      # Recall file from cache if requested
      if CREW_CACHE_ENABLED
        puts "Looking for #{@pkg.name} archive in cache".orange if @opt_verbose
        cachefile = File.join(CREW_CACHE_DIR, filename)
        if File.file?(cachefile)
          puts "#{@pkg.name.capitalize} archive file exists in cache".lightgreen if @opt_verbose
          # Accept the cached archive when its checksum matches (or checksums
          # are skipped for this package).
          if Digest::SHA256.hexdigest(File.read(cachefile)) == sha256sum || sha256sum =~ /^SKIP$/i
            begin
              # Hard link cached file if possible.
              FileUtils.ln cachefile, CREW_BREW_DIR, force: true, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#(unknown)")
              puts 'Archive hard linked from cache'.green if @opt_verbose
            rescue StandardError
              # Copy cached file if hard link fails.
              FileUtils.cp cachefile, CREW_BREW_DIR, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#(unknown)")
              puts 'Archive copied from cache'.green if @opt_verbose
            end
            puts 'Archive found in cache'.lightgreen
            return { source:, filename: }
          else
            puts 'Cached archive checksum mismatch. 😔 Will download.'.lightred
            cachefile = ''
          end
        else
          puts 'Cannot find cached archive. 😔 Will download.'.lightred
          cachefile = ''
        end
      end
      # Download file if not cached.
      downloader url, sha256sum, filename, @opt_verbose
      puts "#{@pkg.name.capitalize} archive downloaded.".lightgreen
      # Stow file in cache if requested, if file is not from cache,
      # and cache is writable.
      if CREW_CACHE_ENABLED && cachefile.to_s.empty? && File.writable?(CREW_CACHE_DIR)
        begin
          # Hard link to cache if possible.
          FileUtils.ln filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
          puts 'Archive hard linked to cache'.green if @opt_verbose
        rescue StandardError
          # Copy to cache if hard link fails.
          FileUtils.cp filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
          puts 'Archive copied to cache'.green if @opt_verbose
        end
      end
      return { source:, filename: }
    when /^SKIP$/i
      # Nothing to download; just create the (empty) extraction directory.
      Dir.mkdir @extract_dir
    when /\.git$/i # Source URLs which end with .git are git sources.
      @git = true
    else
      # Single-file (non-archive) download: place it inside @extract_dir.
      Dir.mkdir @extract_dir
      downloader url, sha256sum, filename, @opt_verbose
      puts "#(unknown): File downloaded.".lightgreen
      FileUtils.mv filename, "#{@extract_dir}/#(unknown)"
    end
    # Handle git sources.
    if @git
      # Recall repository from cache if requested
      if CREW_CACHE_ENABLED
        # No git branch specified, just a git commit or tag
        if @pkg.git_branch.to_s.empty?
          abort('No Git branch, commit, or tag specified!').lightred if @pkg.git_hashtag.to_s.empty?
          cachefile = "#{CREW_CACHE_DIR}#(unknown)#{@pkg.git_hashtag.gsub('/', '_')}.tar.zst"
        # Git branch and git commit specified
        elsif !@pkg.git_hashtag.to_s.empty?
          cachefile = "#{CREW_CACHE_DIR}#(unknown)#{@pkg.git_branch.gsub(/[^0-9A-Za-z.-]/, '_')}_#{@pkg.git_hashtag.gsub('/', '_')}.tar.zst"
        # Git branch specified, without a specific git commit.
        else
          # Use to the day granularity for a branch timestamp with no specific commit specified.
          cachefile = "#{CREW_CACHE_DIR}#(unknown)#{@pkg.git_branch.gsub(/[^0-9A-Za-z.-]/, '_')}#{Time.now.strftime('%m%d%Y')}.tar.zst"
        end
        puts "Git cachefile is #{cachefile}".orange if @opt_verbose
        if File.file?(cachefile) && File.file?("#{cachefile}.sha256")
          if system "cd #{CREW_CACHE_DIR} && sha256sum -c #{cachefile}.sha256"
            FileUtils.mkdir_p @extract_dir
            system "tar -Izstd -x#{@verbose}f #{cachefile} -C #{@extract_dir}"
            return { source:, filename: }
          else
            puts 'Cached git repository checksum mismatch. 😔 Will download.'.lightred
          end
        else
          puts 'Cannot find cached git repository. 😔 Will download.'.lightred
        end
      end
      # Download via git
      Dir.mkdir @extract_dir
      Dir.chdir @extract_dir do
        if @pkg.git_branch.to_s.empty?
          # Shallow fetch of a single commit or tag.
          system 'git init'
          system 'git config advice.detachedHead false'
          system 'git config init.defaultBranch master'
          system "git remote add origin #{@pkg.source_url}", exception: true
          system "git fetch --depth 1 origin #{@pkg.git_hashtag}", exception: true
          system 'git checkout FETCH_HEAD'
        else
          # Leave a message because this step can be slow.
          puts 'Downloading src from a git branch. This may take a while...'
          system "git clone --branch #{@pkg.git_branch} --single-branch #{@pkg.source_url} tmpdir", exception: true
          system 'mv tmpdir/.git . && rm -rf tmpdir'
          system "git reset --hard #{@pkg.git_hashtag}", exception: true
        end
        system 'git submodule update --init --recursive' unless @pkg.no_git_submodules?
        system 'git fetch --tags', exception: true if @pkg.git_fetchtags?
        system "git fetch origin #{@pkg.git_hashtag}", exception: true if @pkg.git_clone_deep?
        puts 'Repository downloaded.'.lightgreen
      end
      # Stow file in cache if requested and cache is writable.
      if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
        puts 'Caching downloaded git repo...'
        Dir.chdir @extract_dir do
          # Do not use --exclude-vcs to exclude .git
          # because some builds will use that information.
          system "tar c#{@verbose} \
$(find -mindepth 1 -maxdepth 1 -printf '%P\n') | \
nice -n 20 #{CREW_PREFIX}/bin/zstd -c -T0 --ultra -20 - > \
#{cachefile}"
        end
        system 'sha256sum', cachefile, out: "#{cachefile}.sha256"
        puts 'Git repo cached.'.lightgreen
      end
    end
  end
  return { source:, filename: }
end
def unpack(meta)
  # Extract the downloaded archive described by +meta+ (keys :source and
  # :filename) under CREW_BREW_DIR and return the absolute path of the
  # directory containing the unpacked payload.
  target_dir = nil
  Dir.chdir CREW_BREW_DIR do
    FileUtils.mkdir_p @extract_dir, verbose: @fileutils_verbose
    @build_cachefile = "#{CREW_CACHE_DIR}#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst"
    # Reuse a cached build tree when build caching is enabled and the
    # cached tarball passes its sha256 check.
    if CREW_CACHE_BUILD && File.file?(@build_cachefile) && File.file?("#{@build_cachefile}.sha256") && ( system "cd #{CREW_CACHE_DIR} && sha256sum -c #{@build_cachefile}.sha256" )
      @pkg.cached_build = true
      puts "Extracting cached build directory from #{@build_cachefile}".lightgreen
      system "tar -Izstd -x#{@verbose}f #{@build_cachefile} -C #{CREW_BREW_DIR}", exception: true
      # Need to reset @extract_dir to the extracted cached build
      # directory.
      @extract_dir = `tar -Izstd --exclude='./*/*' -tf #{@build_cachefile} | cut -d '/' -f 1 | sort -u`.chomp
    else
      @pkg.cached_build = false
      # Choose the extraction tool from the archive's filename extension.
      # NOTE: '.tar.zst' does not match the generic tar regex below, so it
      # falls through to its own dedicated case.
      case File.basename meta[:filename]
      when /\.zip$/i
        puts "Unpacking archive using 'unzip', this may take a while..."
        _verbopt = @opt_verbose ? '-v' : '-qq'
        system 'unzip', _verbopt, '-d', @extract_dir, meta[:filename], exception: true
      when /\.(tar(\.(gz|bz2|xz|lzma|lz))?|tgz|tbz|txz)$/i
        puts "Unpacking archive using 'tar', this may take a while..."
        system "tar x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
      when /\.tar\.zst$/i
        puts "Unpacking archive using 'tar', this may take a while..."
        system "tar -Izstd -x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
      when /\.deb$/i
        puts "Unpacking '.deb' archive, this may take a while..."
        # Pull the data.* member out of the deb, then untar it.
        DebUtils.extract_deb(meta[:filename], /data\..*/)
        system "tar x#{@verbose}f data.* -C #{@extract_dir}", exception: true
      when /\.AppImage$/i
        puts "Unpacking 'AppImage' archive, this may take a while..."
        # AppImages self-extract; they must be executable first.
        FileUtils.chmod 0o755, meta[:filename], verbose: @fileutils_verbose
        Dir.chdir @extract_dir do
          system "../#{meta[:filename]} --appimage-extract", exception: true
        end
      when /\.tpxz$/i
        abort 'Pixz is needed for this install. Please install it with \'crew install pixz\''.lightred unless File.file?("#{CREW_PREFIX}/bin/pixz")
        puts "Unpacking 'tpxz' archive using 'tar', this may take a while..."
        system "tar -Ipixz -x#{@verbose}f #{meta[:filename]} -C #{@extract_dir}", exception: true
      end
    end
    if meta[:source]
      # Check the number of directories in the archive
      entries = Dir["#{@extract_dir}/*"]
      if entries.empty? && @opt_verbose
        # This will happen with SKIP packages.
        puts "Empty archive: #{meta[:filename]}".orange
      end
      target_dir = if entries.length == 1 && File.directory?(entries.first)
        # Use `extract_dir/dir_in_archive` if there is only one directory.
        entries.first
      else
        # Use `extract_dir` otherwise
        @extract_dir
      end
    else
      # Use `extract_dir` for binary distribution
      target_dir = @extract_dir
    end
    # Remove tarball to save space.
    FileUtils.rm_f meta[:filename], verbose: @fileutils_verbose
  end
  return File.join(CREW_BREW_DIR, target_dir)
end
def build_and_preconfigure(target_dir)
  # Build the package from source inside target_dir and stage the result
  # into CREW_DEST_DIR via the package's install method.
  Dir.chdir target_dir do
    unless @pkg.no_compile_needed?
      puts 'Building from source, this may take a while...'
      # Load musl options only if package is targeted at the musl toolchain
      load "#{CREW_LIB_PATH}lib/musl.rb" if @pkg.is_musl?
    end
    @pkg.in_build = true
    # A cached build tree has already been patched, so only patch/prebuild
    # a fresh source checkout.
    unless @pkg.cached_build
      @pkg.patch
      @pkg.prebuild
    end
    begin
      @pkg.build
    rescue StandardError => e
      # Fix: the rescued exception was previously discarded; include its
      # message so the user can see what actually failed.
      if CREW_CACHE_FAILED_BUILD
        cache_build
        abort "There was a build error, caching build directory: #{e.message}".lightred
      end
      abort "There was a build error: #{e.message}".lightred
    end
    @pkg.in_build = false
    # wipe crew destdir
    FileUtils.rm_rf Dir["#{CREW_DEST_DIR}/*"], verbose: @fileutils_verbose
    puts 'Preconfiguring package...'
    cache_build if CREW_CACHE_BUILD
    @pkg.install
  end
end
def pre_flight
  # Run the package's preflight hook before any download or install work.
  $stdout.puts 'Performing pre-flight checks...'
  @pkg.preflight
end
def pre_install(dest_dir)
  # Run the package's preinstall hook from inside the staged install tree.
  Dir.chdir dest_dir do
    puts 'Performing pre-install...'
    @pkg.preinstall
    # Reload device.json in case preinstall modified it via
    # running 'crew remove packages...'
    load_json
  end
end
def post_install
  # Run the package's postinstall hook from a scratch directory, but only
  # when the recipe file itself defines one (skip the no-op inherited stub).
  return unless @pkg.method(:postinstall).source_location.first.include?("#{@pkg.name}.rb")
  Dir.mktmpdir do |scratch_dir|
    Dir.chdir scratch_dir do
      puts "Performing post-install for #{@pkg.name}...".lightblue
      @pkg.postinstall
    end
  end
end
def compress_doc(dir)
  # Compress documentation under dir with compressdoc, unless compression
  # is globally disabled, opted out of by the package, or the tool is absent.
  return if CREW_NOT_COMPRESS || @pkg.no_compress? || !File.file?("#{CREW_PREFIX}/bin/compressdoc")
  return unless Dir.exist? dir
  # Make read-only files writable so compressdoc can replace them.
  system "find #{dir} -type f ! -perm -200 | xargs -r chmod u+w"
  system "compressdoc --zstd #{@short_verbose} #{dir}"
end
# Grep one filelist against every installed package's filelist and return
# the conflicting "package:path" hits, excluding pkg's own filelist and any
# *_build filelists. (Extracted to remove duplicated scan logic.)
def filelist_conflicts(filelist, pkg)
  hits = `grep --exclude=#{CREW_META_PATH}#{pkg}.filelist --exclude=#{CREW_META_PATH}\\\*_build.filelist -Fxf #{filelist} #{CREW_META_PATH}*.filelist`
  hits.gsub(/(\.filelist|#{CREW_META_PATH})/, '').split("\n").reject(&:empty?)
end

def determine_conflicts(dir, pkg)
  # Determine which files of pkg collide with files owned by other
  # installed packages. Prefers a freshly generated dir/filelist; falls
  # back to the installed filelist. Returns an array of conflicting paths.
  conflicts = []
  if File.file?("#{dir}/filelist")
    if File.file?("#{CREW_META_PATH}#{pkg}.filelist")
      puts 'Checking for conflicts with files from installed packages...'.orange
      conflicts = filelist_conflicts("#{dir}/filelist", pkg)
    end
  elsif File.file?("#{CREW_META_PATH}#{pkg}.filelist")
    puts "Checking for conflicts of #{pkg} with files from installed packages...".orange
    conflicts = filelist_conflicts("#{CREW_META_PATH}#{pkg}.filelist", pkg)
  end
  if conflicts.any?
    puts 'There is a conflict with the same file in another package:'.orange
    puts conflicts.to_s.orange
  end
  # Strip the leading "package:" from each grep hit, leaving just the path.
  conflicts.map! { |x| x.to_s.partition(':').last }
  return conflicts
end
def prepare_package(destdir)
  # Post-process the staged install tree in destdir: relocate errant
  # man/info files, compress docs, generate filelist/dlist, enforce FHS3
  # layout and file-conflict policy, then strip and shrink binaries.
  # Create the destdir if it does not exist to avoid having to have
  # this single line in no_compile_needed packages.
  FileUtils.mkdir_p CREW_DEST_PREFIX
  Dir.chdir destdir do
    # Avoid /usr/local/share/info/dir{.gz} file conflict:
    # The install-info program maintains a directory of installed
    # info documents in /usr/share/info/dir for the use of info
    # readers. This file must not be included in packages other
    # than install-info.
    # https://www.debian.org/doc/debian-policy/ch-docs.html#info-documents
    FileUtils.rm_f "#{CREW_DEST_PREFIX}/share/info/dir"
    # Remove all perl module files which will conflict
    if @pkg.name =~ /^perl_/
      puts 'Removing .packlist and perllocal.pod files to avoid conflicts with other perl packages.'.orange
      system "find #{CREW_DEST_DIR} -type f \\( -name '.packlist' -o -name perllocal.pod \\) -delete"
    end
    # Compress manual files, and move errant files to the correct
    # locations.
    if File.exist?("#{CREW_DEST_PREFIX}/man")
      puts "Files in #{CREW_PREFIX}/man will be moved to #{CREW_MAN_PREFIX}.".orange
      FileUtils.mkdir_p CREW_DEST_MAN_PREFIX.to_s
      FileUtils.mv Dir["#{CREW_DEST_PREFIX}/man/*"], "#{CREW_DEST_MAN_PREFIX}/"
      Dir.rmdir "#{CREW_DEST_PREFIX}/man" if Dir.empty?("#{CREW_DEST_PREFIX}/man")
    end
    if File.exist?("#{CREW_DEST_PREFIX}/info")
      puts "Files in #{CREW_PREFIX}/info will be moved to #{CREW_PREFIX}/share/info.".orange
      FileUtils.mkdir_p "#{CREW_DEST_PREFIX}/share/info/"
      FileUtils.mv Dir["#{CREW_DEST_PREFIX}/info/*"], "#{CREW_DEST_PREFIX}/share/info/"
      Dir.rmdir "#{CREW_DEST_PREFIX}/info" if Dir.empty?("#{CREW_DEST_PREFIX}/info")
    end
    # Remove the "share/info/dir.*" file since it causes conflicts.
    FileUtils.rm_f Dir["#{CREW_DEST_PREFIX}/share/info/dir*"]
    compress_doc CREW_DEST_MAN_PREFIX.to_s
    compress_doc "#{CREW_DEST_PREFIX}/share/info"
    # Allow postbuild to override the filelist contents
    @pkg.postbuild
    # create file list
    system "find .#{CREW_PREFIX} -type f > ../filelist"
    system "find .#{CREW_PREFIX} -type l >> ../filelist"
    system 'cut -c2- ../filelist | sort -o filelist'
    if Dir.exist?(CREW_LOCAL_MANIFEST_PATH) && File.writable?(CREW_LOCAL_MANIFEST_PATH)
      FileUtils.mkdir_p "#{CREW_LOCAL_MANIFEST_PATH}/#{ARCH}/#{@pkg.name.chr.downcase}"
      FileUtils.cp 'filelist', "#{CREW_LOCAL_MANIFEST_PATH}/#{ARCH}/#{@pkg.name.chr.downcase}/#{@pkg.name}.filelist"
    end
    # check for FHS3 compliance
    puts 'Checking for FHS3 compliance...'
    @_errors = false
    @fhs_compliant_prefix = %W[bin etc include lib #{ARCH_LIB} libexec opt sbin share var].uniq
    Dir.foreach(CREW_DEST_PREFIX) do |filename|
      next if %w[. ..].include?(filename)
      unless @fhs_compliant_prefix.include?(filename)
        # Fix: interpolate the offending directory entry name; the message
        # previously printed a literal garbled placeholder instead of the
        # loop variable.
        if CREW_FHS_NONCOMPLIANCE_ONLY_ADVISORY || @pkg.no_fhs?
          puts "Warning: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".orange
        else
          puts "Error: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".lightred
          @_errors = true
        end
      end
    end
    # check for conflicts with other installed files
    conflicts = determine_conflicts(Dir.pwd, @pkg.name)
    if conflicts.any?
      if CREW_CONFLICTS_ONLY_ADVISORY || @pkg.conflicts_ok?
        puts 'Warning: There is a conflict with the same file in another package.'.orange
      else
        puts 'Error: There is a conflict with the same file in another package.'.lightred
        @_errors = true
      end
      puts conflicts
    end
    # abort if errors encountered
    abort 'Exiting due to above errors.'.lightred if @_errors
    # create directory list
    system "find .#{CREW_PREFIX} -type d | cut -c2- | tail -n +2", out: 'dlist'
    strip_dir destdir
    # Patchelf currently disabled for security reasons
    # See https://github.com/upx/upx/issues/655#issuecomment-1457434081
    # Use patchelf to set need paths for all binaries.
    # patchelf_set_need_paths destdir
    # use upx on executables
    shrink_dir destdir
  end
end
def patchelf_set_need_paths(dir)
  # Rewrite DT_NEEDED entries (and, for musl, the interpreter) of every
  # ELF executable under dir so they resolve inside the crew prefix.
  # NOTE: currently short-circuited below due to upx interaction problems;
  # the body is kept for when it can be re-enabled.
  return if @pkg.no_patchelf? || @pkg.no_compile_needed?
  puts 'Patchelf is currently disabled during builds due to problems with upx.'.yellow
  return
  # --- Intentionally unreachable while patchelf is disabled. ---
  Dir.chdir dir do
    puts 'Running patchelf'.lightblue
    # Fix: color the message, not abort's (never-produced) return value.
    abort 'No Patchelf found!'.lightred unless File.file?("#{CREW_PREFIX}/bin/patchelf")
    @execfiles = `find . -executable -type f ! \\( -name '*.a' \\) | xargs -P#{CREW_NPROC} -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
    return if @execfiles.empty?
    @patchelf_lib_prefix = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib" : CREW_LIB_PREFIX
    puts "@patchelf_lib_prefix is #{@patchelf_lib_prefix}" if @opt_verbose
    # Fix: interpolate CREW_LIB_PREFIX — the old literal string produced a
    # bogus 'CREW_LIB_PREFIX/libc.so.6' interpreter path.
    @patchelf_interpreter = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib/libc.so" : "#{CREW_LIB_PREFIX}/libc.so.6"
    puts "@patchelf_interpreter is #{@patchelf_interpreter}" if @opt_verbose
    puts 'Running patchelf to patch binaries for library paths'.lightblue
    @execfiles.each_line(chomp: true) do |execfiletopatch|
      execfiletopatch = Dir.pwd + execfiletopatch.delete_prefix('.')
      @neededlibs = `patchelf --print-needed #{execfiletopatch}`
      next if @neededlibs.to_s.empty?
      @neededlibs.each_line(chomp: true) do |neededlibspatch|
        next if neededlibspatch.include?(@patchelf_lib_prefix.to_s)
        # Avoid segfaults from not using system versions of these files.
        patchelf_veto_files = %w[
          libdl.so
          ld-linux.so.2
          ld-linux-x86-64.so.2
          ld-linux-armhf.so.3
          libc.so.6
        ]
        next if !@pkg.is_musl? && patchelf_veto_files.any? { |i| neededlibspatch.include? i }
        @neededlib_basename = File.basename(neededlibspatch)
        @neededlibspatchednamepath = "#{@patchelf_lib_prefix}/" + @neededlib_basename
        # The first check here can be changed to just check the dest_dir
        # hierarchy for @neededlib_basename if the intent is to allow
        # using a different CREW_PREFIX during package installs.
        if File.file?(@neededlibspatchednamepath) || File.file?(Dir.pwd + @neededlibspatchednamepath)
          puts "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}" if @opt_verbose
          system "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}"
        else
          puts "#{execfiletopatch} needed library #{@neededlib_basename} not found in #{@patchelf_lib_prefix} or #{Dir.pwd + @neededlibspatchednamepath}.".lightred
        end
      end
      # Do not set interpreter for non-musl, as this can break apps if there
      # is an issue with the crew glibc.
      next unless @pkg.is_musl?
      puts 'Running patchelf to patch binary interpreter paths'.lightblue
      system "patchelf --set-interpreter #{@patchelf_interpreter} #{execfiletopatch}"
    end
  end
end
def strip_find_files(find_cmd, strip_option = '')
  # Run llvm-strip over every file matched by find_cmd whose 4-byte magic
  # header is ELF or an ar archive; strip_option is passed through to
  # llvm-strip (e.g. '-S' for libraries).
  # Check whether crew should strip.
  return if CREW_NOT_STRIP || @pkg.no_strip? || !File.file?("#{CREW_PREFIX}/bin/llvm-strip")
  # Run find_cmd and strip only files with ar or elf magic headers.
  # Make read-only files writable first so llvm-strip can modify them.
  system "#{find_cmd} | xargs -r chmod u+w"
  @strip_verbose = @opt_verbose ? 'echo "Stripping ${0:1}" &&' : ''
  # The craziness here is from having to escape the special characters
  # in the magic headers for these files.
  system "#{find_cmd} | xargs -P#{CREW_NPROC} -n1 -r bash -c 'header=$(head -c4 ${0}); elfheader='$(printf '\\\177ELF')' ; arheader=\\!\\<ar ; case $header in $elfheader|$arheader) #{@strip_verbose} llvm-strip #{strip_option} ${0} ;; esac'"
end
def strip_dir(dir)
  # Strip debug symbols from libraries and executables under dir, unless
  # stripping is disabled globally or by the package.
  return if CREW_NOT_STRIP || @pkg.no_strip? || @pkg.no_compile_needed?
  Dir.chdir dir do
    # Strip libraries with -S
    puts 'Stripping libraries...'
    strip_find_files "find . -type f \\( -name 'lib*.a' -o -name 'lib*.so*' \\) -print", '-S'
    # Strip binaries but not compressed archives
    puts 'Stripping binaries...'
    archive_exts = %w[bz2 gz lha lz lzh rar tar tbz tgz tpxz txz xz Z zip zst]
    iname_filter = archive_exts.join(' -o -iname *.')
    strip_find_files "find . -type f ! \\( -iname *.#{iname_filter} \\) ! \\( -name 'lib*.a' -o -name 'lib*.so' \\) -perm /111 -print"
  end
end
def shrink_dir(dir)
  # Reduce the on-disk size of the staged package in dir: hard-link
  # duplicate files via rdfind, then upx-compress standalone ELF
  # executables (shared objects and static libs are left alone).
  unless CREW_NOT_SHRINK_ARCHIVE || @pkg.no_shrink?
    Dir.chdir dir do
      if File.file?("#{CREW_PREFIX}/bin/rdfind")
        puts 'Using rdfind to convert duplicate files to hard links.'
        system "#{CREW_PREFIX}/bin/rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false ."
      end
      # Issues with non-x86_64 in compressing libraries, so just compress
      # non-libraries. Also note that one needs to use "upx -d" on a
      # compressed file to use ldd.
      # sommelier also isn't happy when sommelier and xwayland are compressed
      # so don't compress those packages.
      if File.executable?("#{CREW_PREFIX}/bin/upx")
        # 1. Find executable binaries but also check for hard linked
        #    files by making sure we have a unique set of
        #    inodes for the binaries found.
        # 2. Copy to a temp file.
        # 3. Compress using upx. (Uncompressble files are ignored.)
        # 4. Check compression by expanding the compressed file with
        #    upx.
        # 5. If the expansion doesn't error out then it is ok to copy
        #    over the original. (This also lets us only avoid compressing
        #    hard linked files multiple times.)
        @execfiles = `find . -executable -type f ! \\( -name '*.so*' -o -name '*.a' \\) | xargs -P8 -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
        unless @execfiles.empty?
          puts 'Using upx to shrink binaries.'
          # Copying in the ThreadPoolExecutor loop fails non-deterministically
          @execfiles.each_line(chomp: true) do |execfilecp|
            execfilecp.slice! '.'
            next if execfilecp.empty?
            execfilecp = File.join(dir, execfilecp)
            next unless File.file?(execfilecp)
            FileUtils.cp execfilecp, "#{execfilecp}-crewupxtmp"
          end
          # Ensure the concurrent-ruby gem is available for the pool below,
          # installing it on the fly if needed.
          begin
            gem 'concurrent-ruby'
          rescue Gem::LoadError
            puts ' -> install gem concurrent-ruby'
            Gem.install('concurrent-ruby')
            gem 'concurrent-ruby'
          end
          require 'concurrent'
          pool = Concurrent::ThreadPoolExecutor.new(
            min_threads: 1,
            max_threads: CREW_NPROC,
            max_queue: 0, # unbounded work queue
            fallback_policy: :caller_runs
          )
          # Compress each candidate concurrently; only replace the original
          # when the compressed copy round-trips through 'upx -t'.
          @execfiles.each_line(chomp: true) do |execfile|
            pool.post do
              execfile.slice! '.'
              execfile = File.join(dir, execfile)
              puts "Attempting to compress #{execfile} ...".orange
              # Make tmp file for compression
              unless system "upx --lzma #{execfile}-crewupxtmp"
                puts "Compression of #{execfile} failed...".orange if @opt_verbose
                FileUtils.rm_f "#{execfile}-crewupxtmp"
              end
              if File.file?("#{execfile}-crewupxtmp")
                puts "Testing compressed #{execfile}...".lightblue if @opt_verbose
                if system 'upx', '-t', "#{execfile}-crewupxtmp"
                  puts "#{execfile} successfully compressed...".lightgreen
                  FileUtils.cp "#{execfile}-crewupxtmp", execfile
                end
              end
              FileUtils.rm_f "#{execfile}-crewupxtmp"
            end
          end
          pool.shutdown
          pool.wait_for_termination
          # Make sure temporary compression copies are deleted.
          system 'find . -executable -type f -name "*-crewupxtmp" -delete'
        end
      end
    end
  end
end
def install_files(src, dst = File.join( CREW_PREFIX, src.delete_prefix('./usr/local') ))
  # Move the contents of the staged directory src into dst (which defaults
  # to the matching location under CREW_PREFIX), preferring crew-mvdir,
  # then rsync, then a plain tar pipe as the last resort.
  if Dir.exist?(src)
    if File.executable?("#{CREW_PREFIX}/bin/crew-mvdir") && !CREW_DISABLE_MVDIR
      system "crew-mvdir #{@short_verbose} #{src} #{dst}", exception: true
    else
      warn 'crew-mvdir is not installed. Please install it with \'crew install crew_mvdir\' for improved installation performance'.yellow unless (@pkg.name == 'crew_mvdir') || CREW_DISABLE_MVDIR
      if File.executable?("#{CREW_PREFIX}/bin/rsync") && system("#{CREW_PREFIX}/bin/rsync --version > /dev/null")
        # rsync src path needs a trailing slash
        src << '/' unless src.end_with?('/')
        # Check for ACLs support.
        @rsync_version = `rsync --version`.chomp
        if @rsync_version.include?('ACLs') && !@rsync_version.include?('no ACLs')
          system 'rsync', "-ah#{@verbose}HAXW", '--remove-source-files', src, dst, exception: true
        else
          # rsync without ACL support: drop the -A flag.
          system 'rsync', "-ah#{@verbose}HXW", '--remove-source-files', src, dst, exception: true
        end
      else
        # Fallback: stream the tree through tar, preserving permissions and
        # keeping existing directory symlinks at the destination.
        system "cd #{src}; tar -cf - ./* | (cd #{dst}; tar -x#{@verbose}p --keep-directory-symlink -f -)", exception: true
      end
    end
  else
    abort "#{src} directory does not exist.".lightred
  end
end
def install_package(pkgdir)
  # Install the staged tree in pkgdir into the live system: record the
  # package's filelist/directorylist metadata, repair symlinks that still
  # point into the DESTDIR, hard-link duplicates, then move files in place.
  Dir.chdir pkgdir do
    # install filelist, dlist and binary files
    puts 'Performing install...'
    FileUtils.mv 'dlist', "#{CREW_META_PATH}#{@pkg.name}.directorylist", verbose: @fileutils_verbose
    FileUtils.mv 'filelist', "#{CREW_META_PATH}#{@pkg.name}.filelist", verbose: @fileutils_verbose
    unless CREW_NOT_LINKS || @pkg.no_links?
      @brokensymlinks = nil
      # Find symlinks whose targets do not resolve (typically absolute
      # links into CREW_DEST_DIR left over from the build).
      @brokensymlinks = `find . -type l -exec test ! -e {} \\; -print`.chomp
      unless @brokensymlinks.to_s.empty?
        puts 'There are broken symlinks. Will try to fix.'.orange if @opt_verbose
        @brokensymlinks.each_line(chomp: true) do |fixlink|
          @brokentarget = @fixedtarget = nil
          @brokentarget = `readlink -n #{fixlink}`.chomp
          puts "Attempting fix of: #{fixlink.delete_prefix('.')} -> #{@brokentarget}".orange if @opt_verbose
          # Strip the DESTDIR prefix so the link points at the final path.
          @fixedtarget = @brokentarget.delete_prefix(CREW_DEST_DIR)
          @fixedlink_loc = File.join(pkgdir, fixlink.delete_prefix('.'))
          # If no changes were made, don't replace symlink
          unless @fixedtarget == @brokentarget
            FileUtils.ln_sf @fixedtarget, @fixedlink_loc
            puts "Fixed: #{@fixedtarget} -> #{fixlink.delete_prefix('.')}".orange if @opt_verbose
          end
        end
      end
      if File.executable?("#{CREW_PREFIX}/bin/rdfind")
        puts 'Using rdfind to convert duplicate files to hard links.'
        system 'rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false .'
      end
    end
    install_files(".#{CREW_PREFIX}") if Dir.exist?(".#{CREW_PREFIX}")
    install_files(".#{HOME}", HOME) if Dir.exist?(".#{HOME}")
  end
end
def resolve_dependencies_and_install
  # Top-level driver for installing @pkg: run the preflight hook, install
  # missing dependencies, install the package itself, then run the queued
  # postinstall hooks. Sets a flag so nested calls (e.g. install) know to
  # defer postinstalls until the end.
  @resolve_dependencies_and_install = 1
  abort "Package #{@pkg.name} is not compatible with your device architecture (#{ARCH}) :/".lightred unless @pkg.compatible?
  # Process preflight block to see if package should even
  # be downloaded or installed.
  pre_flight
  begin
    origin = @pkg.name
    @to_postinstall = []
    resolve_dependencies
    # resolve_dependencies may have changed @pkg; reselect the original.
    search origin, true
    install
    @to_postinstall.append(@pkg.name)
    @to_postinstall.each do |dep|
      search dep
      post_install
    end
  rescue InstallError => e
    abort "#{@pkg.name} failed to install: #{e}".lightred
  ensure
    # cleanup
    unless @opt_keep
      FileUtils.rm_rf Dir["#{CREW_BREW_DIR}/*"]
      FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest" # this is a little ugly, feel free to find a better way
    end
  end
  puts "#{@pkg.name.capitalize} installed!".lightgreen
  @resolve_dependencies_and_install = 0
end
def resolve_dependencies
  # Compute @pkg's dependency list, verify version constraints of deps that
  # are already installed, ask the user to confirm, then install whatever
  # is missing and schedule/run the postinstall hooks.
  @dependencies = @pkg.get_deps_list(return_attr: true)
  # compare dependency version with required range (if installed)
  @dependencies.each do |dep|
    depName = dep.keys[0]
    dep_info = @device[:installed_packages].select {|pkg| pkg[:name] == depName } [0]
    # skip if dependency is not installed
    next unless dep_info
    # The tags attribute is unused here; only the version check matters.
    _tags, version_check = dep.values[0]
    installed_version = dep_info[:version]
    next unless version_check
    # abort if the range is not fulfilled
    abort unless version_check.call(installed_version)
  end
  # leave only dependency names (remove all package attributes returned by @pkg.get_deps_list)
  @dependencies.map!(&:keys).flatten!
  # leave only not installed packages in dependencies
  @dependencies.reject! { |depName| @device[:installed_packages].any? { |pkg| pkg[:name] == depName } }
  # run preflight check for dependencies
  @dependencies.each do |depName|
    dep_pkgPath = File.join(CREW_PACKAGES_PATH, "#{depName}.rb")
    Package.load_package(dep_pkgPath, depName).preflight
  end
  return if @dependencies.empty?
  puts 'The following packages also need to be installed: '
  @dependencies.each do |dep|
    abort "Dependency #{dep} was not found.".lightred unless File.file?( File.join(CREW_PACKAGES_PATH, "#{dep}.rb") )
  end
  puts @dependencies.join(' ')
  print 'Do you agree? [Y/n] '
  response = $stdin.gets.chomp.downcase
  case response
  when 'n', 'no'
    abort 'No changes made.'
  when '', 'y', 'yes'
    puts 'Proceeding...'
  else
    puts "I don't understand `#{response}`. :(".lightred
    abort 'No changes made.'
  end
  @dependencies.each do |dep|
    search dep
    print_current_package
    install
  end
  if @resolve_dependencies_and_install.eql?(1) || @resolve_dependencies_and_build.eql?(1)
    @to_postinstall = @dependencies
  else
    # Make sure the sommelier postinstall happens last so the messages
    # from that are not missed by users.
    # Fix: keep the reordered list — the partition result was previously
    # discarded, so sommelier was never actually moved to the end.
    @dependencies = @dependencies.partition { |v| v != 'sommelier' }.reduce(:+)
    @dependencies.each do |dep|
      search dep
      post_install
    end
  end
end
def install
  # Install the current package (@pkg): download and unpack (building from
  # source when required), remove the previous version on upgrade, copy the
  # files into the prefix, then record the package in device.json.
  if !@pkg.in_upgrade && @device[:installed_packages].any? { |pkg| pkg[:name] == @pkg.name }
    puts "Package #{@pkg.name} already installed, skipping...".lightgreen
    return
  end
  unless @pkg.is_fake?
    meta = download
    target_dir = unpack meta
    if meta[:source]
      # build from source and place binaries at CREW_DEST_DIR
      # CREW_DEST_DIR contains usr/local/... hierarchy
      build_and_preconfigure target_dir
      # prepare filelist and dlist at CREW_DEST_DIR
      prepare_package CREW_DEST_DIR
      # use CREW_DEST_DIR
      dest_dir = CREW_DEST_DIR
    else
      # use extracted binary directory
      dest_dir = target_dir
    end
  end
  # Make backup of installed packages json file.
  # If this fails, the install should fail before we create any
  # damage, and we should roughly be at maximal disk space usage at this
  # point anyways.
  FileUtils.cp "#{CREW_CONFIG_PATH}device.json", "#{CREW_CONFIG_PATH}device.json.tmp"
  # remove it just before the file copy
  if @pkg.in_upgrade
    puts 'Removing since upgrade or reinstall...'
    remove @pkg.name
  end
  unless @pkg.is_fake?
    # perform pre-install process
    pre_install dest_dir
    # perform install process
    install_package dest_dir
    unless (@resolve_dependencies_and_install == 1) || (@resolve_dependencies_and_build == 1)
      # perform post-install process
      post_install
    end
  end
  # add to installed packages
  @device[:installed_packages].push(name: @pkg.name, version: @pkg.version, binary_sha256: @pkg.get_binary_sha256(@device[:architecture]))
  # Round-trip @device through JSON so it serializes consistently.
  File.open("#{CREW_CONFIG_PATH}device.json.tmp", 'w') do |file|
    output = JSON.parse @device.to_json
    file.write JSON.pretty_generate(output)
  end
  # Copy over original if the write to the tmp file succeeds.
  FileUtils.cp "#{CREW_CONFIG_PATH}device.json.tmp", "#{CREW_CONFIG_PATH}device.json"
  FileUtils.rm "#{CREW_CONFIG_PATH}device.json.tmp"
end
def resolve_dependencies_and_build
  # Top-level driver for 'crew build': install missing dependencies, run
  # their postinstalls, then build @pkg from source into CREW_ARCHIVE_DEST.
  @resolve_dependencies_and_build = 1
  @to_postinstall = []
  begin
    origin = @pkg.name
    # mark current package as which is required to compile from source
    @pkg.build_from_source = true
    resolve_dependencies
    @to_postinstall.each do |dep|
      search dep
      post_install
    end
    # resolve_dependencies may have changed @pkg; reselect the original.
    search origin, true
    abort "CREW_ARCHIVE_DEST (#{CREW_ARCHIVE_DEST}) is not writable!".lightred unless File.writable?(CREW_ARCHIVE_DEST)
    build_package CREW_ARCHIVE_DEST
  rescue InstallError => e
    abort "#{@pkg.name} failed to build: #{e}".lightred
  ensure
    # cleanup
    unless @opt_keep
      FileUtils.rm_rf Dir["#{CREW_BREW_DIR}/*"], verbose: @fileutils_verbose
      FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest", verbose: @fileutils_verbose # this is a little ugly, feel free to find a better way
    end
  end
  puts "#{@pkg.name} is built!".lightgreen
  @resolve_dependencies_and_build = 0
end
def build_package(crew_archive_dest)
  # Build @pkg from source, run its check suite, and archive the result
  # into crew_archive_dest.
  abort 'It is not possible to build a fake package'.lightred if @pkg.is_fake?
  abort 'It is not possible to build without source'.lightred unless @pkg.is_source?(@device[:architecture])
  abort "Unable to locate CREW_LOCAL_MANIFEST_PATH. Please try again in the chromebrew/release/#{ARCH} directory.".lightred if CREW_LOCAL_MANIFEST_PATH.empty?
  abort "CREW_LOCAL_MANIFEST_PATH: #{CREW_LOCAL_MANIFEST_PATH} is not writable.".lightred unless File.writable?(CREW_LOCAL_MANIFEST_PATH)
  # download source codes and unpack it
  metadata = download
  src_dir = unpack metadata
  # build from source and place binaries at CREW_DEST_DIR
  build_and_preconfigure src_dir
  # call check method here. this check method is called by this function only,
  # therefore it is possible place time consuming tests in the check method.
  Dir.chdir(src_dir) { @pkg.check } if Dir.exist? src_dir
  # prepare filelist and dlist at CREW_DEST_DIR
  prepare_package CREW_DEST_DIR
  # build package from filelist, dlist and binary files in CREW_DEST_DIR
  puts 'Archiving...'
  archive_package crew_archive_dest
end
def archive_package(crew_archive_dest)
  # Compress the staged CREW_DEST_DIR into a package tarball (zstd when
  # available and allowed, xz otherwise) and write its sha256 alongside.
  # Fix: always (re)assign zstd availability — the old code only assigned
  # inside an 'if File.file?' guard, duplicating the check and letting a
  # stale true value from an earlier call leak through when zstd vanished.
  @crew_prefix_zstd_available = File.file?("#{CREW_PREFIX}/bin/zstd")
  if @pkg.no_zstd? || !@crew_prefix_zstd_available
    puts 'Using xz to compress package. This may take some time.'.lightblue
    pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.xz"
    Dir.chdir CREW_DEST_DIR do
      system "tar c#{@verbose}Jf #{crew_archive_dest}/#{pkg_name} *"
    end
  else
    puts 'Using zstd to compress package. This may take some time.'.lightblue
    pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.zst"
    Dir.chdir CREW_DEST_DIR do
      # Using same zstd compression options as Arch, which privilege
      # decompression speed over compression speed.
      # See https://lists.archlinux.org/pipermail/arch-dev-public/2019-March/029542.html
      # Use nice so that user can (possibly) do other things during compression.
      if @crew_prefix_zstd_available
        puts 'Using standard zstd'.lightblue if @opt_verbose
        system "tar c#{@verbose} * | nice -n 20 #{CREW_PREFIX}/bin/zstd -c -T0 --ultra -20 - > #{crew_archive_dest}/#{pkg_name}"
      end
    end
  end
  system "sha256sum #{crew_archive_dest}/#{pkg_name} > #{crew_archive_dest}/#{pkg_name}.sha256"
end
def remove(pkgName)
  # Uninstall pkgName: run its preremove/remove hooks, delete the files and
  # (empty) directories recorded in its filelist/directorylist, and drop it
  # from device.json. Essential crew files and files shared with other
  # packages are preserved.
  # make sure the package is actually installed
  unless @device[:installed_packages].any? { |pkg| pkg[:name] == pkgName } || File.file?("#{CREW_META_PATH}#{pkgName}.filelist")
    puts "Package #{pkgName} isn't installed.".lightred
    return
  end
  # Perform any operations required prior to package removal.
  search pkgName, true
  @pkg.preremove
  # Preserve CREW_ESSENTIAL_FILES and make sure they are real files
  # and not symlinks, because preserving symlinked libraries does not
  # prevent breakage.
  CREW_ESSENTIAL_FILES.each do |file|
    next unless File.symlink?("#{CREW_LIB_PREFIX}/#{file}")
    @canonicalized_file = `readlink -m #{CREW_LIB_PREFIX}/#{file}`.chomp
    if File.file?(@canonicalized_file) && @canonicalized_file.include?(CREW_PREFIX)
      puts "Replacing symlinked essential #{file} with hard link to #{@canonicalized_file} to avoid breakage.".lightblue if @opt_verbose
      FileUtils.ln(@canonicalized_file, "#{CREW_LIB_PREFIX}/#{file}", force: true)
    end
  end
  # Files shared with other packages must survive this removal.
  conflicts = determine_conflicts(Dir.pwd, @pkg.name)
  # if the filelist exists, remove the files and directories installed by the package
  if File.file?("#{CREW_META_PATH}#{pkgName}.filelist")
    Dir.chdir CREW_CONFIG_PATH do
      # remove all files installed by the package
      File.foreach("meta/#{pkgName}.filelist", chomp: true) do |line|
        # Do not remove essential files which crew (and dependencies)
        # rely on, especially during package upgrades or reinstalls.
        # These essential files are enumerated in const.rb as
        # CREW_ESSENTIAL_FILES.
        if CREW_ESSENTIAL_FILES.include?(File.basename(line))
          puts "Removing #{line} will break crew. It was #{'NOT'.lightred} deleted." if @opt_verbose
        else
          puts "Removing file #{line}".lightred if @opt_verbose
          puts "filelist contains #{line}".lightred if @opt_verbose && !line.include?(CREW_PREFIX)
          # Only ever delete paths inside the crew prefix.
          if line.start_with?(CREW_PREFIX)
            if conflicts.include?(line)
              puts "#{line} is in another package. It will not be removed during the removal of #{pkgName}".orange
            else
              FileUtils.rm_rf line
            end
          end
        end
      end
      # remove all directories installed by the package
      File.foreach("meta/#{pkgName}.directorylist", chomp: true) do |line|
        puts "directorylist contains #{line}".lightred if @opt_verbose && !line.include?(CREW_PREFIX)
        # Only prune directories that are now empty and inside the prefix.
        next unless Dir.exist?(line) && Dir.empty?(line) && line.include?(CREW_PREFIX)
        puts "Removing directory #{line}".lightred if @opt_verbose
        FileUtils.rmdir(line)
      end
      # remove the file and directory list
      FileUtils.rm_f Dir["meta/#{pkgName}.{file,directory}list"]
    end
  end
  # remove from installed packages
  puts "Removing package #{pkgName}".lightred if @opt_verbose
  @device[:installed_packages].delete_if { |elem| elem[:name] == pkgName }
  # update the device manifest
  File.write "#{CREW_CONFIG_PATH}/device.json", JSON.pretty_generate(JSON.parse(@device.to_json))
  search pkgName, true
  @pkg.remove
  puts "#{pkgName.capitalize} removed!".lightgreen
end
def print_deps_tree(args)
  # Render @pkg's recursive dependency graph as a Unicode box-drawing tree.
  # The hash from get_deps_list is pretty-printed as JSON, then the JSON
  # punctuation is rewritten into tree connectors (└ │ ├ ┬) line by line.
  warn 'Walking through dependencies recursively, this may take a while...', ''
  # depHash: Hash object returned by @pkg.get_deps_list
  depHash = @pkg.get_deps_list(hash: true, include_build_deps: (args['--include-build-deps'] || 'auto'), exclude_buildessential: args['--exclude-buildessential'])
  # convert returned hash to json and format it
  jsonView = JSON.pretty_generate(depHash)
  # convert formatted json string to tree structure
  treeView = jsonView.gsub(/\{\s*/m, '└─────').gsub(/[\[\]{},":]/, '').gsub(/^\s*$\n/, '').gsub(/\s*$/, '')
  # add pipe char to connect endpoints and starting points, improve readability
  # find the horizontal location of all arrow symbols
  index_with_pipe_char = treeView.lines.map { |line| line.index('└') }.compact.uniq
  # determine whatever a pipe char should be added according to the horizontal location of arrow symbols
  treeView = treeView.lines.each_with_index.map do |line, line_i|
    index_with_pipe_char.each do |char_i|
      # check if there have any non-space char (pkgNames) between starting point ([line_i][char_i]) and endpoint vertically ([next_arrow_line_offset][char_i])
      # (used to determine if the starting point and endpoint are in same branch, use pipe char to connect them if true)
      next_arrow_line_offset = treeView.lines[line_i..].index { |l| l[char_i] == '└' }
      have_line_with_non_empty_char = treeView.lines[line_i + 1..line_i + next_arrow_line_offset.to_i - 1].any? { |l| l[char_i].nil? or l[char_i] =~ /\S/ }
      line[char_i] = '│' if next_arrow_line_offset && (line[char_i] == ' ') && !have_line_with_non_empty_char
    end
    next line
  end.join
  # replace arrow symbols with a tee symbol on branch intersection
  treeView = treeView.lines.each_with_index.map do |line, line_i|
    # orig_arrow_index_connecter: the horizontal location of the arrow symbol used to connect parent branch
    #
    # example:
    #   └───┬─chrome
    #       └─────buildessential
    #       ^
    orig_arrow_index_connecter = line.index('└')
    # orig_arrow_index_newbranch: the horizontal location of the "box drawing char" symbol MIGHT be
    #                             required to convert to tee char in order to connect child branch,
    #                             located at 3 chars later of orig_arrow_index_connecter
    #
    # example:
    #       v
    #   └─────chrome
    #       └─────buildessential
    #
    # which might need to be convert to:
    #   └───┬─chrome
    #       └─────buildessential
    orig_arrow_index_newbranch = orig_arrow_index_connecter + 4
    # if the char under the processing arrow symbol (orig_arrow_index_connecter) is also arrow or pipe, change the processing char to tee symbol
    line[orig_arrow_index_connecter] = '├' if orig_arrow_index_connecter && treeView.lines[line_i + 1].to_s[orig_arrow_index_connecter] =~ (/[└│]/)
    # if the char under the processing arrow symbol (orig_arrow_index_newbranch) is also arrow or pipe, change the processing char to tee symbol
    line[orig_arrow_index_newbranch] = '┬' if orig_arrow_index_newbranch && treeView.lines[line_i + 1].to_s[orig_arrow_index_newbranch] =~ (/[└├]/)
    next line # return modified line
  end.join
  if String.use_color
    # Print a color legend, then colorize the markers emitted by
    # get_deps_list (*...* and +...+ delimiters).
    puts <<~EOT, ''
      \e[45m \e[0m: satisfied dependency
      \e[46m \e[0m: build dependency
      \e[47m \e[0m: runtime dependency
    EOT
    # (the first string in each #{} is used for commenting only, will not be included in output)
    # replace special symbols returned by @pkg.get_deps_list to actual color code
    treeView.gsub!(/\*(.+)\*/, '\1'.lightcyan)
    treeView.gsub!(/\+(.+)\+/, "\e[45m\\1\e[0m")
  end
  puts treeView
end
# Build each named package from source, resolving its dependencies first.
def build_command(args)
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName
    print_current_package @opt_verbose
    resolve_dependencies_and_build
  end
end
# Show the requested Chromebrew constants, or every constant when
# no names are given on the command line.
def const_command(args)
  names = args['<name>']
  if names.empty?
    const
  else
    names.each { |constant_name| const constant_name }
  end
end
# Print the dependencies of each named package: as a tree when --tree
# is given, otherwise as a flat list in install order.
def deps_command(args)
  # --include-build-deps defaults to 'auto' when the flag is absent.
  include_build = args['--include-build-deps'] || 'auto'
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName
    next print_deps_tree(args) if args['--tree']
    puts @pkg.get_deps_list(include_build_deps: include_build, exclude_buildessential: args['--exclude-buildessential'])
  end
end
# Download the archive for each named package without installing it.
def download_command(args)
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName
    print_current_package @opt_verbose
    download
  end
end
# List the files belonging to each named package.
def files_command(args)
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName
    print_current_package
    files pkg
  end
end
# Show help for a specific command, or the general usage text when
# no command was supplied.
def help_command(args)
  topic = args['<command>']
  if topic
    help topic
  else
    puts 'Usage: crew help <command>'
    help
  end
end
# Install each named package, building from source when requested via
# -s/--build-from-source, -S/--recursive-build, or CREW_BUILD_FROM_SOURCE=1.
def install_command(args)
  # All three triggers are fixed for the whole run, so compute once.
  build_requested = @opt_src || @opt_recursive || (CREW_BUILD_FROM_SOURCE == '1')
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName
    print_current_package true
    @pkg.build_from_source = true if build_requested
    resolve_dependencies_and_install
  end
end
# Dispatch `crew list <filter>` to the matching listing helper.
def list_command(args)
  case
  when args['available']    then list_available
  when args['installed']    then puts list_installed
  when args['compatible']   then list_compatible true
  when args['incompatible'] then list_compatible false
  end
end
# Re-run the postinstall step for each named package, provided it is
# actually installed on this device.
def postinstall_command(args)
  args['<name>'].each do |pkg|
    @pkgName = pkg
    search @pkgName, true
    installed = @device[:installed_packages].any? { |entry| entry[:name] == @pkgName }
    if installed
      @pkg.postinstall
    else
      puts "Package #{@pkgName} is not installed. :(".lightred
    end
  end
end
# Show package property information; arguments are ignored.
def prop_command(_args) = prop
# Reinstall each named package by sending it through the normal install
# path with `in_upgrade` set, so an already-installed package is
# replaced rather than skipped.
def reinstall_command(args)
  args['<name>'].each do |name|
    @pkgName = name
    search @pkgName
    print_current_package
    # Honor source-build requests the same way `install` does.
    @pkg.build_from_source = true if @opt_src || @opt_recursive || (CREW_BUILD_FROM_SOURCE == '1')
    # NOTE(review): @pkgName was assigned just above, so this guard looks
    # always-true — confirm whether `search` can clear it on failure.
    next unless @pkgName
    # Temporarily flag the upgrade so the installer overwrites the
    # existing installation, then clear the flag for the next package.
    @pkg.in_upgrade = true
    resolve_dependencies_and_install
    @pkg.in_upgrade = false
  end
end
# Remove each named package from the system.
# Note: block-brace spacing normalized to `{ |name| ... }` to match the
# style used elsewhere in this file (e.g. postinstall_command).
def remove_command(args)
  args['<name>'].each { |name| remove name }
end
# Search packages by regexp pattern(s); with no pattern given, list
# every available package instead.
def search_command(args)
  names = args['<name>']
  if names.empty?
    list_packages
  else
    names.each { |pattern| regexp_search pattern }
  end
end
# Print system/environment information as markdown (handy for pasting
# into bug reports). With --verbose the output is wrapped in a
# collapsible <details> block and keeps backtick formatting; otherwise
# the backticks are stripped for plain-text output.
def sysinfo_command(_args)
  # Parse /etc/lsb-release KEY=VALUE pairs into a hash when the file exists.
  lsb_release = if File.file?('/etc/lsb-release')
                  File.read('/etc/lsb-release').scan(/^(.+?)=(.+)$/).to_h
                else
                  # newer version of Chrome OS exports info to env by default
                  ENV
                end
  # git log format: abbreviated hash, then backticked subject + relative date.
  git_commit_message_format = '%h `%s (%cr)`'
  @sysinfo_markdown_header = <<~MDHEADER
    <details><summary>Expand</summary>
  MDHEADER
  @sysinfo_markdown_body = <<~MDBODY
    - Architecture: `#{KERN_ARCH}` (`#{ARCH}`)
    - Processor vendor: `#{CPUINFO['vendor_id'] || 'ARM'}`
    - User space: `#{Dir.exist?('/lib64') ? '64' : '32'}-bit`
    - Chromebrew Kernel version: `#{CREW_KERNEL_VERSION}`
    - Chromebrew Running in Container: `#{CREW_IN_CONTAINER}`
    - Chromebrew version: `#{CREW_VERSION}`
    - Chromebrew prefix: `#{CREW_PREFIX}`
    - Chromebrew libdir: `#{CREW_LIB_PREFIX}`
    - Last update in local repository: #{`git -C '#{CREW_LIB_PATH}' show -s --format='#{git_commit_message_format}'`.chomp}
    - OS variant: `#{lsb_release['CHROMEOS_RELEASE_NAME']}`
    - OS version: `#{lsb_release['CHROMEOS_RELEASE_BUILDER_PATH']}`
    - OS channel: `#{lsb_release['CHROMEOS_RELEASE_TRACK']}`
  MDBODY
  @sysinfo_markdown_footer = <<~MDFOOTER
    </details>
  MDFOOTER
  if @opt_verbose
    puts @sysinfo_markdown_header, @sysinfo_markdown_body, @sysinfo_markdown_footer
  else
    # Plain-text mode: drop the markdown backticks.
    puts @sysinfo_markdown_body.tr('`', '')
  end
end
# Regenerate the compatible-package list when requested; otherwise run
# a normal repository update.
def update_command(args)
  args['<compatible>'] ? generate_compatible : update
end
# Upgrade the named packages (or everything when none are given),
# honoring the -s/--build-from-source flag.
def upgrade_command(args)
  upgrade(*args['<name>'], build_from_source: @opt_src)
end
# Report which package provides each of the given file patterns.
def whatprovides_command(args)
  args['<pattern>'].each { |pattern| whatprovides pattern }
end
# A docopt result key names a command when it does not start with '-'
# (an option such as --verbose) or '<' (a positional such as <name>).
# Returns true or false. (The original `!!!` triple negation is
# equivalent to a single `!`: `!` already yields a boolean.)
def is_command(name) = !name[/^[-<]/]
# Ctrl-C handler: when a build is in progress and failed-build caching
# is enabled, save the partial build directory before aborting so the
# work is not lost; otherwise just abort.
Signal.trap('INT') do
  if CREW_CACHE_FAILED_BUILD && CREW_CACHE_ENABLED && @pkg.in_build
    cache_build
    abort 'The build was interrupted. The build directory was cached.'.lightred
  end
  abort 'Interrupted.'.lightred
end
# Load the installed-package database, then dispatch to the handler for
# the single docopt key that is both truthy and a command name
# (e.g. 'install' => install_command).
load_json
command_name = args.find { |key, value| value && is_command(key) }&.first
send("#{command_name}_command", args)