#!/usr/bin/env ruby
# We require color here to get access to .lightred.
require_relative '../lib/color'
# Disallow sudo.
abort 'Chromebrew should not be run as root.'.lightred if Process.uid.zero?
require_relative '../lib/require_gem'
require_gem('activesupport', 'active_support/core_ext/object/blank')
require_gem('highline')
# The json gem can break when upgrading from a much older version of ruby.
require_gem('json')
require_gem('ptools')
require 'digest/sha2'
require 'fileutils'
require 'mkmf'
require 'tmpdir'
require 'uri'
begin
require_relative '../commands/const'
rescue LoadError
# Older crew installs won't have the commands dir in the sparse checkout,
# so disable sparse checkout if those files are missing.
system 'git sparse-checkout disable'
require_relative '../commands/const'
end
require_relative '../commands/files'
require_relative '../commands/help'
require_relative '../commands/list'
require_relative '../commands/prop'
require_relative '../commands/remove'
require_relative '../commands/search'
require_relative '../commands/sysinfo'
require_relative '../commands/whatprovides'
require_relative '../lib/const'
require_gem 'pry-byebug' if CREW_DEBUG
# When crew is started with the --debug flag, debugging starts at the
# following breakpoint.
# The following line can be used anywhere as a breakpoint.
binding.pry if CREW_DEBUG
require_relative '../lib/crewlog'
require_relative '../lib/deb_utils'
require_relative '../lib/docopt'
require_relative '../lib/downloader'
require_relative '../lib/gem_compact_index_client'
require_relative '../lib/gnome'
require_relative '../lib/misc_functions'
require_relative '../lib/package'
require_relative '../lib/package_utils'
# Add lib to LOAD_PATH
$LOAD_PATH << File.join(CREW_LIB_PATH, 'lib')
# Parse arguments using docopt
begin
args = Docopt.docopt(CREW_DOCOPT)
args['<name>']&.map! { |arg| arg.tr('-', '_') }
rescue Docopt::Exit => e
if ARGV[0]
case ARGV[0]
when '-V', '--version', 'version'
puts CREW_VERSION
exit 0
when '-L', '--license', 'license'
puts CREW_LICENSE
exit 0
end
unless %w[-h --help].include?(ARGV[0])
if CREW_DOCOPT.include?("crew #{ARGV[0]} ")
puts 'Missing or invalid argument(s).'.lightred
else
puts "Could not understand \"crew #{ARGV.join(' ')}\".".lightred
# Look for similar commands.
unless CREW_COMMANDS.include?(ARGV[0])
similar = CREW_COMMANDS.split.select { |word| MiscFunctions.edit_distance(ARGV[0], word) < 4 }
unless similar.empty?
abort <<~EOT
Did you mean?
#{similar.join("\n ")}
EOT
end
end
end
end
end
abort e.message
end
# Override default color options if specified.
String.use_color = args['--color'] || !args['--no-color']
@opt_force = args['--force']
@opt_keep = args['--keep']
@opt_source = args['--source']
@opt_recursive = args['--recursive-build']
@opt_version = args['--version']
# Verbose options
@fileutils_verbose = CREW_VERBOSE
@verbose = CREW_VERBOSE ? 'v' : ''
@short_verbose = CREW_VERBOSE ? '-v' : ''
# Make sure crew work directories exist.
FileUtils.mkdir_p CREW_BREW_DIR
FileUtils.mkdir_p CREW_DEST_DIR
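# ExitMessage collects messages to be printed when crew exits, via the
# at_exit handler defined below.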
class ExitMessage
@messages = []
def self.add(msg, print_last: false)
# Use the print_last option to allow important messages (like sommelier) to print at the bottom
# Usage:
# ExitMessage.add 'Last Message', print_last: true
@messages << [msg.lightcyan, print_last]
end
def self.handle_messages(msg)
puts msg
# Delete printed message from array & only print each message once.
@messages.reject! { |x| x.include? msg }
end
def self.print
# Print non-print_last messages first, then print_last messages.
# (&:last extracts the print_last flag.)
handle_messages(@messages.reject(&:last).map(&:first).first) until @messages.reject(&:last).map(&:first).empty?
handle_messages(@messages.select(&:last).map(&:first).first) until @messages.select(&:last).map(&:first).empty?
end
end
at_exit do
GnomePostinstall.run unless GnomePostinstall.gnome_packages.blank?
# Do gem cleanups all at once if needed.
unless @gems_needing_cleanup.blank?
puts "Running gem cleanup for gems: #{@gems_needing_cleanup.join(' ')}".orange
system "gem cleanup #{@gems_needing_cleanup.join(' ')}"
end
# Print exit messages.
ExitMessage.print
end
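# Print the current package name, color-coded by status (installed,
# incompatible, or available), plus its description and, if extra is
# true, its homepage, version, and license.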
def print_current_package(extra = false)
status = if PackageUtils.installed?(@pkg.name)
:installed
elsif !PackageUtils.compatible?(@pkg)
:incompatible
else
:available
end
case status
when :installed
print @pkg.name.lightgreen
when :incompatible
print @pkg.name.lightred
when :available
print @pkg.name.lightblue
end
print ": #{@pkg.description}".lightblue if @pkg.description
if extra
puts ''
puts @pkg.homepage if @pkg.homepage
puts "Version: #{@pkg.version}"
print "License: #{@pkg.license}" if @pkg.license
end
puts ''
end
def set_package(pkg_path)
begin
@pkg = Package.load_package(pkg_path)
rescue SyntaxError => e
warn "#{e.class}: #{e.message}".red
end
@pkg.build_from_source = true if @opt_recursive
end
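# Load the package file for pkg_name into @pkg. Aborts if the package
# is not found, unless silent is true.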
def search(pkg_name, pkg_path: File.join(CREW_PACKAGES_PATH, "#{pkg_name}.rb"), silent: false)
begin
return set_package(pkg_path) if File.file?(pkg_path)
rescue StandardError => e
puts "Error with #{pkg_name}.rb: #{e}".lightred unless e.to_s.include?('uninitialized constant')
end
unless File.file?(pkg_path) && silent
@pkg = nil
abort "Package #{pkg_name} not found. 😞".lightred unless silent
return
end
end
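# Cache the build directory as a zstd-compressed tarball (plus a
# sha256 file) in CREW_CACHE_DIR so later builds can reuse it.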
def cache_build
build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
puts 'Caching build dir...'
pkg_build_dirname_absolute = File.join(CREW_BREW_DIR, @extract_dir)
pkg_build_dirname = File.basename(pkg_build_dirname_absolute)
Dir.chdir pkg_build_dirname_absolute do
# Do not use --exclude-vcs w/ tar to exclude .git
# because some builds will use that information.
# Back up the build cachefile if it exists.
FileUtils.mv build_cachefile, "#{build_cachefile}.bak", force: true if File.file?(build_cachefile)
FileUtils.mv "#{build_cachefile}.sha256", "#{build_cachefile}.sha256.bak", force: true if File.file?("#{build_cachefile}.sha256")
Dir.chdir(CREW_BREW_DIR) do
system "tar c#{@verbose} #{pkg_build_dirname} \
| nice -n 20 zstd -c --ultra --fast -f -o #{build_cachefile} -"
end
end
system "sha256sum #{build_cachefile} > #{build_cachefile}.sha256"
puts "Build directory cached at #{build_cachefile}".lightgreen
else
puts 'CREW_CACHE_ENABLED is not set.'.orange unless CREW_CACHE_ENABLED
puts 'CREW_CACHE_DIR is not writable.'.lightred unless File.writable?(CREW_CACHE_DIR)
end
end
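# Update crew itself and the package lists from git, then check
# installed packages for available updates.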
def update
abort "'crew update' is used to update crew itself. Use 'crew upgrade <package1> [<package2> ...]' to update specific packages.".orange if @pkg_name
unless CREW_NO_GIT
# The following is used in fixup.rb to determine if crew update needs to
# be run again.
@crew_const_git_commit = `git -C #{CREW_LIB_PATH} log -n1 --oneline #{CREW_LIB_PATH}/lib/const.rb`.split.first
unless Dir.exist?(File.join(CREW_LIB_PATH, '.git'))
puts 'Fixing Chromebrew system git repo clone...'.orange
system(<<~GIT_REPAIR_COMMANDS, chdir: CREW_LIB_PATH, %i[out err] => File::NULL)
## Run the git setup commands used in install.sh.
# Make the git default branch error messages go away.
git config --global init.defaultBranch main
# Setup the dir with git information.
git init --ref-format=reftable
git remote add origin #{CREW_REPO}
# Help handle situations where GitHub is down.
git config --local http.lowSpeedLimit 1000
git config --local http.lowSpeedTime 5
# Checkout, overwriting local files.
git fetch --all
git checkout -f master
git reset --hard origin/#{CREW_BRANCH}
GIT_REPAIR_COMMANDS
end
system(<<~GIT_UPDATE_COMMANDS, chdir: CREW_LIB_PATH, exception: true)
## Update crew from git.
# Set sparse-checkout folders.
git sparse-checkout set packages manifest/#{ARCH} lib commands bin crew tests tools
git sparse-checkout reapply
git fetch #{CREW_REPO} #{CREW_BRANCH}
git reset --hard FETCH_HEAD
GIT_UPDATE_COMMANDS
system(<<~GIT_RESTORE_MTIME_COMMAND, chdir: CREW_LIB_PATH, exception: true) if File.file?("#{CREW_PREFIX}/bin/git-restore-mtime")
# Set the mtime on each file in git to the date the file was added,
# not to the date of the last git pull.
git-restore-mtime -sq 2>/dev/null
GIT_RESTORE_MTIME_COMMAND
if Time.now.to_i - @last_update_check > (CREW_UPDATE_CHECK_INTERVAL * 3600 * 24)
puts 'Updating RubyGems.'.orange
system 'gem update -N --system'
system 'gem cleanup'
end
puts 'Package lists, crew, and library updated.'
# Do any fixups necessary after crew has updated from git.
load "#{CREW_LIB_PATH}/lib/fixup.rb"
end
# check for outdated installed packages
puts 'Checking for package updates...' unless CREW_UNATTENDED
can_be_updated = 0
updatable_packages = []
@device[:installed_packages].each do |package|
search package[:name], silent: true
unless @pkg
puts "Package file for #{package[:name]} not found. :(".lightred if CREW_VERBOSE
next
end
different_version = (package[:version] != @pkg.version)
has_sha = !(PackageUtils.get_sha256(@pkg).to_s.empty? || package[:sha256].to_s.empty?)
different_sha = has_sha && package[:sha256] != PackageUtils.get_sha256(@pkg)
can_be_updated += 1 if different_version || different_sha
if different_version && !different_sha && has_sha
unless @pkg.no_compile_needed?
can_be_updated -= 1
updatable_packages.push(@pkg.name)
puts "#{@pkg.name} has a version change but does not have updated binaries".yellow unless CREW_UNATTENDED
end
elsif different_version
updatable_packages.push(@pkg.name)
puts "#{@pkg.name} could be updated from #{package[:version]} to #{@pkg.version}" unless CREW_UNATTENDED
elsif !different_version && different_sha
updatable_packages.push(@pkg.name)
puts "#{@pkg.name} could be updated (rebuild)" unless CREW_UNATTENDED
end
end
if CREW_UNATTENDED && can_be_updated.positive?
puts updatable_packages.to_json
elsif can_be_updated.positive?
puts "\n#{can_be_updated} packages can be updated."
puts 'Run `crew upgrade` to update all packages or `crew upgrade <package1> [<package2> ...]` to update specific packages.'
else
puts 'Your software is up to date.'.lightgreen unless CREW_UNATTENDED
end
end
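# Upgrade the given packages, or all installed packages if none are
# given. Packages in CREW_STANDALONE_UPGRADE_ORDER are upgraded on
# their own first, with 'crew upgrade' rerun afterwards for the rest.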
def upgrade(*pkgs, build_from_source: false)
check_update_avail = lambda do |pkg_file|
pkg_name = File.basename(pkg_file, '.rb')
unless File.file?(pkg_file)
warn "Package file for installed package #{pkg_name} is missing.".lightred
return false
end
unless PackageUtils.installed?(pkg_name)
puts 'Package '.lightred + pkg_name.orange + ' is not installed. 😔 You may try this: '.lightred + "crew install #{pkg_name}".lightblue
return false
end
latest_pkg = Package.load_package(pkg_file)
installed_pkg = @device[:installed_packages].find { |pkg| pkg[:name] == pkg_name }
pkg_ver_latest = latest_pkg.version
pkg_ver_installed = installed_pkg[:version]
pkg_hash_latest = PackageUtils.get_sha256(latest_pkg)
pkg_hash_installed = installed_pkg[:sha256]
return pkg_hash_latest != pkg_hash_installed unless !pkg_hash_installed || pkg_hash_latest.to_s.empty? || latest_pkg.is_fake?
return pkg_ver_latest != pkg_ver_installed
end
to_be_upgraded = []
if pkgs.any?
# check for specific package(s)
pkgs.each do |pkg_name|
pkg_file = File.join(CREW_PACKAGES_PATH, "#{pkg_name}.rb")
to_be_upgraded << pkg_name if check_update_avail.call(pkg_file)
end
else
# check for all packages if no package name provided
@device[:installed_packages].each do |pkg|
pkg_file = File.join(CREW_PACKAGES_PATH, "#{pkg[:name]}.rb")
to_be_upgraded << pkg[:name] if check_update_avail.call(pkg_file)
end
end
if to_be_upgraded.empty?
puts 'Your software is already up to date.'.lightgreen
return true
end
# Eventually, we should have the upgrade order generated based upon an
# analysis of the dependency hierarchy, to make sure that earlier
# dependencies get upgraded first.
# Manually specify the order of packages that need to have a standalone
# upgrade before any other packages are upgraded.
rerun_upgrade = false
CREW_STANDALONE_UPGRADE_ORDER.each do |upgrade_pkg|
break if rerun_upgrade == true
next unless to_be_upgraded.include?(upgrade_pkg)
puts "Overriding package upgrade list due to need to upgrade #{upgrade_pkg}".green
to_be_upgraded = [upgrade_pkg]
rerun_upgrade = true
end
# install new dependencies (if any)
to_be_upgraded.each do |pkg_name|
search(pkg_name)
resolve_dependencies
end
puts 'Updating packages...'
# upgrade packages
to_be_upgraded.each do |pkg_name|
search(pkg_name)
print_current_package
@pkg.build_from_source = (build_from_source || CREW_BUILD_FROM_SOURCE)
puts "Updating #{@pkg.name}..." if CREW_VERBOSE
@pkg.in_upgrade = true
resolve_dependencies_and_install
end
if rerun_upgrade
at_exit do
puts "Rerunning 'crew upgrade' to make sure upgrades are complete.".lightblue
exec 'crew upgrade'
end
else
puts 'Packages have been updated.'.lightgreen
end
end
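# Download the package source or precompiled binary, preferring the
# cache when enabled, and return a hash with :source and :filename.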
def download
test_url = PackageUtils.get_url(@pkg, build_from_source: @opt_source || @pkg.build_from_source)
sha256sum = PackageUtils.get_sha256(@pkg, build_from_source: @opt_source || @pkg.build_from_source)
# Do an early check for a missing binary package and if so rebuild.
if !@pkg.source?(@device[:architecture]) && @pkg.superclass.to_s == 'Pip'
url = `curl -sI #{test_url}`.lines.first.split[1] == '200' && `curl -Ls #{test_url} | sha256sum -`.chomp == sha256sum ? test_url : 'SKIP'
@pkg.missing_binaries = true
else
url = test_url
end
source = @pkg.source?(@device[:architecture])
uri = URI.parse url
filename = File.basename(uri.path)
# If we're downloading a binary, reset the filename to what it would have been if we didn't download from the API.
filename = "#{@pkg.name}-#{@pkg.version}-chromeos-#{ARCH}.#{@pkg.binary_compression}" if filename.eql?('download')
@extract_dir = "#{@pkg.name}.#{Time.now.utc.strftime('%Y%m%d%H%M%S')}.dir"
build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
return { source:, filename: } if CREW_CACHE_BUILD && File.file?(build_cachefile)
if !url
abort "No precompiled binary or source is available for #{@device[:architecture]}.".lightred
elsif url.casecmp?('SKIP') || (@pkg.no_source_build? || @pkg.gem_compile_needed?)
puts 'Skipping source download...'
elsif @pkg.build_from_source
puts 'Downloading source...'
elsif !source
puts 'Precompiled binary available, downloading...'
else
puts 'No precompiled binary available for your platform, downloading source...'
end
git = true unless @pkg.git_hashtag.to_s.empty?
Dir.chdir CREW_BREW_DIR do
FileUtils.mkdir_p @extract_dir
# We want to skip the download when no_source_build is true during a
# build, but when we have built a binary we are in an upgrade and still
# need the download step to extract the just-generated binary.
crewlog '(@pkg.no_source_build? || @pkg.gem_compile_needed?) && !@pkg.in_upgrade && !@pkg.in_install && caller.grep(/download_command/).empty?'
crewlog "#{@pkg.no_source_build?} || #{@pkg.gem_compile_needed?} && #{!@pkg.in_upgrade} && #{!@pkg.in_install} && #{caller.grep(/download_command/).empty?}"
next if (@pkg.no_source_build? || @pkg.gem_compile_needed?) && !@pkg.in_upgrade && !@pkg.in_install && caller.grep(/download_command/).empty?
case File.basename(filename)
# Sources that download with our internal downloader.
# This also covers all precompiled binaries.
when /\.zip$/i, /\.(tar(\.(gz|bz2|xz|lzma|lz|zst))?|tgz|tbz|tpxz|txz)$/i, /\.deb$/i, /\.AppImage$/i, /\.gem$/i
# Recall file from cache if requested
if CREW_CACHE_ENABLED
puts "Looking for #{@pkg.name} archive in cache".orange if CREW_VERBOSE
# Prefer CREW_LOCAL_BUILD_DIR over CREW_CACHE_DIR.
local_build_cachefile = File.join(CREW_LOCAL_BUILD_DIR, filename)
crew_cache_dir_cachefile = File.join(CREW_CACHE_DIR, filename)
cachefile = File.file?(local_build_cachefile) ? local_build_cachefile : crew_cache_dir_cachefile
puts "Using #{@pkg.name} archive from the build cache at #{cachefile}; The checksum will not be checked against the package file.".orange if cachefile.include?(CREW_LOCAL_BUILD_DIR)
if File.file?(cachefile)
puts "#{@pkg.name.capitalize} archive file exists in cache".lightgreen if CREW_VERBOSE
# Don't check checksum if file is in the build cache.
if Digest::SHA256.hexdigest(File.read(cachefile)) == sha256sum || sha256sum =~ /^SKIP$/i || cachefile.include?(CREW_LOCAL_BUILD_DIR)
begin
# Hard link cached file if possible.
FileUtils.ln cachefile, CREW_BREW_DIR, force: true, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#{filename}")
puts 'Archive hard linked from cache'.green if CREW_VERBOSE
rescue StandardError
# Copy cached file if hard link fails.
FileUtils.cp cachefile, CREW_BREW_DIR, verbose: @fileutils_verbose unless File.identical?(cachefile, "#{CREW_BREW_DIR}/#{filename}")
puts 'Archive copied from cache'.green if CREW_VERBOSE
end
puts 'Archive found in cache'.lightgreen
unless caller.grep(/download_command/).empty?
puts 'Downloaded to: '.lightblue + File.join(CREW_BREW_DIR, filename).blue
FileUtils.rm_rf @extract_dir
end
return { source:, filename: }
else
puts 'Cached archive checksum mismatch. 😔 Will download.'.lightred
cachefile = ''
end
else
puts "Cannot find cached archive at #{cachefile}. 😔 Will download.".orange
cachefile = ''
end
end
# Download file if not cached.
downloader url, sha256sum, filename, CREW_VERBOSE
puts "#{@pkg.name.capitalize} archive downloaded.".lightgreen
# Stow file in cache if requested, if file is not from cache,
# and cache is writable.
if CREW_CACHE_ENABLED && cachefile.to_s.empty? && File.writable?(CREW_CACHE_DIR)
begin
# Hard link to cache if possible.
FileUtils.ln filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
puts 'Archive hard linked to cache'.green if CREW_VERBOSE
rescue StandardError
# Copy to cache if hard link fails.
FileUtils.cp filename, CREW_CACHE_DIR, verbose: @fileutils_verbose
puts 'Archive copied to cache'.green if CREW_VERBOSE
end
end
unless caller.grep(/download_command/).empty?
puts 'Downloaded to: '.lightblue + File.join(CREW_BREW_DIR, filename).blue
FileUtils.rm_rf @extract_dir
end
return { source:, filename: }
when /^SKIP$/i
FileUtils.mkdir_p @extract_dir
else
unless git # We don't want to download a git repository as a file.
FileUtils.mkdir_p @extract_dir
downloader url, sha256sum, filename, CREW_VERBOSE
puts "#{filename}: File downloaded.".lightgreen
FileUtils.mv filename, "#{@extract_dir}/#{filename}"
end
end
# Handle git sources.
if git
# Recall repository from cache if requested
if CREW_CACHE_ENABLED
# No git branch specified, just a git commit or tag
if @pkg.git_branch.to_s.empty?
abort 'No Git branch, commit, or tag specified!'.lightred if @pkg.git_hashtag.to_s.empty?
cachefile = File.join(CREW_CACHE_DIR, "#{filename}#{@pkg.git_hashtag.gsub('/', '_')}.tar.zst")
# Git branch and git commit specified
elsif !@pkg.git_hashtag.to_s.empty?
cachefile = File.join(CREW_CACHE_DIR, "#{filename}#{@pkg.git_branch.gsub(/[^0-9A-Za-z.-]/, '_')}_#{@pkg.git_hashtag.gsub('/', '_')}.tar.zst")
# Git branch specified, without a specific git commit.
else
# Use day granularity for the branch timestamp when no specific commit is specified.
cachefile = File.join(CREW_CACHE_DIR, "#{filename}#{@pkg.git_branch.gsub(/[^0-9A-Za-z.-]/, '_')}#{Time.now.strftime('%m%d%Y')}.tar.zst")
end
puts "Git cachefile is #{cachefile}".orange if CREW_VERBOSE
if File.file?(cachefile) && File.file?("#{cachefile}.sha256")
if Dir.chdir CREW_CACHE_DIR do
system "sha256sum -c #{cachefile}.sha256"
end
FileUtils.mkdir_p @extract_dir
system "tar -Izstd -x#{@verbose}f #{cachefile} -C #{@extract_dir}"
return { source:, filename: }
else
puts 'Cached git repository checksum mismatch. 😔 Will download.'.lightred
end
else
puts 'Cannot find cached git repository. 😔 Will download.'.lightred
end
end
# Download via git
FileUtils.mkdir_p @extract_dir
Dir.chdir @extract_dir do
if @pkg.git_branch.to_s.empty?
system 'git init'
system 'git config advice.detachedHead false'
system 'git config init.defaultBranch master'
system "git remote add origin #{@pkg.source_url}", exception: true
system "git fetch #{'--depth 1' unless @pkg.git_clone_deep?} origin #{@pkg.git_hashtag}", exception: true
system 'git checkout FETCH_HEAD'
else
# Leave a message because this step can be slow.
puts 'Downloading src from a git branch. This may take a while...'
system "git clone --branch #{@pkg.git_branch} --single-branch #{@pkg.source_url} tmpdir", exception: true
system 'mv tmpdir/.git . && rm -rf tmpdir'
system "git reset --hard #{@pkg.git_hashtag}", exception: true
end
system 'git submodule update --init --recursive' unless @pkg.no_git_submodules?
system 'git fetch --tags', exception: true if @pkg.git_fetchtags?
puts 'Repository downloaded.'.lightgreen
end
# Stow file in cache if requested and cache is writable.
if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
puts 'Caching downloaded git repo...'
Dir.chdir @extract_dir do
# Do not use --exclude-vcs to exclude .git
# because some builds will use that information.
system "tar c#{@verbose} \
$(find -mindepth 1 -maxdepth 1 -printf '%P\n') | \
nice -n 20 zstd -c -T0 --ultra -20 - > \
#{cachefile}"
end
system 'sha256sum', cachefile, out: "#{cachefile}.sha256"
puts 'Git repo cached.'.lightgreen
end
end
end
return { source:, filename: }
end
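# Unpack the downloaded archive (or restore a cached build directory)
# under CREW_BREW_DIR and return the directory to build or install from.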
def unpack(meta)
target_dir = nil
Dir.chdir CREW_BREW_DIR do
FileUtils.mkdir_p @extract_dir, verbose: @fileutils_verbose
build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
if CREW_CACHE_BUILD && File.file?(build_cachefile) && File.file?("#{build_cachefile}.sha256") && (system "cd #{CREW_CACHE_DIR} && sha256sum -c #{build_cachefile}.sha256")
@pkg.cached_build = true
puts "Extracting cached build directory from #{build_cachefile}".lightgreen
system "tar -Izstd -x#{@verbose}f #{build_cachefile} -C #{CREW_BREW_DIR}", exception: true
# Need to reset @extract_dir to the extracted cached build
# directory.
@extract_dir = `tar -Izstd --exclude='./*/*' -tf #{build_cachefile} | cut -d '/' -f 1 | LC_ALL=C sort -u`.chomp
else
@pkg.cached_build = false
case File.basename meta[:filename]
when /\.zip$/i
puts "Unpacking archive using 'unzip', this may take a while..."
system 'unzip', (CREW_VERBOSE ? '-v' : '-qq'), '-d', @extract_dir, meta[:filename], exception: true
when /\.(tar(\.(bz2|lz|lzma))?|tbz)$/i
puts "Unpacking archive using 'tar', this may take a while..."
system 'tar', "-x#{@verbose}f", meta[:filename], '-C', @extract_dir, exception: true
when /\.(tar(\.(gz|xz))?|tgz|txz|tpxz)$/i
puts "Unpacking archive using 'tar', this may take a while..."
if Kernel.system('zstd --help 2>/dev/null| grep -q lzma', %i[out err] => File::NULL)
system 'tar', '-Izstd', "-x#{@verbose}f", meta[:filename], '-C', @extract_dir, exception: true
else
system 'tar', "-x#{@verbose}f", meta[:filename], '-C', @extract_dir, exception: true
end
when /\.tar\.zst$/i
puts "Unpacking archive using 'tar', this may take a while..."
system 'tar', '-Izstd', "-x#{@verbose}f", meta[:filename], '-C', @extract_dir, exception: true
when /\.deb$/i
puts "Unpacking '.deb' archive, this may take a while..."
DebUtils.extract_deb(meta[:filename], /data\..*/)
system 'tar', "-x#{@verbose}f", *Dir['data.*'], '-C', @extract_dir, exception: true
when /\.AppImage$/i
puts "Unpacking 'AppImage' archive, this may take a while..."
FileUtils.chmod 0o755, meta[:filename], verbose: @fileutils_verbose
system "../#{meta[:filename]}", '--appimage-extract', chdir: @extract_dir, exception: true
when /\.gem$/i
puts "Moving #{@pkg.gem_name} binary gem for install..."
gem_file = "#{@pkg.gem_name}-#{@pkg.gem_version}-#{GEM_ARCH}.gem"
FileUtils.mv meta[:filename], File.join(@extract_dir, gem_file)
end
end
if meta[:source]
# Check the number of directories in the archive
entries = Dir["#{@extract_dir}/*"]
if entries.empty? && CREW_VERBOSE
# This will happen with SKIP packages.
puts "Empty archive: #{meta[:filename]}".orange
end
target_dir = if entries.length == 1 && File.directory?(entries.first)
# Use `extract_dir/dir_in_archive` if there is only one directory.
entries.first
else
# Use `extract_dir` otherwise
@extract_dir
end
else
# Use `extract_dir` for binary distribution
target_dir = @extract_dir
end
# Remove tarball to save space.
FileUtils.rm_f meta[:filename], verbose: @fileutils_verbose if File.file?(meta[:filename])
end
return File.join(CREW_BREW_DIR, target_dir)
end
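# Run the package patch, prebuild, build, and install steps in
# target_dir, staging the results into CREW_DEST_DIR.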
def build_and_preconfigure(target_dir)
Dir.chdir target_dir do
if @pkg.gem_compile_needed?
puts 'Building binary gem...'
elsif !@pkg.no_compile_needed?
puts 'Building from source, this may take a while...'
# Load musl options only if package is targeted at the musl toolchain
load File.join(CREW_LIB_PATH, 'lib/musl.rb') if @pkg.is_musl?
end
build_start_time = Time.now.to_i
@pkg.in_build = true
unless @pkg.cached_build
@pkg.patch
@pkg.prebuild
end
begin
@pkg.build
rescue StandardError
if CREW_CACHE_FAILED_BUILD
cache_build
abort 'There was a build error, caching build directory.'.lightred
end
abort 'There was a build error.'.lightred
end
@pkg.in_build = false
# wipe crew destdir
FileUtils.rm_rf Dir["#{CREW_DEST_DIR}/*"], verbose: @fileutils_verbose unless @pkg.superclass.to_s == 'RUBY'
puts 'Preconfiguring package...'
cache_build if CREW_CACHE_BUILD
@pkg.install unless @pkg.superclass.to_s == 'RUBY'
build_end_time = Time.now.to_i
crewlog "Build for #{@pkg.name} took #{MiscFunctions.time_difference(build_start_time, build_end_time)}."
end
end
def pre_flight
puts "Performing pre-flight checks for #{@pkg.name}...".lightblue
abort PackageUtils.incompatible_reason(@pkg).join("\n").to_s.lightred unless PackageUtils.compatible?(@pkg)
if @pkg.superclass.to_s == 'RUBY'
puts 'Populating gem information using compact index client...'.lightgreen
$gems ||= BasicCompactIndexClient.new.gems
puts 'Done populating gem information.'.lightgreen
# Update gem sources if updated more than 1 day previously.
gem_spec_cache_dir = File.join(Gem.default_spec_cache_dir, 'rubygems.org%443')
FileUtils.mkdir_p gem_spec_cache_dir
gem_spec_cache_dir_age = (Time.now.to_i - File.mtime(gem_spec_cache_dir).utc.to_i)
puts "Gem source cache age: #{gem_spec_cache_dir_age}".lightgreen if CREW_VERBOSE
if gem_spec_cache_dir_age > (3600 * 24)
puts 'Updating gem source cache.'.lightgreen if CREW_VERBOSE
Kernel.system('gem sources -u')
puts 'Done updating gem source cache.'.lightgreen if CREW_VERBOSE
end
end
@pkg.preflight
end
def pre_install(dest_dir)
Dir.chdir dest_dir do
puts "Performing pre-install for #{@pkg.name}...".lightblue
@pkg.preinstall
# Reload device.json in case preinstall modified it via
# running 'crew remove packages...'
@device = ConvenienceFunctions.load_symbolized_json
end
end
def post_install
GnomePostinstall.add @pkg.name if @pkg.gnome?
# return unless the postinstall function was defined by the package recipe
return unless @pkg.method(:postinstall).source_location[0].include?("#{@pkg.name}.rb")
Dir.mktmpdir do |post_install_tempdir|
Dir.chdir post_install_tempdir do
puts "Performing post-install for #{@pkg.name}...".lightblue
@pkg.postinstall
end
end
end
def compress_doc(dir)
# check whether crew should compress
return if CREW_NOT_COMPRESS || @pkg.no_compress? || !File.file?("#{CREW_PREFIX}/bin/compressdoc")
if Dir.exist? dir
system "find #{dir} -type f ! -perm -200 | xargs -r chmod u+w"
system "compressdoc --zstd #{@short_verbose} #{dir}"
end
end
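# Compare the filelist in dir (or of the installed package) against the
# filelists of all installed packages and return any conflicting paths.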
def determine_conflicts(dir, pkg)
conflicts = []
if File.file?("#{dir}/filelist")
if File.file?(File.join(CREW_META_PATH, "#{pkg}.filelist"))
puts 'Checking for conflicts with files from installed packages...'.orange
conflictscmd = `grep --exclude=#{File.join(CREW_META_PATH, "#{pkg}.filelist")} --exclude=#{CREW_META_PATH}/\\\*_build.filelist -Fxf #{dir}/filelist #{CREW_META_PATH}/*.filelist`
conflicts = conflictscmd.gsub(/(\.filelist|#{CREW_META_PATH})/, '').split("\n")
conflicts.reject!(&:empty?)
end
elsif File.file?(File.join(CREW_META_PATH, "#{pkg}.filelist"))
puts "Checking for conflicts of #{pkg} with files from installed packages...".orange
conflictscmd = `grep --exclude=#{File.join(CREW_META_PATH, "#{pkg}.filelist")} --exclude=#{CREW_META_PATH}/\\\*_build.filelist -Fxf #{File.join(CREW_META_PATH, "#{pkg}.filelist")} #{CREW_META_PATH}/*.filelist`
conflicts = conflictscmd.gsub(/(\.filelist|#{CREW_META_PATH})/, '').split("\n")
conflicts.reject!(&:empty?)
end
if conflicts.any?
puts 'There is a conflict with the same file in another package:'.orange
puts conflicts.to_s.orange
end
conflicts.map! { |x| x.to_s.partition(':').last }
return conflicts
end
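# Post-process the staged install in destdir: relocate man and info
# files, generate the filelist and directory list, check FHS3
# compliance and file conflicts, then strip and shrink binaries.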
def prepare_package(destdir)
# Create the destdir if it does not exist, to avoid needing this single
# line in every no_compile_needed package.
FileUtils.mkdir_p CREW_DEST_PREFIX
Dir.chdir destdir do
# Avoid /usr/local/share/info/dir{.gz} file conflict:
# The install-info program maintains a directory of installed
# info documents in /usr/share/info/dir for the use of info
# readers. This file must not be included in packages other
# than install-info.
# https://www.debian.org/doc/debian-policy/ch-docs.html#info-documents
FileUtils.rm_f "#{CREW_DEST_PREFIX}/share/info/dir"
# Remove all perl module files which will conflict
if @pkg.name =~ /^perl_/
puts 'Removing .packlist and perllocal.pod files to avoid conflicts with other perl packages.'.orange
system "find #{CREW_DEST_DIR} -type f \\( -name '.packlist' -o -name perllocal.pod \\) -delete"
end
# Compress manual files, and move errant files to the correct
# locations.
if File.exist?("#{CREW_DEST_PREFIX}/man")
puts "Files in #{CREW_PREFIX}/man will be moved to #{CREW_MAN_PREFIX}.".orange
FileUtils.mkdir_p CREW_DEST_MAN_PREFIX
FileUtils.mv Dir["#{CREW_DEST_PREFIX}/man/*"], "#{CREW_DEST_MAN_PREFIX}/"
Dir.rmdir "#{CREW_DEST_PREFIX}/man" if Dir.empty?("#{CREW_DEST_PREFIX}/man")
end
if File.exist?("#{CREW_DEST_PREFIX}/info")
puts "Files in #{CREW_PREFIX}/info will be moved to #{CREW_PREFIX}/share/info.".orange
FileUtils.mkdir_p "#{CREW_DEST_PREFIX}/share/info/"
FileUtils.mv Dir["#{CREW_DEST_PREFIX}/info/*"], "#{CREW_DEST_PREFIX}/share/info/"
Dir.rmdir "#{CREW_DEST_PREFIX}/info" if Dir.empty?("#{CREW_DEST_PREFIX}/info")
end
# Remove the "share/info/dir.*" file since it causes conflicts.
FileUtils.rm_f Dir["#{CREW_DEST_PREFIX}/share/info/dir*"]
compress_doc CREW_DEST_MAN_PREFIX
compress_doc "#{CREW_DEST_PREFIX}/share/info"
# Allow postbuild to override the filelist contents
@pkg.postbuild
# create file list
system "find .#{CREW_PREFIX} -type f,l | cut -c2- | LC_ALL=C sort", out: %w[filelist a] if Dir.exist?(CREW_DEST_PREFIX)
system "find .#{HOME} -type f,l | cut -c2- | LC_ALL=C sort", out: %w[filelist a] if Dir.exist?(CREW_DEST_HOME)
if Dir.exist?("#{CREW_LOCAL_REPO_ROOT}/manifest") && File.writable?("#{CREW_LOCAL_REPO_ROOT}/manifest")
FileUtils.mkdir_p "#{CREW_LOCAL_REPO_ROOT}/manifest/#{ARCH}/#{@pkg.name.chr.downcase}"
FileUtils.cp 'filelist', "#{CREW_LOCAL_REPO_ROOT}/manifest/#{ARCH}/#{@pkg.name.chr.downcase}/#{@pkg.name}.filelist"
end
# check for FHS3 compliance
puts 'Checking for FHS3 compliance...'
errors = false
fhs_compliant_prefix = %W[bin etc include lib #{ARCH_LIB} libexec opt sbin share var].uniq
Dir.foreach(CREW_DEST_PREFIX) do |filename|
next if %w[. ..].include?(filename)
unless fhs_compliant_prefix.include?(filename)
if CREW_FHS_NONCOMPLIANCE_ONLY_ADVISORY || @pkg.no_fhs?
puts "Warning: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".orange
else
puts "Error: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".lightred
errors = true
end
end
end
# check for conflicts with other installed files
conflicts = determine_conflicts(Dir.pwd, @pkg.name)
if conflicts.any?
if CREW_CONFLICTS_ONLY_ADVISORY || @pkg.conflicts_ok?
puts 'Warning: There is a conflict with the same file in another package.'.orange
else
puts 'Error: There is a conflict with the same file in another package.'.lightred
errors = true
end
puts conflicts
end
# abort if errors encountered
abort 'Exiting due to above errors.'.lightred if errors
# Make sure the package file has runtime dependencies added properly.
system "#{CREW_LIB_PATH}/tools/getrealdeps.rb --use-crew-dest-dir #{@pkg.name}", exception: true if File.which('gawk') && File.which('upx') && !@pkg.no_compile_needed?
# create directory list
# Remove CREW_PREFIX and HOME from the generated directorylist.
crew_prefix_escaped = CREW_PREFIX.gsub('/', '\/')
home_escaped = HOME.gsub('/', '\/')
system "find .#{CREW_PREFIX} -type d | cut -c2- | sed '0,/#{crew_prefix_escaped}/{/#{crew_prefix_escaped}/d}'| LC_ALL=C sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_PREFIX)
system "find .#{HOME} -type d | cut -c2- | sed '0,/#{home_escaped}/{/#{home_escaped}/d}' | LC_ALL=C sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_HOME)
strip_dir destdir
# use upx on executables
shrink_dir destdir
end
end
def patchelf_set_need_paths(dir)
return if @pkg.no_patchelf? || @pkg.no_compile_needed?
puts 'Patchelf is currently disabled during builds due to problems with upx.'.yellow
return
# Disable unreachable code check, as this is a temporary situation
# rubocop:disable Lint/UnreachableCode
Dir.chdir dir do
puts 'Running patchelf'.lightblue
abort 'No Patchelf found!'.lightred unless File.file?("#{CREW_PREFIX}/bin/patchelf")
execfiles = `find . -executable -type f ! \\( -name '*.a' \\) | xargs -P#{CREW_NPROC} -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
return if execfiles.empty?
patchelf_lib_prefix = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib" : CREW_LIB_PREFIX
puts "patchelf_lib_prefix is #{patchelf_lib_prefix}" if CREW_VERBOSE
patchelf_interpreter = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib/libc.so" : "#{CREW_LIB_PREFIX}/libc.so.6"
puts "patchelf_interpreter is #{patchelf_interpreter}" if CREW_VERBOSE
puts 'Running patchelf to patch binaries for library paths'.lightblue
execfiles.each_line(chomp: true) do |execfiletopatch|
execfiletopatch = Dir.pwd + execfiletopatch.delete_prefix('.')
neededlibs = `patchelf --print-needed #{execfiletopatch}`
next if neededlibs.to_s.empty?
neededlibs.each_line(chomp: true) do |neededlibspatch|
next if neededlibspatch.include?(patchelf_lib_prefix.to_s)
# Avoid segfaults from not using system versions of these files.
patchelf_veto_files = %w[
libdl.so
ld-linux.so.2
ld-linux-x86-64.so.2
ld-linux-armhf.so.3
libc.so.6
]
next if !@pkg.is_musl? && patchelf_veto_files.any? { |i| neededlibspatch.include? i }
neededlib_basename = File.basename(neededlibspatch)
neededlibspatchednamepath = "#{patchelf_lib_prefix}/" + neededlib_basename
# The first check here can be changed to just check the dest_dir
# hierarchy for neededlib_basename if the intent is to allow
# using a different CREW_PREFIX during package installs.
if File.file?(neededlibspatchednamepath) || File.file?(Dir.pwd + neededlibspatchednamepath)
puts "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}" if CREW_VERBOSE
system "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}"
else
puts "#{execfiletopatch} needed library #{neededlib_basename} not found in #{patchelf_lib_prefix} or #{Dir.pwd + neededlibspatchednamepath}.".lightred
end
end
# Do not set interpreter for non-musl, as this can break apps if there
# is an issue with the crew glibc.
next unless @pkg.is_musl?
puts 'Running patchelf to patch binary interpreter paths'.lightblue
system "patchelf --set-interpreter #{patchelf_interpreter} #{execfiletopatch}"
end
end
# rubocop:enable Lint/UnreachableCode
end
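# Strip symbols from files matched by find_cmd using llvm-strip.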
def strip_find_files(find_cmd, strip_option = '')
# Check whether crew should strip.
return if CREW_NOT_STRIP || @pkg.no_strip? || !File.file?("#{CREW_PREFIX}/bin/llvm-strip")
# Run find_cmd and strip only files with ar or elf magic headers.
system "#{find_cmd} | xargs -r chmod u+w"
strip_verbose = CREW_VERBOSE ? 'echo "Stripping ${0:1}" &&' : ''
# The craziness here is from having to escape the special characters
# in the magic headers for these files.
system "#{find_cmd} | xargs -P#{CREW_NPROC} -n1 -r bash -c 'header=$(head -c4 ${0}); elfheader='$(printf '\\\177ELF')' ; arheader=\\!\\<ar ; case $header in $elfheader|$arheader) #{strip_verbose} llvm-strip #{strip_option} ${0} ;; esac'"
end
def strip_dir(dir)
unless CREW_NOT_STRIP || @pkg.no_strip? || @pkg.no_compile_needed?
Dir.chdir dir do
# Strip libraries with -S
puts 'Stripping libraries...'
strip_find_files "find . -type f \\( -name 'lib*.a' -o -name 'lib*.so*' \\) -print", '-S'
# Strip binaries but not compressed archives
puts 'Stripping binaries...'
extensions = %w[bz2 gz lha lz lzh rar tar tbz tgz tpxz txz xz Z zip zst]
inames = extensions.join(' -o -iname *.')
strip_find_files "find . -type f ! \\( -iname *.#{inames} \\) ! \\( -name 'lib*.a' -o -name 'lib*.so' \\) -perm /111 -print"
end
end
end
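# Shrink the staged install in dir by hard linking duplicates with
# rdfind and compressing executables with upx.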
def shrink_dir(dir)
unless CREW_NOT_SHRINK_ARCHIVE || @pkg.no_shrink?
Dir.chdir dir do
if File.file?("#{CREW_PREFIX}/bin/rdfind")
puts 'Using rdfind to convert duplicate files to hard links.'
system "#{CREW_PREFIX}/bin/rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false ."
end
# There are issues with compressing libraries on non-x86_64, so just
# compress non-libraries. Also note that one needs to use "upx -d" on
# a compressed file before ldd can be used on it.
# sommelier also isn't happy when sommelier and xwayland are
# compressed, so don't compress those packages.
if File.executable?("#{CREW_PREFIX}/bin/upx")
# 1. Find executable binaries but also check for hard linked
# files by making sure we have a unique set of
# inodes for the binaries found.
# 2. Copy to a temp file.
# 3. Compress using upx. (Uncompressible files are ignored.)
# 4. Check compression by expanding the compressed file with
# upx.
# 5. If the expansion doesn't error out then it is ok to copy
# over the original. (This also lets us only avoid compressing
# hard linked files multiple times.)
execfiles = `find . -executable -type f ! \\( -name '*.so*' -o -name '*.a' \\) | xargs -P8 -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
unless execfiles.empty?
puts 'Using upx to shrink binaries.'
# Copying in the ThreadPoolExecutor loop fails non-deterministically
execfiles.each_line(chomp: true) do |execfilecp|
execfilecp.slice! '.'
next if execfilecp.empty?
execfilecp = File.join(dir, execfilecp)
next unless File.file?(execfilecp)
FileUtils.cp execfilecp, "#{execfilecp}-crewupxtmp"
end
require_gem 'concurrent-ruby'
pool = Concurrent::ThreadPoolExecutor.new(
min_threads: 1,
max_threads: CREW_NPROC,
max_queue: 0, # unbounded work queue
fallback_policy: :caller_runs
)
execfiles.each_line(chomp: true) do |execfile|
pool.post do
execfile.slice! '.'
execfile = File.join(dir, execfile)
puts "Attempting to compress #{execfile} ...".orange
# Compress the temp copy created above.
unless system "upx --lzma #{execfile}-crewupxtmp"
puts "Compression of #{execfile} failed...".orange if CREW_VERBOSE
FileUtils.rm_f "#{execfile}-crewupxtmp"
end
if File.file?("#{execfile}-crewupxtmp")
puts "Testing compressed #{execfile}...".lightblue if CREW_VERBOSE
if system 'upx', '-t', "#{execfile}-crewupxtmp"
puts "#{execfile} successfully compressed...".lightgreen
FileUtils.cp "#{execfile}-crewupxtmp", execfile
end
end
FileUtils.rm_f "#{execfile}-crewupxtmp"
end
end
pool.shutdown
pool.wait_for_termination
# Make sure temporary compression copies are deleted.
system 'find . -executable -type f -name "*-crewupxtmp" -delete'
end
end
end
end
end
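# Move staged files from src into dst, using crew-mvdir when available,
# falling back to rsync and then to tar.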
def install_files(src, dst = File.join(CREW_PREFIX, src.delete_prefix('./usr/local')))
if Dir.exist?(src)
if File.executable?("#{CREW_PREFIX}/bin/crew-mvdir") && !CREW_DISABLE_MVDIR
system "crew-mvdir #{@short_verbose} #{src} #{dst}", exception: true
else
warn 'crew-mvdir is not installed. Please install it with \'crew install crew_mvdir\' for improved installation performance'.yellow unless (@pkg.name == 'crew_mvdir') || CREW_DISABLE_MVDIR
if File.executable?("#{CREW_PREFIX}/bin/rsync") && system("#{CREW_PREFIX}/bin/rsync --version > /dev/null")
# rsync src path needs a trailing slash
src << '/' unless src.end_with?('/')
# Check for ACLs support.
rsync_version = `rsync --version`.chomp
if rsync_version.include?('ACLs') && !rsync_version.include?('no ACLs')
system 'rsync', "-ah#{@verbose}HAXW", '--remove-source-files', src, dst, exception: true
else
system 'rsync', "-ah#{@verbose}HXW", '--remove-source-files', src, dst, exception: true
end
else
system "cd #{src}; tar -cf - ./* | (cd #{dst}; tar -x#{@verbose}p --keep-directory-symlink -f -)", exception: true
end
end
else
abort "#{src} directory does not exist.".lightred
end
end
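# Install the staged package from pkgdir: record its filelist and
# directory list in CREW_META_PATH, fix broken symlinks, and move the
# files into place.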
def install_package(pkgdir)
Dir.chdir pkgdir do
# install filelist, dlist and binary files
puts 'Performing install...'
FileUtils.mv 'dlist', File.join(CREW_META_PATH, "#{@pkg.name}.directorylist"), verbose: @fileutils_verbose
FileUtils.mv 'filelist', File.join(CREW_META_PATH, "#{@pkg.name}.filelist"), verbose: @fileutils_verbose
unless CREW_NOT_LINKS || @pkg.no_links?
brokensymlinks = `find . -type l -exec test ! -e {} \\; -print`.chomp
unless brokensymlinks.to_s.empty?
puts 'There are broken symlinks. Will try to fix.'.orange if CREW_VERBOSE
brokensymlinks.each_line(chomp: true) do |fixlink|
brokentarget = `readlink -n #{fixlink}`.chomp
puts "Attempting fix of: #{fixlink.delete_prefix('.')} -> #{brokentarget}".orange if CREW_VERBOSE
fixedtarget = brokentarget.delete_prefix(CREW_DEST_DIR)
fixedlink_loc = File.join(pkgdir, fixlink.delete_prefix('.'))
# If no changes were made, don't replace symlink
unless fixedtarget == brokentarget
FileUtils.ln_sf fixedtarget, fixedlink_loc
puts "Fixed: #{fixedtarget} -> #{fixlink.delete_prefix('.')}".orange if CREW_VERBOSE
end
end
end
if File.executable?("#{CREW_PREFIX}/bin/rdfind")
puts 'Using rdfind to convert duplicate files to hard links.'
system 'rdfind -removeidentinode true -makesymlinks false -makehardlinks true -makeresultsfile false .'
end
end
install_files(".#{CREW_PREFIX}") if Dir.exist?(".#{CREW_PREFIX}")
install_files(".#{HOME}", HOME) if Dir.exist?(".#{HOME}")
end
end
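# Top-level install driver: run preflight checks, resolve and install
# dependencies, install the package, then run the postinstalls.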
def resolve_dependencies_and_install
@resolve_dependencies_and_install = 1
# Process preflight block to see if package should even
# be downloaded or installed.
pre_flight
begin
origin = @pkg.name
@to_postinstall = []
resolve_dependencies
search origin, silent: true
install
@to_postinstall.append(@pkg.name)
@to_postinstall.each do |dep|
search dep
post_install
end
rescue InstallError => e
abort "#{@pkg.name} failed to install: #{e}".lightred
ensure
# cleanup
unless @opt_keep
FileUtils.rm_rf Dir["#{CREW_BREW_DIR}/*"]
FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest" # this is a little ugly, feel free to find a better way
end
end
# Warn of possible segfaults for older packages on AMD StoneyRidge platforms
# Family 21 identifies AMD Bulldozer/Piledriver/Steamroller/Excavator µArchs
puts <<~EOT.yellow if CREW_IS_AMD && CPUINFO['cpu family'] == '21'
Notice: You are running an AMD StoneyRidge device; due to some bugs, some
older packages may fail with a segmentation fault and need to be rebuilt.
If this happens, please report them to:
https://github.com/chromebrew/chromebrew/issues
Otherwise, rebuilding from source (1) or disabling ASLR (2) usually solves the issue:
(1) Run `crew reinstall -s #{@pkg.name}` to rebuild the package from source,
__OR__
(2) Execute `echo 0 | sudo tee /proc/sys/kernel/randomize_va_space` to disable ASLR.
Warning: Disabling ASLR may create security issues, use it at your own risk!
EOT
# Run reload bashrc hook only for installs and reinstalls.
at_exit do
if @pkg&.print_source_bashrc? || @pkg&.gnome?
crewlog "@pkg.print_source_bashrc?:#{@pkg.print_source_bashrc?} @pkg.gnome?:#{@pkg.gnome?}"
# Check to see if the trap was set in #{CREW_PREFIX}/etc/profile
# from crew_profile_base, because otherwise, USR1 will kill crosh.
if File.read("/proc/#{Process.ppid}/comm").include?('[trap set]')
crewlog 'USR1 trap exists. Sourcing ~/.bashrc .'
Process.kill('USR1', Process.ppid)
else
ExitMessage.add <<~PRINT_SOURCE_BASHRC_EOT.lightblue, print_last: true
To finish the installation, please execute the following:
source ~/.bashrc
PRINT_SOURCE_BASHRC_EOT
end
end
end
puts "#{@pkg.name.capitalize} installed!".lightgreen
@resolve_dependencies_and_install = 0
end
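# Resolve the dependency list for @pkg: verify version ranges and
# architecture compatibility, then prompt for and install any missing
# dependencies.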
def resolve_dependencies
@dependencies = @pkg.get_deps_list(return_attr: true)
# compare dependency version with required range (if installed)
@dependencies.each do |dep|
dep_name = dep.keys[0]
dep_info = @device[:installed_packages].find { |pkg| pkg[:name] == dep_name }
# skip if dependency is not installed
next unless dep_info
_tags, version_check = dep.values[0]
installed_version = dep_info[:version]
next unless version_check
# Abort if the installed version does not fulfil the required range.
abort "The installed version of #{dep_name} (#{installed_version}) does not satisfy the version range required by #{@pkg.name}.".lightred unless version_check.call(installed_version)
end
# leave only dependency names (remove all package attributes returned by @pkg.get_deps_list)
@dependencies.map!(&:keys).flatten!
# abort & identify incompatible dependencies.
@dependencies.each do |dep|
abort "Some dependencies e.g., #{dep}, are not compatible with your device architecture (#{ARCH}). Unable to continue.".lightred unless PackageUtils.compatible?(Package.load_package("#{CREW_PACKAGES_PATH}/#{dep}.rb"))
end
# leave only not installed packages in dependencies
@dependencies.reject! { |dep_name| @device[:installed_packages].any? { |pkg| pkg[:name] == dep_name } }
# run preflight check for dependencies
@dependencies.each do |dep_name|
Package.load_package(File.join(CREW_PACKAGES_PATH, "#{dep_name}.rb")).preflight
end
return if @dependencies.empty?
puts 'The following packages also need to be installed: '
@dependencies.each do |dep|
FileUtils.cp "#{CREW_LOCAL_REPO_ROOT}/packages/#{dep}.rb", CREW_PACKAGES_PATH if !File.file?(File.join(CREW_PACKAGES_PATH, "#{dep}.rb")) && File.file?(File.join(CREW_LOCAL_REPO_ROOT, "packages/#{dep}.rb")) && Package.agree_default_yes("The package file for #{dep}, which is a required dependency to build #{@pkg.name} only exists in #{CREW_LOCAL_REPO_ROOT}/packages/ . Is it ok to copy it to #{CREW_PACKAGES_PATH} so that the build can continue?")
abort "Dependency #{dep} for #{@pkg.name} was not found.".lightred unless File.file?(File.join(CREW_PACKAGES_PATH, "#{dep}.rb"))
end
puts @dependencies.join(' ')
if @opt_force
puts 'Proceeding with dependency package installation...'.orange
elsif !Package.agree_default_yes('Proceed')
abort 'No changes made.'
end
@dependencies.each do |dep|
search dep
print_current_package
install
end
if @resolve_dependencies_and_install.eql?(1) || @resolve_dependencies_and_build.eql?(1)
@to_postinstall = @dependencies
else
# Make sure the sommelier postinstall happens last so the messages
# from that are not missed by users.
@dependencies = @dependencies.partition { |v| v != 'sommelier' }.reduce(:+)
@dependencies.each do |dep|
search dep
post_install
end
end
end
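# Download, unpack, build (when source is needed), and install @pkg,
# then record it in device.json.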
def install
@pkg.in_install = true
if !@pkg.in_upgrade && PackageUtils.installed?(@pkg.name) && @pkg.superclass.to_s != 'RUBY'
puts "Package #{@pkg.name} already installed, skipping...".lightgreen
return
end
install_start_time = Time.now.to_i
if @pkg.is_fake?
# use CREW_DEST_DIR
dest_dir = CREW_DEST_DIR
elsif @pkg.superclass.to_s == 'RUBY'
meta = download
target_dir = unpack meta
gem_file = "#{@pkg.gem_name}-#{@pkg.gem_version}-#{GEM_ARCH}.gem"
if File.file?(File.join(target_dir, gem_file))
FileUtils.mv File.join(target_dir, gem_file), File.join(CREW_DEST_DIR, gem_file)
else
build_and_preconfigure target_dir
end
dest_dir = CREW_DEST_DIR
else
meta = download
target_dir = unpack meta
if meta[:source]
# build from source and place binaries at CREW_DEST_DIR
# CREW_DEST_DIR contains usr/local/... hierarchy
build_and_preconfigure target_dir
# prepare filelist and dlist at CREW_DEST_DIR
prepare_package CREW_DEST_DIR
# use CREW_DEST_DIR
dest_dir = CREW_DEST_DIR
else
# use extracted binary directory
dest_dir = target_dir
end
end
# Make a backup of the installed packages json file.
# If this fails, the install should fail before we create any damage,
# and we should roughly be at maximal disk space usage at this point
# anyway.
begin
FileUtils.cp(File.join(CREW_CONFIG_PATH, 'device.json'), "#{CREW_CONFIG_PATH}/device.json.tmp")
rescue StandardError
puts 'Error writing installed packages json file backup!'.lightred
abort
end
# remove it just before the file copy
if @pkg.in_upgrade
puts 'Attempting removal since this is an upgrade or reinstall...'
Command.remove(@pkg)
end
if @pkg.is_fake?
puts "Install will be skipped since #{@pkg.name} is a fake package.".orange if CREW_VERBOSE
else
# perform pre-install process
pre_install dest_dir
# perform install process
if @pkg.superclass.to_s == 'RUBY'
Dir.chdir(target_dir) do
@pkg.install
end
else
install_package dest_dir
end
unless (@resolve_dependencies_and_install == 1) || (@resolve_dependencies_and_build == 1)
# perform post-install process
post_install
end
end
install_end_time = Time.now.to_i
install_time_elapsed_string = MiscFunctions.time_difference(install_start_time, install_end_time)
crewlog "Build & install for #{@pkg.name} took #{install_time_elapsed_string}."
puts "Build & install for #{@pkg.name} took #{install_time_elapsed_string}. Please ask for #{ARCH} binaries to be generated for #{@pkg.name}.".lightpurple if (install_start_time - install_end_time) > 60
# Add to installed packages list in devices.json, but remove first if it is already there.
crewlog "Adding package #{@pkg.name} to device.json."
@device[:installed_packages].delete_if { |entry| entry[:name] == @pkg.name } and @device[:installed_packages].push(name: @pkg.name, version: @pkg.version, sha256: PackageUtils.get_sha256(@pkg, build_from_source: @opt_source))
ConvenienceFunctions.save_json(@device)
crewlog "#{@pkg.name} in device.json after install: #{`jq --arg key '#{@pkg.name}' -e '.installed_packages[] | select(.name == $key )' #{File.join(CREW_CONFIG_PATH, 'device.json')}`}" if File.which('jq')
@pkg.in_install = false
end
def resolve_dependencies_and_build
@resolve_dependencies_and_build = 1
@to_postinstall = []
begin
origin = @pkg.name
# Mark the current package as required to be compiled from source.
@pkg.build_from_source = true
resolve_dependencies
@to_postinstall.each do |dep|
search dep
post_install
end
search origin, silent: true
build_package CREW_LOCAL_BUILD_DIR
rescue InstallError => e
abort "#{@pkg.name} failed to build: #{e}".lightred
ensure
# cleanup
unless @opt_keep
FileUtils.rm_rf Dir["#{CREW_BREW_DIR}/*"], verbose: @fileutils_verbose
FileUtils.mkdir_p "#{CREW_BREW_DIR}/dest", verbose: @fileutils_verbose # this is a little ugly, feel free to find a better way
end
end
puts "#{@pkg.name.capitalize} is built!".lightgreen
@resolve_dependencies_and_build = 0
end
def build_package(crew_archive_dest)
# Download source code and unpack it.
meta = download
target_dir = unpack meta
# Build from source and place binaries in CREW_DEST_DIR.
build_and_preconfigure target_dir
# Call the check method here. The check method is called by this
# function only, so it is safe to place time-consuming tests in the
# check method.
if Dir.exist? target_dir
Dir.chdir target_dir do
@pkg.check
end
end
# prepare filelist and dlist at CREW_DEST_DIR
prepare_package CREW_DEST_DIR unless @pkg.superclass.to_s == 'RUBY'
# build package from filelist, dlist and binary files in CREW_DEST_DIR
puts 'Archiving...'
archive_package(crew_archive_dest)
end
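# Archive CREW_DEST_DIR into a .gem, .tar.xz, or .tar.zst package in
# crew_archive_dest, along with a sha256 file.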
def archive_package(crew_archive_dest)
if @pkg.superclass.to_s == 'RUBY'
gem_file = "#{@pkg.gem_name}-#{@pkg.gem_version}-#{GEM_ARCH}.gem"
pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.gem"
begin
FileUtils.mv File.join(CREW_DEST_DIR, gem_file), File.join(crew_archive_dest, pkg_name)
rescue Errno::EXDEV
# handle Invalid cross-device link error in containers.
FileUtils.cp File.join(CREW_DEST_DIR, gem_file), File.join(crew_archive_dest, pkg_name)
end
# Only use zstd if it is available.
elsif @pkg.no_zstd? || !File.which('zstd')
puts 'Using xz to compress package. This may take some time.'.lightblue
pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.xz"
Dir.chdir CREW_DEST_DIR do
system "tar c#{@verbose}Jf #{crew_archive_dest}/#{pkg_name} *"
end
else
puts 'Using zstd to compress package. This may take some time.'.lightblue
pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.zst"
Dir.chdir CREW_DEST_DIR do
# Using the same zstd compression options as Arch, which favor
# decompression speed over compression speed.
# See https://lists.archlinux.org/pipermail/arch-dev-public/2019-March/029542.html
# Use nice so that user can (possibly) do other things during compression.
system "tar c#{@verbose} * | nice -n 20 zstd -c -T0 --ultra -20 - > #{crew_archive_dest}/#{pkg_name}"
end
end
system "sha256sum #{crew_archive_dest}/#{pkg_name} > #{crew_archive_dest}/#{pkg_name}.sha256"
# Copy package file for the successfully generated package to CREW_LOCAL_REPO_ROOT only if force is set.
if @opt_force
FileUtils.cp "#{CREW_PACKAGES_PATH}/#{@pkg_name}.rb", "#{CREW_LOCAL_REPO_ROOT}/packages/"
puts "The package file for #{@pkg_name} used has been copied to #{CREW_LOCAL_REPO_ROOT}/packages/".lightblue
if PackageUtils.installed?(@pkg.name)
puts "#{@pkg_name} will now be upgraded...".lightgreen
@pkg.in_upgrade = true
@pkg.build_from_source = false
resolve_dependencies_and_install
@pkg.in_upgrade = false
else
puts "#{@pkg_name} will now be installed...".lightgreen
@pkg.build_from_source = false
resolve_dependencies_and_install
end
end
end
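# Upload built package archives for each architecture to the GitLab
# package repo, then update the binary_compression and binary_sha256
# entries in the package file.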
def upload(pkg_name = nil, pkg_version = nil, gitlab_token = nil, gitlab_token_username = nil, binary_compression = nil)
abort "\nPackage to be uploaded was not specified.\n".lightred if pkg_name.nil?
abort "\nGITLAB_TOKEN environment variable not set.\n".lightred if gitlab_token.nil?
abort "\nGITLAB_TOKEN_USERNAME environment variable not set.\n".lightred if gitlab_token_username.nil?
packages = pkg_name
packages.strip!
[packages].each do |package|
binary_compression_not_in_file = binary_compression.nil?
pkg_file = "#{CREW_LOCAL_REPO_ROOT}/packages/#{package}.rb"
binary_sha256_hash = { armv7l: nil, i686: nil, x86_64: nil }
# The following is used to figure out where a non-standard
# binary_sha256 section might be, such as in a gcc_lib file.
starting_binary_sha256_hash = { armv7l: nil, i686: nil, x86_64: nil }
binary_compression = 'gem' if @pkg.superclass.to_s == 'RUBY'
%w[x86_64 i686 armv7l].each do |arch|
# Load existing binary_sha256 hash from the package file.
binary_sha256_hash[arch.to_sym] = @pkg.binary_sha256[arch.to_sym] if @pkg.binary_sha256&.key?(arch.to_sym)
starting_binary_sha256_hash[arch.to_sym] = @pkg.binary_sha256[arch.to_sym] if @pkg.binary_sha256&.key?(arch.to_sym)
release_dir = "#{CREW_LOCAL_REPO_ROOT}/release/#{arch}"
new_tarfile = if binary_compression.nil?
Dir["#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.{tar.xz,tar.zst}"].max_by { |f| File.mtime(f) }
else
"#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.#{binary_compression}"
end
if new_tarfile.nil? || !File.file?(new_tarfile)
puts "#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.#{binary_compression.nil? ? '(tar.xz|tar.zst)' : binary_compression} not found.\n".lightred
next arch
end
if binary_compression_not_in_file
ext = File.extname(new_tarfile)
binary_compression = @pkg.superclass.to_s == 'RUBY' ? 'gem' : "tar#{ext}"
binary_compression_line = " binary_compression '#{binary_compression}'"
puts "Setting binary compression in #{pkg_file} to '#{binary_compression}'..."
# Update the binary_compression setting, adding the line if it doesn't exist.
file = File.read(pkg_file)
bc_re = /^\ \ binary_compression.*/
source_re = /^\ \ source_sha256.*/
git_hashtag_re = /^\ \ git_hashtag.*/
source_url_re = /^\ \ source_url.*/
if file.match(bc_re)
File.write(pkg_file, file.gsub(bc_re, binary_compression_line))
elsif file.match(source_re)
source_sha256_bc_line = "#{file.match(source_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_re, source_sha256_bc_line))
elsif file.match(git_hashtag_re)
git_hashtag_bc_line = "#{file.match(git_hashtag_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(git_hashtag_re, git_hashtag_bc_line))
elsif file.match(source_url_re)
source_url_bc_line = "#{file.match(source_url_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_url_re, source_url_bc_line))
else
puts "Unable to tell where to add \"#{binary_compression_line}\" to #{pkg_file}. Please add it and manually.".lightblue
end
end
puts "Package: #{package}, Arch: #{arch}".yellow
puts
puts "\e[1A\e[KGenerating sha256sum ...\r".orange
new_sha256 = Digest::SHA256.hexdigest(File.read(new_tarfile))
puts "Uploading #{new_tarfile} ...".orange if CREW_VERBOSE
noname = new_tarfile.split("#{package}-").last
new_version = noname.split('-chromeos').first
new_url = "#{CREW_GITLAB_PKG_REPO}/generic/#{package}/#{new_version}_#{arch}/#{new_tarfile}".gsub("#{release_dir}/", '')
token_label = gitlab_token.split('-').first == 'glpat' ? 'PRIVATE-TOKEN' : 'DEPLOY-TOKEN'
puts "\e[1A\e[KChecking for existing upload ...\r".orange
if `curl -fsI #{new_url}`.lines.first.to_s.split[1] == '200'
puts "\n#{File.basename(new_tarfile)} has already been uploaded.\nPlease change the #{package} package version from #{new_version} and try again.\n".lightred
if Package.agree_default_no('Do you want to overwrite the existing upload instead')
puts "\e[1A\e[KOverwriting existing upload...\r".orange
crewlog "#{arch} = #{new_sha256}"
binary_sha256_hash[arch.to_sym] = new_sha256
else
puts "\e[1A\e[KWill NOT overwite the existing upload. Determining sha256 of already uploaded file...\r".orange
upstream_sha256 = `curl -Ls #{new_url} | sha256sum`.chomp.split.first
crewlog "#{arch} = #{upstream_sha256}"
binary_sha256_hash[arch.to_sym] = upstream_sha256
next arch
end
else
crewlog "#{arch} = #{new_sha256}"
binary_sha256_hash[arch.to_sym] = new_sha256
end
puts "curl -# --header \"#{token_label}: #{gitlab_token}\" --upload-file \"#{new_tarfile}\" \"#{new_url}\" | cat" if CREW_VERBOSE
puts "\e[1A\e[KUploading...\r".orange
output = `curl -# --header "#{token_label}: #{gitlab_token}" --upload-file "#{new_tarfile}" "#{new_url}" | cat`.chomp
puts "\e[1A\e[KChecking upload...\r".orange
if output.include?('201 Created')
puts "curl -Ls #{new_url} | sha256sum" if CREW_VERBOSE
upstream_sha256 = `curl -Ls #{new_url} | sha256sum`.chomp.split.first
if upstream_sha256 == new_sha256
puts "#{output}\n".lightgreen
else
if CREW_VERBOSE
puts "expected sha256 hash=#{new_sha256}"
puts "upstream sha256 hash=#{upstream_sha256}"
end
puts "#{output}. Checksum mismatch. Skipping binary_sha256 update in #{pkg_file}...".lightred
next
end
else
puts "#{output}. Unable to upload. Skipping binary_sha256 update in #{pkg_file}...".lightred
next
end
end
# Generate new or replacement binary_sha256 block.
puts "\e[1A\e[KGenerating binary_sha256 block for package file...\r".orange
binary_sha256_block = ''
binary_sha256_block << "\n binary_sha256({\n"
unless binary_sha256_hash[:armv7l].nil?
binary_sha256_block << " aarch64: '#{binary_sha256_hash[:armv7l]}',\n"
binary_sha256_block << " armv7l: '#{binary_sha256_hash[:armv7l]}'"
binary_sha256_block << if binary_sha256_hash[:i686].nil? && binary_sha256_hash[:x86_64].nil?
"\n"
else
",\n"
end
end
unless binary_sha256_hash[:i686].nil?
binary_sha256_block << " i686: '#{binary_sha256_hash[:i686]}'"
binary_sha256_block << if binary_sha256_hash[:x86_64].nil?
"\n"
else
",\n"
end
end
binary_sha256_block << " x86_64: '#{binary_sha256_hash[:x86_64]}'\n" unless binary_sha256_hash[:x86_64].nil?
binary_sha256_block << ' })'
# Replace existing binary_sha256 block (found by looking for the old hash), otherwise add it.
binary_sha256_block_re = /\n^\s*(binary_sha256\(\{)(((?!binary_sha256).)*)#{starting_binary_sha256_hash.compact.values.join('.*')}(((?!\}\)).)*)\}\)/m
file = File.read(pkg_file)
if file.match(binary_sha256_block_re)
File.write(pkg_file, file.gsub(binary_sha256_block_re, "\n#{binary_sha256_block}"))
else
bc_re = /^\ \ binary_compression.*/
binary_sha256_block_with_bc = "#{file.match(bc_re)}\n#{binary_sha256_block}"
File.write(pkg_file, file.gsub(bc_re, binary_sha256_block_with_bc))
end
# Upload python wheels if we are dealing with a pip package, but only
# if a gitlab token username is set. (The generic repo does not
# require a gitlab token username.)
if @pkg.superclass.to_s == 'Pip'
pip_config = `pip config list`.chomp
Kernel.system 'pip config --user set global.trusted-host gitlab.com', %i[err out] => File::NULL unless pip_config.include?("global.trusted-host='gitlab.com'")
pip_cache_dir = `pip cache dir`.chomp
wheels = Dir[File.join(pip_cache_dir, '**', '*.whl')]
unless wheels.empty?
wheels.each do |wheel|
puts "Uploading #{wheel}.\nNote that a '400 Bad Request' error here means the wheel has already been uploaded.".orange
# Note that this uses the python twine from https://github.com/pypa/twine/pull/1123
abort 'Twine is broken, cannot upload python wheels.'.lightred unless system('twine --help', %i[out err] => File::NULL)
system("twine upload -u #{gitlab_token_username} -p #{gitlab_token} --repository-url #{CREW_GITLAB_PKG_REPO}/pypi --non-interactive #{wheel}", %i[err] => File::NULL)
FileUtils.rm_f wheel
end
end
end
puts "\e[1A\e[K🎉 Uploads complete for #{package}. 🎉\r\n".lightgreen
end
end
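# Copy a package file from the local repo into CREW_PACKAGES_PATH,
# sanitizing it with rubocop first when available. Returns true when
# the user declines the copy so callers can skip that package.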
def copy_package(pkg_name, prompt_msg = '')
pkg_file = File.join(CREW_LOCAL_REPO_ROOT, 'packages', "#{pkg_name}.rb")
# Use rubocop to sanitize package file, and let errors get flagged.
if PackageUtils.installed?('ruby_rubocop')
puts "Using rubocop to sanitize #{pkg_file} .".orange
system 'gem install rubocop' unless Kernel.system('rubocop --help 2>/dev/null', %i[out err] => File::NULL)
system "rubocop -c #{File.join(CREW_LOCAL_REPO_ROOT, '.rubocop.yml')} -A #{pkg_file}", exception: true
else
puts 'Package '.lightred + 'ruby_rubocop'.orange + " is not installed. Rubocop will not be used to sanitize #{pkg_file}. 😔 You may try this: ".lightred + 'crew install ruby_rubocop'.lightblue
end
next_pkg = nil
if @opt_force
FileUtils.cp pkg_file, "#{CREW_PACKAGES_PATH}/"
puts "\nCopied #{pkg_file} to #{CREW_PACKAGES_PATH}.\n".lightgreen
else
# Derive the operation name from the calling method via caller_locations,
# e.g. a label of 'block in build_command' yields 'build'.
operation = caller_locations(1, 2)[1].to_s.split[3].split('_')[0]
puts prompt_msg.yellow
if Package.agree_default_yes("\nWould you like to copy #{pkg_name}.rb to crew and start the #{operation}")
FileUtils.cp pkg_file, "#{CREW_PACKAGES_PATH}/"
puts "\nCopied #{pkg_file} to #{CREW_PACKAGES_PATH}.\n".lightgreen
else
puts "#{operation.capitalize} skipped."
next_pkg = true
end
end
return next_pkg
end
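# Make sure the crew copy of a package matches the local repo copy,
# prompting to copy the local version over when it is missing or differs.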
def check_package(pkg_name)
return unless Dir.exist? CREW_LOCAL_REPO_ROOT
return unless File.file? "#{CREW_LOCAL_REPO_ROOT}/packages/#{pkg_name}.rb"
return copy_package(pkg_name) if @opt_force
# Prompt to copy the local repo package to crew if the package is not found.
unless File.file? "#{CREW_PACKAGES_PATH}/#{pkg_name}.rb"
prompt_msg = "\nThe crew package #{pkg_name} does not exist."
return copy_package(pkg_name, prompt_msg)
end
# Compare local repo package to the crew repo package and prompt to copy if necessary to prepare for the operation.
unless FileUtils.identical? "#{CREW_LOCAL_REPO_ROOT}/packages/#{pkg_name}.rb", "#{CREW_PACKAGES_PATH}/#{pkg_name}.rb"
prompt_msg = "\n#{CREW_LOCAL_REPO_ROOT}/packages/#{pkg_name}.rb does not match the crew package."
return copy_package(pkg_name, prompt_msg)
end
end
def build_command(args)
abort 'Unable to locate local repo root directory. Change to a local chromebrew git repo directory and try again.'.lightred unless Dir.exist? CREW_LOCAL_REPO_ROOT
abort 'Change to a local chromebrew git repo directory and try again.'.lightred if CREW_PACKAGES_PATH.include?(CREW_LOCAL_REPO_ROOT)
unless Dir.exist? CREW_LOCAL_BUILD_DIR
if @opt_force
puts "Attempting to create local build directory at #{CREW_LOCAL_BUILD_DIR} ...".orange
FileUtils.mkdir_p CREW_LOCAL_BUILD_DIR
else
abort "Unable to locate local build directory #{CREW_LOCAL_BUILD_DIR}. It will be created if you build with the '-f' flag.".lightred
end
end
abort "#{CREW_LOCAL_BUILD_DIR} is not writable.".lightred unless File.writable?(CREW_LOCAL_BUILD_DIR)
args['<name>'].each do |name|
# If a package file is explicitly passed, then use that package file, wherever it is.
if name.end_with?('.rb') && File.file?(name)
FileUtils.cp name, "#{CREW_PACKAGES_PATH}/"
@pkg_name = File.basename(name, '.rb')
else
@pkg_name = name
end
next if check_package(@pkg_name)
search @pkg_name
print_current_package CREW_VERBOSE
next unless @pkg_name
# Process preflight block to see if package should be built
pre_flight
crewlog "!@pkg.is_fake? #{!@pkg.is_fake?} && PackageUtils.compatible?(@pkg) #{PackageUtils.compatible?(@pkg)} && @pkg.source?(ARCH): #{@pkg.source?(ARCH)}"
crewlog "(@pkg.no_source_build? #{@pkg.no_source_build?} || @pkg.source_url.to_s.upcase != 'SKIP' #{@pkg.source_url.to_s.upcase != 'SKIP'} || @pkg.gem_compile_needed? #{@pkg.gem_compile_needed?})"
crewlog "!@pkg.no_compile_needed? #{!@pkg.no_compile_needed?} && @pkg.gem_compile_needed? #{@pkg.gem_compile_needed?}"
if !@pkg.is_fake? && PackageUtils.compatible?(@pkg) && @pkg.source?(ARCH) && (@pkg.no_source_build? || @pkg.source_url.to_s.upcase != 'SKIP' || @pkg.gem_compile_needed?) && !@pkg.no_compile_needed?
resolve_dependencies_and_build
else
puts 'Unable to build a fake package. Skipping build.'.lightred if @pkg.is_fake?
puts "Package #{@pkg.name} is not compatible with your device. Skipping build.".lightred unless PackageUtils.compatible?(@pkg)
puts 'Unable to build without source. Skipping build.'.lightred unless @pkg.source?(ARCH) && @pkg.source_url.to_s.upcase != 'SKIP'
puts 'Compile not needed. Skipping build.'.lightred if @pkg.no_compile_needed?
end
end
puts "Builds are located in #{CREW_LOCAL_BUILD_DIR}.".yellow
end
def check_command(args)
args['<name>'].each do |name|
check_package(name)
search name
if @opt_version
Dir.chdir CREW_PACKAGES_PATH do
system "../tools/version.rb #{name} #{@short_verbose}"
end
else
Dir.chdir CREW_PACKAGES_PATH do
system "../tests/prop_test #{name}"
system "../tests/buildsystem_test #{name}"
end
end
end
end
def const_command(args)
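# each returns the (empty) <name> array, so with no names given this
# falls through to Command.const(nil) to handle the no-argument case.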
args['<name>'].each do |name|
Command.const(name)
end.empty? && Command.const(nil)
end
def deps_command(args)
args['<name>'].each do |name|
@pkg_name = name
search @pkg_name
if args['--tree']
# call `print_deps_tree` (print dependency tree) if --tree is specified
@pkg.print_deps_tree(args)
elsif args['--deep']
system "#{CREW_LIB_PATH}/tools/getrealdeps.rb #{name}"
else
# print dependencies according to the install order if --tree is not specified
puts @pkg.get_deps_list(include_build_deps: args['--include-build-deps'] || 'auto', exclude_buildessential: args['--exclude-buildessential'])
end
end
end
def download_command(args)
args['<name>'].each do |name|
@pkg_name = name
search @pkg_name
@pkg.build_from_source = true if @opt_source
print_current_package CREW_VERBOSE
if ARGV.include?('download') && @pkg.is_fake?
fake_pkg_deplist = @pkg.get_deps_list(return_attr: true).flat_map(&:keys).uniq
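# Fake (meta) packages have nothing to download themselves, so keep
# expanding any fake dependencies into their own dependency lists until
# only real, downloadable packages remain.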
until fake_pkg_deplist.blank?
puts "Will download the following packages: #{fake_pkg_deplist.join(' ')}".orange
fake_pkg_deplist.each_with_index do |fake_pkg_dep, index|
@pkg_name = fake_pkg_dep
search @pkg_name
@pkg.build_from_source = true if @opt_source
if @pkg.is_fake?
puts "Expanding #{fake_pkg_dep}...".lightpurple
expanded_pkg_list = @pkg.get_deps_list(return_attr: true).flat_map(&:keys).uniq
fake_pkg_deplist.push(*expanded_pkg_list)
fake_pkg_deplist.delete(@pkg_name)
next fake_pkg_dep
end
total_files_to_check = fake_pkg_deplist.length
numlength = total_files_to_check.to_s.length
puts "[#{(index + 1).to_s.rjust(numlength)}/#{total_files_to_check}] Downloading #{fake_pkg_dep}...".blue
download
fake_pkg_deplist.delete(@pkg_name)
end
end
else
download
end
end
end
def files_command(args)
args['<name>'].each do |name|
search name
Command.files(@pkg)
end
end
def help_command(args)
Command.help(args['<command>'])
end
def install_command(args)
args['<name>'].each do |name|
@pkg_name = name
# Exit early if package is already installed. This prevents the
# postinstall from being run for an already installed package.
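# ruby_ (gem) packages are exempt so their install logic still runs
# when the underlying gem needs a refresh.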
if !@pkg_name.start_with?('ruby_') && @device[:installed_packages].any? { |pkg| pkg[:name] == @pkg_name }
puts "Package #{@pkg_name} already installed, skipping...".lightgreen
next
end
next if check_package(@pkg_name)
search @pkg_name
print_current_package true
@pkg.build_from_source = true if @opt_source || @opt_recursive || CREW_BUILD_FROM_SOURCE
next unless @pkg_name
if PackageUtils.compatible?(@pkg)
resolve_dependencies_and_install
else
puts PackageUtils.incompatible_reason(@pkg).join("\n").to_s.lightred
puts 'Skipping install.'.lightred
end
end
end
def list_command(args)
Command.list(args['available'], args['compatible'], args['incompatible'], args['essential'], args['installed'], CREW_VERBOSE)
end
def postinstall_command(args)
args['<name>'].each do |name|
@pkg_name = name
search @pkg_name, silent: true
if @device[:installed_packages].any? { |elem| elem[:name] == @pkg_name }
@pkg.postinstall
else
puts "Package #{@pkg_name} is not installed. :(".lightred
end
end
end
def prop_command(args)
Command.prop(args['<property>'])
end
def reinstall_command(args)
args['<name>'].each do |name|
@pkg_name = name
next if check_package(@pkg_name)
search @pkg_name
print_current_package
@pkg.build_from_source = true if @opt_source || @opt_recursive || CREW_BUILD_FROM_SOURCE
next unless @pkg_name
if PackageUtils.compatible?(@pkg)
@pkg.in_upgrade = true
resolve_dependencies_and_install
@pkg.in_upgrade = false
else
puts "Package #{@pkg.name} is not compatible with your device architecture (#{ARCH}). Skipping reinstall.".lightred
end
end
end
def remove_command(args)
args['<name>'].each do |name|
search name
Command.remove(@pkg)
end
end
def search_command(args)
args['<name>'].each do |name|
Command.search(name, CREW_VERBOSE)
end
end
def sysinfo_command(_args)
Command.sysinfo(CREW_VERBOSE)
end
def update_command(_args)
update
end
def upgrade_command(args) = upgrade(*args['<name>'], build_from_source: @opt_source)
def upload_command(args)
gitlab_token = ENV.fetch('GITLAB_TOKEN', nil)
gitlab_token_username = ENV.fetch('GITLAB_TOKEN_USERNAME', nil)
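# With no package given, upload aborts with an explanatory message.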
upload if args['<name>'].empty?
args['<name>'].each do |name|
search name
upload(name, @pkg.version, gitlab_token, gitlab_token_username, @pkg.binary_compression)
end
end
def whatprovides_command(args)
args['<pattern>'].each do |regex|
Command.whatprovides(regex)
end
end
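# A docopt args key is a command name unless it starts with '-'
# (an option) or '<' (a positional argument).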
def command?(name) = !name[/^[-<]/]
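# On interrupt (Ctrl-C), cache an in-progress build when build caching
# is enabled before exiting.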
Signal.trap('INT') do
if CREW_CACHE_FAILED_BUILD && CREW_CACHE_ENABLED && @pkg.in_build
cache_build
ExitMessage.add 'The build was interrupted. The build directory was cached.'.lightred
exit 1
end
ExitMessage.add 'Interrupted!'.lightred
exit 1
end
@device = ConvenienceFunctions.load_symbolized_json
@last_update_check = Dir["#{CREW_LIB_PATH}/{.git/FETCH_HEAD,lib/const.rb}"].map { |i| File.mtime(i).utc.to_i }.max
crewlog("The last update was #{MiscFunctions.time_difference(@last_update_check, Time.now.to_i)} ago.")
puts "It has been more than #{CREW_UPDATE_CHECK_INTERVAL} day#{CREW_UPDATE_CHECK_INTERVAL < 2 ? '' : 's'} since crew was last updated. Please run 'crew update'".lightpurple if Time.now.to_i - @last_update_check > (CREW_UPDATE_CHECK_INTERVAL * 3600 * 24)
command_name = args.select { |k, v| v && command?(k) }.keys[0]
send("#{command_name}_command", args)