Mirror of https://github.com/chromebrew/chromebrew.git, synced 2026-01-06 22:24:12 -05:00
Remove a number of unnecessary instance variables (#9310)
committed by GitHub
parent d66802945a
commit 66b0446a1c
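The pattern applied throughout this commit is replacing instance variables (@foo) that were only ever set and read inside a single method with plain method-local variables. A minimal sketch of the idea, using a hypothetical print_versions helper rather than any method from this diff:

  # Before: @names leaks onto the object and persists after the call.
  def print_versions(packages)
    @names = packages.map { |pkg| "#{pkg[:name]} #{pkg[:version]}" }
    @names.each { |line| puts line }
  end

  # After: names is scoped to this call and released when it returns.
  def print_versions(packages)
    names = packages.map { |pkg| "#{pkg[:name]} #{pkg[:version]}" }
    names.each { |line| puts line }
  end

Variables that are genuinely shared across methods, such as @pkg, @device, @extract_dir and @opt_verbose, are left as instance variables in the diff below.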
bin/crew (190 changed lines)
@@ -241,17 +241,17 @@ end

 def list_installed
   if @opt_verbose
-    @installed_packages = []
+    installed_packages = []
     @device[:installed_packages].each do |package|
       search package[:name], true
-      @installed_packages.append("#{package[:name]} #{package[:version]}")
+      installed_packages.append("#{package[:name]} #{package[:version]}")
     end
-    @sorted_installed_packages = @installed_packages.sort
-    @sorted_installed_packages.unshift('======= =======')
-    @sorted_installed_packages.unshift('Package Version')
-    @first_col_width = @sorted_installed_packages.map(&:split).map(&:first).max_by(&:size).size + 2
-    @sorted_installed_packages.map(&:strip).each do |line|
-      puts "%-#{@first_col_width}s%s".lightgreen % line.split
+    sorted_installed_packages = installed_packages.sort
+    sorted_installed_packages.unshift('======= =======')
+    sorted_installed_packages.unshift('Package Version')
+    first_col_width = sorted_installed_packages.map(&:split).map(&:first).max_by(&:size).size + 2
+    sorted_installed_packages.map(&:strip).each do |line|
+      puts "%-#{first_col_width}s%s".lightgreen % line.split
     end
     print "\n"
   else
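For reference, the column alignment in list_installed comes from Ruby's format operator with a computed field width. A standalone sketch of the same idea, with invented sample rows and without crew's .lightgreen String extension:

  installed = ['sommelier 0.99', 'zstd 1.5.6'].sort           # sample data, not real packages
  installed.unshift('======= =======').unshift('Package Version')
  first_col_width = installed.map(&:split).map(&:first).max_by(&:size).size + 2
  installed.each { |line| puts "%-#{first_col_width}s%s" % line.split }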
@@ -542,24 +542,24 @@ def help(pkgName = nil)
 end

 def cache_build
-  @build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
+  build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
   if CREW_CACHE_ENABLED && File.writable?(CREW_CACHE_DIR)
     puts 'Caching build dir...'
-    @pkg_build_dirname_absolute = File.join(CREW_BREW_DIR, @extract_dir)
-    @pkg_build_dirname = File.basename(@pkg_build_dirname_absolute)
-    Dir.chdir @pkg_build_dirname_absolute do
+    pkg_build_dirname_absolute = File.join(CREW_BREW_DIR, @extract_dir)
+    pkg_build_dirname = File.basename(pkg_build_dirname_absolute)
+    Dir.chdir pkg_build_dirname_absolute do
       # Do not use --exclude-vcs w/ tar to exclude .git
       # because some builds will use that information.
       # Backup build cachefile it if exists.
-      FileUtils.mv @build_cachefile, "#{@build_cachefile}.bak", force: true if File.file?(@build_cachefile)
-      FileUtils.mv "#{@build_cachefile}.sha256", "#{@build_cachefile}.sha256.bak", force: true if File.file?("#{@build_cachefile}.sha256")
+      FileUtils.mv build_cachefile, "#{build_cachefile}.bak", force: true if File.file?(build_cachefile)
+      FileUtils.mv "#{build_cachefile}.sha256", "#{build_cachefile}.sha256.bak", force: true if File.file?("#{build_cachefile}.sha256")
       Dir.chdir(CREW_BREW_DIR) do
-        system "tar c#{@verbose} #{@pkg_build_dirname} \
-        | nice -n 20 #{CREW_PREFIX}/bin/zstd -c --ultra --fast -f -o #{@build_cachefile} -"
+        system "tar c#{@verbose} #{pkg_build_dirname} \
+        | nice -n 20 #{CREW_PREFIX}/bin/zstd -c --ultra --fast -f -o #{build_cachefile} -"
       end
     end
-    system "sha256sum #{@build_cachefile} > #{@build_cachefile}.sha256"
-    puts "Build directory cached at #{@build_cachefile}".lightgreen
+    system "sha256sum #{build_cachefile} > #{build_cachefile}.sha256"
+    puts "Build directory cached at #{build_cachefile}".lightgreen
   else
     puts 'CREW_CACHE_ENABLED is not set.'.orange unless CREW_CACHE_ENABLED
     puts 'CREW_CACHE_DIR is not writable.'.lightred unless File.writable?(CREW_CACHE_DIR)
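The backup dance in cache_build (move any existing archive and its .sha256 sidecar to .bak before writing a fresh one) can be exercised in isolation. A minimal sketch with made-up paths; the File.write stands in for the tar | zstd pipeline:

  require 'fileutils'

  cachefile = '/tmp/demo-build.tar.zst'                         # hypothetical path
  FileUtils.mv cachefile, "#{cachefile}.bak", force: true if File.file?(cachefile)
  FileUtils.mv "#{cachefile}.sha256", "#{cachefile}.sha256.bak", force: true if File.file?("#{cachefile}.sha256")
  File.write(cachefile, 'new archive contents')                 # stand-in for the real archive
  system "sha256sum #{cachefile} > #{cachefile}.sha256"         # same sidecar convention as the hunk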
@@ -762,7 +762,7 @@ def upgrade(*pkgs, build_from_source: false)
   # version of ruby.
   if to_be_upgraded.include?('ruby')
     to_be_upgraded = ['ruby']
-    @rerun_upgrade = true
+    rerun_upgrade = true
   end

   # install new dependencies (if any)
@@ -785,8 +785,8 @@ def upgrade(*pkgs, build_from_source: false)
     resolve_dependencies_and_install
   end

-  puts 'Packages have been updated.'.lightgreen unless @rerun_upgrade
-  puts "Ruby was upgraded. Please run 'crew upgrade' again to make sure upgrades are complete.".lightblue if @rerun_upgrade
+  puts 'Packages have been updated.'.lightgreen unless rerun_upgrade
+  puts "Ruby was upgraded. Please run 'crew upgrade' again to make sure upgrades are complete.".lightblue if rerun_upgrade
 end

 def download
@@ -798,8 +798,8 @@ def download
   sha256sum = @pkg.get_sha256(@device[:architecture])
   @extract_dir = @pkg.get_extract_dir

-  @build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
-  return { source:, filename: } if CREW_CACHE_BUILD && File.file?(@build_cachefile)
+  build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
+  return { source:, filename: } if CREW_CACHE_BUILD && File.file?(build_cachefile)

   if !url
     abort "No precompiled binary or source is available for #{@device[:architecture]}.".lightred
@@ -813,7 +813,7 @@ def download
     puts 'No precompiled binary available for your platform, downloading source...'
   end

-  @git = uri.scheme.eql?('git')
+  git = uri.scheme.eql?('git')

   Dir.chdir CREW_BREW_DIR do
     case File.basename(filename)
@@ -823,9 +823,9 @@ def download
       if CREW_CACHE_ENABLED
         puts "Looking for #{@pkg.name} archive in cache".orange if @opt_verbose
         # Privilege CREW_LOCAL_BUILD_DIR over CREW_CACHE_DIR.
-        @local_build_cachefile = File.join(CREW_LOCAL_BUILD_DIR, filename)
-        @crew_cache_dir_cachefile = File.join(CREW_CACHE_DIR, filename)
-        cachefile = File.file?(@local_build_cachefile) ? @local_build_cachefile : @crew_cache_dir_cachefile
+        local_build_cachefile = File.join(CREW_LOCAL_BUILD_DIR, filename)
+        crew_cache_dir_cachefile = File.join(CREW_CACHE_DIR, filename)
+        cachefile = File.file?(local_build_cachefile) ? local_build_cachefile : crew_cache_dir_cachefile
         puts "Using #{@pkg.name} archive from the build cache at #{cachefile}; The checksum will not be checked against the package file.".orange if cachefile.include?(CREW_LOCAL_BUILD_DIR)
         if File.file?(cachefile)
           puts "#{@pkg.name.capitalize} archive file exists in cache".lightgreen if @opt_verbose
@@ -873,7 +873,7 @@ def download
     when /^SKIP$/i
       Dir.mkdir @extract_dir
     when /\.git$/i # Source URLs which end with .git are git sources.
-      @git = true
+      git = true
     else
       Dir.mkdir @extract_dir
       downloader url, sha256sum, filename, @opt_verbose
@@ -884,7 +884,7 @@ def download
   end

   # Handle git sources.
-  if @git
+  if git
     # Recall repository from cache if requested
     if CREW_CACHE_ENABLED
       # No git branch specified, just a git commit or tag
@@ -960,14 +960,14 @@ def unpack(meta)
   Dir.chdir CREW_BREW_DIR do
     FileUtils.mkdir_p @extract_dir, verbose: @fileutils_verbose

-    @build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
-    if CREW_CACHE_BUILD && File.file?(@build_cachefile) && File.file?("#{@build_cachefile}.sha256") && ( system "cd #{CREW_CACHE_DIR} && sha256sum -c #{@build_cachefile}.sha256" )
+    build_cachefile = File.join(CREW_CACHE_DIR, "#{@pkg.name}-#{@pkg.version}-build-#{@device[:architecture]}.tar.zst")
+    if CREW_CACHE_BUILD && File.file?(build_cachefile) && File.file?("#{build_cachefile}.sha256") && ( system "cd #{CREW_CACHE_DIR} && sha256sum -c #{build_cachefile}.sha256" )
       @pkg.cached_build = true
-      puts "Extracting cached build directory from #{@build_cachefile}".lightgreen
-      system "tar -Izstd -x#{@verbose}f #{@build_cachefile} -C #{CREW_BREW_DIR}", exception: true
+      puts "Extracting cached build directory from #{build_cachefile}".lightgreen
+      system "tar -Izstd -x#{@verbose}f #{build_cachefile} -C #{CREW_BREW_DIR}", exception: true
       # Need to reset @extract_dir to the extracted cached build
       # directory.
-      @extract_dir = `tar -Izstd --exclude='./*/*' -tf #{@build_cachefile} | cut -d '/' -f 1 | sort -u`.chomp
+      @extract_dir = `tar -Izstd --exclude='./*/*' -tf #{build_cachefile} | cut -d '/' -f 1 | sort -u`.chomp
     else
       @pkg.cached_build = false
       case File.basename meta[:filename]
@@ -1171,18 +1171,18 @@ def prepare_package(destdir)

   # check for FHS3 compliance
   puts 'Checking for FHS3 compliance...'
-  @_errors = false
-  @fhs_compliant_prefix = %W[bin etc include lib #{ARCH_LIB} libexec opt sbin share var].uniq
+  errors = false
+  fhs_compliant_prefix = %W[bin etc include lib #{ARCH_LIB} libexec opt sbin share var].uniq

   Dir.foreach(CREW_DEST_PREFIX) do |filename|
     next if %w[. ..].include?(filename)

-    unless @fhs_compliant_prefix.include?(filename)
+    unless fhs_compliant_prefix.include?(filename)
       if CREW_FHS_NONCOMPLIANCE_ONLY_ADVISORY || @pkg.no_fhs?
         puts "Warning: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".orange
       else
         puts "Error: #{CREW_PREFIX}/#{filename} in #{@pkg.name} is not FHS3 compliant.".lightred
-        @_errors = true
+        errors = true
       end
     end
   end
@@ -1194,22 +1194,22 @@ def prepare_package(destdir)
       puts 'Warning: There is a conflict with the same file in another package.'.orange
     else
       puts 'Error: There is a conflict with the same file in another package.'.lightred
-      @_errors = true
+      errors = true
     end
     puts conflicts
   end

   # abort if errors encountered
-  abort 'Exiting due to above errors.'.lightred if @_errors
+  abort 'Exiting due to above errors.'.lightred if errors

   # Make sure the package file has runtime dependencies added properly.
   system "#{CREW_LIB_PATH}/tools/getrealdeps.rb --use-crew-dest-dir #{@pkg.name}" unless @pkg.no_compile_needed?
   # create directory list
   # Remove CREW_PREFIX and HOME from the generated directorylist.
-  @crew_prefix_escaped = CREW_PREFIX.gsub('/', '\/')
-  @home_escaped = HOME.gsub('/', '\/')
-  system "find .#{CREW_PREFIX} -type d | cut -c2- | sed '0,/#{@crew_prefix_escaped}/{/#{@crew_prefix_escaped}/d}'| sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_PREFIX)
-  system "find .#{HOME} -type d | cut -c2- | sed '0,/#{@home_escaped}/{/#{@home_escaped}/d}' | sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_HOME)
+  crew_prefix_escaped = CREW_PREFIX.gsub('/', '\/')
+  home_escaped = HOME.gsub('/', '\/')
+  system "find .#{CREW_PREFIX} -type d | cut -c2- | sed '0,/#{crew_prefix_escaped}/{/#{crew_prefix_escaped}/d}'| sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_PREFIX)
+  system "find .#{HOME} -type d | cut -c2- | sed '0,/#{home_escaped}/{/#{home_escaped}/d}' | sort", out: %w[dlist a] if Dir.exist?(CREW_DEST_HOME)

   strip_dir destdir

@@ -1232,22 +1232,22 @@ def patchelf_set_need_paths(dir)
   Dir.chdir dir do
     puts 'Running patchelf'.lightblue
     abort('No Patchelf found!').lightred unless File.file?("#{CREW_PREFIX}/bin/patchelf")
-    @execfiles = `find . -executable -type f ! \\( -name '*.a' \\) | xargs -P#{CREW_NPROC} -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
-    return if @execfiles.empty?
+    execfiles = `find . -executable -type f ! \\( -name '*.a' \\) | xargs -P#{CREW_NPROC} -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
+    return if execfiles.empty?

-    @patchelf_lib_prefix = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib" : CREW_LIB_PREFIX
-    puts "@patchelf_lib_prefix is #{@patchelf_lib_prefix}" if @opt_verbose
-    @patchelf_interpreter = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib/libc.so" : 'CREW_LIB_PREFIX/libc.so.6'
-    puts "@patchelf_interpreter is #{@patchelf_interpreter}" if @opt_verbose
+    patchelf_lib_prefix = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib" : CREW_LIB_PREFIX
+    puts "patchelf_lib_prefix is #{patchelf_lib_prefix}" if @opt_verbose
+    patchelf_interpreter = @pkg.is_musl? ? "#{CREW_MUSL_PREFIX}/lib/libc.so" : 'CREW_LIB_PREFIX/libc.so.6'
+    puts "patchelf_interpreter is #{patchelf_interpreter}" if @opt_verbose

     puts 'Running patchelf to patch binaries for library paths'.lightblue
-    @execfiles.each_line(chomp: true) do |execfiletopatch|
+    execfiles.each_line(chomp: true) do |execfiletopatch|
       execfiletopatch = Dir.pwd + execfiletopatch.delete_prefix('.')
-      @neededlibs = `patchelf --print-needed #{execfiletopatch}`
-      next if @neededlibs.to_s.empty?
+      neededlibs = `patchelf --print-needed #{execfiletopatch}`
+      next if neededlibs.to_s.empty?

-      @neededlibs.each_line(chomp: true) do |neededlibspatch|
-        next if neededlibspatch.include?(@patchelf_lib_prefix.to_s)
+      neededlibs.each_line(chomp: true) do |neededlibspatch|
+        next if neededlibspatch.include?(patchelf_lib_prefix.to_s)

         # Avoid segfaults from not using system versions of these files.
         patchelf_veto_files = %w[
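The loop here relies on backticks capturing a command's stdout as one String and each_line(chomp: true) iterating it per file. A self-contained sketch of the same structure, run against an arbitrary directory and assuming patchelf is installed:

  files = `find /usr/bin -maxdepth 1 -type f 2>/dev/null`.chomp
  unless files.empty?
    files.each_line(chomp: true) do |path|
      needed = `patchelf --print-needed #{path} 2>/dev/null`   # empty for non-ELF files
      next if needed.to_s.empty?

      puts "#{path}: #{needed.lines(chomp: true).join(', ')}"
    end
  end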
@@ -1259,16 +1259,16 @@ def patchelf_set_need_paths(dir)
         ]
         next if !@pkg.is_musl? && patchelf_veto_files.any? { |i| neededlibspatch.include? i }

-        @neededlib_basename = File.basename(neededlibspatch)
-        @neededlibspatchednamepath = "#{@patchelf_lib_prefix}/" + @neededlib_basename
+        neededlib_basename = File.basename(neededlibspatch)
+        neededlibspatchednamepath = "#{patchelf_lib_prefix}/" + neededlib_basename
         # The first check here can be changed to just check the dest_dir
-        # hierarchy for @neededlib_basename if the intent is to allow
+        # hierarchy for neededlib_basename if the intent is to allow
         # using a different CREW_PREFIX during package installs.
-        if File.file?(@neededlibspatchednamepath) || File.file?(Dir.pwd + @neededlibspatchednamepath)
-          puts "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}" if @opt_verbose
-          system "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}"
+        if File.file?(neededlibspatchednamepath) || File.file?(Dir.pwd + neededlibspatchednamepath)
+          puts "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}" if @opt_verbose
+          system "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}"
         else
-          puts "#{execfiletopatch} needed library #{@neededlib_basename} not found in #{@patchelf_lib_prefix} or #{Dir.pwd + @neededlibspatchednamepath}.".lightred
+          puts "#{execfiletopatch} needed library #{neededlib_basename} not found in #{patchelf_lib_prefix} or #{Dir.pwd + neededlibspatchednamepath}.".lightred
         end
       end
       # Do not set interpreter for non-musl, as this can break apps if there
@@ -1276,7 +1276,7 @@ def patchelf_set_need_paths(dir)
       next unless @pkg.is_musl?

       puts 'Running patchelf to patch binary interpreter paths'.lightblue
-      system "patchelf --set-interpreter #{@patchelf_interpreter} #{execfiletopatch}"
+      system "patchelf --set-interpreter #{patchelf_interpreter} #{execfiletopatch}"
     end
   end
 end
@@ -1287,10 +1287,10 @@ def strip_find_files(find_cmd, strip_option = '')

   # Run find_cmd and strip only files with ar or elf magic headers.
   system "#{find_cmd} | xargs -r chmod u+w"
-  @strip_verbose = @opt_verbose ? 'echo "Stripping ${0:1}" &&' : ''
+  strip_verbose = @opt_verbose ? 'echo "Stripping ${0:1}" &&' : ''
   # The craziness here is from having to escape the special characters
   # in the magic headers for these files.
-  system "#{find_cmd} | xargs -P#{CREW_NPROC} -n1 -r bash -c 'header=$(head -c4 ${0}); elfheader='$(printf '\\\177ELF')' ; arheader=\\!\\<ar ; case $header in $elfheader|$arheader) #{@strip_verbose} llvm-strip #{strip_option} ${0} ;; esac'"
+  system "#{find_cmd} | xargs -P#{CREW_NPROC} -n1 -r bash -c 'header=$(head -c4 ${0}); elfheader='$(printf '\\\177ELF')' ; arheader=\\!\\<ar ; case $header in $elfheader|$arheader) #{strip_verbose} llvm-strip #{strip_option} ${0} ;; esac'"
 end

 def strip_dir(dir)
@@ -1332,12 +1332,12 @@ def shrink_dir(dir)
     # 5. If the expansion doesn't error out then it is ok to copy
     # over the original. (This also lets us only avoid compressing
     # hard linked files multiple times.)
-    @execfiles = `find . -executable -type f ! \\( -name '*.so*' -o -name '*.a' \\) | xargs -P8 -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp
+    execfiles = `find . -executable -type f ! \\( -name '*.so*' -o -name '*.a' \\) | xargs -P8 -n1 sh -c '[ "$(head -c4 ${1})" = "\x7FELF" ] && echo ${1}' --`.chomp

-    unless @execfiles.empty?
+    unless execfiles.empty?
       puts 'Using upx to shrink binaries.'
       # Copying in the ThreadPoolExecutor loop fails non-deterministically
-      @execfiles.each_line(chomp: true) do |execfilecp|
+      execfiles.each_line(chomp: true) do |execfilecp|
         execfilecp.slice! '.'
         next if execfilecp.empty?

@@ -1360,7 +1360,7 @@ def shrink_dir(dir)
         max_queue: 0, # unbounded work queue
         fallback_policy: :caller_runs
       )
-      @execfiles.each_line(chomp: true) do |execfile|
+      execfiles.each_line(chomp: true) do |execfile|
         pool.post do
           execfile.slice! '.'
           execfile = File.join(dir, execfile)
@@ -1401,8 +1401,8 @@ def install_files(src, dst = File.join( CREW_PREFIX, src.delete_prefix('./usr/lo
   # rsync src path needs a trailing slash
   src << '/' unless src.end_with?('/')
   # Check for ACLs support.
-  @rsync_version = `rsync --version`.chomp
-  if @rsync_version.include?('ACLs') && !@rsync_version.include?('no ACLs')
+  rsync_version = `rsync --version`.chomp
+  if rsync_version.include?('ACLs') && !rsync_version.include?('no ACLs')
     system 'rsync', "-ah#{@verbose}HAXW", '--remove-source-files', src, dst, exception: true
   else
     system 'rsync', "-ah#{@verbose}HXW", '--remove-source-files', src, dst, exception: true
@@ -1425,20 +1425,20 @@ def install_package(pkgdir)
     FileUtils.mv 'filelist', File.join(CREW_META_PATH, "#{@pkg.name}.filelist"), verbose: @fileutils_verbose

     unless CREW_NOT_LINKS || @pkg.no_links?
-      @brokensymlinks = nil
-      @brokensymlinks = `find . -type l -exec test ! -e {} \\; -print`.chomp
-      unless @brokensymlinks.to_s.empty?
+      brokensymlinks = nil
+      brokensymlinks = `find . -type l -exec test ! -e {} \\; -print`.chomp
+      unless brokensymlinks.to_s.empty?
         puts 'There are broken symlinks. Will try to fix.'.orange if @opt_verbose
-        @brokensymlinks.each_line(chomp: true) do |fixlink|
-          @brokentarget = @fixedtarget = nil
-          @brokentarget = `readlink -n #{fixlink}`.chomp
-          puts "Attempting fix of: #{fixlink.delete_prefix('.')} -> #{@brokentarget}".orange if @opt_verbose
-          @fixedtarget = @brokentarget.delete_prefix(CREW_DEST_DIR)
-          @fixedlink_loc = File.join(pkgdir, fixlink.delete_prefix('.'))
+        brokensymlinks.each_line(chomp: true) do |fixlink|
+          brokentarget = fixedtarget = nil
+          brokentarget = `readlink -n #{fixlink}`.chomp
+          puts "Attempting fix of: #{fixlink.delete_prefix('.')} -> #{brokentarget}".orange if @opt_verbose
+          fixedtarget = brokentarget.delete_prefix(CREW_DEST_DIR)
+          fixedlink_loc = File.join(pkgdir, fixlink.delete_prefix('.'))
           # If no changes were made, don't replace symlink
-          unless @fixedtarget == @brokentarget
-            FileUtils.ln_sf @fixedtarget, @fixedlink_loc
-            puts "Fixed: #{@fixedtarget} -> #{fixlink.delete_prefix('.')}".orange if @opt_verbose
+          unless fixedtarget == brokentarget
+            FileUtils.ln_sf fixedtarget, fixedlink_loc
+            puts "Fixed: #{fixedtarget} -> #{fixlink.delete_prefix('.')}".orange if @opt_verbose
           end
         end
       end
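The symlink repair above strips the staging prefix (CREW_DEST_DIR) from the link target and re-points the link in the final install tree. A reduced sketch with invented paths standing in for the crew constants:

  require 'fileutils'

  dest_dir = '/tmp/demo-destdir'                         # stand-in for CREW_DEST_DIR
  pkgdir   = '/tmp/demo-install'                         # stand-in for the install root
  fixlink  = './usr/local/bin/tool'                      # relative path as produced by find
  brokentarget = "#{dest_dir}/usr/local/lib/tool-real"   # target still pointing into the staging area
  fixedtarget  = brokentarget.delete_prefix(dest_dir)    # => "/usr/local/lib/tool-real"
  fixedlink_loc = File.join(pkgdir, fixlink.delete_prefix('.'))
  FileUtils.mkdir_p File.dirname(fixedlink_loc)
  FileUtils.ln_sf fixedtarget, fixedlink_loc unless fixedtarget == brokentarget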
@@ -1691,9 +1691,9 @@ end
 def archive_package(crew_archive_dest)
   # Check to see that there is a working zstd
   if File.file?("#{CREW_PREFIX}/bin/zstd")
-    @crew_prefix_zstd_available = File.file?("#{CREW_PREFIX}/bin/zstd") ? true : nil
+    crew_prefix_zstd_available = File.file?("#{CREW_PREFIX}/bin/zstd") ? true : nil
   end
-  if @pkg.no_zstd? || !@crew_prefix_zstd_available
+  if @pkg.no_zstd? || !crew_prefix_zstd_available
     puts 'Using xz to compress package. This may take some time.'.lightblue
     pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.xz"
     Dir.chdir CREW_DEST_DIR do
@@ -1707,7 +1707,7 @@ def archive_package(crew_archive_dest)
   # decompression speed over compression speed.
   # See https://lists.archlinux.org/pipermail/arch-dev-public/2019-March/029542.html
   # Use nice so that user can (possibly) do other things during compression.
-  if @crew_prefix_zstd_available
+  if crew_prefix_zstd_available
     puts 'Using standard zstd'.lightblue if @opt_verbose
     system "tar c#{@verbose} * | nice -n 20 #{CREW_PREFIX}/bin/zstd -c -T0 --ultra -20 - > #{crew_archive_dest}/#{pkg_name}"
   end
@@ -1740,10 +1740,10 @@ def remove(pkgName)
   CREW_ESSENTIAL_FILES.each do |file|
     next unless File.symlink?("#{CREW_LIB_PREFIX}/#{file}")

-    @canonicalized_file = `readlink -m #{CREW_LIB_PREFIX}/#{file}`.chomp
-    if File.file?(@canonicalized_file) && @canonicalized_file.include?(CREW_PREFIX)
-      puts "Replacing symlinked essential #{file} with hard link to #{@canonicalized_file} to avoid breakage.".lightblue if @opt_verbose
-      FileUtils.ln(@canonicalized_file, "#{CREW_LIB_PREFIX}/#{file}", force: true)
+    canonicalized_file = `readlink -m #{CREW_LIB_PREFIX}/#{file}`.chomp
+    if File.file?(canonicalized_file) && canonicalized_file.include?(CREW_PREFIX)
+      puts "Replacing symlinked essential #{file} with hard link to #{canonicalized_file} to avoid breakage.".lightblue if @opt_verbose
+      FileUtils.ln(canonicalized_file, "#{CREW_LIB_PREFIX}/#{file}", force: true)
     end
   end

@@ -2157,11 +2157,11 @@ def sysinfo_command(_args)

   git_commit_message_format = '%h `%s (%cr)`'

-  @sysinfo_markdown_header = <<~MDHEADER
+  sysinfo_markdown_header = <<~MDHEADER
     <details><summary>Expand</summary>

   MDHEADER
-  @sysinfo_markdown_body = <<~MDBODY
+  sysinfo_markdown_body = <<~MDBODY
     - Architecture: `#{KERN_ARCH}` (`#{ARCH}`)
     - Processor vendor: `#{CPUINFO['vendor_id'] || 'ARM'}`
     - User space: `#{Dir.exist?('/lib64') ? '64' : '32'}-bit`
@@ -2178,14 +2178,14 @@ def sysinfo_command(_args)
     - OS version: `#{lsb_release['CHROMEOS_RELEASE_BUILDER_PATH']}`
     - OS channel: `#{lsb_release['CHROMEOS_RELEASE_TRACK']}`
   MDBODY
-  @sysinfo_markdown_footer = <<~MDFOOTER
+  sysinfo_markdown_footer = <<~MDFOOTER

     </details>
   MDFOOTER
   if @opt_verbose
-    puts @sysinfo_markdown_header, @sysinfo_markdown_body, @sysinfo_markdown_footer
+    puts sysinfo_markdown_header, sysinfo_markdown_body, sysinfo_markdown_footer
   else
-    puts @sysinfo_markdown_body.tr('`', '')
+    puts sysinfo_markdown_body.tr('`', '')
   end
 end

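sysinfo_command assembles its markdown from squiggly heredocs (<<~), which strip the common leading indentation so the literals can stay indented with the code. A small standalone sketch of the same mechanism, with a made-up body:

  arch = `uname -m`.chomp
  sysinfo_markdown_body = <<~MDBODY
    - Architecture: `#{arch}`
    - User space: `#{Dir.exist?('/lib64') ? '64' : '32'}-bit`
  MDBODY
  puts sysinfo_markdown_body                # markdown, backticks intact
  puts sysinfo_markdown_body.tr('`', '')    # plain-text variant, as in the non-verbose branch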
lib/const.rb
@@ -1,7 +1,7 @@
 # lib/const.rb
 # Defines common constants used in different parts of crew

-CREW_VERSION = '1.43.1'
+CREW_VERSION = '1.43.2'

 # kernel architecture
 KERN_ARCH = `uname -m`.chomp
lib/musl.rb (18 changed lines)
@@ -85,23 +85,23 @@ class Musl
     puts 'Running Musl.patchelf'.lightblue
     abort('No Patchelf found!').lightred unless File.exist?("#{CREW_PREFIX}/bin/patchelf")

-    @execfiles = `find . -executable -type f ! \\( -name \"*.a\" \\) -exec sh -c \"file -i \'{}\' | grep -q \'executable; charset=binary\'\" \\; -exec ls -1i {} \\; | sort -u -n -s -k1,1 | awk '{print $2}'`.chomp
-    return if @execfiles.to_s.empty?
+    execfiles = `find . -executable -type f ! \\( -name \"*.a\" \\) -exec sh -c \"file -i \'{}\' | grep -q \'executable; charset=binary\'\" \\; -exec ls -1i {} \\; | sort -u -n -s -k1,1 | awk '{print $2}'`.chomp
+    return if execfiles.to_s.empty?

     puts 'Running patchelf to patch binaries for musl paths'.lightblue
-    @execfiles.each_line(chomp: true) do |execfiletopatch|
+    execfiles.each_line(chomp: true) do |execfiletopatch|
       execfiletopatch = Dir.pwd + execfiletopatch.delete_prefix('.')
       system "patchelf --set-interpreter #{CREW_MUSL_PREFIX}/lib/libc.so #{execfiletopatch}"
       system "patchelf --set-rpath #{CREW_MUSL_PREFIX}/lib #{execfiletopatch}"
-      @neededlibs = `patchelf --print-needed #{execfiletopatch}`
-      next if @neededlibs.to_s.empty?
+      neededlibs = `patchelf --print-needed #{execfiletopatch}`
+      next if neededlibs.to_s.empty?

-      @neededlibs.each_line(chomp: true) do |neededlibspatch|
+      neededlibs.each_line(chomp: true) do |neededlibspatch|
         next if neededlibspatch.include?("#{CREW_MUSL_PREFIX}/lib")

-        @neededlibspatchednamepath = "#{CREW_MUSL_PREFIX}/lib/" + File.basename(neededlibspatch)
-        puts "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}"
-        system "patchelf --replace-needed #{neededlibspatch} #{@neededlibspatchednamepath} #{execfiletopatch}"
+        neededlibspatchednamepath = "#{CREW_MUSL_PREFIX}/lib/" + File.basename(neededlibspatch)
+        puts "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}"
+        system "patchelf --replace-needed #{neededlibspatch} #{neededlibspatchednamepath} #{execfiletopatch}"
       end
     end
   end
tools/getrealdeps.rb
@@ -3,18 +3,18 @@
 # Author: Satadru Pramanik (satmandu) satadru at gmail dot com
 require 'fileutils'

-@crew_local_repo_root = `git rev-parse --show-toplevel 2> /dev/null`.chomp
-# When invoked from crew, pwd is CREW_DEST_DIR, so @crew_local_repo_root
+crew_local_repo_root = `git rev-parse --show-toplevel 2> /dev/null`.chomp
+# When invoked from crew, pwd is CREW_DEST_DIR, so crew_local_repo_root
 # is empty.
-if @crew_local_repo_root.to_s.empty?
+if crew_local_repo_root.to_s.empty?
   require_relative '../lib/const'
 else
-  require File.join(@crew_local_repo_root, 'lib/const')
+  require File.join(crew_local_repo_root, 'lib/const')
 end

 if ARGV.include?('--use-crew-dest-dir')
   ARGV.delete('--use-crew-dest-dir')
-  @opt_use_crew_dest_dir = true
+  opt_use_crew_dest_dir = true
 end

 # Exit quickly if an invalid package name is given.
@@ -29,9 +29,9 @@ end
 # This is a subset of what crew whatprovides gives.
 def whatprovidesfxn(pkgdepslcl, pkg)
   filelcl = if pkgdepslcl.include?(CREW_LIB_PREFIX)
-              `#{@grep} --exclude #{pkg}.filelist --exclude #{pkgfilelist} --exclude={"#{CREW_PREFIX}/etc/crew/meta/*_build.filelist"} "#{pkgdepslcl}$" "#{CREW_PREFIX}"/etc/crew/meta/*.filelist`
+              `#{grep} --exclude #{pkg}.filelist --exclude #{pkgfilelist} --exclude={"#{CREW_PREFIX}/etc/crew/meta/*_build.filelist"} "#{pkgdepslcl}$" "#{CREW_PREFIX}"/etc/crew/meta/*.filelist`
             else
-              `#{@grep} --exclude #{pkg}.filelist --exclude #{pkgfilelist} --exclude={"#{CREW_PREFIX}/etc/crew/meta/*_build.filelist"} "^#{CREW_LIB_PREFIX}.*#{pkgdepslcl}$" "#{CREW_PREFIX}"/etc/crew/meta/*.filelist`
+              `#{grep} --exclude #{pkg}.filelist --exclude #{pkgfilelist} --exclude={"#{CREW_PREFIX}/etc/crew/meta/*_build.filelist"} "^#{CREW_LIB_PREFIX}.*#{pkgdepslcl}$" "#{CREW_PREFIX}"/etc/crew/meta/*.filelist`
             end
   filelcl.gsub(/.filelist.*/, '').gsub(%r{.*/}, '').split("\n").uniq.join("\n").gsub(':', '')
 end
@@ -39,7 +39,7 @@ end
 def main(pkg)
   puts "Checking for the runtime dependencies of #{pkg}..."

-  if @opt_use_crew_dest_dir
+  if opt_use_crew_dest_dir
     define_singleton_method('pkgfilelist') {File.join(CREW_DEST_DIR, 'filelist')}
     abort('Pkg was not built.') unless File.exist?(pkgfilelist)
   else
@@ -57,10 +57,10 @@ def main(pkg)

   # Install grep if a functional local copy does not exist.
   if system('grep --version > /dev/null 2>&1')
-    @grep = 'grep'
+    grep = 'grep'
   else
     system('crew install grep')
-    @grep = "#{CREW_PREFIX}/bin/grep"
+    grep = "#{CREW_PREFIX}/bin/grep"
   end

   # Gawk is needed for adding dependencies.
@@ -81,7 +81,7 @@ def main(pkg)
   # Look at files in CREW_DEST_DIR instead of assuming the package is
   # normally installed, which lets us avoid installing the package if it
   # was just built.
-  pkgfiles.map! {|item| item.prepend(CREW_DEST_DIR)} if @opt_use_crew_dest_dir
+  pkgfiles.map! {|item| item.prepend(CREW_DEST_DIR)} if opt_use_crew_dest_dir

   FileUtils.rm_rf("/tmp/deps/#{pkg}")
   # Remove files we don't care about, such as man files and non-binaries.
@@ -92,7 +92,7 @@ def main(pkg)
   pkgdepsfiles = pkgfiles.map do |i|
     system("upx -d #{i} > /dev/null 2>&1")
     FileUtils.mkdir_p("/tmp/deps/#{pkg}/")
-    `readelf -d "#{i}" 2>/dev/null | #{@grep} NEEDED | awk '{print $5}' | sed 's/\\[//g' | sed 's/\\]//g' | awk '!x[$0]++' | tee /tmp/deps/#{pkg}/#{File.basename(i)}`
+    `readelf -d "#{i}" 2>/dev/null | #{grep} NEEDED | awk '{print $5}' | sed 's/\\[//g' | sed 's/\\]//g' | awk '!x[$0]++' | tee /tmp/deps/#{pkg}/#{File.basename(i)}`
   end
   pkgdepsfiles = pkgdepsfiles.map do |filedeps|
     filedeps.split("\n")