Refactor upload (#11821)

* Add crew lockfile implementation.

Signed-off-by: Satadru Pramanik <satadru@gmail.com>

* Refactor crew upload, and add usage of lockfiles.

Signed-off-by: Satadru Pramanik <satadru@gmail.com>

* Adjust crew and add test.

Signed-off-by: Satadru Pramanik <satadru@gmail.com>

* Add more lightred.

Signed-off-by: Satadru Pramanik <satadru@gmail.com>

---------

Signed-off-by: Satadru Pramanik <satadru@gmail.com>
This commit is contained in:
Satadru Pramanik, DO, MPH, MEng
2025-04-25 17:39:18 -04:00
committed by GitHub
parent 2b2235946b
commit ddeeef5ad4
4 changed files with 800 additions and 90 deletions

303
bin/crew
View File

@@ -32,6 +32,7 @@ require_relative '../commands/search'
require_relative '../commands/sysinfo'
require_relative '../commands/whatprovides'
require_relative '../lib/const'
require_relative '../lib/crew_lockfile'
require_gem 'pry-byebug' if CREW_DEBUG
# Debugging starts at the following breakpoint when enabled via crew
# being started with a --debug flag.
@@ -203,8 +204,14 @@ def cache_build
FileUtils.mv build_cachefile, "#{build_cachefile}.bak", force: true if File.file?(build_cachefile)
FileUtils.mv "#{build_cachefile}.sha256", "#{build_cachefile}.sha256.bak", force: true if File.file?("#{build_cachefile}.sha256")
Dir.chdir(CREW_BREW_DIR) do
system "tar c#{@verbose} #{pkg_build_dirname} \
| nice -n 20 zstd -c --ultra --fast -f -o #{build_cachefile} -"
@build_cachefile_lockfile = CrewLockfile.new "#{build_cachefile}.lock"
begin
@build_cachefile_lockfile.lock
system "tar c#{@verbose} #{pkg_build_dirname} \
| nice -n 20 zstd -c --ultra --fast -f -o #{build_cachefile} -"
ensure
@build_cachefile_lockfile.unlock
end
end
end
system "sha256sum #{build_cachefile} > #{build_cachefile}.sha256"
@@ -546,6 +553,13 @@ def download
end
puts "Git cachefile is #{cachefile}".orange if CREW_VERBOSE
if File.file?(cachefile) && File.file?("#{cachefile}.sha256")
while File.file?("#{cachefile}.lock")
@cache_wait_timer = 0
puts "Waited #{@cache_wait_timer}s for #{cachefile} generation..."
sleep 1
@cache_wait_timer += 1
abort "Cachefile not available after #{@cache_wait_timer} seconds." if @cache_wait_timer > 300
end
if Dir.chdir CREW_CACHE_DIR do
system "sha256sum -c #{cachefile}.sha256"
end
@@ -586,10 +600,16 @@ def download
Dir.chdir @extract_dir do
# Do not use --exclude-vcs to exclude .git
# because some builds will use that information.
system "tar c#{@verbose} \
$(find -mindepth 1 -maxdepth 1 -printf '%P\n') | \
nice -n 20 zstd -c -T0 --ultra -20 - > \
#{cachefile}"
@git_cachefile_lockfile = CrewLockfile.new "#{cachefile}.lock"
begin
@git_cachefile_lockfile.lock
system "tar c#{@verbose} \
$(find -mindepth 1 -maxdepth 1 -printf '%P\n') | \
nice -n 20 zstd -c -T0 --ultra -20 - > \
#{cachefile}"
ensure
@git_cachefile_lockfile.unlock
end
end
system 'sha256sum', cachefile, out: "#{cachefile}.sha256"
puts 'Git repo cached.'.lightgreen
@@ -614,6 +634,13 @@ def unpack(meta)
@extract_dir = `tar -Izstd --exclude='./*/*' -tf #{build_cachefile} | cut -d '/' -f 1 | LC_ALL=C sort -u`.chomp
else
@pkg.cached_build = false
while File.file?("#{meta[:filename]}.lock")
@archive_wait_timer = 0
puts "Waited #{@archive_wait_timer}s for #{meta[:filename]} to be available..."
sleep 1
@archive_wait_timer += 1
abort "#{meta[:filename]} not available after #{@archive_wait_timer} seconds." if @archive_wait_timer > 300
end
case File.basename meta[:filename]
when /\.zip$/i
puts "Unpacking archive using 'unzip', this may take a while..."
@@ -1357,17 +1384,6 @@ def build_package(crew_archive_dest)
meta = download
target_dir = unpack meta
# Source ~/.bashrc before every build, and set PATH.
# From https://stackoverflow.com/a/12303443
env = `bash -c ". ~/.bashrc && env"`
env.split("\n").each do |line|
key, value = line.split('=', 2)
if key == 'PATH'
ENV[key] = value
else
ENV[key] ||= value unless value.nil? || value.empty?
end
end
# Build from source and place binaries in CREW_DEST_DIR.
build_and_preconfigure target_dir
@@ -1403,7 +1419,13 @@ def archive_package(crew_archive_dest)
puts 'Using xz to compress package. This may take some time.'.lightblue
pkg_name = "#{@pkg.name}-#{@pkg.version}-chromeos-#{@device[:architecture]}.tar.xz"
Dir.chdir CREW_DEST_DIR do
system "tar c#{@verbose}Jf #{crew_archive_dest}/#{pkg_name} *"
@pkg_name_lockfile = CrewLockfile.new "#{crew_archive_dest}/#{pkg_name}.lock"
begin
@pkg_name_lockfile.lock
system "tar c#{@verbose}Jf #{crew_archive_dest}/#{pkg_name} *"
ensure
@pkg_name_lockfile.unlock
end
end
else
puts 'Using zstd to compress package. This may take some time.'.lightblue
@@ -1413,7 +1435,13 @@ def archive_package(crew_archive_dest)
# decompression speed over compression speed.
# See https://lists.archlinux.org/pipermail/arch-dev-public/2019-March/029542.html
# Use nice so that user can (possibly) do other things during compression.
system "tar c#{@verbose} * | nice -n 20 zstd -c -T0 --ultra -20 - > #{crew_archive_dest}/#{pkg_name}"
@pkg_name_lockfile = CrewLockfile.new "#{crew_archive_dest}/#{pkg_name}.lock"
begin
@pkg_name_lockfile.lock
system "tar c#{@verbose} * | nice -n 20 zstd -c -T0 --ultra -20 - > #{crew_archive_dest}/#{pkg_name}"
ensure
@pkg_name_lockfile.unlock
end
end
end
system "sha256sum #{crew_archive_dest}/#{pkg_name} > #{crew_archive_dest}/#{pkg_name}.sha256"
@@ -1436,6 +1464,41 @@ def archive_package(crew_archive_dest)
end
def upload(pkg_name = nil, pkg_version = nil, gitlab_token = nil, gitlab_token_username = nil, binary_compression = nil)
# Architecture independent:
# 1. Abort early if package manifests exist but are empty, as this
# likely indicates a failed build.
# 2. Check for binary_compression value in file.
# 2a. If missing, determine binary_compression value:
# Always set binary_compression to '.gem' for Ruby gems, otherwise
# assume a default of 'tar.zst'
# 2b. Add missing binary_compression value to file.
#
# 3. Set sha256 hash variables for all architectures to nil.
#
# Per Architecture:
# 3a. Pull sha256 values for binary from package file, if there.
# 3b. Get gitlab URL for uploaded binary.
# 3c. Check to see if an existing upload exists.
# 3d. Check for local binary.
# 3e. If a local binary doesn't exist, but a remote binary exists,
# download it.
# 3f. Skip architecture if a local or remote binary doesn't exist.
# 3g. If a local binary exists, check to make sure it isn't currently
# being written to.
# 3h. Figure out sha256 for local binary. (local_sha256)
# 3i. If an existing upload exists, check its hash. (remote_sha256)
# 3j. If the hashes are different, then ask if the new binary
# should be uploaded. (Default to no.)
# 3k. If the hashes are the same, don't bother uploading.
# 3l. If an existing upload does not exist, decide to upload.
# 4. Upload.
# 5. Generate new or replacement binary_sha256 block and add to
# package file.
# 6. If run-architecture-specific manifests for the package are missing,
# attempt to install the package so the manifest files for the
# currently running architecture are saved locally. (This is used
# by build workflows to make sure updated manifests get
# uploaded.)
abort "\nPackage to be uploaded was not specified.\n".lightred if pkg_name.nil?
abort "\nGITLAB_TOKEN environment variable not set.\n".lightred if gitlab_token.nil?
abort "\nGITLAB_TOKEN_USERNAME environment variable not set.\n".lightred if gitlab_token_username.nil?
@@ -1444,99 +1507,154 @@ def upload(pkg_name = nil, pkg_version = nil, gitlab_token = nil, gitlab_token_u
packages.strip!
[packages].each do |package|
binary_compression_not_in_file = binary_compression.nil?
pkg_file = "#{CREW_LOCAL_REPO_ROOT}/packages/#{package}.rb"
# 1. Abort early if package manifests exist but are empty, as this
# likely indicates a failed build.
abort "#{package} has an empty manifest. Something probably went wrong with the build.".lightred if File.empty?("#{CREW_LOCAL_REPO_ROOT}/manifest/#{ARCH}/#{@pkg.name.chr}/#{@pkg.name}.filelist")
# 2. Check for binary_compression value in file.
binary_compression_not_in_file = binary_compression.nil?
if binary_compression_not_in_file
# 2a. If missing, determine binary_compression value:
# Always set binary_compression to '.gem' for Ruby gems, otherwise
# assume a default of 'tar.zst'
binary_compression = @pkg.superclass.to_s == 'RUBY' ? 'gem' : 'tar.zst'
binary_compression_line = " binary_compression '#{binary_compression}'"
# 2b. Add missing binary_compression value to file.
puts "Setting binary compression in #{pkg_file} to '#{binary_compression}'..."
file = File.read(pkg_file)
bc_re = /^\ \ binary_compression.*/
source_re = /^\ \ source_sha256.*/
git_hashtag_re = /^\ \ git_hashtag.*/
source_url_re = /^\ \ source_url.*/
if file.match(bc_re)
File.write(pkg_file, file.gsub(bc_re, binary_compression_line))
elsif file.match(source_re)
source_sha256_bc_line = "#{file.match(source_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_re, source_sha256_bc_line))
elsif file.match(git_hashtag_re)
git_hashtag_bc_line = "#{file.match(git_hashtag_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(git_hashtag_re, git_hashtag_bc_line))
elsif file.match(source_url_re)
source_url_bc_line = "#{file.match(source_url_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_url_re, source_url_bc_line))
else
puts "Unable to tell where to add \"#{binary_compression_line}\" to #{pkg_file}. Please add it and manually.".lightblue
end
end
# 3. Set sha256 hash variables for all architectures to nil.
binary_sha256_hash = { armv7l: nil, i686: nil, x86_64: nil }
# The following is used to figure out where a non-standard
# binary_sha256 section might be, such as in a gcc_lib file.
starting_binary_sha256_hash = { armv7l: nil, i686: nil, x86_64: nil }
binary_compression = 'gem' if @pkg.superclass.to_s == 'RUBY'
%w[x86_64 i686 armv7l].each do |arch|
# Load existing binary_sha256 hash from the package file.
upload_binary = false
local_binary = false
puts "Processing package: #{package}, Arch: #{arch}".yellow
puts
# 3a. Pull sha256 values for binary from package file, if there.
binary_sha256_hash[arch.to_sym] = @pkg.binary_sha256[arch.to_sym] if @pkg.binary_sha256&.key?(arch.to_sym)
starting_binary_sha256_hash[arch.to_sym] = @pkg.binary_sha256[arch.to_sym] if @pkg.binary_sha256&.key?(arch.to_sym)
release_dir = "#{CREW_LOCAL_REPO_ROOT}/release/#{arch}"
new_tarfile = if binary_compression.nil?
Dir["#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.{tar.xz,tar.zst}"].max_by { |f| File.mtime(f) }
else
"#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.#{binary_compression}"
end
if new_tarfile.nil? || !File.file?(new_tarfile)
puts "#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.#{binary_compression.nil? ? '(tar.xz|tar.zst)' : binary_compression} not found.\n".lightred
next arch
end
local_tarfile = "#{release_dir}/#{package}-#{pkg_version}-chromeos-#{arch}.#{binary_compression}"
# If the filelist exists and is empty, the build was probably supposed to have files.
abort "#{package} has an empty manifest. Something probably went wrong with the build.".lightred if File.empty?("#{CREW_LOCAL_REPO_ROOT}/manifest/#{arch}/#{@pkg.name.chr}/#{@pkg.name}.filelist")
if binary_compression_not_in_file
ext = File.extname(new_tarfile)
binary_compression = @pkg.superclass.to_s == 'RUBY' ? 'gem' : "tar#{ext}"
binary_compression_line = " binary_compression '#{binary_compression}'"
puts "Setting binary compression in #{pkg_file} to '#{binary_compression}'..."
# Add binary compression setting, and add the line if it doesn't exist.
file = File.read(pkg_file)
bc_re = /^\ \ binary_compression.*/
source_re = /^\ \ source_sha256.*/
git_hashtag_re = /^\ \ git_hashtag.*/
source_url_re = /^\ \ source_url.*/
if file.match(bc_re)
File.write(pkg_file, file.gsub(bc_re, binary_compression_line))
elsif file.match(source_re)
source_sha256_bc_line = "#{file.match(source_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_re, source_sha256_bc_line))
elsif file.match(git_hashtag_re)
git_hashtag_bc_line = "#{file.match(git_hashtag_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(git_hashtag_re, git_hashtag_bc_line))
elsif file.match(source_url_re)
source_url_bc_line = "#{file.match(source_url_re)}\n#{binary_compression_line}"
File.write(pkg_file, file.gsub(source_url_re, source_url_bc_line))
else
puts "Unable to tell where to add \"#{binary_compression_line}\" to #{pkg_file}. Please add it and manually.".lightblue
end
end
puts "Package: #{package}, Arch: #{arch}".yellow
puts
puts "\e[1A\e[KGenerating sha256sum ...\r".orange
new_sha256 = Digest::SHA256.hexdigest(File.read(new_tarfile))
puts "Uploading #{new_tarfile} ...".orange if CREW_VERBOSE
noname = new_tarfile.split("#{package}-").last
# 3b. Get gitlab URL for uploaded binary.
noname = local_tarfile.split("#{package}-").last
new_version = noname.split('-chromeos').first
new_url = "#{CREW_GITLAB_PKG_REPO}/generic/#{package}/#{new_version}_#{arch}/#{new_tarfile}".gsub("#{release_dir}/", '')
token_label = gitlab_token.split('-').first == 'glpat' ? 'PRIVATE-TOKEN' : 'DEPLOY-TOKEN'
new_url = "#{CREW_GITLAB_PKG_REPO}/generic/#{package}/#{new_version}_#{arch}/#{local_tarfile}".gsub("#{release_dir}/", '')
# 3c. Check to see if an existing upload exists.
puts "\e[1A\e[KChecking for existing upload ...\r".orange
if `curl -fsI #{new_url}`.lines.first.split[1] == '200'
puts "\n#{File.basename(new_tarfile)} has already been uploaded.\nPlease change the #{package} package version from #{new_version} and try again.\n".lightred
if @opt_force || Package.agree_default_no('Do you want to overwrite the existing upload instead')
puts "\e[1A\e[KOverwriting existing upload...\r".orange
crewlog "#{arch} = #{new_sha256}"
binary_sha256_hash[arch.to_sym] = new_sha256
else
puts "\e[1A\e[KWill NOT overwite the existing upload. Determining sha256 of already uploaded file...\r".orange
upstream_sha256 = `curl -Ls #{new_url} | sha256sum`.chomp.split.first
crewlog "#{arch} = #{upstream_sha256}"
binary_sha256_hash[arch.to_sym] = upstream_sha256
next arch
remote_binary = `curl -fsI #{new_url}`.lines.first.split[1] == '200'
puts "\e[1A\e[KUpload exists.\n".green
# 3d. Check for local binary.
if local_tarfile.nil? || !File.file?(local_tarfile)
puts "\e[1A\e[K#{local_tarfile} not found.\n\r".lightred
puts "\e[1A\e[KSkipping upload for #{arch}.\n\r".lightred
local_binary = false
next arch unless remote_binary
# 3e. If a local binary doesn't exist, but a remote binary exists,
# download it.
system "curl -Ls #{new_url} > #{local_tarfile}"
else
local_binary = true
puts "\e[1A\e[KLocal package binary exists.\n".green
end
# 3f. Skip architecture if a local or remote binary doesn't exist.
next arch unless local_binary || remote_binary
puts "\e[1A\e[KGenerating sha256sums ...\r".orange
# 3g. If a local binary exists, check to make sure it isn't currently
# being written to.
while File.file?("#{local_tarfile}.lock")
@pkg_binary_wait_timer = 0
puts "Waited #{@pkg_binary_wait_timer}s for #{local_tarfile} to be available..."
sleep 1
@pkg_binary_wait_timer += 1
abort "#{local_tarfile} not available after #{@pkg_binary_wait_timer} seconds." if @pkg_binary_wait_timer > 300
end
# At this point we either have a local build, or a downloaded
# binary from a prior build.
# 3h. Figure out sha256 for local binary. (local_sha256)
local_sha256 = Digest::SHA256.hexdigest(File.read(local_tarfile))
# 3i. If an existing upload exists, check its hash. (remote_sha256)
if remote_binary
remote_sha256 = if local_binary
`curl -Ls #{new_url} | sha256sum`.chomp.split.first
else
local_sha256
end
end
if remote_binary
if local_binary
if remote_sha256 == local_sha256
# 3k. If the hashes are the same, don't bother uploading.
puts "\e[1A\e[KThis build of #{File.basename(local_tarfile)} has already been uploaded.\n".lightred
crewlog "#{arch} = #{local_sha256}"
binary_sha256_hash[arch.to_sym] = local_sha256
elsif @opt_force || Package.agree_default_no('Do you want to overwrite the existing upload')
# 3j. If the hashes are different, then ask if the new binary
# should be uploaded. (Default to no.)
puts "\e[1A\e[KOverwriting existing upload...\r".orange
crewlog "#{arch} = #{local_sha256}"
binary_sha256_hash[arch.to_sym] = local_sha256
upload_binary = true
else
puts "\e[1A\e[K\nAnother build of #{File.basename(local_tarfile)} with sha256 #{remote_sha256} has already been uploaded.\n".lightred
crewlog "#{arch} = #{remote_sha256}"
binary_sha256_hash[arch.to_sym] = remote_sha256
end
end
else
crewlog "#{arch} = #{new_sha256}"
binary_sha256_hash[arch.to_sym] = new_sha256
# 3l. If an existing upload does not exist, decide to upload.
upload_binary = true if local_binary
crewlog "#{arch} = #{local_sha256}"
binary_sha256_hash[arch.to_sym] = local_sha256
end
puts "curl -# --header \"#{token_label}: #{gitlab_token}\" --upload-file \"#{new_tarfile}\" \"#{new_url}\" | cat" if CREW_VERBOSE
next unless upload_binary
# 4. Upload.
puts "Uploading #{local_tarfile} ...".orange if CREW_VERBOSE
token_label = gitlab_token.split('-').first == 'glpat' ? 'PRIVATE-TOKEN' : 'DEPLOY-TOKEN'
puts "curl -# --header \"#{token_label}: #{gitlab_token}\" --upload-file \"#{local_tarfile}\" \"#{new_url}\" | cat" if CREW_VERBOSE
puts "\e[1A\e[KUploading...\r".orange
output = `curl -# --header "#{token_label}: #{gitlab_token}" --upload-file "#{new_tarfile}" "#{new_url}" | cat`.chomp
output = `curl -# --header "#{token_label}: #{gitlab_token}" --upload-file "#{local_tarfile}" "#{new_url}" | cat`.chomp
puts "\e[1A\e[KChecking upload...\r".orange
if output.include?('201 Created')
puts "curl -Ls #{new_url} | sha256sum" if CREW_VERBOSE
upstream_sha256 = `curl -Ls #{new_url} | sha256sum`.chomp.split.first
if upstream_sha256 == new_sha256
remote_sha256 = `curl -Ls #{new_url} | sha256sum`.chomp.split.first
if remote_sha256 == local_sha256
puts "#{output}\n".lightgreen
else
if CREW_VERBOSE
puts "expected sha256 hash=#{new_sha256}"
puts "upstream sha256 hash=#{upstream_sha256}"
puts "expected sha256 hash=#{local_sha256}"
puts "remote sha256 hash=#{remote_sha256}"
end
puts "#{output}. Checksum mismatch. Skipping binary_sha256 update in #{pkg_file}...".lightred
next
@@ -1548,7 +1666,8 @@ def upload(pkg_name = nil, pkg_version = nil, gitlab_token = nil, gitlab_token_u
end
end
# Generate new or replacement binary_sha256 block.
# 5. Generate new or replacement binary_sha256 block and add to
# package file.
puts "\e[1A\e[KGenerating binary_sha256 block for package file...\r".orange
binary_sha256_block = ''
binary_sha256_block << "\n binary_sha256({\n"
@@ -1601,6 +1720,12 @@ def upload(pkg_name = nil, pkg_version = nil, gitlab_token = nil, gitlab_token_u
end
end
end
# 6. If run-architecture-specific manifests for the package are missing,
# attempt to install the package so the manifest files for the
# currently running architecture are saved locally. (This is used
# by build workflows to make sure updated manifests get
# uploaded.)
system "crew install #{@pkg.name}", exception: false unless File.exist?("#{CREW_LOCAL_REPO_ROOT}/manifest/#{ARCH}/#{@pkg.name.chr}/#{@pkg.name}.filelist")
puts "\e[1A\e[K🎉 Uploads complete for #{package}. 🎉\r\n".lightgreen
end
end

View File

@@ -3,7 +3,7 @@
require 'etc'
OLD_CREW_VERSION ||= defined?(CREW_VERSION) ? CREW_VERSION : '1.0'
CREW_VERSION ||= '1.58.8' unless defined?(CREW_VERSION) && CREW_VERSION == OLD_CREW_VERSION
CREW_VERSION ||= '1.58.9' unless defined?(CREW_VERSION) && CREW_VERSION == OLD_CREW_VERSION
# Kernel architecture.
KERN_ARCH ||= Etc.uname[:machine]

569
lib/crew_lockfile.rb Normal file
View File

@@ -0,0 +1,569 @@
# Based upon the fork of https://github.com/ahoward/lockfile at
# https://github.com/mikisvaz/lockfile/tree/master and the PRs at
# https://github.com/ahoward/lockfile/pulls
# Usage:
# require_relative '../lib/crew_lockfile'
# lockfile = CrewLockfile.new 'file.lock'
# begin
# lockfile.lock
# p 42
# ensure
# lockfile.unlock
# end
unless defined?($__crew_lockfile__) || defined?(CrewLockfile)
require 'socket'
require 'timeout'
require 'fileutils'
require_relative '../lib/color'
class CrewLockfile
VERSION = '2.1.9'
def self.version = CrewLockfile::VERSION
def version = CrewLockfile::VERSION
def self.description
'a ruby library for creating perfect and NFS safe lockfiles'
end
# Base error for all locking failures.
class LockError < StandardError; end
# The lockfile vanished out from under us — another process stole or
# removed the lock (raised into the locking thread by the refresher).
class StolenLockError < LockError; end
# #lock was called on an instance that already holds its lock.
class StackingLockError < LockError; end
# The freshly linked lockfile's stat does not match the temp file we
# linked from, i.e. the hard link did not really succeed.
class StatLockError < LockError; end
# The configured number of lock attempts was exhausted.
class MaxTriesLockError < LockError; end
# The configured timeout elapsed before the lock was acquired.
class TimeoutLockError < LockError; end
# NFS-level errors (ESTALE/EIO) occurred while locking.
class NFSLockError < LockError; end
# #unlock was called on an instance that is not locked.
class UnLockError < LockError; end
# A precomputed cycle of back-off sleep durations: min, min+inc, ...,
# max. #next walks the cycle and wraps around at the end.
class SleepCycle < Array
  attr_reader :min, :max, :range, :inc

  # min: shortest sleep, max: longest sleep, inc: step between values.
  # Raises RangeError when the parameters cannot form a valid cycle.
  def initialize(min, max, inc)
    # Start with an EMPTY array. The previous super(min)/super(max)/
    # super(inc) calls invoked Array#initialize(size), pre-filling the
    # cycle with nil padding elements — #next then returned nil and
    # `sleep nil` raised TypeError during lock contention.
    super()
    @min = Float(min)
    @max = Float(max)
    @inc = Float(inc)
    @range = @max - @min
    raise RangeError, "max(#{@max}) <= min(#{@min})" if @max <= @min
    raise RangeError, "inc(#{@inc}) > range(#{@range})" if @inc > @range
    raise RangeError, "inc(#{@inc}) <= 0" if @inc <= 0
    raise RangeError, "range(#{@range}) <= 0" if @range <= 0
    s = @min
    push(s) and s += @inc while s <= @max
    # Guarantee the longest sleep is exactly @max.
    self[-1] = @max if self[-1] < @max
    reset
  end

  # Return the next sleep duration, wrapping to the start after @max.
  def next
    ret = self[@idx]
    @idx = ((@idx + 1) % size)
    ret
  end

  # Restart the cycle at the shortest sleep.
  def reset
    @idx = 0
  end
end
# Cached hostname, embedded in lock-id info and temp-file names.
HOSTNAME = Socket.gethostname
DEFAULT_RETRIES = nil # maximum number of lock attempts (nil = unlimited)
DEFAULT_TIMEOUT = nil # the longest we will try (nil = forever)
DEFAULT_MAX_AGE = 3600 # lockfiles older than this many seconds are stale
DEFAULT_SLEEP_INC = 2 # sleep cycle is this much longer each time
DEFAULT_MIN_SLEEP = 2 # shortest sleep time
DEFAULT_MAX_SLEEP = 32 # longest sleep time
DEFAULT_SUSPEND = 1800 # if we steal a lock, wait this long before we go on
DEFAULT_REFRESH = 8 # how often (seconds) we touch/validate the held lock
DEFAULT_DONT_CLEAN = false # if true, leave lock files lying around on exit
DEFAULT_POLL_RETRIES = 16 # this many polls makes one 'try'
DEFAULT_POLL_MAX_SLEEP = 0.08 # the longest we'll sleep between polls
DEFAULT_DONT_SWEEP = false # if true, skip cleaning up after dead processes on our host
DEFAULT_DONT_USE_LOCK_ID = false # if true, don't dump lock info into the lockfile
DEFAULT_DEBUG = ENV['LOCKFILE_DEBUG'] || false
# Class-level configuration: these values seed every new instance (see
# #initialize / #getopt) and can be changed globally at runtime.
class << CrewLockfile
  attr_accessor :retries, :max_age, :sleep_inc, :min_sleep, :max_sleep, :suspend, :timeout, :refresh, :debug, :dont_clean, :poll_retries, :poll_max_sleep, :dont_sweep, :dont_use_lock_id
  # Reset all class-level options to the DEFAULT_* constants.
  def init
    @retries = DEFAULT_RETRIES
    @max_age = DEFAULT_MAX_AGE
    @sleep_inc = DEFAULT_SLEEP_INC
    @min_sleep = DEFAULT_MIN_SLEEP
    @max_sleep = DEFAULT_MAX_SLEEP
    @suspend = DEFAULT_SUSPEND
    @timeout = DEFAULT_TIMEOUT
    @refresh = DEFAULT_REFRESH
    @dont_clean = DEFAULT_DONT_CLEAN
    @poll_retries = DEFAULT_POLL_RETRIES
    @poll_max_sleep = DEFAULT_POLL_MAX_SLEEP
    @dont_sweep = DEFAULT_DONT_SWEEP
    @dont_use_lock_id = DEFAULT_DONT_USE_LOCK_ID
    @debug = DEFAULT_DEBUG
    # Unbuffer stdio so debug traces appear immediately.
    $stdout.sync = true if @debug
    $stderr.sync = true if @debug
  end
end
CrewLockfile.init
attr_reader :klass, :path, :opts, :locked, :thief, :refresher, :dirname, :basename, :clean, :retries, :max_age, :sleep_inc, :min_sleep, :max_sleep, :suspend, :refresh, :timeout, :dont_clean, :poll_retries, :poll_max_sleep, :dont_sweep, :dont_use_lock_id
attr_accessor :debug
alias thief? thief
alias locked? locked
alias debug? debug
# Exclusive-create helper that mimics File.open with CREAT|EXCL
# semantics: take the lock on +path+ itself with a single non-waiting
# attempt, then open the file (args/block forwarded to File.open).
# Raises Errno::EEXIST when the path is already locked.
def self.create(path, *options, &)
  # One immediate attempt: no retries, no polling, no stealing delay.
  opts = {
    'retries' => 0,
    'min_sleep' => 0,
    'max_sleep' => 1,
    'sleep_inc' => 1,
    'max_age' => nil,
    'suspend' => 0,
    'refresh' => nil,
    'timeout' => nil,
    'poll_retries' => 0,
    'dont_clean' => true,
    'dont_sweep' => false,
    'dont_use_lock_id' => true
  }
  begin
    new(path, opts).lock
  rescue LockError
    # Translate "already locked" into the exception callers of an
    # exclusive open expect.
    raise Errno::EEXIST, path
  end
  File.open(path, *options, &)
end
# Build a GC finalizer that removes +file+, but only when it runs in
# the same process that created it — a forked child must not delete
# its parent's lockfile. All errors are swallowed: cleanup here is
# strictly best-effort.
def self.finalizer_proc(file)
  owner_pid = Process.pid
  lambda do |_object_id|
    begin
      File.unlink(file) if Process.pid == owner_pid
    rescue StandardError
      nil
    end
  end
end
# path: the lockfile to manage. opts: per-instance overrides for the
# class-level defaults (string or symbol keys, see #getopt). If a
# block is given, the lock is acquired immediately and released after
# the block runs (see #lock).
def initialize(path, opts = {}, &block)
  @klass = self.class
  @path = path
  @opts = opts
  # Each option falls back to the class-level setting (CrewLockfile.init).
  @retries = getopt 'retries', @klass.retries
  @max_age = getopt 'max_age', @klass.max_age
  @sleep_inc = getopt 'sleep_inc', @klass.sleep_inc
  @min_sleep = getopt 'min_sleep', @klass.min_sleep
  @max_sleep = getopt 'max_sleep', @klass.max_sleep
  @suspend = getopt 'suspend', @klass.suspend
  @timeout = getopt 'timeout', @klass.timeout
  @refresh = getopt 'refresh', @klass.refresh
  @dont_clean = getopt 'dont_clean', @klass.dont_clean
  @poll_retries = getopt 'poll_retries', @klass.poll_retries
  @poll_max_sleep = getopt 'poll_max_sleep', @klass.poll_max_sleep
  @dont_sweep = getopt 'dont_sweep', @klass.dont_sweep
  @dont_use_lock_id = getopt 'dont_use_lock_id', @klass.dont_use_lock_id
  @debug = getopt 'debug', @klass.debug
  # Serialises refresher-thread shutdown against lockfile reads.
  @semaphore = Mutex.new
  @sleep_cycle = SleepCycle.new @min_sleep, @max_sleep, @sleep_inc
  # Finalizer that removes the lockfile at GC/exit unless dont_clean.
  @clean = @dont_clean ? nil : CrewLockfile.finalizer_proc(@path)
  @dirname = File.dirname @path
  @basename = File.basename @path
  @thief = false
  @locked = false
  @refresher = nil
  lock(&block) if block
end
##
# Executes the given block after acquiring the lock and ensures that
# the lock is relinquished afterwards, even when the block raises.
# Raises ArgumentError when called without a block.
#
def synchronize
  raise ArgumentError, 'block must be given' unless block_given?
  begin
    lock
    yield
  ensure
    unlock
  end
end
# Acquire the lock at @path. Without a block: returns self with the
# lock held (the caller must #unlock). With a block: runs the block
# under the lock, keeps the lockfile refreshed meanwhile, unlocks
# afterwards, and returns the block's value.
# The lock is taken NFS-safely by hard-linking a unique temp file onto
# @path and verifying via lstat that the link really succeeded.
# Raises StackingLockError, MaxTriesLockError, TimeoutLockError,
# NFSLockError or (from the refresher) StolenLockError.
def lock
  raise StackingLockError, "<#{@path}> is locked!".lightred if @locked
  # First clean up lockfiles left behind by dead processes on this host.
  sweep unless @dont_sweep
  ret = nil
  attempt do
    @sleep_cycle.reset
    create_tmplock do |f|
      Timeout.timeout(@timeout) do
        tmp_path = f.path
        tmp_stat = f.lstat
        n_retries = 0
        trace { "attempting to lock <#{@path}>..." }
        begin
          i = 0
          begin
            trace { "polling attempt <#{i}>..." }
            begin
              # Hard links are atomic even over NFS: success means we won.
              File.link tmp_path, @path
            rescue Errno::ENOENT
              # Our temp file vanished; restart the whole attempt.
              try_again!
            end
            lock_stat = File.lstat @path
            # Confirm @path really is OUR temp file, not a racer's.
            raise StatLockError, 'stats do not agree'.lightred unless
              (tmp_stat.rdev == lock_stat.rdev) && (tmp_stat.ino == lock_stat.ino)
            trace { "acquired lock <#{@path}>" }
            @locked = true
          rescue StandardError
            # Short poll loop before falling back to the longer sleeps.
            i += 1
            unless i >= @poll_retries
              t = [rand(@poll_max_sleep), @poll_max_sleep].min
              trace { "poll sleep <#{t}>..." }
              sleep t
              retry
            end
            raise
          end
        rescue StandardError
          n_retries += 1
          trace { "n_retries <#{n_retries}>" }
          case validlock?
          when true
            # Someone legitimately holds the lock: back off and retry.
            raise MaxTriesLockError, "surpassed retries <#{@retries}>" if
              @retries && (n_retries >= @retries)
            trace { 'found valid lock' }
            sleeptime = @sleep_cycle.next
            trace { "sleep <#{sleeptime}>..." }
            sleep sleeptime
          when false
            # Stale lock: steal it, then suspend so a not-quite-dead
            # holder has a chance to notice before we proceed.
            trace { 'found invalid lock and removing' }
            begin
              File.unlink @path
              @thief = true
              warn "<#{@path}> stolen by <#{Process.pid}> at <#{timestamp}>"
              trace { 'i am a thief!' }
            rescue Errno::ENOENT => e
              warn(errmsg(e))
            end
            trace { "suspending <#{@suspend}>" }
            sleep @suspend
          when nil
            # No lockfile and no max_age configured: just retry.
            raise MaxTriesLockError, "surpassed retries <#{@retries}>" if
              @retries && (n_retries >= @retries)
          end
          retry
        end
      end
    rescue Timeout::Error
      raise TimeoutLockError, "surpassed timeout <#{@timeout}>"
    end
    if block_given?
      stolen = false
      # Background thread keeps the lockfile fresh while the block runs.
      @refresher = (@refresh ? new_refresher : nil)
      begin
        begin
          ret = yield @path
        rescue StolenLockError
          stolen = true
          raise
        end
      ensure
        begin
          if @refresher&.status
            begin
              @semaphore.synchronize do
                @refresher.kill
              end
            rescue StandardError
              @refresher.kill
            end
          end
          @refresher = nil
        ensure
          # A stolen lock is no longer ours to remove.
          unlock unless stolen
        end
      end
    else
      @refresher = (@refresh ? new_refresher : nil)
      # Arrange removal of the lockfile at GC/exit (unless dont_clean).
      ObjectSpace.define_finalizer self, @clean if @clean
      ret = self
    end
  rescue Errno::ESTALE, Errno::EIO => e
    raise(NFSLockError, errmsg(e))
  end
  return ret
end
# Remove leftover temp lock files (*.lck) in our lock directory that
# were created by processes on THIS host which are no longer running.
# Everything is best-effort: any per-entry error skips that entry, and
# any other error aborts the sweep with a warning.
def sweep
  glob = File.join(@dirname, '.*lck')
  paths = Dir[glob]
  paths.each do |path|
    basename = File.basename path
    # Temp names look like ".<host>_<pid>_<seed>_....lck" (see #tmpnam).
    pat = /^\s*\.([^_]+)_([^_]+)/o
    if pat.match(basename)
      host = ::Regexp.last_match(1)
      pid = ::Regexp.last_match(2)
    else
      next
    end
    host.gsub!(/^\.+|\.+$/, '')
    # Compare only the first component of dotted hostnames.
    quad = host.split '.'
    host = quad.first
    pat = /^\s*#{host}/i
    if pat.match(HOSTNAME) && /^\s*\d+\s*$/.match(pid)
      if alive?(pid)
        trace { "process <#{pid}> on <#{host}> is still alive" }
        trace { "ignoring <#{path}>" }
      else
        trace { "process <#{pid}> on <#{host}> is no longer alive" }
        trace { "sweeping <#{path}>" }
        FileUtils.rm_f path
      end
    else
      # Different host (or unparsable pid): not ours to clean up.
      trace { "ignoring <#{path}> generated by <#{host}>" }
    end
  rescue StandardError
    next
  end
rescue StandardError => e
  warn(errmsg(e))
end
# Return true when a process with the given pid appears to be running.
# +pid+ may be anything Integer() accepts, e.g. the string parsed out
# of a stale lockfile name.
def alive?(pid)
  pid = Integer(pid.to_s)
  begin
    # Signal 0 performs the existence/permission check without
    # actually delivering a signal.
    Process.kill 0, pid
    true
  rescue Errno::EPERM
    # The process exists but belongs to another user — still alive.
    # (Previously EPERM propagated and aborted the sweep of that entry.)
    true
  rescue Errno::ESRCH
    false
  end
end
# Release a held lock: stop the refresher thread, delete the lockfile,
# and reset state. Raises UnLockError when not locked, and
# StolenLockError when the lockfile was already gone (stolen).
def unlock
  raise UnLockError, "<#{@path}> is not locked!" unless @locked
  if @refresher&.status
    begin
      @semaphore.synchronize do
        @refresher.kill
      end
    rescue StandardError
      # Kill without the semaphore as a last resort.
      @refresher.kill
    end
  end
  @refresher = nil
  begin
    File.unlink @path
  rescue Errno::ENOENT
    raise StolenLockError, @path
  ensure
    # State is reset even when the lock turned out to be stolen.
    @thief = false
    @locked = false
    ObjectSpace.undefine_finalizer self if @clean
  end
end
# Start a background thread that periodically touches the lockfile so
# it never looks stale to other processes and, unless dont_use_lock_id,
# re-reads it to verify we still own it. On any failure the thread
# raises StolenLockError into the locking thread and exits.
def new_refresher
  Thread.new(Thread.current, @path, @refresh, @dont_use_lock_id) do |thread, path, refresh, dont_use_lock_id|
    loop do
      touch path
      trace { "touched <#{path}> @ <#{Time.now.to_f}>" }
      unless dont_use_lock_id
        txt = nil
        # Read under the semaphore so unlock can't race the read.
        @semaphore.synchronize do
          txt = File.read(path)
        end
        loaded = load_lock_id(txt)
        trace { "loaded <\n#{loaded.inspect}\n>" }
        # Changed contents => another process overwrote our lock.
        raise unless loaded == @lock_id
      end
      sleep refresh
    # rescue Exception => e
    rescue StandardError => e
      trace { errmsg e }
      thread.raise StolenLockError
      Thread.exit
    end
  end
end
# Decide whether the lockfile at @path currently represents a live
# lock. Returns true (valid), false (stale or vanished mid-check — a
# candidate for stealing), or nil (no max_age configured and the file
# is absent: keep waiting).
def validlock?
  unless @max_age
    # Without an age limit we can only report presence: true when the
    # lockfile exists, nil (never false) when it does not.
    return File.exist?(@path) ? true : nil
  end
  # Best-effort NFS attribute-cache bust so the mtime read is fresh.
  begin
    uncache @path
  rescue StandardError
    nil
  end
  begin
    (Time.now - File.stat(@path).mtime) < @max_age
  rescue Errno::ENOENT
    false
  end
end
# Force the OS/NFS attribute cache for +file+ to refresh by creating
# and removing a temporary hard link and re-applying the mode and
# times, so a subsequent File.stat observes current values.
# +file+ may be a File object or a path.
def uncache(file)
  refresh = nil
  begin
    is_a_file = file.is_a?(File)
    path = (is_a_file ? file.path : file.to_s)
    stat = (is_a_file ? file.stat : File.stat(file.to_s))
    refresh = tmpnam(File.dirname(path))
    File.link path, refresh
    File.chmod stat.mode, path
    File.utime stat.atime, stat.mtime, path
  ensure
    # Always remove the scratch link we created above.
    begin
      File.unlink refresh if refresh
    rescue Errno::ENOENT => e
      warn(errmsg(e))
    end
  end
end
# Create the unique temp file that #lock will try to hard-link onto
# @path, optionally write our lock-id info into it first, yield the
# open file, and always remove the temp file afterwards.
def create_tmplock
  tmplock = tmpnam @dirname
  begin
    create(tmplock) do |f|
      unless dont_use_lock_id
        @lock_id = gen_lock_id
        dumped = dump_lock_id
        trace { "lock_id <\n#{@lock_id.inspect}\n>" }
        f.write dumped
        f.flush
      end
      yield f
    end
  ensure
    if tmplock
      begin
        File.unlink tmplock
      rescue Errno::ENOENT => e
        warn(errmsg(e))
      end
    end
  end
end
# Identity of this lock holder — host, pid, ppid and creation time —
# written into the lockfile so other processes (and our refresher
# thread) can recognise the owner.
def gen_lock_id
  {
    'host' => HOSTNAME.to_s,
    'pid'  => Process.pid.to_s,
    'ppid' => Process.ppid.to_s,
    'time' => timestamp
  }
end
# Wall-clock timestamp with microsecond precision, e.g.
# "2025-04-25 17:39:18.000042".
def timestamp
  time = Time.now
  # Zero-pad the microseconds on the LEFT: usec 42 must render as
  # ".000042". The previous right-padding produced ".420000", shifting
  # the value by up to four orders of magnitude.
  format('%s.%06d', time.strftime('%Y-%m-%d %H:%M:%S'), time.usec)
end
# Serialise +lock_id+ (default: our own @lock_id) into the text that
# is written into the lockfile. Also caches the fields in @host, @pid,
# @ppid and @time, exactly as the original implementation did.
def dump_lock_id(lock_id = @lock_id)
  @host = lock_id['host']
  @pid = lock_id['pid']
  @ppid = lock_id['ppid']
  @time = lock_id['time']
  format("host: %s\npid: %s\nppid: %s\ntime: %s\n", @host, @pid, @ppid, @time)
end
# Parse lockfile text of the form "key: value" lines back into a hash
# (inverse of #dump_lock_id). Lines that do not match the key/value
# shape are skipped.
def load_lock_id(buf)
  lock_id = {}
  kv = /([^:]+):(.*)/o
  buf.each_line do |line|
    m = kv.match line
    # Guard BEFORE dereferencing: a non-matching line (e.g. one with
    # no colon) previously raised NoMethodError on the nil MatchData.
    next unless m
    k = m[1]
    v = m[2]
    next unless k && v
    lock_id[k.strip] = v.strip
  end
  lock_id
end
# Generate a unique temp-file name inside +dir+ for the pre-link lock
# attempt: "<dir>/.<host>_<pid>_<seed>_<sec>_<usec>_<rand>.lck".
# #sweep later parses the host and pid back out of this name.
def tmpnam(dir, seed = File.basename($PROGRAM_NAME))
  now = Time.now
  seconds = now.to_i
  micros = now.usec
  format('%s%s.%s_%d_%s_%d_%d_%d.lck',
         dir, File::SEPARATOR, HOSTNAME, Process.pid, seed, seconds, micros, rand(seconds))
end
# Exclusively create +path+ (mode 0644 under umask 022) for writing.
# With a block: yield the open file, close it afterwards, and return
# the block's value. Without a block: return the open File (the
# caller closes). Raises Errno::EEXIST when the file already exists.
def create(path)
  previous_umask = nil
  file = nil
  begin
    previous_umask = File.umask 0o22
    file = File.open path, File::WRONLY | File::CREAT | File::EXCL, 0o644
  ensure
    File.umask previous_umask if previous_umask
  end
  return file unless block_given?
  begin
    yield file
  ensure
    file.close
  end
end
# Update the lockfile's mtime (creating it if absent); used by the
# refresher thread to keep a held lock from ever looking stale.
def touch(path)
  FileUtils.touch path
end
# Look up an option in the hash given to #initialize, accepting the
# key as given, as a string, or as a symbol; fall back to +default+
# when none of those forms is present.
def getopt(key, default = nil)
  [key, key.to_s, key.to_s.intern].each do |candidate|
    return @opts[candidate] if @opts.key?(candidate)
  end
  default
end
# String conversion: a CrewLockfile is represented by its lockfile path.
def to_str = @path
alias to_s to_str
# Emit a debug trace line (passed directly or built by the block) when
# debugging is enabled; the block form avoids constructing messages
# that would only be discarded.
def trace(tracemsg = nil)
  return unless @debug
  warn(tracemsg || yield)
end
# Format an exception (class, message, full backtrace) for warning
# output, colored red via the color library's String#lightred.
def errmsg(err)
  format("%s (%s)\n%s\n", err.class, err.message, err.backtrace.join("\n")).lightred
end
def attempt
ret = nil
loop { break unless catch('attempt') { ret = yield } == 'try_again' }
ret
end
# Signal #attempt to re-run its block from the top.
def try_again!
  throw 'attempt', 'try_again'
end
alias again! try_again!
# Signal #attempt to stop without another retry.
def give_up!
  throw 'attempt', 'give_up'
end
end
# Kernel-style convenience constructor (cf. Kernel#Integer):
# CrewLockfile('f.lock') { ... } == CrewLockfile.new('f.lock') { ... }
def CrewLockfile(path, *options, &) # rubocop: disable Naming/MethodName
  CrewLockfile.new(path, *options, &)
end
$__crew_lockfile__ = __FILE__
end

16
tests/test_crew_lockfile.rb Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env ruby
# Smoke test for CrewLockfile: shows that test.lock is absent before
# locking, exists while the lock is held, and is gone after unlock.
# (The `ls` output is informational only; nothing is asserted.)
require_relative '../lib/crew_lockfile'
puts 'Before lock:'
system 'ls test.lock'
lockfile = CrewLockfile.new 'test.lock'
# Trace the lock lifecycle to stderr.
lockfile.debug = true
begin
lockfile.lock
puts 'During lock:'
system 'ls test.lock'
p 42
ensure
# Always release so a failed run does not leave a stale test.lock.
lockfile.unlock
end
puts 'After lock:'
system 'ls test.lock'