Merge pull request #18809 from reitermarkus/download-lock-interrupt
Fix incorrect download locking.
commit cf75c48951
@@ -392,60 +392,63 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
     end_time = Time.now + timeout if timeout
 
     download_lock = DownloadLock.new(temporary_path)
-    download_lock.lock
-
-    urls = [url, *mirrors]
-
-    begin
-      url = urls.shift
-
-      if (domain = Homebrew::EnvConfig.artifact_domain)
-        url = url.sub(%r{^https?://#{GitHubPackages::URL_DOMAIN}/}o, "#{domain.chomp("/")}/")
-        urls = [] if Homebrew::EnvConfig.artifact_domain_no_fallback?
-      end
-
-      ohai "Downloading #{url}"
-
-      use_cached_location = cached_location.exist?
-      use_cached_location = false if version.respond_to?(:latest?) && version.latest?
-
-      resolved_url, _, last_modified, _, is_redirection = begin
-        resolve_url_basename_time_file_size(url, timeout: Utils::Timer.remaining!(end_time))
-      rescue ErrorDuringExecution
-        raise unless use_cached_location
-      end
-
-      # Authorization is no longer valid after redirects
-      meta[:headers]&.delete_if { |header| header.start_with?("Authorization") } if is_redirection
-
-      # The cached location is no longer fresh if Last-Modified is after the file's timestamp
-      use_cached_location = false if cached_location.exist? && last_modified && last_modified > cached_location.mtime
-
-      if use_cached_location
-        puts "Already downloaded: #{cached_location}"
-      else
-        begin
-          _fetch(url:, resolved_url:, timeout: Utils::Timer.remaining!(end_time))
-        rescue ErrorDuringExecution
-          raise CurlDownloadStrategyError, url
-        end
-        cached_location.dirname.mkpath
-        temporary_path.rename(cached_location)
-      end
-
-      symlink_location.dirname.mkpath
-      FileUtils.ln_s cached_location.relative_path_from(symlink_location.dirname), symlink_location, force: true
-    rescue CurlDownloadStrategyError
-      raise if urls.empty?
-
-      puts "Trying a mirror..."
-      retry
-    rescue Timeout::Error => e
-      raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
-    end
-  ensure
-    download_lock&.unlock
-    download_lock&.path&.unlink
+    begin
+      download_lock.lock
+
+      urls = [url, *mirrors]
+
+      begin
+        url = urls.shift
+
+        if (domain = Homebrew::EnvConfig.artifact_domain)
+          url = url.sub(%r{^https?://#{GitHubPackages::URL_DOMAIN}/}o, "#{domain.chomp("/")}/")
+          urls = [] if Homebrew::EnvConfig.artifact_domain_no_fallback?
+        end
+
+        ohai "Downloading #{url}"
+
+        use_cached_location = cached_location.exist?
+        use_cached_location = false if version.respond_to?(:latest?) && version.latest?
+
+        resolved_url, _, last_modified, _, is_redirection = begin
+          resolve_url_basename_time_file_size(url, timeout: Utils::Timer.remaining!(end_time))
+        rescue ErrorDuringExecution
+          raise unless use_cached_location
+        end
+
+        # Authorization is no longer valid after redirects
+        meta[:headers]&.delete_if { |header| header.start_with?("Authorization") } if is_redirection
+
+        # The cached location is no longer fresh if Last-Modified is after the file's timestamp
+        if cached_location.exist? && last_modified && last_modified > cached_location.mtime
+          use_cached_location = false
+        end
+
+        if use_cached_location
+          puts "Already downloaded: #{cached_location}"
+        else
+          begin
+            _fetch(url:, resolved_url:, timeout: Utils::Timer.remaining!(end_time))
+          rescue ErrorDuringExecution
+            raise CurlDownloadStrategyError, url
+          end
+          cached_location.dirname.mkpath
+          temporary_path.rename(cached_location)
+        end
+
+        symlink_location.dirname.mkpath
+        FileUtils.ln_s cached_location.relative_path_from(symlink_location.dirname), symlink_location, force: true
+      rescue CurlDownloadStrategyError
+        raise if urls.empty?
+
+        puts "Trying a mirror..."
+        retry
+      rescue Timeout::Error => e
+        raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
+      end
+    ensure
+      download_lock.unlock(unlink: true)
+    end
   end
 
   def clear_cache
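For orientation (illustration only, not part of the diff): the reworked fetch wraps the whole download in begin ... ensure, so the lock taken via DownloadLock#lock is always released on the way out, and the lock file is only deleted by the instance that actually acquired it, via the new unlock(unlink: true). A minimal standalone Ruby sketch of that shape, using a hypothetical ToyLock in place of DownloadLock:

    require "pathname"
    require "tmpdir"

    # Hypothetical stand-in for DownloadLock, reduced to the shape used in the diff above.
    class ToyLock
      attr_reader :path

      def initialize(path)
        @path = path
        @file = nil
      end

      def lock
        file = File.open(@path, File::RDWR | File::CREAT)
        unless file.flock(File::LOCK_EX | File::LOCK_NB)
          file.close
          raise "another process is already downloading to #{@path}"
        end
        # Only remember the handle once the lock is actually held.
        @file = file
      end

      def unlock(unlink: false)
        return if @file.nil? # no-op unless this instance holds the lock

        @path.unlink if unlink # delete while the flock is still held
        @file.flock(File::LOCK_UN)
        @file.close
        @file = nil
      end
    end

    lock = ToyLock.new(Pathname(Dir.mktmpdir).join("download.lock"))
    begin
      lock.lock
      # ... the actual download would happen here ...
    ensure
      lock.unlock(unlink: true)
    end

If lock raises because another process already holds the lock, the ensure still runs, but unlock is a no-op for an instance that never acquired the lock, so the other process's lock file is left alone.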
@@ -5,6 +5,9 @@ require "fcntl"
 
 # A lock file to prevent multiple Homebrew processes from modifying the same path.
 class LockFile
+  class OpenFileChangedOnDisk < RuntimeError; end
+  private_constant :OpenFileChangedOnDisk
+
   attr_reader :path
 
   sig { params(type: Symbol, locked_path: Pathname).void }
@@ -15,19 +18,61 @@ class LockFile
     @lockfile = nil
   end
 
   sig { void }
   def lock
-    @path.parent.mkpath
-    create_lockfile
-    return if @lockfile.flock(File::LOCK_EX | File::LOCK_NB)
-
-    raise OperationInProgressError, @locked_path
-  end
-
-  def unlock
-    return if @lockfile.nil? || @lockfile.closed?
-
-    @lockfile.flock(File::LOCK_UN)
-    @lockfile.close
+    ignore_interrupts do
+      next if @lockfile.present?
+
+      path.dirname.mkpath
+
+      begin
+        lockfile = begin
+          path.open(File::RDWR | File::CREAT)
+        rescue Errno::EMFILE
+          odie "The maximum number of open files on this system has been reached. " \
+               "Use `ulimit -n` to increase this limit."
+        end
+        lockfile.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
+
+        if lockfile.flock(File::LOCK_EX | File::LOCK_NB)
+          # This prevents a race condition in case the file we locked doesn't exist on disk anymore, e.g.:
+          #
+          # 1. Process A creates and opens the file.
+          # 2. Process A locks the file.
+          # 3. Process B opens the file.
+          # 4. Process A unlinks the file.
+          # 5. Process A unlocks the file.
+          # 6. Process B locks the file.
+          # 7. Process C creates and opens the file.
+          # 8. Process C locks the file.
+          # 9. Process B and C hold locks to files with different inode numbers. 💥
+          if !path.exist? || lockfile.stat.ino != path.stat.ino
+            lockfile.close
+            raise OpenFileChangedOnDisk
+          end
+
+          @lockfile = lockfile
+          next
+        end
+      rescue OpenFileChangedOnDisk
+        retry
+      end
+
+      lockfile.close
+      raise OperationInProgressError, @locked_path
+    end
+  end
+
+  sig { params(unlink: T::Boolean).void }
+  def unlock(unlink: false)
+    ignore_interrupts do
+      next if @lockfile.nil?
+
+      @path.unlink if unlink
+      @lockfile.flock(File::LOCK_UN)
+      @lockfile.close
+      @lockfile = nil
+    end
   end
 
   def with_lock
@@ -36,19 +81,6 @@ class LockFile
   ensure
     unlock
   end
-
-  private
-
-  def create_lockfile
-    return if @lockfile.present? && !@lockfile.closed?
-
-    begin
-      @lockfile = @path.open(File::RDWR | File::CREAT)
-    rescue Errno::EMFILE
-      odie "The maximum number of open files on this system has been reached. Use `ulimit -n` to increase this limit."
-    end
-    @lockfile.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
-  end
 end
 
 # A lock file for a formula.
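Aside (illustration only; hypothetical helper, not Homebrew's API): the numbered comment above explains why lock re-checks the inode after a successful flock. If another process unlinked the path between our open and flock, we would be holding a lock on an orphaned inode while a third process locks a freshly created file with the same name. A standalone sketch of the open / flock / inode-recheck / retry pattern:

    require "pathname"

    # Hypothetical helper: take a non-blocking exclusive flock on `path`, making
    # sure the file we locked is still the file the path points to on disk.
    def acquire_exclusive_lock(path)
      loop do
        file = File.open(path, File::RDWR | File::CREAT)

        unless file.flock(File::LOCK_EX | File::LOCK_NB)
          file.close
          return nil # someone else holds the lock
        end

        # Same re-check as in LockFile#lock: the path must still exist and refer
        # to the inode we locked, otherwise the lock protects a file nobody sees.
        return file if path.exist? && file.stat.ino == path.stat.ino

        file.close # the file was unlinked or replaced in the meantime; start over
      end
    end

    path = Pathname("example.lock")
    if (lock = acquire_exclusive_lock(path))
      # ... critical section ...
      path.unlink               # remove the lock file while still holding the lock
      lock.flock(File::LOCK_UN)
      lock.close
    end

Unlinking before releasing the flock, as unlock(unlink: true) does, means any process that opened the old file either fails the inode re-check and retries, or never sees the stale path at all.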
@@ -5,18 +5,26 @@ require "lock_file"
 RSpec.describe LockFile do
   subject(:lock_file) { described_class.new(:lock, Pathname("foo")) }
 
+  let(:lock_file_copy) { described_class.new(:lock, Pathname("foo")) }
+
   describe "#lock" do
-    it "does not raise an error when already locked" do
+    it "ensures the lock file is created" do
+      expect(lock_file.path).not_to exist
+      lock_file.lock
+      expect(lock_file.path).to exist
+    end
+
+    it "does not raise an error when the same instance is locked multiple times" do
       lock_file.lock
 
       expect { lock_file.lock }.not_to raise_error
     end
 
-    it "raises an error if a lock already exists" do
+    it "raises an error if another instance is already locked" do
       lock_file.lock
 
       expect do
-        described_class.new(:lock, Pathname("foo")).lock
+        lock_file_copy.lock
       end.to raise_error(OperationInProgressError)
     end
   end
@@ -30,7 +38,20 @@ RSpec.describe LockFile do
       lock_file.lock
       lock_file.unlock
 
-      expect { described_class.new(:lock, Pathname("foo")).lock }.not_to raise_error
+      expect { lock_file_copy.lock }.not_to raise_error
+    end
+
+    it "allows deleting a lock file only by the instance that locked it" do
+      lock_file.lock
+      expect(lock_file.path).to exist
+
+      expect(lock_file_copy.path).to exist
+      lock_file_copy.unlock(unlink: true)
+      expect(lock_file_copy.path).to exist
+      expect(lock_file.path).to exist
+
+      lock_file.unlock(unlink: true)
+      expect(lock_file.path).not_to exist
    end
  end
 end
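For readers unfamiliar with the flock(2) behaviour these specs lean on (illustration only, hypothetical names): two independently opened handles on the same path conflict even within a single process, which is why lock_file_copy.lock raises OperationInProgressError while lock_file holds the lock:

    require "pathname"
    require "tmpdir"

    path = Pathname(Dir.mktmpdir).join("foo.lock")

    holder = File.open(path, File::RDWR | File::CREAT) # plays the role of lock_file
    copy   = File.open(path, File::RDWR | File::CREAT) # plays the role of lock_file_copy

    p holder.flock(File::LOCK_EX | File::LOCK_NB) # => 0     (lock acquired)
    p copy.flock(File::LOCK_EX | File::LOCK_NB)   # => false (already held via `holder`)

    holder.flock(File::LOCK_UN)
    p copy.flock(File::LOCK_EX | File::LOCK_NB)   # => 0     (available after unlock)

    [holder, copy].each(&:close)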