Merge pull request #18809 from reitermarkus/download-lock-interrupt

Fix incorrect download locking.
Mike McQuaid 2024-11-25 08:36:58 +00:00 committed by GitHub
commit cf75c48951
3 changed files with 128 additions and 72 deletions

Library/Homebrew/download_strategy.rb

@@ -392,6 +392,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
     end_time = Time.now + timeout if timeout

     download_lock = DownloadLock.new(temporary_path)
+    begin
       download_lock.lock

       urls = [url, *mirrors]
@@ -419,7 +420,9 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
       meta[:headers]&.delete_if { |header| header.start_with?("Authorization") } if is_redirection

       # The cached location is no longer fresh if Last-Modified is after the file's timestamp
-      use_cached_location = false if cached_location.exist? && last_modified && last_modified > cached_location.mtime
+      if cached_location.exist? && last_modified && last_modified > cached_location.mtime
+        use_cached_location = false
+      end

       if use_cached_location
         puts "Already downloaded: #{cached_location}"
@@ -444,8 +447,8 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
         raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
       end
     ensure
-      download_lock&.unlock
-      download_lock&.path&.unlink
+      download_lock.unlock(unlink: true)
+    end
   end

   def clear_cache
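
For readers skimming the diff: the net effect in CurlDownloadStrategy#fetch is that the lock is now released, and its file unlinked, in a single ensure that only the lock holder executes. A minimal sketch of the resulting flow (simplified, not the full method; DownloadLock is a LockFile variant):

    def fetch(timeout: nil)
      download_lock = DownloadLock.new(temporary_path)
      begin
        download_lock.lock
        # ... perform the download into temporary_path ...
      ensure
        # Runs on success, error, or interrupt: release the flock and delete
        # the lock file in one call, instead of the old two-step
        # `unlock` + `path.unlink` that could delete a file another process
        # had already re-created and locked.
        download_lock.unlock(unlink: true)
      end
    end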

Library/Homebrew/lock_file.rb

@@ -5,6 +5,9 @@ require "fcntl"

 # A lock file to prevent multiple Homebrew processes from modifying the same path.
 class LockFile
+  class OpenFileChangedOnDisk < RuntimeError; end
+  private_constant :OpenFileChangedOnDisk
+
   attr_reader :path

   sig { params(type: Symbol, locked_path: Pathname).void }
@@ -15,19 +18,61 @@ class LockFile
     @lockfile = nil
   end

+  sig { void }
   def lock
-    @path.parent.mkpath
-    create_lockfile
-    return if @lockfile.flock(File::LOCK_EX | File::LOCK_NB)
-
-    raise OperationInProgressError, @locked_path
+    ignore_interrupts do
+      next if @lockfile.present?
+
+      path.dirname.mkpath
+
+      begin
+        lockfile = begin
+          path.open(File::RDWR | File::CREAT)
+        rescue Errno::EMFILE
+          odie "The maximum number of open files on this system has been reached. " \
+               "Use `ulimit -n` to increase this limit."
+        end
+        lockfile.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
+
+        if lockfile.flock(File::LOCK_EX | File::LOCK_NB)
+          # This prevents a race condition in case the file we locked doesn't exist on disk anymore, e.g.:
+          #
+          # 1. Process A creates and opens the file.
+          # 2. Process A locks the file.
+          # 3. Process B opens the file.
+          # 4. Process A unlinks the file.
+          # 5. Process A unlocks the file.
+          # 6. Process B locks the file.
+          # 7. Process C creates and opens the file.
+          # 8. Process C locks the file.
+          # 9. Process B and C hold locks to files with different inode numbers. 💥
+          if !path.exist? || lockfile.stat.ino != path.stat.ino
+            lockfile.close
+            raise OpenFileChangedOnDisk
+          end
+
+          @lockfile = lockfile
+          next
+        end
+      rescue OpenFileChangedOnDisk
+        retry
+      end
+
+      lockfile.close
+      raise OperationInProgressError, @locked_path
+    end
   end

-  def unlock
-    return if @lockfile.nil? || @lockfile.closed?
-
-    @lockfile.flock(File::LOCK_UN)
-    @lockfile.close
+  sig { params(unlink: T::Boolean).void }
+  def unlock(unlink: false)
+    ignore_interrupts do
+      next if @lockfile.nil?
+
+      @path.unlink if unlink
+      @lockfile.flock(File::LOCK_UN)
+      @lockfile.close
+      @lockfile = nil
+    end
   end

   def with_lock
@@ -36,19 +81,6 @@ class LockFile
   ensure
     unlock
   end
-
-  private
-
-  def create_lockfile
-    return if @lockfile.present? && !@lockfile.closed?
-
-    begin
-      @lockfile = @path.open(File::RDWR | File::CREAT)
-    rescue Errno::EMFILE
-      odie "The maximum number of open files on this system has been reached. Use `ulimit -n` to increase this limit."
-    end
-    @lockfile.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
-  end
 end

 # A lock file for a formula.
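
The numbered comment above is the heart of the fix. As a standalone illustration of the same open → flock → verify-inode → retry pattern, here is a plain-Ruby sketch with no Homebrew helpers (ignore_interrupts, odie) and a hypothetical helper name:

    require "fcntl"

    # Returns a locked File handle for `path`, or nil if another process
    # holds the lock. Hypothetical helper, not Homebrew API.
    def acquire_lock(path)
      loop do
        file = File.open(path, File::RDWR | File::CREAT)
        file.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC) # don't leak the fd into children

        unless file.flock(File::LOCK_EX | File::LOCK_NB)
          file.close
          return nil # lock is held elsewhere: report "operation in progress"
        end

        # The race check: between our open(2) and flock(2), the path may have
        # been unlinked and re-created, leaving us with a lock on an orphaned
        # inode that no other process can see.
        return file if File.exist?(path) && file.stat.ino == File.stat(path).ino

        file.close # we locked a stale inode: retry with a fresh open
      end
    end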

Library/Homebrew/test/lock_file_spec.rb

@@ -5,18 +5,26 @@ require "lock_file"
 RSpec.describe LockFile do
   subject(:lock_file) { described_class.new(:lock, Pathname("foo")) }

+  let(:lock_file_copy) { described_class.new(:lock, Pathname("foo")) }
+
   describe "#lock" do
-    it "does not raise an error when already locked" do
+    it "ensures the lock file is created" do
+      expect(lock_file.path).not_to exist
+      lock_file.lock
+      expect(lock_file.path).to exist
+    end
+
+    it "does not raise an error when the same instance is locked multiple times" do
       lock_file.lock
       expect { lock_file.lock }.not_to raise_error
     end

-    it "raises an error if a lock already exists" do
+    it "raises an error if another instance is already locked" do
       lock_file.lock
       expect do
-        described_class.new(:lock, Pathname("foo")).lock
+        lock_file_copy.lock
       end.to raise_error(OperationInProgressError)
     end
   end
@@ -30,7 +38,20 @@ RSpec.describe LockFile do
       lock_file.lock
       lock_file.unlock

-      expect { described_class.new(:lock, Pathname("foo")).lock }.not_to raise_error
+      expect { lock_file_copy.lock }.not_to raise_error
+    end
+
+    it "allows deleting a lock file only by the instance that locked it" do
+      lock_file.lock
+      expect(lock_file.path).to exist
+      expect(lock_file_copy.path).to exist
+
+      lock_file_copy.unlock(unlink: true)
+      expect(lock_file_copy.path).to exist
+      expect(lock_file.path).to exist
+
+      lock_file.unlock(unlink: true)
+      expect(lock_file.path).not_to exist
     end
   end
 end
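
Taken together, the new specs pin down the intended semantics. Roughly (hypothetical session; the lock file path is derived internally from the name and type, details elided):

    a = LockFile.new(:lock, Pathname("foo"))
    b = LockFile.new(:lock, Pathname("foo"))

    a.lock                  # creates the lock file and takes an exclusive flock
    # b.lock                # would raise OperationInProgressError while a holds it
    b.unlock(unlink: true)  # no-op: b never acquired the lock, so nothing is released
    a.path.exist?           #=> true — the file survived b's unlink attempt
    a.unlock(unlink: true)  # releases the flock and unlinks the lock file
    a.path.exist?           #=> false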