Allow specifying timeouts for commands and downloads.
commit 712a95fdd0 (parent 234fae6bd6)

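Every entry point touched below follows the same pattern: the caller passes timeout: as a number of seconds, the method converts it once into an absolute deadline, and each later step is only given whatever budget is still left. A minimal standalone sketch of that idea (the names here are illustrative, not Homebrew's):

# Illustrative only: the deadline-propagation pattern this commit applies everywhere.
require "timeout"

def run_step(name, timeout: nil)
  budget = timeout ? "#{timeout.round(2)}s" : "no limit"
  puts "#{name}: #{budget}"
  sleep 0.1 # stand-in for a download or subprocess that would be capped at `timeout`
end

def fetch_everything(timeout: nil)
  end_time = Time.now + timeout if timeout

  %w[resolve download verify].each do |step|
    remaining = end_time && (end_time - Time.now)
    raise Timeout::Error, "timed out before #{step}" if remaining && remaining <= 0

    run_step(step, timeout: remaining)
  end
end

fetch_everything(timeout: 30)
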
@@ -19,9 +19,10 @@ module Cask
       @quarantine = quarantine
     end

-    def fetch(verify_download_integrity: true)
+    def fetch(quiet: nil, verify_download_integrity: true, timeout: nil)
       downloaded_path = begin
-        downloader.fetch
+        downloader.shutup! if quiet
+        downloader.fetch(timeout: timeout)
         downloader.cached_location
       rescue => e
         error = CaskError.new("Download failed on Cask '#{cask}' with message: #{e}")
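With this hunk the cask download step accepts the new keywords directly. A hypothetical call site, assuming an already-loaded cask object (this only runs inside a Homebrew checkout):

download = Cask::Download.new(cask, quarantine: true)
downloaded_path = download.fetch(quiet: true, timeout: 300) # give up after five minutes
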
@@ -40,8 +41,8 @@ module Cask
       end
     end

-    def time_file_size
-      downloader.resolved_time_file_size
+    def time_file_size(timeout: nil)
+      downloader.resolved_time_file_size(timeout: timeout)
     end

     def clear_cache
@@ -62,13 +62,15 @@ module Cask
       EOS
     end

-    def fetch
+    sig { params(quiet: T.nilable(T::Boolean), timeout: T.nilable(T.any(Integer, Float))).void }
+    def fetch(quiet: nil, timeout: nil)
       odebug "Cask::Installer#fetch"

       verify_has_sha if require_sha? && !force?
-      satisfy_dependencies

-      download
+      download(quiet: quiet, timeout: timeout)
+
+      satisfy_dependencies
     end

     def stage
@@ -162,9 +164,10 @@ module Cask
       @downloader ||= Download.new(@cask, quarantine: quarantine?)
     end

-    sig { returns(Pathname) }
-    def download
-      @download ||= downloader.fetch(verify_download_integrity: @verify_download_integrity)
+    sig { params(quiet: T.nilable(T::Boolean), timeout: T.nilable(T.any(Integer, Float))).returns(Pathname) }
+    def download(quiet: nil, timeout: nil)
+      @download ||= downloader.fetch(quiet: quiet, verify_download_integrity: @verify_download_integrity,
+                                     timeout: timeout)
     end

     def verify_has_sha
@@ -179,7 +182,7 @@ module Cask

     def primary_container
       @primary_container ||= begin
-        downloaded_path = download
+        downloaded_path = download(quiet: true)
         UnpackStrategy.detect(downloaded_path, type: @cask.container&.type, merge_xattrs: true)
       end
     end
@@ -15,6 +15,9 @@ require "utils/curl"

 require "github_packages"

+require "extend/time"
+
+using TimeRemaining
 # @abstract Abstract superclass for all download strategies.
 #
 # @api private
@@ -52,7 +55,7 @@ class AbstractDownloadStrategy
   # Download and cache the resource at {#cached_location}.
   #
   # @api public
-  def fetch; end
+  def fetch(timeout: nil); end

   # Disable any output during downloading.
   #
@@ -176,18 +179,20 @@ class VCSDownloadStrategy < AbstractDownloadStrategy
   # Download and cache the repository at {#cached_location}.
   #
   # @api public
-  def fetch
+  def fetch(timeout: nil)
+    end_time = Time.now + timeout if timeout
+
     ohai "Cloning #{url}"

     if cached_location.exist? && repo_valid?
       puts "Updating #{cached_location}"
-      update
+      update(timeout: timeout)
     elsif cached_location.exist?
       puts "Removing invalid repository from cache"
       clear_cache
-      clone_repo
+      clone_repo(timeout: end_time)
     else
-      clone_repo
+      clone_repo(timeout: end_time)
     end

     version.update_commit(last_commit) if head?
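The VCS base class fixes an absolute end_time once, and the concrete strategies further down pass timeout&.remaining to each command! call, so sequential subprocesses share a single budget instead of each getting the full timeout. A self-contained sketch of that shape (ExampleVCSStrategy and run are made up, not Homebrew code):

class ExampleVCSStrategy
  # Receives an absolute deadline (a Time) or nil, like the clone_repo/update
  # overrides below, and gives each command only the seconds still left.
  def clone_repo(timeout: nil)
    run("example-vcs", "clone", timeout: timeout && [timeout - Time.now, 0].max)
    run("example-vcs", "checkout", timeout: timeout && [timeout - Time.now, 0].max)
  end

  private

  def run(*argv, timeout: nil)
    puts format("%-21s timeout: %s", argv.join(" "), timeout ? timeout.round(2) : "none")
  end
end

ExampleVCSStrategy.new.clone_repo(timeout: Time.now + 60)
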
@@ -232,9 +237,11 @@ class VCSDownloadStrategy < AbstractDownloadStrategy
     raise NotImplementedError
   end

-  def clone_repo; end
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil); end

-  def update; end
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil); end

   def current_revision; end

@@ -353,7 +360,9 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
   # Download and cache the file at {#cached_location}.
   #
   # @api public
-  def fetch
+  def fetch(timeout: nil)
+    end_time = Time.now + timeout if timeout
+
     download_lock = LockFile.new(temporary_path.basename)
     download_lock.lock

@@ -364,7 +373,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy

     ohai "Downloading #{url}"

-    resolved_url, _, url_time, = resolve_url_basename_time_file_size(url)
+    resolved_url, _, url_time, = resolve_url_basename_time_file_size(url, timeout: end_time&.remaining!)

     fresh = if cached_location.exist? && url_time
       url_time <= cached_location.mtime
@@ -378,7 +387,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
       puts "Already downloaded: #{cached_location}"
     else
       begin
-        _fetch(url: url, resolved_url: resolved_url)
+        _fetch(url: url, resolved_url: resolved_url, timeout: end_time&.remaining!)
       rescue ErrorDuringExecution
         raise CurlDownloadStrategyError, url
       end
@@ -395,6 +404,8 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy

       puts "Trying a mirror..."
       retry
+    rescue Timeout::Error => e
+      raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
     end
   ensure
     download_lock&.unlock
@@ -406,19 +417,19 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
     rm_rf(temporary_path)
   end

-  def resolved_time_file_size
-    _, _, time, file_size = resolve_url_basename_time_file_size(url)
+  def resolved_time_file_size(timeout: nil)
+    _, _, time, file_size = resolve_url_basename_time_file_size(url, timeout: timeout)
     [time, file_size]
   end

   private

-  def resolved_url_and_basename
-    resolved_url, basename, = resolve_url_basename_time_file_size(url)
+  def resolved_url_and_basename(timeout: nil)
+    resolved_url, basename, = resolve_url_basename_time_file_size(url, timeout: nil)
     [resolved_url, basename]
   end

-  def resolve_url_basename_time_file_size(url)
+  def resolve_url_basename_time_file_size(url, timeout: nil)
     @resolved_info_cache ||= {}
     return @resolved_info_cache[url] if @resolved_info_cache.include?(url)

@@ -426,7 +437,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
       url = url.sub(%r{^((ht|f)tps?://)?}, "#{domain.chomp("/")}/")
     end

-    out, _, status = curl_output("--location", "--silent", "--head", "--request", "GET", url.to_s)
+    out, _, status = curl_output("--location", "--silent", "--head", "--request", "GET", url.to_s, timeout: timeout)

     lines = status.success? ? out.lines.map(&:chomp) : []

@@ -485,7 +496,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
     @resolved_info_cache[url] = [redirect_url, basename, time, file_size]
   end

-  def _fetch(url:, resolved_url:)
+  def _fetch(url:, resolved_url:, timeout:)
     ohai "Downloading from #{resolved_url}" if url != resolved_url

     if Homebrew::EnvConfig.no_insecure_redirect? &&
@@ -494,7 +505,7 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
       raise CurlDownloadStrategyError, url
     end

-    curl_download resolved_url, to: temporary_path
+    curl_download resolved_url, to: temporary_path, timeout: timeout
   end

   # Curl options to be always passed to curl,
@@ -567,9 +578,9 @@ class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
     @combined_mirrors = [*@mirrors, *backup_mirrors]
   end

-  def resolve_url_basename_time_file_size(url)
+  def resolve_url_basename_time_file_size(url, timeout: nil)
     if url == self.url
-      super("#{apache_mirrors["preferred"]}#{apache_mirrors["path_info"]}")
+      super("#{apache_mirrors["preferred"]}#{apache_mirrors["path_info"]}", timeout: timeout)
     else
       super
     end
@@ -592,7 +603,7 @@ end
 class CurlPostDownloadStrategy < CurlDownloadStrategy
   private

-  def _fetch(url:, resolved_url:)
+  def _fetch(url:, resolved_url:, timeout:)
     args = if meta.key?(:data)
       escape_data = ->(d) { ["-d", URI.encode_www_form([d])] }
       [url, *meta[:data].flat_map(&escape_data)]
@@ -601,7 +612,7 @@ class CurlPostDownloadStrategy < CurlDownloadStrategy
       query.nil? ? [url, "-X", "POST"] : [url, "-d", query]
     end

-    curl_download(*args, to: temporary_path)
+    curl_download(*args, to: temporary_path, timeout: timeout)
   end
 end

@@ -641,7 +652,7 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
   # Download and cache the repository at {#cached_location}.
   #
   # @api public
-  def fetch
+  def fetch(timeout: nil)
     if @url.chomp("/") != repo_url || !silent_command("svn", args: ["switch", @url, cached_location]).success?
       clear_cache
     end
@@ -682,7 +693,11 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
     end
   end

-  def fetch_repo(target, url, revision = nil, ignore_externals: false)
+  sig {
+    params(target: Pathname, url: String, revision: T.nilable(String), ignore_externals: T::Boolean,
+           timeout: T.nilable(Time)).void
+  }
+  def fetch_repo(target, url, revision = nil, ignore_externals: false, timeout: nil)
     # Use "svn update" when the repository already exists locally.
     # This saves on bandwidth and will have a similar effect to verifying the
     # cache as it will make any changes to get the right revision.
@@ -702,9 +717,9 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
     end

     if target.directory?
-      command!("svn", args: ["update", *args], chdir: target.to_s)
+      command! "svn", args: ["update", *args], chdir: target.to_s, timeout: timeout&.remaining
     else
-      command!("svn", args: ["checkout", url, target, *args])
+      command! "svn", args: ["checkout", url, target, *args], timeout: timeout&.remaining
     end
   end

@@ -717,20 +732,22 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
     (cached_location/".svn").directory?
   end

-  def clone_repo
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
     case @ref_type
     when :revision
-      fetch_repo cached_location, @url, @ref
+      fetch_repo cached_location, @url, @ref, timeout: timeout
     when :revisions
       # nil is OK for main_revision, as fetch_repo will then get latest
       main_revision = @ref[:trunk]
-      fetch_repo cached_location, @url, main_revision, ignore_externals: true
+      fetch_repo cached_location, @url, main_revision, ignore_externals: true, timeout: timeout

       externals do |external_name, external_url|
-        fetch_repo cached_location/external_name, external_url, @ref[external_name], ignore_externals: true
+        fetch_repo cached_location/external_name, external_url, @ref[external_name], ignore_externals: true,
+                   timeout: timeout
       end
     else
-      fetch_repo cached_location, @url
+      fetch_repo cached_location, @url, timeout: timeout
     end
   end
   alias update clone_repo
@@ -779,12 +796,13 @@ class GitDownloadStrategy < VCSDownloadStrategy
     0
   end

-  def update
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil)
     config_repo
-    update_repo
-    checkout
+    update_repo(timeout: timeout)
+    checkout(timeout: timeout)
     reset
-    update_submodules if submodules?
+    update_submodules(timeout: timeout) if submodules?
   end

   def shallow_clone?
@@ -845,6 +863,7 @@ class GitDownloadStrategy < VCSDownloadStrategy
     end
   end

+  sig { void }
   def config_repo
     command! "git",
              args: ["config", "remote.origin.url", @url],
@@ -857,35 +876,42 @@ class GitDownloadStrategy < VCSDownloadStrategy
              chdir: cached_location
   end

-  def update_repo
+  sig { params(timeout: T.nilable(Time)).void }
+  def update_repo(timeout: nil)
     return if @ref_type != :branch && ref?

     if !shallow_clone? && shallow_dir?
       command! "git",
-               args: ["fetch", "origin", "--unshallow"],
-               chdir: cached_location
+               args:    ["fetch", "origin", "--unshallow"],
+               chdir:   cached_location,
+               timeout: timeout&.remaining
     else
       command! "git",
-               args: ["fetch", "origin"],
-               chdir: cached_location
+               args:    ["fetch", "origin"],
+               chdir:   cached_location,
+               timeout: timeout&.remaining
     end
   end

-  def clone_repo
-    command! "git", args: clone_args
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
+    command! "git", args: clone_args, timeout: timeout&.remaining

     command! "git",
-             args: ["config", "homebrew.cacheversion", cache_version],
-             chdir: cached_location
-    checkout
-    update_submodules if submodules?
+             args:    ["config", "homebrew.cacheversion", cache_version],
+             chdir:   cached_location,
+             timeout: timeout&.remaining
+    checkout(timeout: timeout)
+    update_submodules(timeout: timeout) if submodules?
   end

-  def checkout
+  sig { params(timeout: T.nilable(Time)).void }
+  def checkout(timeout: nil)
     ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
-    command! "git", args: ["checkout", "-f", @ref, "--"], chdir: cached_location
+    command! "git", args: ["checkout", "-f", @ref, "--"], chdir: cached_location, timeout: timeout&.remaining
   end

+  sig { void }
   def reset
     ref = case @ref_type
     when :branch
@@ -899,13 +925,16 @@ class GitDownloadStrategy < VCSDownloadStrategy
              chdir: cached_location
   end

-  def update_submodules
+  sig { params(timeout: T.nilable(Time)).void }
+  def update_submodules(timeout: nil)
     command! "git",
-             args: ["submodule", "foreach", "--recursive", "git submodule sync"],
-             chdir: cached_location
+             args:    ["submodule", "foreach", "--recursive", "git submodule sync"],
+             chdir:   cached_location,
+             timeout: timeout&.remaining
     command! "git",
-             args: ["submodule", "update", "--init", "--recursive"],
-             chdir: cached_location
+             args:    ["submodule", "update", "--init", "--recursive"],
+             chdir:   cached_location,
+             timeout: timeout&.remaining
     fix_absolute_submodule_gitdir_references!
   end

@@ -1057,19 +1086,23 @@ class CVSDownloadStrategy < VCSDownloadStrategy
     "-Q" unless verbose?
   end

-  def clone_repo
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
     # Login is only needed (and allowed) with pserver; skip for anoncvs.
-    command! "cvs", args: [*quiet_flag, "-d", @url, "login"] if @url.include? "pserver"
+    command! "cvs", args: [*quiet_flag, "-d", @url, "login"], timeout: timeout&.remaining if @url.include? "pserver"

     command! "cvs",
-             args: [*quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module],
-             chdir: cached_location.dirname
+             args:    [*quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module],
+             chdir:   cached_location.dirname,
+             timeout: timeout&.remaining
   end

-  def update
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil)
     command! "cvs",
-             args: [*quiet_flag, "update"],
-             chdir: cached_location
+             args:    [*quiet_flag, "update"],
+             chdir:   cached_location,
+             timeout: timeout&.remaining
   end

   def split_url(in_url)
@@ -1121,12 +1154,14 @@ class MercurialDownloadStrategy < VCSDownloadStrategy
     (cached_location/".hg").directory?
   end

-  def clone_repo
-    command! "hg", args: ["clone", @url, cached_location]
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
+    command! "hg", args: ["clone", @url, cached_location], timeout: timeout&.remaining
   end

-  def update
-    command! "hg", args: ["--cwd", cached_location, "pull", "--update"]
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil)
+    command! "hg", args: ["--cwd", cached_location, "pull", "--update"], timeout: timeout&.remaining

     update_args = if @ref_type && @ref
       ohai "Checking out #{@ref_type} #{@ref}"
@@ -1135,7 +1170,7 @@ class MercurialDownloadStrategy < VCSDownloadStrategy
       ["--clean"]
     end

-    command! "hg", args: ["--cwd", cached_location, "update", *update_args]
+    command! "hg", args: ["--cwd", cached_location, "update", *update_args], timeout: timeout&.remaining
   end
 end

@@ -1184,16 +1219,20 @@ class BazaarDownloadStrategy < VCSDownloadStrategy
     (cached_location/".bzr").directory?
   end

-  def clone_repo
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
     # "lightweight" means history-less
     command! "bzr",
-             args: ["checkout", "--lightweight", @url, cached_location]
+             args:    ["checkout", "--lightweight", @url, cached_location],
+             timeout: timeout&.remaining
   end

-  def update
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil)
     command! "bzr",
-             args: ["update"],
-             chdir: cached_location
+             args:    ["update"],
+             chdir:   cached_location,
+             timeout: timeout&.remaining
   end
 end

@@ -1236,12 +1275,14 @@ class FossilDownloadStrategy < VCSDownloadStrategy
     "fossil"
   end

-  def clone_repo
-    silent_command!("fossil", args: ["clone", @url, cached_location])
+  sig { params(timeout: T.nilable(Time)).void }
+  def clone_repo(timeout: nil)
+    silent_command! "fossil", args: ["clone", @url, cached_location], timeout: timeout&.remaining
   end

-  def update
-    silent_command!("fossil", args: ["pull", "-R", cached_location])
+  sig { params(timeout: T.nilable(Time)).void }
+  def update(timeout: nil)
+    silent_command! "fossil", args: ["pull", "-R", cached_location], timeout: timeout&.remaining
   end
 end

Library/Homebrew/extend/time.rb (new file, 18 lines)
@@ -0,0 +1,18 @@
+# typed: false
+# frozen_string_literal: true
+
+module TimeRemaining
+  refine Time do
+    def remaining
+      [0, self - Time.now].max
+    end
+
+    def remaining!
+      r = remaining
+
+      raise Timeout::Error if r <= 0
+
+      r
+    end
+  end
+end
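This refinement is the whole budget API used above: remaining clamps to zero once the deadline has passed, and remaining! raises Timeout::Error instead of returning zero. A standalone usage sketch (the refinement is repeated so the snippet runs on its own; refinements are lexically scoped, which is why each file in this commit adds its own "using TimeRemaining"):

require "timeout"

module TimeRemaining
  refine Time do
    def remaining
      [0, self - Time.now].max
    end

    def remaining!
      r = remaining
      raise Timeout::Error if r <= 0

      r
    end
  end
end

using TimeRemaining

end_time = Time.now + 2
sleep 0.5
puts end_time.remaining  # roughly 1.5
puts end_time.remaining! # same value here, but raises Timeout::Error once the deadline passes
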
@@ -28,4 +28,16 @@ when "HashValidator"
       def assert_valid_keys!(*valid_keys); end
     end
   RUBY
+when "TimeRemaining"
+  puts <<-RUBY
+    # typed: strict
+
+    class ::Time
+      sig { returns(T.any(Integer, Float)) }
+      def remaining; end
+
+      sig { returns(T.any(Integer, Float)) }
+      def remaining!; end
+    end
+  RUBY
 end
@@ -10,13 +10,15 @@ require "extend/io"
 require "extend/predicable"
 require "extend/hash_validator"

+require "extend/time"
+
 # Class for running sub-processes and capturing their output and exit status.
 #
 # @api private
 class SystemCommand
   extend T::Sig

   using HashValidator
+  using TimeRemaining

   # Helper functions for calling {SystemCommand.run}.
   module Mixin
@@ -78,10 +80,24 @@ class SystemCommand
       verbose: T.nilable(T::Boolean),
       secrets: T.any(String, T::Array[String]),
       chdir: T.any(String, Pathname),
+      timeout: T.nilable(T.any(Integer, Float)),
     ).void
   }
-  def initialize(executable, args: [], sudo: false, env: {}, input: [], must_succeed: false,
-                 print_stdout: false, print_stderr: true, debug: nil, verbose: nil, secrets: [], chdir: T.unsafe(nil))
+  def initialize(
+    executable,
+    args: [],
+    sudo: false,
+    env: {},
+    input: [],
+    must_succeed: false,
+    print_stdout: false,
+    print_stderr: true,
+    debug: nil,
+    verbose: false,
+    secrets: [],
+    chdir: T.unsafe(nil),
+    timeout: nil
+  )
     require "extend/ENV"
     @executable = executable
     @args = args
|
||||
@verbose = verbose
|
||||
@secrets = (Array(secrets) + ENV.sensitive_environment.values).uniq
|
||||
@chdir = chdir
|
||||
@timeout = timeout
|
||||
end
|
||||
|
||||
sig { returns(T::Array[String]) }
|
||||
@@ -197,10 +214,14 @@ class SystemCommand

   sig { params(sources: T::Array[IO], _block: T.proc.params(type: Symbol, line: String).void).void }
   def each_line_from(sources, &_block)
-    loop do
-      readable_sources, = IO.select(sources)
+    end_time = Time.now + @timeout if @timeout

-      readable_sources = T.must(readable_sources).reject(&:eof?)
+    loop do
+      select_timeout = end_time&.remaining!
+      readable_sources, = IO.select(sources, [], [], select_timeout)
+      raise Timeout::Error if readable_sources.nil?
+
+      readable_sources = readable_sources.reject(&:eof?)

       break if readable_sources.empty?

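The reworked loop leans on IO.select's timeout argument: when a timeout is given, select returns nil if nothing became readable in time, which the new code converts into Timeout::Error. A minimal demonstration of that behaviour:

r, w = IO.pipe

readable, = IO.select([r], [], [], 0.2)
p readable # nil: nothing was written within 0.2s

w.write("hello\n")
readable, = IO.select([r], [], [], 0.2)
p readable.first.gets # "hello\n"
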
@@ -3,11 +3,15 @@

 require "open3"

+require "extend/time"
+
 module Utils
   # Helper function for interacting with `curl`.
   #
   # @api private
   module Curl
+    using TimeRemaining
+
     module_function

     def curl_executable
@@ -49,14 +53,20 @@ module Utils
         args << "--silent" unless $stdout.tty?
       end

+      args << "--connect-timeout" << connect_timeout.round(3) if options[:connect_timeout]
+      args << "--max-time" << max_time.round(3) if options[:max_time]
       args << "--retry" << Homebrew::EnvConfig.curl_retries unless options[:retry] == false
+      args << "--retry-max-time" << retry_max_time.round if options[:retry_max_time]

       args + extra_args
     end

     def curl_with_workarounds(
-      *args, secrets: nil, print_stdout: nil, print_stderr: nil, debug: nil, verbose: nil, env: {}, **options
+      *args,
+      secrets: nil, print_stdout: nil, print_stderr: nil, debug: nil, verbose: nil, env: {}, timeout: nil, **options
     )
+      end_time = Time.now + timeout if timeout
+
       command_options = {
         secrets: secrets,
         print_stdout: print_stdout,
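The added curl_args lines map the Ruby-side budget onto curl's own options: --connect-timeout caps the connection phase, --max-time caps the whole transfer, and --retry-max-time caps the total time spent retrying. A standalone sketch of what those lines build (not Homebrew code; the option values are invented):

options = { connect_timeout: 5.0, max_time: 30.0, retry_max_time: 60 }

args = ["--fail", "--location"]
args << "--connect-timeout" << options[:connect_timeout].round(3) if options[:connect_timeout]
args << "--max-time" << options[:max_time].round(3) if options[:max_time]
args << "--retry-max-time" << options[:retry_max_time].round if options[:retry_max_time]

puts ["curl", *args.map(&:to_s)].join(" ")
# curl --fail --location --connect-timeout 5.0 --max-time 30.0 --retry-max-time 60
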
@@ -68,14 +78,22 @@ module Utils
       # SSL_CERT_FILE can be incorrectly set by users or portable-ruby and screw
       # with SSL downloads so unset it here.
       result = system_command curl_executable,
-                              args: curl_args(*args, **options),
-                              env:  { "SSL_CERT_FILE" => nil }.merge(env),
+                              args:    curl_args(*args, **options),
+                              env:     { "SSL_CERT_FILE" => nil }.merge(env),
+                              timeout: end_time&.remaining,
                               **command_options

       return result if result.success? || !args.exclude?("--http1.1")

+      raise Timeout::Error, result.stderr.chomp if result.status.exitstatus == 28
+
       # Error in the HTTP2 framing layer
-      return curl_with_workarounds(*args, "--http1.1", **command_options, **options) if result.status.exitstatus == 16
+      if result.status.exitstatus == 16
+        return curl_with_workarounds(
+          *args, "--http1.1",
+          timeout: end_time&.remaining, **command_options, **options
+        )
+      end

       # This is a workaround for https://github.com/curl/curl/issues/1618.
       if result.status.exitstatus == 56 # Unexpected EOF