Resolve URL to get real file extension.

Markus Reiter 2018-08-10 04:11:54 +02:00
parent 47d3bbee1c
commit fbcaa8c85a
11 changed files with 372 additions and 128 deletions

View File

@@ -1,4 +1,5 @@
require "fileutils"
require "hbc/cache"
require "hbc/quarantine"
require "hbc/verify"
@@ -19,11 +20,6 @@ module Hbc
downloaded_path
end
private
attr_reader :force
attr_accessor :downloaded_path
def downloader
@downloader ||= begin
strategy = DownloadStrategyDetector.detect(cask.url.to_s, cask.url.using)
@@ -31,6 +27,11 @@ module Hbc
end
end
private
attr_reader :force
attr_accessor :downloaded_path
def clear_cache
downloader.clear_cache if force || cask.version.latest?
end
@@ -39,7 +40,9 @@ module Hbc
downloader.fetch
@downloaded_path = downloader.cached_location
rescue StandardError => e
raise CaskError, "Download failed on Cask '#{cask}' with message: #{e}"
error = CaskError.new("Download failed on Cask '#{cask}' with message: #{e}")
error.set_backtrace e.backtrace
raise error
end
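The rewritten rescue wraps the failure in a CaskError but copies the original backtrace onto it, so the trace still points at the real failure site rather than at this re-raise. A minimal sketch of the pattern with generic names (not Homebrew API):

begin
  raise "connection reset"
rescue StandardError => e
  wrapped = RuntimeError.new("Download failed: #{e}")
  # Without set_backtrace, the trace would begin at the raise below.
  wrapped.set_backtrace(e.backtrace)
  raise wrapped
end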
def quarantine

View File

@@ -48,7 +48,7 @@ module CleanupRefinement
end
def stale?(scrub = false)
return false unless file?
return false unless file? || (symlink? && resolved_path.file?)
stale_formula?(scrub) || stale_cask?(scrub)
end
@@ -209,6 +209,19 @@ module Homebrew
end
end
def cleanup_unreferenced_downloads
return if dry_run?
return unless (cache/"downloads").directory?
downloads = (cache/"downloads").children.reject { |path| path.incomplete? } # rubocop:disable Style/SymbolProc
referenced_downloads = [cache, cache/"Cask"].select(&:directory?)
.flat_map(&:children)
.select(&:symlink?)
.map(&:resolved_path)
(downloads - referenced_downloads).each(&:unlink)
end
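The symlinks left in the top-level cache directories act as the reference graph here: any file under downloads/ that no symlink resolves to is considered orphaned and unlinked. A sketch of the layout this method walks (names and hashes illustrative):

# HOMEBREW_CACHE/
#   foo--1.2.3.tar.gz -> downloads/<sha256>--foo-1.2.3.tar.gz   # keeps its target alive
#   downloads/
#     <sha256>--foo-1.2.3.tar.gz          # resolved by a symlink above, kept
#     <sha256>--bar-0.9.tar.gz            # referenced by nothing, unlinked
#     <sha256>--baz-2.0.tar.gz.incomplete # excluded up front by the incomplete? reject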
def cleanup_cache(entries = nil)
entries ||= [cache, cache/"Cask"].select(&:directory?).flat_map(&:children)
@@ -217,7 +230,14 @@ module Homebrew
next cleanup_path(path) { FileUtils.rm_rf path } if path.nested_cache?
if path.prune?(days)
if path.file?
if path.symlink?
resolved_path = path.resolved_path
cleanup_path(path) do
resolved_path.unlink if resolved_path.exist?
path.unlink
end
elsif path.file?
cleanup_path(path) { path.unlink }
elsif path.directory? && path.to_s.include?("--")
cleanup_path(path) { FileUtils.rm_rf path }
@@ -227,6 +247,8 @@ module Homebrew
next cleanup_path(path) { path.unlink } if path.stale?(scrub?)
end
cleanup_unreferenced_downloads
end
def cleanup_path(path)

View File

@@ -7,6 +7,7 @@ require "migrator"
require "formulary"
require "descriptions"
require "cleanup"
require "hbc/download"
module Homebrew
module_function
@@ -112,6 +113,7 @@ module Homebrew
migrate_legacy_cache_if_necessary
migrate_cache_entries_to_double_dashes(initial_version)
migrate_cache_entries_to_symlinks(initial_version)
migrate_legacy_keg_symlinks_if_necessary
if !updated
@@ -206,6 +208,36 @@ module Homebrew
end
end
def formula_resources(formula)
specs = [formula.stable, formula.devel, formula.head].compact
[*formula.bottle&.resource] + specs.flat_map do |spec|
[
spec,
*spec.resources.values,
*spec.patches.select(&:external?).map(&:resource),
]
end
end
def parse_extname(url)
uri_path = if URI::DEFAULT_PARSER.make_regexp =~ url
uri = URI(url)
uri.query ? "#{uri.path}?#{uri.query}" : uri.path
else
url
end
# Given a URL like https://example.com/download.php?file=foo-1.0.tar.gz
# the extension we want is ".tar.gz", not ".php".
Pathname.new(uri_path).ascend do |path|
ext = path.extname[/[^?&]+/]
return ext if ext
end
nil
end
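A quick illustration of the helper above, following the comment in its body (URLs illustrative; results assume Homebrew's double-extension-aware Pathname#extname):

parse_extname("https://example.com/download.php?file=foo-1.0.tar.gz")
#=> ".tar.gz" (not ".php"; the double extension survives)

parse_extname("https://example.com/foo.c?thing=wad")
#=> ".c" (the [/[^?&]+/] slice drops the query string from the extension)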
def migrate_cache_entries_to_double_dashes(initial_version)
return if initial_version && initial_version > "1.7.1"
@@ -214,25 +246,16 @@ module Homebrew
ohai "Migrating cache entries..."
Formula.each do |formula|
specs = [*formula.stable, *formula.devel, *formula.head]
resources = [*formula.bottle&.resource] + specs.flat_map do |spec|
[
spec,
*spec.resources.values,
*spec.patches.select(&:external?).map(&:resource),
]
end
resources.each do |resource|
formula_resources(formula).each do |resource|
downloader = resource.downloader
url = downloader.url
name = resource.download_name
version = resource.version
new_location = downloader.cached_location
extname = new_location.extname
old_location = downloader.cached_location.dirname/"#{name}-#{version}#{extname}"
extname = parse_extname(url)
old_location = downloader.cache/"#{name}-#{version}#{extname}"
new_location = downloader.cache/"#{name}--#{version}#{extname}"
next unless old_location.file?
@@ -253,6 +276,86 @@ module Homebrew
end
end
def migrate_cache_entries_to_symlinks(initial_version)
return if initial_version && initial_version > "1.7.2"
return if ENV.key?("HOMEBREW_DISABLE_LOAD_FORMULA")
ohai "Migrating cache entries..."
load_formula = lambda do |formula|
begin
Formula[formula]
rescue FormulaUnavailableError
nil
end
end
load_cask = lambda do |cask|
begin
Hbc::CaskLoader.load(cask)
rescue Hbc::CaskUnavailableError
nil
end
end
formula_downloaders = if HOMEBREW_CACHE.directory?
HOMEBREW_CACHE.children
.select(&:file?)
.map { |child| child.basename.to_s.sub(/\-\-.*/, "") }
.uniq
.map(&load_formula)
.compact
.flat_map { |formula| formula_resources(formula) }
.map { |resource| [resource.downloader, resource.download_name, resource.version] }
else
[]
end
cask_downloaders = if (HOMEBREW_CACHE/"Cask").directory?
(HOMEBREW_CACHE/"Cask").children
.map { |child| child.basename.to_s.sub(/\-\-.*/, "") }
.uniq
.map(&load_cask)
.compact
.map { |cask| [Hbc::Download.new(cask).downloader, cask.token, cask.version] }
else
[]
end
downloaders = formula_downloaders + cask_downloaders
downloaders.each do |downloader, name, version|
next unless downloader.respond_to?(:symlink_location)
url = downloader.url
extname = parse_extname(url)
old_location = downloader.cache/"#{name}--#{version}#{extname}"
next unless old_location.file?
new_symlink_location = downloader.symlink_location
new_location = downloader.cached_location
if new_location.exist? && new_symlink_location.symlink?
begin
FileUtils.rm_rf old_location unless old_location == new_symlink_location
rescue Errno::EACCES
opoo "Could not remove #{old_location}, please do so manually."
end
else
begin
new_location.dirname.mkpath
FileUtils.mv old_location, new_location unless new_location.exist?
symlink_target = new_location.relative_path_from(new_symlink_location.dirname)
new_symlink_location.dirname.mkpath
FileUtils.ln_s symlink_target, new_symlink_location, force: true
rescue Errno::EACCES
opoo "Could not move #{old_location} to #{new_location}, please do so manually."
end
end
end
end
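Concretely, for the formula exercised by the new spec at the bottom of this commit, the migration turns the flat cache entry into a content-addressed file plus a compatibility symlink:

# Before:
#   HOMEBREW_CACHE/foo--1.2.3.tar.gz   (regular file)
#
# After:
#   HOMEBREW_CACHE/downloads/5994e3a27baa3f448a001fb071ab1f0bf25c87aebcb254d91a6d0b02f46eef86--foo-1.2.3.tar.gz
#   HOMEBREW_CACHE/foo--1.2.3.tar.gz   (now a symlink to the downloads/ file above)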
def migrate_legacy_repository_if_necessary
return unless HOMEBREW_PREFIX.to_s == "/usr/local"
return unless HOMEBREW_REPOSITORY.to_s == "/usr/local"

View File

@@ -37,6 +37,7 @@ module Homebrew
ENV.delete("HOMEBREW_COLOR")
ENV.delete("HOMEBREW_NO_COLOR")
ENV.delete("HOMEBREW_VERBOSE")
ENV.delete("HOMEBREW_DEBUG")
ENV.delete("VERBOSE")
ENV.delete("HOMEBREW_CASK_OPTS")
ENV.delete("HOMEBREW_TEMP")

View File

@@ -2,6 +2,8 @@ require "json"
require "rexml/document"
require "time"
require "unpack_strategy"
require "lazy_object"
require "cgi"
class AbstractDownloadStrategy
extend Forwardable
@@ -14,7 +16,7 @@ class AbstractDownloadStrategy
end
end
attr_reader :cached_location
attr_reader :cache, :cached_location, :url
attr_reader :meta, :name, :version, :shutup
private :meta, :name, :version, :shutup
@@ -51,7 +53,7 @@ class AbstractDownloadStrategy
UnpackStrategy.detect(cached_location,
extension_only: true,
ref_type: @ref_type, ref: @ref)
.extract_nestedly(basename: basename_without_params,
.extract_nestedly(basename: basename,
extension_only: true,
verbose: ARGV.verbose? && !shutup)
chdir
@@ -82,11 +84,8 @@ class AbstractDownloadStrategy
rm_rf(cached_location)
end
def basename_without_params
return unless @url
# Strip any ?thing=wad out of .c?thing=wad style extensions
File.basename(@url)[/[^?]+/]
def basename
nil
end
private
@@ -122,7 +121,7 @@ class VCSDownloadStrategy < AbstractDownloadStrategy
end
def fetch
ohai "Cloning #{@url}"
ohai "Cloning #{url}"
if cached_location.exist? && repo_valid?
puts "Updating #{cached_location}"
@@ -193,30 +192,78 @@ class AbstractFileDownloadStrategy < AbstractDownloadStrategy
def initialize(url, name, version, **meta)
super
@cached_location = @cache/"#{name}--#{version}#{ext}"
@temporary_path = Pathname.new("#{cached_location}.incomplete")
end
def symlink_location
return @symlink_location if defined?(@symlink_location)
ext = Pathname(parse_basename(url)).extname
@symlink_location = @cache/"#{name}--#{version}#{ext}"
end
def cached_location
return @cached_location if defined?(@cached_location)
url_sha256 = Digest::SHA256.hexdigest(url)
downloads = Pathname.glob(HOMEBREW_CACHE/"downloads/#{url_sha256}--*")
@cached_location = if downloads.count == 1
downloads.first
else
HOMEBREW_CACHE/"downloads/#{url_sha256}--#{resolved_basename}"
end
end
def basename
cached_location.basename.sub(/^[\da-f]{64}\-\-/, "")
end
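The content-addressed name joins the SHA-256 of the original, unresolved URL with the resolved basename. A sketch of the scheme, assuming the spec's URL of https://example.com/foo.tar.gz (the hash shown is the one the updated spec below expects):

require "digest"

url = "https://example.com/foo.tar.gz"
Digest::SHA256.hexdigest(url)
#=> "3d1c0ae7da22be9d83fb1eb774df96b7c4da71d3cf07e1cb28555cf9a5e5af70"

# cached_location:  HOMEBREW_CACHE/downloads/3d1c0ae7da22be9d83fb1eb774df96b7c4da71d3cf07e1cb28555cf9a5e5af70--foo.tar.gz
# symlink_location: HOMEBREW_CACHE/foo--1.2.3.tar.gz (a symlink pointing at cached_location)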
private
def ext
uri_path = if URI::DEFAULT_PARSER.make_regexp =~ @url
uri = URI(@url)
def resolved_url
resolved_url, = resolved_url_and_basename
resolved_url
end
def resolved_basename
_, resolved_basename = resolved_url_and_basename
resolved_basename
end
def resolved_url_and_basename
return @resolved_url_and_basename if defined?(@resolved_url_and_basename)
@resolved_url_and_basename = [url, parse_basename(url)]
end
def parse_basename(url)
uri_path = if URI::DEFAULT_PARSER.make_regexp =~ url
uri = URI(url)
if uri.query
query_params = CGI.parse(uri.query)
query_params["response-content-disposition"].each do |param|
query_basename = param[/attachment;\s*filename=(["']?)(.+)\1/i, 2]
return query_basename if query_basename
end
end
uri.query ? "#{uri.path}?#{uri.query}" : uri.path
else
@url
url
end
uri_path = URI.decode_www_form_component(uri_path)
# We need a Pathname because we've monkeypatched extname to support double
# extensions (e.g. tar.gz).
# We can't use basename_without_params, because given a URL like
# https://example.com/download.php?file=foo-1.0.tar.gz
# the extension we want is ".tar.gz", not ".php".
# Given a URL like https://example.com/download.php?file=foo-1.0.tar.gz
# the basename we want is "foo-1.0.tar.gz", not "download.php".
Pathname.new(uri_path).ascend do |path|
ext = path.extname[/[^?&]+/]
return ext if ext
return path.basename.to_s[/[^?&]+#{Regexp.escape(ext)}/] if ext
end
nil
File.basename(uri_path)
end
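Two illustrative cases for the helper above (URLs are examples, not taken from the commit):

parse_basename("https://example.com/foo-1.0.tar.gz")
#=> "foo-1.0.tar.gz"

# S3-style URLs can carry the real name in a URL-encoded query parameter; the
# "response-content-disposition" branch returns it directly:
parse_basename("https://bucket.s3.amazonaws.com/x?response-content-disposition=attachment%3B%20filename%3Dfoo-1.0.tar.gz")
#=> "foo-1.0.tar.gz"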
end
@@ -229,23 +276,34 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
end
def fetch
ohai "Downloading #{@url}"
urls = [url, *mirrors]
if cached_location.exist?
puts "Already downloaded: #{cached_location}"
else
begin
_fetch
rescue ErrorDuringExecution
raise CurlDownloadStrategyError, @url
begin
url = urls.shift
ohai "Downloading #{url}"
if cached_location.exist?
puts "Already downloaded: #{cached_location}"
else
begin
resolved_url, = resolve_url_and_basename(url)
_fetch(url: url, resolved_url: resolved_url)
rescue ErrorDuringExecution
raise CurlDownloadStrategyError, url
end
ignore_interrupts do
temporary_path.rename(cached_location)
symlink_location.dirname.mkpath
FileUtils.ln_s cached_location.relative_path_from(symlink_location.dirname), symlink_location, force: true
end
end
ignore_interrupts { temporary_path.rename(cached_location) }
rescue CurlDownloadStrategyError
raise if urls.empty?
puts "Trying a mirror..."
retry
end
rescue CurlDownloadStrategyError
raise if mirrors.empty?
puts "Trying a mirror..."
@url = mirrors.shift
retry
end
def clear_cache
@@ -255,18 +313,52 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
private
# Private method, can be overridden if needed.
def _fetch
url = @url
def resolved_url_and_basename
return @resolved_url_and_basename if defined?(@resolved_url_and_basename)
@resolved_url_and_basename = resolve_url_and_basename(url)
end
def resolve_url_and_basename(url)
if ENV["HOMEBREW_ARTIFACT_DOMAIN"]
url = url.sub(%r{^((ht|f)tps?://)?}, ENV["HOMEBREW_ARTIFACT_DOMAIN"].chomp("/") + "/")
ohai "Downloading from #{url}"
end
out, _, status = curl_output("--location", "--silent", "--head", url.to_s)
lines = status.success? ? out.lines.map(&:chomp) : []
locations = lines.map { |line| line[/^Location:\s*(.*)$/i, 1] }
.compact
redirect_url = locations.reduce(url) do |current_url, location|
if location.start_with?("/")
uri = URI(current_url)
"#{uri.scheme}://#{uri.host}#{location}"
else
location
end
end
filenames = lines.map { |line| line[/^Content\-Disposition:\s*attachment;\s*filename=(["']?)(.+)\1$/i, 2] }
.compact
basename = filenames.last || parse_basename(redirect_url)
[redirect_url, basename]
end
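The reduce handles absolute and path-only Location headers alike; a hand-traced run under assumed response headers:

# Headers returned by `curl --location --silent --head` (illustrative):
#
#   HTTP/1.1 302 Found
#   Location: /releases/foo-1.0.tar.gz
#
#   HTTP/1.1 200 OK
#   Content-Disposition: attachment; filename=foo-1.0.tar.gz
#
# The relative Location is re-rooted on the current URL's scheme and host:
#
#   resolve_url_and_basename("https://example.com/latest")
#   #=> ["https://example.com/releases/foo-1.0.tar.gz", "foo-1.0.tar.gz"]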
def _fetch(url:, resolved_url:)
temporary_path.dirname.mkpath
curl_download resolved_url(url), to: temporary_path
ohai "Downloading from #{resolved_url}" if url != resolved_url
if ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
url.start_with?("https://") && !resolved_url.start_with?("https://")
$stderr.puts "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
raise CurlDownloadStrategyError, url
end
curl_download resolved_url, to: temporary_path
end
# Curl options to be always passed to curl,
@@ -291,27 +383,6 @@ class CurlDownloadStrategy < AbstractFileDownloadStrategy
{}
end
def resolved_url(url)
redirect_url, _, status = curl_output(
"--silent", "--head",
"--write-out", "%{redirect_url}",
"--output", "/dev/null",
url.to_s
)
return url unless status.success?
return url if redirect_url.empty?
ohai "Downloading from #{redirect_url}"
if ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
url.start_with?("https://") && !redirect_url.start_with?("https://")
puts "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
raise CurlDownloadStrategyError, url
end
redirect_url
end
def curl_output(*args, **options)
super(*_curl_args, *args, **_curl_opts, **options)
end
@@ -324,23 +395,30 @@ end
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
def apache_mirrors
mirrors, = curl_output("--silent", "--location", "#{@url}&asjson=1")
JSON.parse(mirrors)
def mirrors
return @combined_mirrors if defined?(@combined_mirrors)
backup_mirrors = apache_mirrors.fetch("backup", [])
.map { |mirror| "#{mirror}#{apache_mirrors["path_info"]}" }
@combined_mirrors = [*@mirrors, *backup_mirrors]
end
def _fetch
return super if @tried_apache_mirror
@tried_apache_mirror = true
private
mirrors = apache_mirrors
path_info = mirrors.fetch("path_info")
@url = mirrors.fetch("preferred") + path_info
@mirrors |= %W[https://archive.apache.org/dist/#{path_info}]
def resolved_url_and_basename
return @resolved_url_and_basename if defined?(@resolved_url_and_basename)
@resolved_url_and_basename = [
"#{apache_mirrors["preferred"]}#{apache_mirrors["path_info"]}",
File.basename(apache_mirrors["path_info"]),
]
end
ohai "Best Mirror #{@url}"
super
rescue IndexError, JSON::ParserError
def apache_mirrors
return @apache_mirrors if defined?(@apache_mirrors)
json, = curl_output("--silent", "--location", "#{url}&asjson=1")
@apache_mirrors = JSON.parse(json)
rescue JSON::ParserError
raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
end
end
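The mirror CGI's &asjson=1 response supplies the keys read above; an assumed, abbreviated shape (key names from the code, values illustrative):

# {
#   "preferred": "https://mirror.example.org/apache/",
#   "path_info": "ant/ant-1.10.5-bin.tar.gz",
#   "backup":    ["https://archive.example.org/dist/"]
# }
#
# resolved URL : "#{preferred}#{path_info}"
# basename     : File.basename(path_info)   #=> "ant-1.10.5-bin.tar.gz"
# mirrors      : the formula's own mirrors plus each backup entry with path_info appended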
@@ -348,12 +426,14 @@ end
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
def _fetch
private
def _fetch(url:, resolved_url:)
args = if meta.key?(:data)
escape_data = ->(d) { ["-d", URI.encode_www_form([d])] }
[@url, *meta[:data].flat_map(&escape_data)]
[url, *meta[:data].flat_map(&escape_data)]
else
url, query = @url.split("?", 2)
url, query = url.split("?", 2)
query.nil? ? [url, "-X", "POST"] : [url, "-d", query]
end
@@ -366,7 +446,7 @@ end
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
def stage
UnpackStrategy::Uncompressed.new(cached_location)
.extract(basename: basename_without_params,
.extract(basename: basename,
verbose: ARGV.verbose? && !shutup)
end
end
@@ -386,10 +466,10 @@ end
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
def _fetch
if @url !~ %r{^https?://([^.].*)\.s3\.amazonaws\.com/(.+)$} &&
@url !~ %r{^s3://([^.].*?)/(.+)$}
raise "Bad S3 URL: " + @url
def _fetch(url:, resolved_url:)
if url !~ %r{^https?://([^.].*)\.s3\.amazonaws\.com/(.+)$} &&
url !~ %r{^s3://([^.].*?)/(.+)$}
raise "Bad S3 URL: " + url
end
bucket = Regexp.last_match(1)
key = Regexp.last_match(2)
@@ -402,7 +482,7 @@ class S3DownloadStrategy < CurlDownloadStrategy
s3url = signer.presigned_url :get_object, bucket: bucket, key: key
rescue Aws::Sigv4::Errors::MissingCredentialsError
ohai "AWS credentials missing, trying public URL instead."
s3url = @url
s3url = url
end
curl_download s3url, to: temporary_path
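Both accepted URL shapes capture the bucket and key via Regexp.last_match; the first example is the spec's URL, the second is illustrative:

# "https://bucket.s3.amazonaws.com/foo.tar.gz" => bucket "bucket",    key "foo.tar.gz"
# "s3://my-bucket/releases/foo.tar.gz"         => bucket "my-bucket", key "releases/foo.tar.gz"
# Anything else raises RuntimeError ("Bad S3 URL: ..."), which the updated spec below expects.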
@@ -428,24 +508,23 @@ class GitHubPrivateRepositoryDownloadStrategy < CurlDownloadStrategy
end
def parse_url_pattern
url_pattern = %r{https://github.com/([^/]+)/([^/]+)/(\S+)}
unless @url =~ url_pattern
unless match = url.match(%r{https://github.com/([^/]+)/([^/]+)/(\S+)})
raise CurlDownloadStrategyError, "Invalid url pattern for GitHub Repository."
end
_, @owner, @repo, @filepath = *@url.match(url_pattern)
_, @owner, @repo, @filepath = *match
end
def download_url
"https://#{@github_token}@github.com/#{@owner}/#{@repo}/#{@filepath}"
end
def _fetch
private
def _fetch(url:, resolved_url:)
curl_download download_url, to: temporary_path
end
private
def set_github_token
@github_token = ENV["HOMEBREW_GITHUB_API_TOKEN"]
unless @github_token
@@ -486,14 +565,14 @@ class GitHubPrivateRepositoryReleaseDownloadStrategy < GitHubPrivateRepositoryDownloadStrategy
"https://#{@github_token}@api.github.com/repos/#{@owner}/#{@repo}/releases/assets/#{asset_id}"
end
def _fetch
private
def _fetch(url:, resolved_url:)
# HTTP request header `Accept: application/octet-stream` is required.
# Without this, the GitHub API will respond with metadata, not binary.
curl_download download_url, "--header", "Accept: application/octet-stream", to: temporary_path
end
private
def asset_id
@asset_id ||= resolve_asset_id
end

View File

@@ -24,6 +24,12 @@ module DiskUsageExtension
private
def compute_disk_usage
if symlink? && !exist?
@file_count = 1
@disk_usage = 0
return
end
path = if symlink?
resolved_path
else

View File

@@ -10,7 +10,7 @@ describe Hbc::CLI::Reinstall, :cask do
output = Regexp.new <<~EOS
==> Downloading file:.*caffeine.zip
Already downloaded: .*local-caffeine--1.2.3.zip
Already downloaded: .*caffeine.zip
==> Verifying checksum for Cask local-caffeine
==> Uninstalling Cask local-caffeine
==> Backing App 'Caffeine.app' up to '.*Caffeine.app'.

View File

@@ -8,7 +8,7 @@ describe "brew --cache", :integration_test do
it "prints all cache files for a given Formula" do
expect { brew "--cache", testball }
.to output(%r{#{HOMEBREW_CACHE}/testball-}).to_stdout
.to output(%r{#{HOMEBREW_CACHE}/downloads/[\da-f]{64}\-\-testball\-}).to_stdout
.and not_to_output.to_stderr
.and be_a_success
end

View File

@@ -2,10 +2,9 @@ describe "brew fetch", :integration_test do
it "downloads the Formula's URL" do
setup_test_formula "testball"
expect(HOMEBREW_CACHE/"testball--0.1.tbz").not_to exist
expect { brew "fetch", "testball" }.to be_a_success
expect(HOMEBREW_CACHE/"testball--0.1.tbz").to be_a_symlink
expect(HOMEBREW_CACHE/"testball--0.1.tbz").to exist
end
end

View File

@@ -43,4 +43,42 @@ describe "brew update-report" do
expect(new_cache_file).not_to exist
end
end
describe "::migrate_cache_entries_to_symlinks" do
let(:formula_name) { "foo" }
let(:f) {
formula formula_name do
url "https://example.com/foo-1.2.3.tar.gz"
version "1.2.3"
end
}
let(:old_cache_file) { HOMEBREW_CACHE/"#{formula_name}--1.2.3.tar.gz" }
let(:new_cache_symlink) { HOMEBREW_CACHE/"#{formula_name}--1.2.3.tar.gz" }
let(:new_cache_file) { HOMEBREW_CACHE/"downloads/5994e3a27baa3f448a001fb071ab1f0bf25c87aebcb254d91a6d0b02f46eef86--foo-1.2.3.tar.gz" }
before(:each) do
old_cache_file.dirname.mkpath
FileUtils.touch old_cache_file
allow(Formula).to receive(:[]).and_return(f)
end
it "moves old files to use symlinks when upgrading from <= 1.7.2" do
Homebrew.migrate_cache_entries_to_symlinks(Version.new("1.7.2"))
expect(old_cache_file).to eq(new_cache_symlink)
expect(new_cache_symlink).to be_a_symlink
expect(new_cache_symlink.readlink.to_s)
.to eq "downloads/5994e3a27baa3f448a001fb071ab1f0bf25c87aebcb254d91a6d0b02f46eef86--foo-1.2.3.tar.gz"
expect(new_cache_file).to exist
expect(new_cache_file).to be_a_file
end
it "does not move files if upgrading from > 1.7.2" do
Homebrew.migrate_cache_entries_to_symlinks(Version.new("1.7.3"))
expect(old_cache_file).to exist
expect(new_cache_file).not_to exist
expect(new_cache_symlink).not_to be_a_symlink
end
end
end

View File

@@ -207,16 +207,12 @@ describe S3DownloadStrategy do
let(:url) { "https://bucket.s3.amazonaws.com/foo.tar.gz" }
let(:version) { nil }
describe "#_fetch" do
subject { described_class.new(url, name, version)._fetch }
describe "#fetch" do
context "when given Bad S3 URL" do
let(:url) { "https://example.com/foo.tar.gz" }
it "raises Bad S3 URL error" do
expect {
subject._fetch
}.to raise_error(RuntimeError)
expect { subject.fetch }.to raise_error(RuntimeError, /S3/)
end
end
end
@@ -238,18 +234,19 @@ describe CurlDownloadStrategy do
subject { described_class.new(url, name, version, **specs).cached_location }
context "when URL ends with file" do
it { is_expected.to eq(HOMEBREW_CACHE/"foo--1.2.3.tar.gz") }
it { is_expected.to eq(HOMEBREW_CACHE/"downloads/3d1c0ae7da22be9d83fb1eb774df96b7c4da71d3cf07e1cb28555cf9a5e5af70--foo.tar.gz") }
end
context "when URL file is in middle" do
let(:url) { "https://example.com/foo.tar.gz/from/this/mirror" }
it { is_expected.to eq(HOMEBREW_CACHE/"foo--1.2.3.tar.gz") }
it { is_expected.to eq(HOMEBREW_CACHE/"downloads/1ab61269ba52c83994510b1e28dd04167a2f2e8393a35a9c50c1f7d33fd8f619--foo.tar.gz") }
end
end
describe "#fetch" do
before(:each) do
subject.temporary_path.dirname.mkpath
FileUtils.touch subject.temporary_path
end
@@ -330,11 +327,6 @@ describe CurlDownloadStrategy do
its("cached_location.extname") { is_expected.to eq(".dmg") }
end
context "with no discernible file name in it" do
let(:url) { "https://example.com/download" }
its("cached_location.basename.to_path") { is_expected.to eq("foo--1.2.3") }
end
context "with a file name trailing the first query parameter" do
let(:url) { "https://example.com/download?file=cask.zip&a=1" }
its("cached_location.extname") { is_expected.to eq(".zip") }
@@ -380,6 +372,7 @@ describe CurlPostDownloadStrategy do
describe "#fetch" do
before(:each) do
subject.temporary_path.dirname.mkpath
FileUtils.touch subject.temporary_path
end