Remove cask download strategies.
Commit 290910d3c0 (parent: ad150e7595).
@ -10,8 +10,6 @@ require "hbc/caskroom"
|
||||
require "hbc/checkable"
|
||||
require "hbc/cli"
|
||||
require "hbc/cask_dependencies"
|
||||
require "hbc/download"
|
||||
require "hbc/download_strategy"
|
||||
require "hbc/exceptions"
|
||||
require "hbc/installer"
|
||||
require "hbc/macos"
|
||||
|
||||
@ -1,3 +1,5 @@
|
||||
require "hbc/download"
|
||||
|
||||
module Hbc
|
||||
class Auditor
|
||||
def self.audit(cask, audit_download: false, check_token_conflicts: false, commit_range: nil)
|
||||
|
||||
@ -1,3 +1,5 @@
|
||||
require "hbc/download"
|
||||
|
||||
module Hbc
|
||||
class CLI
|
||||
class Fetch < AbstractCommand
|
||||
|
||||
@ -1,275 +0,0 @@
|
||||
require "cgi"
|
||||
|
||||
# We abuse Homebrew's download strategies considerably here.
|
||||
# * Our downloader instances only invoke the fetch and
|
||||
# clear_cache methods, ignoring stage
|
||||
# * Our overridden fetch methods are expected to return
|
||||
# a value: the successfully downloaded file.
|
||||
|
||||
module Hbc
|
||||
# Common interface for cask downloaders. Concrete strategies are
# expected to implement #fetch (returning the downloaded artifact's
# path), #cached_location, and #clear_cache.
class AbstractDownloadStrategy
  attr_reader :cask, :name, :url, :uri_object, :version

  # cask    - the Cask being downloaded; token/url/version are copied
  #           into separate readers for convenience.
  # command - runner used to invoke external tools (default: SystemCommand).
  def initialize(cask, command: SystemCommand)
    @cask = cask
    @command = command
    # Mirrors of cask attributes, kept so subclasses can speak the same
    # vocabulary as Homebrew's download strategies.
    @name = cask.token
    @url = cask.url.to_s
    @uri_object = cask.url
    @version = cask.version
  end

  # Subclass responsibility: download and return the artifact path.
  def fetch; end

  # Subclass responsibility: where the artifact lives in the cache.
  def cached_location; end

  # Subclass responsibility: remove any cached artifacts.
  def clear_cache; end
end
|
||||
|
||||
# Base class for version-control-system download strategies. Extracts
# the ref (branch/revision/tag) requested by the cask URL and derives
# a stable cache location for the working copy.
class HbVCSDownloadStrategy < AbstractDownloadStrategy
  # URL options that can pin what gets checked out.
  REF_TYPES = [:branch, :revision, :revisions, :tag].freeze

  def initialize(*args, **options)
    super
    @ref_type, @ref = extract_ref
    @clone = Cache.path.join(cache_filename)
  end

  # Returns [ref_type, ref_value]; both are nil when the URL pins
  # nothing. Only truthy ref values are considered.
  def extract_ref
    ref_type = REF_TYPES.find do |type|
      uri_object.respond_to?(type) && uri_object.send(type)
    end
    [ref_type, ref_type && uri_object.send(ref_type)]
  end

  # Cache entry name: "<token>--<vcs tag>".
  def cache_filename
    "#{name}--#{cache_tag}"
  end

  # Subclasses override this with a VCS-specific tag.
  def cache_tag
    "__UNKNOWN__"
  end

  def cached_location
    @clone
  end

  def clear_cache
    cached_location.rmtree if cached_location.exist?
  end
end
|
||||
|
||||
# Downloads a cask artifact over HTTP(S) via curl, with support for
# resuming partial downloads and per-cask cookies/referer options.
class CurlDownloadStrategy < AbstractDownloadStrategy
  # Final cache destination for the downloaded file.
  def tarball_path
    @tarball_path ||= Cache.path.join("#{name}--#{version}#{ext}")
  end

  # In-progress download target; renamed onto tarball_path on success.
  def temporary_path
    @temporary_path ||= tarball_path.sub(/$/, ".incomplete")
  end

  def cached_location
    tarball_path
  end

  # Removes both the completed and the partial download, holding the
  # download lock so we do not race a concurrent fetch.
  def clear_cache
    [cached_location, temporary_path].each do |path|
      next unless path.exist?

      begin
        LockFile.new(path.basename).with_lock do
          path.unlink
        end
      rescue OperationInProgressError
        raise CurlDownloadStrategyError, "#{path} is in use by another process"
      end
    end
  end

  def _fetch
    curl_download url, *cask_curl_args, to: temporary_path, user_agent: uri_object.user_agent
  end

  # Fetches the file, resuming a previous partial download when the
  # server supports it. Returns the path of the downloaded file.
  def fetch
    tarball_path.dirname.mkpath

    ohai "Downloading #{@url}"
    if tarball_path.exist?
      puts "Already downloaded: #{tarball_path}"
    else
      had_incomplete_download = temporary_path.exist?
      begin
        LockFile.new(temporary_path.basename).with_lock do
          _fetch
        end
      rescue ErrorDuringExecution => e
        # curl exit status 33 == HTTP range request not supported;
        # wipe the incomplete download and retry once from scratch.
        if e.status.exitstatus == 33 && had_incomplete_download
          ohai "Trying a full download"
          temporary_path.unlink
          had_incomplete_download = false
          retry
        end

        # BUGFIX: the original did `msg = @url` and then mutated msg
        # with String#concat, which destructively appended to the very
        # string @url references. Duplicate it so @url stays intact.
        msg = @url.dup
        msg.concat("\nThe incomplete download is cached at #{temporary_path}") if temporary_path.exist?
        raise CurlDownloadStrategyError, msg
      end
      ignore_interrupts { temporary_path.rename(tarball_path) }
    end
    tarball_path
  end

  private

  # Extra curl switches derived from the cask URL's options.
  def cask_curl_args
    cookies_args + referer_args
  end

  def cookies_args
    return [] unless uri_object.cookies

    [
      "-b",
      # sort_by is for predictability between Ruby versions
      uri_object
      .cookies
      .sort_by(&:to_s)
      .map { |key, value| "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}" }
      .join(";"),
    ]
  end

  def referer_args
    uri_object.referer ? ["-e", uri_object.referer] : []
  end

  # File extension of the URL path, truncated at the first query
  # delimiter, so "file.zip?sig=abc" yields ".zip".
  def ext
    Pathname.new(@url).extname[/[^?&]+/]
  end
end
|
||||
|
||||
# Like CurlDownloadStrategy, but issues a POST request — with form
# data when the cask URL supplies :data, otherwise an empty POST.
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def cask_curl_args
    super + post_args
  end

  def post_args
    data = uri_object.data
    return ["-X", "POST"] unless data

    # sort_by is for predictability between Ruby versions
    data.sort_by(&:to_s).flat_map do |key, value|
      ["-d", "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}"]
    end
  end
end
|
||||
|
||||
# Checks a cask's source out of a Subversion repository, caching the
# working copy between runs.
class SubversionDownloadStrategy < HbVCSDownloadStrategy
  def cache_tag
    # TODO: pass versions as symbols, support :head here
    if version == "head"
      "svn-HEAD"
    else
      "svn"
    end
  end

  # True when the cached checkout looks like a real working copy.
  def repo_valid?
    (@clone/".svn").directory?
  end

  # URL the cached working copy was checked out from, or nil.
  # NOTE(review): interpolates the cache path into a shell command
  # line; assumes it contains no single quotes — verify upstream.
  def repo_url
    `svn info '#{@clone}' 2>/dev/null`.strip[/^URL: (.+)$/, 1]
  end

  # super does not provide checks for already-existing downloads, so
  # the whole flow lives here. Returns the checkout location.
  def fetch
    cached_location.dirname.mkpath

    if cached_location.directory?
      puts "Already downloaded: #{cached_location}"
      return cached_location
    end

    @url = @url.sub(/^svn\+/, "") if @url =~ %r{^svn\+http://}
    ohai "Checking out #{@url}"

    # Discard the cache when it points at a different repository and
    # cannot be switched over in place.
    clear_cache unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, @clone)

    if @clone.exist? && !repo_valid?
      puts "Removing invalid SVN repo from cache"
      clear_cache
    end

    case @ref_type
    when :revision
      fetch_repo @clone, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      trunk_revision = @ref[:trunk]
      fetch_repo @clone, @url, trunk_revision, true

      fetch_externals do |external_name, external_url|
        fetch_repo @clone + external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo @clone, @url
    end

    cached_location
  end

  # Performs the actual "svn checkout"/"svn up". Redefined (vs.
  # Homebrew's) mainly for the trust_cert option, controllable from
  # the Cask definition, and to force commit-time timestamps so
  # checksums stay stable.
  def fetch_repo(target, url, revision = uri_object.revision, ignore_externals = false)
    # Use "svn up" when the repository already exists locally: saves
    # bandwidth and still moves the copy to the requested revision.
    subcommand = target.directory? ? "up" : "checkout"
    args = [subcommand]

    # SVN shipped with XCode 3.1.4 can't force a checkout.
    args << "--force" unless MacOS.version == :leopard

    # make timestamps consistent for checksumming
    args += %w[--config-option config:miscellany:use-commit-times=yes]

    if uri_object.trust_cert
      args << "--trust-server-cert"
      args << "--non-interactive"
    end

    args << url unless target.directory?
    args << target
    args << "-r" << revision if revision
    args << "--ignore-externals" if ignore_externals
    @command.run!("/usr/bin/svn",
                  args:         args,
                  print_stderr: false)
  end

  # Escapes backslashes and single quotes for interpolation into a
  # shell command line.
  # See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
  def shell_quote(str)
    str.gsub(/\\|'/) { |c| "\\#{c}" }
  end

  # Yields [name, url] for each svn:externals entry of the repository.
  def fetch_externals
    `svn propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end
end
|
||||
end
|
||||
@ -4,6 +4,7 @@ require "formula_installer"
|
||||
require "unpack_strategy"
|
||||
|
||||
require "hbc/cask_dependencies"
|
||||
require "hbc/download"
|
||||
require "hbc/staged"
|
||||
require "hbc/verify"
|
||||
|
||||
|
||||
@ -1,310 +0,0 @@
|
||||
# Specs for the cask download strategies (curl GET/POST, Subversion).
# All network and subprocess interaction is stubbed.
describe "download strategies", :cask do
  let(:url) { "https://example.com/cask.dmg" }
  let(:url_options) { {} }
  let(:cask) {
    instance_double(Hbc::Cask, token:   "some-cask",
                               url:     URL.new(url, **url_options),
                               version: "1.2.3.4")
  }

  describe Hbc::CurlDownloadStrategy do
    let(:downloader) { described_class.new(cask) }

    before do
      allow(downloader.temporary_path).to receive(:rename)
    end

    it "properly assigns a name and uri based on the Cask" do
      expect(downloader.name).to eq("some-cask")
      expect(downloader.url).to eq("https://example.com/cask.dmg")
      expect(downloader.version.to_s).to eq("1.2.3.4")
    end

    it "calls curl with default arguments for a simple Cask" do
      allow(downloader).to receive(:curl)

      downloader.fetch

      expect(downloader).to have_received(:curl).with(
        "--location",
        "--remote-time",
        "--continue-at", "-",
        "--output", kind_of(Pathname),
        cask.url.to_s,
        user_agent: :default
      )
    end

    context "with an explicit user agent" do
      let(:url_options) { { user_agent: "Mozilla/25.0.1" } }

      it "adds the appropriate curl args" do
        expect(downloader).to receive(:system_command!) { |*, args:, **|
          expect(args.each_cons(2)).to include(["--user-agent", "Mozilla/25.0.1"])
        }

        downloader.fetch
      end
    end

    context "with a generalized fake user agent" do
      alias_matcher :a_string_matching, :match

      let(:url_options) { { user_agent: :fake } }

      it "adds the appropriate curl args" do
        expect(downloader).to receive(:system_command!) { |*, args:, **|
          expect(args.each_cons(2).to_a).to include(["--user-agent", a_string_matching(/Mozilla.*Mac OS X 10.*AppleWebKit/)])
        }

        downloader.fetch
      end
    end

    context "with cookies set" do
      let(:url_options) {
        {
          cookies: {
            coo: "kie",
            mon: "ster",
          },
        }
      }

      it "adds curl args for cookies" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-b", "coo=kie;mon=ster"])
      end
    end

    context "with referer set" do
      let(:url_options) { { referer: "https://somehost/also" } }

      it "adds curl args for referer" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-e", "https://somehost/also"])
      end
    end

    context "with a file name trailing the URL path" do
      describe "#tarball_path" do
        subject { downloader.tarball_path }

        its(:extname) { is_expected.to eq(".dmg") }
      end
    end

    context "with no discernible file name in it" do
      let(:url) { "https://example.com/download" }

      describe "#tarball_path" do
        subject { downloader.tarball_path }

        its(:to_path) { is_expected.to end_with("some-cask--1.2.3.4") }
      end
    end

    context "with a file name trailing the first query parameter" do
      let(:url) { "https://example.com/download?file=cask.zip&a=1" }

      describe "#tarball_path" do
        subject { downloader.tarball_path }

        its(:extname) { is_expected.to eq(".zip") }
      end
    end

    context "with a file name trailing the second query parameter" do
      let(:url) { "https://example.com/dl?a=1&file=cask.zip&b=2" }

      describe "#tarball_path" do
        subject { downloader.tarball_path }

        its(:extname) { is_expected.to eq(".zip") }
      end
    end

    context "with an unusually long query string" do
      let(:url) do
        [
          "https://node49152.ssl.fancycdn.example.com",
          "/fancycdn/node/49152/file/upload/download",
          "?cask_class=zf920df",
          "&cask_group=2348779087242312",
          "&cask_archive_file_name=cask.zip",
          "&signature=CGmDulxL8pmutKTlCleNTUY%2FyO9Xyl5u9yVZUE0",
          "uWrjadjuz67Jp7zx3H7NEOhSyOhu8nzicEHRBjr3uSoOJzwkLC8L",
          "BLKnz%2B2X%2Biq5m6IdwSVFcLp2Q1Hr2kR7ETn3rF1DIq5o0lHC",
          "yzMmyNe5giEKJNW8WF0KXriULhzLTWLSA3ZTLCIofAdRiiGje1kN",
          "YY3C0SBqymQB8CG3ONn5kj7CIGbxrDOq5xI2ZSJdIyPysSX7SLvE",
          "DBw2KdR24q9t1wfjS9LUzelf5TWk6ojj8p9%2FHjl%2Fi%2FVCXN",
          "N4o1mW%2FMayy2tTY1qcC%2FTmqI1ulZS8SNuaSgr9Iys9oDF1%2",
          "BPK%2B4Sg==",
        ].join
      end

      describe "#tarball_path" do
        subject { downloader.tarball_path }

        its(:extname) { is_expected.to eq(".zip") }
        its("to_path.length") { is_expected.to be_between(0, 255) }
      end
    end
  end

  describe Hbc::CurlPostDownloadStrategy do
    let(:downloader) { described_class.new(cask) }

    before do
      allow(downloader.temporary_path).to receive(:rename)
    end

    context "with :using and :data specified" do
      let(:url_options) {
        {
          using: :post,
          data:  {
            form: "data",
            is:   "good",
          },
        }
      }

      it "adds curl args for post arguments" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-d", "form=data"])
        expect(curl_args.each_cons(2)).to include(["-d", "is=good"])
      end
    end

    context "with :using but no :data" do
      let(:url_options) { { using: :post } }

      it "adds curl args for a POST request" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-X", "POST"])
      end
    end
  end

  describe Hbc::SubversionDownloadStrategy do
    let(:url_options) { { using: :svn } }
    let(:fake_system_command) { class_double(SystemCommand) }
    let(:downloader) { described_class.new(cask, command: fake_system_command) }

    before do
      allow(fake_system_command).to receive(:run!)
    end

    it "returns a tarball path on fetch" do
      allow(downloader).to receive(:compress)
      allow(downloader).to receive(:fetch_repo)

      expect(downloader.fetch).to equal(downloader.cached_location)
    end

    it "calls fetch_repo with default arguments for a simple Cask" do
      allow(downloader).to receive(:compress)
      allow(downloader).to receive(:fetch_repo)

      downloader.fetch

      expect(downloader).to have_received(:fetch_repo).with(
        downloader.cached_location,
        cask.url.to_s,
      )
    end

    it "calls svn with default arguments for a simple Cask" do
      allow(downloader).to receive(:compress)

      downloader.fetch

      expect(fake_system_command).to have_received(:run!).with(
        "/usr/bin/svn",
        hash_including(args: [
          "checkout",
          "--force",
          "--config-option",
          "config:miscellany:use-commit-times=yes",
          cask.url.to_s,
          downloader.cached_location,
        ]),
      )
    end

    context "with trust_cert set on the URL" do
      let(:url_options) {
        {
          using:      :svn,
          trust_cert: true,
        }
      }

      it "adds svn arguments for :trust_cert" do
        allow(downloader).to receive(:compress)

        downloader.fetch

        expect(fake_system_command).to have_received(:run!).with(
          "/usr/bin/svn",
          hash_including(args: [
            "checkout",
            "--force",
            "--config-option",
            "config:miscellany:use-commit-times=yes",
            "--trust-server-cert",
            "--non-interactive",
            cask.url.to_s,
            downloader.cached_location,
          ]),
        )
      end
    end

    context "with :revision set on url" do
      let(:url_options) {
        {
          using:    :svn,
          revision: "10",
        }
      }

      it "adds svn arguments for :revision" do
        allow(downloader).to receive(:compress)

        downloader.fetch

        expect(fake_system_command).to have_received(:run!).with(
          "/usr/bin/svn",
          hash_including(args: [
            "checkout",
            "--force",
            "--config-option",
            "config:miscellany:use-commit-times=yes",
            cask.url.to_s,
            downloader.cached_location,
            "-r",
            "10",
          ]),
        )
      end
    end
  end
end
|
||||
Loading…
x
Reference in New Issue
Block a user