# Specs for the Homebrew-Cask download strategies: plain curl downloads,
# curl POST downloads, and Subversion checkouts. Each strategy is exercised
# against an `instance_double` of Hbc::Cask so no network or filesystem
# access is required.
describe "download strategies", :cask do
  let(:url) { "http://example.com/cask.dmg" }
  let(:url_options) { {} }
  let(:cask) {
    instance_double(Hbc::Cask, token:   "some-cask",
                               url:     Hbc::URL.new(url, url_options),
                               version: "1.2.3.4")
  }

  describe Hbc::CurlDownloadStrategy do
    let(:downloader) { Hbc::CurlDownloadStrategy.new(cask) }

    before do
      # Stub out the rename so the (nonexistent) temporary download is never
      # actually moved into place.
      allow(downloader.temporary_path).to receive(:rename)
    end

    it "properly assigns a name and uri based on the Cask" do
      expect(downloader.name).to eq("some-cask")
      expect(downloader.url).to eq("http://example.com/cask.dmg")
      expect(downloader.version.to_s).to eq("1.2.3.4")
    end

    it "calls curl with default arguments for a simple Cask" do
      allow(downloader).to receive(:curl)

      downloader.fetch

      expect(downloader).to have_received(:curl).with(
        cask.url.to_s,
        "-C", 0,
        "-o", kind_of(Pathname)
      )
    end

    context "with an explicit user agent" do
      let(:url_options) { { user_agent: "Mozilla/25.0.1" } }

      it "adds the appropriate curl args" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-A", "Mozilla/25.0.1"])
      end
    end

    context "with a generalized fake user agent" do
      let(:url_options) { { user_agent: :fake } }

      it "adds the appropriate curl args" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-A", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10) https://caskroom.github.io"])
      end
    end

    context "with cookies set" do
      let(:url_options) {
        {
          cookies: {
            coo: "kie",
            mon: "ster",
          },
        }
      }

      it "adds curl args for cookies" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-b", "coo=kie;mon=ster"])
      end
    end

    context "with referer set" do
      let(:url_options) { { referer: "http://somehost/also" } }

      it "adds curl args for referer" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-e", "http://somehost/also"])
      end
    end
  end

  describe Hbc::CurlPostDownloadStrategy do
    let(:downloader) { Hbc::CurlPostDownloadStrategy.new(cask) }

    before do
      allow(downloader.temporary_path).to receive(:rename)
    end

    context "with :using and :data specified" do
      let(:url_options) {
        {
          using: :post,
          data: {
            form: "data",
            is: "good",
          },
        }
      }

      it "adds curl args for post arguments" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-d", "form=data"])
        expect(curl_args.each_cons(2)).to include(["-d", "is=good"])
      end
    end

    context "with :using but no :data" do
      let(:url_options) { { using: :post } }

      it "adds curl args for a POST request" do
        curl_args = []
        allow(downloader).to receive(:curl) { |*args| curl_args = args }

        downloader.fetch

        expect(curl_args.each_cons(2)).to include(["-X", "POST"])
      end
    end
  end

  describe Hbc::SubversionDownloadStrategy do
    let(:url_options) { { using: :svn } }
    let(:fake_system_command) { class_double(Hbc::SystemCommand) }
    let(:downloader) { Hbc::SubversionDownloadStrategy.new(cask, fake_system_command) }

    before do
      allow(fake_system_command).to receive(:run!)
    end

    it "returns a tarball path on fetch" do
      allow(downloader).to receive(:compress)
      allow(downloader).to receive(:fetch_repo)

      expect(downloader.fetch).to equal(downloader.tarball_path)
    end

    it "calls fetch_repo with default arguments for a simple Cask" do
      allow(downloader).to receive(:compress)
      allow(downloader).to receive(:fetch_repo)

      downloader.fetch

      expect(downloader).to have_received(:fetch_repo).with(
        downloader.cached_location,
        cask.url.to_s,
      )
    end

    it "calls svn with default arguments for a simple Cask" do
      allow(downloader).to receive(:compress)

      downloader.fetch

      expect(fake_system_command).to have_received(:run!).with(
        "/usr/bin/svn",
        hash_including(args: [
          "checkout",
          "--force",
          "--config-option",
          "config:miscellany:use-commit-times=yes",
          cask.url.to_s,
          downloader.cached_location,
        ]),
      )
    end

    context "with trust_cert set on the URL" do
      let(:url_options) {
        {
          using: :svn,
          trust_cert: true,
        }
      }

      it "adds svn arguments for :trust_cert" do
        allow(downloader).to receive(:compress)

        downloader.fetch

        expect(fake_system_command).to have_received(:run!).with(
          "/usr/bin/svn",
          hash_including(args: [
            "checkout",
            "--force",
            "--config-option",
            "config:miscellany:use-commit-times=yes",
            "--trust-server-cert",
            "--non-interactive",
            cask.url.to_s,
            downloader.cached_location,
          ]),
        )
      end
    end

    context "with :revision set on url" do
      let(:url_options) {
        {
          using: :svn,
          revision: "10",
        }
      }

      it "adds svn arguments for :revision" do
        allow(downloader).to receive(:compress)

        downloader.fetch

        expect(fake_system_command).to have_received(:run!).with(
          "/usr/bin/svn",
          hash_including(args: [
            "checkout",
            "--force",
            "--config-option",
            "config:miscellany:use-commit-times=yes",
            cask.url.to_s,
            downloader.cached_location,
            "-r",
            "10",
          ]),
        )
      end
    end

    it "runs tar to serialize svn downloads" do
      # sneaky stub to remake the directory, since homebrew code removes it
      # before tar is called
      allow(downloader).to receive(:fetch_repo) {
        downloader.cached_location.mkdir
      }

      downloader.fetch

      expect(fake_system_command).to have_received(:run!).with(
        "/usr/bin/tar",
        hash_including(args: [
          '-s/^\\.//',
          "--exclude",
          ".svn",
          "-cf",
          downloader.tarball_path,
          "--",
          ".",
        ]),
      )
    end
  end

  # does not work yet, because (for unknown reasons), the tar command
  # returns an error code when running under the test suite
  # it 'creates a tarball matching the expected checksum' do
  #   cask = Hbc::CaskLoader.load('svn-download-check-cask')
  #   downloader = Hbc::SubversionDownloadStrategy.new(cask)
  #   # special mocking required for tar to have something to work with
  #   def downloader.fetch_repo(target, url, revision = nil, ignore_externals=false)
  #     target.mkpath
  #     FileUtils.touch(target.join('empty_file.txt'))
  #     File.utime(1000,1000,target.join('empty_file.txt'))
  #   end
  #   expect(downloader.fetch).to equal(downloader.tarball_path)
  #   d = Hbc::Download.new(cask)
  #   d.send(:_check_sums, downloader.tarball_path, cask.sums)
  # end
end