 45978435e7
			
		
	
	
		45978435e7
		
			
		
	
	
	
	
		
			
			- Previously I thought that comments were fine to discourage people from wasting their time trying to bump things that used `undef`, which Sorbet didn't support. But RuboCop is better at this since it'll complain if the comments are unnecessary. - Suggested in https://github.com/Homebrew/brew/pull/18018#issuecomment-2283369501. - I've gone for a mixture of `rubocop:disable` for the files that can't be `typed: strict` (use of undef, required before everything else, etc.) and `rubocop:todo` for everything else that we should try to make strictly typed. There's no functional difference between the two, as `rubocop:todo` is `rubocop:disable` with a different name. - And I entirely disabled the cop for the docs/ directory since `typed: strict` isn't going to gain us anything for some Markdown linting config files. - This means that it's now easier to track what needs to be done rather than relying on checklists of files in our big Sorbet issue: ```shell $ git grep 'typed: true # rubocop:todo Sorbet/StrictSigil' | wc -l 268 ``` - And this is confirmed working for new files: ```shell $ git status On branch use-rubocop-for-sorbet-strict-sigils Untracked files: (use "git add <file>..." to include in what will be committed) Library/Homebrew/bad.rb Library/Homebrew/good.rb nothing added to commit but untracked files present (use "git add" to track) $ brew style Offenses: bad.rb:1:1: C: Sorbet/StrictSigil: Sorbet sigil should be at least strict got true. ^^^^^^^^^^^^^ 1340 files inspected, 1 offense detected ```
		
			
				
	
	
		
			134 lines
		
	
	
		
			2.9 KiB
		
	
	
	
		
			Ruby
		
	
	
	
	
	
			
		
		
	
	
			134 lines
		
	
	
		
			2.9 KiB
		
	
	
	
		
			Ruby
		
	
	
	
	
	
| # typed: true # rubocop:todo Sorbet/StrictSigil
 | |
| # frozen_string_literal: true
 | |
| 
 | |
| require "url"
 | |
| require "checksum"
 | |
| require "download_strategy"
 | |
| 
 | |
# Abstract base class for anything Homebrew can download: bundles a URL,
# an optional checksum, mirror URLs and a download strategy, and knows how
# to fetch itself into the cache and verify the result.
class Downloadable
  include Context
  extend T::Helpers

  abstract!

  # Primary download URL, if one has been set by a subclass.
  sig { returns(T.nilable(URL)) }
  attr_reader :url

  # Expected checksum used to verify the fetched file, if any.
  sig { returns(T.nilable(Checksum)) }
  attr_reader :checksum

  # Fallback URLs to try when the primary URL fails.
  sig { returns(T::Array[String]) }
  attr_reader :mirrors

  sig { void }
  def initialize
    @mirrors = T.let([], T::Array[String])
  end

  # Called by `dup`: copy mutable state so the duplicate can be mutated
  # (or frozen) independently of the original.
  def initialize_dup(other)
    super
    @checksum = @checksum.dup
    @mirrors = @mirrors.dup
    @version = @version.dup
  end

  sig { override.returns(T.self_type) }
  def freeze
    # Freeze nested mutable state first, then self via `super`.
    @checksum.freeze
    @mirrors.freeze
    @version.freeze
    super
  end

  # Whether the file already exists in the download cache.
  sig { returns(T::Boolean) }
  def downloaded?
    cached_download.exist?
  end

  # Path the download strategy will place the fetched file at.
  sig { returns(Pathname) }
  def cached_download
    downloader.cached_location
  end

  # Remove any cached copy of this download.
  sig { void }
  def clear_cache
    downloader.clear_cache
  end

  # Version of this download, either set explicitly (`@version`) or
  # inferred from the URL. Returns `nil` rather than a null version.
  sig { returns(T.nilable(Version)) }
  def version
    return @version if @version && !@version.null?

    version = determine_url&.version
    version unless version&.null?
  end

  # Download strategy class for the URL, memoized after first lookup.
  sig { returns(T.class_of(AbstractDownloadStrategy)) }
  def download_strategy
    @download_strategy ||= determine_url&.download_strategy
  end

  # Memoized download strategy instance configured with the primary URL,
  # mirrors, cache location and any URL-specific options.
  #
  # @raise [ArgumentError] if no URL has been set.
  sig { returns(AbstractDownloadStrategy) }
  def downloader
    @downloader ||= begin
      primary_url, *mirrors = determine_url_mirrors
      raise ArgumentError, "attempted to use a Downloadable without a URL!" if primary_url.blank?

      download_strategy.new(primary_url, download_name, version,
                            mirrors:, cache:, **T.must(@url).specs)
    end
  end

  # Fetch the download into the cache, wrapping strategy failures in a
  # `DownloadError`, then (optionally) verify its checksum.
  #
  # @return [Pathname] path of the cached download.
  sig { params(verify_download_integrity: T::Boolean, timeout: T.nilable(T.any(Integer, Float))).returns(Pathname) }
  def fetch(verify_download_integrity: true, timeout: nil)
    cache.mkpath

    begin
      downloader.fetch(timeout:)
    rescue ErrorDuringExecution, CurlDownloadStrategyError => e
      raise DownloadError.new(self, e)
    end

    download = cached_download
    verify_download_integrity(download) if verify_download_integrity
    download
  end

  # Verify `filename` against the expected checksum. A missing checksum is
  # downgraded to a warning that prints the actual sha256 for reference.
  sig { params(filename: Pathname).void }
  def verify_download_integrity(filename)
    if filename.file?
      ohai "Verifying checksum for '#{filename.basename}'" if verbose?
      filename.verify_checksum(checksum)
    end
  rescue ChecksumMissingError
    opoo <<~EOS
      Cannot verify integrity of '#{filename.basename}'.
      No checksum was provided.
      For your reference, the checksum is:
        sha256 "#{filename.sha256}"
    EOS
  end

  # Basename used for the cached file; subclasses may override.
  sig { overridable.returns(String) }
  def download_name
    File.basename(determine_url.to_s)
  end

  private

  # Hook for subclasses to compute the URL differently.
  sig { overridable.returns(T.nilable(URL)) }
  def determine_url
    @url
  end

  # Primary URL followed by mirrors, deduplicated.
  sig { overridable.returns(T::Array[String]) }
  def determine_url_mirrors
    [determine_url.to_s, *mirrors].uniq
  end

  # Directory downloads are cached in; subclasses may override.
  sig { overridable.returns(Pathname) }
  def cache
    HOMEBREW_CACHE
  end
end
 |