Merge branch 'master' into mlh-outdated-packages

commit 6c266a6964

.github/workflows/docker.yml (2 changes, vendored)
@@ -1,6 +1,6 @@
name: Docker
on:
  pull_request:
  push:
    paths:
      - .github/workflows/docker.yml
      - Dockerfile

.github/workflows/spdx.yml (2 changes, vendored)
@@ -1,6 +1,6 @@
name: Update license data
on:
  pull_request:
  push:
    paths:
      - .github/workflows/spdx.yml
  schedule:

.github/workflows/tests.yml (21 changes, vendored)
@@ -13,6 +13,11 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest, macOS-latest]
        include:
          - os: ubuntu-latest
            core-tap: 'linuxbrew-core'
          - os: macOS-latest
            core-tap: 'homebrew-core'
    steps:
      - name: Set up Homebrew
        id: set-up-homebrew
@@ -23,14 +28,11 @@ jobs:
        with:
          username: BrewTestBot

      - name: Run brew test-bot --only-cleanup-before
        run: brew test-bot --only-cleanup-before
      - run: brew test-bot --only-cleanup-before

      - name: Run brew config
        run: brew config
      - run: brew config

      - name: Run brew doctor
        run: brew doctor
      - run: brew doctor

      - name: Cache Bundler RubyGems
        id: cache
@@ -99,12 +101,7 @@ jobs:
      - name: Run brew readall on all taps
        run: brew readall --aliases

      - name: Run brew style on homebrew-core
        if: matrix.os == 'macOS-latest'
        run: brew style --display-cop-names homebrew/core

      - name: Run brew style on linuxbrew-core
        if: matrix.os == 'ubuntu-latest'
      - name: Run brew style on ${{ matrix.core-tap }}
        run: brew style --display-cop-names homebrew/core

      - name: Run brew style on official taps

.gitignore (6 changes, vendored)
@@ -71,7 +71,7 @@
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/d*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/e*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/f*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/h*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/h*.rb
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/i*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/p*
**/vendor/bundle/ruby/*/gems/mechanize-*/lib/mechanize/r*
@@ -117,6 +117,7 @@
**/vendor/bundle/ruby/*/gems/rainbow-*/
**/vendor/bundle/ruby/*/gems/rdiscount-*/
**/vendor/bundle/ruby/*/gems/regexp_parser-*/
**/vendor/bundle/ruby/*/gems/rexml-*/
**/vendor/bundle/ruby/*/gems/ronn-*/
**/vendor/bundle/ruby/*/gems/rspec-*/
**/vendor/bundle/ruby/*/gems/rspec-core-*/
@@ -139,9 +140,6 @@
**/vendor/bundle/ruby/*/gems/webrobots-*/

# Ignore conditional dependencies we don't wish to vendor
**/vendor/bundle/ruby/*/gems/bindata-*/
**/vendor/bundle/ruby/*/gems/elftools-*/
**/vendor/bundle/ruby/*/gems/patchelf-*/
**/vendor/bundle/ruby/*/gems/sorbet-*/
**/vendor/bundle/ruby/*/gems/sorbet-runtime-*/
**/vendor/bundle/ruby/*/gems/tapioca-*/

@@ -330,14 +330,11 @@ module Cask
    end

    def check_languages
      invalid = []
      @cask.languages.each do |language|
        invalid << language.to_s unless language.match?(/^[a-z]{2}$/) || language.match?(/^[a-z]{2}-[A-Z]{2}$/)
        Locale.parse(language)
      rescue Locale::ParserError
        add_error "Locale '#{language}' is invalid."
      end

      return if invalid.empty?

      add_error "locale #{invalid.join(", ")} are invalid"
    end

    def check_token_conflicts
@@ -380,8 +377,10 @@ module Cask

      add_warning "cask token contains .app" if token.end_with? ".app"

      if cask.token.end_with? "alpha", "beta", "release candidate"
        add_warning "cask token contains version designation"
      if /-(?<designation>alpha|beta|rc|release-candidate)$/ =~ cask.token
        if cask.tap.official? && cask.tap != "homebrew/cask-versions"
          add_warning "cask token contains version designation '#{designation}'"
        end
      end

      add_warning "cask token mentions launcher" if token.end_with? "launcher"

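A quick illustration of the reworked language check (not part of the diff; it exercises the Locale class added later in this commit, and the sample strings come from the audit spec below):

  ["zh-CN", "ZH-CN", "zh-", "zh-cn"].each do |language|
    Locale.parse(language)
    puts "#{language}: ok"
  rescue Locale::ParserError
    puts "Locale '#{language}' is invalid."  # the message check_languages now adds as an error
  end
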
@ -31,30 +31,39 @@ module Cask
|
||||
end
|
||||
|
||||
def run
|
||||
output = args.any? ? provided_list : Caskroom.casks
|
||||
self.class.list_casks(
|
||||
*casks,
|
||||
json: json?,
|
||||
one: one?,
|
||||
full_name: full_name?,
|
||||
versions: versions?,
|
||||
)
|
||||
end
|
||||
|
||||
if json?
|
||||
def self.list_casks(*casks, json: false, one: false, full_name: false, versions: false)
|
||||
output = if casks.any?
|
||||
casks.each do |cask|
|
||||
raise CaskNotInstalledError, cask unless cask.installed?
|
||||
end
|
||||
else
|
||||
Caskroom.casks
|
||||
end
|
||||
|
||||
if json
|
||||
puts JSON.generate(output.map(&:to_h))
|
||||
elsif one?
|
||||
elsif one
|
||||
puts output.map(&:to_s)
|
||||
elsif full_name?
|
||||
elsif full_name
|
||||
puts output.map(&:full_name).sort(&tap_and_name_comparison)
|
||||
elsif versions?
|
||||
puts output.map(&self.class.method(:format_versioned))
|
||||
elsif !output.empty? && args.any?
|
||||
puts output.map(&self.class.method(:list_artifacts))
|
||||
elsif versions
|
||||
puts output.map(&method(:format_versioned))
|
||||
elsif !output.empty? && casks.any?
|
||||
puts output.map(&method(:list_artifacts))
|
||||
elsif !output.empty?
|
||||
puts Formatter.columns(output.map(&:to_s))
|
||||
end
|
||||
end
|
||||
|
||||
def provided_list
|
||||
casks.each do |cask|
|
||||
raise CaskNotInstalledError, cask unless cask.installed?
|
||||
end
|
||||
casks
|
||||
end
|
||||
|
||||
def self.list_artifacts(cask)
|
||||
cask.artifacts.group_by(&:class).each do |klass, artifacts|
|
||||
next unless klass.respond_to?(:english_description)
|
||||
|
||||
@ -136,7 +136,7 @@ module Cask
|
||||
end
|
||||
|
||||
def language_eval
|
||||
return @language if instance_variable_defined?(:@language)
|
||||
return @language if defined?(@language)
|
||||
|
||||
return @language = nil if @language_blocks.nil? || @language_blocks.empty?
|
||||
|
||||
|
||||
@ -129,7 +129,7 @@ module Cask
|
||||
def to_s
|
||||
<<~EOS
|
||||
Cask '#{token}' requires a checksum:
|
||||
#{Formatter.identifier("sha256 '#{actual}'")}
|
||||
#{Formatter.identifier('sha256 "#{actual}"')}
|
||||
EOS
|
||||
end
|
||||
end
|
||||
|
||||
@ -2,8 +2,7 @@
|
||||
|
||||
require "fetch"
|
||||
require "cli/parser"
|
||||
require "cask/cmd"
|
||||
require "cask/cask_loader"
|
||||
require "cask/download"
|
||||
|
||||
module Homebrew
|
||||
extend Fetch
|
||||
@ -37,29 +36,27 @@ module Homebrew
|
||||
|
||||
if args.no_named?
|
||||
puts HOMEBREW_CACHE
|
||||
elsif args.formula?
|
||||
args.named.each do |name|
|
||||
print_formula_cache name, args: args
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
formulae_or_casks = if args.formula?
|
||||
args.formulae
|
||||
elsif args.cask?
|
||||
args.named.each do |name|
|
||||
print_cask_cache name
|
||||
end
|
||||
args.loaded_casks
|
||||
else
|
||||
args.named.each do |name|
|
||||
print_formula_cache name, args: args
|
||||
rescue FormulaUnavailableError
|
||||
begin
|
||||
print_cask_cache name
|
||||
rescue Cask::CaskUnavailableError
|
||||
odie "No available formula or cask with the name \"#{name}\""
|
||||
end
|
||||
args.formulae_and_casks
|
||||
end
|
||||
|
||||
formulae_or_casks.each do |formula_or_cask|
|
||||
if formula_or_cask.is_a? Formula
|
||||
print_formula_cache formula_or_cask, args: args
|
||||
else
|
||||
print_cask_cache formula_or_cask
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def print_formula_cache(name, args:)
|
||||
formula = Formulary.factory(name, force_bottle: args.force_bottle?, flags: args.flags_only)
|
||||
def print_formula_cache(formula, args:)
|
||||
if fetch_bottle?(formula, args: args)
|
||||
puts formula.bottle.cached_download
|
||||
else
|
||||
@ -67,8 +64,7 @@ module Homebrew
|
||||
end
|
||||
end
|
||||
|
||||
def print_cask_cache(name)
|
||||
cask = Cask::CaskLoader.load name
|
||||
puts Cask::Cmd::Cache.cached_location(cask)
|
||||
def print_cask_cache(cask)
|
||||
puts Cask::Download.new(cask).downloader.cached_location
|
||||
end
|
||||
end
|
||||
|
||||
@ -33,6 +33,10 @@ module Homebrew
|
||||
def cleanup
|
||||
args = cleanup_args.parse
|
||||
|
||||
if args.prune.present? && !Integer(args.prune, exception: false) && args.prune != "all"
|
||||
raise UsageError, "--prune= expects an integer or 'all'."
|
||||
end
|
||||
|
||||
cleanup = Cleanup.new(*args.named, dry_run: args.dry_run?, scrub: args.s?, days: args.prune&.to_i)
|
||||
if args.prune_prefix?
|
||||
cleanup.prune_prefix_symlinks_and_directories
|
||||
|
||||
@ -1,8 +1,6 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require "cli/parser"
|
||||
require "cask/cask_loader"
|
||||
require "cask/exceptions"
|
||||
|
||||
module Homebrew
|
||||
module_function
|
||||
@ -23,21 +21,22 @@ module Homebrew
|
||||
|
||||
if args.no_named?
|
||||
exec_browser HOMEBREW_WWW
|
||||
return
|
||||
end
|
||||
|
||||
homepages = args.formulae_and_casks.map do |formula_or_cask|
|
||||
puts "Opening homepage for #{name_of(formula_or_cask)}"
|
||||
formula_or_cask.homepage
|
||||
end
|
||||
|
||||
exec_browser(*homepages)
|
||||
end
|
||||
|
||||
def name_of(formula_or_cask)
|
||||
if formula_or_cask.is_a? Formula
|
||||
"Formula #{formula_or_cask.name}"
|
||||
else
|
||||
homepages = args.named.map do |name|
|
||||
f = Formulary.factory(name)
|
||||
puts "Opening homepage for formula #{name}"
|
||||
f.homepage
|
||||
rescue FormulaUnavailableError
|
||||
begin
|
||||
c = Cask::CaskLoader.load(name)
|
||||
puts "Opening homepage for cask #{name}"
|
||||
c.homepage
|
||||
rescue Cask::CaskUnavailableError
|
||||
odie "No available formula or cask with the name \"#{name}\""
|
||||
end
|
||||
end
|
||||
exec_browser(*homepages)
|
||||
"Cask #{formula_or_cask.token}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -168,11 +168,12 @@ module Homebrew
|
||||
end
|
||||
|
||||
def list_casks(args:)
|
||||
cask_list = Cask::Cmd::List.new args.named
|
||||
cask_list.one = args.public_send(:'1?')
|
||||
cask_list.versions = args.versions?
|
||||
cask_list.full_name = args.full_name?
|
||||
cask_list.run
|
||||
Cask::Cmd::List.list_casks(
|
||||
*args.loaded_casks,
|
||||
one: args.public_send(:'1?'),
|
||||
full_name: args.full_name?,
|
||||
versions: args.versions?,
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@ -261,9 +261,8 @@ module Homebrew
|
||||
!(versioned_formulae = formula.versioned_formulae).empty?
|
||||
versioned_aliases = formula.aliases.grep(/.@\d/)
|
||||
_, last_alias_version = versioned_formulae.map(&:name).last.split("@")
|
||||
major, minor, = formula.version.to_s.split(".")
|
||||
alias_name_major = "#{formula.name}@#{major}"
|
||||
alias_name_major_minor = "#{alias_name_major}.#{minor}"
|
||||
alias_name_major = "#{formula.name}@#{formula.version.major}"
|
||||
alias_name_major_minor = "#{alias_name_major}.#{formula.version.minor}"
|
||||
alias_name = if last_alias_version.split(".").length == 1
|
||||
alias_name_major
|
||||
else
|
||||
@ -488,11 +487,7 @@ module Homebrew
|
||||
return unless formula.name == "postgresql"
|
||||
return unless @core_tap
|
||||
|
||||
major_version = formula.version
|
||||
.to_s
|
||||
.split(".")
|
||||
.first
|
||||
.to_i
|
||||
major_version = formula.version.major.to_i
|
||||
previous_major_version = major_version - 1
|
||||
previous_formula_name = "postgresql@#{previous_major_version}"
|
||||
begin
|
||||
@ -684,12 +679,17 @@ module Homebrew
|
||||
}.freeze
|
||||
|
||||
GITHUB_PRERELEASE_ALLOWLIST = {
|
||||
"cbmc" => "5.12.6",
|
||||
"elm-format" => "0.8.3",
|
||||
"gitless" => "0.8.8",
|
||||
"infrakit" => "0.5",
|
||||
"riff" => "0.5.0",
|
||||
"telegram-cli" => "1.3.1",
|
||||
"volta" => "0.8.6",
|
||||
}.freeze
|
||||
|
||||
# version_prefix = stable_version_string.sub(/\d+$/, "")
|
||||
# version_prefix = stable_version_string.split(".")[0..1].join(".")
|
||||
# version_prefix = stable.version.major_minor
|
||||
|
||||
def audit_specs
|
||||
problem "Head-only (no stable download)" if head_only?(formula)
|
||||
@ -753,11 +753,9 @@ module Homebrew
|
||||
|
||||
stable_version_string = stable.version.to_s
|
||||
stable_url_version = Version.parse(stable.url)
|
||||
_, stable_url_minor_version, = stable_url_version.to_s
|
||||
.split(".", 3)
|
||||
.map(&:to_i)
|
||||
stable_url_minor_version = stable_url_version.minor.to_i
|
||||
|
||||
formula_suffix = stable_version_string.split(".").last.to_i
|
||||
formula_suffix = stable.version.patch.to_i
|
||||
throttled_rate = THROTTLED_FORMULAE[formula.name]
|
||||
if throttled_rate && formula_suffix.modulo(throttled_rate).nonzero?
|
||||
problem "should only be updated every #{throttled_rate} releases on multiples of #{throttled_rate}"
|
||||
@ -771,7 +769,7 @@ module Homebrew
|
||||
|
||||
problem "Stable version URLs should not contain #{matched}"
|
||||
when %r{download\.gnome\.org/sources}, %r{ftp\.gnome\.org/pub/GNOME/sources}i
|
||||
version_prefix = stable_version_string.split(".")[0..1].join(".")
|
||||
version_prefix = stable.version.major_minor
|
||||
return if GNOME_DEVEL_ALLOWLIST[formula.name] == version_prefix
|
||||
return if stable_url_version < Version.create("1.0")
|
||||
return if stable_url_minor_version.even?
|
||||
@ -781,7 +779,7 @@ module Homebrew
|
||||
return if stable_url_minor_version.even?
|
||||
|
||||
problem "#{stable.version} is a development release"
|
||||
when %r{^https://github.com/([\w-]+)/([\w-]+)/}
|
||||
when %r{^https://github.com/([\w-]+)/([\w-]+)}
|
||||
owner = Regexp.last_match(1)
|
||||
repo = Regexp.last_match(2)
|
||||
tag = url.match(%r{^https://github\.com/[\w-]+/[\w-]+/archive/([^/]+)\.(tar\.gz|zip)$})
|
||||
@ -790,10 +788,11 @@ module Homebrew
|
||||
tag ||= url.match(%r{^https://github\.com/[\w-]+/[\w-]+/releases/download/([^/]+)/})
|
||||
.to_a
|
||||
.second
|
||||
tag ||= formula.stable.specs[:tag]
|
||||
|
||||
begin
|
||||
if @online && (release = GitHub.open_api("#{GitHub::API_URL}/repos/#{owner}/#{repo}/releases/tags/#{tag}"))
|
||||
if release["prerelease"] && !GITHUB_PRERELEASE_ALLOWLIST.include?(formula.name)
|
||||
if release["prerelease"] && (GITHUB_PRERELEASE_ALLOWLIST[formula.name] != formula.version)
|
||||
problem "#{tag} is a GitHub prerelease"
|
||||
elsif release["draft"]
|
||||
problem "#{tag} is a GitHub draft"
|
||||
|
||||
@ -493,10 +493,8 @@ module Homebrew
|
||||
end
|
||||
end
|
||||
|
||||
def check_open_pull_requests(formula, tap_full_name)
|
||||
# check for open requests
|
||||
pull_requests = GitHub.fetch_pull_requests(formula.name, tap_full_name, state: "open")
|
||||
check_for_duplicate_pull_requests(pull_requests, args: args)
|
||||
def check_open_pull_requests(formula, tap_full_name, args:)
|
||||
GitHub.check_for_duplicate_pull_requests(formula.name, tap_full_name, state: "open", args: args)
|
||||
end
|
||||
|
||||
def check_closed_pull_requests(formula, tap_full_name, version: nil, url: nil, tag: nil, args:)
|
||||
@ -506,28 +504,7 @@ module Homebrew
|
||||
version = Version.detect(url, specs)
|
||||
end
|
||||
# if we haven't already found open requests, try for an exact match across closed requests
|
||||
pull_requests = GitHub.fetch_pull_requests("#{formula.name} #{version}", tap_full_name, state: "closed")
|
||||
check_for_duplicate_pull_requests(pull_requests, args: args)
|
||||
end
|
||||
|
||||
def check_for_duplicate_pull_requests(pull_requests, args:)
|
||||
return if pull_requests.blank?
|
||||
|
||||
duplicates_message = <<~EOS
|
||||
These pull requests may be duplicates:
|
||||
#{pull_requests.map { |pr| "#{pr["title"]} #{pr["html_url"]}" }.join("\n")}
|
||||
EOS
|
||||
error_message = "Duplicate PRs should not be opened. Use --force to override this error."
|
||||
if args.force? && !args.quiet?
|
||||
opoo duplicates_message
|
||||
elsif !args.force? && args.quiet?
|
||||
odie error_message
|
||||
elsif !args.force?
|
||||
odie <<~EOS
|
||||
#{duplicates_message.chomp}
|
||||
#{error_message}
|
||||
EOS
|
||||
end
|
||||
GitHub.check_for_duplicate_pull_requests("#{formula.name} #{version}", tap_full_name, state: "closed", args: args)
|
||||
end
|
||||
|
||||
def alias_update_pair(formula, new_formula_version)
|
||||
|
||||
@ -50,7 +50,7 @@ module Homebrew
|
||||
on:
|
||||
push:
|
||||
branches: master
|
||||
pull_request: []
|
||||
pull_request:
|
||||
jobs:
|
||||
test-bot:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@ -35,6 +35,6 @@ module Homebrew
|
||||
ohai "git add"
|
||||
safe_system "git", "add", SPDX::JSON_PATH
|
||||
ohai "git commit"
|
||||
system "git", "commit", "--message", "data/spdx.json: update to #{latest_tag}"
|
||||
system "git", "commit", "--message", "data/spdx.json: update to #{SPDX.latest_tag}"
|
||||
end
|
||||
end
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require "json"
|
||||
require "rexml/document"
|
||||
require "time"
|
||||
require "unpack_strategy"
|
||||
require "lazy_object"
|
||||
@ -38,6 +37,11 @@ class AbstractDownloadStrategy
|
||||
# Download and cache the resource as {#cached_location}.
|
||||
def fetch; end
|
||||
|
||||
# TODO: Deprecate once we have an explicitly documented alternative.
|
||||
def shutup!
|
||||
@quiet = true
|
||||
end
|
||||
|
||||
def puts(*args)
|
||||
super(*args) unless quiet?
|
||||
end
|
||||
@@ -518,9 +522,14 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
  end

  def source_modified_time
    out, = system_command("svn", args: ["info", "--xml"], chdir: cached_location)
    xml = REXML::Document.new(out)
    Time.parse REXML::XPath.first(xml, "//date/text()").to_s
    time = if Version.create(Utils.svn_version) >= Version.create("1.9")
      out, = system_command("svn", args: ["info", "--show-item", "last-changed-date"], chdir: cached_location)
      out
    else
      out, = system_command("svn", args: ["info"], chdir: cached_location)
      out[/^Last Changed Date: (.+)$/, 1]
    end
    Time.parse time
  end

  def last_commit

@ -146,9 +146,9 @@ module Homebrew
|
||||
description: "Use this personal access token for the GitHub API, for features such as " \
|
||||
"`brew search`. You can create one at <https://github.com/settings/tokens>. If set, " \
|
||||
"GitHub will allow you a greater number of API requests. For more information, see: " \
|
||||
"<https://developer.github.com/v3/#rate-limiting>\n\n *Note:* Homebrew doesn't " \
|
||||
"require permissions for any of the scopes, but some developer commands may require " \
|
||||
"additional permissions.",
|
||||
"<https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting>.\n\n" \
|
||||
" *Note:* Homebrew doesn't require permissions for any of the scopes, but some developer " \
|
||||
"commands may require additional permissions.",
|
||||
},
|
||||
HOMEBREW_GITHUB_API_USERNAME: {
|
||||
description: "Use this username for authentication with the GitHub API, for features " \
|
||||
|
||||
@ -8,15 +8,10 @@ class SystemConfig
|
||||
# java_home doesn't exist on all macOSs; it might be missing on older versions.
|
||||
return "N/A" unless File.executable? "/usr/libexec/java_home"
|
||||
|
||||
out, _, status = system_command("/usr/libexec/java_home", args: ["--xml", "--failfast"], print_stderr: false)
|
||||
return "N/A" unless status.success?
|
||||
result = system_command("/usr/libexec/java_home", args: ["--xml", "--failfast"], print_stderr: false)
|
||||
return "N/A" unless result.success?
|
||||
|
||||
javas = []
|
||||
xml = REXML::Document.new(out)
|
||||
REXML::XPath.each(xml, "//key[text()='JVMVersion']/following-sibling::string") do |item|
|
||||
javas << item.text
|
||||
end
|
||||
javas.uniq.join(", ")
|
||||
result.plist.map { |jvm| jvm["JVMVersion"] }.uniq.join(", ")
|
||||
end
|
||||
|
||||
def describe_homebrew_ruby
|
||||
|
||||
@ -10,11 +10,14 @@ require "pp"
|
||||
require_relative "load_path"
|
||||
|
||||
require "rubygems"
|
||||
# Only require "core_ext" here to ensure we're only requiring the minimum of
|
||||
# what we need.
|
||||
require "active_support/core_ext/object/blank"
|
||||
require "active_support/core_ext/numeric/time"
|
||||
require "active_support/core_ext/object/try"
|
||||
require "active_support/core_ext/array/access"
|
||||
require "active_support/i18n"
|
||||
require "active_support/inflector/inflections"
|
||||
require "active_support/core_ext/string/inflections"
|
||||
require "active_support/core_ext/array/conversions"
|
||||
|
||||
I18n.backend.available_locales # Initialize locales so they can be overwritten.
|
||||
I18n.backend.store_translations :en, support: { array: { last_word_connector: " and " } }
|
||||
|
||||
@@ -1,27 +1,56 @@
# frozen_string_literal: true

# Representation of a system locale.
#
# Used to compare the system language and languages defined using cask `language` stanza.
#
# @api private
class Locale
  # Error when a string cannot be parsed to a `Locale`.
  class ParserError < StandardError
  end

  LANGUAGE_REGEX = /(?:[a-z]{2,3})/.freeze # ISO 639-1 or ISO 639-2
  REGION_REGEX = /(?:[A-Z]{2}|\d{3})/.freeze # ISO 3166-1 or UN M.49
  SCRIPT_REGEX = /(?:[A-Z][a-z]{3})/.freeze # ISO 15924
  # ISO 639-1 or ISO 639-2
  LANGUAGE_REGEX = /(?:[a-z]{2,3})/.freeze
  private_constant :LANGUAGE_REGEX

  # ISO 3166-1 or UN M.49
  REGION_REGEX = /(?:[A-Z]{2}|\d{3})/.freeze
  private_constant :REGION_REGEX

  # ISO 15924
  SCRIPT_REGEX = /(?:[A-Z][a-z]{3})/.freeze
  private_constant :SCRIPT_REGEX

  LOCALE_REGEX = /\A((?:#{LANGUAGE_REGEX}|#{REGION_REGEX}|#{SCRIPT_REGEX})(?:-|$)){1,3}\Z/.freeze
  private_constant :LOCALE_REGEX

  def self.parse(string)
    string = string.to_s

    raise ParserError, "'#{string}' cannot be parsed to a #{self}" unless string.match?(LOCALE_REGEX)

    scan = proc do |regex|
      string.scan(/(?:-|^)(#{regex})(?:-|$)/).flatten.first
    if locale = try_parse(string)
      return locale
    end

    language = scan.call(LANGUAGE_REGEX)
    region = scan.call(REGION_REGEX)
    script = scan.call(SCRIPT_REGEX)
    raise ParserError, "'#{string}' cannot be parsed to a #{self}"
  end

  def self.try_parse(string)
    return if string.blank?

    scanner = StringScanner.new(string)

    if language = scanner.scan(LANGUAGE_REGEX)
      sep = scanner.scan(/-/)
      return if (sep && scanner.eos?) || (sep.nil? && !scanner.eos?)
    end

    if region = scanner.scan(REGION_REGEX)
      sep = scanner.scan(/-/)
      return if (sep && scanner.eos?) || (sep.nil? && !scanner.eos?)
    end

    script = scanner.scan(SCRIPT_REGEX)

    return unless scanner.eos?

    new(language, region, script)
  end
@@ -46,7 +75,10 @@ class Locale
  end

  def include?(other)
    other = self.class.parse(other) unless other.is_a?(self.class)
    unless other.is_a?(self.class)
      other = self.class.try_parse(other)
      return false if other.nil?
    end

    [:language, :region, :script].all? do |var|
      if other.public_send(var).nil?
@@ -58,12 +90,14 @@
  end

  def eql?(other)
    other = self.class.parse(other) unless other.is_a?(self.class)
    unless other.is_a?(self.class)
      other = self.class.try_parse(other)
      return false if other.nil?
    end

    [:language, :region, :script].all? do |var|
      public_send(var) == other.public_send(var)
    end
  rescue ParserError
    false
  end
  alias == eql?

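An illustrative comparison of the two entry points (not from the diff; the results follow from the regexes and the StringScanner logic above):

  Locale.try_parse("zh-CN")  # => a Locale with language "zh" and region "CN"
  Locale.try_parse("ZH-CN")  # => nil, nothing raised
  Locale.parse("ZH-CN")      # raises Locale::ParserError
  Locale.new("zh", "CN", nil).include?("zh")  # => true; parts missing from the argument are ignored
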
@ -14,6 +14,7 @@ OFFICIAL_CMD_TAPS = {
|
||||
DEPRECATED_OFFICIAL_TAPS = %w[
|
||||
apache
|
||||
binary
|
||||
cask-eid
|
||||
completions
|
||||
devel-only
|
||||
dupes
|
||||
|
||||
@ -131,7 +131,6 @@ module ELFShim
|
||||
end
|
||||
|
||||
def patchelf_patcher
|
||||
Homebrew.install_bundler_gems!
|
||||
require "patchelf"
|
||||
@patchelf_patcher ||= PatchELF::Patcher.new to_s, on_error: :silent
|
||||
end
|
||||
|
||||
@@ -4,11 +4,20 @@ require "version"

class PkgVersion
  include Comparable
  extend Forwardable

  RX = /\A(.+?)(?:_(\d+))?\z/.freeze

  attr_reader :version, :revision

  delegate [ # rubocop:disable Layout/HashAlignment
    :major,
    :minor,
    :patch,
    :major_minor,
    :major_minor_patch,
  ] => :version

  def self.parse(path)
    _, version, revision = *path.match(RX)
    version = Version.create(version)

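A sketch of what the Forwardable delegation exposes; the expected values mirror the pkg_version_spec examples added later in this diff:

  pv = PkgVersion.parse("1.2.3_4")
  pv.version      # => Version.create("1.2.3")
  pv.revision     # => 4
  pv.major        # => Version::Token.create("1"), forwarded to pv.version
  pv.major_minor  # => Version.create("1.2")
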
@ -23,8 +23,11 @@ module Homebrew
|
||||
options |= f.build.used_options
|
||||
options &= f.options
|
||||
|
||||
build_from_source_formulae = args.build_from_source_formulae
|
||||
build_from_source_formulae << f.full_name if build_from_source
|
||||
|
||||
fi = FormulaInstaller.new(f, force_bottle: args.force_bottle?,
|
||||
build_from_source_formulae: args.build_from_source_formulae,
|
||||
build_from_source_formulae: build_from_source_formulae,
|
||||
debug: args.debug?, quiet: args.quiet?, verbose: args.verbose?)
|
||||
fi.options = options
|
||||
fi.force = args.force?
|
||||
@ -33,7 +36,6 @@ module Homebrew
|
||||
fi.interactive = args.interactive?
|
||||
fi.git = args.git?
|
||||
fi.link_keg ||= keg_was_linked if keg_had_linked_opt
|
||||
fi.build_from_source = true if build_from_source
|
||||
if tab
|
||||
fi.build_bottle ||= tab.built_bottle?
|
||||
fi.installed_as_dependency = tab.installed_as_dependency
|
||||
|
||||
@ -19,6 +19,9 @@ module RuboCop
|
||||
return
|
||||
end
|
||||
|
||||
@offensive_node = desc_call
|
||||
@offense_source_range = desc_call.source_range
|
||||
|
||||
desc = desc_call.first_argument
|
||||
|
||||
# Check if the desc is empty.
|
||||
|
||||
@ -35,6 +35,7 @@ module RuboCop
|
||||
go@1.11
|
||||
go@1.12
|
||||
go@1.13
|
||||
go@1.14
|
||||
haskell-stack
|
||||
ldc
|
||||
mlton
|
||||
|
||||
@ -82,6 +82,7 @@ false:
|
||||
- ./cleanup.rb
|
||||
- ./cli/parser.rb
|
||||
- ./cmd/--cache.rb
|
||||
- ./cmd/--caskroom.rb
|
||||
- ./cmd/--cellar.rb
|
||||
- ./cmd/--env.rb
|
||||
- ./cmd/--prefix.rb
|
||||
@ -489,9 +490,12 @@ false:
|
||||
- ./rubocops/cask/homepage_matches_url.rb
|
||||
- ./rubocops/cask/homepage_url_trailing_slash.rb
|
||||
- ./rubocops/cask/mixin/cask_help.rb
|
||||
- ./rubocops/cask/mixin/on_desc_stanza.rb
|
||||
- ./rubocops/cask/mixin/on_homepage_stanza.rb
|
||||
- ./rubocops/cask/no_dsl_version.rb
|
||||
- ./rubocops/cask/stanza_order.rb
|
||||
- ./rubocops/shared/desc_helper.rb
|
||||
- ./rubocops/shared/helper_functions.rb
|
||||
- ./searchable.rb
|
||||
- ./test/PATH_spec.rb
|
||||
- ./test/bash_spec.rb
|
||||
@ -512,6 +516,7 @@ false:
|
||||
- ./test/checksum_spec.rb
|
||||
- ./test/cleaner_spec.rb
|
||||
- ./test/cmd/--cache_spec.rb
|
||||
- ./test/cmd/--caskroom_spec.rb
|
||||
- ./test/cmd/--cellar_spec.rb
|
||||
- ./test/cmd/--env_spec.rb
|
||||
- ./test/cmd/--prefix_spec.rb
|
||||
@ -555,6 +560,7 @@ false:
|
||||
- ./test/compiler_failure_spec.rb
|
||||
- ./test/cxxstdlib_spec.rb
|
||||
- ./test/dependable_spec.rb
|
||||
- ./test/dependencies_helpers_spec.rb
|
||||
- ./test/descriptions_spec.rb
|
||||
- ./test/dev-cmd/bottle_spec.rb
|
||||
- ./test/dev-cmd/bump-formula-pr_spec.rb
|
||||
@ -617,6 +623,7 @@ false:
|
||||
- ./test/requirements/osxfuse_requirement_spec.rb
|
||||
- ./test/requirements_spec.rb
|
||||
- ./test/rubocop_spec.rb
|
||||
- ./test/rubocops/cask/desc_spec.rb
|
||||
- ./test/rubocops/cask/homepage_matches_url_spec.rb
|
||||
- ./test/rubocops/cask/homepage_url_trailing_slash_spec.rb
|
||||
- ./test/rubocops/cask/no_dsl_version_spec.rb
|
||||
@ -818,6 +825,7 @@ false:
|
||||
- ./test/utils/github_spec.rb
|
||||
- ./test/utils/popen_spec.rb
|
||||
- ./test/utils/shell_spec.rb
|
||||
- ./test/utils/spdx_spec.rb
|
||||
- ./test/utils/svn_spec.rb
|
||||
- ./test/utils/tty_spec.rb
|
||||
- ./test/version_spec.rb
|
||||
@ -837,13 +845,17 @@ true:
|
||||
- ./checksum.rb
|
||||
- ./cleaner.rb
|
||||
- ./cli/args.rb
|
||||
- ./compat/cli/parser.rb
|
||||
- ./compat/dependencies_helpers.rb
|
||||
- ./compat/extend/nil.rb
|
||||
- ./compat/extend/string.rb
|
||||
- ./compat/formula.rb
|
||||
- ./compat/os/mac.rb
|
||||
- ./compilers.rb
|
||||
- ./config.rb
|
||||
- ./context.rb
|
||||
- ./dependable.rb
|
||||
- ./dependencies_helpers.rb
|
||||
- ./dependency_collector.rb
|
||||
- ./description_cache_store.rb
|
||||
- ./descriptions.rb
|
||||
@ -883,6 +895,7 @@ true:
|
||||
- ./rubocops/cask/ast/cask_header.rb
|
||||
- ./rubocops/cask/ast/stanza.rb
|
||||
- ./rubocops/cask/constants/stanza.rb
|
||||
- ./rubocops/cask/desc.rb
|
||||
- ./rubocops/cask/extend/string.rb
|
||||
- ./rubocops/deprecate.rb
|
||||
- ./tap_constants.rb
|
||||
@ -894,6 +907,7 @@ true:
|
||||
- ./utils/notability.rb
|
||||
- ./utils/shebang.rb
|
||||
- ./utils/shell.rb
|
||||
- ./utils/spdx.rb
|
||||
- ./utils/svn.rb
|
||||
- ./utils/tty.rb
|
||||
- ./utils/user.rb
|
||||
|
||||
@ -17,5 +17,21 @@ module Homebrew::CLI
|
||||
def named_args; end
|
||||
|
||||
def force_bottle?; end
|
||||
|
||||
def debug?; end
|
||||
|
||||
def quiet?; end
|
||||
|
||||
def verbose?; end
|
||||
end
|
||||
|
||||
|
||||
class Parser
|
||||
module Compat
|
||||
include Kernel
|
||||
module DeprecatedArgs
|
||||
include Kernel
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -20,6 +20,16 @@ module Dependable
|
||||
def tags; end
|
||||
end
|
||||
|
||||
module DependenciesHelpers
|
||||
include Kernel
|
||||
|
||||
module Compat
|
||||
include Kernel
|
||||
|
||||
def args_includes_ignores(args); end
|
||||
end
|
||||
end
|
||||
|
||||
class Formula
|
||||
module Compat
|
||||
include Kernel
|
||||
|
||||
Library/Homebrew/sorbet/rbi/utils/spdx.rbi (11 changes, new file)
@ -0,0 +1,11 @@
|
||||
# typed: strict
|
||||
|
||||
module SPDX
|
||||
include Kernel
|
||||
|
||||
def spdx_data; end
|
||||
|
||||
def download_latest_license_data!(to: JSON_PATH); end
|
||||
|
||||
def curl_download(*args, to: nil, partial: true, **options); end
|
||||
end
|
||||
@ -2,7 +2,6 @@
|
||||
|
||||
require "hardware"
|
||||
require "software_spec"
|
||||
require "rexml/document"
|
||||
require "development_tools"
|
||||
require "extend/ENV"
|
||||
|
||||
|
||||
@@ -234,9 +234,10 @@ class Tap
    require "descriptions"

    if official? && DEPRECATED_OFFICIAL_TAPS.include?(repo)
      odie "#{name} was deprecated. This tap is now empty as all its formulae were migrated."
      odie "#{name} was deprecated. This tap is now empty and all its contents were either deleted or migrated."
    elsif user == "caskroom"
      odie "#{name} was moved. Tap homebrew/cask-#{repo} instead."
      new_repo = repo == "cask" ? "cask" : "cask-#{repo}"
      odie "#{name} was moved. Tap homebrew/#{new_repo} instead."
    end

    requested_remote = clone_target || default_remote

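The rename logic in plain Ruby, for illustration only (the tap names are examples, not taken from this diff):

  ["cask", "versions", "fonts"].each do |repo|
    new_repo = repo == "cask" ? "cask" : "cask-#{repo}"
    puts "caskroom/#{repo} was moved. Tap homebrew/#{new_repo} instead."
  end
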
@ -212,11 +212,19 @@ describe Cask::Audit, :cask do
|
||||
end
|
||||
end
|
||||
|
||||
context "when cask token contains version" do
|
||||
context "when cask token contains version designation" do
|
||||
let(:cask_token) { "token-beta" }
|
||||
|
||||
it "warns about version in token" do
|
||||
expect(subject).to warn_with(/token contains version/)
|
||||
it "warns about version in token if the cask is from an official tap" do
|
||||
allow(cask).to receive(:tap).and_return(Tap.fetch("homebrew/cask"))
|
||||
|
||||
expect(subject).to warn_with(/token contains version designation/)
|
||||
end
|
||||
|
||||
it "does not warn about version in token if the cask is from the `cask-versions` tap" do
|
||||
allow(cask).to receive(:tap).and_return(Tap.fetch("homebrew/cask-versions"))
|
||||
|
||||
expect(subject).not_to warn_with(/token contains version designation/)
|
||||
end
|
||||
end
|
||||
|
||||
@ -270,7 +278,6 @@ describe Cask::Audit, :cask do
|
||||
end
|
||||
|
||||
describe "locale validation" do
|
||||
let(:strict) { true }
|
||||
let(:cask) do
|
||||
tmp_cask "locale-cask-test", <<~RUBY
|
||||
cask 'locale-cask-test' do
|
||||
@ -310,7 +317,9 @@ describe Cask::Audit, :cask do
|
||||
|
||||
context "when cask locale is invalid" do
|
||||
it "error with invalid locale" do
|
||||
expect(subject).to fail_with(/locale ZH-CN, zh-, zh-cn are invalid/)
|
||||
expect(subject).to fail_with(/Locale 'ZH-CN' is invalid\./)
|
||||
expect(subject).to fail_with(/Locale 'zh-' is invalid\./)
|
||||
expect(subject).to fail_with(/Locale 'zh-cn' is invalid\./)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -26,6 +26,9 @@ describe Locale do
|
||||
expect { described_class.parse("zh-CN_Hans") }.to raise_error(Locale::ParserError)
|
||||
expect { described_class.parse("zhCN") }.to raise_error(Locale::ParserError)
|
||||
expect { described_class.parse("zh_Hans") }.to raise_error(Locale::ParserError)
|
||||
expect { described_class.parse("zh-") }.to raise_error(Locale::ParserError)
|
||||
expect { described_class.parse("ZH-CN") }.to raise_error(Locale::ParserError)
|
||||
expect { described_class.parse("zh-cn") }.to raise_error(Locale::ParserError)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -85,4 +85,46 @@ describe PkgVersion do
|
||||
expect(p1.hash).not_to eq(p4.hash)
|
||||
end
|
||||
end
|
||||
|
||||
describe "#version" do
|
||||
it "returns package version" do
|
||||
expect(described_class.parse("1.2.3_4").version).to be == Version.create("1.2.3")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#revision" do
|
||||
it "returns package revision" do
|
||||
expect(described_class.parse("1.2.3_4").revision).to be == 4
|
||||
end
|
||||
end
|
||||
|
||||
describe "#major" do
|
||||
it "returns major version token" do
|
||||
expect(described_class.parse("1.2.3_4").major).to be == Version::Token.create("1")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#minor" do
|
||||
it "returns minor version token" do
|
||||
expect(described_class.parse("1.2.3_4").minor).to be == Version::Token.create("2")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#patch" do
|
||||
it "returns patch version token" do
|
||||
expect(described_class.parse("1.2.3_4").patch).to be == Version::Token.create("3")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#major_minor" do
|
||||
it "returns major.minor version" do
|
||||
expect(described_class.parse("1.2.3_4").major_minor).to be == Version.create("1.2")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#major_minor_patch" do
|
||||
it "returns major.minor.patch version" do
|
||||
expect(described_class.parse("1.2.3_4").major_minor_patch).to be == Version.create("1.2.3")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -120,7 +120,7 @@ describe Utils do
|
||||
describe "::git_version" do
|
||||
it "returns nil when git is not available" do
|
||||
stub_const("HOMEBREW_SHIMS_PATH", HOMEBREW_PREFIX/"bin/shim")
|
||||
expect(described_class.git_path).to eq(nil)
|
||||
expect(described_class.git_version).to eq(nil)
|
||||
end
|
||||
|
||||
it "returns version of git when git is available" do
|
||||
|
||||
@ -17,6 +17,21 @@ describe Utils do
|
||||
end
|
||||
end
|
||||
|
||||
describe "#self.svn_version" do
|
||||
before do
|
||||
described_class.clear_svn_version_cache
|
||||
end
|
||||
|
||||
it "returns nil when svn is not available" do
|
||||
allow(described_class).to receive(:svn_available?).and_return(false)
|
||||
expect(described_class.svn_version).to eq(nil)
|
||||
end
|
||||
|
||||
it "returns version of svn when svn is available", :needs_svn do
|
||||
expect(described_class.svn_version).not_to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe "#self.svn_remote_exists?" do
|
||||
it "returns true when svn is not available" do
|
||||
allow(described_class).to receive(:svn_available?).and_return(false)
|
||||
|
||||
@ -183,6 +183,15 @@ describe Version do
|
||||
expect(described_class.create("1")).to be == 1
|
||||
end
|
||||
|
||||
it "can be compared against tokens" do
|
||||
expect(described_class.create("2.1.0-p194")).to be > Version::Token.create("2")
|
||||
expect(described_class.create("1")).to be == Version::Token.create("1")
|
||||
end
|
||||
|
||||
it "can be compared against Version::NULL_TOKEN" do
|
||||
expect(described_class.create("2.1.0-p194")).to be > Version::NULL_TOKEN
|
||||
end
|
||||
|
||||
specify "comparison returns nil for non-version" do
|
||||
v = described_class.create("1.0")
|
||||
expect(v <=> Object.new).to be nil
|
||||
@ -276,6 +285,76 @@ describe Version do
|
||||
expect(v2.to_str).to eq("HEAD-ffffff")
|
||||
end
|
||||
|
||||
describe "#major" do
|
||||
it "returns major version token" do
|
||||
expect(described_class.create("1").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3alpha").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3alpha4").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3beta4").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3pre4").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3rc4").major).to be == Version::Token.create("1")
|
||||
expect(described_class.create("1.2.3-p4").major).to be == Version::Token.create("1")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#minor" do
|
||||
it "returns minor version token" do
|
||||
expect(described_class.create("1").minor).to be nil
|
||||
expect(described_class.create("1.2").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3alpha").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3alpha4").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3beta4").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3pre4").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3rc4").minor).to be == Version::Token.create("2")
|
||||
expect(described_class.create("1.2.3-p4").minor).to be == Version::Token.create("2")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#patch" do
|
||||
it "returns patch version token" do
|
||||
expect(described_class.create("1").patch).to be nil
|
||||
expect(described_class.create("1.2").patch).to be nil
|
||||
expect(described_class.create("1.2.3").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3alpha").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3alpha4").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3beta4").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3pre4").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3rc4").patch).to be == Version::Token.create("3")
|
||||
expect(described_class.create("1.2.3-p4").patch).to be == Version::Token.create("3")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#major_minor" do
|
||||
it "returns major.minor version" do
|
||||
expect(described_class.create("1").major_minor).to be == described_class.create("1")
|
||||
expect(described_class.create("1.2").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3alpha").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3alpha4").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3beta4").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3pre4").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3rc4").major_minor).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3-p4").major_minor).to be == described_class.create("1.2")
|
||||
end
|
||||
end
|
||||
|
||||
describe "#major_minor_patch" do
|
||||
it "returns major.minor.patch version" do
|
||||
expect(described_class.create("1").major_minor_patch).to be == described_class.create("1")
|
||||
expect(described_class.create("1.2").major_minor_patch).to be == described_class.create("1.2")
|
||||
expect(described_class.create("1.2.3").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3alpha").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3alpha4").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3beta4").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3pre4").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3rc4").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
expect(described_class.create("1.2.3-p4").major_minor_patch).to be == described_class.create("1.2.3")
|
||||
end
|
||||
end
|
||||
|
||||
describe "::parse" do
|
||||
it "returns a NULL version when the URL cannot be parsed" do
|
||||
expect(described_class.parse("https://brew.sh/blah.tar")).to be_null
|
||||
|
||||
@@ -593,4 +593,35 @@ module GitHub
    [GitHub::AuthenticationFailedError, GitHub::HTTPNotFoundError,
     GitHub::RateLimitExceededError, GitHub::Error, JSON::ParserError].freeze
  end

  def fetch_pull_requests(query, tap_full_name, state: nil)
    GitHub.issues_for_formula(query, tap_full_name: tap_full_name, state: state).select do |pr|
      pr["html_url"].include?("/pull/") &&
        /(^|\s)#{Regexp.quote(query)}(:|\s|$)/i =~ pr["title"]
    end
  rescue GitHub::RateLimitExceededError => e
    opoo e.message
    []
  end

  def check_for_duplicate_pull_requests(query, tap_full_name, state:, args:)
    pull_requests = fetch_pull_requests(query, tap_full_name, state: state)
    return if pull_requests.blank?

    duplicates_message = <<~EOS
      These pull requests may be duplicates:
      #{pull_requests.map { |pr| "#{pr["title"]} #{pr["html_url"]}" }.join("\n")}
    EOS
    error_message = "Duplicate PRs should not be opened. Use --force to override this error."
    if args.force? && !args.quiet?
      opoo duplicates_message
    elsif !args.force? && args.quiet?
      odie error_message
    elsif !args.force?
      odie <<~EOS
        #{duplicates_message.chomp}
        #{error_message}
      EOS
    end
  end
end

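For reference, the two call sites earlier in this diff now funnel into this consolidated helper:

  GitHub.check_for_duplicate_pull_requests(formula.name, tap_full_name, state: "open", args: args)
  GitHub.check_for_duplicate_pull_requests("#{formula.name} #{version}", tap_full_name, state: "closed", args: args)
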
@ -13,6 +13,7 @@ module PyPI
|
||||
diffoscope
|
||||
dxpy
|
||||
molecule
|
||||
xonsh
|
||||
].freeze
|
||||
|
||||
@pipgrip_installed = nil
|
||||
|
||||
@ -11,7 +11,9 @@ test_ruby () {
|
||||
|
||||
setup-ruby-path() {
|
||||
local vendor_dir
|
||||
local vendor_ruby_root
|
||||
local vendor_ruby_path
|
||||
local vendor_ruby_terminfo
|
||||
local vendor_ruby_latest_version
|
||||
local vendor_ruby_current_version
|
||||
local usable_ruby
|
||||
|
||||
@@ -12,8 +12,11 @@ module SPDX
    @spdx_data ||= JSON.parse(JSON_PATH.read)
  end

  def latest_tag
    @latest_tag ||= GitHub.open_api(API_URL)["tag_name"]
  end

  def download_latest_license_data!(to: JSON_PATH)
    latest_tag = GitHub.open_api(API_URL)["tag_name"]
    data_url = "https://raw.githubusercontent.com/spdx/license-list-data/#{latest_tag}/json/licenses.json"
    curl_download(data_url, to: to, partial: false)
  end

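A sketch of the intended call sequence (method names from the diff; the tag value is hypothetical):

  SPDX.latest_tag                     # => "v3.9", memoized after the first GitHub API call
  SPDX.download_latest_license_data!  # fetches licenses.json for that tag into SPDX::JSON_PATH
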
@@ -2,13 +2,23 @@

module Utils
  def self.clear_svn_version_cache
    remove_instance_variable(:@svn) if instance_variable_defined?(:@svn)
    remove_instance_variable(:@svn_available) if defined?(@svn_available)
    remove_instance_variable(:@svn_version) if defined?(@svn_version)
  end

  def self.svn_available?
    return @svn if instance_variable_defined?(:@svn)
    return @svn_available if defined?(@svn_available)

    @svn = quiet_system HOMEBREW_SHIMS_PATH/"scm/svn", "--version"
    @svn_available = quiet_system HOMEBREW_SHIMS_PATH/"scm/svn", "--version"
  end

  def self.svn_version
    return unless svn_available?
    return @svn_version if defined?(@svn_version)

    @svn_version = Utils.popen_read(
      HOMEBREW_SHIMS_PATH/"scm/svn", "--version"
    ).chomp[/svn, version (\d+(?:\.\d+)*)/, 1]
  end

  def self.svn_remote_exists?(url)

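The move from `instance_variable_defined?(:@svn)` to `defined?(@svn_available)` keeps the same idea: cache the result even when it is false. A minimal standalone illustration (not Homebrew code) of why plain `||=` would not be enough here:

  def svn_available?
    return @svn_available if defined?(@svn_available) # also short-circuits when the cached value is false
    @svn_available = system("svn", "--version", out: File::NULL, err: File::NULL)
  end
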
Library/Homebrew/vendor/bundle/bundler/setup.rb (10 changes, vendored)
@ -51,13 +51,13 @@ $:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel-1.19.2/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel_tests-3.1.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parser-2.7.1.4/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rainbow-3.0.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-runtime-0.5.5823/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-runtime-0.5.5866/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parlour-4.0.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/patchelf-1.2.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/plist-3.5.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/pry-0.13.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/rdiscount-2.2.0.1"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rdiscount-2.2.0.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/rdiscount-2.2.0.2"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rdiscount-2.2.0.2/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/regexp_parser-1.7.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rexml-3.2.4/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ronn-0.7.3/lib"
|
||||
@ -76,7 +76,7 @@ $:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-0.88.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-performance-1.7.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-rspec-1.42.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ruby-macho-2.2.0/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-static-0.5.5823-universal-darwin-19/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-0.5.5823/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-static-0.5.5866-universal-darwin-19/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-0.5.5866/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/thor-1.0.1/lib"
|
||||
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/tapioca-0.4.1/lib"
|
||||
|
||||
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata.rb (37 changes, vendored, new file)
@ -0,0 +1,37 @@
|
||||
# BinData -- Binary data manipulator.
|
||||
# Copyright (c) 2007 - 2018 Dion Mendel.
|
||||
|
||||
require 'bindata/version'
|
||||
require 'bindata/array'
|
||||
require 'bindata/bits'
|
||||
require 'bindata/buffer'
|
||||
require 'bindata/choice'
|
||||
require 'bindata/count_bytes_remaining'
|
||||
require 'bindata/delayed_io'
|
||||
require 'bindata/float'
|
||||
require 'bindata/int'
|
||||
require 'bindata/primitive'
|
||||
require 'bindata/record'
|
||||
require 'bindata/rest'
|
||||
require 'bindata/skip'
|
||||
require 'bindata/string'
|
||||
require 'bindata/stringz'
|
||||
require 'bindata/struct'
|
||||
require 'bindata/trace'
|
||||
require 'bindata/uint8_array'
|
||||
require 'bindata/virtual'
|
||||
require 'bindata/alignment'
|
||||
require 'bindata/warnings'
|
||||
|
||||
# = BinData
|
||||
#
|
||||
# A declarative way to read and write structured binary data.
|
||||
#
|
||||
# A full reference manual is available online at
|
||||
# https://github.com/dmendel/bindata/wiki
|
||||
#
|
||||
# == License
|
||||
#
|
||||
# BinData is released under the same license as Ruby.
|
||||
#
|
||||
# Copyright (c) 2007 - 2018 Dion Mendel.
|
||||
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/alignment.rb (79 changes, vendored, new file)
@ -0,0 +1,79 @@
|
||||
require 'bindata/base_primitive'
|
||||
|
||||
module BinData
|
||||
# Resets the stream alignment to the next byte. This is
|
||||
# only useful when using bit-based primitives.
|
||||
#
|
||||
# class MyRec < BinData::Record
|
||||
# bit4 :a
|
||||
# resume_byte_alignment
|
||||
# bit4 :b
|
||||
# end
|
||||
#
|
||||
# MyRec.read("\x12\x34") #=> {"a" => 1, "b" => 3}
|
||||
#
|
||||
class ResumeByteAlignment < BinData::Base
|
||||
def clear?; true; end
|
||||
def assign(val); end
|
||||
def snapshot; nil; end
|
||||
def do_num_bytes; 0; end
|
||||
|
||||
def do_read(io)
|
||||
io.reset_read_bits
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
io.flushbits
|
||||
end
|
||||
end
|
||||
|
||||
# A monkey patch to force byte-aligned primitives to
|
||||
# become bit-aligned. This allows them to be used at
|
||||
# non byte based boundaries.
|
||||
#
|
||||
# class BitString < BinData::String
|
||||
# bit_aligned
|
||||
# end
|
||||
#
|
||||
# class MyRecord < BinData::Record
|
||||
# bit4 :preamble
|
||||
# bit_string :str, length: 2
|
||||
# end
|
||||
#
|
||||
module BitAligned
|
||||
class BitAlignedIO
|
||||
def initialize(io)
|
||||
@io = io
|
||||
end
|
||||
def readbytes(n)
|
||||
n.times.inject("") do |bytes, _|
|
||||
bytes << @io.readbits(8, :big).chr
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def bit_aligned?
|
||||
true
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
super(BitAlignedIO.new(io))
|
||||
end
|
||||
|
||||
def do_num_bytes
|
||||
super.to_f
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
value_to_binary_string(_value).each_byte { |v| io.writebits(v, 8, :big) }
|
||||
end
|
||||
end
|
||||
|
||||
def BasePrimitive.bit_aligned
|
||||
include BitAligned
|
||||
end
|
||||
|
||||
def Primitive.bit_aligned
|
||||
fail "'bit_aligned' is not needed for BinData::Primitives"
|
||||
end
|
||||
end
|
||||
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/array.rb (344 changes, vendored, new file)
@ -0,0 +1,344 @@
|
||||
require 'bindata/base'
|
||||
require 'bindata/dsl'
|
||||
|
||||
module BinData
|
||||
# An Array is a list of data objects of the same type.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# data = "\x03\x04\x05\x06\x07\x08\x09"
|
||||
#
|
||||
# obj = BinData::Array.new(type: :int8, initial_length: 6)
|
||||
# obj.read(data) #=> [3, 4, 5, 6, 7, 8]
|
||||
#
|
||||
# obj = BinData::Array.new(type: :int8,
|
||||
# read_until: -> { index == 1 })
|
||||
# obj.read(data) #=> [3, 4]
|
||||
#
|
||||
# obj = BinData::Array.new(type: :int8,
|
||||
# read_until: -> { element >= 6 })
|
||||
# obj.read(data) #=> [3, 4, 5, 6]
|
||||
#
|
||||
# obj = BinData::Array.new(type: :int8,
|
||||
# read_until: -> { array[index] + array[index - 1] == 13 })
|
||||
# obj.read(data) #=> [3, 4, 5, 6, 7]
|
||||
#
|
||||
# obj = BinData::Array.new(type: :int8, read_until: :eof)
|
||||
# obj.read(data) #=> [3, 4, 5, 6, 7, 8, 9]
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:type</tt>:: The symbol representing the data type of the
|
||||
# array elements. If the type is to have params
|
||||
# passed to it, then it should be provided as
|
||||
# <tt>[type_symbol, hash_params]</tt>.
|
||||
# <tt>:initial_length</tt>:: The initial length of the array.
|
||||
# <tt>:read_until</tt>:: While reading, elements are read until this
|
||||
# condition is true. This is typically used to
|
||||
# read an array until a sentinel value is found.
|
||||
# The variables +index+, +element+ and +array+
|
||||
# are made available to any lambda assigned to
|
||||
# this parameter. If the value of this parameter
|
||||
# is the symbol :eof, then the array will read
|
||||
# as much data from the stream as possible.
|
||||
#
|
||||
# Each data object in an array has the variable +index+ made available
|
||||
# to any lambda evaluated as a parameter of that data object.
|
||||
class Array < BinData::Base
|
||||
extend DSLMixin
|
||||
include Enumerable
|
||||
|
||||
dsl_parser :array
|
||||
arg_processor :array
|
||||
|
||||
mandatory_parameter :type
|
||||
optional_parameters :initial_length, :read_until
|
||||
mutually_exclusive_parameters :initial_length, :read_until
|
||||
|
||||
def initialize_shared_instance
|
||||
@element_prototype = get_parameter(:type)
|
||||
if get_parameter(:read_until) == :eof
|
||||
extend ReadUntilEOFPlugin
|
||||
elsif has_parameter?(:read_until)
|
||||
extend ReadUntilPlugin
|
||||
elsif has_parameter?(:initial_length)
|
||||
extend InitialLengthPlugin
|
||||
end
|
||||
|
||||
super
|
||||
end
|
||||
|
||||
def initialize_instance
|
||||
@element_list = nil
|
||||
end
|
||||
|
||||
def clear?
|
||||
@element_list.nil? || elements.all?(&:clear?)
|
||||
end
|
||||
|
||||
def assign(array)
|
||||
return if self.equal?(array) # prevent self assignment
|
||||
raise ArgumentError, "can't set a nil value for #{debug_name}" if array.nil?
|
||||
|
||||
@element_list = []
|
||||
concat(array)
|
||||
end
|
||||
|
||||
def snapshot
|
||||
elements.collect(&:snapshot)
|
||||
end
|
||||
|
||||
def find_index(obj)
|
||||
elements.index(obj)
|
||||
end
|
||||
alias index find_index
|
||||
|
||||
# Returns the first index of +obj+ in self.
|
||||
#
|
||||
# Uses equal? for the comparator.
|
||||
def find_index_of(obj)
|
||||
elements.index { |el| el.equal?(obj) }
|
||||
end
|
||||
|
||||
def push(*args)
|
||||
insert(-1, *args)
|
||||
self
|
||||
end
|
||||
alias << push
|
||||
|
||||
def unshift(*args)
|
||||
insert(0, *args)
|
||||
self
|
||||
end
|
||||
|
||||
def concat(array)
|
||||
insert(-1, *array.to_ary)
|
||||
self
|
||||
end
|
||||
|
||||
def insert(index, *objs)
|
||||
extend_array(index - 1)
|
||||
abs_index = (index >= 0) ? index : index + 1 + length
|
||||
|
||||
# insert elements before...
|
||||
new_elements = objs.map { new_element }
|
||||
elements.insert(index, *new_elements)
|
||||
|
||||
# ...assigning values
|
||||
objs.each_with_index do |obj, i|
|
||||
self[abs_index + i] = obj
|
||||
end
|
||||
|
||||
self
|
||||
end
|
||||
|
||||
# Returns the element at +index+.
|
||||
def [](arg1, arg2 = nil)
|
||||
if arg1.respond_to?(:to_int) && arg2.nil?
|
||||
slice_index(arg1.to_int)
|
||||
elsif arg1.respond_to?(:to_int) && arg2.respond_to?(:to_int)
|
||||
slice_start_length(arg1.to_int, arg2.to_int)
|
||||
elsif arg1.is_a?(Range) && arg2.nil?
|
||||
slice_range(arg1)
|
||||
else
|
||||
raise TypeError, "can't convert #{arg1} into Integer" unless arg1.respond_to?(:to_int)
|
||||
raise TypeError, "can't convert #{arg2} into Integer" unless arg2.respond_to?(:to_int)
|
||||
end
|
||||
end
|
||||
alias slice []
|
||||
|
||||
def slice_index(index)
|
||||
extend_array(index)
|
||||
at(index)
|
||||
end
|
||||
|
||||
def slice_start_length(start, length)
|
||||
elements[start, length]
|
||||
end
|
||||
|
||||
def slice_range(range)
|
||||
elements[range]
|
||||
end
|
||||
private :slice_index, :slice_start_length, :slice_range
|
||||
|
||||
# Returns the element at +index+. Unlike +slice+, if +index+ is out
|
||||
# of range the array will not be automatically extended.
|
||||
def at(index)
|
||||
elements[index]
|
||||
end
|
||||
|
||||
# Sets the element at +index+.
|
||||
def []=(index, value)
|
||||
extend_array(index)
|
||||
elements[index].assign(value)
|
||||
end
|
||||
|
||||
# Returns the first element, or the first +n+ elements, of the array.
|
||||
# If the array is empty, the first form returns nil, and the second
|
||||
# form returns an empty array.
|
||||
def first(n = nil)
|
||||
if n.nil? && empty?
|
||||
# explicitly return nil as arrays grow automatically
|
||||
nil
|
||||
elsif n.nil?
|
||||
self[0]
|
||||
else
|
||||
self[0, n]
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the last element, or the last +n+ elements, of the array.
|
||||
# If the array is empty, the first form returns nil, and the second
|
||||
# form returns an empty array.
|
||||
def last(n = nil)
|
||||
if n.nil?
|
||||
self[-1]
|
||||
else
|
||||
n = length if n > length
|
||||
self[-n, n]
|
||||
end
|
||||
end
|
||||
|
||||
def length
|
||||
elements.length
|
||||
end
|
||||
alias size length
|
||||
|
||||
def empty?
|
||||
length.zero?
|
||||
end
|
||||
|
||||
# Allow this object to be used in array context.
|
||||
def to_ary
|
||||
collect { |el| el }
|
||||
end
|
||||
|
||||
def each
|
||||
elements.each { |el| yield el }
|
||||
end
|
||||
|
||||
def debug_name_of(child) #:nodoc:
|
||||
index = find_index_of(child)
|
||||
"#{debug_name}[#{index}]"
|
||||
end
|
||||
|
||||
def offset_of(child) #:nodoc:
|
||||
index = find_index_of(child)
|
||||
sum = sum_num_bytes_below_index(index)
|
||||
|
||||
child.bit_aligned? ? sum.floor : sum.ceil
|
||||
end
|
||||
|
||||
def do_write(io) #:nodoc:
|
||||
elements.each { |el| el.do_write(io) }
|
||||
end
|
||||
|
||||
def do_num_bytes #:nodoc:
|
||||
sum_num_bytes_for_all_elements
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def extend_array(max_index)
|
||||
max_length = max_index + 1
|
||||
while elements.length < max_length
|
||||
append_new_element
|
||||
end
|
||||
end
|
||||
|
||||
def elements
|
||||
@element_list ||= []
|
||||
end
|
||||
|
||||
def append_new_element
|
||||
element = new_element
|
||||
elements << element
|
||||
element
|
||||
end
|
||||
|
||||
def new_element
|
||||
@element_prototype.instantiate(nil, self)
|
||||
end
|
||||
|
||||
def sum_num_bytes_for_all_elements
|
||||
sum_num_bytes_below_index(length)
|
||||
end
|
||||
|
||||
def sum_num_bytes_below_index(index)
|
||||
(0...index).inject(0) do |sum, i|
|
||||
nbytes = elements[i].do_num_bytes
|
||||
|
||||
if nbytes.is_a?(Integer)
|
||||
sum.ceil + nbytes
|
||||
else
|
||||
sum + nbytes
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
class ArrayArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params) #:nodoc:
|
||||
# ensure one of :initial_length and :read_until exists
|
||||
unless params.has_at_least_one_of?(:initial_length, :read_until)
|
||||
params[:initial_length] = 0
|
||||
end
|
||||
|
||||
params.warn_replacement_parameter(:length, :initial_length)
|
||||
params.warn_replacement_parameter(:read_length, :initial_length)
|
||||
params.must_be_integer(:initial_length)
|
||||
|
||||
params.merge!(obj_class.dsl_params)
|
||||
params.sanitize_object_prototype(:type)
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :read_until parameter
|
||||
module ReadUntilPlugin
|
||||
def do_read(io)
|
||||
loop do
|
||||
element = append_new_element
|
||||
element.do_read(io)
|
||||
variables = { index: self.length - 1, element: self.last, array: self }
|
||||
break if eval_parameter(:read_until, variables)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the read_until: :eof parameter
|
||||
module ReadUntilEOFPlugin
|
||||
def do_read(io)
|
||||
loop do
|
||||
element = append_new_element
|
||||
begin
|
||||
element.do_read(io)
|
||||
rescue EOFError, IOError
|
||||
elements.pop
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :initial_length parameter
|
||||
module InitialLengthPlugin
|
||||
def do_read(io)
|
||||
elements.each { |el| el.do_read(io) }
|
||||
end
|
||||
|
||||
def elements
|
||||
if @element_list.nil?
|
||||
@element_list = []
|
||||
eval_parameter(:initial_length).times do
|
||||
@element_list << new_element
|
||||
end
|
||||
end
|
||||
|
||||
@element_list
|
||||
end
|
||||
end
|
||||
end
|
||||
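As a quick orientation for reviewers, the array parameters documented above can be exercised like this; a minimal sketch against the vendored gem's public API, with the sample bytes and variable names chosen purely for illustration:

  require 'bindata'

  # Fixed number of elements via :initial_length.
  nums = BinData::Array.new(type: :uint16be, initial_length: 3)
  nums.read("\x00\x01\x00\x02\x00\x03")
  nums.snapshot                   #=> [1, 2, 3]

  # Stop at a sentinel; the lambda sees +index+, +element+ and +array+.
  until_zero = BinData::Array.new(type: :uint8, read_until: -> { element == 0 })
  until_zero.read("\x05\x07\x00\xff")
  until_zero.snapshot             #=> [5, 7, 0]

  # read_until: :eof consumes whatever remains in the stream.
  rest = BinData::Array.new(type: :uint8, read_until: :eof)
  rest.read("\x01\x02\x03")
  rest.snapshot                   #=> [1, 2, 3]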
335
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/base.rb
vendored
Normal file
@ -0,0 +1,335 @@
|
||||
require 'bindata/framework'
|
||||
require 'bindata/io'
|
||||
require 'bindata/lazy'
|
||||
require 'bindata/name'
|
||||
require 'bindata/params'
|
||||
require 'bindata/registry'
|
||||
require 'bindata/sanitize'
|
||||
|
||||
module BinData
|
||||
# This is the abstract base class for all data objects.
|
||||
class Base
|
||||
extend AcceptedParametersPlugin
|
||||
include Framework
|
||||
include RegisterNamePlugin
|
||||
|
||||
class << self
|
||||
# Instantiates this class and reads from +io+, returning the newly
|
||||
# created data object. +args+ will be used when instantiating.
|
||||
def read(io, *args, &block)
|
||||
obj = self.new(*args)
|
||||
obj.read(io, &block)
|
||||
obj
|
||||
end
|
||||
|
||||
# The arg processor for this class.
|
||||
def arg_processor(name = nil)
|
||||
@arg_processor ||= nil
|
||||
|
||||
if name
|
||||
@arg_processor = "#{name}_arg_processor".gsub(/(?:^|_)(.)/) { $1.upcase }.to_sym
|
||||
elsif @arg_processor.is_a? Symbol
|
||||
@arg_processor = BinData.const_get(@arg_processor).new
|
||||
elsif @arg_processor.nil?
|
||||
@arg_processor = superclass.arg_processor
|
||||
else
|
||||
@arg_processor
|
||||
end
|
||||
end
|
||||
|
||||
# The name of this class as used by Records, Arrays etc.
|
||||
def bindata_name
|
||||
RegisteredClasses.underscore_name(name)
|
||||
end
|
||||
|
||||
# Call this method if this class is abstract and not to be used.
|
||||
def unregister_self
|
||||
RegisteredClasses.unregister(name)
|
||||
end
|
||||
|
||||
# Registers all subclasses of this class for use
|
||||
def register_subclasses #:nodoc:
|
||||
singleton_class.send(:undef_method, :inherited)
|
||||
define_singleton_method(:inherited) do |subclass|
|
||||
RegisteredClasses.register(subclass.name, subclass)
|
||||
register_subclasses
|
||||
end
|
||||
end
|
||||
|
||||
private :unregister_self, :register_subclasses
|
||||
end
|
||||
|
||||
# Register all subclasses of this class.
|
||||
register_subclasses
|
||||
|
||||
# Set the initial arg processor.
|
||||
arg_processor :base
|
||||
|
||||
# Creates a new data object.
|
||||
#
|
||||
# Args are optional, but if present, must be in the following order.
|
||||
#
|
||||
# +value+ is a value that is +assign+ed immediately after initialization.
|
||||
#
|
||||
# +parameters+ is a hash containing symbol keys. Some parameters may
|
||||
# reference callable objects (methods or procs).
|
||||
#
|
||||
# +parent+ is the parent data object (e.g. struct, array, choice) this
|
||||
# object resides under.
|
||||
#
|
||||
def initialize(*args)
|
||||
value, @params, @parent = extract_args(args)
|
||||
|
||||
initialize_shared_instance
|
||||
initialize_instance
|
||||
assign(value) if value
|
||||
end
|
||||
|
||||
attr_accessor :parent
|
||||
protected :parent=
|
||||
|
||||
# Creates a new data object based on this instance.
|
||||
#
|
||||
# All parameters will be be duplicated. Use this method
|
||||
# when creating multiple objects with the same parameters.
|
||||
def new(value = nil, parent = nil)
|
||||
obj = clone
|
||||
obj.parent = parent if parent
|
||||
obj.initialize_instance
|
||||
obj.assign(value) if value
|
||||
|
||||
obj
|
||||
end
|
||||
|
||||
# Returns the result of evaluating the parameter identified by +key+.
|
||||
#
|
||||
# +overrides+ is an optional +parameters+ like hash that allow the
|
||||
# parameters given at object construction to be overridden.
|
||||
#
|
||||
# Returns nil if +key+ does not refer to any parameter.
|
||||
def eval_parameter(key, overrides = nil)
|
||||
value = get_parameter(key)
|
||||
if value.is_a?(Symbol) || value.respond_to?(:arity)
|
||||
lazy_evaluator.lazy_eval(value, overrides)
|
||||
else
|
||||
value
|
||||
end
|
||||
end
|
||||
|
||||
# Returns a lazy evaluator for this object.
|
||||
def lazy_evaluator #:nodoc:
|
||||
@lazy ||= LazyEvaluator.new(self)
|
||||
end
|
||||
|
||||
# Returns the parameter referenced by +key+.
|
||||
# Use this method if you are sure the parameter is not to be evaluated.
|
||||
# You most likely want #eval_parameter.
|
||||
def get_parameter(key)
|
||||
@params[key]
|
||||
end
|
||||
|
||||
# Returns whether +key+ exists in the +parameters+ hash.
|
||||
def has_parameter?(key)
|
||||
@params.has_parameter?(key)
|
||||
end
|
||||
|
||||
# Resets the internal state to that of a newly created object.
|
||||
def clear
|
||||
initialize_instance
|
||||
end
|
||||
|
||||
# Reads data into this data object.
|
||||
def read(io, &block)
|
||||
io = BinData::IO::Read.new(io) unless BinData::IO::Read === io
|
||||
|
||||
start_read do
|
||||
clear
|
||||
do_read(io)
|
||||
end
|
||||
block.call(self) if block_given?
|
||||
|
||||
self
|
||||
end
|
||||
|
||||
# Writes the value for this data object to +io+.
|
||||
def write(io, &block)
|
||||
io = BinData::IO::Write.new(io) unless BinData::IO::Write === io
|
||||
|
||||
do_write(io)
|
||||
io.flush
|
||||
|
||||
block.call(self) if block_given?
|
||||
|
||||
self
|
||||
end
|
||||
|
||||
# Returns the number of bytes it will take to write this data object.
|
||||
def num_bytes
|
||||
do_num_bytes.ceil
|
||||
end
|
||||
|
||||
# Returns the string representation of this data object.
|
||||
def to_binary_s(&block)
|
||||
io = BinData::IO.create_string_io
|
||||
write(io, &block)
|
||||
io.string
|
||||
end
|
||||
|
||||
# Returns the hexadecimal string representation of this data object.
|
||||
def to_hex(&block)
|
||||
to_binary_s(&block).unpack('H*')[0]
|
||||
end
|
||||
|
||||
# Return a human readable representation of this data object.
|
||||
def inspect
|
||||
snapshot.inspect
|
||||
end
|
||||
|
||||
# Return a string representing this data object.
|
||||
def to_s
|
||||
snapshot.to_s
|
||||
end
|
||||
|
||||
# Work with Ruby's pretty-printer library.
|
||||
def pretty_print(pp) #:nodoc:
|
||||
pp.pp(snapshot)
|
||||
end
|
||||
|
||||
# Override and delegate =~ as it is defined in Object.
|
||||
def =~(other)
|
||||
snapshot =~ other
|
||||
end
|
||||
|
||||
# Returns a user friendly name of this object for debugging purposes.
|
||||
def debug_name
|
||||
if @parent
|
||||
@parent.debug_name_of(self)
|
||||
else
|
||||
"obj"
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the offset (in bytes) of this object with respect to its most
|
||||
# distant ancestor.
|
||||
def abs_offset
|
||||
if @parent
|
||||
@parent.abs_offset + @parent.offset_of(self)
|
||||
else
|
||||
0
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the offset (in bytes) of this object with respect to its parent.
|
||||
def rel_offset
|
||||
if @parent
|
||||
@parent.offset_of(self)
|
||||
else
|
||||
0
|
||||
end
|
||||
end
|
||||
|
||||
def ==(other) #:nodoc:
|
||||
# double dispatch
|
||||
other == snapshot
|
||||
end
|
||||
|
||||
# A version of +respond_to?+ used by the lazy evaluator. It doesn't
|
||||
# reinvoke the evaluator so as to avoid infinite evaluation loops.
|
||||
def safe_respond_to?(symbol, include_private = false) #:nodoc:
|
||||
base_respond_to?(symbol, include_private)
|
||||
end
|
||||
|
||||
alias base_respond_to? respond_to?
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def extract_args(args)
|
||||
self.class.arg_processor.extract_args(self.class, args)
|
||||
end
|
||||
|
||||
def start_read
|
||||
top_level_set(:in_read, true)
|
||||
yield
|
||||
ensure
|
||||
top_level_set(:in_read, false)
|
||||
end
|
||||
|
||||
# Is this object tree currently being read? Used by BasePrimitive.
|
||||
def reading?
|
||||
top_level_get(:in_read)
|
||||
end
|
||||
|
||||
def top_level_set(sym, value)
|
||||
top_level.instance_variable_set("@tl_#{sym}", value)
|
||||
end
|
||||
|
||||
def top_level_get(sym)
|
||||
tl = top_level
|
||||
tl.instance_variable_defined?("@tl_#{sym}") &&
|
||||
tl.instance_variable_get("@tl_#{sym}")
|
||||
end
|
||||
|
||||
def top_level
|
||||
if parent.nil?
|
||||
tl = self
|
||||
else
|
||||
tl = parent
|
||||
tl = tl.parent while tl.parent
|
||||
end
|
||||
|
||||
tl
|
||||
end
|
||||
|
||||
def binary_string(str)
|
||||
str.to_s.dup.force_encoding(Encoding::BINARY)
|
||||
end
|
||||
end
|
||||
|
||||
# ArgProcessors process the arguments passed to BinData::Base.new into
|
||||
# the form required to initialise the BinData object.
|
||||
#
|
||||
# Any passed parameters are sanitized so the BinData object doesn't
|
||||
# need to perform error checking on the parameters.
|
||||
class BaseArgProcessor
|
||||
@@empty_hash = Hash.new.freeze
|
||||
|
||||
# Takes the arguments passed to BinData::Base.new and
|
||||
# extracts [value, sanitized_parameters, parent].
|
||||
def extract_args(obj_class, obj_args)
|
||||
value, params, parent = separate_args(obj_class, obj_args)
|
||||
sanitized_params = SanitizedParameters.sanitize(params, obj_class)
|
||||
|
||||
[value, sanitized_params, parent]
|
||||
end
|
||||
|
||||
# Separates the arguments passed to BinData::Base.new into
|
||||
# [value, parameters, parent]. Called by #extract_args.
|
||||
def separate_args(_obj_class, obj_args)
|
||||
args = obj_args.dup
|
||||
value = parameters = parent = nil
|
||||
|
||||
if args.length > 1 && args.last.is_a?(BinData::Base)
|
||||
parent = args.pop
|
||||
end
|
||||
|
||||
if args.length > 0 && args.last.is_a?(Hash)
|
||||
parameters = args.pop
|
||||
end
|
||||
|
||||
if args.length > 0
|
||||
value = args.pop
|
||||
end
|
||||
|
||||
parameters ||= @@empty_hash
|
||||
|
||||
[value, parameters, parent]
|
||||
end
|
||||
|
||||
# Performs sanity checks on the given parameters.
|
||||
# This method converts the parameters to the form expected
|
||||
# by the data object.
|
||||
def sanitize_parameters!(obj_class, obj_params)
|
||||
end
|
||||
end
|
||||
end
|
||||
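Base itself is abstract, but its shared surface (class-level read, #write, #num_bytes, #to_binary_s, #to_hex) is easiest to see through any concrete subclass; a small sketch, not part of the diff:

  require 'bindata'

  obj = BinData::Uint16be.new(513)
  obj.num_bytes     #=> 2
  obj.to_binary_s   #=> "\x02\x01"
  obj.to_hex        #=> "0201"

  # Class-level read builds an instance and reads it from the IO/string.
  BinData::Uint16be.read("\x02\x01") == 513   #=> true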
248
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/base_primitive.rb
vendored
Normal file
@ -0,0 +1,248 @@
|
||||
require 'bindata/base'
|
||||
|
||||
module BinData
|
||||
# A BinData::BasePrimitive object is a container for a value that has a
|
||||
# particular binary representation. A value corresponds to a primitive type
|
||||
# such as an integer, float or string. Only one value can be contained by
|
||||
# this object. This value can be read from or written to an IO stream.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# obj = BinData::Uint8.new(initial_value: 42)
|
||||
# obj #=> 42
|
||||
# obj.assign(5)
|
||||
# obj #=> 5
|
||||
# obj.clear
|
||||
# obj #=> 42
|
||||
#
|
||||
# obj = BinData::Uint8.new(value: 42)
|
||||
# obj #=> 42
|
||||
# obj.assign(5)
|
||||
# obj #=> 42
|
||||
#
|
||||
# obj = BinData::Uint8.new(assert: 3)
|
||||
# obj.read("\005") #=> BinData::ValidityError: value is '5' but expected '3'
|
||||
#
|
||||
# obj = BinData::Uint8.new(assert: -> { value < 5 })
|
||||
# obj.read("\007") #=> BinData::ValidityError: value not as expected
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params include those for BinData::Base as well as:
|
||||
#
|
||||
# [<tt>:initial_value</tt>] This is the initial value to use before one is
|
||||
# either #read or explicitly set with #value=.
|
||||
# [<tt>:value</tt>] The object will always have this value.
|
||||
# Calls to #value= are ignored when
|
||||
# using this param. While reading, #value
|
||||
# will return the value of the data read from the
|
||||
# IO, not the result of the <tt>:value</tt> param.
|
||||
# [<tt>:assert</tt>] Raise an error unless the value read or assigned
|
||||
# meets this criteria. The variable +value+ is
|
||||
# made available to any lambda assigned to this
|
||||
# parameter. A boolean return indicates success
|
||||
# or failure. Any other return is compared to
|
||||
# the value just read in.
|
||||
# [<tt>:asserted_value</tt>] Equivalent to <tt>:assert</tt> and <tt>:value</tt>.
|
||||
#
|
||||
class BasePrimitive < BinData::Base
|
||||
unregister_self
|
||||
|
||||
optional_parameters :initial_value, :value, :assert, :asserted_value
|
||||
mutually_exclusive_parameters :initial_value, :value
|
||||
mutually_exclusive_parameters :asserted_value, :value, :assert
|
||||
|
||||
def initialize_shared_instance
|
||||
extend InitialValuePlugin if has_parameter?(:initial_value)
|
||||
extend ValuePlugin if has_parameter?(:value)
|
||||
extend AssertPlugin if has_parameter?(:assert)
|
||||
extend AssertedValuePlugin if has_parameter?(:asserted_value)
|
||||
super
|
||||
end
|
||||
|
||||
def initialize_instance
|
||||
@value = nil
|
||||
end
|
||||
|
||||
def clear? #:nodoc:
|
||||
@value.nil?
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil?
|
||||
|
||||
raw_val = val.respond_to?(:snapshot) ? val.snapshot : val
|
||||
@value =
|
||||
begin
|
||||
raw_val.dup
|
||||
rescue TypeError
|
||||
# can't dup Fixnums
|
||||
raw_val
|
||||
end
|
||||
end
|
||||
|
||||
def snapshot
|
||||
_value
|
||||
end
|
||||
|
||||
def value
|
||||
snapshot
|
||||
end
|
||||
|
||||
def value=(val)
|
||||
assign(val)
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||
child = snapshot
|
||||
child.respond_to?(symbol, include_private) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
child = snapshot
|
||||
if child.respond_to?(symbol)
|
||||
self.class.class_eval \
|
||||
"def #{symbol}(*args, &block);" \
|
||||
" snapshot.#{symbol}(*args, &block);" \
|
||||
"end"
|
||||
child.__send__(symbol, *args, &block)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def <=>(other)
|
||||
snapshot <=> other
|
||||
end
|
||||
|
||||
def eql?(other)
|
||||
# double dispatch
|
||||
other.eql?(snapshot)
|
||||
end
|
||||
|
||||
def hash
|
||||
snapshot.hash
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
@value = read_and_return_value(io)
|
||||
end
|
||||
|
||||
def do_write(io) #:nodoc:
|
||||
io.writebytes(value_to_binary_string(_value))
|
||||
end
|
||||
|
||||
def do_num_bytes #:nodoc:
|
||||
value_to_binary_string(_value).length
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
# The unmodified value of this data object. Note that #snapshot calls this
|
||||
# method. This indirection is so that #snapshot can be overridden in
|
||||
# subclasses to modify the presentation value.
|
||||
def _value
|
||||
@value != nil ? @value : sensible_default
|
||||
end
|
||||
|
||||
# Logic for the :value parameter
|
||||
module ValuePlugin
|
||||
def assign(val)
|
||||
# Ignored
|
||||
end
|
||||
|
||||
def _value
|
||||
reading? ? @value : eval_parameter(:value)
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :initial_value parameter
|
||||
module InitialValuePlugin
|
||||
def _value
|
||||
@value != nil ? @value : eval_parameter(:initial_value)
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :assert parameter
|
||||
module AssertPlugin
|
||||
def assign(val)
|
||||
super(val)
|
||||
assert!
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
super(io)
|
||||
assert!
|
||||
end
|
||||
|
||||
def assert!
|
||||
current_value = snapshot
|
||||
expected = eval_parameter(:assert, value: current_value)
|
||||
|
||||
msg =
|
||||
if !expected
|
||||
"value '#{current_value}' not as expected"
|
||||
elsif expected != true && current_value != expected
|
||||
"value is '#{current_value}' but expected '#{expected}'"
|
||||
else
|
||||
nil
|
||||
end
|
||||
|
||||
raise ValidityError, "#{msg} for #{debug_name}" if msg
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :asserted_value parameter
|
||||
module AssertedValuePlugin
|
||||
def assign(val)
|
||||
assert_value(val)
|
||||
super(val)
|
||||
end
|
||||
|
||||
def _value
|
||||
reading? ? @value : eval_parameter(:asserted_value)
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
super(io)
|
||||
assert!
|
||||
end
|
||||
|
||||
def assert!
|
||||
assert_value(snapshot)
|
||||
end
|
||||
|
||||
def assert_value(current_value)
|
||||
expected = eval_parameter(:asserted_value, value: current_value)
|
||||
if current_value != expected
|
||||
raise ValidityError,
|
||||
"value is '#{current_value}' but " \
|
||||
"expected '#{expected}' for #{debug_name}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
###########################################################################
|
||||
# To be implemented by subclasses
|
||||
|
||||
# Return the string representation that +val+ will take when written.
|
||||
def value_to_binary_string(val)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Read a number of bytes from +io+ and return the value they represent.
|
||||
def read_and_return_value(io)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Return a sensible default for this data.
|
||||
def sensible_default
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# To be implemented by subclasses
|
||||
###########################################################################
|
||||
end
|
||||
end
|
||||
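Of the parameters listed above, :asserted_value is the combination used most often for magic numbers; a brief sketch (the constant here is just an example value):

  require 'bindata'

  magic = BinData::Uint32be.new(asserted_value: 0xCAFEBABE)
  magic.to_binary_s               #=> "\xCA\xFE\xBA\xBE"
  magic.read("\xCA\xFE\xBA\xBE")  # passes the assertion
  magic.read("\x00\x00\x00\x00")  # raises BinData::ValidityError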
186
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/bits.rb
vendored
Normal file
@ -0,0 +1,186 @@
|
||||
require 'thread'
|
||||
require 'bindata/base_primitive'
|
||||
|
||||
module BinData
|
||||
# Defines a number of classes that contain a bit based integer.
|
||||
# The integer is defined by endian and number of bits.
|
||||
|
||||
module BitField #:nodoc: all
|
||||
@@mutex = Mutex.new
|
||||
|
||||
class << self
|
||||
def define_class(name, nbits, endian, signed = :unsigned)
|
||||
@@mutex.synchronize do
|
||||
unless BinData.const_defined?(name)
|
||||
new_class = Class.new(BinData::BasePrimitive)
|
||||
BitField.define_methods(new_class, nbits, endian.to_sym, signed.to_sym)
|
||||
RegisteredClasses.register(name, new_class)
|
||||
|
||||
BinData.const_set(name, new_class)
|
||||
end
|
||||
end
|
||||
|
||||
BinData.const_get(name)
|
||||
end
|
||||
|
||||
def define_methods(bit_class, nbits, endian, signed)
|
||||
bit_class.module_eval <<-END
|
||||
#{create_params_code(nbits)}
|
||||
|
||||
def assign(val)
|
||||
#{create_nbits_code(nbits)}
|
||||
#{create_clamp_code(nbits, signed)}
|
||||
super(val)
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
#{create_nbits_code(nbits)}
|
||||
val = _value
|
||||
#{create_int2uint_code(nbits, signed)}
|
||||
io.writebits(val, #{nbits}, :#{endian})
|
||||
end
|
||||
|
||||
def do_num_bytes
|
||||
#{create_nbits_code(nbits)}
|
||||
#{create_do_num_bytes_code(nbits)}
|
||||
end
|
||||
|
||||
def bit_aligned?
|
||||
true
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def read_and_return_value(io)
|
||||
#{create_nbits_code(nbits)}
|
||||
val = io.readbits(#{nbits}, :#{endian})
|
||||
#{create_uint2int_code(nbits, signed)}
|
||||
val
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
0
|
||||
end
|
||||
END
|
||||
end
|
||||
|
||||
def create_params_code(nbits)
|
||||
if nbits == :nbits
|
||||
"mandatory_parameter :nbits"
|
||||
else
|
||||
""
|
||||
end
|
||||
end
|
||||
|
||||
def create_nbits_code(nbits)
|
||||
if nbits == :nbits
|
||||
"nbits = eval_parameter(:nbits)"
|
||||
else
|
||||
""
|
||||
end
|
||||
end
|
||||
|
||||
def create_do_num_bytes_code(nbits)
|
||||
if nbits == :nbits
|
||||
"nbits / 8.0"
|
||||
else
|
||||
nbits / 8.0
|
||||
end
|
||||
end
|
||||
|
||||
def create_clamp_code(nbits, signed)
|
||||
if nbits == :nbits
|
||||
create_dynamic_clamp_code(signed)
|
||||
else
|
||||
create_fixed_clamp_code(nbits, signed)
|
||||
end
|
||||
end
|
||||
|
||||
def create_dynamic_clamp_code(signed)
|
||||
if signed == :signed
|
||||
max = "max = (1 << (nbits - 1)) - 1"
|
||||
min = "min = -(max + 1)"
|
||||
else
|
||||
max = "max = (1 << nbits) - 1"
|
||||
min = "min = 0"
|
||||
end
|
||||
|
||||
"#{max}; #{min}; val = (val < min) ? min : (val > max) ? max : val"
|
||||
end
|
||||
|
||||
def create_fixed_clamp_code(nbits, signed)
|
||||
if nbits == 1 && signed == :signed
|
||||
raise "signed bitfield must have more than one bit"
|
||||
end
|
||||
|
||||
if signed == :signed
|
||||
max = (1 << (nbits - 1)) - 1
|
||||
min = -(max + 1)
|
||||
else
|
||||
min = 0
|
||||
max = (1 << nbits) - 1
|
||||
end
|
||||
|
||||
clamp = "(val < #{min}) ? #{min} : (val > #{max}) ? #{max} : val"
|
||||
|
||||
if nbits == 1
|
||||
# allow single bits to be used as booleans
|
||||
clamp = "(val == true) ? 1 : (not val) ? 0 : #{clamp}"
|
||||
end
|
||||
|
||||
"val = #{clamp}"
|
||||
end
|
||||
|
||||
def create_int2uint_code(nbits, signed)
|
||||
if signed != :signed
|
||||
""
|
||||
elsif nbits == :nbits
|
||||
"val &= (1 << nbits) - 1"
|
||||
else
|
||||
"val &= #{(1 << nbits) - 1}"
|
||||
end
|
||||
end
|
||||
|
||||
def create_uint2int_code(nbits, signed)
|
||||
if signed != :signed
|
||||
""
|
||||
elsif nbits == :nbits
|
||||
"val -= (1 << nbits) if (val >= (1 << (nbits - 1)))"
|
||||
else
|
||||
"val -= #{1 << nbits} if (val >= #{1 << (nbits - 1)})"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Create classes for dynamic bitfields
|
||||
{
|
||||
"Bit" => :big,
|
||||
"BitLe" => :little,
|
||||
"Sbit" => [:big, :signed],
|
||||
"SbitLe" => [:little, :signed],
|
||||
}.each_pair { |name, args| BitField.define_class(name, :nbits, *args) }
|
||||
|
||||
# Create classes on demand
|
||||
module BitFieldFactory
|
||||
def const_missing(name)
|
||||
mappings = {
|
||||
/^Bit(\d+)$/ => :big,
|
||||
/^Bit(\d+)le$/ => :little,
|
||||
/^Sbit(\d+)$/ => [:big, :signed],
|
||||
/^Sbit(\d+)le$/ => [:little, :signed]
|
||||
}
|
||||
|
||||
mappings.each_pair do |regex, args|
|
||||
if regex =~ name.to_s
|
||||
nbits = $1.to_i
|
||||
return BitField.define_class(name, nbits, *args)
|
||||
end
|
||||
end
|
||||
|
||||
super(name)
|
||||
end
|
||||
end
|
||||
BinData.extend BitFieldFactory
|
||||
end
|
||||
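The generated Bit/Sbit classes are normally consumed through the record DSL; a minimal sketch with an invented record name:

  require 'bindata'

  class Flags < BinData::Record   # illustrative name, not from the diff
    bit1 :enabled                 # 1-bit fields read back as 0 or 1
    bit3 :priority
    bit4 :kind
  end

  f = Flags.read("\xB5")          # 0b1011_0101, most significant bit first
  f.enabled    #=> 1
  f.priority   #=> 3
  f.kind       #=> 5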
117
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/buffer.rb
vendored
Normal file
@ -0,0 +1,117 @@
|
||||
require 'bindata/base'
|
||||
require 'bindata/dsl'
|
||||
|
||||
module BinData
|
||||
# A Buffer is conceptually a substream within a data stream. It has a
|
||||
# defined size and it will always read or write the exact number of bytes to
|
||||
# fill the buffer. Short reads will skip over unused bytes and short writes
|
||||
# will pad the substream with "\0" bytes.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# obj = BinData::Buffer.new(length: 5, type: [:string, {value: "abc"}])
|
||||
# obj.to_binary_s #=> "abc\000\000"
|
||||
#
|
||||
#
|
||||
# class MyBuffer < BinData::Buffer
|
||||
# default_parameter length: 8
|
||||
#
|
||||
# endian :little
|
||||
#
|
||||
# uint16 :num1
|
||||
# uint16 :num2
|
||||
# # padding occurs here
|
||||
# end
|
||||
#
|
||||
# obj = MyBuffer.read("\001\000\002\000\000\000\000\000")
|
||||
# obj.num1 #=> 1
|
||||
# obj.num2 #=> 2
|
||||
# obj.raw_num_bytes #=> 4
|
||||
# obj.num_bytes #=> 8
|
||||
#
|
||||
#
|
||||
# class StringTable < BinData::Record
|
||||
# endian :little
|
||||
#
|
||||
# uint16 :table_size_in_bytes
|
||||
# buffer :strings, length: :table_size_in_bytes do
|
||||
# array read_until: :eof do
|
||||
# uint8 :len
|
||||
# string :str, length: :len
|
||||
# end
|
||||
# end
|
||||
# end
|
||||
#
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:length</tt>:: The number of bytes in the buffer.
|
||||
# <tt>:type</tt>:: The single type inside the buffer. Use a struct if
|
||||
# multiple fields are required.
|
||||
class Buffer < BinData::Base
|
||||
extend DSLMixin
|
||||
|
||||
dsl_parser :buffer
|
||||
arg_processor :buffer
|
||||
|
||||
mandatory_parameters :length, :type
|
||||
|
||||
def initialize_instance
|
||||
@type = get_parameter(:type).instantiate(nil, self)
|
||||
end
|
||||
|
||||
# The number of bytes used, ignoring the padding imposed by the buffer.
|
||||
def raw_num_bytes
|
||||
@type.num_bytes
|
||||
end
|
||||
|
||||
def clear?
|
||||
@type.clear?
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
@type.assign(val)
|
||||
end
|
||||
|
||||
def snapshot
|
||||
@type.snapshot
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||
@type.respond_to?(symbol, include_private) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
@type.__send__(symbol, *args, &block)
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
io.with_buffer(eval_parameter(:length)) do
|
||||
@type.do_read(io)
|
||||
end
|
||||
end
|
||||
|
||||
def do_write(io) #:nodoc:
|
||||
io.with_buffer(eval_parameter(:length)) do
|
||||
@type.do_write(io)
|
||||
end
|
||||
end
|
||||
|
||||
def do_num_bytes #:nodoc:
|
||||
eval_parameter(:length)
|
||||
end
|
||||
end
|
||||
|
||||
class BufferArgProcessor < BaseArgProcessor
|
||||
include MultiFieldArgSeparator
|
||||
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
params.merge!(obj_class.dsl_params)
|
||||
params.must_be_integer(:length)
|
||||
params.sanitize_object_prototype(:type)
|
||||
end
|
||||
end
|
||||
end
|
||||
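One behaviour worth calling out from the comment above is that short writes are padded out to :length; a small sketch:

  require 'bindata'

  padded = BinData::Buffer.new(length: 8, type: [:string, {read_length: 3}])
  padded.assign("abc")
  padded.num_bytes       #=> 8
  padded.raw_num_bytes   #=> 3
  padded.to_binary_s     #=> "abc\x00\x00\x00\x00\x00"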
186
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/choice.rb
vendored
Normal file
@ -0,0 +1,186 @@
|
||||
require 'bindata/base'
|
||||
require 'bindata/dsl'
|
||||
|
||||
module BinData
|
||||
# A Choice is a collection of data objects of which only one is active
|
||||
# at any particular time. Method calls will be delegated to the active
|
||||
# choice.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# type1 = [:string, {value: "Type1"}]
|
||||
# type2 = [:string, {value: "Type2"}]
|
||||
#
|
||||
# choices = {5 => type1, 17 => type2}
|
||||
# a = BinData::Choice.new(choices: choices, selection: 5)
|
||||
# a # => "Type1"
|
||||
#
|
||||
# choices = [ type1, type2 ]
|
||||
# a = BinData::Choice.new(choices: choices, selection: 1)
|
||||
# a # => "Type2"
|
||||
#
|
||||
# choices = [ nil, nil, nil, type1, nil, type2 ]
|
||||
# a = BinData::Choice.new(choices: choices, selection: 3)
|
||||
# a # => "Type1"
|
||||
#
|
||||
#
|
||||
# Chooser = Struct.new(:choice)
|
||||
# mychoice = Chooser.new
|
||||
# mychoice.choice = 'big'
|
||||
#
|
||||
# choices = {'big' => :uint16be, 'little' => :uint16le}
|
||||
# a = BinData::Choice.new(choices: choices, copy_on_change: true,
|
||||
# selection: -> { mychoice.choice })
|
||||
# a.assign(256)
|
||||
# a.to_binary_s #=> "\001\000"
|
||||
#
|
||||
# mychoice.choice = 'little'
|
||||
# a.to_binary_s #=> "\000\001"
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:choices</tt>:: Either an array or a hash specifying the possible
|
||||
# data objects. The format of the
|
||||
# array/hash.values is a list of symbols
|
||||
# representing the data object type. If a choice
|
||||
# is to have params passed to it, then it should
|
||||
# be provided as [type_symbol, hash_params]. An
|
||||
# implementation constraint is that the hash may
|
||||
# not contain symbols as keys, with the exception
|
||||
# of :default. :default is to be used when the
|
||||
# :selection does not exist in the :choices hash.
|
||||
# <tt>:selection</tt>:: An index/key into the :choices array/hash which
|
||||
# specifies the currently active choice.
|
||||
# <tt>:copy_on_change</tt>:: If set to true, copy the value of the previous
|
||||
# selection to the current selection whenever the
|
||||
# selection changes. Default is false.
|
||||
class Choice < BinData::Base
|
||||
extend DSLMixin
|
||||
|
||||
dsl_parser :choice
|
||||
arg_processor :choice
|
||||
|
||||
mandatory_parameters :choices, :selection
|
||||
optional_parameter :copy_on_change
|
||||
|
||||
def initialize_shared_instance
|
||||
extend CopyOnChangePlugin if eval_parameter(:copy_on_change) == true
|
||||
super
|
||||
end
|
||||
|
||||
def initialize_instance
|
||||
@choices = {}
|
||||
@last_selection = nil
|
||||
end
|
||||
|
||||
# Returns the current selection.
|
||||
def selection
|
||||
selection = eval_parameter(:selection)
|
||||
if selection.nil?
|
||||
raise IndexError, ":selection returned nil for #{debug_name}"
|
||||
end
|
||||
selection
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||
current_choice.respond_to?(symbol, include_private) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
current_choice.__send__(symbol, *args, &block)
|
||||
end
|
||||
|
||||
%w(clear? assign snapshot do_read do_write do_num_bytes).each do |m|
|
||||
module_eval <<-END
|
||||
def #{m}(*args)
|
||||
current_choice.#{m}(*args)
|
||||
end
|
||||
END
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def current_choice
|
||||
current_selection = selection
|
||||
@choices[current_selection] ||= instantiate_choice(current_selection)
|
||||
end
|
||||
|
||||
def instantiate_choice(selection)
|
||||
prototype = get_parameter(:choices)[selection]
|
||||
if prototype.nil?
|
||||
raise IndexError, "selection '#{selection}' does not exist in :choices for #{debug_name}"
|
||||
end
|
||||
prototype.instantiate(nil, self)
|
||||
end
|
||||
end
|
||||
|
||||
class ChoiceArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params) #:nodoc:
|
||||
params.merge!(obj_class.dsl_params)
|
||||
|
||||
params.sanitize_choices(:choices) do |choices|
|
||||
hash_choices = choices_as_hash(choices)
|
||||
ensure_valid_keys(hash_choices)
|
||||
hash_choices
|
||||
end
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def choices_as_hash(choices)
|
||||
if choices.respond_to?(:to_ary)
|
||||
key_array_by_index(choices.to_ary)
|
||||
else
|
||||
choices
|
||||
end
|
||||
end
|
||||
|
||||
def key_array_by_index(array)
|
||||
result = {}
|
||||
array.each_with_index do |el, i|
|
||||
result[i] = el unless el.nil?
|
||||
end
|
||||
result
|
||||
end
|
||||
|
||||
def ensure_valid_keys(choices)
|
||||
if choices.key?(nil)
|
||||
raise ArgumentError, ":choices hash may not have nil key"
|
||||
end
|
||||
if choices.keys.detect { |key| key.is_a?(Symbol) && key != :default }
|
||||
raise ArgumentError, ":choices hash may not have symbols for keys"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :copy_on_change parameter
|
||||
module CopyOnChangePlugin
|
||||
def current_choice
|
||||
obj = super
|
||||
copy_previous_value(obj)
|
||||
obj
|
||||
end
|
||||
|
||||
def copy_previous_value(obj)
|
||||
current_selection = selection
|
||||
prev = get_previous_choice(current_selection)
|
||||
obj.assign(prev) unless prev.nil?
|
||||
remember_current_selection(current_selection)
|
||||
end
|
||||
|
||||
def get_previous_choice(selection)
|
||||
if @last_selection && selection != @last_selection
|
||||
@choices[@last_selection]
|
||||
end
|
||||
end
|
||||
|
||||
def remember_current_selection(selection)
|
||||
@last_selection = selection
|
||||
end
|
||||
end
|
||||
end
|
||||
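A sketch of how :selection, per-choice parameters and the :default key combine inside a record (record and field names are invented for the example):

  require 'bindata'

  class Packet < BinData::Record   # illustrative name
    uint8  :kind
    choice :body, selection: :kind, choices: {
      1        => :uint16be,
      2        => [:string, {read_length: 4}],
      :default => :uint32be,
    }
  end

  Packet.read("\x01\x00\x2A").body   #=> 42
  Packet.read("\x02ABCD").body       #=> "ABCD"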
@ -0,0 +1,34 @@
|
||||
require "bindata/base_primitive"
|
||||
|
||||
module BinData
|
||||
# Counts the number of bytes remaining in the input stream from the current
|
||||
# position to the end of the stream. This only makes sense for seekable
|
||||
# streams.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# class A < BinData::Record
|
||||
# count_bytes_remaining :bytes_remaining
|
||||
# string :all_data, read_length: :bytes_remaining
|
||||
# end
|
||||
#
|
||||
# obj = A.read("abcdefghij")
|
||||
# obj.all_data #=> "abcdefghij"
|
||||
#
|
||||
class CountBytesRemaining < BinData::BasePrimitive
|
||||
#---------------
|
||||
private
|
||||
|
||||
def value_to_binary_string(val)
|
||||
""
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
io.num_bytes_remaining
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
0
|
||||
end
|
||||
end
|
||||
end
|
||||
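The same idea scales to records with a fixed prefix before the remaining data; a short sketch (names invented):

  require 'bindata'

  class Trailer < BinData::Record   # illustrative name
    uint16be              :crc
    count_bytes_remaining :remaining
    string                :payload, read_length: :remaining
  end

  t = Trailer.read("\x12\x34hello")
  t.crc       #=> 0x1234
  t.payload   #=> "hello"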
190
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/delayed_io.rb
vendored
Normal file
@ -0,0 +1,190 @@
|
||||
require 'bindata/base'
|
||||
require 'bindata/dsl'
|
||||
|
||||
module BinData
|
||||
# BinData declarations are evaluated in a single pass.
|
||||
# However, some binary formats require multi pass processing. A common
|
||||
# reason is seeking backwards in the input stream.
|
||||
#
|
||||
# DelayedIO supports multi pass processing. It works by ignoring the normal
|
||||
# #read or #write calls. The user must explicitly call the #read_now! or
|
||||
# #write_now! methods to process an additional pass. This additional pass
|
||||
# must specify the abs_offset of the I/O operation.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# obj = BinData::DelayedIO.new(read_abs_offset: 3, type: :uint16be)
|
||||
# obj.read("\x00\x00\x00\x11\x12")
|
||||
# obj #=> 0
|
||||
#
|
||||
# obj.read_now!
|
||||
# obj #=> 0x1112
|
||||
#
|
||||
# - OR -
|
||||
#
|
||||
# obj.read("\x00\x00\x00\x11\x12") { obj.read_now! } #=> 0x1112
|
||||
#
|
||||
# obj.to_binary_s { obj.write_now! } #=> "\x00\x00\x00\x11\x12"
|
||||
#
|
||||
# You can use the +auto_call_delayed_io+ keyword to cause #read and #write to
|
||||
# automatically perform the extra passes.
|
||||
#
|
||||
# class ReversePascalString < BinData::Record
|
||||
# auto_call_delayed_io
|
||||
#
|
||||
# delayed_io :str, read_abs_offset: 0 do
|
||||
# string read_length: :len
|
||||
# end
|
||||
# count_bytes_remaining :total_size
|
||||
# skip to_abs_offset: -> { total_size - 1 }
|
||||
# uint8 :len, value: -> { str.length }
|
||||
# end
|
||||
#
|
||||
# s = ReversePascalString.read("hello\x05")
|
||||
# s.to_binary_s #=> "hello\x05"
|
||||
#
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:read_abs_offset</tt>:: The abs_offset to start reading at.
|
||||
# <tt>:type</tt>:: The single type inside the delayed io. Use
|
||||
# a struct if multiple fields are required.
|
||||
class DelayedIO < BinData::Base
|
||||
extend DSLMixin
|
||||
|
||||
dsl_parser :delayed_io
|
||||
arg_processor :delayed_io
|
||||
|
||||
mandatory_parameters :read_abs_offset, :type
|
||||
|
||||
def initialize_instance
|
||||
@type = get_parameter(:type).instantiate(nil, self)
|
||||
@abs_offset = nil
|
||||
@read_io = nil
|
||||
@write_io = nil
|
||||
end
|
||||
|
||||
def clear?
|
||||
@type.clear?
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
@type.assign(val)
|
||||
end
|
||||
|
||||
def snapshot
|
||||
@type.snapshot
|
||||
end
|
||||
|
||||
def num_bytes
|
||||
@type.num_bytes
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||
@type.respond_to?(symbol, include_private) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
@type.__send__(symbol, *args, &block)
|
||||
end
|
||||
|
||||
def abs_offset
|
||||
@abs_offset || eval_parameter(:read_abs_offset)
|
||||
end
|
||||
|
||||
# Sets the +abs_offset+ to use when writing this object.
|
||||
def abs_offset=(offset)
|
||||
@abs_offset = offset
|
||||
end
|
||||
|
||||
def rel_offset
|
||||
abs_offset
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
@read_io = io
|
||||
end
|
||||
|
||||
def do_write(io) #:nodoc:
|
||||
@write_io = io
|
||||
end
|
||||
|
||||
def do_num_bytes #:nodoc:
|
||||
0
|
||||
end
|
||||
|
||||
# DelayedIO objects aren't read when #read is called.
|
||||
# The reading is delayed until this method is called.
|
||||
def read_now!
|
||||
raise IOError, "read from where?" unless @read_io
|
||||
|
||||
@read_io.seekbytes(abs_offset - @read_io.offset)
|
||||
start_read do
|
||||
@type.do_read(@read_io)
|
||||
end
|
||||
end
|
||||
|
||||
# DelayedIO objects aren't written when #write is called.
|
||||
# The writing is delayed until this method is called.
|
||||
def write_now!
|
||||
raise IOError, "write to where?" unless @write_io
|
||||
@write_io.seekbytes(abs_offset - @write_io.offset)
|
||||
@type.do_write(@write_io)
|
||||
end
|
||||
end
|
||||
|
||||
class DelayedIoArgProcessor < BaseArgProcessor
|
||||
include MultiFieldArgSeparator
|
||||
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
params.merge!(obj_class.dsl_params)
|
||||
params.must_be_integer(:read_abs_offset)
|
||||
params.sanitize_object_prototype(:type)
|
||||
end
|
||||
end
|
||||
|
||||
# Add +auto_call_delayed_io+ keyword to BinData::Base.
|
||||
class Base
|
||||
class << self
|
||||
# The +auto_call_delayed_io+ keyword sets a data object tree to perform
|
||||
# multi pass I/O automatically.
|
||||
def auto_call_delayed_io
|
||||
return if DelayedIO.method_defined? :initialize_instance_without_record_io
|
||||
|
||||
include AutoCallDelayedIO
|
||||
DelayedIO.send(:alias_method, :initialize_instance_without_record_io, :initialize_instance)
|
||||
DelayedIO.send(:define_method, :initialize_instance) do
|
||||
if @parent && !defined? @delayed_io_recorded
|
||||
@delayed_io_recorded = true
|
||||
list = top_level_get(:delayed_ios)
|
||||
list << self if list
|
||||
end
|
||||
|
||||
initialize_instance_without_record_io
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module AutoCallDelayedIO
|
||||
def initialize_shared_instance
|
||||
top_level_set(:delayed_ios, [])
|
||||
super
|
||||
end
|
||||
|
||||
def read(io)
|
||||
super(io) { top_level_get(:delayed_ios).each(&:read_now!) }
|
||||
end
|
||||
|
||||
def write(io, *_)
|
||||
super(io) { top_level_get(:delayed_ios).each(&:write_now!) }
|
||||
end
|
||||
|
||||
def num_bytes
|
||||
to_binary_s.size
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
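Beyond the fixed-offset example above, read_abs_offset can also be a lambda that references a sibling field, which is the common case for table-of-contents style formats; a sketch under that assumption (names invented):

  require 'bindata'

  class IndexedString < BinData::Record   # illustrative name
    auto_call_delayed_io
    uint8 :payload_offset
    delayed_io :payload, read_abs_offset: -> { payload_offset } do
      string read_length: 3
    end
  end

  rec = IndexedString.read("\x04___abc")
  rec.payload   #=> "abc"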
484
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/dsl.rb
vendored
Normal file
@ -0,0 +1,484 @@
|
||||
module BinData
|
||||
# Extracts args for Records and Buffers.
|
||||
#
|
||||
# Foo.new(bar: "baz") is ambiguous as to whether :bar is a value or parameter.
|
||||
#
|
||||
# BaseArgProcessor always assumes :bar is a parameter. This extractor correctly
|
||||
# identifies it as value or parameter.
|
||||
module MultiFieldArgSeparator
|
||||
def separate_args(obj_class, obj_args)
|
||||
value, parameters, parent = super(obj_class, obj_args)
|
||||
|
||||
if parameters_is_value?(obj_class, value, parameters)
|
||||
value = parameters
|
||||
parameters = {}
|
||||
end
|
||||
|
||||
[value, parameters, parent]
|
||||
end
|
||||
|
||||
def parameters_is_value?(obj_class, value, parameters)
|
||||
if value.nil? && !parameters.empty?
|
||||
field_names_in_parameters?(obj_class, parameters)
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
def field_names_in_parameters?(obj_class, parameters)
|
||||
field_names = obj_class.fields.field_names
|
||||
param_keys = parameters.keys
|
||||
|
||||
!(field_names & param_keys).empty?
|
||||
end
|
||||
end
|
||||
|
||||
# BinData classes that are part of the DSL must be extended by this.
|
||||
module DSLMixin
|
||||
def dsl_parser(parser_type = nil)
|
||||
@dsl_parser ||= begin
|
||||
parser_type ||= superclass.dsl_parser.parser_type
|
||||
DSLParser.new(self, parser_type)
|
||||
end
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
dsl_parser.__send__(symbol, *args, &block)
|
||||
end
|
||||
|
||||
# Assert object is not an array or string.
|
||||
def to_ary; nil; end
|
||||
def to_str; nil; end
|
||||
|
||||
# A DSLParser parses and accumulates field definitions of the form
|
||||
#
|
||||
# type name, params
|
||||
#
|
||||
# where:
|
||||
# * +type+ is the under_scored name of a registered type
|
||||
# * +name+ is the (possibly optional) name of the field
|
||||
# * +params+ is a hash containing any parameters
|
||||
#
|
||||
class DSLParser
|
||||
def initialize(the_class, parser_type)
|
||||
raise "unknown parser type #{parser_type}" unless parser_abilities[parser_type]
|
||||
|
||||
@the_class = the_class
|
||||
@parser_type = parser_type
|
||||
@validator = DSLFieldValidator.new(the_class, self)
|
||||
@endian = nil
|
||||
end
|
||||
|
||||
attr_reader :parser_type
|
||||
|
||||
def endian(endian = nil)
|
||||
if endian
|
||||
set_endian(endian)
|
||||
elsif @endian.nil?
|
||||
set_endian(parent_attribute(:endian))
|
||||
end
|
||||
@endian
|
||||
end
|
||||
|
||||
def search_prefix(*args)
|
||||
@search_prefix ||= parent_attribute(:search_prefix, []).dup
|
||||
|
||||
prefix = args.collect(&:to_sym).compact
|
||||
unless prefix.empty?
|
||||
if fields?
|
||||
dsl_raise SyntaxError, "search_prefix must be called before defining fields"
|
||||
end
|
||||
|
||||
@search_prefix = prefix.concat(@search_prefix)
|
||||
end
|
||||
|
||||
@search_prefix
|
||||
end
|
||||
|
||||
def hide(*args)
|
||||
if option?(:hidden_fields)
|
||||
@hide ||= parent_attribute(:hide, []).dup
|
||||
|
||||
hidden = args.collect(&:to_sym).compact
|
||||
@hide.concat(hidden)
|
||||
|
||||
@hide
|
||||
end
|
||||
end
|
||||
|
||||
def fields
|
||||
@fields ||= SanitizedFields.new(hints, parent_fields)
|
||||
end
|
||||
|
||||
def dsl_params
|
||||
abilities = parser_abilities[@parser_type]
|
||||
send(abilities.at(0), abilities.at(1))
|
||||
end
|
||||
|
||||
def method_missing(*args, &block)
|
||||
ensure_hints
|
||||
parse_and_append_field(*args, &block)
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def parser_abilities
|
||||
@abilities ||= {
|
||||
struct: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||
array: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames]],
|
||||
buffer: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||
choice: [:to_choice_params, :choices, [:multiple_fields, :all_or_none_fieldnames, :fieldnames_are_values]],
|
||||
delayed_io: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||
primitive: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames]],
|
||||
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]],
|
||||
}
|
||||
end
|
||||
|
||||
def option?(opt)
|
||||
parser_abilities[@parser_type].at(2).include?(opt)
|
||||
end
|
||||
|
||||
def ensure_hints
|
||||
endian
|
||||
search_prefix
|
||||
end
|
||||
|
||||
def hints
|
||||
{ endian: endian, search_prefix: search_prefix }
|
||||
end
|
||||
|
||||
def set_endian(endian)
|
||||
if endian
|
||||
if fields?
|
||||
dsl_raise SyntaxError, "endian must be called before defining fields"
|
||||
end
|
||||
if !valid_endian?(endian)
|
||||
dsl_raise ArgumentError, "unknown value for endian '#{endian}'"
|
||||
end
|
||||
|
||||
if endian == :big_and_little
|
||||
DSLBigAndLittleEndianHandler.handle(@the_class)
|
||||
end
|
||||
|
||||
@endian = endian
|
||||
end
|
||||
end
|
||||
|
||||
def valid_endian?(endian)
|
||||
[:big, :little, :big_and_little].include?(endian)
|
||||
end
|
||||
|
||||
def parent_fields
|
||||
parent_attribute(:fields)
|
||||
end
|
||||
|
||||
def fields?
|
||||
defined?(@fields) && !@fields.empty?
|
||||
end
|
||||
|
||||
def parse_and_append_field(*args, &block)
|
||||
parser = DSLFieldParser.new(hints, *args, &block)
|
||||
begin
|
||||
@validator.validate_field(parser.name)
|
||||
append_field(parser.type, parser.name, parser.params)
|
||||
rescue Exception => err
|
||||
dsl_raise err.class, err.message
|
||||
end
|
||||
end
|
||||
|
||||
def append_field(type, name, params)
|
||||
fields.add_field(type, name, params)
|
||||
rescue BinData::UnRegisteredTypeError => err
|
||||
raise TypeError, "unknown type '#{err.message}'"
|
||||
end
|
||||
|
||||
def parent_attribute(attr, default = nil)
|
||||
parent = @the_class.superclass
|
||||
parser = parent.respond_to?(:dsl_parser) ? parent.dsl_parser : nil
|
||||
if parser && parser.respond_to?(attr)
|
||||
parser.send(attr)
|
||||
else
|
||||
default
|
||||
end
|
||||
end
|
||||
|
||||
def dsl_raise(exception, msg)
|
||||
backtrace = caller
|
||||
backtrace.shift while %r{bindata/dsl.rb} =~ backtrace.first
|
||||
|
||||
raise exception, "#{msg} in #{@the_class}", backtrace
|
||||
end
|
||||
|
||||
def to_object_params(key)
|
||||
case fields.length
|
||||
when 0
|
||||
{}
|
||||
when 1
|
||||
{key => fields[0].prototype}
|
||||
else
|
||||
{key => [:struct, to_struct_params]}
|
||||
end
|
||||
end
|
||||
|
||||
def to_choice_params(key)
|
||||
if fields.empty?
|
||||
{}
|
||||
elsif fields.all_field_names_blank?
|
||||
{key => fields.collect(&:prototype)}
|
||||
else
|
||||
choices = {}
|
||||
fields.each { |f| choices[f.name] = f.prototype }
|
||||
{key => choices}
|
||||
end
|
||||
end
|
||||
|
||||
def to_struct_params(*unused)
|
||||
result = {fields: fields}
|
||||
if !endian.nil?
|
||||
result[:endian] = endian
|
||||
end
|
||||
if !search_prefix.empty?
|
||||
result[:search_prefix] = search_prefix
|
||||
end
|
||||
if option?(:hidden_fields) && !hide.empty?
|
||||
result[:hide] = hide
|
||||
end
|
||||
|
||||
result
|
||||
end
|
||||
end
|
||||
|
||||
# Handles the :big_and_little endian option.
|
||||
# This option creates two subclasses, each handling
|
||||
# :big or :little endian.
|
||||
class DSLBigAndLittleEndianHandler
|
||||
class << self
|
||||
def handle(bnl_class)
|
||||
make_class_abstract(bnl_class)
|
||||
create_subclasses_with_endian(bnl_class)
|
||||
override_new_in_class(bnl_class)
|
||||
delegate_field_creation(bnl_class)
|
||||
fixup_subclass_hierarchy(bnl_class)
|
||||
end
|
||||
|
||||
def make_class_abstract(bnl_class)
|
||||
bnl_class.send(:unregister_self)
|
||||
end
|
||||
|
||||
def create_subclasses_with_endian(bnl_class)
|
||||
instance_eval "class ::#{bnl_class}Be < ::#{bnl_class}; endian :big; end"
|
||||
instance_eval "class ::#{bnl_class}Le < ::#{bnl_class}; endian :little; end"
|
||||
end
|
||||
|
||||
def override_new_in_class(bnl_class)
|
||||
endian_classes = {
|
||||
big: class_with_endian(bnl_class, :big),
|
||||
little: class_with_endian(bnl_class, :little),
|
||||
}
|
||||
bnl_class.define_singleton_method(:new) do |*args|
|
||||
if self == bnl_class
|
||||
_, options, _ = arg_processor.separate_args(self, args)
|
||||
delegate = endian_classes[options[:endian]]
|
||||
return delegate.new(*args) if delegate
|
||||
end
|
||||
|
||||
super(*args)
|
||||
end
|
||||
end
|
||||
|
||||
def delegate_field_creation(bnl_class)
|
||||
endian_classes = {
|
||||
big: class_with_endian(bnl_class, :big),
|
||||
little: class_with_endian(bnl_class, :little),
|
||||
}
|
||||
|
||||
parser = bnl_class.dsl_parser
|
||||
parser.define_singleton_method(:parse_and_append_field) do |*args, &block|
|
||||
endian_classes[:big].send(*args, &block)
|
||||
endian_classes[:little].send(*args, &block)
|
||||
end
|
||||
end
|
||||
|
||||
def fixup_subclass_hierarchy(bnl_class)
|
||||
parent = bnl_class.superclass
|
||||
if obj_attribute(parent, :endian) == :big_and_little
|
||||
be_subclass = class_with_endian(bnl_class, :big)
|
||||
be_parent = class_with_endian(parent, :big)
|
||||
be_fields = obj_attribute(be_parent, :fields)
|
||||
|
||||
le_subclass = class_with_endian(bnl_class, :little)
|
||||
le_parent = class_with_endian(parent, :little)
|
||||
le_fields = obj_attribute(le_parent, :fields)
|
||||
|
||||
be_subclass.dsl_parser.define_singleton_method(:parent_fields) do
|
||||
be_fields
|
||||
end
|
||||
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
|
||||
le_fields
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def class_with_endian(class_name, endian)
|
||||
hints = {
|
||||
endian: endian,
|
||||
search_prefix: class_name.dsl_parser.search_prefix,
|
||||
}
|
||||
RegisteredClasses.lookup(class_name, hints)
|
||||
end
|
||||
|
||||
def obj_attribute(obj, attr)
|
||||
obj.dsl_parser.send(attr)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Extracts the details from a field declaration.
|
||||
class DSLFieldParser
|
||||
def initialize(hints, symbol, *args, &block)
|
||||
@hints = hints
|
||||
@type = symbol
|
||||
@name = name_from_field_declaration(args)
|
||||
@params = params_from_field_declaration(args, &block)
|
||||
end
|
||||
|
||||
attr_reader :type, :name, :params
|
||||
|
||||
def name_from_field_declaration(args)
|
||||
name, _ = args
|
||||
if name == "" || name.is_a?(Hash)
|
||||
nil
|
||||
else
|
||||
name
|
||||
end
|
||||
end
|
||||
|
||||
def params_from_field_declaration(args, &block)
|
||||
params = params_from_args(args)
|
||||
|
||||
if block_given?
|
||||
params.merge(params_from_block(&block))
|
||||
else
|
||||
params
|
||||
end
|
||||
end
|
||||
|
||||
def params_from_args(args)
|
||||
name, params = args
|
||||
params = name if name.is_a?(Hash)
|
||||
|
||||
params || {}
|
||||
end
|
||||
|
||||
def params_from_block(&block)
|
||||
bindata_classes = {
|
||||
array: BinData::Array,
|
||||
buffer: BinData::Buffer,
|
||||
choice: BinData::Choice,
|
||||
delayed_io: BinData::DelayedIO,
|
||||
skip: BinData::Skip,
|
||||
struct: BinData::Struct,
|
||||
}
|
||||
|
||||
if bindata_classes.include?(@type)
|
||||
parser = DSLParser.new(bindata_classes[@type], @type)
|
||||
parser.endian(@hints[:endian])
|
||||
parser.search_prefix(*@hints[:search_prefix])
|
||||
parser.instance_eval(&block)
|
||||
|
||||
parser.dsl_params
|
||||
else
|
||||
{}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Validates a field defined in a DSLMixin.
|
||||
class DSLFieldValidator
|
||||
def initialize(the_class, parser)
|
||||
@the_class = the_class
|
||||
@dsl_parser = parser
|
||||
end
|
||||
|
||||
def validate_field(name)
|
||||
if must_not_have_a_name_failed?(name)
|
||||
raise SyntaxError, "field must not have a name"
|
||||
end
|
||||
|
||||
if all_or_none_names_failed?(name)
|
||||
raise SyntaxError, "fields must either all have names, or none must have names"
|
||||
end
|
||||
|
||||
if must_have_a_name_failed?(name)
|
||||
raise SyntaxError, "field must have a name"
|
||||
end
|
||||
|
||||
ensure_valid_name(name)
|
||||
end
|
||||
|
||||
def ensure_valid_name(name)
|
||||
if name && !option?(:fieldnames_are_values)
|
||||
if malformed_name?(name)
|
||||
raise NameError.new("", name), "field '#{name}' is an illegal fieldname"
|
||||
end
|
||||
|
||||
if duplicate_name?(name)
|
||||
raise SyntaxError, "duplicate field '#{name}'"
|
||||
end
|
||||
|
||||
if name_shadows_method?(name)
|
||||
raise NameError.new("", name), "field '#{name}' shadows an existing method"
|
||||
end
|
||||
|
||||
if name_is_reserved?(name)
|
||||
raise NameError.new("", name), "field '#{name}' is a reserved name"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def must_not_have_a_name_failed?(name)
|
||||
option?(:no_fieldnames) && !name.nil?
|
||||
end
|
||||
|
||||
def must_have_a_name_failed?(name)
|
||||
option?(:mandatory_fieldnames) && name.nil?
|
||||
end
|
||||
|
||||
def all_or_none_names_failed?(name)
|
||||
if option?(:all_or_none_fieldnames) && !fields.empty?
|
||||
all_names_blank = fields.all_field_names_blank?
|
||||
no_names_blank = fields.no_field_names_blank?
|
||||
|
||||
(!name.nil? && all_names_blank) || (name.nil? && no_names_blank)
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
def malformed_name?(name)
|
||||
/^[a-z_]\w*$/ !~ name.to_s
|
||||
end
|
||||
|
||||
def duplicate_name?(name)
|
||||
fields.field_name?(name)
|
||||
end
|
||||
|
||||
def name_shadows_method?(name)
|
||||
@the_class.method_defined?(name)
|
||||
end
|
||||
|
||||
def name_is_reserved?(name)
|
||||
BinData::Struct::RESERVED.include?(name.to_sym)
|
||||
end
|
||||
|
||||
def fields
|
||||
@dsl_parser.fields
|
||||
end
|
||||
|
||||
def option?(opt)
|
||||
@dsl_parser.send(:option?, opt)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
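The parser options above (endian declared before fields, hidden_fields, and so on) look like this from the user's side; a small sketch with invented names:

  require 'bindata'

  class Header < BinData::Record   # illustrative name
    endian :little                 # must be declared before any fields
    hide   :reserved               # read/written, but left out of snapshots

    uint16 :version
    uint16 :reserved
    string :tag, read_length: 4
  end

  h = Header.read("\x02\x00\x00\x00ABCD")
  h.version    #=> 2
  h.tag        #=> "ABCD"
  h.snapshot   # omits :reserved because it is hidden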
83
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/float.rb
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
require 'bindata/base_primitive'
|
||||
|
||||
module BinData
|
||||
# Defines a number of classes that contain a floating point number.
|
||||
# The float is defined by precision and endian.
|
||||
|
||||
module FloatingPoint #:nodoc: all
|
||||
class << self
|
||||
PRECISION = {
|
||||
single: 4,
|
||||
double: 8,
|
||||
}
|
||||
|
||||
PACK_CODE = {
|
||||
[:single, :little] => 'e',
|
||||
[:single, :big] => 'g',
|
||||
[:double, :little] => 'E',
|
||||
[:double, :big] => 'G',
|
||||
}
|
||||
|
||||
def define_methods(float_class, precision, endian)
|
||||
float_class.module_eval <<-END
|
||||
def do_num_bytes
|
||||
#{create_num_bytes_code(precision)}
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def sensible_default
|
||||
0.0
|
||||
end
|
||||
|
||||
def value_to_binary_string(val)
|
||||
#{create_to_binary_s_code(precision, endian)}
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
#{create_read_code(precision, endian)}
|
||||
end
|
||||
END
|
||||
end
|
||||
|
||||
def create_num_bytes_code(precision)
|
||||
PRECISION[precision]
|
||||
end
|
||||
|
||||
def create_read_code(precision, endian)
|
||||
nbytes = PRECISION[precision]
|
||||
unpack = PACK_CODE[[precision, endian]]
|
||||
|
||||
"io.readbytes(#{nbytes}).unpack('#{unpack}').at(0)"
|
||||
end
|
||||
|
||||
def create_to_binary_s_code(precision, endian)
|
||||
pack = PACK_CODE[[precision, endian]]
|
||||
|
||||
"[val].pack('#{pack}')"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# Single precision floating point number in little endian format
|
||||
class FloatLe < BinData::BasePrimitive
|
||||
FloatingPoint.define_methods(self, :single, :little)
|
||||
end
|
||||
|
||||
# Single precision floating point number in big endian format
|
||||
class FloatBe < BinData::BasePrimitive
|
||||
FloatingPoint.define_methods(self, :single, :big)
|
||||
end
|
||||
|
||||
# Double precision floating point number in little endian format
|
||||
class DoubleLe < BinData::BasePrimitive
|
||||
FloatingPoint.define_methods(self, :double, :little)
|
||||
end
|
||||
|
||||
# Double precision floating point number in big endian format
|
||||
class DoubleBe < BinData::BasePrimitive
|
||||
FloatingPoint.define_methods(self, :double, :big)
|
||||
end
|
||||
end
|
||||
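A usage sketch for the float classes defined above (not part of the vendored source): the precision symbol selects the byte count and pack code listed in PRECISION and PACK_CODE.

require 'bindata'

f = BinData::FloatLe.new(1.5)
f.num_bytes                               #=> 4, single precision
f.to_binary_s == [1.5].pack('e')          #=> true
BinData::DoubleBe.read([3.25].pack('G'))  #=> 3.25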
75
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/framework.rb
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
module BinData
|
||||
# Error raised when unexpected results occur when reading data from IO.
|
||||
class ValidityError < StandardError ; end
|
||||
|
||||
# All methods provided by the framework are to be implemented or overridden
|
||||
# by subclasses of BinData::Base.
|
||||
module Framework
|
||||
# Initializes the state of the object. All instance variables that
|
||||
# are used by the object must be initialized here.
|
||||
def initialize_instance
|
||||
end
|
||||
|
||||
# Initialises state that is shared by objects with the same parameters.
|
||||
#
|
||||
# This should only be used when optimising for performance. Instance
|
||||
# variables set here, and changes to the singleton class will be shared
|
||||
# between all objects that are initialized with the same parameters.
|
||||
# This method is called only once for a particular set of parameters.
|
||||
def initialize_shared_instance
|
||||
end
|
||||
|
||||
# Returns true if the object has not been changed since creation.
|
||||
def clear?
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Assigns the value of +val+ to this data object. Note that +val+ must
|
||||
# always be deep copied to ensure no aliasing problems can occur.
|
||||
def assign(val)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Returns a snapshot of this data object.
|
||||
def snapshot
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Returns the debug name of +child+. This only needs to be implemented
|
||||
# by objects that contain child objects.
|
||||
def debug_name_of(child) #:nodoc:
|
||||
debug_name
|
||||
end
|
||||
|
||||
# Returns the offset of +child+. This only needs to be implemented
|
||||
# by objects that contain child objects.
|
||||
def offset_of(child) #:nodoc:
|
||||
0
|
||||
end
|
||||
|
||||
# Is this object aligned on non-byte boundaries?
|
||||
def bit_aligned?
|
||||
false
|
||||
end
|
||||
|
||||
# Reads the data for this data object from +io+.
|
||||
def do_read(io) #:nodoc:
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Writes the value for this data to +io+.
|
||||
def do_write(io) #:nodoc:
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Returns the number of bytes it will take to write this data.
|
||||
def do_num_bytes #:nodoc:
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Set visibility requirements of methods to implement
|
||||
public :clear?, :assign, :snapshot, :debug_name_of, :offset_of
|
||||
protected :initialize_instance, :initialize_shared_instance
|
||||
protected :do_read, :do_write, :do_num_bytes
|
||||
end
|
||||
end
|
||||
212
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/int.rb
vendored
Normal file
@ -0,0 +1,212 @@
|
||||
require 'thread'
|
||||
require 'bindata/base_primitive'
|
||||
|
||||
module BinData
|
||||
# Defines a number of classes that contain an integer. The integer
|
||||
# is defined by endian, signedness and number of bytes.
|
||||
|
||||
module Int #:nodoc: all
|
||||
@@mutex = Mutex.new
|
||||
|
||||
class << self
|
||||
def define_class(name, nbits, endian, signed)
|
||||
@@mutex.synchronize do
|
||||
unless BinData.const_defined?(name)
|
||||
new_class = Class.new(BinData::BasePrimitive)
|
||||
Int.define_methods(new_class, nbits, endian.to_sym, signed.to_sym)
|
||||
RegisteredClasses.register(name, new_class)
|
||||
|
||||
BinData.const_set(name, new_class)
|
||||
end
|
||||
end
|
||||
|
||||
BinData.const_get(name)
|
||||
end
|
||||
|
||||
def define_methods(int_class, nbits, endian, signed)
|
||||
raise "nbits must be divisible by 8" unless (nbits % 8).zero?
|
||||
|
||||
int_class.module_eval <<-END
|
||||
def assign(val)
|
||||
#{create_clamp_code(nbits, signed)}
|
||||
super(val)
|
||||
end
|
||||
|
||||
def do_num_bytes
|
||||
#{nbits / 8}
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def sensible_default
|
||||
0
|
||||
end
|
||||
|
||||
def value_to_binary_string(val)
|
||||
#{create_clamp_code(nbits, signed)}
|
||||
#{create_to_binary_s_code(nbits, endian, signed)}
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
#{create_read_code(nbits, endian, signed)}
|
||||
end
|
||||
END
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def create_clamp_code(nbits, signed)
|
||||
if signed == :signed
|
||||
max = (1 << (nbits - 1)) - 1
|
||||
min = -(max + 1)
|
||||
else
|
||||
max = (1 << nbits) - 1
|
||||
min = 0
|
||||
end
|
||||
|
||||
"val = (val < #{min}) ? #{min} : (val > #{max}) ? #{max} : val"
|
||||
end
|
||||
|
||||
def create_read_code(nbits, endian, signed)
|
||||
read_str = create_raw_read_code(nbits, endian, signed)
|
||||
|
||||
if need_signed_conversion_code?(nbits, signed)
|
||||
"val = #{read_str} ; #{create_uint2int_code(nbits)}"
|
||||
else
|
||||
read_str
|
||||
end
|
||||
end
|
||||
|
||||
def create_raw_read_code(nbits, endian, signed)
|
||||
# special case 8bit integers for speed
|
||||
if nbits == 8
|
||||
"io.readbytes(1).ord"
|
||||
else
|
||||
unpack_str = create_read_unpack_code(nbits, endian, signed)
|
||||
assemble_str = create_read_assemble_code(nbits, endian, signed)
|
||||
|
||||
"(#{unpack_str} ; #{assemble_str})"
|
||||
end
|
||||
end
|
||||
|
||||
def create_read_unpack_code(nbits, endian, signed)
|
||||
nbytes = nbits / 8
|
||||
pack_directive = pack_directive(nbits, endian, signed)
|
||||
|
||||
"ints = io.readbytes(#{nbytes}).unpack('#{pack_directive}')"
|
||||
end
|
||||
|
||||
def create_read_assemble_code(nbits, endian, signed)
|
||||
nwords = nbits / bits_per_word(nbits)
|
||||
|
||||
idx = (0...nwords).to_a
|
||||
idx.reverse! if endian == :big
|
||||
|
||||
parts = (0...nwords).collect do |i|
|
||||
"(ints.at(#{idx[i]}) << #{bits_per_word(nbits) * i})"
|
||||
end
|
||||
parts[0].sub!(/ << 0\b/, "") # Remove " << 0" for optimisation
|
||||
|
||||
parts.join(" + ")
|
||||
end
|
||||
|
||||
def create_to_binary_s_code(nbits, endian, signed)
|
||||
# special case 8bit integers for speed
|
||||
return "(val & 0xff).chr" if nbits == 8
|
||||
|
||||
pack_directive = pack_directive(nbits, endian, signed)
|
||||
words = val_as_packed_words(nbits, endian, signed)
|
||||
pack_str = "[#{words}].pack('#{pack_directive}')"
|
||||
|
||||
if need_signed_conversion_code?(nbits, signed)
|
||||
"#{create_int2uint_code(nbits)} ; #{pack_str}"
|
||||
else
|
||||
pack_str
|
||||
end
|
||||
end
|
||||
|
||||
def val_as_packed_words(nbits, endian, signed)
|
||||
nwords = nbits / bits_per_word(nbits)
|
||||
mask = (1 << bits_per_word(nbits)) - 1
|
||||
|
||||
vals = (0...nwords).collect { |i| "val >> #{bits_per_word(nbits) * i}" }
|
||||
vals[0].sub!(/ >> 0\b/, "") # Remove " >> 0" for optimisation
|
||||
vals.reverse! if (endian == :big)
|
||||
|
||||
vals = vals.collect { |val| "#{val} & #{mask}" } # TODO: "& mask" is needed to work around jruby bug. Remove this line when fixed.
|
||||
vals.join(",")
|
||||
end
|
||||
|
||||
def create_int2uint_code(nbits)
|
||||
"val &= #{(1 << nbits) - 1}"
|
||||
end
|
||||
|
||||
def create_uint2int_code(nbits)
|
||||
"(val >= #{1 << (nbits - 1)}) ? val - #{1 << nbits} : val"
|
||||
end
|
||||
|
||||
def bits_per_word(nbits)
|
||||
(nbits % 64).zero? ? 64 :
|
||||
(nbits % 32).zero? ? 32 :
|
||||
(nbits % 16).zero? ? 16 :
|
||||
8
|
||||
end
|
||||
|
||||
def pack_directive(nbits, endian, signed)
|
||||
nwords = nbits / bits_per_word(nbits)
|
||||
|
||||
directives = { 8 => "C", 16 => "S", 32 => "L", 64 => "Q" }
|
||||
|
||||
d = directives[bits_per_word(nbits)]
|
||||
d << ((endian == :big) ? ">" : "<") unless d == "C"
|
||||
|
||||
if signed == :signed && directives.key?(nbits)
|
||||
(d * nwords).downcase
|
||||
else
|
||||
d * nwords
|
||||
end
|
||||
end
|
||||
|
||||
def need_signed_conversion_code?(nbits, signed)
|
||||
signed == :signed && ![64, 32, 16].include?(nbits)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# Unsigned 1 byte integer.
|
||||
class Uint8 < BinData::BasePrimitive
|
||||
Int.define_methods(self, 8, :little, :unsigned)
|
||||
end
|
||||
|
||||
# Signed 1 byte integer.
|
||||
class Int8 < BinData::BasePrimitive
|
||||
Int.define_methods(self, 8, :little, :signed)
|
||||
end
|
||||
|
||||
# Create classes on demand
|
||||
module IntFactory
|
||||
def const_missing(name)
|
||||
mappings = {
|
||||
/^Uint(\d+)be$/ => [:big, :unsigned],
|
||||
/^Uint(\d+)le$/ => [:little, :unsigned],
|
||||
/^Int(\d+)be$/ => [:big, :signed],
|
||||
/^Int(\d+)le$/ => [:little, :signed],
|
||||
}
|
||||
|
||||
mappings.each_pair do |regex, args|
|
||||
if regex =~ name.to_s
|
||||
nbits = $1.to_i
|
||||
if nbits > 0 && (nbits % 8).zero?
|
||||
return Int.define_class(name, nbits, *args)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
super
|
||||
end
|
||||
end
|
||||
BinData.extend IntFactory
|
||||
end
|
||||
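Only Uint8 and Int8 are defined eagerly above; every other width is built on first reference through IntFactory#const_missing and Int.define_class. An illustrative sketch (not part of the vendored source):

require 'bindata'

BinData::Uint24be.read("\x00\x01\x00")          #=> 256, class generated on demand
BinData::Int16le.read("\xFF\xFF")               #=> -1
BinData::Uint32be.new(0x12345678).to_binary_s   #=> "\x12\x34\x56\x78"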
496
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/io.rb
vendored
Normal file
@ -0,0 +1,496 @@
|
||||
require 'stringio'
|
||||
|
||||
module BinData
|
||||
# A wrapper around an IO object. The wrapper provides a consistent
|
||||
# interface for BinData objects to use when accessing the IO.
|
||||
module IO
|
||||
|
||||
# Common operations for both Read and Write.
|
||||
module Common
|
||||
def initialize(io)
|
||||
if self.class === io
|
||||
raise ArgumentError, "io must not be a #{self.class}"
|
||||
end
|
||||
|
||||
# wrap strings in a StringIO
|
||||
if io.respond_to?(:to_str)
|
||||
io = BinData::IO.create_string_io(io.to_str)
|
||||
end
|
||||
|
||||
@raw_io = io
|
||||
@buffer_end_points = nil
|
||||
|
||||
extend seekable? ? SeekableStream : UnSeekableStream
|
||||
stream_init
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def seekable?
|
||||
@raw_io.pos
|
||||
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
|
||||
nil
|
||||
end
|
||||
|
||||
def seek(n)
|
||||
seek_raw(buffer_limited_n(n))
|
||||
end
|
||||
|
||||
def buffer_limited_n(n)
|
||||
if @buffer_end_points
|
||||
if n.nil? || n > 0
|
||||
max = @buffer_end_points[1] - offset
|
||||
n = max if n.nil? || n > max
|
||||
else
|
||||
min = @buffer_end_points[0] - offset
|
||||
n = min if n < min
|
||||
end
|
||||
end
|
||||
|
||||
n
|
||||
end
|
||||
|
||||
def with_buffer_common(n)
|
||||
prev = @buffer_end_points
|
||||
if prev
|
||||
avail = prev[1] - offset
|
||||
n = avail if n > avail
|
||||
end
|
||||
@buffer_end_points = [offset, offset + n]
|
||||
begin
|
||||
yield(*@buffer_end_points)
|
||||
ensure
|
||||
@buffer_end_points = prev
|
||||
end
|
||||
end
|
||||
|
||||
# Use #seek and #pos on seekable streams
|
||||
module SeekableStream
|
||||
# The number of bytes remaining in the input stream.
|
||||
def num_bytes_remaining
|
||||
start_mark = @raw_io.pos
|
||||
@raw_io.seek(0, ::IO::SEEK_END)
|
||||
end_mark = @raw_io.pos
|
||||
|
||||
if @buffer_end_points
|
||||
if @buffer_end_points[1] < end_mark
|
||||
end_mark = @buffer_end_points[1]
|
||||
end
|
||||
end
|
||||
|
||||
bytes_remaining = end_mark - start_mark
|
||||
@raw_io.seek(start_mark, ::IO::SEEK_SET)
|
||||
|
||||
bytes_remaining
|
||||
end
|
||||
|
||||
# All io calls in +block+ are rolled back after this
|
||||
# method completes.
|
||||
def with_readahead
|
||||
mark = @raw_io.pos
|
||||
begin
|
||||
yield
|
||||
ensure
|
||||
@raw_io.seek(mark, ::IO::SEEK_SET)
|
||||
end
|
||||
end
|
||||
|
||||
#-----------
|
||||
private
|
||||
|
||||
def stream_init
|
||||
@initial_pos = @raw_io.pos
|
||||
end
|
||||
|
||||
def offset_raw
|
||||
@raw_io.pos - @initial_pos
|
||||
end
|
||||
|
||||
def seek_raw(n)
|
||||
@raw_io.seek(n, ::IO::SEEK_CUR)
|
||||
end
|
||||
|
||||
def read_raw(n)
|
||||
@raw_io.read(n)
|
||||
end
|
||||
|
||||
def write_raw(data)
|
||||
@raw_io.write(data)
|
||||
end
|
||||
end
|
||||
|
||||
# Manually keep track of offset for unseekable streams.
|
||||
module UnSeekableStream
|
||||
def offset_raw
|
||||
@offset
|
||||
end
|
||||
|
||||
# The number of bytes remaining in the input stream.
|
||||
def num_bytes_remaining
|
||||
raise IOError, "stream is unseekable"
|
||||
end
|
||||
|
||||
# All io calls in +block+ are rolled back after this
|
||||
# method completes.
|
||||
def with_readahead
|
||||
mark = @offset
|
||||
@read_data = ""
|
||||
@in_readahead = true
|
||||
|
||||
class << self
|
||||
alias_method :read_raw_without_readahead, :read_raw
|
||||
alias_method :read_raw, :read_raw_with_readahead
|
||||
end
|
||||
|
||||
begin
|
||||
yield
|
||||
ensure
|
||||
@offset = mark
|
||||
@in_readahead = false
|
||||
end
|
||||
end
|
||||
|
||||
#-----------
|
||||
private
|
||||
|
||||
def stream_init
|
||||
@offset = 0
|
||||
end
|
||||
|
||||
def read_raw(n)
|
||||
data = @raw_io.read(n)
|
||||
@offset += data.size if data
|
||||
data
|
||||
end
|
||||
|
||||
def read_raw_with_readahead(n)
|
||||
data = ""
|
||||
|
||||
unless @read_data.empty? || @in_readahead
|
||||
bytes_to_consume = [n, @read_data.length].min
|
||||
data << @read_data.slice!(0, bytes_to_consume)
|
||||
n -= bytes_to_consume
|
||||
|
||||
if @read_data.empty?
|
||||
class << self
|
||||
alias_method :read_raw, :read_raw_without_readahead
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
raw_data = @raw_io.read(n)
|
||||
data << raw_data if raw_data
|
||||
|
||||
if @in_readahead
|
||||
@read_data << data
|
||||
end
|
||||
|
||||
@offset += data.size
|
||||
|
||||
data
|
||||
end
|
||||
|
||||
def write_raw(data)
|
||||
@offset += data.size
|
||||
@raw_io.write(data)
|
||||
end
|
||||
|
||||
def seek_raw(n)
|
||||
raise IOError, "stream is unseekable" if n < 0
|
||||
|
||||
# NOTE: how do we seek on a writable stream?
|
||||
|
||||
# skip over data in 8k blocks
|
||||
while n > 0
|
||||
bytes_to_read = [n, 8192].min
|
||||
read_raw(bytes_to_read)
|
||||
n -= bytes_to_read
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Creates a StringIO around +str+.
|
||||
def self.create_string_io(str = "")
|
||||
s = StringIO.new(str.dup.force_encoding(Encoding::BINARY))
|
||||
s.binmode
|
||||
s
|
||||
end
|
||||
|
||||
# Create a new IO Read wrapper around +io+. +io+ must provide #read,
|
||||
# #pos if reading the current stream position and #seek if setting the
|
||||
# current stream position. If +io+ is a string it will be automatically
|
||||
# wrapped in a StringIO object.
|
||||
#
|
||||
# The IO can handle bitstreams in either big or little endian format.
|
||||
#
|
||||
# M  byte1   L      M  byte2   L
# S 76543210 S      S fedcba98 S
# B          B      B          B
|
||||
#
|
||||
# In big endian format:
|
||||
# readbits(6), readbits(5) #=> [765432, 10fed]
|
||||
#
|
||||
# In little endian format:
|
||||
# readbits(6), readbits(5) #=> [543210, a9876]
|
||||
#
|
||||
class Read
|
||||
include Common
|
||||
|
||||
def initialize(io)
|
||||
super(io)
|
||||
|
||||
# bits when reading
|
||||
@rnbits = 0
|
||||
@rval = 0
|
||||
@rendian = nil
|
||||
end
|
||||
|
||||
# Sets a buffer of +n+ bytes on the io stream. Any reading or seeking
|
||||
# calls inside the +block+ will be contained within this buffer.
|
||||
def with_buffer(n)
|
||||
with_buffer_common(n) do
|
||||
yield
|
||||
read
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the current offset of the io stream. Offset will be rounded
|
||||
# up when reading bitfields.
|
||||
def offset
|
||||
offset_raw
|
||||
end
|
||||
|
||||
# Seek +n+ bytes from the current position in the io stream.
|
||||
def seekbytes(n)
|
||||
reset_read_bits
|
||||
seek(n)
|
||||
end
|
||||
|
||||
# Reads exactly +n+ bytes from +io+.
|
||||
#
|
||||
# If the data read is nil an EOFError is raised.
|
||||
#
|
||||
# If the data read is too short an IOError is raised.
|
||||
def readbytes(n)
|
||||
reset_read_bits
|
||||
read(n)
|
||||
end
|
||||
|
||||
# Reads all remaining bytes from the stream.
|
||||
def read_all_bytes
|
||||
reset_read_bits
|
||||
read
|
||||
end
|
||||
|
||||
# Reads exactly +nbits+ bits from the stream. +endian+ specifies whether
|
||||
# the bits are stored in +:big+ or +:little+ endian format.
|
||||
def readbits(nbits, endian)
|
||||
if @rendian != endian
|
||||
# don't mix bits of differing endian
|
||||
reset_read_bits
|
||||
@rendian = endian
|
||||
end
|
||||
|
||||
if endian == :big
|
||||
read_big_endian_bits(nbits)
|
||||
else
|
||||
read_little_endian_bits(nbits)
|
||||
end
|
||||
end
|
||||
|
||||
# Discards any read bits so the stream becomes aligned at the
|
||||
# next byte boundary.
|
||||
def reset_read_bits
|
||||
@rnbits = 0
|
||||
@rval = 0
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def read(n = nil)
|
||||
str = read_raw(buffer_limited_n(n))
|
||||
if n
|
||||
raise EOFError, "End of file reached" if str.nil?
|
||||
raise IOError, "data truncated" if str.size < n
|
||||
end
|
||||
str
|
||||
end
|
||||
|
||||
def read_big_endian_bits(nbits)
|
||||
while @rnbits < nbits
|
||||
accumulate_big_endian_bits
|
||||
end
|
||||
|
||||
val = (@rval >> (@rnbits - nbits)) & mask(nbits)
|
||||
@rnbits -= nbits
|
||||
@rval &= mask(@rnbits)
|
||||
|
||||
val
|
||||
end
|
||||
|
||||
def accumulate_big_endian_bits
|
||||
byte = read(1).unpack('C').at(0) & 0xff
|
||||
@rval = (@rval << 8) | byte
|
||||
@rnbits += 8
|
||||
end
|
||||
|
||||
def read_little_endian_bits(nbits)
|
||||
while @rnbits < nbits
|
||||
accumulate_little_endian_bits
|
||||
end
|
||||
|
||||
val = @rval & mask(nbits)
|
||||
@rnbits -= nbits
|
||||
@rval >>= nbits
|
||||
|
||||
val
|
||||
end
|
||||
|
||||
def accumulate_little_endian_bits
|
||||
byte = read(1).unpack('C').at(0) & 0xff
|
||||
@rval = @rval | (byte << @rnbits)
|
||||
@rnbits += 8
|
||||
end
|
||||
|
||||
def mask(nbits)
|
||||
(1 << nbits) - 1
|
||||
end
|
||||
end
|
||||
|
||||
# Create a new IO Write wrapper around +io+. +io+ must provide #write.
|
||||
# If +io+ is a string it will be automatically wrapped in a StringIO
|
||||
# object.
|
||||
#
|
||||
# The IO can handle bitstreams in either big or little endian format.
|
||||
#
|
||||
# See IO::Read for more information.
|
||||
class Write
|
||||
include Common
|
||||
def initialize(io)
|
||||
super(io)
|
||||
|
||||
@wnbits = 0
|
||||
@wval = 0
|
||||
@wendian = nil
|
||||
end
|
||||
|
||||
# Sets a buffer of +n+ bytes on the io stream. Any writes inside the
|
||||
# +block+ will be contained within this buffer. If less than +n+ bytes
|
||||
# are written inside the block, the remainder will be padded with '\0'
|
||||
# bytes.
|
||||
def with_buffer(n)
|
||||
with_buffer_common(n) do |_buf_start, buf_end|
|
||||
yield
|
||||
write("\0" * (buf_end - offset))
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the current offset of the io stream. Offset will be rounded
|
||||
# up when writing bitfields.
|
||||
def offset
|
||||
offset_raw + (@wnbits > 0 ? 1 : 0)
|
||||
end
|
||||
|
||||
# Seek +n+ bytes from the current position in the io stream.
|
||||
def seekbytes(n)
|
||||
flushbits
|
||||
seek(n)
|
||||
end
|
||||
|
||||
# Writes the given string of bytes to the io stream.
|
||||
def writebytes(str)
|
||||
flushbits
|
||||
write(str)
|
||||
end
|
||||
|
||||
# Writes +nbits+ bits from +val+ to the stream. +endian+ specifies whether
|
||||
# the bits are to be stored in +:big+ or +:little+ endian format.
|
||||
def writebits(val, nbits, endian)
|
||||
if @wendian != endian
|
||||
# don't mix bits of differing endian
|
||||
flushbits
|
||||
@wendian = endian
|
||||
end
|
||||
|
||||
clamped_val = val & mask(nbits)
|
||||
|
||||
if endian == :big
|
||||
write_big_endian_bits(clamped_val, nbits)
|
||||
else
|
||||
write_little_endian_bits(clamped_val, nbits)
|
||||
end
|
||||
end
|
||||
|
||||
# To be called after all +writebits+ have been applied.
|
||||
def flushbits
|
||||
raise "Internal state error nbits = #{@wnbits}" if @wnbits >= 8
|
||||
|
||||
if @wnbits > 0
|
||||
writebits(0, 8 - @wnbits, @wendian)
|
||||
end
|
||||
end
|
||||
alias flush flushbits
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def write(data)
|
||||
n = buffer_limited_n(data.size)
|
||||
if n < data.size
|
||||
data = data[0, n]
|
||||
end
|
||||
|
||||
write_raw(data)
|
||||
end
|
||||
|
||||
def write_big_endian_bits(val, nbits)
|
||||
while nbits > 0
|
||||
bits_req = 8 - @wnbits
|
||||
if nbits >= bits_req
|
||||
msb_bits = (val >> (nbits - bits_req)) & mask(bits_req)
|
||||
nbits -= bits_req
|
||||
val &= mask(nbits)
|
||||
|
||||
@wval = (@wval << bits_req) | msb_bits
|
||||
write(@wval.chr)
|
||||
|
||||
@wval = 0
|
||||
@wnbits = 0
|
||||
else
|
||||
@wval = (@wval << nbits) | val
|
||||
@wnbits += nbits
|
||||
nbits = 0
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def write_little_endian_bits(val, nbits)
|
||||
while nbits > 0
|
||||
bits_req = 8 - @wnbits
|
||||
if nbits >= bits_req
|
||||
lsb_bits = val & mask(bits_req)
|
||||
nbits -= bits_req
|
||||
val >>= bits_req
|
||||
|
||||
@wval = @wval | (lsb_bits << @wnbits)
|
||||
write(@wval.chr)
|
||||
|
||||
@wval = 0
|
||||
@wnbits = 0
|
||||
else
|
||||
@wval = @wval | (val << @wnbits)
|
||||
@wnbits += nbits
|
||||
nbits = 0
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def mask(nbits)
|
||||
(1 << nbits) - 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
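A sketch of the bit-level reader defined above (not part of the vendored source): bits are accumulated a byte at a time, and mixing endians forces a realignment via reset_read_bits.

require 'bindata'

io = BinData::IO::Read.new("\xC1")   # 0b1100_0001
io.readbits(2, :big)                 #=> 3, the two most significant bits
io.readbits(6, :big)                 #=> 1, the remaining bits of the same byte
io.reset_read_bits                   # discard partial bits, realign to a byte boundary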
109
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/lazy.rb
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
module BinData
|
||||
# A LazyEvaluator is bound to a data object. The evaluator will evaluate
|
||||
# lambdas in the context of this data object. These lambdas
|
||||
# are those that are passed to data objects as parameters, e.g.:
|
||||
#
|
||||
# BinData::String.new(value: -> { %w(a test message).join(" ") })
|
||||
#
|
||||
# As a shortcut, :foo is the equivalent of lambda { foo }.
|
||||
#
|
||||
# When evaluating lambdas, unknown methods are resolved in the context of the
|
||||
# parent of the bound data object. Resolution is attempted firstly as keys
|
||||
# in #parameters, and secondly as methods in this parent. This
|
||||
# resolution propagates up the chain of parent data objects.
|
||||
#
|
||||
# An evaluation will recurse until it returns a result that is not
|
||||
# a lambda or a symbol.
|
||||
#
|
||||
# This resolution process makes the lambda easier to read as we just write
|
||||
# <tt>field</tt> instead of <tt>obj.field</tt>.
|
||||
class LazyEvaluator
|
||||
|
||||
# Creates a new evaluator. All lazy evaluation is performed in the
|
||||
# context of +obj+.
|
||||
def initialize(obj)
|
||||
@obj = obj
|
||||
end
|
||||
|
||||
def lazy_eval(val, overrides = nil)
|
||||
@overrides = overrides if overrides
|
||||
if val.is_a? Symbol
|
||||
__send__(val)
|
||||
elsif callable?(val)
|
||||
instance_exec(&val)
|
||||
else
|
||||
val
|
||||
end
|
||||
end
|
||||
|
||||
# Returns a LazyEvaluator for the parent of this data object.
|
||||
def parent
|
||||
if @obj.parent
|
||||
@obj.parent.lazy_evaluator
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the index of this data object inside its nearest container
|
||||
# array.
|
||||
def index
|
||||
return @overrides[:index] if defined?(@overrides) && @overrides.key?(:index)
|
||||
|
||||
child = @obj
|
||||
parent = @obj.parent
|
||||
while parent
|
||||
if parent.respond_to?(:find_index_of)
|
||||
return parent.find_index_of(child)
|
||||
end
|
||||
child = parent
|
||||
parent = parent.parent
|
||||
end
|
||||
raise NoMethodError, "no index found"
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args)
|
||||
return @overrides[symbol] if defined?(@overrides) && @overrides.key?(symbol)
|
||||
|
||||
if @obj.parent
|
||||
eval_symbol_in_parent_context(symbol, args)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def eval_symbol_in_parent_context(symbol, args)
|
||||
result = resolve_symbol_in_parent_context(symbol, args)
|
||||
recursively_eval(result, args)
|
||||
end
|
||||
|
||||
def resolve_symbol_in_parent_context(symbol, args)
|
||||
obj_parent = @obj.parent
|
||||
|
||||
if obj_parent.has_parameter?(symbol)
|
||||
obj_parent.get_parameter(symbol)
|
||||
elsif obj_parent.safe_respond_to?(symbol, true)
|
||||
obj_parent.__send__(symbol, *args)
|
||||
else
|
||||
symbol
|
||||
end
|
||||
end
|
||||
|
||||
def recursively_eval(val, args)
|
||||
if val.is_a?(Symbol)
|
||||
parent.__send__(val, *args)
|
||||
elsif callable?(val)
|
||||
parent.instance_exec(&val)
|
||||
else
|
||||
val
|
||||
end
|
||||
end
|
||||
|
||||
def callable?(obj)
|
||||
Proc === obj || Method === obj || UnboundMethod === obj
|
||||
end
|
||||
end
|
||||
end
|
||||
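A sketch of lazy evaluation in practice (not part of the vendored source; Header is a made-up illustration): a lambda parameter is executed against the bound object, and a bare symbol resolves to a sibling field through the parent chain described above.

require 'bindata'

obj = BinData::String.new(value: -> { %w(lazy eval).join(" ") })
obj.snapshot   #=> "lazy eval"

class Header < BinData::Record
  uint8 :version
  uint8 :flags, initial_value: :version   # :version is shorthand for -> { version }
end

Header.new(version: 3).flags   #=> 3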
28
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/name.rb
vendored
Normal file
@ -0,0 +1,28 @@
module BinData
  # == Parameters
  #
  # Parameters may be provided at initialisation to control the behaviour of
  # an object.  These parameters are:
  #
  # <tt>:name</tt>:: The name that this object can be referred to may be
  #                  set explicitly.  This is only useful when dynamically
  #                  generating types.
  #                  <code><pre>
  #                    BinData::Struct.new(name: :my_struct, fields: ...)
  #                    array = BinData::Array.new(type: :my_struct)
  #                  </pre></code>
  module RegisterNamePlugin

    def self.included(base) #:nodoc:
      # The registered name may be provided explicitly.
      base.optional_parameter :name
    end

    def initialize_shared_instance
      if has_parameter?(:name)
        RegisteredClasses.register(get_parameter(:name), self)
      end
      super
    end
  end
end
94
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/offset.rb
vendored
Normal file
@ -0,0 +1,94 @@
|
||||
module BinData
|
||||
# WARNING: THIS IS UNSUPPORTED!!
|
||||
#
|
||||
# This was a (failed) experimental feature that allowed seeking within the
|
||||
# input stream. It remains here for backwards compatibility for the few
|
||||
# people that used it.
|
||||
#
|
||||
# The official way to skip around the stream is to use BinData::Skip with
|
||||
# the `:to_abs_offset` parameter.
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These parameters are:
|
||||
#
|
||||
# [<tt>:check_offset</tt>] Raise an error if the current IO offset doesn't
|
||||
# meet this criteria. A boolean return indicates
|
||||
# success or failure. Any other return is compared
|
||||
# to the current offset. The variable +offset+
|
||||
# is made available to any lambda assigned to
|
||||
# this parameter. This parameter is only checked
|
||||
# before reading.
|
||||
# [<tt>:adjust_offset</tt>] Ensures that the current IO offset is at this
|
||||
# position before reading. This is like
|
||||
# <tt>:check_offset</tt>, except that it will
|
||||
# adjust the IO offset instead of raising an error.
|
||||
module CheckOrAdjustOffsetPlugin
|
||||
|
||||
def self.included(base) #:nodoc:
|
||||
base.optional_parameters :check_offset, :adjust_offset
|
||||
base.mutually_exclusive_parameters :check_offset, :adjust_offset
|
||||
end
|
||||
|
||||
def initialize_shared_instance
|
||||
extend CheckOffsetMixin if has_parameter?(:check_offset)
|
||||
extend AdjustOffsetMixin if has_parameter?(:adjust_offset)
|
||||
super
|
||||
end
|
||||
|
||||
module CheckOffsetMixin
|
||||
def do_read(io) #:nodoc:
|
||||
check_offset(io)
|
||||
super(io)
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def check_offset(io)
|
||||
actual_offset = io.offset
|
||||
expected = eval_parameter(:check_offset, offset: actual_offset)
|
||||
|
||||
if !expected
|
||||
raise ValidityError, "offset not as expected for #{debug_name}"
|
||||
elsif actual_offset != expected && expected != true
|
||||
raise ValidityError,
|
||||
"offset is '#{actual_offset}' but " +
|
||||
"expected '#{expected}' for #{debug_name}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module AdjustOffsetMixin
|
||||
def do_read(io) #:nodoc:
|
||||
adjust_offset(io)
|
||||
super(io)
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def adjust_offset(io)
|
||||
actual_offset = io.offset
|
||||
expected = eval_parameter(:adjust_offset)
|
||||
if actual_offset != expected
|
||||
begin
|
||||
seek = expected - actual_offset
|
||||
io.seekbytes(seek)
|
||||
warn "adjusting stream position by #{seek} bytes" if $VERBOSE
|
||||
rescue
|
||||
raise ValidityError,
|
||||
"offset is '#{actual_offset}' but couldn't seek to " +
|
||||
"expected '#{expected}' for #{debug_name}"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Add these offset options to Base
|
||||
class Base
|
||||
include CheckOrAdjustOffsetPlugin
|
||||
end
|
||||
end
|
||||
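As the warning above says, the supported way to jump around the stream is BinData::Skip with :to_abs_offset rather than :check_offset / :adjust_offset. A sketch (not part of the vendored source; PaddedHeader is a made-up illustration):

require 'bindata'

class PaddedHeader < BinData::Record
  string :magic, read_length: 4
  skip   to_abs_offset: 16      # consume everything up to absolute offset 16
  uint8  :version
end

PaddedHeader.read("MAGI" + "\x00" * 12 + "\x02").version  #=> 2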
128
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/params.rb
vendored
Normal file
@ -0,0 +1,128 @@
|
||||
require 'bindata/lazy'
|
||||
|
||||
module BinData
|
||||
module AcceptedParametersPlugin
|
||||
# Mandatory parameters must be present when instantiating a data object.
|
||||
def mandatory_parameters(*args)
|
||||
accepted_parameters.mandatory(*args)
|
||||
end
|
||||
|
||||
# Optional parameters may be present when instantiating a data object.
|
||||
def optional_parameters(*args)
|
||||
accepted_parameters.optional(*args)
|
||||
end
|
||||
|
||||
# Default parameters can be overridden when instantiating a data object.
|
||||
def default_parameters(*args)
|
||||
accepted_parameters.default(*args)
|
||||
end
|
||||
|
||||
# Mutually exclusive parameters may not all be present when
|
||||
# instantiating a data object.
|
||||
def mutually_exclusive_parameters(*args)
|
||||
accepted_parameters.mutually_exclusive(*args)
|
||||
end
|
||||
|
||||
alias mandatory_parameter mandatory_parameters
|
||||
alias optional_parameter optional_parameters
|
||||
alias default_parameter default_parameters
|
||||
|
||||
def accepted_parameters #:nodoc:
|
||||
@accepted_parameters ||= begin
|
||||
ancestor_params = superclass.respond_to?(:accepted_parameters) ?
|
||||
superclass.accepted_parameters : nil
|
||||
AcceptedParameters.new(ancestor_params)
|
||||
end
|
||||
end
|
||||
|
||||
# BinData objects accept parameters when initializing. AcceptedParameters
|
||||
# allow a BinData class to declaratively identify accepted parameters as
|
||||
# mandatory, optional, default or mutually exclusive.
|
||||
class AcceptedParameters
|
||||
def initialize(ancestor_parameters = nil)
|
||||
if ancestor_parameters
|
||||
@mandatory = ancestor_parameters.mandatory.dup
|
||||
@optional = ancestor_parameters.optional.dup
|
||||
@default = ancestor_parameters.default.dup
|
||||
@mutually_exclusive = ancestor_parameters.mutually_exclusive.dup
|
||||
else
|
||||
@mandatory = []
|
||||
@optional = []
|
||||
@default = Hash.new
|
||||
@mutually_exclusive = []
|
||||
end
|
||||
end
|
||||
|
||||
def mandatory(*args)
|
||||
unless args.empty?
|
||||
@mandatory.concat(to_syms(args))
|
||||
@mandatory.uniq!
|
||||
end
|
||||
@mandatory
|
||||
end
|
||||
|
||||
def optional(*args)
|
||||
unless args.empty?
|
||||
@optional.concat(to_syms(args))
|
||||
@optional.uniq!
|
||||
end
|
||||
@optional
|
||||
end
|
||||
|
||||
def default(args = nil)
|
||||
if args
|
||||
to_syms(args.keys) # call for side effect of validating names
|
||||
args.each_pair do |param, value|
|
||||
@default[param.to_sym] = value
|
||||
end
|
||||
end
|
||||
@default
|
||||
end
|
||||
|
||||
def mutually_exclusive(*args)
|
||||
arg1 = args.shift
|
||||
until args.empty?
|
||||
args.each do |arg2|
|
||||
@mutually_exclusive.push([arg1.to_sym, arg2.to_sym])
|
||||
@mutually_exclusive.uniq!
|
||||
end
|
||||
arg1 = args.shift
|
||||
end
|
||||
@mutually_exclusive
|
||||
end
|
||||
|
||||
def all
|
||||
(@mandatory + @optional + @default.keys).uniq
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def to_syms(args)
|
||||
syms = args.collect(&:to_sym)
|
||||
ensure_valid_names(syms)
|
||||
syms
|
||||
end
|
||||
|
||||
def ensure_valid_names(names)
|
||||
invalid_names = self.class.invalid_parameter_names
|
||||
names.each do |name|
|
||||
if invalid_names.include?(name)
|
||||
raise NameError.new("Rename parameter '#{name}' " \
|
||||
"as it shadows an existing method.", name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def self.invalid_parameter_names
|
||||
@invalid_names ||= begin
|
||||
all_names = LazyEvaluator.instance_methods(true) + Kernel.methods
|
||||
allowed_names = [:name, :type]
|
||||
invalid_names = (all_names - allowed_names).uniq
|
||||
|
||||
Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
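A sketch of how these declarations behave at instantiation time (not part of the vendored source), using BinData::Skip from later in this commit, which marks :length, :to_abs_offset and :until_valid as mutually exclusive:

require 'bindata'

BinData::Skip.new(length: 4)                    # fine
BinData::Skip.new(length: 4, to_abs_offset: 0)  # ArgumentError: params 'length' and
                                                #   'to_abs_offset' are mutually exclusive
BinData::Skip.new                               # ArgumentError: BinData::Skip requires
                                                #   either :length, :to_abs_offset or :until_valid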
143
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/primitive.rb
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
require 'bindata/base_primitive'
|
||||
require 'bindata/dsl'
|
||||
require 'bindata/struct'
|
||||
|
||||
module BinData
|
||||
# A Primitive is a declarative way to define a new BinData data type.
|
||||
# The data type must contain a primitive value only, i.e. numbers or strings.
|
||||
# For new data types that contain multiple values see BinData::Record.
|
||||
#
|
||||
# To define a new data type, set fields as if for Record and add a
|
||||
# #get and #set method to extract / convert the data between the fields
|
||||
# and the #value of the object.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# class PascalString < BinData::Primitive
|
||||
# uint8 :len, value: -> { data.length }
|
||||
# string :data, read_length: :len
|
||||
#
|
||||
# def get
|
||||
# self.data
|
||||
# end
|
||||
#
|
||||
# def set(v)
|
||||
# self.data = v
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# ps = PascalString.new(initial_value: "hello")
|
||||
# ps.to_binary_s #=> "\005hello"
|
||||
# ps.read("\003abcde")
|
||||
# ps #=> "abc"
|
||||
#
|
||||
# # Unsigned 24 bit big endian integer
|
||||
# class Uint24be < BinData::Primitive
|
||||
# uint8 :byte1
|
||||
# uint8 :byte2
|
||||
# uint8 :byte3
|
||||
#
|
||||
# def get
|
||||
# (self.byte1 << 16) | (self.byte2 << 8) | self.byte3
|
||||
# end
|
||||
#
|
||||
# def set(v)
|
||||
# v = 0 if v < 0
|
||||
# v = 0xffffff if v > 0xffffff
|
||||
#
|
||||
# self.byte1 = (v >> 16) & 0xff
|
||||
# self.byte2 = (v >> 8) & 0xff
|
||||
# self.byte3 = v & 0xff
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# u24 = Uint24be.new
|
||||
# u24.read("\x12\x34\x56")
|
||||
# "0x%x" % u24 #=> 0x123456
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Primitive objects accept all the parameters that BinData::BasePrimitive do.
|
||||
#
|
||||
class Primitive < BasePrimitive
|
||||
extend DSLMixin
|
||||
|
||||
unregister_self
|
||||
dsl_parser :primitive
|
||||
arg_processor :primitive
|
||||
|
||||
mandatory_parameter :struct_params
|
||||
|
||||
def initialize_instance
|
||||
super
|
||||
@struct = BinData::Struct.new(get_parameter(:struct_params), self)
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||
@struct.respond_to?(symbol, include_private) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args, &block) #:nodoc:
|
||||
if @struct.respond_to?(symbol)
|
||||
@struct.__send__(symbol, *args, &block)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
super(val)
|
||||
set(_value)
|
||||
@value = get
|
||||
end
|
||||
|
||||
def debug_name_of(child) #:nodoc:
|
||||
debug_name + "-internal-"
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
set(_value)
|
||||
@struct.do_write(io)
|
||||
end
|
||||
|
||||
def do_num_bytes
|
||||
set(_value)
|
||||
@struct.do_num_bytes
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def sensible_default
|
||||
get
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
@struct.do_read(io)
|
||||
get
|
||||
end
|
||||
|
||||
###########################################################################
|
||||
# To be implemented by subclasses
|
||||
|
||||
# Extracts the value for this data object from the fields of the
|
||||
# internal struct.
|
||||
def get
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# Sets the fields of the internal struct to represent +v+.
|
||||
def set(v)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# To be implemented by subclasses
|
||||
###########################################################################
|
||||
end
|
||||
|
||||
class PrimitiveArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
params[:struct_params] = params.create_sanitized_params(obj_class.dsl_params, BinData::Struct)
|
||||
end
|
||||
end
|
||||
end
|
||||
23
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/record.rb
vendored
Normal file
@ -0,0 +1,23 @@
require 'bindata/dsl'
require 'bindata/struct'

module BinData
  # A Record is a declarative wrapper around Struct.
  #
  # See +Struct+ for more info.
  class Record < BinData::Struct
    extend DSLMixin

    unregister_self
    dsl_parser :struct
    arg_processor :record
  end

  class RecordArgProcessor < StructArgProcessor
    include MultiFieldArgSeparator

    def sanitize_parameters!(obj_class, params)
      super(obj_class, params.merge!(obj_class.dsl_params))
    end
  end
end
134
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/registry.rb
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
module BinData
|
||||
|
||||
class UnRegisteredTypeError < StandardError ; end
|
||||
|
||||
# This registry contains a register of name -> class mappings.
|
||||
#
|
||||
# Numerics (integers and floating point numbers) have an endian property as
|
||||
# part of their name (e.g. int32be, float_le).
|
||||
#
|
||||
# Classes can be looked up based on their full name or an abbreviated +name+
|
||||
# with +hints+.
|
||||
#
|
||||
# There are two hints supported, :endian and :search_prefix.
|
||||
#
|
||||
# #lookup("int32", { endian: :big }) will return Int32Be.
|
||||
#
|
||||
# #lookup("my_type", { search_prefix: :ns }) will return NsMyType.
|
||||
#
|
||||
# Names are stored in under_score_style, not camelCase.
|
||||
class Registry
|
||||
|
||||
def initialize
|
||||
@registry = {}
|
||||
end
|
||||
|
||||
def register(name, class_to_register)
|
||||
return if name.nil? || class_to_register.nil?
|
||||
|
||||
formatted_name = underscore_name(name)
|
||||
warn_if_name_is_already_registered(formatted_name, class_to_register)
|
||||
|
||||
@registry[formatted_name] = class_to_register
|
||||
end
|
||||
|
||||
def unregister(name)
|
||||
@registry.delete(underscore_name(name))
|
||||
end
|
||||
|
||||
def lookup(name, hints = {})
|
||||
the_class = @registry[normalize_name(name, hints)]
|
||||
if the_class
|
||||
the_class
|
||||
elsif @registry[normalize_name(name, hints.merge(endian: :big))]
|
||||
raise(UnRegisteredTypeError, "#{name}, do you need to specify endian?")
|
||||
else
|
||||
raise(UnRegisteredTypeError, name)
|
||||
end
|
||||
end
|
||||
|
||||
# Convert CamelCase +name+ to underscore style.
|
||||
def underscore_name(name)
|
||||
name.
|
||||
to_s.
|
||||
sub(/.*::/, "").
|
||||
gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
|
||||
gsub(/([a-z\d])([A-Z])/, '\1_\2').
|
||||
tr("-", "_").
|
||||
downcase
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def normalize_name(name, hints)
|
||||
name = underscore_name(name)
|
||||
|
||||
if !registered?(name)
|
||||
search_prefix = [""].concat(Array(hints[:search_prefix]))
|
||||
search_prefix.each do |prefix|
|
||||
nwp = name_with_prefix(name, prefix)
|
||||
if registered?(nwp)
|
||||
name = nwp
|
||||
break
|
||||
end
|
||||
|
||||
nwe = name_with_endian(nwp, hints[:endian])
|
||||
if registered?(nwe)
|
||||
name = nwe
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
name
|
||||
end
|
||||
|
||||
def name_with_prefix(name, prefix)
|
||||
prefix = prefix.to_s.chomp("_")
|
||||
if prefix == ""
|
||||
name
|
||||
else
|
||||
"#{prefix}_#{name}"
|
||||
end
|
||||
end
|
||||
|
||||
def name_with_endian(name, endian)
|
||||
return name if endian.nil?
|
||||
|
||||
suffix = (endian == :little) ? "le" : "be"
|
||||
if /^u?int\d+$/ =~ name
|
||||
name + suffix
|
||||
else
|
||||
name + "_" + suffix
|
||||
end
|
||||
end
|
||||
|
||||
def registered?(name)
|
||||
register_dynamic_class(name) unless @registry.key?(name)
|
||||
|
||||
@registry.key?(name)
|
||||
end
|
||||
|
||||
def register_dynamic_class(name)
|
||||
if /^u?int\d+(le|be)$/ =~ name || /^s?bit\d+(le)?$/ =~ name
|
||||
class_name = name.gsub(/(?:^|_)(.)/) { $1.upcase }
|
||||
begin
|
||||
BinData.const_get(class_name)
|
||||
rescue NameError
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def warn_if_name_is_already_registered(name, class_to_register)
|
||||
prev_class = @registry[name]
|
||||
if $VERBOSE && prev_class && prev_class != class_to_register
|
||||
warn "warning: replacing registered class #{prev_class} " \
|
||||
"with #{class_to_register}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# A singleton registry of all registered classes.
|
||||
RegisteredClasses = Registry.new
|
||||
end
|
||||
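A sketch of registry lookups (not part of the vendored source): names are stored underscored, and the :endian hint lets an abbreviated numeric name resolve to a concrete class.

require 'bindata'

reg = BinData::RegisteredClasses          # the singleton created above
reg.underscore_name("MyCustomType")       #=> "my_custom_type"
reg.lookup("int32", { endian: :big })     #=> BinData::Int32be
reg.lookup("int32")                       # UnRegisteredTypeError: int32, do you
                                          #   need to specify endian?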
34
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/rest.rb
vendored
Normal file
@ -0,0 +1,34 @@
require "bindata/base_primitive"

module BinData
  # Rest will consume the input stream from the current position to the end of
  # the stream.  This will mainly be useful for debugging and developing.
  #
  #   require 'bindata'
  #
  #   class A < BinData::Record
  #     string :a, read_length: 5
  #     rest   :rest
  #   end
  #
  #   obj = A.read("abcdefghij")
  #   obj.a #=> "abcde"
  #   obj.rest #=> "fghij"
  #
  class Rest < BinData::BasePrimitive
    #---------------
    private

    def value_to_binary_string(val)
      val
    end

    def read_and_return_value(io)
      io.read_all_bytes
    end

    def sensible_default
      ""
    end
  end
end
372
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/sanitize.rb
vendored
Normal file
@ -0,0 +1,372 @@
|
||||
require 'bindata/registry'
|
||||
|
||||
module BinData
|
||||
|
||||
# Subclasses of this are sanitized
|
||||
class SanitizedParameter; end
|
||||
|
||||
class SanitizedPrototype < SanitizedParameter
|
||||
def initialize(obj_type, obj_params, hints)
|
||||
raw_hints = hints.dup
|
||||
if raw_hints[:endian].respond_to?(:endian)
|
||||
raw_hints[:endian] = raw_hints[:endian].endian
|
||||
end
|
||||
obj_params ||= {}
|
||||
|
||||
if BinData::Base === obj_type
|
||||
obj_class = obj_type
|
||||
else
|
||||
obj_class = RegisteredClasses.lookup(obj_type, raw_hints)
|
||||
end
|
||||
|
||||
if BinData::Base === obj_class
|
||||
@factory = obj_class
|
||||
else
|
||||
@obj_class = obj_class
|
||||
@obj_params = SanitizedParameters.new(obj_params, @obj_class, hints)
|
||||
end
|
||||
end
|
||||
|
||||
def has_parameter?(param)
|
||||
if defined? @factory
|
||||
@factory.has_parameter?(param)
|
||||
else
|
||||
@obj_params.has_parameter?(param)
|
||||
end
|
||||
end
|
||||
|
||||
def instantiate(value = nil, parent = nil)
|
||||
@factory ||= @obj_class.new(@obj_params)
|
||||
|
||||
@factory.new(value, parent)
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
|
||||
class SanitizedField < SanitizedParameter
|
||||
def initialize(name, field_type, field_params, hints)
|
||||
@name = name
|
||||
@prototype = SanitizedPrototype.new(field_type, field_params, hints)
|
||||
end
|
||||
|
||||
attr_reader :prototype
|
||||
|
||||
def name_as_sym
|
||||
@name.nil? ? nil : @name.to_sym
|
||||
end
|
||||
|
||||
def name
|
||||
@name
|
||||
end
|
||||
|
||||
def has_parameter?(param)
|
||||
@prototype.has_parameter?(param)
|
||||
end
|
||||
|
||||
def instantiate(value = nil, parent = nil)
|
||||
@prototype.instantiate(value, parent)
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
|
||||
class SanitizedFields < SanitizedParameter
|
||||
include Enumerable
|
||||
|
||||
def initialize(hints, base_fields = nil)
|
||||
@hints = hints
|
||||
if base_fields
|
||||
@fields = base_fields.raw_fields
|
||||
else
|
||||
@fields = []
|
||||
end
|
||||
end
|
||||
|
||||
def add_field(type, name, params)
|
||||
name = nil if name == ""
|
||||
|
||||
@fields << SanitizedField.new(name, type, params, @hints)
|
||||
end
|
||||
|
||||
def raw_fields
|
||||
@fields.dup
|
||||
end
|
||||
|
||||
def [](idx)
|
||||
@fields[idx]
|
||||
end
|
||||
|
||||
def empty?
|
||||
@fields.empty?
|
||||
end
|
||||
|
||||
def length
|
||||
@fields.length
|
||||
end
|
||||
|
||||
def each(&block)
|
||||
@fields.each(&block)
|
||||
end
|
||||
|
||||
def field_names
|
||||
@fields.collect(&:name_as_sym)
|
||||
end
|
||||
|
||||
def field_name?(name)
|
||||
@fields.detect { |f| f.name_as_sym == name.to_sym }
|
||||
end
|
||||
|
||||
def all_field_names_blank?
|
||||
@fields.all? { |f| f.name.nil? }
|
||||
end
|
||||
|
||||
def no_field_names_blank?
|
||||
@fields.all? { |f| f.name != nil }
|
||||
end
|
||||
|
||||
def any_field_has_parameter?(parameter)
|
||||
@fields.any? { |f| f.has_parameter?(parameter) }
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
|
||||
class SanitizedChoices < SanitizedParameter
|
||||
def initialize(choices, hints)
|
||||
@choices = {}
|
||||
choices.each_pair do |key, val|
|
||||
if SanitizedParameter === val
|
||||
prototype = val
|
||||
else
|
||||
type, param = val
|
||||
prototype = SanitizedPrototype.new(type, param, hints)
|
||||
end
|
||||
|
||||
if key == :default
|
||||
@choices.default = prototype
|
||||
else
|
||||
@choices[key] = prototype
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def [](key)
|
||||
@choices[key]
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
|
||||
class SanitizedBigEndian < SanitizedParameter
|
||||
def endian
|
||||
:big
|
||||
end
|
||||
end
|
||||
|
||||
class SanitizedLittleEndian < SanitizedParameter
|
||||
def endian
|
||||
:little
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
|
||||
# BinData objects are instantiated with parameters to determine their
|
||||
# behaviour. These parameters must be sanitized to ensure their values
|
||||
# are valid. When instantiating many objects with identical parameters,
|
||||
# such as an array of records, there is much duplicated sanitizing.
|
||||
#
|
||||
# The purpose of the sanitizing code is to eliminate the duplicated
|
||||
# validation.
|
||||
#
|
||||
# SanitizedParameters is a hash-like collection of parameters. Its purpose
|
||||
# is to recursively sanitize the parameters of an entire BinData object chain
|
||||
# at a single time.
|
||||
class SanitizedParameters < Hash
|
||||
|
||||
# Memoized constants
|
||||
BIG_ENDIAN = SanitizedBigEndian.new
|
||||
LITTLE_ENDIAN = SanitizedLittleEndian.new
|
||||
|
||||
class << self
|
||||
def sanitize(parameters, the_class)
|
||||
if SanitizedParameters === parameters
|
||||
parameters
|
||||
else
|
||||
SanitizedParameters.new(parameters, the_class, {})
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(parameters, the_class, hints)
|
||||
parameters.each_pair { |key, value| self[key.to_sym] = value }
|
||||
|
||||
@the_class = the_class
|
||||
|
||||
if hints[:endian]
|
||||
self[:endian] ||= hints[:endian]
|
||||
end
|
||||
|
||||
if hints[:search_prefix] && !hints[:search_prefix].empty?
|
||||
self[:search_prefix] = Array(self[:search_prefix]).concat(Array(hints[:search_prefix]))
|
||||
end
|
||||
|
||||
sanitize!
|
||||
end
|
||||
|
||||
alias_method :has_parameter?, :key?
|
||||
|
||||
def has_at_least_one_of?(*keys)
|
||||
keys.each do |key|
|
||||
return true if has_parameter?(key)
|
||||
end
|
||||
|
||||
false
|
||||
end
|
||||
|
||||
def warn_replacement_parameter(bad_key, suggested_key)
|
||||
if has_parameter?(bad_key)
|
||||
Kernel.warn ":#{bad_key} is not used with #{@the_class}. " \
|
||||
"You probably want to change this to :#{suggested_key}"
|
||||
end
|
||||
end
|
||||
|
||||
# def warn_renamed_parameter(old_key, new_key)
|
||||
# val = delete(old_key)
|
||||
# if val
|
||||
# self[new_key] = val
|
||||
# Kernel.warn ":#{old_key} has been renamed to :#{new_key} in #{@the_class}. " \
|
||||
# "Using :#{old_key} is now deprecated and will be removed in the future"
|
||||
# end
|
||||
# end
|
||||
|
||||
def must_be_integer(*keys)
|
||||
keys.each do |key|
|
||||
if has_parameter?(key)
|
||||
parameter = self[key]
|
||||
unless Symbol === parameter ||
|
||||
parameter.respond_to?(:arity) ||
|
||||
parameter.respond_to?(:to_int)
|
||||
raise ArgumentError, "parameter '#{key}' in #{@the_class} must " \
|
||||
"evaluate to an integer, got #{parameter.class}"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def rename_parameter(old_key, new_key)
|
||||
if has_parameter?(old_key)
|
||||
self[new_key] = delete(old_key)
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_object_prototype(key)
|
||||
sanitize(key) { |obj_type, obj_params| create_sanitized_object_prototype(obj_type, obj_params) }
|
||||
end
|
||||
|
||||
def sanitize_fields(key, &block)
|
||||
sanitize(key) do |fields|
|
||||
sanitized_fields = create_sanitized_fields
|
||||
yield(fields, sanitized_fields)
|
||||
sanitized_fields
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_choices(key, &block)
|
||||
sanitize(key) do |obj|
|
||||
create_sanitized_choices(yield(obj))
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_endian(key)
|
||||
sanitize(key) { |endian| create_sanitized_endian(endian) }
|
||||
end
|
||||
|
||||
def sanitize(key, &block)
|
||||
if needs_sanitizing?(key)
|
||||
self[key] = yield(self[key])
|
||||
end
|
||||
end
|
||||
|
||||
def create_sanitized_params(params, the_class)
|
||||
SanitizedParameters.new(params, the_class, hints)
|
||||
end
|
||||
|
||||
def hints
|
||||
{ endian: self[:endian], search_prefix: self[:search_prefix] }
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def sanitize!
|
||||
ensure_no_nil_values
|
||||
merge_default_parameters!
|
||||
|
||||
@the_class.arg_processor.sanitize_parameters!(@the_class, self)
|
||||
|
||||
ensure_mandatory_parameters_exist
|
||||
ensure_mutual_exclusion_of_parameters
|
||||
end
|
||||
|
||||
def needs_sanitizing?(key)
|
||||
has_key?(key) && ! self[key].is_a?(SanitizedParameter)
|
||||
end
|
||||
|
||||
def ensure_no_nil_values
|
||||
each do |key, value|
|
||||
if value.nil?
|
||||
raise ArgumentError,
|
||||
"parameter '#{key}' has nil value in #{@the_class}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def merge_default_parameters!
|
||||
@the_class.default_parameters.each do |key, value|
|
||||
self[key] = value unless has_key?(key)
|
||||
end
|
||||
end
|
||||
|
||||
def ensure_mandatory_parameters_exist
|
||||
@the_class.mandatory_parameters.each do |key|
|
||||
unless has_parameter?(key)
|
||||
raise ArgumentError,
|
||||
"parameter '#{key}' must be specified in #{@the_class}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def ensure_mutual_exclusion_of_parameters
|
||||
return if length < 2
|
||||
|
||||
@the_class.mutually_exclusive_parameters.each do |key1, key2|
|
||||
if has_parameter?(key1) && has_parameter?(key2)
|
||||
raise ArgumentError, "params '#{key1}' and '#{key2}' " \
|
||||
"are mutually exclusive in #{@the_class}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def create_sanitized_endian(endian)
|
||||
if endian == :big
|
||||
BIG_ENDIAN
|
||||
elsif endian == :little
|
||||
LITTLE_ENDIAN
|
||||
elsif endian == :big_and_little
|
||||
raise ArgumentError, "endian: :big or endian: :little is required"
|
||||
else
|
||||
raise ArgumentError, "unknown value for endian '#{endian}'"
|
||||
end
|
||||
end
|
||||
|
||||
def create_sanitized_choices(choices)
|
||||
SanitizedChoices.new(choices, hints)
|
||||
end
|
||||
|
||||
def create_sanitized_fields
|
||||
SanitizedFields.new(hints)
|
||||
end
|
||||
|
||||
def create_sanitized_object_prototype(obj_type, obj_params)
|
||||
SanitizedPrototype.new(obj_type, obj_params, hints)
|
||||
end
|
||||
end
|
||||
#----------------------------------------------------------------------------
|
||||
end
|
||||
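A sketch of the endian hint in action (not part of the vendored source; BigHeader is a made-up illustration): the endian declared on the record is merged into each field's sanitized parameters, so bare uint16 / uint32 names resolve to their big-endian classes.

require 'bindata'

class BigHeader < BinData::Record
  endian :big
  uint16 :flags      # resolves to BinData::Uint16be via the :endian hint
  uint32 :body_len
end

BigHeader.read("\x00\x01\x00\x00\x00\x08").body_len  #=> 8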
133
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/skip.rb
vendored
Normal file
@ -0,0 +1,133 @@
|
||||
require "bindata/base_primitive"
|
||||
|
||||
module BinData
|
||||
# Skip will skip over bytes from the input stream. If the stream is not
|
||||
# seekable, then the bytes are consumed and discarded.
|
||||
#
|
||||
# When writing, skip will write the appropriate number of zero bytes.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# class A < BinData::Record
|
||||
# skip length: 5
|
||||
# string :a, read_length: 5
|
||||
# end
|
||||
#
|
||||
# obj = A.read("abcdefghij")
|
||||
# obj.a #=> "fghij"
|
||||
#
|
||||
#
|
||||
# class B < BinData::Record
|
||||
# skip until_valid: [:string, {read_length: 2, assert: "ef"} ]
|
||||
# string :b, read_length: 5
|
||||
# end
|
||||
#
|
||||
# obj = B.read("abcdefghij")
|
||||
# obj.b #=> "efghi"
|
||||
#
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Skip objects accept all the params that BinData::BasePrimitive
|
||||
# does, as well as the following:
|
||||
#
|
||||
# <tt>:length</tt>:: The number of bytes to skip.
|
||||
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
|
||||
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
|
||||
# This parameter contains a type that will raise
|
||||
# a BinData::ValidityError unless an acceptable byte
|
||||
# sequence is found. The type is represented by a
|
||||
# Symbol, or if the type is to have params
|
||||
# passed to it, then it should be provided as
|
||||
# <tt>[type_symbol, hash_params]</tt>.
|
||||
#
|
||||
class Skip < BinData::BasePrimitive
|
||||
arg_processor :skip
|
||||
|
||||
optional_parameters :length, :to_abs_offset, :until_valid
|
||||
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
|
||||
|
||||
def initialize_shared_instance
|
||||
extend SkipLengthPlugin if has_parameter?(:length)
|
||||
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
|
||||
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
|
||||
super
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def value_to_binary_string(val)
|
||||
len = skip_length
|
||||
if len < 0
|
||||
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
|
||||
end
|
||||
|
||||
"\000" * skip_length
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
len = skip_length
|
||||
if len < 0
|
||||
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
|
||||
end
|
||||
|
||||
io.seekbytes(len)
|
||||
""
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
""
|
||||
end
|
||||
end
|
||||
|
||||
class SkipArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
|
||||
raise ArgumentError,
|
||||
"#{obj_class} requires either :length, :to_abs_offset or :until_valid"
|
||||
end
|
||||
params.must_be_integer(:to_abs_offset, :length)
|
||||
params.sanitize_object_prototype(:until_valid)
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :length parameter
|
||||
module SkipLengthPlugin
|
||||
def skip_length
|
||||
eval_parameter(:length)
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :to_abs_offset parameter
|
||||
module SkipToAbsOffsetPlugin
|
||||
def skip_length
|
||||
eval_parameter(:to_abs_offset) - abs_offset
|
||||
end
|
||||
end
|
||||
|
||||
# Logic for the :until_valid parameter
|
||||
module SkipUntilValidPlugin
|
||||
def skip_length
|
||||
# no skipping when writing
|
||||
0
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
prototype = get_parameter(:until_valid)
|
||||
validator = prototype.instantiate(nil, self)
|
||||
|
||||
valid = false
|
||||
until valid
|
||||
begin
|
||||
io.with_readahead do
|
||||
validator.read(io)
|
||||
valid = true
|
||||
end
|
||||
rescue ValidityError
|
||||
io.readbytes(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
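# A minimal usage sketch for BinData::Skip, based on the documentation above.
# The record names and sample byte strings are illustrative only.
require 'bindata'

class PaddedHeader < BinData::Record
  skip   length: 4                 # discard 4 bytes of padding
  string :tag, read_length: 4
end

PaddedHeader.read("\x00\x00\x00\x00DATA").tag   #=> "DATA"

# :until_valid scans forward until the given prototype reads without a
# BinData::ValidityError, then leaves the stream positioned at the match.
class Framed < BinData::Record
  skip   until_valid: [:string, { read_length: 2, assert: "OK" }]
  string :payload, read_length: 2
end

Framed.read("xxxOKyy").payload   #=> "OK"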
|
||||
153
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/string.rb
vendored
Normal file
153
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/string.rb
vendored
Normal file
@ -0,0 +1,153 @@
|
||||
require "bindata/base_primitive"
|
||||
|
||||
module BinData
|
||||
# A String is a sequence of bytes. This is the same as strings in Ruby 1.8.
|
||||
# The issue of character encoding is ignored by this class.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# data = "abcdefghij"
|
||||
#
|
||||
# obj = BinData::String.new(read_length: 5)
|
||||
# obj.read(data)
|
||||
# obj #=> "abcde"
|
||||
#
|
||||
# obj = BinData::String.new(length: 6)
|
||||
# obj.read(data)
|
||||
# obj #=> "abcdef"
|
||||
# obj.assign("abcdefghij")
|
||||
# obj #=> "abcdef"
|
||||
# obj.assign("abcd")
|
||||
# obj #=> "abcd\000\000"
|
||||
#
|
||||
# obj = BinData::String.new(length: 6, trim_padding: true)
|
||||
# obj.assign("abcd")
|
||||
# obj #=> "abcd"
|
||||
# obj.to_binary_s #=> "abcd\000\000"
|
||||
#
|
||||
# obj = BinData::String.new(length: 6, pad_byte: 'A')
|
||||
# obj.assign("abcd")
|
||||
# obj #=> "abcdAA"
|
||||
# obj.to_binary_s #=> "abcdAA"
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# String objects accept all the params that BinData::BasePrimitive
|
||||
# does, as well as the following:
|
||||
#
|
||||
# <tt>:read_length</tt>:: The length in bytes to use when reading a value.
|
||||
# <tt>:length</tt>:: The fixed length of the string. If a shorter
|
||||
# string is set, it will be padded to this length.
|
||||
# <tt>:pad_byte</tt>:: The byte to use when padding a string to a
|
||||
# set length. Valid values are Integers and
|
||||
# Strings of length 1. "\0" is the default.
|
||||
# <tt>:pad_front</tt>:: Signifies that the padding occurs at the front
|
||||
# of the string rather than the end. Default
|
||||
# is false.
|
||||
# <tt>:trim_padding</tt>:: Boolean, default false. If set, #value will
|
||||
# return the value with all pad_bytes trimmed
|
||||
# from the end of the string. The value will
|
||||
# not be trimmed when writing.
|
||||
class String < BinData::BasePrimitive
|
||||
arg_processor :string
|
||||
|
||||
optional_parameters :read_length, :length, :trim_padding, :pad_front, :pad_left
|
||||
default_parameters pad_byte: "\0"
|
||||
mutually_exclusive_parameters :read_length, :length
|
||||
mutually_exclusive_parameters :length, :value
|
||||
|
||||
def initialize_shared_instance
|
||||
if (has_parameter?(:value) || has_parameter?(:asserted_value)) &&
|
||||
!has_parameter?(:read_length)
|
||||
extend WarnNoReadLengthPlugin
|
||||
end
|
||||
super
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
super(binary_string(val))
|
||||
end
|
||||
|
||||
def snapshot
|
||||
# override to trim padding
|
||||
snap = super
|
||||
snap = clamp_to_length(snap)
|
||||
|
||||
if get_parameter(:trim_padding)
|
||||
trim_padding(snap)
|
||||
else
|
||||
snap
|
||||
end
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def clamp_to_length(str)
|
||||
str = binary_string(str)
|
||||
|
||||
len = eval_parameter(:length) || str.length
|
||||
if str.length == len
|
||||
str
|
||||
elsif str.length > len
|
||||
str.slice(0, len)
|
||||
else
|
||||
padding = (eval_parameter(:pad_byte) * (len - str.length))
|
||||
if get_parameter(:pad_front)
|
||||
padding + str
|
||||
else
|
||||
str + padding
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def trim_padding(str)
|
||||
if get_parameter(:pad_front)
|
||||
str.sub(/\A#{eval_parameter(:pad_byte)}*/, "")
|
||||
else
|
||||
str.sub(/#{eval_parameter(:pad_byte)}*\z/, "")
|
||||
end
|
||||
end
|
||||
|
||||
def value_to_binary_string(val)
|
||||
clamp_to_length(val)
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
len = eval_parameter(:read_length) || eval_parameter(:length) || 0
|
||||
io.readbytes(len)
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
""
|
||||
end
|
||||
end
|
||||
|
||||
class StringArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
params.warn_replacement_parameter(:initial_length, :read_length)
|
||||
params.must_be_integer(:read_length, :length)
|
||||
params.rename_parameter(:pad_left, :pad_front)
|
||||
params.sanitize(:pad_byte) { |byte| sanitized_pad_byte(byte) }
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def sanitized_pad_byte(byte)
|
||||
pad_byte = byte.is_a?(Integer) ? byte.chr : byte.to_s
|
||||
if pad_byte.bytesize > 1
|
||||
raise ArgumentError, ":pad_byte must not contain more than 1 byte"
|
||||
end
|
||||
pad_byte
|
||||
end
|
||||
end
|
||||
|
||||
# Warns when reading if :value && no :read_length
|
||||
module WarnNoReadLengthPlugin
|
||||
def read_and_return_value(io)
|
||||
warn "#{debug_name} does not have a :read_length parameter - returning empty string"
|
||||
""
|
||||
end
|
||||
end
|
||||
end
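# A short sketch of the padding behaviour documented above for BinData::String.
# The values shown are illustrative.
require 'bindata'

str = BinData::String.new(length: 8, trim_padding: true)
str.assign("abc")
str               #=> "abc"                       (padding trimmed on read back)
str.to_binary_s   #=> "abc\x00\x00\x00\x00\x00"   (fixed length kept when writing)

front = BinData::String.new(length: 4, pad_byte: "0", pad_front: true)
front.assign("7")
front.to_binary_s #=> "0007"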
|
||||
96
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/stringz.rb
vendored
Normal file
96
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/stringz.rb
vendored
Normal file
@ -0,0 +1,96 @@
|
||||
require "bindata/base_primitive"
|
||||
|
||||
module BinData
|
||||
# A BinData::Stringz object is a container for a zero ("\0") terminated
|
||||
# string.
|
||||
#
|
||||
# For convenience, the zero terminator is not necessary when setting the
|
||||
# value. Likewise, the returned value will not be zero terminated.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# data = "abcd\x00efgh"
|
||||
#
|
||||
# obj = BinData::Stringz.new
|
||||
# obj.read(data)
|
||||
# obj.snapshot #=> "abcd"
|
||||
# obj.num_bytes #=> 5
|
||||
# obj.to_binary_s #=> "abcd\000"
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Stringz objects accept all the params that BinData::BasePrimitive
|
||||
# does, as well as the following:
|
||||
#
|
||||
# <tt>:max_length</tt>:: The maximum length of the string including the zero
|
||||
# byte.
|
||||
class Stringz < BinData::BasePrimitive
|
||||
|
||||
optional_parameters :max_length
|
||||
|
||||
def assign(val)
|
||||
super(binary_string(val))
|
||||
end
|
||||
|
||||
def snapshot
|
||||
# override to always remove trailing zero bytes
|
||||
result = super
|
||||
trim_and_zero_terminate(result).chomp("\0")
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def value_to_binary_string(val)
|
||||
trim_and_zero_terminate(val)
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
max_length = eval_parameter(:max_length)
|
||||
str = ""
|
||||
i = 0
|
||||
ch = nil
|
||||
|
||||
# read until zero byte or we have read in the max number of bytes
|
||||
while ch != "\0" && i != max_length
|
||||
ch = io.readbytes(1)
|
||||
str << ch
|
||||
i += 1
|
||||
end
|
||||
|
||||
trim_and_zero_terminate(str)
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
""
|
||||
end
|
||||
|
||||
def trim_and_zero_terminate(str)
|
||||
result = binary_string(str)
|
||||
truncate_after_first_zero_byte!(result)
|
||||
trim_to!(result, eval_parameter(:max_length))
|
||||
append_zero_byte_if_needed!(result)
|
||||
result
|
||||
end
|
||||
|
||||
def truncate_after_first_zero_byte!(str)
|
||||
str.sub!(/([^\0]*\0).*/, '\1')
|
||||
end
|
||||
|
||||
def trim_to!(str, max_length = nil)
|
||||
if max_length
|
||||
max_length = 1 if max_length < 1
|
||||
str.slice!(max_length..-1)
|
||||
if str.length == max_length && str[-1, 1] != "\0"
|
||||
str[-1, 1] = "\0"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def append_zero_byte_if_needed!(str)
|
||||
if str.length == 0 || str[-1, 1] != "\0"
|
||||
str << "\0"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
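# A small sketch of BinData::Stringz, following the documentation above.
require 'bindata'

obj = BinData::Stringz.new(max_length: 6)
obj.read("hello\x00world")
obj.snapshot      #=> "hello"       (terminator stripped from the value)
obj.num_bytes     #=> 6             (value plus the trailing zero byte)
obj.to_binary_s   #=> "hello\x00"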
|
||||
415
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/struct.rb
vendored
Normal file
415
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/struct.rb
vendored
Normal file
@ -0,0 +1,415 @@
|
||||
require 'bindata/base'
|
||||
|
||||
module BinData
|
||||
|
||||
class Base
|
||||
optional_parameter :onlyif, :byte_align # Used by Struct
|
||||
end
|
||||
|
||||
# A Struct is an ordered collection of named data objects.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# class Tuple < BinData::Record
|
||||
# int8 :x
|
||||
# int8 :y
|
||||
# int8 :z
|
||||
# end
|
||||
#
|
||||
# obj = BinData::Struct.new(hide: :a,
|
||||
# fields: [ [:int32le, :a],
|
||||
# [:int16le, :b],
|
||||
# [:tuple, :s] ])
|
||||
# obj.field_names #=> [:b, :s]
|
||||
#
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:fields</tt>:: An array specifying the fields for this struct.
|
||||
# Each element of the array is of the form [type, name,
|
||||
# params]. Type is a symbol representing a registered
|
||||
# type. Name is the name of this field. Params is an
|
||||
# optional hash of parameters to pass to this field
|
||||
# when instantiating it. If name is "" or nil, then
|
||||
# that field is anonymous and behaves as a hidden field.
|
||||
# <tt>:hide</tt>:: A list of the names of fields that are to be hidden
|
||||
# from the outside world. Hidden fields don't appear
|
||||
# in #snapshot or #field_names but are still accessible
|
||||
# by name.
|
||||
# <tt>:endian</tt>:: Either :little or :big. This specifies the default
|
||||
# endian of any numerics in this struct, or in any
|
||||
# nested data objects.
|
||||
# <tt>:search_prefix</tt>:: Allows abbreviated type names. If a type is
|
||||
# unrecognised, then each prefix is applied until
|
||||
# a match is found.
|
||||
#
|
||||
# == Field Parameters
|
||||
#
|
||||
# Fields may have have extra parameters as listed below:
|
||||
#
|
||||
# [<tt>:onlyif</tt>] Used to indicate a data object is optional.
|
||||
# if +false+, this object will not be included in any
|
||||
# calls to #read, #write, #num_bytes or #snapshot.
|
||||
# [<tt>:byte_align</tt>] This field's rel_offset must be a multiple of
|
||||
# <tt>:byte_align</tt>.
|
||||
class Struct < BinData::Base
|
||||
arg_processor :struct
|
||||
|
||||
mandatory_parameter :fields
|
||||
optional_parameters :endian, :search_prefix, :hide
|
||||
|
||||
# These reserved words may not be used as field names
|
||||
RESERVED =
|
||||
Hash[*
|
||||
(Hash.instance_methods +
|
||||
%w{alias and begin break case class def defined do else elsif
|
||||
end ensure false for if in module next nil not or redo
|
||||
rescue retry return self super then true undef unless until
|
||||
when while yield} +
|
||||
%w{array element index value} +
|
||||
%w{type initial_length read_until} +
|
||||
%w{fields endian search_prefix hide only_if byte_align} +
|
||||
%w{choices selection copy_on_change} +
|
||||
%w{read_abs_offset struct_params}).collect(&:to_sym).
|
||||
uniq.collect { |key| [key, true] }.flatten
|
||||
]
|
||||
|
||||
def initialize_shared_instance
|
||||
fields = get_parameter(:fields)
|
||||
@field_names = fields.field_names.freeze
|
||||
extend ByteAlignPlugin if fields.any_field_has_parameter?(:byte_align)
|
||||
define_field_accessors
|
||||
super
|
||||
end
|
||||
|
||||
def initialize_instance
|
||||
@field_objs = []
|
||||
end
|
||||
|
||||
def clear #:nodoc:
|
||||
@field_objs.each { |f| f.clear unless f.nil? }
|
||||
end
|
||||
|
||||
def clear? #:nodoc:
|
||||
@field_objs.all? { |f| f.nil? || f.clear? }
|
||||
end
|
||||
|
||||
def assign(val)
|
||||
clear
|
||||
assign_fields(val)
|
||||
end
|
||||
|
||||
def snapshot
|
||||
snapshot = Snapshot.new
|
||||
field_names.each do |name|
|
||||
obj = find_obj_for_name(name)
|
||||
snapshot[name] = obj.snapshot if include_obj?(obj)
|
||||
end
|
||||
snapshot
|
||||
end
|
||||
|
||||
# Returns a list of the names of all fields accessible through this
|
||||
# object. +include_hidden+ specifies whether to include hidden names
|
||||
# in the listing.
|
||||
def field_names(include_hidden = false)
|
||||
if include_hidden
|
||||
@field_names.compact
|
||||
else
|
||||
hidden = get_parameter(:hide) || []
|
||||
@field_names.compact - hidden
|
||||
end
|
||||
end
|
||||
|
||||
def debug_name_of(child) #:nodoc:
|
||||
field_name = @field_names[find_index_of(child)]
|
||||
"#{debug_name}.#{field_name}"
|
||||
end
|
||||
|
||||
def offset_of(child) #:nodoc:
|
||||
instantiate_all_objs
|
||||
sum = sum_num_bytes_below_index(find_index_of(child))
|
||||
child.bit_aligned? ? sum.floor : sum.ceil
|
||||
end
|
||||
|
||||
def do_read(io) #:nodoc:
|
||||
instantiate_all_objs
|
||||
@field_objs.each { |f| f.do_read(io) if include_obj?(f) }
|
||||
end
|
||||
|
||||
def do_write(io) #:nodoc:
|
||||
instantiate_all_objs
|
||||
@field_objs.each { |f| f.do_write(io) if include_obj?(f) }
|
||||
end
|
||||
|
||||
def do_num_bytes #:nodoc:
|
||||
instantiate_all_objs
|
||||
sum_num_bytes_for_all_fields
|
||||
end
|
||||
|
||||
def [](key)
|
||||
find_obj_for_name(key)
|
||||
end
|
||||
|
||||
def []=(key, value)
|
||||
obj = find_obj_for_name(key)
|
||||
if obj
|
||||
obj.assign(value)
|
||||
end
|
||||
end
|
||||
|
||||
def key?(key)
|
||||
@field_names.index(base_field_name(key))
|
||||
end
|
||||
|
||||
def each_pair
|
||||
@field_names.compact.each do |name|
|
||||
yield [name, find_obj_for_name(name)]
|
||||
end
|
||||
end
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def define_field_accessors
|
||||
get_parameter(:fields).each_with_index do |field, i|
|
||||
name = field.name_as_sym
|
||||
define_field_accessors_for(name, i) if name
|
||||
end
|
||||
end
|
||||
|
||||
def define_field_accessors_for(name, index)
|
||||
define_singleton_method(name) do
|
||||
instantiate_obj_at(index) if @field_objs[index].nil?
|
||||
@field_objs[index]
|
||||
end
|
||||
define_singleton_method("#{name}=") do |*vals|
|
||||
instantiate_obj_at(index) if @field_objs[index].nil?
|
||||
@field_objs[index].assign(*vals)
|
||||
end
|
||||
define_singleton_method("#{name}?") do
|
||||
instantiate_obj_at(index) if @field_objs[index].nil?
|
||||
include_obj?(@field_objs[index])
|
||||
end
|
||||
end
|
||||
|
||||
def find_index_of(obj)
|
||||
@field_objs.index { |el| el.equal?(obj) }
|
||||
end
|
||||
|
||||
def find_obj_for_name(name)
|
||||
index = @field_names.index(base_field_name(name))
|
||||
if index
|
||||
instantiate_obj_at(index)
|
||||
@field_objs[index]
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def base_field_name(name)
|
||||
name.to_s.sub(/(=|\?)\z/, "").to_sym
|
||||
end
|
||||
|
||||
def instantiate_all_objs
|
||||
@field_names.each_index { |i| instantiate_obj_at(i) }
|
||||
end
|
||||
|
||||
def instantiate_obj_at(index)
|
||||
if @field_objs[index].nil?
|
||||
field = get_parameter(:fields)[index]
|
||||
@field_objs[index] = field.instantiate(nil, self)
|
||||
end
|
||||
end
|
||||
|
||||
def assign_fields(val)
|
||||
src = as_stringified_hash(val)
|
||||
|
||||
@field_names.compact.each do |name|
|
||||
obj = find_obj_for_name(name)
|
||||
if obj && src.key?(name)
|
||||
obj.assign(src[name])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def as_stringified_hash(val)
|
||||
if BinData::Struct === val
|
||||
val
|
||||
elsif val.nil?
|
||||
{}
|
||||
else
|
||||
hash = Snapshot.new
|
||||
val.each_pair { |k,v| hash[k] = v }
|
||||
hash
|
||||
end
|
||||
end
|
||||
|
||||
def sum_num_bytes_for_all_fields
|
||||
sum_num_bytes_below_index(@field_objs.length)
|
||||
end
|
||||
|
||||
def sum_num_bytes_below_index(index)
|
||||
(0...index).inject(0) do |sum, i|
|
||||
obj = @field_objs[i]
|
||||
if include_obj?(obj)
|
||||
nbytes = obj.do_num_bytes
|
||||
(nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
|
||||
else
|
||||
sum
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def include_obj?(obj)
|
||||
!obj.has_parameter?(:onlyif) || obj.eval_parameter(:onlyif)
|
||||
end
|
||||
|
||||
# A hash that can be accessed via attributes.
|
||||
class Snapshot < ::Hash #:nodoc:
|
||||
def []=(key, value)
|
||||
super unless value.nil?
|
||||
end
|
||||
|
||||
def respond_to?(symbol, include_private = false)
|
||||
key?(symbol) || super
|
||||
end
|
||||
|
||||
def method_missing(symbol, *args)
|
||||
key?(symbol) ? self[symbol] : super
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Align fields to a multiple of :byte_align
|
||||
module ByteAlignPlugin
|
||||
def do_read(io)
|
||||
initial_offset = io.offset
|
||||
instantiate_all_objs
|
||||
@field_objs.each do |f|
|
||||
if include_obj?(f)
|
||||
if align_obj?(f)
|
||||
io.seekbytes(bytes_to_align(f, io.offset - initial_offset))
|
||||
end
|
||||
f.do_read(io)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
initial_offset = io.offset
|
||||
instantiate_all_objs
|
||||
@field_objs.each do |f|
|
||||
if include_obj?(f)
|
||||
if align_obj?(f)
|
||||
io.writebytes("\x00" * bytes_to_align(f, io.offset - initial_offset))
|
||||
end
|
||||
f.do_write(io)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def sum_num_bytes_below_index(index)
|
||||
sum = 0
|
||||
(0...@field_objs.length).each do |i|
|
||||
obj = @field_objs[i]
|
||||
if include_obj?(obj)
|
||||
sum = sum.ceil + bytes_to_align(obj, sum.ceil) if align_obj?(obj)
|
||||
|
||||
break if i >= index
|
||||
|
||||
nbytes = obj.do_num_bytes
|
||||
sum = (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
|
||||
end
|
||||
end
|
||||
|
||||
sum
|
||||
end
|
||||
|
||||
def bytes_to_align(obj, rel_offset)
|
||||
align = obj.eval_parameter(:byte_align)
|
||||
(align - (rel_offset % align)) % align
|
||||
end
|
||||
|
||||
def align_obj?(obj)
|
||||
obj.has_parameter?(:byte_align)
|
||||
end
|
||||
end
|
||||
|
||||
class StructArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params)
|
||||
sanitize_endian(params)
|
||||
sanitize_search_prefix(params)
|
||||
sanitize_fields(obj_class, params)
|
||||
sanitize_hide(params)
|
||||
end
|
||||
|
||||
#-------------
|
||||
private
|
||||
|
||||
def sanitize_endian(params)
|
||||
params.sanitize_endian(:endian)
|
||||
end
|
||||
|
||||
def sanitize_search_prefix(params)
|
||||
params.sanitize(:search_prefix) do |sprefix|
|
||||
search_prefix = []
|
||||
Array(sprefix).each do |prefix|
|
||||
prefix = prefix.to_s.chomp("_")
|
||||
search_prefix << prefix if prefix != ""
|
||||
end
|
||||
|
||||
search_prefix
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_fields(obj_class, params)
|
||||
params.sanitize_fields(:fields) do |fields, sanitized_fields|
|
||||
fields.each do |ftype, fname, fparams|
|
||||
sanitized_fields.add_field(ftype, fname, fparams)
|
||||
end
|
||||
|
||||
field_names = sanitized_field_names(sanitized_fields)
|
||||
ensure_field_names_are_valid(obj_class, field_names)
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_hide(params)
|
||||
params.sanitize(:hide) do |hidden|
|
||||
field_names = sanitized_field_names(params[:fields])
|
||||
hfield_names = hidden_field_names(hidden)
|
||||
|
||||
hfield_names & field_names
|
||||
end
|
||||
end
|
||||
|
||||
def sanitized_field_names(sanitized_fields)
|
||||
sanitized_fields.field_names.compact
|
||||
end
|
||||
|
||||
def hidden_field_names(hidden)
|
||||
(hidden || []).collect(&:to_sym)
|
||||
end
|
||||
|
||||
def ensure_field_names_are_valid(obj_class, field_names)
|
||||
reserved_names = BinData::Struct::RESERVED
|
||||
|
||||
field_names.each do |name|
|
||||
if obj_class.method_defined?(name)
|
||||
raise NameError.new("Rename field '#{name}' in #{obj_class}, " \
|
||||
"as it shadows an existing method.", name)
|
||||
end
|
||||
if reserved_names.include?(name)
|
||||
raise NameError.new("Rename field '#{name}' in #{obj_class}, " \
|
||||
"as it is a reserved name.", name)
|
||||
end
|
||||
if field_names.count(name) != 1
|
||||
raise NameError.new("field '#{name}' in #{obj_class}, " \
|
||||
"is defined multiple times.", name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
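# A sketch of BinData::Struct with :endian, :hide and a field-dependent
# parameter, based on the parameters documented above. The field names and
# sample bytes are illustrative.
require 'bindata'

layout = BinData::Struct.new(
  endian: :little,
  hide:   [:len],
  fields: [[:uint16, :len],
           [:string, :name, { read_length: :len }]]
)

layout.read("\x05\x00alpha")
layout.name                  #=> "alpha"
layout.field_names           #=> [:name]   (:len is hidden)
layout.field_names(true)     #=> [:len, :name]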
|
||||
95
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/trace.rb
vendored
Normal file
95
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/trace.rb
vendored
Normal file
@ -0,0 +1,95 @@
|
||||
module BinData
|
||||
# reference to the current tracer
|
||||
@tracer ||= nil
|
||||
|
||||
class Tracer #:nodoc:
|
||||
def initialize(io)
|
||||
@trace_io = io
|
||||
end
|
||||
|
||||
def trace(msg)
|
||||
@trace_io.puts(msg)
|
||||
end
|
||||
|
||||
def trace_obj(obj_name, val)
|
||||
if val.length > 30
|
||||
val = val.slice(0..30) + "..."
|
||||
end
|
||||
|
||||
trace "#{obj_name} => #{val}"
|
||||
end
|
||||
end
|
||||
|
||||
# Turn on trace information when reading a BinData object.
|
||||
# If +block+ is given then the tracing only occurs for that block.
|
||||
# This is useful for debugging a BinData declaration.
|
||||
def trace_reading(io = STDERR)
|
||||
@tracer = Tracer.new(io)
|
||||
[BasePrimitive, Choice].each(&:turn_on_tracing)
|
||||
|
||||
if block_given?
|
||||
begin
|
||||
yield
|
||||
ensure
|
||||
[BasePrimitive, Choice].each(&:turn_off_tracing)
|
||||
@tracer = nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def trace_message #:nodoc:
|
||||
yield @tracer if @tracer
|
||||
end
|
||||
|
||||
module_function :trace_reading, :trace_message
|
||||
|
||||
class BasePrimitive < BinData::Base
|
||||
class << self
|
||||
def turn_on_tracing
|
||||
alias_method :do_read_without_hook, :do_read
|
||||
alias_method :do_read, :do_read_with_hook
|
||||
end
|
||||
|
||||
def turn_off_tracing
|
||||
alias_method :do_read, :do_read_without_hook
|
||||
end
|
||||
end
|
||||
|
||||
def do_read_with_hook(io)
|
||||
do_read_without_hook(io)
|
||||
trace_value
|
||||
end
|
||||
|
||||
def trace_value
|
||||
BinData.trace_message do |tracer|
|
||||
value_string = _value.inspect
|
||||
tracer.trace_obj(debug_name, value_string)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
class Choice < BinData::Base
|
||||
class << self
|
||||
def turn_on_tracing
|
||||
alias_method :do_read_without_hook, :do_read
|
||||
alias_method :do_read, :do_read_with_hook
|
||||
end
|
||||
|
||||
def turn_off_tracing
|
||||
alias_method :do_read, :do_read_without_hook
|
||||
end
|
||||
end
|
||||
|
||||
def do_read_with_hook(io)
|
||||
trace_selection
|
||||
do_read_without_hook(io)
|
||||
end
|
||||
|
||||
def trace_selection
|
||||
BinData.trace_message do |tracer|
|
||||
selection_string = eval_parameter(:selection).inspect
|
||||
tracer.trace_obj("#{debug_name}-selection-", selection_string)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
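# A sketch of BinData.trace_reading as defined above; the trace lines written
# to STDERR are approximate.
require 'bindata'

class Point < BinData::Record
  endian :little
  int16 :x
  int16 :y
end

BinData.trace_reading do
  Point.read("\x01\x00\x02\x00")
end
# STDERR receives something like:
#   obj.x => 1
#   obj.y => 2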
|
||||
62
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/uint8_array.rb
vendored
Normal file
62
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/uint8_array.rb
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
require "bindata/base_primitive"
|
||||
|
||||
module BinData
|
||||
# Uint8Array is a specialised type of array that only contains
|
||||
# bytes (Uint8). It is a faster and more memory efficient version
|
||||
# of `BinData::Array.new(:type => :uint8)`.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# obj = BinData::Uint8Array.new(initial_length: 5)
|
||||
# obj.read("abcdefg") #=> [97, 98, 99, 100, 101]
|
||||
# obj[2] #=> 99
|
||||
# obj.collect { |x| x.chr }.join #=> "abcde"
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params are:
|
||||
#
|
||||
# <tt>:initial_length</tt>:: The initial length of the array.
|
||||
# <tt>:read_until</tt>:: May only have a value of `:eof`. This parameter
|
||||
# instructs the array to read as much data from
|
||||
# the stream as possible.
|
||||
class Uint8Array < BinData::BasePrimitive
|
||||
optional_parameters :initial_length, :read_until
|
||||
mutually_exclusive_parameters :initial_length, :read_until
|
||||
arg_processor :uint8_array
|
||||
|
||||
#---------------
|
||||
private
|
||||
|
||||
def value_to_binary_string(val)
|
||||
val.pack("C*")
|
||||
end
|
||||
|
||||
def read_and_return_value(io)
|
||||
if has_parameter?(:initial_length)
|
||||
data = io.readbytes(eval_parameter(:initial_length))
|
||||
else
|
||||
data = io.read_all_bytes
|
||||
end
|
||||
|
||||
data.unpack("C*")
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
class Uint8ArrayArgProcessor < BaseArgProcessor
|
||||
def sanitize_parameters!(obj_class, params) #:nodoc:
|
||||
# ensure one of :initial_length and :read_until exists
|
||||
unless params.has_at_least_one_of?(:initial_length, :read_until)
|
||||
params[:initial_length] = 0
|
||||
end
|
||||
|
||||
msg = "Parameter :read_until must have a value of :eof"
|
||||
params.sanitize(:read_until) { |val| raise ArgumentError, msg unless val == :eof }
|
||||
end
|
||||
end
|
||||
end
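# A sketch of BinData::Uint8Array with both documented parameters.
require 'bindata'

bytes = BinData::Uint8Array.new(read_until: :eof)
bytes.read("\x01\x02\x03")
bytes.snapshot        #=> [1, 2, 3]

fixed = BinData::Uint8Array.new(initial_length: 4)
fixed.read("abcdef")
fixed.snapshot        #=> [97, 98, 99, 100]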
|
||||
3
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/version.rb
vendored
Normal file
3
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/version.rb
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
module BinData
|
||||
VERSION = "2.4.8"
|
||||
end
|
||||
47
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/virtual.rb
vendored
Normal file
47
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/virtual.rb
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
require "bindata/base"
|
||||
|
||||
module BinData
|
||||
# A virtual field is one that is neither read, written nor occupies space in
|
||||
# the data stream. It is used to make assertions or as a convenient label
|
||||
# for determining offsets or storing values.
|
||||
#
|
||||
# require 'bindata'
|
||||
#
|
||||
# class A < BinData::Record
|
||||
# string :a, read_length: 5
|
||||
# string :b, read_length: 5
|
||||
# virtual :c, assert: -> { a == b }
|
||||
# end
|
||||
#
|
||||
# obj = A.read("abcdeabcde")
|
||||
# obj.a #=> "abcde"
|
||||
# obj.c.offset #=> 10
|
||||
#
|
||||
# obj = A.read("abcdeABCDE") #=> BinData::ValidityError: assertion failed for obj.c
|
||||
#
|
||||
# == Parameters
|
||||
#
|
||||
# Parameters may be provided at initialisation to control the behaviour of
|
||||
# an object. These params include those for BinData::Base as well as:
|
||||
#
|
||||
# [<tt>:assert</tt>] Raise an error when reading or assigning if the value
|
||||
# of this evaluated parameter is false.
|
||||
# [<tt>:value</tt>] The virtual object will always have this value.
|
||||
#
|
||||
class Virtual < BinData::BasePrimitive
|
||||
|
||||
def do_read(io)
|
||||
end
|
||||
|
||||
def do_write(io)
|
||||
end
|
||||
|
||||
def do_num_bytes
|
||||
0.0
|
||||
end
|
||||
|
||||
def sensible_default
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
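# A sketch of a virtual assertion field, mirroring the documentation above.
require 'bindata'

class PairedRecord < BinData::Record
  string  :a, read_length: 5
  string  :b, read_length: 5
  virtual :same, assert: -> { a == b }   # occupies no bytes in the stream
end

PairedRecord.read("abcdeabcde").num_bytes   #=> 10
# PairedRecord.read("abcdeABCDE") raises BinData::ValidityError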
|
||||
36
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/warnings.rb
vendored
Normal file
36
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/bindata-2.4.8/lib/bindata/warnings.rb
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
module BinData
|
||||
class Base
|
||||
# Don't override initialize. If you are defining a new kind of datatype
|
||||
# (list, array, choice etc) then put your initialization code in
|
||||
# #initialize_instance. BinData objects might be initialized as prototypes
|
||||
# and your initialization code may not be called.
|
||||
#
|
||||
# If you're subclassing BinData::Record, you are definitely doing the wrong
|
||||
# thing. Read the documentation on how to use BinData.
|
||||
# http://github.com/dmendel/bindata/wiki/Records
|
||||
alias_method :initialize_without_warning, :initialize
|
||||
def initialize_with_warning(*args)
|
||||
owner = method(:initialize).owner
|
||||
if owner != BinData::Base
|
||||
msg = "Don't override #initialize on #{owner}."
|
||||
if %w(BinData::Base BinData::BasePrimitive).include? self.class.superclass.name
|
||||
msg += "\nrename #initialize to #initialize_instance."
|
||||
end
|
||||
fail msg
|
||||
end
|
||||
initialize_without_warning(*args)
|
||||
end
|
||||
alias initialize initialize_with_warning
|
||||
|
||||
def initialize_instance(*args)
|
||||
unless args.empty?
|
||||
fail "#{caller[0]} remove the call to super in #initialize_instance"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
class Struct
|
||||
# has_key? is deprecated
|
||||
alias has_key? key?
|
||||
end
|
||||
end
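# A sketch of the deprecated Struct#has_key? alias kept above; prefer #key?.
# Custom BinData subclasses should likewise put setup code in
# #initialize_instance rather than overriding #initialize.
require 'bindata'

s = BinData::Struct.new(fields: [[:uint8, :flag]])
s.key?(:flag)       # truthy: the field exists
s.has_key?(:flag)   # deprecated alias of #key?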
|
||||
11
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools.rb
vendored
Normal file
11
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools.rb
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/constants'
|
||||
require 'elftools/elf_file'
|
||||
require 'elftools/version'
|
||||
|
||||
# The ELF parsing tools!
|
||||
# Main entry point is {ELFTools::ELFFile}, see it
|
||||
# for more information.
|
||||
module ELFTools
|
||||
end
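# A quick-start sketch for the entry point mentioned above. The path to the
# ELF binary is hypothetical; any readable ELF file would do.
require 'elftools'

elf = ELFTools::ELFFile.new(File.open('/bin/ls'))
elf.machine    #=> e.g. "Advanced Micro Devices X86-64"
elf.elf_type   #=> "DYN" or "EXEC"
elf.build_id   #=> hex string, or nil when .note.gnu.build-id is absent
elf.sections.map(&:name).first(3)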
|
||||
303
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/constants.rb
vendored
Normal file
303
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/constants.rb
vendored
Normal file
@ -0,0 +1,303 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module ELFTools
|
||||
# Define constants from elf.h.
|
||||
# Mostly referenced from https://github.com/torvalds/linux/blob/master/include/uapi/linux/elf.h
|
||||
# and binutils/elfcpp/elfcpp.h.
|
||||
module Constants
|
||||
# ELF magic header
|
||||
ELFMAG = "\x7FELF"
|
||||
|
||||
# Values of `d_un.d_val' in the DT_FLAGS and DT_FLAGS_1 entry.
|
||||
module DF
|
||||
DF_ORIGIN = 0x00000001 # Object may use DF_ORIGIN
|
||||
DF_SYMBOLIC = 0x00000002 # Symbol resolutions starts here
|
||||
DF_TEXTREL = 0x00000004 # Object contains text relocations
|
||||
DF_BIND_NOW = 0x00000008 # No lazy binding for this object
|
||||
DF_STATIC_TLS = 0x00000010 # Module uses the static TLS model
|
||||
|
||||
DF_1_NOW = 0x00000001 # Set RTLD_NOW for this object.
|
||||
DF_1_GLOBAL = 0x00000002 # Set RTLD_GLOBAL for this object.
|
||||
DF_1_GROUP = 0x00000004 # Set RTLD_GROUP for this object.
|
||||
DF_1_NODELETE = 0x00000008 # Set RTLD_NODELETE for this object.
|
||||
DF_1_LOADFLTR = 0x00000010 # Trigger filtee loading at runtime.
|
||||
DF_1_INITFIRST = 0x00000020 # Set RTLD_INITFIRST for this object
|
||||
DF_1_NOOPEN = 0x00000040 # Set RTLD_NOOPEN for this object.
|
||||
DF_1_ORIGIN = 0x00000080 # $ORIGIN must be handled.
|
||||
DF_1_DIRECT = 0x00000100 # Direct binding enabled.
|
||||
DF_1_TRANS = 0x00000200 # :nodoc:
|
||||
DF_1_INTERPOSE = 0x00000400 # Object is used to interpose.
|
||||
DF_1_NODEFLIB = 0x00000800 # Ignore default lib search path.
|
||||
DF_1_NODUMP = 0x00001000 # Object can't be dldump'ed.
|
||||
DF_1_CONFALT = 0x00002000 # Configuration alternative created.
|
||||
DF_1_ENDFILTEE = 0x00004000 # Filtee terminates filters search.
|
||||
DF_1_DISPRELDNE = 0x00008000 # Disp reloc applied at build time.
|
||||
DF_1_DISPRELPND = 0x00010000 # Disp reloc applied at run-time.
|
||||
DF_1_NODIRECT = 0x00020000 # Object has no-direct binding.
|
||||
DF_1_IGNMULDEF = 0x00040000 # :nodoc:
|
||||
DF_1_NOKSYMS = 0x00080000 # :nodoc:
|
||||
DF_1_NOHDR = 0x00100000 # :nodoc:
|
||||
DF_1_EDITED = 0x00200000 # Object is modified after built.
|
||||
DF_1_NORELOC = 0x00400000 # :nodoc:
|
||||
DF_1_SYMINTPOSE = 0x00800000 # Object has individual interposers.
|
||||
DF_1_GLOBAUDIT = 0x01000000 # Global auditing required.
|
||||
DF_1_SINGLETON = 0x02000000 # Singleton symbols are used.
|
||||
end
|
||||
include DF
|
||||
|
||||
# Dynamic table types, records in +d_tag+.
|
||||
module DT
|
||||
DT_NULL = 0 # marks the end of the _DYNAMIC array
|
||||
DT_NEEDED = 1 # libraries need to be linked by loader
|
||||
DT_PLTRELSZ = 2 # total size of relocation entries
|
||||
DT_PLTGOT = 3 # address of procedure linkage table or global offset table
|
||||
DT_HASH = 4 # address of symbol hash table
|
||||
DT_STRTAB = 5 # address of string table
|
||||
DT_SYMTAB = 6 # address of symbol table
|
||||
DT_RELA = 7 # address of a relocation table
|
||||
DT_RELASZ = 8 # total size of the {DT_RELA} table
|
||||
DT_RELAENT = 9 # size of each entry in the {DT_RELA} table
|
||||
DT_STRSZ = 10 # total size of {DT_STRTAB}
|
||||
DT_SYMENT = 11 # size of each entry in {DT_SYMTAB}
|
||||
DT_INIT = 12 # where the initialization function is
|
||||
DT_FINI = 13 # where the termination function is
|
||||
DT_SONAME = 14 # the shared object name
|
||||
DT_RPATH = 15 # has been superseded by {DT_RUNPATH}
|
||||
DT_SYMBOLIC = 16 # has been superseded by the DF_SYMBOLIC flag
|
||||
DT_REL = 17 # similar to {DT_RELA}
|
||||
DT_RELSZ = 18 # total size of the {DT_REL} table
|
||||
DT_RELENT = 19 # size of each entry in the {DT_REL} table
|
||||
DT_PLTREL = 20 # type of relocation entry, either {DT_REL} or {DT_RELA}
|
||||
DT_DEBUG = 21 # for debugging
|
||||
DT_TEXTREL = 22 # has been superseded by the DF_TEXTREL flag
|
||||
DT_JMPREL = 23 # address of relocation entries that are associated solely with the procedure linkage table
|
||||
DT_BIND_NOW = 24 # if the loader needs to do relocate now, superseded by the DF_BIND_NOW flag
|
||||
DT_INIT_ARRAY = 25 # address init array
|
||||
DT_FINI_ARRAY = 26 # address of fini array
|
||||
DT_INIT_ARRAYSZ = 27 # total size of init array
|
||||
DT_FINI_ARRAYSZ = 28 # total size of fini array
|
||||
DT_RUNPATH = 29 # path of libraries for searching
|
||||
DT_FLAGS = 30 # flags
|
||||
DT_ENCODING = 32 # just a lower bound
|
||||
# Values between {DT_LOOS} and {DT_HIOS} are reserved for operating system-specific semantics.
|
||||
DT_LOOS = 0x6000000d
|
||||
DT_HIOS = 0x6ffff000 # see {DT_LOOS}
|
||||
# Values between {DT_VALRNGLO} and {DT_VALRNGHI} use the +d_un.d_val+ field of the dynamic structure.
|
||||
DT_VALRNGLO = 0x6ffffd00
|
||||
DT_VALRNGHI = 0x6ffffdff # see {DT_VALRNGLO}
|
||||
# Values between {DT_ADDRRNGLO} and {DT_ADDRRNGHI} use the +d_un.d_ptr+ field of the dynamic structure.
|
||||
DT_ADDRRNGLO = 0x6ffffe00
|
||||
DT_GNU_HASH = 0x6ffffef5 # the gnu hash
|
||||
DT_ADDRRNGHI = 0x6ffffeff # see {DT_ADDRRNGLO}
|
||||
DT_RELACOUNT = 0x6ffffff9 # relative relocation count
|
||||
DT_RELCOUNT = 0x6ffffffa # relative relocation count
|
||||
DT_FLAGS_1 = 0x6ffffffb # flags
|
||||
DT_VERDEF = 0x6ffffffc # address of version definition table
|
||||
DT_VERDEFNUM = 0x6ffffffd # number of entries in {DT_VERDEF}
|
||||
DT_VERNEED = 0x6ffffffe # address of version dependency table
|
||||
DT_VERNEEDNUM = 0x6fffffff # number of entries in {DT_VERNEED}
|
||||
# Values between {DT_LOPROC} and {DT_HIPROC} are reserved for processor-specific semantics.
|
||||
DT_LOPROC = 0x70000000
|
||||
DT_HIPROC = 0x7fffffff # see {DT_LOPROC}
|
||||
end
|
||||
include DT
|
||||
|
||||
# These constants define the various ELF target machines.
|
||||
module EM
|
||||
EM_NONE = 0 # none
|
||||
EM_M32 = 1 # AT&T WE 32100
|
||||
EM_SPARC = 2 # SPARC
|
||||
EM_386 = 3 # Intel 80386
|
||||
EM_68K = 4 # Motorola 68000
|
||||
EM_88K = 5 # Motorola 88000
|
||||
EM_486 = 6 # Intel 80486
|
||||
EM_860 = 7 # Intel 80860
|
||||
EM_MIPS = 8 # MIPS R3000 (officially, big-endian only)
|
||||
|
||||
# The next two are historical; binaries and
|
||||
# modules of these types will be rejected by Linux.
|
||||
EM_MIPS_RS3_LE = 10 # MIPS R3000 little-endian
|
||||
EM_MIPS_RS4_BE = 10 # MIPS R4000 big-endian
|
||||
|
||||
EM_PARISC = 15 # HPPA
|
||||
EM_SPARC32PLUS = 18 # Sun's "v8plus"
|
||||
EM_PPC = 20 # PowerPC
|
||||
EM_PPC64 = 21 # PowerPC64
|
||||
EM_SPU = 23 # Cell BE SPU
|
||||
EM_ARM = 40 # ARM 32 bit
|
||||
EM_SH = 42 # SuperH
|
||||
EM_SPARCV9 = 43 # SPARC v9 64-bit
|
||||
EM_H8_300 = 46 # Renesas H8/300
|
||||
EM_IA_64 = 50 # HP/Intel IA-64
|
||||
EM_X86_64 = 62 # AMD x86-64
|
||||
EM_S390 = 22 # IBM S/390
|
||||
EM_CRIS = 76 # Axis Communications 32-bit embedded processor
|
||||
EM_M32R = 88 # Renesas M32R
|
||||
EM_MN10300 = 89 # Panasonic/MEI MN10300, AM33
|
||||
EM_OPENRISC = 92 # OpenRISC 32-bit embedded processor
|
||||
EM_BLACKFIN = 106 # ADI Blackfin Processor
|
||||
EM_ALTERA_NIOS2 = 113 # Altera Nios II soft-core processor
|
||||
EM_TI_C6000 = 140 # TI C6X DSPs
|
||||
EM_AARCH64 = 183 # ARM 64 bit
|
||||
EM_TILEPRO = 188 # Tilera TILEPro
|
||||
EM_MICROBLAZE = 189 # Xilinx MicroBlaze
|
||||
EM_TILEGX = 191 # Tilera TILE-Gx
|
||||
EM_BPF = 247 # Linux BPF - in-kernel virtual machine
|
||||
EM_FRV = 0x5441 # Fujitsu FR-V
|
||||
EM_AVR32 = 0x18ad # Atmel AVR32
|
||||
|
||||
# This is an interim value that we will use until the committee comes up with a final number.
|
||||
EM_ALPHA = 0x9026
|
||||
|
||||
# Bogus old m32r magic number, used by old tools.
|
||||
EM_CYGNUS_M32R = 0x9041
|
||||
# This is the old interim value for S/390 architecture
|
||||
EM_S390_OLD = 0xA390
|
||||
# Also Panasonic/MEI MN10300, AM33
|
||||
EM_CYGNUS_MN10300 = 0xbeef
|
||||
|
||||
# Return the architecture name according to +val+.
|
||||
# Used by {ELFTools::ELFFile#machine}.
|
||||
#
|
||||
# Only supports famous archs.
|
||||
# @param [Integer] val Value of +e_machine+.
|
||||
# @return [String]
|
||||
# Name of architecture.
|
||||
# @example
|
||||
# mapping(3)
|
||||
# #=> 'Intel 80386'
|
||||
# mapping(6)
|
||||
# #=> 'Intel 80386'
|
||||
# mapping(62)
|
||||
# #=> 'Advanced Micro Devices X86-64'
|
||||
# mapping(1337)
|
||||
# #=> '<unknown>: 0x539'
|
||||
def self.mapping(val)
|
||||
case val
|
||||
when EM_NONE then 'None'
|
||||
when EM_386, EM_486 then 'Intel 80386'
|
||||
when EM_860 then 'Intel 80860'
|
||||
when EM_MIPS then 'MIPS R3000'
|
||||
when EM_PPC then 'PowerPC'
|
||||
when EM_PPC64 then 'PowerPC64'
|
||||
when EM_ARM then 'ARM'
|
||||
when EM_IA_64 then 'Intel IA-64'
|
||||
when EM_AARCH64 then 'AArch64'
|
||||
when EM_X86_64 then 'Advanced Micro Devices X86-64'
|
||||
else format('<unknown>: 0x%x', val)
|
||||
end
|
||||
end
|
||||
end
|
||||
include EM
|
||||
|
||||
# This module defines elf file types.
|
||||
module ET
|
||||
ET_NONE = 0 # no file type
|
||||
ET_REL = 1 # relocatable file
|
||||
ET_EXEC = 2 # executable file
|
||||
ET_DYN = 3 # shared object
|
||||
ET_CORE = 4 # core file
|
||||
# Return the type name according to +e_type+ in ELF file header.
|
||||
# @return [String] Type in string format.
|
||||
def self.mapping(type)
|
||||
case type
|
||||
when Constants::ET_NONE then 'NONE'
|
||||
when Constants::ET_REL then 'REL'
|
||||
when Constants::ET_EXEC then 'EXEC'
|
||||
when Constants::ET_DYN then 'DYN'
|
||||
when Constants::ET_CORE then 'CORE'
|
||||
else '<unknown>'
|
||||
end
|
||||
end
|
||||
end
|
||||
include ET
|
||||
|
||||
# Program header types, records in +p_type+.
|
||||
module PT
|
||||
PT_NULL = 0 # null segment
|
||||
PT_LOAD = 1 # segment to be load
|
||||
PT_DYNAMIC = 2 # dynamic tags
|
||||
PT_INTERP = 3 # interpreter, same as .interp section
|
||||
PT_NOTE = 4 # same as .note* section
|
||||
PT_SHLIB = 5 # reserved
|
||||
PT_PHDR = 6 # where program header starts
|
||||
PT_TLS = 7 # thread local storage segment
|
||||
PT_LOOS = 0x60000000 # OS-specific
|
||||
PT_HIOS = 0x6fffffff # OS-specific
|
||||
# Values between {PT_LOPROC} and {PT_HIPROC} are reserved for processor-specific semantics.
|
||||
PT_LOPROC = 0x70000000
|
||||
PT_HIPROC = 0x7fffffff # see {PT_LOPROC}
|
||||
PT_GNU_EH_FRAME = 0x6474e550 # for exception handler
|
||||
PT_GNU_STACK = 0x6474e551 # permission of stack
|
||||
PT_GNU_RELRO = 0x6474e552 # read only after relocation
|
||||
end
|
||||
include PT
|
||||
|
||||
# Section header types, records in +sh_type+.
|
||||
module SHT
|
||||
SHT_NULL = 0 # null section
|
||||
SHT_PROGBITS = 1 # information defined by program itself
|
||||
SHT_SYMTAB = 2 # symbol table section
|
||||
SHT_STRTAB = 3 # string table section
|
||||
SHT_RELA = 4 # relocation with addends
|
||||
SHT_HASH = 5 # symbol hash table
|
||||
SHT_DYNAMIC = 6 # information of dynamic linking
|
||||
SHT_NOTE = 7 # note section
|
||||
SHT_NOBITS = 8 # section occupies no space
|
||||
SHT_REL = 9 # relocation
|
||||
SHT_SHLIB = 10 # reserved
|
||||
SHT_DYNSYM = 11 # symbols for dynamic
|
||||
# Values between {SHT_LOPROC} and {SHT_HIPROC} are reserved for processor-specific semantics.
|
||||
SHT_LOPROC = 0x70000000
|
||||
SHT_HIPROC = 0x7fffffff # see {SHT_LOPROC}
|
||||
# Values between {SHT_LOUSER} and {SHT_HIUSER} are reserved for application programs.
|
||||
SHT_LOUSER = 0x80000000
|
||||
SHT_HIUSER = 0xffffffff # see {SHT_LOUSER}
|
||||
end
|
||||
include SHT
|
||||
|
||||
# Symbol binding from Sym st_info field.
|
||||
module STB
|
||||
STB_LOCAL = 0 # Local symbol
|
||||
STB_GLOBAL = 1 # Global symbol
|
||||
STB_WEAK = 2 # Weak symbol
|
||||
STB_NUM = 3 # Number of defined types.
|
||||
STB_LOOS = 10 # Start of OS-specific
|
||||
STB_GNU_UNIQUE = 10 # Unique symbol.
|
||||
STB_HIOS = 12 # End of OS-specific
|
||||
STB_LOPROC = 13 # Start of processor-specific
|
||||
STB_HIPROC = 15 # End of processor-specific
|
||||
end
|
||||
include STB
|
||||
|
||||
# Symbol types from Sym st_info field.
|
||||
module STT
|
||||
STT_NOTYPE = 0 # Symbol type is unspecified
|
||||
STT_OBJECT = 1 # Symbol is a data object
|
||||
STT_FUNC = 2 # Symbol is a code object
|
||||
STT_SECTION = 3 # Symbol associated with a section
|
||||
STT_FILE = 4 # Symbol's name is file name
|
||||
STT_COMMON = 5 # Symbol is a common data object
|
||||
STT_TLS = 6 # Symbol is thread-local data object
|
||||
STT_NUM = 7 # Number of defined types.
|
||||
|
||||
# GNU extension: symbol value points to a function which is called
|
||||
# at runtime to determine the final value of the symbol.
|
||||
STT_GNU_IFUNC = 10
|
||||
|
||||
STT_LOOS = 10 # Start of OS-specific
|
||||
STT_HIOS = 12 # End of OS-specific
|
||||
STT_LOPROC = 13 # Start of processor-specific
|
||||
STT_HIPROC = 15 # End of processor-specific
|
||||
|
||||
# The section type that must be used for register symbols on
|
||||
# Sparc. These symbols initialize a global register.
|
||||
STT_SPARC_REGISTER = 13
|
||||
|
||||
# ARM: a THUMB function. This is not defined in ARM ELF Specification but
|
||||
# used by the GNU tool-chain.
|
||||
STT_ARM_TFUNC = 13
|
||||
end
|
||||
include STT
|
||||
end
|
||||
end
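# A sketch of the constant helpers defined above; the return values follow the
# mapping tables in this file.
require 'elftools'

ELFTools::Constants::EM.mapping(62)   #=> "Advanced Micro Devices X86-64"
ELFTools::Constants::ET.mapping(3)    #=> "DYN"
ELFTools::Constants::DT_NEEDED        #=> 1 (DT constants are mixed into Constants)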
|
||||
178
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/dynamic.rb
vendored
Normal file
178
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/dynamic.rb
vendored
Normal file
@ -0,0 +1,178 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module ELFTools
|
||||
# Define common methods for dynamic sections and dynamic segments.
|
||||
#
|
||||
# @note
|
||||
# This module can only be included by {ELFTools::Sections::DynamicSection}
|
||||
# and {ELFTools::Segments::DynamicSegment} because methods here assume some
|
||||
# attributes exist.
|
||||
module Dynamic
|
||||
# Iterate all tags.
|
||||
#
|
||||
# @note
|
||||
# This method assumes the following methods already exist:
|
||||
# header
|
||||
# tag_start
|
||||
# @yieldparam [ELFTools::Dynamic::Tag] tag
|
||||
# @return [Enumerator<ELFTools::Dynamic::Tag>, Array<ELFTools::Dynamic::Tag>]
|
||||
# If block is not given, an enumerator will be returned.
|
||||
# Otherwise, an array of tags is returned.
|
||||
def each_tags(&block)
|
||||
return enum_for(:each_tags) unless block_given?
|
||||
|
||||
arr = []
|
||||
0.step do |i|
|
||||
tag = tag_at(i).tap(&block)
|
||||
arr << tag
|
||||
break if tag.header.d_tag == ELFTools::Constants::DT_NULL
|
||||
end
|
||||
arr
|
||||
end
|
||||
|
||||
# Use {#tags} to get all tags.
|
||||
# @return [Array<ELFTools::Dynamic::Tag>]
|
||||
# Array of tags.
|
||||
def tags
|
||||
@tags ||= each_tags.to_a
|
||||
end
|
||||
|
||||
# Get a tag of specific type.
|
||||
# @param [Integer, Symbol, String] type
|
||||
# Constant value, symbol, or string of type
|
||||
# is acceptable. See examples for more information.
|
||||
# @return [ELFTools::Dynamic::Tag] The desired tag.
|
||||
# @example
|
||||
# dynamic = elf.segment_by_type(:dynamic)
|
||||
# # type as integer
|
||||
# dynamic.tag_by_type(0) # the null tag
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||
# dynamic.tag_by_type(ELFTools::Constants::DT_NULL)
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||
#
|
||||
# # symbol
|
||||
# dynamic.tag_by_type(:null)
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||
# dynamic.tag_by_type(:pltgot)
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055d3d2d91b28 @header={:d_tag=>3, :d_val=>6295552}>
|
||||
#
|
||||
# # string
|
||||
# dynamic.tag_by_type('null')
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||
# dynamic.tag_by_type('DT_PLTGOT')
|
||||
# #=> #<ELFTools::Dynamic::Tag:0x0055d3d2d91b28 @header={:d_tag=>3, :d_val=>6295552}>
|
||||
def tag_by_type(type)
|
||||
type = Util.to_constant(Constants::DT, type)
|
||||
each_tags.find { |tag| tag.header.d_tag == type }
|
||||
end
|
||||
|
||||
# Get tags of specific type.
|
||||
# @param [Integer, Symbol, String] type
|
||||
# Constant value, symbol, or string of type
|
||||
# is acceptable. See examples for more information.
|
||||
# @return [Array<ELFTools::Dynamic::Tag>] The desired tags.
|
||||
#
|
||||
# @see #tag_by_type
|
||||
def tags_by_type(type)
|
||||
type = Util.to_constant(Constants::DT, type)
|
||||
each_tags.select { |tag| tag.header.d_tag == type }
|
||||
end
|
||||
|
||||
# Get the +n+-th tag.
|
||||
#
|
||||
# Tags are lazy loaded.
|
||||
# @note
|
||||
# This method assumes the following methods already exist:
|
||||
# header
|
||||
# tag_start
|
||||
# @note
|
||||
# We cannot do bound checking of +n+ here since the only way to get size
|
||||
# of tags is calling +tags.size+.
|
||||
# @param [Integer] n The index.
|
||||
# @return [ELFTools::Dynamic::Tag] The desired tag.
|
||||
def tag_at(n)
|
||||
return if n.negative?
|
||||
|
||||
@tag_at_map ||= {}
|
||||
return @tag_at_map[n] if @tag_at_map[n]
|
||||
|
||||
dyn = Structs::ELF_Dyn.new(endian: endian)
|
||||
dyn.elf_class = header.elf_class
|
||||
stream.pos = tag_start + n * dyn.num_bytes
|
||||
dyn.offset = stream.pos
|
||||
@tag_at_map[n] = Tag.new(dyn.read(stream), stream, method(:str_offset))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def endian
|
||||
header.class.self_endian
|
||||
end
|
||||
|
||||
# Get the DT_STRTAB's +d_val+ offset related to file.
|
||||
def str_offset
|
||||
# TODO: handle the case where DT_STRTAB does not exist.
|
||||
@str_offset ||= @offset_from_vma.call(tag_by_type(:strtab).header.d_val.to_i)
|
||||
end
|
||||
|
||||
# A tag class.
|
||||
class Tag
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF_Dyn] The dynamic tag header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
|
||||
# Instantiate a {ELFTools::Dynamic::Tag} object.
|
||||
# @param [ELF_Dyn] header The dynamic tag header.
|
||||
# @param [#pos=, #read] stream Streaming object.
|
||||
# @param [Method] str_offset
|
||||
# Call this method to get the string offset related
|
||||
# to file.
|
||||
def initialize(header, stream, str_offset)
|
||||
@header = header
|
||||
@stream = stream
|
||||
@str_offset = str_offset
|
||||
end
|
||||
|
||||
# Some dynamic tags have a name.
|
||||
TYPE_WITH_NAME = [Constants::DT_NEEDED,
|
||||
Constants::DT_SONAME,
|
||||
Constants::DT_RPATH,
|
||||
Constants::DT_RUNPATH].freeze
|
||||
# Return the content this tag records.
|
||||
#
|
||||
# For normal tags, this method just returns
|
||||
# +header.d_val+. For tags with +header.d_val+
|
||||
# in meaning of string offset (e.g. DT_NEEDED), this method would
|
||||
# return the string it specified.
|
||||
# Tags whose type is in {TYPE_WITH_NAME} are the tags with a name.
|
||||
# @return [Integer, String] The content this tag records.
|
||||
# @example
|
||||
# dynamic = elf.segment_by_type(:dynamic)
|
||||
# dynamic.tag_by_type(:init).value
|
||||
# #=> 4195600 # 0x400510
|
||||
# dynamic.tag_by_type(:needed).value
|
||||
# #=> 'libc.so.6'
|
||||
def value
|
||||
name || header.d_val.to_i
|
||||
end
|
||||
|
||||
# Does this tag have a name?
|
||||
#
|
||||
# The criteria here is if this tag's type is in {TYPE_WITH_NAME}.
|
||||
# @return [Boolean] Whether this tag has a name.
|
||||
def name?
|
||||
TYPE_WITH_NAME.include?(header.d_tag)
|
||||
end
|
||||
|
||||
# Return the name of this tag.
|
||||
#
|
||||
# Only tags with name would return a name.
|
||||
# Others would return +nil+.
|
||||
# @return [String, nil] The name.
|
||||
def name
|
||||
return nil unless name?
|
||||
|
||||
Util.cstring(stream, @str_offset.call + header.d_val.to_i)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
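# A sketch of reading dynamic tags through the module above. The binary path
# and the library names in the comments are hypothetical.
require 'elftools'

elf     = ELFTools::ELFFile.new(File.open('/bin/ls'))
dynamic = elf.segment_by_type(:dynamic)

dynamic.tags_by_type(:needed).map(&:value)   #=> e.g. ["libc.so.6", ...]
dynamic.tag_by_type(:runpath)&.value         #=> the RUNPATH string, or nil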
|
||||
377
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/elf_file.rb
vendored
Normal file
377
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/elf_file.rb
vendored
Normal file
@ -0,0 +1,377 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/constants'
|
||||
require 'elftools/exceptions'
|
||||
require 'elftools/lazy_array'
|
||||
require 'elftools/sections/sections'
|
||||
require 'elftools/segments/segments'
|
||||
require 'elftools/structs'
|
||||
|
||||
module ELFTools
|
||||
# The main class for using elftools.
|
||||
class ELFFile
|
||||
attr_reader :stream # @return [#pos=, #read] The +File+ object.
|
||||
attr_reader :elf_class # @return [Integer] 32 or 64.
|
||||
attr_reader :endian # @return [Symbol] +:little+ or +:big+.
|
||||
|
||||
# Instantiate an {ELFFile} object.
|
||||
#
|
||||
# @param [#pos=, #read] stream
|
||||
# The +File+ object to fetch information from.
|
||||
# @example
|
||||
# ELFFile.new(File.open('/bin/cat'))
|
||||
# #=> #<ELFTools::ELFFile:0x00564b106c32a0 @elf_class=64, @endian=:little, @stream=#<File:/bin/cat>>
|
||||
def initialize(stream)
|
||||
@stream = stream
|
||||
# always set binmode if stream is an IO object.
|
||||
@stream.binmode if @stream.respond_to?(:binmode)
|
||||
identify # fetch the most basic information
|
||||
end
|
||||
|
||||
# Return the file header.
|
||||
#
|
||||
# Lazy loading.
|
||||
# @return [ELFTools::Structs::ELF_Ehdr] The header.
|
||||
def header
|
||||
return @header if defined?(@header)
|
||||
|
||||
stream.pos = 0
|
||||
@header = Structs::ELF_Ehdr.new(endian: endian, offset: stream.pos)
|
||||
@header.elf_class = elf_class
|
||||
@header.read(stream)
|
||||
end
|
||||
|
||||
# Return the BuildID of ELF.
|
||||
# @return [String, nil]
|
||||
# BuildID in hex form will be returned.
|
||||
# +nil+ is returned if the .note.gnu.build-id section
|
||||
# is not found.
|
||||
# @example
|
||||
# elf.build_id
|
||||
# #=> '73ab62cb7bc9959ce053c2b711322158708cdc07'
|
||||
def build_id
|
||||
section = section_by_name('.note.gnu.build-id')
|
||||
return nil if section.nil?
|
||||
|
||||
note = section.notes.first
|
||||
return nil if note.nil?
|
||||
|
||||
note.desc.unpack('H*').first
|
||||
end
|
||||
|
||||
# Get machine architecture.
|
||||
#
|
||||
# Mappings of architecture can be found
|
||||
# in {ELFTools::Constants::EM.mapping}.
|
||||
# @return [String]
|
||||
# Name of architecture.
|
||||
# @example
|
||||
# elf.machine
|
||||
# #=> 'Advanced Micro Devices X86-64'
|
||||
def machine
|
||||
ELFTools::Constants::EM.mapping(header.e_machine)
|
||||
end
|
||||
|
||||
# Return the ELF type according to +e_type+.
|
||||
# @return [String] Type in string format.
|
||||
# @example
|
||||
# ELFFile.new(File.open('spec/files/libc.so.6')).elf_type
|
||||
# #=> 'DYN'
|
||||
# ELFFile.new(File.open('spec/files/amd64.elf')).elf_type
|
||||
# #=> 'EXEC'
|
||||
def elf_type
|
||||
ELFTools::Constants::ET.mapping(header.e_type)
|
||||
end
|
||||
|
||||
#========= method about sections
|
||||
|
||||
# Number of sections in this file.
|
||||
# @return [Integer] The desired number.
|
||||
# @example
|
||||
# elf.num_sections
|
||||
# #=> 29
|
||||
def num_sections
|
||||
header.e_shnum
|
||||
end
|
||||
|
||||
# Acquire the section named as +name+.
|
||||
# @param [String] name The desired section name.
|
||||
# @return [ELFTools::Sections::Section, nil] The target section.
|
||||
# @example
|
||||
# elf.section_by_name('.note.gnu.build-id')
|
||||
# #=> #<ELFTools::Sections::Section:0x005647b1282428>
|
||||
# elf.section_by_name('')
|
||||
# #=> #<ELFTools::Sections::NullSection:0x005647b11da110>
|
||||
# elf.section_by_name('no such section')
|
||||
# #=> nil
|
||||
def section_by_name(name)
|
||||
each_sections.find { |sec| sec.name == name }
|
||||
end
|
||||
|
||||
# Iterate all sections.
|
||||
#
|
||||
# All sections are lazily loaded; a section is
|
||||
# only created when it is accessed.
|
||||
# This method is useful for {#section_by_name}
|
||||
# since not all sections need to be created.
|
||||
# @yieldparam [ELFTools::Sections::Section] section A section.
|
||||
# @yieldreturn [void]
|
||||
# @return [Enumerator<ELFTools::Sections::Section>, Array<ELFTools::Sections::Section>]
|
||||
# As with +Array#each+, if a block is not given, an enumerator will be returned,
|
||||
# otherwise, all sections will be returned.
|
||||
def each_sections(&block)
|
||||
return enum_for(:each_sections) unless block_given?
|
||||
|
||||
Array.new(num_sections) do |i|
|
||||
section_at(i).tap(&block)
|
||||
end
|
||||
end
|
||||
|
||||
# Simply use {#sections} to get all sections.
|
||||
# @return [Array<ELFTools::Sections::Section>]
|
||||
# Whole sections.
|
||||
def sections
|
||||
each_sections.to_a
|
||||
end
|
||||
|
||||
# Acquire the +n+-th section, 0-based.
|
||||
#
|
||||
# Sections are lazy loaded.
|
||||
# @param [Integer] n The index.
|
||||
# @return [ELFTools::Sections::Section, nil]
|
||||
# The target section.
|
||||
# If +n+ is out of bound, +nil+ is returned.
|
||||
def section_at(n)
|
||||
@sections ||= LazyArray.new(num_sections, &method(:create_section))
|
||||
@sections[n]
|
||||
end
|
||||
|
||||
# Fetch all sections with specific type.
|
||||
#
|
||||
# The available types are listed in {ELFTools::Constants::SHT}.
# This method accepts a block.
|
||||
# @param [Integer, Symbol, String] type
|
||||
# The type needed, similar format as {#segment_by_type}.
|
||||
# @yieldparam [ELFTools::Sections::Section] section A section in specific type.
|
||||
# @yieldreturn [void]
|
||||
# @return [Array<ELFTools::Sections::Section>] The target sections.
|
||||
# @example
|
||||
# elf = ELFTools::ELFFile.new(File.open('spec/files/amd64.elf'))
|
||||
# elf.sections_by_type(:rela)
|
||||
# #=> [#<ELFTools::Sections::RelocationSection:0x00563cd3219970>,
|
||||
# # #<ELFTools::Sections::RelocationSection:0x00563cd3b89d70>]
|
||||
def sections_by_type(type, &block)
|
||||
type = Util.to_constant(Constants::SHT, type)
|
||||
Util.select_by_type(each_sections, type, &block)
|
||||
end
|
||||
|
||||
# Get the string table section.
|
||||
#
|
||||
# This section is acquired by using the +e_shstrndx+
|
||||
# in ELF header.
|
||||
# @return [ELFTools::Sections::StrTabSection] The desired section.
|
||||
def strtab_section
|
||||
section_at(header.e_shstrndx)
|
||||
end
|
||||
|
||||
#========= method about segments
|
||||
|
||||
# Number of segments in this file.
|
||||
# @return [Integer] The desired number.
|
||||
def num_segments
|
||||
header.e_phnum
|
||||
end
|
||||
|
||||
# Iterate all segments.
|
||||
#
|
||||
# All segments are lazily loaded; a segment is
# only created when it is accessed.
|
||||
# This method is useful for {#segment_by_type}
|
||||
# since not all segments need to be created.
|
||||
# @yieldparam [ELFTools::Segments::Segment] segment A segment.
|
||||
# @yieldreturn [void]
|
||||
# @return [Enumerator<ELFTools::Segments::Segment>, Array<ELFTools::Segments::Segment>]
# If a block is not given, an enumerator will be returned;
# otherwise, all segments will be returned.
|
||||
def each_segments(&block)
|
||||
return enum_for(:each_segments) unless block_given?
|
||||
|
||||
Array.new(num_segments) do |i|
|
||||
segment_at(i).tap(&block)
|
||||
end
|
||||
end
|
||||
|
||||
# Simply use {#segments} to get all segments.
|
||||
# @return [Array<ELFTools::Segments::Segment>]
|
||||
# Whole segments.
|
||||
def segments
|
||||
each_segments.to_a
|
||||
end
|
||||
|
||||
# Get the first segment with +p_type=type+.
|
||||
# The available types are listed in {ELFTools::Constants::PT}.
|
||||
#
|
||||
# @note
|
||||
# This method will return the first segment found;
# to find all segments of a specific type, use {#segments_by_type}.
|
||||
# @param [Integer, Symbol, String] type
|
||||
# See examples for clear usage.
|
||||
# @return [ELFTools::Segments::Segment] The target segment.
|
||||
# @example
|
||||
# # type as an integer
|
||||
# elf.segment_by_type(ELFTools::Constants::PT_NOTE)
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
#
|
||||
# elf.segment_by_type(4) # PT_NOTE
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
#
|
||||
# # type as a symbol
|
||||
# elf.segment_by_type(:PT_NOTE)
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
#
|
||||
# # you can do this
|
||||
# elf.segment_by_type(:note) # will be transformed into `PT_NOTE`
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
#
|
||||
# # type as a string
|
||||
# elf.segment_by_type('PT_NOTE')
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
#
|
||||
# # this is ok
|
||||
# elf.segment_by_type('note') # will be transformed into `PT_NOTE`
|
||||
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||
# @example
|
||||
# elf.segment_by_type(1337)
|
||||
# # ArgumentError: No constants in Constants::PT is 1337
|
||||
#
|
||||
# elf.segment_by_type('oao')
|
||||
# # ArgumentError: No constants in Constants::PT named "PT_OAO"
|
||||
# @example
|
||||
# elf.segment_by_type(0)
|
||||
# #=> nil # no such segment exists
|
||||
def segment_by_type(type)
|
||||
type = Util.to_constant(Constants::PT, type)
|
||||
each_segments.find { |seg| seg.header.p_type == type }
|
||||
end
|
||||
|
||||
# Fetch all segments with specific type.
|
||||
#
|
||||
# If you want to find only one segment,
|
||||
# use {#segment_by_type} instead.
|
||||
# This method accepts a block.
|
||||
# @param [Integer, Symbol, String] type
|
||||
# The type needed, same format as {#segment_by_type}.
|
||||
# @yieldparam [ELFTools::Segments::Segment] segment A segment in specific type.
|
||||
# @yieldreturn [void]
|
||||
# @return [Array<ELFTools::Segments::Segment>] The target segments.
|
||||
def segments_by_type(type, &block)
|
||||
type = Util.to_constant(Constants::PT, type)
|
||||
Util.select_by_type(each_segments, type, &block)
|
||||
end
|
||||
|
||||
# Acquire the +n+-th segment, 0-based.
|
||||
#
|
||||
# Segments are lazy loaded.
|
||||
# @param [Integer] n The index.
|
||||
# @return [ELFTools::Segments::Segment, nil]
|
||||
# The target segment.
|
||||
# If +n+ is out of bound, +nil+ is returned.
|
||||
def segment_at(n)
|
||||
@segments ||= LazyArray.new(num_segments, &method(:create_segment))
|
||||
@segments[n]
|
||||
end
|
||||
|
||||
# Get the offset related to file, given virtual memory address.
|
||||
#
|
||||
# This method should work no matter ELF is a PIE or not.
|
||||
# This method is adapted from (and is equivalent to) binutils/readelf.c#offset_from_vma.
|
||||
# @param [Integer] vma The virtual address to be queried.
|
||||
# @return [Integer] Related file offset.
|
||||
# @example
|
||||
# elf = ELFTools::ELFFile.new(File.open('/bin/cat'))
|
||||
# elf.offset_from_vma(0x401337)
|
||||
# #=> 4919 # 0x1337
|
||||
def offset_from_vma(vma, size = 0)
|
||||
segments_by_type(:load) do |seg|
|
||||
return seg.vma_to_offset(vma) if seg.vma_in?(vma, size)
|
||||
end
|
||||
end
|
||||
|
||||
# The patch status.
|
||||
# @return [Hash{Integer => String}]
|
||||
def patches
|
||||
patch = {}
|
||||
loaded_headers.each do |header|
|
||||
header.patches.each do |key, val|
|
||||
patch[key + header.offset] = val
|
||||
end
|
||||
end
|
||||
patch
|
||||
end
|
||||
|
||||
# Apply patches and save as +filename+.
|
||||
#
|
||||
# @param [String] filename
|
||||
# @return [void]
|
||||
def save(filename)
|
||||
stream.pos = 0
|
||||
all = stream.read.force_encoding('ascii-8bit')
|
||||
patches.each do |pos, val|
|
||||
all[pos, val.size] = val
|
||||
end
|
||||
IO.binwrite(filename, all)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# bad idea..
|
||||
def loaded_headers
|
||||
explore = lambda do |obj|
|
||||
return obj if obj.is_a?(::ELFTools::Structs::ELFStruct)
|
||||
return obj.map(&explore) if obj.is_a?(Array)
|
||||
|
||||
obj.instance_variables.map do |s|
|
||||
explore.call(obj.instance_variable_get(s))
|
||||
end
|
||||
end
|
||||
explore.call(self).flatten
|
||||
end
|
||||
|
||||
def identify
|
||||
stream.pos = 0
|
||||
magic = stream.read(4)
|
||||
raise ELFMagicError, "Invalid magic number #{magic.inspect}" unless magic == Constants::ELFMAG
|
||||
|
||||
ei_class = stream.read(1).ord
|
||||
@elf_class = {
|
||||
1 => 32,
|
||||
2 => 64
|
||||
}[ei_class]
|
||||
raise ELFClassError, format('Invalid EI_CLASS "\x%02x"', ei_class) if elf_class.nil?
|
||||
|
||||
ei_data = stream.read(1).ord
|
||||
@endian = {
|
||||
1 => :little,
|
||||
2 => :big
|
||||
}[ei_data]
|
||||
raise ELFDataError, format('Invalid EI_DATA "\x%02x"', ei_data) if endian.nil?
|
||||
end
|
||||
|
||||
def create_section(n)
|
||||
stream.pos = header.e_shoff + n * header.e_shentsize
|
||||
shdr = Structs::ELF_Shdr.new(endian: endian, offset: stream.pos)
|
||||
shdr.elf_class = elf_class
|
||||
shdr.read(stream)
|
||||
Sections::Section.create(shdr, stream,
|
||||
offset_from_vma: method(:offset_from_vma),
|
||||
strtab: method(:strtab_section),
|
||||
section_at: method(:section_at))
|
||||
end
|
||||
|
||||
def create_segment(n)
|
||||
stream.pos = header.e_phoff + n * header.e_phentsize
|
||||
phdr = Structs::ELF_Phdr[elf_class].new(endian: endian, offset: stream.pos)
|
||||
phdr.elf_class = elf_class
|
||||
Segments::Segment.create(phdr.read(stream), stream, offset_from_vma: method(:offset_from_vma))
|
||||
end
|
||||
end
|
||||
end
|
||||
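A minimal usage sketch of the ELFFile API above (assumes the gem's top-level require 'elftools' entry point; the paths 'a.out' and 'copy.out' are illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))  # identify validates the ELF magic here
  puts elf.machine                                 # e.g. 'Advanced Micro Devices X86-64'
  puts elf.elf_type                                # e.g. 'DYN' or 'EXEC'
  puts elf.build_id                                # hex BuildID, or nil if the note is absent
  puts elf.num_sections
  elf.save('copy.out')                             # no patches recorded, so this writes an identical copy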
15
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/exceptions.rb
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module ELFTools
|
||||
# Being raised when parsing error.
|
||||
class ELFError < StandardError; end
|
||||
|
||||
# Raised on invalid ELF magic.
|
||||
class ELFMagicError < ELFError; end
|
||||
|
||||
# Raised on invalid ELF class (EI_CLASS).
|
||||
class ELFClassError < ELFError; end
|
||||
|
||||
# Raised on invalid ELF data encoding (EI_DATA).
|
||||
class ELFDataError < ELFError; end
|
||||
end
|
||||
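A hypothetical sketch of how these exceptions surface: since ELFFile#initialize calls identify, an invalid input fails at construction (the path below is illustrative):

  require 'elftools'

  begin
    ELFTools::ELFFile.new(File.open('/etc/hosts'))
  rescue ELFTools::ELFError => e  # covers the magic/class/data subclasses above
    warn "not a usable ELF: #{e.message}"
  end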
47
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/lazy_array.rb
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module ELFTools
|
||||
# A helper class that makes it easy for {ELFTools} to implement
# 'lazy loading' objects.
|
||||
# Mainly used when loading sections, segments, and
|
||||
# symbols.
|
||||
class LazyArray
|
||||
# Instantiate a {LazyArray} object.
|
||||
# @param [Integer] size
|
||||
# The size of array.
|
||||
# @yieldparam [Integer] i
|
||||
# Needs the +i+-th element.
|
||||
# @yieldreturn [Object]
|
||||
# Value of the +i+-th element.
|
||||
# @example
|
||||
# arr = LazyArray.new(10) { |i| p "calc #{i}"; i * i }
|
||||
# p arr[2]
|
||||
# # "calc 2"
|
||||
# # 4
|
||||
#
|
||||
# p arr[3]
|
||||
# # "calc 3"
|
||||
# # 9
|
||||
#
|
||||
# p arr[3]
|
||||
# # 9
|
||||
def initialize(size, &block)
|
||||
@internal = Array.new(size)
|
||||
@block = block
|
||||
end
|
||||
|
||||
# To access elements like a normal array.
|
||||
#
|
||||
# Elements are lazily loaded the first time
# they are accessed.
|
||||
# @return [Object]
|
||||
# The element, returned type is the
|
||||
# return type of block given in {#initialize}.
|
||||
def [](i)
|
||||
# XXX: support negative index?
|
||||
return nil unless i.between?(0, @internal.size - 1)
|
||||
|
||||
@internal[i] ||= @block.call(i)
|
||||
end
|
||||
end
|
||||
end
|
||||
125
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/note.rb
vendored
Normal file
@ -0,0 +1,125 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/structs'
|
||||
require 'elftools/util'
|
||||
|
||||
module ELFTools
|
||||
# Since both note sections and note segments refer to notes, this module
|
||||
# defines common methods for {ELFTools::Sections::NoteSection} and
|
||||
# {ELFTools::Segments::NoteSegment}.
|
||||
#
|
||||
# @note
|
||||
# This module can only be included in {ELFTools::Sections::NoteSection} and
|
||||
# {ELFTools::Segments::NoteSegment} since some methods here assume some
|
||||
# attributes already exist.
|
||||
module Note
|
||||
# Since the size of {ELFTools::Structs::ELF_Nhdr} does not change regardless of
# endianness or architecture, we can compute it here. This value should equal
# 12.
|
||||
SIZE_OF_NHDR = Structs::ELF_Nhdr.new(endian: :little).num_bytes
|
||||
|
||||
# Iterate all notes in a note section or segment.
|
||||
#
|
||||
# Structure of notes are:
|
||||
# +---------------+
|
||||
# | Note 1 header |
|
||||
# +---------------+
|
||||
# | Note 1 name |
|
||||
# +---------------+
|
||||
# | Note 1 desc |
|
||||
# +---------------+
|
||||
# | Note 2 header |
|
||||
# +---------------+
|
||||
# | ... |
|
||||
# +---------------+
|
||||
#
|
||||
# @note
|
||||
# This method assumes the following methods exist:
|
||||
# stream
|
||||
# note_start
|
||||
# note_total_size
|
||||
# @return [Enumerator<ELFTools::Note::Note>, Array<ELFTools::Note::Note>]
|
||||
# If block is not given, an enumerator will be returned.
|
||||
# Otherwise, return the array of notes.
|
||||
def each_notes
|
||||
return enum_for(:each_notes) unless block_given?
|
||||
|
||||
@notes_offset_map ||= {}
|
||||
cur = note_start
|
||||
notes = []
|
||||
while cur < note_start + note_total_size
|
||||
stream.pos = cur
|
||||
@notes_offset_map[cur] ||= create_note(cur)
|
||||
note = @notes_offset_map[cur]
|
||||
# name and desc sizes need to be 4-byte aligned
|
||||
name_size = Util.align(note.header.n_namesz, 2)
|
||||
desc_size = Util.align(note.header.n_descsz, 2)
|
||||
cur += SIZE_OF_NHDR + name_size + desc_size
|
||||
notes << note
|
||||
yield note
|
||||
end
|
||||
notes
|
||||
end
|
||||
|
||||
# Simply use +#notes+ to get all notes.
|
||||
# @return [Array<ELFTools::Note::Note>]
|
||||
# Whole notes.
|
||||
def notes
|
||||
each_notes.to_a
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Get the endian.
|
||||
#
|
||||
# @note This method assume method +header+ exists.
|
||||
# @return [Symbol] +:little+ or +:big+.
|
||||
def endian
|
||||
header.class.self_endian
|
||||
end
|
||||
|
||||
def create_note(cur)
|
||||
nhdr = Structs::ELF_Nhdr.new(endian: endian, offset: stream.pos).read(stream)
|
||||
ELFTools::Note::Note.new(nhdr, stream, cur)
|
||||
end
|
||||
|
||||
# Class of a note.
|
||||
class Note
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF_Nhdr] Note header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
attr_reader :offset # @return [Integer] Address of this note start, includes note header.
|
||||
|
||||
# Instantiate a {ELFTools::Note::Note} object.
|
||||
# @param [ELF_Nhdr] header The note header.
|
||||
# @param [#pos=, #read] stream Streaming object.
|
||||
# @param [Integer] offset
|
||||
# Start address of this note, includes the header.
|
||||
def initialize(header, stream, offset)
|
||||
@header = header
|
||||
@stream = stream
|
||||
@offset = offset
|
||||
end
|
||||
|
||||
# Name of this note.
|
||||
# @return [String] The name.
|
||||
def name
|
||||
return @name if defined?(@name)
|
||||
|
||||
stream.pos = @offset + SIZE_OF_NHDR
|
||||
@name = stream.read(header.n_namesz)[0..-2]
|
||||
end
|
||||
|
||||
# Description of this note.
|
||||
# @return [String] The description.
|
||||
def desc
|
||||
return @desc if instance_variable_defined?(:@desc)
|
||||
|
||||
stream.pos = @offset + SIZE_OF_NHDR + Util.align(header.n_namesz, 2)
|
||||
@desc = stream.read(header.n_descsz)
|
||||
end
|
||||
|
||||
# For those who prefer the full name.
|
||||
alias description desc
|
||||
end
|
||||
end
|
||||
end
|
||||
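A sketch of the shared interface this module provides; the same calls work on a NoteSegment (the section name and binary path are illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  note_sec = elf.section_by_name('.note.gnu.build-id')
  note_sec.notes.each do |note|
    puts note.name                     # owner, e.g. "GNU"
    puts note.desc.unpack('H*').first  # descriptor bytes as hex
  end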
@ -0,0 +1,22 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/dynamic'
|
||||
require 'elftools/sections/section'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class for dynamic table section.
|
||||
#
|
||||
# This section should always be named .dynamic.
|
||||
# This class knows how to get the list of dynamic tags.
|
||||
class DynamicSection < Section
|
||||
include ELFTools::Dynamic
|
||||
|
||||
# Get the start address of tags.
|
||||
# @return [Integer] Start address of tags.
|
||||
def tag_start
|
||||
header.sh_offset
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,27 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/note'
|
||||
require 'elftools/sections/section'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class of note section.
|
||||
# A note section records notes.
|
||||
class NoteSection < Section
|
||||
# Load note related methods.
|
||||
include ELFTools::Note
|
||||
|
||||
# Address offset of notes start.
|
||||
# @return [Integer] The offset.
|
||||
def note_start
|
||||
header.sh_offset
|
||||
end
|
||||
|
||||
# The total size of notes in this section.
|
||||
# @return [Integer] The size.
|
||||
def note_total_size
|
||||
header.sh_size
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,18 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/sections/section'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class of null section.
|
||||
# A null section marks the end
# of the linked list (+sh_link+) between sections.
|
||||
class NullSection < Section
|
||||
# Is this a null section?
|
||||
# @return [Boolean] Yes it is.
|
||||
def null?
|
||||
true
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,109 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/constants'
|
||||
require 'elftools/sections/section'
|
||||
require 'elftools/structs'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class of relocation section.
# A relocation section records relocation entries.
|
||||
class RelocationSection < Section
|
||||
# Is this a RELA or a REL relocation section?
# @return [Boolean] True if it is RELA.
|
||||
def rela?
|
||||
header.sh_type == Constants::SHT_RELA
|
||||
end
|
||||
|
||||
# Number of relocations in this section.
|
||||
# @return [Integer] The number.
|
||||
def num_relocations
|
||||
header.sh_size / header.sh_entsize
|
||||
end
|
||||
|
||||
# Acquire the +n+-th relocation, 0-based.
|
||||
#
|
||||
# relocations are lazy loaded.
|
||||
# @param [Integer] n The index.
|
||||
# @return [ELFTools::Relocation, nil]
|
||||
# The target relocation.
|
||||
# If +n+ is out of bound, +nil+ is returned.
|
||||
def relocation_at(n)
|
||||
@relocations ||= LazyArray.new(num_relocations, &method(:create_relocation))
|
||||
@relocations[n]
|
||||
end
|
||||
|
||||
# Iterate all relocations.
|
||||
#
|
||||
# All relocations are lazily loaded; a relocation is
# only created when it is accessed.
|
||||
# @yieldparam [ELFTools::Relocation] rel A relocation object.
|
||||
# @yieldreturn [void]
|
||||
# @return [Enumerator<ELFTools::Relocation>, Array<ELFTools::Relocation>]
|
||||
# If block is not given, an enumerator will be returned.
|
||||
# Otherwise, the whole relocations will be returned.
|
||||
def each_relocations(&block)
|
||||
return enum_for(:each_relocations) unless block_given?
|
||||
|
||||
Array.new(num_relocations) do |i|
|
||||
relocation_at(i).tap(&block)
|
||||
end
|
||||
end
|
||||
|
||||
# Simply use {#relocations} to get all relocations.
|
||||
# @return [Array<ELFTools::Relocation>]
|
||||
# Whole relocations.
|
||||
def relocations
|
||||
each_relocations.to_a
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_relocation(n)
|
||||
stream.pos = header.sh_offset + n * header.sh_entsize
|
||||
klass = rela? ? Structs::ELF_Rela : Structs::ELF_Rel
|
||||
rel = klass.new(endian: header.class.self_endian, offset: stream.pos)
|
||||
rel.elf_class = header.elf_class
|
||||
rel.read(stream)
|
||||
Relocation.new(rel, stream)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# A relocation entry.
|
||||
#
|
||||
# Can be either a REL or RELA relocation.
|
||||
# XXX: move this to an independent file?
|
||||
class Relocation
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF_Rel, ELFTools::Structs::ELF_Rela] Rel(a) header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
|
||||
# Instantiate a {Relocation} object.
|
||||
def initialize(header, stream)
|
||||
@header = header
|
||||
@stream = stream
|
||||
end
|
||||
|
||||
# +r_info+ contains both sym and type; use these two methods
# to access them more easily.
# @return [Integer] sym info.
|
||||
def r_info_sym
|
||||
header.r_info >> mask_bit
|
||||
end
|
||||
alias symbol_index r_info_sym
|
||||
|
||||
# +r_info+ contains both sym and type; use these two methods
# to access them more easily.
# @return [Integer] type info.
|
||||
def r_info_type
|
||||
header.r_info & ((1 << mask_bit) - 1)
|
||||
end
|
||||
alias type r_info_type
|
||||
|
||||
private
|
||||
|
||||
def mask_bit
|
||||
header.elf_class == 32 ? 8 : 32
|
||||
end
|
||||
end
|
||||
end
|
||||
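A sketch of reading entries through this class (the '.rela.dyn' section name and the binary path are illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  rela = elf.section_by_name('.rela.dyn')
  rela.relocations.first(3).each do |rel|
    # r_info packs the symbol index and relocation type; the readers above split it.
    printf("offset=%#x sym=%d type=%d\n",
           rel.header.r_offset.to_i, rel.symbol_index, rel.type)
  end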
56
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/sections/section.rb
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/constants'
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Base class of sections.
|
||||
class Section
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF_Shdr] Section header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
|
||||
# Instantiate a {Section} object.
|
||||
# @param [ELFTools::Structs::ELF_Shdr] header
|
||||
# The section header object.
|
||||
# @param [#pos=, #read] stream
|
||||
# The streaming object for further dump.
|
||||
# @param [ELFTools::Sections::StrTabSection, Proc] strtab
|
||||
# The string table object. For fetching section names.
|
||||
# If a +Proc+ is given, it will be called the first
# time +#name+ is accessed.
|
||||
# @param [Method] offset_from_vma
|
||||
# The method to get offset of file, given virtual memory address.
|
||||
def initialize(header, stream, offset_from_vma: nil, strtab: nil, **_kwargs)
|
||||
@header = header
|
||||
@stream = stream
|
||||
@strtab = strtab
|
||||
@offset_from_vma = offset_from_vma
|
||||
end
|
||||
|
||||
# Return +header.sh_type+ in a simpler way.
|
||||
# @return [Integer]
|
||||
# The type, meaning of types are defined in {Constants::SHT}.
|
||||
def type
|
||||
header.sh_type.to_i
|
||||
end
|
||||
|
||||
# Get name of this section.
|
||||
# @return [String] The name.
|
||||
def name
|
||||
@name ||= @strtab.call.name_at(header.sh_name)
|
||||
end
|
||||
|
||||
# Fetch data of this section.
|
||||
# @return [String] Data.
|
||||
def data
|
||||
stream.pos = header.sh_offset
|
||||
stream.read(header.sh_size)
|
||||
end
|
||||
|
||||
# Is this a null section?
|
||||
# @return [Boolean] No it's not.
|
||||
def null?
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
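A short sketch of this base-class API (section name and path are illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  text = elf.section_by_name('.text')
  puts text.type            # sh_type as an Integer, see Constants::SHT
  puts text.data.bytesize   # raw sh_size bytes read from sh_offset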
38
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/sections/sections.rb
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Require this file to load all sections classes.
|
||||
|
||||
require 'elftools/sections/section'
|
||||
|
||||
require 'elftools/sections/dynamic_section'
|
||||
require 'elftools/sections/note_section'
|
||||
require 'elftools/sections/null_section'
|
||||
require 'elftools/sections/relocation_section'
|
||||
require 'elftools/sections/str_tab_section'
|
||||
require 'elftools/sections/sym_tab_section'
|
||||
|
||||
module ELFTools
|
||||
# Defines different types of sections in this module.
|
||||
module Sections
|
||||
# Class methods of {Sections::Section}.
|
||||
class << Section
|
||||
# Use different class according to +header.sh_type+.
|
||||
# @param [ELFTools::Structs::ELF_Shdr] header Section header.
|
||||
# @param [#pos=, #read] stream Streaming object.
|
||||
# @return [ELFTools::Sections::Section]
|
||||
# The returned object depends on +header.sh_type+.
|
||||
def create(header, stream, *args, **kwargs)
|
||||
klass = case header.sh_type
|
||||
when Constants::SHT_DYNAMIC then DynamicSection
|
||||
when Constants::SHT_NULL then NullSection
|
||||
when Constants::SHT_NOTE then NoteSection
|
||||
when Constants::SHT_RELA, Constants::SHT_REL then RelocationSection
|
||||
when Constants::SHT_STRTAB then StrTabSection
|
||||
when Constants::SHT_SYMTAB, Constants::SHT_DYNSYM then SymTabSection
|
||||
else Section
|
||||
end
|
||||
klass.new(header, stream, *args, **kwargs)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,21 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/sections/section'
|
||||
require 'elftools/util'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class of string table section.
|
||||
# Usually for sections .strtab and .dynstr,
|
||||
# which record names.
|
||||
class StrTabSection < Section
|
||||
# Return the section or symbol name.
|
||||
# @param [Integer] offset
|
||||
# Usually from +shdr.sh_name+ or +sym.st_name+.
|
||||
# @return [String] The name without null bytes.
|
||||
def name_at(offset)
|
||||
Util.cstring(stream, header.sh_offset + offset)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,127 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/sections/section'
|
||||
|
||||
module ELFTools
|
||||
module Sections
|
||||
# Class of symbol table section.
|
||||
# Usually for sections .symtab and .dynsym,
# which refer to symbols in the ELF file.
|
||||
class SymTabSection < Section
|
||||
# Instantiate a {SymTabSection} object.
|
||||
# There's a +section_at+ lambda for {SymTabSection}
|
||||
# to easily fetch other sections.
|
||||
# @param [ELFTools::Structs::ELF_Shdr] header
|
||||
# See {Section#initialize} for more information.
|
||||
# @param [#pos=, #read] stream
|
||||
# See {Section#initialize} for more information.
|
||||
# @param [Proc] section_at
|
||||
# The method for fetching other sections by index.
|
||||
# This lambda should be {ELFTools::ELFFile#section_at}.
|
||||
def initialize(header, stream, section_at: nil, **_kwargs)
|
||||
@section_at = section_at
|
||||
# For faster #symbol_by_name
|
||||
super
|
||||
end
|
||||
|
||||
# Number of symbols.
|
||||
# @return [Integer] The number.
|
||||
# @example
|
||||
# symtab.num_symbols
|
||||
# #=> 75
|
||||
def num_symbols
|
||||
header.sh_size / header.sh_entsize
|
||||
end
|
||||
|
||||
# Acquire the +n+-th symbol, 0-based.
|
||||
#
|
||||
# Symbols are lazy loaded.
|
||||
# @param [Integer] n The index.
|
||||
# @return [ELFTools::Sections::Symbol, nil]
|
||||
# The target symbol.
|
||||
# If +n+ is out of bound, +nil+ is returned.
|
||||
def symbol_at(n)
|
||||
@symbols ||= LazyArray.new(num_symbols, &method(:create_symbol))
|
||||
@symbols[n]
|
||||
end
|
||||
|
||||
# Iterate all symbols.
|
||||
#
|
||||
# All symbols are lazily loaded; a symbol is
# only created when it is accessed.
|
||||
# This method is useful for {#symbol_by_name}
|
||||
# since not all symbols need to be created.
|
||||
# @yieldparam [ELFTools::Sections::Symbol] sym A symbol object.
|
||||
# @yieldreturn [void]
|
||||
# @return [Enumerator<ELFTools::Sections::Symbol>, Array<ELFTools::Sections::Symbol>]
|
||||
# If block is not given, an enumerator will be returned.
|
||||
# Otherwise return array of symbols.
|
||||
def each_symbols(&block)
|
||||
return enum_for(:each_symbols) unless block_given?
|
||||
|
||||
Array.new(num_symbols) do |i|
|
||||
symbol_at(i).tap(&block)
|
||||
end
|
||||
end
|
||||
|
||||
# Simply use {#symbols} to get all symbols.
|
||||
# @return [Array<ELFTools::Sections::Symbol>]
|
||||
# The whole symbols.
|
||||
def symbols
|
||||
each_symbols.to_a
|
||||
end
|
||||
|
||||
# Get symbol by its name.
|
||||
# @param [String] name
|
||||
# The name of symbol.
|
||||
# @return [ELFTools::Sections::Symbol] Desired symbol.
|
||||
def symbol_by_name(name)
|
||||
each_symbols.find { |symbol| symbol.name == name }
|
||||
end
|
||||
|
||||
# Return the symbol string section.
|
||||
# Lazy loaded.
|
||||
# @return [ELFTools::Sections::StrTabSection] The string table section.
|
||||
def symstr
|
||||
@symstr ||= @section_at.call(header.sh_link)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_symbol(n)
|
||||
stream.pos = header.sh_offset + n * header.sh_entsize
|
||||
sym = Structs::ELF_sym[header.elf_class].new(endian: header.class.self_endian, offset: stream.pos)
|
||||
sym.read(stream)
|
||||
Symbol.new(sym, stream, symstr: method(:symstr))
|
||||
end
|
||||
end
|
||||
|
||||
# Class of symbol.
|
||||
#
|
||||
# XXX: Should this class be defined in an independent file?
|
||||
class Symbol
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF32_sym, ELFTools::Structs::ELF64_sym] Section header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
|
||||
# Instantiate a {ELFTools::Sections::Symbol} object.
|
||||
# @param [ELFTools::Structs::ELF32_sym, ELFTools::Structs::ELF64_sym] header
|
||||
# The symbol header.
|
||||
# @param [#pos=, #read] stream The streaming object.
|
||||
# @param [ELFTools::Sections::StrTabSection, Proc] symstr
|
||||
# The symbol string section.
|
||||
# If +Proc+ is given, it will be called at the first time
|
||||
# access {Symbol#name}.
|
||||
def initialize(header, stream, symstr: nil)
|
||||
@header = header
|
||||
@stream = stream
|
||||
@symstr = symstr
|
||||
end
|
||||
|
||||
# Return the symbol name.
|
||||
# @return [String] The name.
|
||||
def name
|
||||
@name ||= @symstr.call.name_at(header.st_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
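A sketch of symbol lookup through this class ('.dynsym' is parsed as a SymTabSection because its sh_type is SHT_DYNSYM; the library path and symbol name are illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('libc.so.6'))
  dynsym = elf.section_by_name('.dynsym')
  sym = dynsym.symbol_by_name('printf')
  printf("%s @ %#x\n", sym.name, sym.header.st_value.to_i) if sym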
@ -0,0 +1,20 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/segments/segment'
|
||||
require 'elftools/dynamic'
|
||||
|
||||
module ELFTools
|
||||
module Segments
|
||||
# Class for dynamic table segment.
|
||||
#
|
||||
# This class knows how to get the list of dynamic tags.
|
||||
class DynamicSegment < Segment
|
||||
include Dynamic # rock!
|
||||
# Get the start address of tags.
|
||||
# @return [Integer] Start address of tags.
|
||||
def tag_start
|
||||
header.p_offset
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -0,0 +1,20 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/segments/segment'
|
||||
|
||||
module ELFTools
|
||||
module Segments
|
||||
# For the PT_INTERP segment; knows how to get the path of the
# ELF interpreter.
|
||||
class InterpSegment < Segment
|
||||
# Get the path of interpreter.
|
||||
# @return [String] Path to the interpreter.
|
||||
# @example
|
||||
# interp_segment.interp_name
|
||||
# #=> '/lib64/ld-linux-x86-64.so.2'
|
||||
def interp_name
|
||||
data[0..-2] # remove last null byte
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
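For example (a sketch; only dynamically linked binaries carry a PT_INTERP segment, and the path is illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  interp = elf.segment_by_type(:interp)
  puts interp.interp_name if interp  # e.g. '/lib64/ld-linux-x86-64.so.2'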
@ -0,0 +1,91 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/segments/segment'
|
||||
|
||||
module ELFTools
|
||||
module Segments
|
||||
# For PT_LOAD segments.
|
||||
# Able to query between file offset and virtual memory address.
|
||||
class LoadSegment < Segment
|
||||
# Returns the start of this segment.
|
||||
# @return [Integer]
|
||||
# The file offset.
|
||||
def file_head
|
||||
header.p_offset.to_i
|
||||
end
|
||||
|
||||
# Returns size in file.
|
||||
# @return [Integer]
|
||||
# The size.
|
||||
def size
|
||||
header.p_filesz.to_i
|
||||
end
|
||||
|
||||
# Returns the end of this segment.
|
||||
# @return [Integer]
|
||||
# The file offset.
|
||||
def file_tail
|
||||
file_head + size
|
||||
end
|
||||
|
||||
# Returns the start virtual address of this segment.
|
||||
# @return [Integer]
|
||||
# The vma.
|
||||
def mem_head
|
||||
header.p_vaddr.to_i
|
||||
end
|
||||
|
||||
# Returns size in memory.
|
||||
# @return [Integer]
|
||||
# The size.
|
||||
def mem_size
|
||||
header.p_memsz.to_i
|
||||
end
|
||||
|
||||
# Returns the end virtual address of this segment.
|
||||
# @return [Integer]
|
||||
# The vma.
|
||||
def mem_tail
|
||||
mem_head + mem_size
|
||||
end
|
||||
|
||||
# Query whether the given file offset is located in this segment.
|
||||
# @param [Integer] offset
|
||||
# File offset.
|
||||
# @param [Integer] size
|
||||
# Size.
|
||||
# @return [Boolean]
|
||||
def offset_in?(offset, size = 0)
|
||||
file_head <= offset && offset + size < file_tail
|
||||
end
|
||||
|
||||
# Convert file offset into virtual memory address.
|
||||
# @param [Integer] offset
|
||||
# File offset.
|
||||
# @return [Integer]
|
||||
def offset_to_vma(offset)
|
||||
# XXX: What if file_head is not aligned with p_vaddr (which is invalid according to ELF spec)?
|
||||
offset - file_head + header.p_vaddr
|
||||
end
|
||||
|
||||
# Query whether the given virtual memory address is located in this segment.
|
||||
# @param [Integer] vma
|
||||
# Virtual memory address.
|
||||
# @param [Integer] size
|
||||
# Size.
|
||||
# @return [Boolean]
|
||||
def vma_in?(vma, size = 0)
|
||||
vma >= (header.p_vaddr & -header.p_align) &&
|
||||
vma + size <= mem_tail
|
||||
end
|
||||
|
||||
# Convert virtual memory address into file offset.
|
||||
# @param [Integer] vma
|
||||
# Virtual memory address.
|
||||
# @return [Integer]
|
||||
def vma_to_offset(vma)
|
||||
vma - header.p_vaddr + header.p_offset
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
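A sketch of the offset/vma round trip implemented above (binary path is illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  seg = elf.segments_by_type(:load).first
  vma = seg.offset_to_vma(seg.file_head).to_i  # equals seg.mem_head
  off = seg.vma_to_offset(vma).to_i            # back to seg.file_head
  puts off == seg.file_head                    # => true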
@ -0,0 +1,26 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'elftools/note'
|
||||
require 'elftools/segments/segment'
|
||||
|
||||
module ELFTools
|
||||
module Segments
|
||||
# Class of note segment.
|
||||
class NoteSegment < Segment
|
||||
# Load note related methods.
|
||||
include ELFTools::Note
|
||||
|
||||
# Address offset of notes start.
|
||||
# @return [Integer] The offset.
|
||||
def note_start
|
||||
header.p_offset
|
||||
end
|
||||
|
||||
# The total size of notes in this segment.
|
||||
# @return [Integer] The size.
|
||||
def note_total_size
|
||||
header.p_filesz
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
56
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/segments/segment.rb
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module ELFTools
|
||||
module Segments
|
||||
# Base class of segments.
|
||||
class Segment
|
||||
attr_reader :header # @return [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] Program header.
|
||||
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||
|
||||
# Instantiate a {Segment} object.
|
||||
# @param [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] header
|
||||
# Program header.
|
||||
# @param [#pos=, #read] stream
|
||||
# Streaming object.
|
||||
# @param [Method] offset_from_vma
|
||||
# The method to get offset of file, given virtual memory address.
|
||||
def initialize(header, stream, offset_from_vma: nil)
|
||||
@header = header
|
||||
@stream = stream
|
||||
@offset_from_vma = offset_from_vma
|
||||
end
|
||||
|
||||
# Return +header.p_type+ in a simpler way.
|
||||
# @return [Integer]
|
||||
# The type, meaning of types are defined in {Constants::PT}.
|
||||
def type
|
||||
header.p_type
|
||||
end
|
||||
|
||||
# The content in this segment.
|
||||
# @return [String] The content.
|
||||
def data
|
||||
stream.pos = header.p_offset
|
||||
stream.read(header.p_filesz)
|
||||
end
|
||||
|
||||
# Is this segment readable?
|
||||
# @return [Boolean] True or false.
|
||||
def readable?
|
||||
(header.p_flags & 4) == 4
|
||||
end
|
||||
|
||||
# Is this segment writable?
|
||||
# @return [Boolean] True or false.
|
||||
def writable?
|
||||
(header.p_flags & 2) == 2
|
||||
end
|
||||
|
||||
# Is this segment executable?
|
||||
# @return [Boolean] True or false.
|
||||
def executable?
|
||||
(header.p_flags & 1) == 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
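A sketch using the flag helpers above to print an ls-style permission string per segment (path is illustrative):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  elf.segments.each do |seg|
    perms = (seg.readable? ? 'r' : '-') +
            (seg.writable? ? 'w' : '-') +
            (seg.executable? ? 'x' : '-')
    puts format('p_type=%d %s', seg.type.to_i, perms)
  end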
34
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/segments/segments.rb
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Require this file to load all segment classes.
|
||||
|
||||
require 'elftools/segments/segment'
|
||||
|
||||
require 'elftools/segments/dynamic_segment'
|
||||
require 'elftools/segments/interp_segment'
|
||||
require 'elftools/segments/load_segment'
|
||||
require 'elftools/segments/note_segment'
|
||||
|
||||
module ELFTools
|
||||
# Module for defining different types of segments.
|
||||
module Segments
|
||||
# Class methods of {Segments::Segment}.
|
||||
class << Segment
|
||||
# Use different class according to +header.p_type+.
|
||||
# @param [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] header Program header of a segment.
|
||||
# @param [#pos=, #read] stream Streaming object.
|
||||
# @return [ELFTools::Segments::Segment]
|
||||
# The returned object depends on +header.p_type+.
|
||||
def create(header, stream, *args, **kwargs)
|
||||
klass = case header.p_type
|
||||
when Constants::PT_DYNAMIC then DynamicSegment
|
||||
when Constants::PT_INTERP then InterpSegment
|
||||
when Constants::PT_LOAD then LoadSegment
|
||||
when Constants::PT_NOTE then NoteSegment
|
||||
else Segment
|
||||
end
|
||||
klass.new(header, stream, *args, **kwargs)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
211
Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/elftools-1.1.2/lib/elftools/structs.rb
vendored
Normal file
@ -0,0 +1,211 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'bindata'
|
||||
|
||||
module ELFTools
|
||||
# Define ELF related structures in this module.
|
||||
#
|
||||
# Structures are fetched from https://github.com/torvalds/linux/blob/master/include/uapi/linux/elf.h.
|
||||
# Use gem +bindata+ to have these structures support 32/64 bits and little/big endian simultaneously.
|
||||
module Structs
|
||||
# The base structure to define common methods.
|
||||
class ELFStruct < BinData::Record
|
||||
# DRY. Many fields have different types on different architectures.
|
||||
CHOICE_SIZE_T = {
|
||||
selection: :elf_class, choices: { 32 => :uint32, 64 => :uint64 }
|
||||
}.freeze
|
||||
|
||||
attr_accessor :elf_class # @return [Integer] 32 or 64.
|
||||
attr_accessor :offset # @return [Integer] The file offset of this header.
|
||||
|
||||
# Records which fields have been patched.
|
||||
# @return [Hash{Integer => String}] Patches.
|
||||
def patches
|
||||
@patches ||= {}
|
||||
end
|
||||
|
||||
class << self
|
||||
# Hooks the constructor.
|
||||
#
|
||||
# +BinData::Record+ doesn't allow us to override +#initialize+, so we hack +new+ here.
|
||||
def new(*args)
|
||||
# XXX: The better implementation is +new(*args, **kwargs)+, but we can't do this unless bindata changed
|
||||
# lib/bindata/dsl.rb#override_new_in_class to invoke +new+ with both +args+ and +kwargs+.
|
||||
kwargs = args.last.is_a?(Hash) ? args.last : {}
|
||||
offset = kwargs.delete(:offset)
|
||||
super.tap do |obj|
|
||||
obj.offset = offset
|
||||
obj.field_names.each do |f|
|
||||
m = "#{f}=".to_sym
|
||||
old_method = obj.singleton_method(m)
|
||||
obj.singleton_class.send(:undef_method, m)
|
||||
obj.define_singleton_method(m) do |val|
|
||||
org = obj.send(f)
|
||||
obj.patches[org.abs_offset] = ELFStruct.pack(val, org.num_bytes)
|
||||
old_method.call(val)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Gets the endianness of current class.
|
||||
# @return [:little, :big] The endianness.
|
||||
def self_endian
|
||||
bindata_name[-2..-1] == 'be' ? :big : :little
|
||||
end
|
||||
|
||||
# Packs an integer to string.
|
||||
# @param [Integer] val
|
||||
# @param [Integer] bytes
|
||||
# @return [String]
|
||||
def pack(val, bytes)
|
||||
raise ArgumentError, "Not supported assign type #{val.class}" unless val.is_a?(Integer)
|
||||
|
||||
number = val & ((1 << (8 * bytes)) - 1)
|
||||
out = []
|
||||
bytes.times do
|
||||
out << (number & 0xff)
|
||||
number >>= 8
|
||||
end
|
||||
out = out.pack('C*')
|
||||
self_endian == :little ? out : out.reverse
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# ELF header structure.
|
||||
class ELF_Ehdr < ELFStruct
|
||||
endian :big_and_little
|
||||
struct :e_ident do
|
||||
string :magic, read_length: 4
|
||||
int8 :ei_class
|
||||
int8 :ei_data
|
||||
int8 :ei_version
|
||||
int8 :ei_osabi
|
||||
int8 :ei_abiversion
|
||||
string :ei_padding, read_length: 7 # no use
|
||||
end
|
||||
uint16 :e_type
|
||||
uint16 :e_machine
|
||||
uint32 :e_version
|
||||
# entry point
|
||||
choice :e_entry, **CHOICE_SIZE_T
|
||||
choice :e_phoff, **CHOICE_SIZE_T
|
||||
choice :e_shoff, **CHOICE_SIZE_T
|
||||
uint32 :e_flags
|
||||
uint16 :e_ehsize # size of this header
|
||||
uint16 :e_phentsize # size of each segment
|
||||
uint16 :e_phnum # number of segments
|
||||
uint16 :e_shentsize # size of each section
|
||||
uint16 :e_shnum # number of sections
|
||||
uint16 :e_shstrndx # index of string table section
|
||||
end
|
||||
|
||||
# Section header structure.
|
||||
class ELF_Shdr < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :sh_name
|
||||
uint32 :sh_type
|
||||
choice :sh_flags, **CHOICE_SIZE_T
|
||||
choice :sh_addr, **CHOICE_SIZE_T
|
||||
choice :sh_offset, **CHOICE_SIZE_T
|
||||
choice :sh_size, **CHOICE_SIZE_T
|
||||
uint32 :sh_link
|
||||
uint32 :sh_info
|
||||
choice :sh_addralign, **CHOICE_SIZE_T
|
||||
choice :sh_entsize, **CHOICE_SIZE_T
|
||||
end
|
||||
|
||||
# Program header structure for 32-bit.
|
||||
class ELF32_Phdr < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :p_type
|
||||
uint32 :p_offset
|
||||
uint32 :p_vaddr
|
||||
uint32 :p_paddr
|
||||
uint32 :p_filesz
|
||||
uint32 :p_memsz
|
||||
uint32 :p_flags
|
||||
uint32 :p_align
|
||||
end
|
||||
|
||||
# Program header structure for 64-bit.
|
||||
class ELF64_Phdr < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :p_type
|
||||
uint32 :p_flags
|
||||
uint64 :p_offset
|
||||
uint64 :p_vaddr
|
||||
uint64 :p_paddr
|
||||
uint64 :p_filesz
|
||||
uint64 :p_memsz
|
||||
uint64 :p_align
|
||||
end
|
||||
|
||||
# Gets the class of program header according to bits.
|
||||
ELF_Phdr = {
|
||||
32 => ELF32_Phdr,
|
||||
64 => ELF64_Phdr
|
||||
}.freeze
|
||||
|
||||
# Symbol structure for 32-bit.
|
||||
class ELF32_sym < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :st_name
|
||||
uint32 :st_value
|
||||
uint32 :st_size
|
||||
uint8 :st_info
|
||||
uint8 :st_other
|
||||
uint16 :st_shndx
|
||||
end
|
||||
|
||||
# Symbol structure for 64-bit.
|
||||
class ELF64_sym < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :st_name # Symbol name, index in string tbl
|
||||
uint8 :st_info # Type and binding attributes
|
||||
uint8 :st_other # No defined meaning, 0
|
||||
uint16 :st_shndx # Associated section index
|
||||
uint64 :st_value # Value of the symbol
|
||||
uint64 :st_size # Associated symbol size
|
||||
end
|
||||
|
||||
# Get symbol header class according to bits.
|
||||
ELF_sym = {
|
||||
32 => ELF32_sym,
|
||||
64 => ELF64_sym
|
||||
}.freeze
|
||||
|
||||
# Note header.
|
||||
class ELF_Nhdr < ELFStruct
|
||||
endian :big_and_little
|
||||
uint32 :n_namesz # Name size
|
||||
uint32 :n_descsz # Content size
|
||||
uint32 :n_type # Content type
|
||||
end
|
||||
|
||||
# Dynamic tag header.
|
||||
class ELF_Dyn < ELFStruct
|
||||
endian :big_and_little
|
||||
choice :d_tag, selection: :elf_class, choices: { 32 => :int32, 64 => :int64 }
|
||||
# This is a union type named +d_un+ in the original source;
# it is simplified to +d_val+ here.
|
||||
choice :d_val, **CHOICE_SIZE_T
|
||||
end
|
||||
|
||||
# Rel header in .rel section.
|
||||
class ELF_Rel < ELFStruct
|
||||
endian :big_and_little
|
||||
choice :r_offset, **CHOICE_SIZE_T
|
||||
choice :r_info, **CHOICE_SIZE_T
|
||||
end
|
||||
|
||||
# Rela header in .rela section.
|
||||
class ELF_Rela < ELFStruct
|
||||
endian :big_and_little
|
||||
choice :r_offset, **CHOICE_SIZE_T
|
||||
choice :r_info, **CHOICE_SIZE_T
|
||||
choice :r_addend, selection: :elf_class, choices: { 32 => :int32, 64 => :int64 }
|
||||
end
|
||||
end
|
||||
end
|
||||
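A sketch of the patch-recording hook in ELFStruct above: assigning to any field stores the packed bytes keyed by the field's offset, and ELFFile#save writes them back (paths are illustrative; e_flags is used purely as a demo field):

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('a.out'))
  elf.header.e_flags = 0x1337  # the hooked setter records the packed bytes
  p elf.patches                # e.g. { offset_of_e_flags => "7\x13\x00\x00" } on little-endian
  elf.save('patched.out')      # applies every recorded patch to a copy of the file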
Some files were not shown because too many files have changed in this diff