Merge pull request #16267 from Bo98/vendor-3.1

commit ec5daf401c

.gitignore (vendored, 4 changed lines)
@@ -37,9 +37,11 @@
 **/vendor/bundle/ruby/*/plugins
 **/vendor/bundle/ruby/*/specifications
 
-# Ignore Ruby gems for versions other than we explicitly vendor
+# Ignore Ruby gems for versions other than we explicitly vendor.
+# Keep this in sync with the list in standalone/init.rb.
 **/vendor/bundle/ruby/*/
 !**/vendor/bundle/ruby/2.6.0/
+!**/vendor/bundle/ruby/3.1.0/
 
 # Ignore Bundler binary files
 **/vendor/bundle/ruby/*/gems/**/*.bundle

standalone/init.rb

@@ -4,9 +4,9 @@
 # This file is included before any other files. It intentionally has typing disabled and has minimal use of `require`.
 
 required_ruby_major, required_ruby_minor, = ENV.fetch("HOMEBREW_REQUIRED_RUBY_VERSION", "").split(".").map(&:to_i)
-unsupported_ruby = if required_ruby_minor.nil?
-  # We're probably only running rubocop etc so just assume supported
-  false
+gems_vendored = if required_ruby_minor.nil?
+  # We're likely here if running RuboCop etc, so just assume we don't need to install gems as we likely already have
+  true
 else
   ruby_major, ruby_minor, = RUBY_VERSION.split(".").map(&:to_i)
   if ruby_major < required_ruby_major || (ruby_major == required_ruby_major && ruby_minor < required_ruby_minor)
@@ -14,7 +14,9 @@ else
           "You're running #{RUBY_VERSION}."
   end
 
-  ruby_major != required_ruby_major || ruby_minor != required_ruby_minor
+  # This list should match .gitignore
+  vendored_versions = ["2.6", "3.1"].freeze
+  vendored_versions.include?("#{ruby_major}.#{ruby_minor}")
 end.freeze
 
 # We trust base Ruby to provide what we need.
@@ -29,7 +31,7 @@ require_relative "../utils/gems"
 Homebrew.setup_gem_environment!(setup_path: false)
 
 # Install gems for Rubies we don't vendor for.
-if unsupported_ruby && !ENV["HOMEBREW_SKIP_INITIAL_GEM_INSTALL"]
+if !gems_vendored && !ENV["HOMEBREW_SKIP_INITIAL_GEM_INSTALL"]
   Homebrew.install_bundler_gems!(setup_path: false)
   ENV["HOMEBREW_SKIP_INITIAL_GEM_INSTALL"] = "1"
 end

Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/activesupport-6.1.7.6/MIT-LICENSE (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
Copyright (c) 2005-2022 David Heinemeier Hansson

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -0,0 +1,104 @@
# frozen_string_literal: true

class Array
  # Returns the tail of the array from +position+.
  #
  # %w( a b c d ).from(0) # => ["a", "b", "c", "d"]
  # %w( a b c d ).from(2) # => ["c", "d"]
  # %w( a b c d ).from(10) # => []
  # %w().from(0) # => []
  # %w( a b c d ).from(-2) # => ["c", "d"]
  # %w( a b c ).from(-10) # => []
  def from(position)
    self[position, length] || []
  end

  # Returns the beginning of the array up to +position+.
  #
  # %w( a b c d ).to(0) # => ["a"]
  # %w( a b c d ).to(2) # => ["a", "b", "c"]
  # %w( a b c d ).to(10) # => ["a", "b", "c", "d"]
  # %w().to(0) # => []
  # %w( a b c d ).to(-2) # => ["a", "b", "c"]
  # %w( a b c ).to(-10) # => []
  def to(position)
    if position >= 0
      take position + 1
    else
      self[0..position]
    end
  end

  # Returns a new array that includes the passed elements.
  #
  # [ 1, 2, 3 ].including(4, 5) # => [ 1, 2, 3, 4, 5 ]
  # [ [ 0, 1 ] ].including([ [ 1, 0 ] ]) # => [ [ 0, 1 ], [ 1, 0 ] ]
  def including(*elements)
    self + elements.flatten(1)
  end

  # Returns a copy of the Array excluding the specified elements.
  #
  # ["David", "Rafael", "Aaron", "Todd"].excluding("Aaron", "Todd") # => ["David", "Rafael"]
  # [ [ 0, 1 ], [ 1, 0 ] ].excluding([ [ 1, 0 ] ]) # => [ [ 0, 1 ] ]
  #
  # Note: This is an optimization of <tt>Enumerable#excluding</tt> that uses <tt>Array#-</tt>
  # instead of <tt>Array#reject</tt> for performance reasons.
  def excluding(*elements)
    self - elements.flatten(1)
  end

  # Alias for #excluding.
  def without(*elements)
    excluding(*elements)
  end

  # Equal to <tt>self[1]</tt>.
  #
  # %w( a b c d e ).second # => "b"
  def second
    self[1]
  end

  # Equal to <tt>self[2]</tt>.
  #
  # %w( a b c d e ).third # => "c"
  def third
    self[2]
  end

  # Equal to <tt>self[3]</tt>.
  #
  # %w( a b c d e ).fourth # => "d"
  def fourth
    self[3]
  end

  # Equal to <tt>self[4]</tt>.
  #
  # %w( a b c d e ).fifth # => "e"
  def fifth
    self[4]
  end

  # Equal to <tt>self[41]</tt>. Also known as accessing "the reddit".
  #
  # (1..42).to_a.forty_two # => 42
  def forty_two
    self[41]
  end

  # Equal to <tt>self[-3]</tt>.
  #
  # %w( a b c d e ).third_to_last # => "c"
  def third_to_last
    self[-3]
  end

  # Equal to <tt>self[-2]</tt>.
  #
  # %w( a b c d e ).second_to_last # => "d"
  def second_to_last
    self[-2]
  end
end

@@ -0,0 +1,260 @@
# frozen_string_literal: true

module Enumerable
  INDEX_WITH_DEFAULT = Object.new
  private_constant :INDEX_WITH_DEFAULT

  # Enumerable#sum was added in Ruby 2.4, but it only works with Numeric elements
  # when we omit an identity.

  # :stopdoc:

  # We can't use Refinements here because Refinements with Module which will be prepended
  # doesn't work well https://bugs.ruby-lang.org/issues/13446
  alias :_original_sum_with_required_identity :sum
  private :_original_sum_with_required_identity

  # :startdoc:

  # Calculates a sum from the elements.
  #
  # payments.sum { |p| p.price * p.tax_rate }
  # payments.sum(&:price)
  #
  # The latter is a shortcut for:
  #
  # payments.inject(0) { |sum, p| sum + p.price }
  #
  # It can also calculate the sum without the use of a block.
  #
  # [5, 15, 10].sum # => 30
  # ['foo', 'bar'].sum # => "foobar"
  # [[1, 2], [3, 1, 5]].sum # => [1, 2, 3, 1, 5]
  #
  # The default sum of an empty list is zero. You can override this default:
  #
  # [].sum(Payment.new(0)) { |i| i.amount } # => Payment.new(0)
  def sum(identity = nil, &block)
    if identity
      _original_sum_with_required_identity(identity, &block)
    elsif block_given?
      map(&block).sum(identity)
    else
      inject(:+) || 0
    end
  end

  # Convert an enumerable to a hash, using the block result as the key and the
  # element as the value.
  #
  # people.index_by(&:login)
  # # => { "nextangle" => <Person ...>, "chade-" => <Person ...>, ...}
  #
  # people.index_by { |person| "#{person.first_name} #{person.last_name}" }
  # # => { "Chade- Fowlersburg-e" => <Person ...>, "David Heinemeier Hansson" => <Person ...>, ...}
  def index_by
    if block_given?
      result = {}
      each { |elem| result[yield(elem)] = elem }
      result
    else
      to_enum(:index_by) { size if respond_to?(:size) }
    end
  end

  # Convert an enumerable to a hash, using the element as the key and the block
  # result as the value.
  #
  # post = Post.new(title: "hey there", body: "what's up?")
  #
  # %i( title body ).index_with { |attr_name| post.public_send(attr_name) }
  # # => { title: "hey there", body: "what's up?" }
  #
  # If an argument is passed instead of a block, it will be used as the value
  # for all elements:
  #
  # %i( created_at updated_at ).index_with(Time.now)
  # # => { created_at: 2020-03-09 22:31:47, updated_at: 2020-03-09 22:31:47 }
  def index_with(default = INDEX_WITH_DEFAULT)
    if block_given?
      result = {}
      each { |elem| result[elem] = yield(elem) }
      result
    elsif default != INDEX_WITH_DEFAULT
      result = {}
      each { |elem| result[elem] = default }
      result
    else
      to_enum(:index_with) { size if respond_to?(:size) }
    end
  end

  # Returns +true+ if the enumerable has more than 1 element. Functionally
  # equivalent to <tt>enum.to_a.size > 1</tt>. Can be called with a block too,
  # much like any?, so <tt>people.many? { |p| p.age > 26 }</tt> returns +true+
  # if more than one person is over 26.
  def many?
    cnt = 0
    if block_given?
      any? do |element|
        cnt += 1 if yield element
        cnt > 1
      end
    else
      any? { (cnt += 1) > 1 }
    end
  end

  # Returns a new array that includes the passed elements.
  #
  # [ 1, 2, 3 ].including(4, 5)
  # # => [ 1, 2, 3, 4, 5 ]
  #
  # ["David", "Rafael"].including %w[ Aaron Todd ]
  # # => ["David", "Rafael", "Aaron", "Todd"]
  def including(*elements)
    to_a.including(*elements)
  end

  # The negative of the <tt>Enumerable#include?</tt>. Returns +true+ if the
  # collection does not include the object.
  def exclude?(object)
    !include?(object)
  end

  # Returns a copy of the enumerable excluding the specified elements.
  #
  # ["David", "Rafael", "Aaron", "Todd"].excluding "Aaron", "Todd"
  # # => ["David", "Rafael"]
  #
  # ["David", "Rafael", "Aaron", "Todd"].excluding %w[ Aaron Todd ]
  # # => ["David", "Rafael"]
  #
  # {foo: 1, bar: 2, baz: 3}.excluding :bar
  # # => {foo: 1, baz: 3}
  def excluding(*elements)
    elements.flatten!(1)
    reject { |element| elements.include?(element) }
  end

  # Alias for #excluding.
  def without(*elements)
    excluding(*elements)
  end

  # Extract the given key from each element in the enumerable.
  #
  # [{ name: "David" }, { name: "Rafael" }, { name: "Aaron" }].pluck(:name)
  # # => ["David", "Rafael", "Aaron"]
  #
  # [{ id: 1, name: "David" }, { id: 2, name: "Rafael" }].pluck(:id, :name)
  # # => [[1, "David"], [2, "Rafael"]]
  def pluck(*keys)
    if keys.many?
      map { |element| keys.map { |key| element[key] } }
    else
      key = keys.first
      map { |element| element[key] }
    end
  end

  # Extract the given key from the first element in the enumerable.
  #
  # [{ name: "David" }, { name: "Rafael" }, { name: "Aaron" }].pick(:name)
  # # => "David"
  #
  # [{ id: 1, name: "David" }, { id: 2, name: "Rafael" }].pick(:id, :name)
  # # => [1, "David"]
  def pick(*keys)
    return if none?

    if keys.many?
      keys.map { |key| first[key] }
    else
      first[keys.first]
    end
  end

  # Returns a new +Array+ without the blank items.
  # Uses Object#blank? for determining if an item is blank.
  #
  # [1, "", nil, 2, " ", [], {}, false, true].compact_blank
  # # => [1, 2, true]
  #
  # Set.new([nil, "", 1, 2])
  # # => [2, 1] (or [1, 2])
  #
  # When called on a +Hash+, returns a new +Hash+ without the blank values.
  #
  # { a: "", b: 1, c: nil, d: [], e: false, f: true }.compact_blank
  # #=> { b: 1, f: true }
  def compact_blank
    reject(&:blank?)
  end
end

class Hash
  # Hash#reject has its own definition, so this needs one too.
  def compact_blank #:nodoc:
    reject { |_k, v| v.blank? }
  end

  # Removes all blank values from the +Hash+ in place and returns self.
  # Uses Object#blank? for determining if a value is blank.
  #
  # h = { a: "", b: 1, c: nil, d: [], e: false, f: true }
  # h.compact_blank!
  # # => { b: 1, f: true }
  def compact_blank!
    # use delete_if rather than reject! because it always returns self even if nothing changed
    delete_if { |_k, v| v.blank? }
  end
end

class Range #:nodoc:
  # Optimize range sum to use arithmetic progression if a block is not given and
  # we have a range of numeric values.
  def sum(identity = nil)
    if block_given? || !(first.is_a?(Integer) && last.is_a?(Integer))
      super
    else
      actual_last = exclude_end? ? (last - 1) : last
      if actual_last >= first
        sum = identity || 0
        sum + (actual_last - first + 1) * (actual_last + first) / 2
      else
        identity || 0
      end
    end
  end
end

# Using Refinements here in order not to expose our internal method
using Module.new {
  refine Array do
    alias :orig_sum :sum
  end
}

class Array #:nodoc:
  # Array#sum was added in Ruby 2.4 but it only works with Numeric elements.
  def sum(init = nil, &block)
    if init.is_a?(Numeric) || first.is_a?(Numeric)
      init ||= 0
      orig_sum(init, &block)
    else
      super
    end
  end

  # Removes all blank elements from the +Array+ in place and returns self.
  # Uses Object#blank? for determining if an item is blank.
  #
  # a = [1, "", nil, 2, " ", [], {}, false, true]
  # a.compact_blank!
  # # => [1, 2, true]
  def compact_blank!
    # use delete_if rather than reject! because it always returns self even if nothing changed
    delete_if(&:blank?)
  end
end

@@ -0,0 +1,70 @@
# frozen_string_literal: true

require "fileutils"

class File
  # Write to a file atomically. Useful for situations where you don't
  # want other processes or threads to see half-written files.
  #
  # File.atomic_write('important.file') do |file|
  #   file.write('hello')
  # end
  #
  # This method needs to create a temporary file. By default it will create it
  # in the same directory as the destination file. If you don't like this
  # behavior you can provide a different directory but it must be on the
  # same physical filesystem as the file you're trying to write.
  #
  # File.atomic_write('/data/something.important', '/data/tmp') do |file|
  #   file.write('hello')
  # end
  def self.atomic_write(file_name, temp_dir = dirname(file_name))
    require "tempfile" unless defined?(Tempfile)

    Tempfile.open(".#{basename(file_name)}", temp_dir) do |temp_file|
      temp_file.binmode
      return_val = yield temp_file
      temp_file.close

      old_stat = if exist?(file_name)
        # Get original file permissions
        stat(file_name)
      else
        # If not possible, probe which are the default permissions in the
        # destination directory.
        probe_stat_in(dirname(file_name))
      end

      if old_stat
        # Set correct permissions on new file
        begin
          chown(old_stat.uid, old_stat.gid, temp_file.path)
          # This operation will affect filesystem ACL's
          chmod(old_stat.mode, temp_file.path)
        rescue Errno::EPERM, Errno::EACCES
          # Changing file ownership failed, moving on.
        end
      end

      # Overwrite original file with temp file
      rename(temp_file.path, file_name)
      return_val
    end
  end

  # Private utility method.
  def self.probe_stat_in(dir) #:nodoc:
    basename = [
      ".permissions_check",
      Thread.current.object_id,
      Process.pid,
      rand(1000000)
    ].join(".")

    file_name = join(dir, basename)
    FileUtils.touch(file_name)
    stat(file_name)
  ensure
    FileUtils.rm_f(file_name) if file_name
  end
end

@@ -0,0 +1,34 @@
# frozen_string_literal: true

class Hash
  # Returns a new hash with +self+ and +other_hash+ merged recursively.
  #
  # h1 = { a: true, b: { c: [1, 2, 3] } }
  # h2 = { a: false, b: { x: [3, 4, 5] } }
  #
  # h1.deep_merge(h2) # => { a: false, b: { c: [1, 2, 3], x: [3, 4, 5] } }
  #
  # Like with Hash#merge in the standard library, a block can be provided
  # to merge values:
  #
  # h1 = { a: 100, b: 200, c: { c1: 100 } }
  # h2 = { b: 250, c: { c1: 200 } }
  # h1.deep_merge(h2) { |key, this_val, other_val| this_val + other_val }
  # # => { a: 100, b: 450, c: { c1: 300 } }
  def deep_merge(other_hash, &block)
    dup.deep_merge!(other_hash, &block)
  end

  # Same as +deep_merge+, but modifies +self+.
  def deep_merge!(other_hash, &block)
    merge!(other_hash) do |key, this_val, other_val|
      if this_val.is_a?(Hash) && other_val.is_a?(Hash)
        this_val.deep_merge(other_val, &block)
      elsif block_given?
        block.call(key, this_val, other_val)
      else
        other_val
      end
    end
  end
end

@@ -0,0 +1,46 @@
# frozen_string_literal: true

class Hash
  # Returns a new hash with all values converted by the block operation.
  # This includes the values from the root hash and from all
  # nested hashes and arrays.
  #
  # hash = { person: { name: 'Rob', age: '28' } }
  #
  # hash.deep_transform_values{ |value| value.to_s.upcase }
  # # => {person: {name: "ROB", age: "28"}}
  def deep_transform_values(&block)
    _deep_transform_values_in_object(self, &block)
  end

  # Destructively converts all values by using the block operation.
  # This includes the values from the root hash and from all
  # nested hashes and arrays.
  def deep_transform_values!(&block)
    _deep_transform_values_in_object!(self, &block)
  end

  private
    # Support methods for deep transforming nested hashes and arrays.
    def _deep_transform_values_in_object(object, &block)
      case object
      when Hash
        object.transform_values { |value| _deep_transform_values_in_object(value, &block) }
      when Array
        object.map { |e| _deep_transform_values_in_object(e, &block) }
      else
        yield(object)
      end
    end

    def _deep_transform_values_in_object!(object, &block)
      case object
      when Hash
        object.transform_values! { |value| _deep_transform_values_in_object!(value, &block) }
      when Array
        object.map! { |e| _deep_transform_values_in_object!(e, &block) }
      else
        yield(object)
      end
    end
end

@@ -0,0 +1,24 @@
# frozen_string_literal: true

class Hash
  # Returns a hash that includes everything except given keys.
  # hash = { a: true, b: false, c: nil }
  # hash.except(:c) # => { a: true, b: false }
  # hash.except(:a, :b) # => { c: nil }
  # hash # => { a: true, b: false, c: nil }
  #
  # This is useful for limiting a set of parameters to everything but a few known toggles:
  # @person.update(params[:person].except(:admin))
  def except(*keys)
    slice(*self.keys - keys)
  end unless method_defined?(:except)

  # Removes the given keys from hash and returns it.
  # hash = { a: true, b: false, c: nil }
  # hash.except!(:c) # => { a: true, b: false }
  # hash # => { a: true, b: false }
  def except!(*keys)
    keys.each { |key| delete(key) }
    self
  end
end

@@ -0,0 +1,143 @@
# frozen_string_literal: true

class Hash
  # Returns a new hash with all keys converted to strings.
  #
  # hash = { name: 'Rob', age: '28' }
  #
  # hash.stringify_keys
  # # => {"name"=>"Rob", "age"=>"28"}
  def stringify_keys
    transform_keys(&:to_s)
  end

  # Destructively converts all keys to strings. Same as
  # +stringify_keys+, but modifies +self+.
  def stringify_keys!
    transform_keys!(&:to_s)
  end

  # Returns a new hash with all keys converted to symbols, as long as
  # they respond to +to_sym+.
  #
  # hash = { 'name' => 'Rob', 'age' => '28' }
  #
  # hash.symbolize_keys
  # # => {:name=>"Rob", :age=>"28"}
  def symbolize_keys
    transform_keys { |key| key.to_sym rescue key }
  end
  alias_method :to_options, :symbolize_keys

  # Destructively converts all keys to symbols, as long as they respond
  # to +to_sym+. Same as +symbolize_keys+, but modifies +self+.
  def symbolize_keys!
    transform_keys! { |key| key.to_sym rescue key }
  end
  alias_method :to_options!, :symbolize_keys!

  # Validates all keys in a hash match <tt>*valid_keys</tt>, raising
  # +ArgumentError+ on a mismatch.
  #
  # Note that keys are treated differently than HashWithIndifferentAccess,
  # meaning that string and symbol keys will not match.
  #
  # { name: 'Rob', years: '28' }.assert_valid_keys(:name, :age) # => raises "ArgumentError: Unknown key: :years. Valid keys are: :name, :age"
  # { name: 'Rob', age: '28' }.assert_valid_keys('name', 'age') # => raises "ArgumentError: Unknown key: :name. Valid keys are: 'name', 'age'"
  # { name: 'Rob', age: '28' }.assert_valid_keys(:name, :age) # => passes, raises nothing
  def assert_valid_keys(*valid_keys)
    valid_keys.flatten!
    each_key do |k|
      unless valid_keys.include?(k)
        raise ArgumentError.new("Unknown key: #{k.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}")
      end
    end
  end

  # Returns a new hash with all keys converted by the block operation.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  #
  # hash = { person: { name: 'Rob', age: '28' } }
  #
  # hash.deep_transform_keys{ |key| key.to_s.upcase }
  # # => {"PERSON"=>{"NAME"=>"Rob", "AGE"=>"28"}}
  def deep_transform_keys(&block)
    _deep_transform_keys_in_object(self, &block)
  end

  # Destructively converts all keys by using the block operation.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_transform_keys!(&block)
    _deep_transform_keys_in_object!(self, &block)
  end

  # Returns a new hash with all keys converted to strings.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  #
  # hash = { person: { name: 'Rob', age: '28' } }
  #
  # hash.deep_stringify_keys
  # # => {"person"=>{"name"=>"Rob", "age"=>"28"}}
  def deep_stringify_keys
    deep_transform_keys(&:to_s)
  end

  # Destructively converts all keys to strings.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_stringify_keys!
    deep_transform_keys!(&:to_s)
  end

  # Returns a new hash with all keys converted to symbols, as long as
  # they respond to +to_sym+. This includes the keys from the root hash
  # and from all nested hashes and arrays.
  #
  # hash = { 'person' => { 'name' => 'Rob', 'age' => '28' } }
  #
  # hash.deep_symbolize_keys
  # # => {:person=>{:name=>"Rob", :age=>"28"}}
  def deep_symbolize_keys
    deep_transform_keys { |key| key.to_sym rescue key }
  end

  # Destructively converts all keys to symbols, as long as they respond
  # to +to_sym+. This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_symbolize_keys!
    deep_transform_keys! { |key| key.to_sym rescue key }
  end

  private
    # Support methods for deep transforming nested hashes and arrays.
    def _deep_transform_keys_in_object(object, &block)
      case object
      when Hash
        object.each_with_object({}) do |(key, value), result|
          result[yield(key)] = _deep_transform_keys_in_object(value, &block)
        end
      when Array
        object.map { |e| _deep_transform_keys_in_object(e, &block) }
      else
        object
      end
    end

    def _deep_transform_keys_in_object!(object, &block)
      case object
      when Hash
        object.keys.each do |key|
          value = object.delete(key)
          object[yield(key)] = _deep_transform_keys_in_object!(value, &block)
        end
        object
      when Array
        object.map! { |e| _deep_transform_keys_in_object!(e, &block) }
      else
        object
      end
    end
end

@@ -0,0 +1,27 @@
# frozen_string_literal: true

class Hash
  # Replaces the hash with only the given keys.
  # Returns a hash containing the removed key/value pairs.
  #
  # hash = { a: 1, b: 2, c: 3, d: 4 }
  # hash.slice!(:a, :b) # => {:c=>3, :d=>4}
  # hash # => {:a=>1, :b=>2}
  def slice!(*keys)
    omit = slice(*self.keys - keys)
    hash = slice(*keys)
    hash.default = default
    hash.default_proc = default_proc if default_proc
    replace(hash)
    omit
  end

  # Removes and returns the key/value pairs matching the given keys.
  #
  # hash = { a: 1, b: 2, c: 3, d: 4 }
  # hash.extract!(:a, :b) # => {:a=>1, :b=>2}
  # hash # => {:c=>3, :d=>4}
  def extract!(*keys)
    keys.each_with_object(self.class.new) { |key, result| result[key] = delete(key) if has_key?(key) }
  end
end

@@ -0,0 +1,55 @@
# frozen_string_literal: true

require "active_support/core_ext/object/duplicable"

class Object
  # Returns a deep copy of object if it's duplicable. If it's
  # not duplicable, returns +self+.
  #
  # object = Object.new
  # dup = object.deep_dup
  # dup.instance_variable_set(:@a, 1)
  #
  # object.instance_variable_defined?(:@a) # => false
  # dup.instance_variable_defined?(:@a) # => true
  def deep_dup
    duplicable? ? dup : self
  end
end

class Array
  # Returns a deep copy of array.
  #
  # array = [1, [2, 3]]
  # dup = array.deep_dup
  # dup[1][2] = 4
  #
  # array[1][2] # => nil
  # dup[1][2] # => 4
  def deep_dup
    map(&:deep_dup)
  end
end

class Hash
  # Returns a deep copy of hash.
  #
  # hash = { a: { b: 'b' } }
  # dup = hash.deep_dup
  # dup[:a][:c] = 'c'
  #
  # hash[:a][:c] # => nil
  # dup[:a][:c] # => "c"
  def deep_dup
    hash = dup
    each_pair do |key, value|
      if (::String === key && key.frozen?) || ::Symbol === key
        hash[key] = value.deep_dup
      else
        hash.delete(key)
        hash[key.deep_dup] = value.deep_dup
      end
    end
    hash
  end
end

@@ -0,0 +1,49 @@
# frozen_string_literal: true

#--
# Most objects are cloneable, but not all. For example you can't dup methods:
#
# method(:puts).dup # => TypeError: allocator undefined for Method
#
# Classes may signal their instances are not duplicable removing +dup+/+clone+
# or raising exceptions from them. So, to dup an arbitrary object you normally
# use an optimistic approach and are ready to catch an exception, say:
#
# arbitrary_object.dup rescue object
#
# Rails dups objects in a few critical spots where they are not that arbitrary.
# That rescue is very expensive (like 40 times slower than a predicate), and it
# is often triggered.
#
# That's why we hardcode the following cases and check duplicable? instead of
# using that rescue idiom.
#++
class Object
  # Can you safely dup this object?
  #
  # False for method objects;
  # true otherwise.
  def duplicable?
    true
  end
end

class Method
  # Methods are not duplicable:
  #
  # method(:puts).duplicable? # => false
  # method(:puts).dup # => TypeError: allocator undefined for Method
  def duplicable?
    false
  end
end

class UnboundMethod
  # Unbound methods are not duplicable:
  #
  # method(:puts).unbind.duplicable? # => false
  # method(:puts).unbind.dup # => TypeError: allocator undefined for UnboundMethod
  def duplicable?
    false
  end
end

@@ -0,0 +1,13 @@
# frozen_string_literal: true

class String
  # The inverse of <tt>String#include?</tt>. Returns true if the string
  # does not include the other string.
  #
  # "hello".exclude? "lo" # => false
  # "hello".exclude? "ol" # => true
  # "hello".exclude? ?h # => false
  def exclude?(string)
    !include?(string)
  end
end

@@ -0,0 +1,145 @@
# frozen_string_literal: true

class String
  # Returns the string, first removing all whitespace on both ends of
  # the string, and then changing remaining consecutive whitespace
  # groups into one space each.
  #
  # Note that it handles both ASCII and Unicode whitespace.
  #
  # %{ Multi-line
  # string }.squish # => "Multi-line string"
  # " foo bar \n \t boo".squish # => "foo bar boo"
  def squish
    dup.squish!
  end

  # Performs a destructive squish. See String#squish.
  # str = " foo bar \n \t boo"
  # str.squish! # => "foo bar boo"
  # str # => "foo bar boo"
  def squish!
    gsub!(/[[:space:]]+/, " ")
    strip!
    self
  end

  # Returns a new string with all occurrences of the patterns removed.
  # str = "foo bar test"
  # str.remove(" test") # => "foo bar"
  # str.remove(" test", /bar/) # => "foo "
  # str # => "foo bar test"
  def remove(*patterns)
    dup.remove!(*patterns)
  end

  # Alters the string by removing all occurrences of the patterns.
  # str = "foo bar test"
  # str.remove!(" test", /bar/) # => "foo "
  # str # => "foo "
  def remove!(*patterns)
    patterns.each do |pattern|
      gsub! pattern, ""
    end

    self
  end

  # Truncates a given +text+ after a given <tt>length</tt> if +text+ is longer than <tt>length</tt>:
  #
  # 'Once upon a time in a world far far away'.truncate(27)
  # # => "Once upon a time in a wo..."
  #
  # Pass a string or regexp <tt>:separator</tt> to truncate +text+ at a natural break:
  #
  # 'Once upon a time in a world far far away'.truncate(27, separator: ' ')
  # # => "Once upon a time in a..."
  #
  # 'Once upon a time in a world far far away'.truncate(27, separator: /\s/)
  # # => "Once upon a time in a..."
  #
  # The last characters will be replaced with the <tt>:omission</tt> string (defaults to "...")
  # for a total length not exceeding <tt>length</tt>:
  #
  # 'And they found that many people were sleeping better.'.truncate(25, omission: '... (continued)')
  # # => "And they f... (continued)"
  def truncate(truncate_at, options = {})
    return dup unless length > truncate_at

    omission = options[:omission] || "..."
    length_with_room_for_omission = truncate_at - omission.length
    stop = \
      if options[:separator]
        rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission
      else
        length_with_room_for_omission
      end

    +"#{self[0, stop]}#{omission}"
  end

  # Truncates +text+ to at most <tt>bytesize</tt> bytes in length without
  # breaking string encoding by splitting multibyte characters or breaking
  # grapheme clusters ("perceptual characters") by truncating at combining
  # characters.
  #
  # >> "🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪".size
  # => 20
  # >> "🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪".bytesize
  # => 80
  # >> "🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪🔪".truncate_bytes(20)
  # => "🔪🔪🔪🔪…"
  #
  # The truncated text ends with the <tt>:omission</tt> string, defaulting
  # to "…", for a total length not exceeding <tt>bytesize</tt>.
  def truncate_bytes(truncate_at, omission: "…")
    omission ||= ""

    case
    when bytesize <= truncate_at
      dup
    when omission.bytesize > truncate_at
      raise ArgumentError, "Omission #{omission.inspect} is #{omission.bytesize}, larger than the truncation length of #{truncate_at} bytes"
    when omission.bytesize == truncate_at
      omission.dup
    else
      self.class.new.tap do |cut|
        cut_at = truncate_at - omission.bytesize

        scan(/\X/) do |grapheme|
          if cut.bytesize + grapheme.bytesize <= cut_at
            cut << grapheme
          else
            break
          end
        end

        cut << omission
      end
    end
  end

  # Truncates a given +text+ after a given number of words (<tt>words_count</tt>):
  #
  # 'Once upon a time in a world far far away'.truncate_words(4)
  # # => "Once upon a time..."
  #
  # Pass a string or regexp <tt>:separator</tt> to specify a different separator of words:
  #
  # 'Once<br>upon<br>a<br>time<br>in<br>a<br>world'.truncate_words(5, separator: '<br>')
  # # => "Once<br>upon<br>a<br>time<br>in..."
  #
  # The last characters will be replaced with the <tt>:omission</tt> string (defaults to "..."):
  #
  # 'And they found that many people were sleeping better.'.truncate_words(5, omission: '... (continued)')
  # # => "And they found that many... (continued)"
  def truncate_words(words_count, options = {})
    sep = options[:separator] || /\s+/
    sep = Regexp.escape(sep.to_s) unless Regexp === sep
    if self =~ /\A((?>.+?#{sep}){#{words_count - 1}}.+?)#{sep}.*/m
      $1 + (options[:omission] || "...")
    else
      dup
    end
  end
end

@@ -0,0 +1,45 @@
# frozen_string_literal: true

class String
  # Same as +indent+, except it indents the receiver in-place.
  #
  # Returns the indented string, or +nil+ if there was nothing to indent.
  def indent!(amount, indent_string = nil, indent_empty_lines = false)
    indent_string = indent_string || self[/^[ \t]/] || " "
    re = indent_empty_lines ? /^/ : /^(?!$)/
    gsub!(re, indent_string * amount)
  end

  # Indents the lines in the receiver:
  #
  # <<EOS.indent(2)
  # def some_method
  # some_code
  # end
  # EOS
  # # =>
  # def some_method
  # some_code
  # end
  #
  # The second argument, +indent_string+, specifies which indent string to
  # use. The default is +nil+, which tells the method to make a guess by
  # peeking at the first indented line, and fallback to a space if there is
  # none.
  #
  # " foo".indent(2) # => " foo"
  # "foo\n\t\tbar".indent(2) # => "\t\tfoo\n\t\t\t\tbar"
  # "foo".indent(2, "\t") # => "\t\tfoo"
  #
  # While +indent_string+ is typically one space or tab, it may be any string.
  #
  # The third argument, +indent_empty_lines+, is a flag that says whether
  # empty lines should be indented. Default is false.
  #
  # "foo\n\nbar".indent(2) # => " foo\n\n bar"
  # "foo\n\nbar".indent(2, nil, true) # => " foo\n \n bar"
  #
  def indent(amount, indent_string = nil, indent_empty_lines = false)
    dup.tap { |_| _.indent!(amount, indent_string, indent_empty_lines) }
  end
end

Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/LICENSE.txt (vendored, new file, 202 lines)
@ -0,0 +1,202 @@
|
|||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||

Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/data/unicode.data (vendored, new file, binary)
Binary file not shown.

Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable.rb (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
# frozen_string_literal: true

require 'addressable/uri'
require 'addressable/template'

Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/idna.rb (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
# frozen_string_literal: true

#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++


begin
  require "addressable/idna/native"
rescue LoadError
  # libidn or the idn gem was not available, fall back on a pure-Ruby
  # implementation...
  require "addressable/idna/pure"
end
66
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/idna/native.rb
vendored
Normal file
@ -0,0 +1,66 @@
# frozen_string_literal: true

#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++


require "idn"

module Addressable
  module IDNA
    def self.punycode_encode(value)
      IDN::Punycode.encode(value.to_s)
    end

    def self.punycode_decode(value)
      IDN::Punycode.decode(value.to_s)
    end

    class << self
      # @deprecated Use {String#unicode_normalize(:nfkc)} instead
      def unicode_normalize_kc(value)
        value.to_s.unicode_normalize(:nfkc)
      end

      extend Gem::Deprecate
      deprecate :unicode_normalize_kc, "String#unicode_normalize(:nfkc)", 2023, 4
    end

    def self.to_ascii(value)
      value.to_s.split('.', -1).map do |segment|
        if segment.size > 0 && segment.size < 64
          IDN::Idna.toASCII(segment, IDN::Idna::ALLOW_UNASSIGNED)
        elsif segment.size >= 64
          segment
        else
          ''
        end
      end.join('.')
    end

    def self.to_unicode(value)
      value.to_s.split('.', -1).map do |segment|
        if segment.size > 0 && segment.size < 64
          IDN::Idna.toUnicode(segment, IDN::Idna::ALLOW_UNASSIGNED)
        elsif segment.size >= 64
          segment
        else
          ''
        end
      end.join('.')
    end
  end
end
505
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/idna/pure.rb
vendored
Normal file
@ -0,0 +1,505 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
#--
|
||||||
|
# Copyright (C) Bob Aman
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#++
|
||||||
|
|
||||||
|
|
||||||
|
module Addressable
|
||||||
|
module IDNA
|
||||||
|
# This module is loosely based on idn_actionmailer by Mick Staugaard,
|
||||||
|
# the unicode library by Yoshida Masato, and the punycode implementation
|
||||||
|
# by Kazuhiro Nishiyama. Most of the code was copied verbatim, but
|
||||||
|
# some reformatting was done, and some translation from C was done.
|
||||||
|
#
|
||||||
|
# Without their code to work from as a base, we'd all still be relying
|
||||||
|
# on the presence of libidn. Which nobody ever seems to have installed.
|
||||||
|
#
|
||||||
|
# Original sources:
|
||||||
|
# http://github.com/staugaard/idn_actionmailer
|
||||||
|
# http://www.yoshidam.net/Ruby.html#unicode
|
||||||
|
# http://rubyforge.org/frs/?group_id=2550
|
||||||
|
|
||||||
|
|
||||||
|
UNICODE_TABLE = File.expand_path(
|
||||||
|
File.join(File.dirname(__FILE__), '../../..', 'data/unicode.data')
|
||||||
|
)
|
||||||
|
|
||||||
|
ACE_PREFIX = "xn--"
|
||||||
|
|
||||||
|
UTF8_REGEX = /\A(?:
|
||||||
|
[\x09\x0A\x0D\x20-\x7E] # ASCII
|
||||||
|
| [\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
|
||||||
|
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
|
||||||
|
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
|
||||||
|
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
|
||||||
|
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
|
||||||
|
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4-15
|
||||||
|
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
|
||||||
|
)*\z/mnx
|
||||||
|
|
||||||
|
UTF8_REGEX_MULTIBYTE = /(?:
|
||||||
|
[\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
|
||||||
|
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
|
||||||
|
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
|
||||||
|
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
|
||||||
|
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
|
||||||
|
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4-15
|
||||||
|
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
|
||||||
|
)/mnx
|
||||||
|
|
||||||
|
# :startdoc:
|
||||||
|
|
||||||
|
# Converts from a Unicode internationalized domain name to an ASCII
|
||||||
|
# domain name as described in RFC 3490.
|
||||||
|
def self.to_ascii(input)
|
||||||
|
input = input.to_s unless input.is_a?(String)
|
||||||
|
input = input.dup.force_encoding(Encoding::UTF_8).unicode_normalize(:nfkc)
|
||||||
|
if input.respond_to?(:force_encoding)
|
||||||
|
input.force_encoding(Encoding::ASCII_8BIT)
|
||||||
|
end
|
||||||
|
if input =~ UTF8_REGEX && input =~ UTF8_REGEX_MULTIBYTE
|
||||||
|
parts = unicode_downcase(input).split('.')
|
||||||
|
parts.map! do |part|
|
||||||
|
if part.respond_to?(:force_encoding)
|
||||||
|
part.force_encoding(Encoding::ASCII_8BIT)
|
||||||
|
end
|
||||||
|
if part =~ UTF8_REGEX && part =~ UTF8_REGEX_MULTIBYTE
|
||||||
|
ACE_PREFIX + punycode_encode(part)
|
||||||
|
else
|
||||||
|
part
|
||||||
|
end
|
||||||
|
end
|
||||||
|
parts.join('.')
|
||||||
|
else
|
||||||
|
input
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Converts from an ASCII domain name to a Unicode internationalized
|
||||||
|
# domain name as described in RFC 3490.
|
||||||
|
def self.to_unicode(input)
|
||||||
|
input = input.to_s unless input.is_a?(String)
|
||||||
|
parts = input.split('.')
|
||||||
|
parts.map! do |part|
|
||||||
|
if part =~ /^#{ACE_PREFIX}(.+)/
|
||||||
|
begin
|
||||||
|
punycode_decode(part[/^#{ACE_PREFIX}(.+)/, 1])
|
||||||
|
rescue Addressable::IDNA::PunycodeBadInput
|
||||||
|
# toUnicode is explicitly defined as never-fails by the spec
|
||||||
|
part
|
||||||
|
end
|
||||||
|
else
|
||||||
|
part
|
||||||
|
end
|
||||||
|
end
|
||||||
|
output = parts.join('.')
|
||||||
|
if output.respond_to?(:force_encoding)
|
||||||
|
output.force_encoding(Encoding::UTF_8)
|
||||||
|
end
|
||||||
|
output
|
||||||
|
end
|
||||||
|
|
||||||
|
class << self
|
||||||
|
# @deprecated Use {String#unicode_normalize(:nfkc)} instead
|
||||||
|
def unicode_normalize_kc(value)
|
||||||
|
value.to_s.unicode_normalize(:nfkc)
|
||||||
|
end
|
||||||
|
|
||||||
|
extend Gem::Deprecate
|
||||||
|
deprecate :unicode_normalize_kc, "String#unicode_normalize(:nfkc)", 2023, 4
|
||||||
|
end
|
||||||
|
|
||||||
|
##
|
||||||
|
# Unicode aware downcase method.
|
||||||
|
#
|
||||||
|
# @api private
|
||||||
|
# @param [String] input
|
||||||
|
# The input string.
|
||||||
|
# @return [String] The downcased result.
|
||||||
|
def self.unicode_downcase(input)
|
||||||
|
input = input.to_s unless input.is_a?(String)
|
||||||
|
unpacked = input.unpack("U*")
|
||||||
|
unpacked.map! { |codepoint| lookup_unicode_lowercase(codepoint) }
|
||||||
|
return unpacked.pack("U*")
|
||||||
|
end
|
||||||
|
private_class_method :unicode_downcase
|
||||||
|
|
||||||
|
def self.lookup_unicode_lowercase(codepoint)
|
||||||
|
codepoint_data = UNICODE_DATA[codepoint]
|
||||||
|
(codepoint_data ?
|
||||||
|
(codepoint_data[UNICODE_DATA_LOWERCASE] || codepoint) :
|
||||||
|
codepoint)
|
||||||
|
end
|
||||||
|
private_class_method :lookup_unicode_lowercase
|
||||||
|
|
||||||
|
UNICODE_DATA_COMBINING_CLASS = 0
|
||||||
|
UNICODE_DATA_EXCLUSION = 1
|
||||||
|
UNICODE_DATA_CANONICAL = 2
|
||||||
|
UNICODE_DATA_COMPATIBILITY = 3
|
||||||
|
UNICODE_DATA_UPPERCASE = 4
|
||||||
|
UNICODE_DATA_LOWERCASE = 5
|
||||||
|
UNICODE_DATA_TITLECASE = 6
|
||||||
|
|
||||||
|
begin
|
||||||
|
if defined?(FakeFS)
|
||||||
|
fakefs_state = FakeFS.activated?
|
||||||
|
FakeFS.deactivate!
|
||||||
|
end
|
||||||
|
# This is a sparse Unicode table. Codepoints without entries are
|
||||||
|
# assumed to have the value: [0, 0, nil, nil, nil, nil, nil]
|
||||||
|
UNICODE_DATA = File.open(UNICODE_TABLE, "rb") do |file|
|
||||||
|
Marshal.load(file.read)
|
||||||
|
end
|
||||||
|
ensure
|
||||||
|
if defined?(FakeFS)
|
||||||
|
FakeFS.activate! if fakefs_state
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
COMPOSITION_TABLE = {}
|
||||||
|
UNICODE_DATA.each do |codepoint, data|
|
||||||
|
canonical = data[UNICODE_DATA_CANONICAL]
|
||||||
|
exclusion = data[UNICODE_DATA_EXCLUSION]
|
||||||
|
|
||||||
|
if canonical && exclusion == 0
|
||||||
|
COMPOSITION_TABLE[canonical.unpack("C*")] = codepoint
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
UNICODE_MAX_LENGTH = 256
|
||||||
|
ACE_MAX_LENGTH = 256
|
||||||
|
|
||||||
|
PUNYCODE_BASE = 36
|
||||||
|
PUNYCODE_TMIN = 1
|
||||||
|
PUNYCODE_TMAX = 26
|
||||||
|
PUNYCODE_SKEW = 38
|
||||||
|
PUNYCODE_DAMP = 700
|
||||||
|
PUNYCODE_INITIAL_BIAS = 72
|
||||||
|
PUNYCODE_INITIAL_N = 0x80
|
||||||
|
PUNYCODE_DELIMITER = 0x2D
|
||||||
|
|
||||||
|
PUNYCODE_MAXINT = 1 << 64
|
||||||
|
|
||||||
|
PUNYCODE_PRINT_ASCII =
|
||||||
|
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" +
|
||||||
|
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" +
|
||||||
|
" !\"\#$%&'()*+,-./" +
|
||||||
|
"0123456789:;<=>?" +
|
||||||
|
"@ABCDEFGHIJKLMNO" +
|
||||||
|
"PQRSTUVWXYZ[\\]^_" +
|
||||||
|
"`abcdefghijklmno" +
|
||||||
|
"pqrstuvwxyz{|}~\n"
|
||||||
|
|
||||||
|
# Input is invalid.
|
||||||
|
class PunycodeBadInput < StandardError; end
|
||||||
|
# Output would exceed the space provided.
|
||||||
|
class PunycodeBigOutput < StandardError; end
|
||||||
|
# Input needs wider integers to process.
|
||||||
|
class PunycodeOverflow < StandardError; end
|
||||||
|
|
||||||
|
def self.punycode_encode(unicode)
|
||||||
|
unicode = unicode.to_s unless unicode.is_a?(String)
|
||||||
|
input = unicode.unpack("U*")
|
||||||
|
output = [0] * (ACE_MAX_LENGTH + 1)
|
||||||
|
input_length = input.size
|
||||||
|
output_length = [ACE_MAX_LENGTH]
|
||||||
|
|
||||||
|
# Initialize the state
|
||||||
|
n = PUNYCODE_INITIAL_N
|
||||||
|
delta = out = 0
|
||||||
|
max_out = output_length[0]
|
||||||
|
bias = PUNYCODE_INITIAL_BIAS
|
||||||
|
|
||||||
|
# Handle the basic code points:
|
||||||
|
input_length.times do |j|
|
||||||
|
if punycode_basic?(input[j])
|
||||||
|
if max_out - out < 2
|
||||||
|
raise PunycodeBigOutput,
|
||||||
|
"Output would exceed the space provided."
|
||||||
|
end
|
||||||
|
output[out] = input[j]
|
||||||
|
out += 1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
h = b = out
|
||||||
|
|
||||||
|
# h is the number of code points that have been handled, b is the
|
||||||
|
# number of basic code points, and out is the number of characters
|
||||||
|
# that have been output.
|
||||||
|
|
||||||
|
if b > 0
|
||||||
|
output[out] = PUNYCODE_DELIMITER
|
||||||
|
out += 1
|
||||||
|
end
|
||||||
|
|
||||||
|
# Main encoding loop:
|
||||||
|
|
||||||
|
while h < input_length
|
||||||
|
# All non-basic code points < n have been
|
||||||
|
# handled already. Find the next larger one:
|
||||||
|
|
||||||
|
m = PUNYCODE_MAXINT
|
||||||
|
input_length.times do |j|
|
||||||
|
m = input[j] if (n...m) === input[j]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Increase delta enough to advance the decoder's
|
||||||
|
# <n,i> state to <m,0>, but guard against overflow:
|
||||||
|
|
||||||
|
if m - n > (PUNYCODE_MAXINT - delta) / (h + 1)
|
||||||
|
raise PunycodeOverflow, "Input needs wider integers to process."
|
||||||
|
end
|
||||||
|
delta += (m - n) * (h + 1)
|
||||||
|
n = m
|
||||||
|
|
||||||
|
input_length.times do |j|
|
||||||
|
# Punycode does not need to check whether input[j] is basic:
|
||||||
|
if input[j] < n
|
||||||
|
delta += 1
|
||||||
|
if delta == 0
|
||||||
|
raise PunycodeOverflow,
|
||||||
|
"Input needs wider integers to process."
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if input[j] == n
|
||||||
|
# Represent delta as a generalized variable-length integer:
|
||||||
|
|
||||||
|
q = delta; k = PUNYCODE_BASE
|
||||||
|
while true
|
||||||
|
if out >= max_out
|
||||||
|
raise PunycodeBigOutput,
|
||||||
|
"Output would exceed the space provided."
|
||||||
|
end
|
||||||
|
t = (
|
||||||
|
if k <= bias
|
||||||
|
PUNYCODE_TMIN
|
||||||
|
elsif k >= bias + PUNYCODE_TMAX
|
||||||
|
PUNYCODE_TMAX
|
||||||
|
else
|
||||||
|
k - bias
|
||||||
|
end
|
||||||
|
)
|
||||||
|
break if q < t
|
||||||
|
output[out] =
|
||||||
|
punycode_encode_digit(t + (q - t) % (PUNYCODE_BASE - t))
|
||||||
|
out += 1
|
||||||
|
q = (q - t) / (PUNYCODE_BASE - t)
|
||||||
|
k += PUNYCODE_BASE
|
||||||
|
end
|
||||||
|
|
||||||
|
output[out] = punycode_encode_digit(q)
|
||||||
|
out += 1
|
||||||
|
bias = punycode_adapt(delta, h + 1, h == b)
|
||||||
|
delta = 0
|
||||||
|
h += 1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
delta += 1
|
||||||
|
n += 1
|
||||||
|
end
|
||||||
|
|
||||||
|
output_length[0] = out
|
||||||
|
|
||||||
|
outlen = out
|
||||||
|
outlen.times do |j|
|
||||||
|
c = output[j]
|
||||||
|
unless c >= 0 && c <= 127
|
||||||
|
raise StandardError, "Invalid output char."
|
||||||
|
end
|
||||||
|
unless PUNYCODE_PRINT_ASCII[c]
|
||||||
|
raise PunycodeBadInput, "Input is invalid."
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
output[0..outlen].map { |x| x.chr }.join("").sub(/\0+\z/, "")
|
||||||
|
end
|
||||||
|
private_class_method :punycode_encode
|
||||||
|
|
||||||
|
def self.punycode_decode(punycode)
|
||||||
|
input = []
|
||||||
|
output = []
|
||||||
|
|
||||||
|
if ACE_MAX_LENGTH * 2 < punycode.size
|
||||||
|
raise PunycodeBigOutput, "Output would exceed the space provided."
|
||||||
|
end
|
||||||
|
punycode.each_byte do |c|
|
||||||
|
unless c >= 0 && c <= 127
|
||||||
|
raise PunycodeBadInput, "Input is invalid."
|
||||||
|
end
|
||||||
|
input.push(c)
|
||||||
|
end
|
||||||
|
|
||||||
|
input_length = input.length
|
||||||
|
output_length = [UNICODE_MAX_LENGTH]
|
||||||
|
|
||||||
|
# Initialize the state
|
||||||
|
n = PUNYCODE_INITIAL_N
|
||||||
|
|
||||||
|
out = i = 0
|
||||||
|
max_out = output_length[0]
|
||||||
|
bias = PUNYCODE_INITIAL_BIAS
|
||||||
|
|
||||||
|
# Handle the basic code points: Let b be the number of input code
|
||||||
|
# points before the last delimiter, or 0 if there is none, then
|
||||||
|
# copy the first b code points to the output.
|
||||||
|
|
||||||
|
b = 0
|
||||||
|
input_length.times do |j|
|
||||||
|
b = j if punycode_delimiter?(input[j])
|
||||||
|
end
|
||||||
|
if b > max_out
|
||||||
|
raise PunycodeBigOutput, "Output would exceed the space provided."
|
||||||
|
end
|
||||||
|
|
||||||
|
b.times do |j|
|
||||||
|
unless punycode_basic?(input[j])
|
||||||
|
raise PunycodeBadInput, "Input is invalid."
|
||||||
|
end
|
||||||
|
output[out] = input[j]
|
||||||
|
out+=1
|
||||||
|
end
|
||||||
|
|
||||||
|
# Main decoding loop: Start just after the last delimiter if any
|
||||||
|
# basic code points were copied; start at the beginning otherwise.
|
||||||
|
|
||||||
|
in_ = b > 0 ? b + 1 : 0
|
||||||
|
while in_ < input_length
|
||||||
|
|
||||||
|
# in_ is the index of the next character to be consumed, and
|
||||||
|
# out is the number of code points in the output array.
|
||||||
|
|
||||||
|
# Decode a generalized variable-length integer into delta,
|
||||||
|
# which gets added to i. The overflow checking is easier
|
||||||
|
# if we increase i as we go, then subtract off its starting
|
||||||
|
# value at the end to obtain delta.
|
||||||
|
|
||||||
|
oldi = i; w = 1; k = PUNYCODE_BASE
|
||||||
|
while true
|
||||||
|
if in_ >= input_length
|
||||||
|
raise PunycodeBadInput, "Input is invalid."
|
||||||
|
end
|
||||||
|
digit = punycode_decode_digit(input[in_])
|
||||||
|
in_+=1
|
||||||
|
if digit >= PUNYCODE_BASE
|
||||||
|
raise PunycodeBadInput, "Input is invalid."
|
||||||
|
end
|
||||||
|
if digit > (PUNYCODE_MAXINT - i) / w
|
||||||
|
raise PunycodeOverflow, "Input needs wider integers to process."
|
||||||
|
end
|
||||||
|
i += digit * w
|
||||||
|
t = (
|
||||||
|
if k <= bias
|
||||||
|
PUNYCODE_TMIN
|
||||||
|
elsif k >= bias + PUNYCODE_TMAX
|
||||||
|
PUNYCODE_TMAX
|
||||||
|
else
|
||||||
|
k - bias
|
||||||
|
end
|
||||||
|
)
|
||||||
|
break if digit < t
|
||||||
|
if w > PUNYCODE_MAXINT / (PUNYCODE_BASE - t)
|
||||||
|
raise PunycodeOverflow, "Input needs wider integers to process."
|
||||||
|
end
|
||||||
|
w *= PUNYCODE_BASE - t
|
||||||
|
k += PUNYCODE_BASE
|
||||||
|
end
|
||||||
|
|
||||||
|
bias = punycode_adapt(i - oldi, out + 1, oldi == 0)
|
||||||
|
|
||||||
|
# I was supposed to wrap around from out + 1 to 0,
|
||||||
|
# incrementing n each time, so we'll fix that now:
|
||||||
|
|
||||||
|
if i / (out + 1) > PUNYCODE_MAXINT - n
|
||||||
|
raise PunycodeOverflow, "Input needs wider integers to process."
|
||||||
|
end
|
||||||
|
n += i / (out + 1)
|
||||||
|
i %= out + 1
|
||||||
|
|
||||||
|
# Insert n at position i of the output:
|
||||||
|
|
||||||
|
# not needed for Punycode:
|
||||||
|
# raise PUNYCODE_INVALID_INPUT if decode_digit(n) <= base
|
||||||
|
if out >= max_out
|
||||||
|
raise PunycodeBigOutput, "Output would exceed the space provided."
|
||||||
|
end
|
||||||
|
|
||||||
|
#memmove(output + i + 1, output + i, (out - i) * sizeof *output)
|
||||||
|
output[i + 1, out - i] = output[i, out - i]
|
||||||
|
output[i] = n
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
out += 1
|
||||||
|
end
|
||||||
|
|
||||||
|
output_length[0] = out
|
||||||
|
|
||||||
|
output.pack("U*")
|
||||||
|
end
|
||||||
|
private_class_method :punycode_decode
|
||||||
|
|
||||||
|
def self.punycode_basic?(codepoint)
|
||||||
|
codepoint < 0x80
|
||||||
|
end
|
||||||
|
private_class_method :punycode_basic?
|
||||||
|
|
||||||
|
def self.punycode_delimiter?(codepoint)
|
||||||
|
codepoint == PUNYCODE_DELIMITER
|
||||||
|
end
|
||||||
|
private_class_method :punycode_delimiter?
|
||||||
|
|
||||||
|
def self.punycode_encode_digit(d)
|
||||||
|
d + 22 + 75 * ((d < 26) ? 1 : 0)
|
||||||
|
end
|
||||||
|
private_class_method :punycode_encode_digit
|
||||||
|
|
||||||
|
# Returns the numeric value of a basic codepoint
|
||||||
|
# (for use in representing integers) in the range 0 to
|
||||||
|
# base - 1, or PUNYCODE_BASE if codepoint does not represent a value.
|
||||||
|
def self.punycode_decode_digit(codepoint)
|
||||||
|
if codepoint - 48 < 10
|
||||||
|
codepoint - 22
|
||||||
|
elsif codepoint - 65 < 26
|
||||||
|
codepoint - 65
|
||||||
|
elsif codepoint - 97 < 26
|
||||||
|
codepoint - 97
|
||||||
|
else
|
||||||
|
PUNYCODE_BASE
|
||||||
|
end
|
||||||
|
end
|
||||||
|
private_class_method :punycode_decode_digit
|
||||||
|
|
||||||
|
# Bias adaptation method
|
||||||
|
def self.punycode_adapt(delta, numpoints, firsttime)
|
||||||
|
delta = firsttime ? delta / PUNYCODE_DAMP : delta >> 1
|
||||||
|
# delta >> 1 is a faster way of doing delta / 2
|
||||||
|
delta += delta / numpoints
|
||||||
|
difference = PUNYCODE_BASE - PUNYCODE_TMIN
|
||||||
|
|
||||||
|
k = 0
|
||||||
|
while delta > (difference * PUNYCODE_TMAX) / 2
|
||||||
|
delta /= difference
|
||||||
|
k += PUNYCODE_BASE
|
||||||
|
end
|
||||||
|
|
||||||
|
k + (difference + 1) * delta / (delta + PUNYCODE_SKEW)
|
||||||
|
end
|
||||||
|
private_class_method :punycode_adapt
|
||||||
|
end
|
||||||
|
# :startdoc:
|
||||||
|
end
|
||||||
1029
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/template.rb
vendored
Normal file
File diff suppressed because it is too large
2591
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/uri.rb
vendored
Normal file
File diff suppressed because it is too large
31
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/addressable-2.8.5/lib/addressable/version.rb
vendored
Normal file
@ -0,0 +1,31 @@
# frozen_string_literal: true

#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++


# Used to prevent the class/module from being loaded more than once
if !defined?(Addressable::VERSION)
  module Addressable
    module VERSION
      MAJOR = 2
      MINOR = 8
      TINY = 5

      STRING = [MAJOR, MINOR, TINY].join('.')
    end
  end
end
25
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/LICENSE
vendored
Normal file
@ -0,0 +1,25 @@
BSD 2-Clause License

Copyright (c) 2007-2022, Dion Mendel
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata.rb
vendored
Normal file
@ -0,0 +1,37 @@
# BinData -- Binary data manipulator.
# Copyright (c) 2007 - 2018 Dion Mendel.

require 'bindata/version'
require 'bindata/array'
require 'bindata/bits'
require 'bindata/buffer'
require 'bindata/choice'
require 'bindata/count_bytes_remaining'
require 'bindata/delayed_io'
require 'bindata/float'
require 'bindata/int'
require 'bindata/primitive'
require 'bindata/record'
require 'bindata/rest'
require 'bindata/skip'
require 'bindata/string'
require 'bindata/stringz'
require 'bindata/struct'
require 'bindata/trace'
require 'bindata/uint8_array'
require 'bindata/virtual'
require 'bindata/alignment'
require 'bindata/warnings'

# = BinData
#
# A declarative way to read and write structured binary data.
#
# A full reference manual is available online at
# https://github.com/dmendel/bindata/wiki
#
# == License
#
# BinData is released under the same license as Ruby.
#
# Copyright (c) 2007 - 2018 Dion Mendel.
79
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/alignment.rb
vendored
Normal file
@ -0,0 +1,79 @@
require 'bindata/base_primitive'

module BinData
  # Resets the stream alignment to the next byte. This is
  # only useful when using bit-based primitives.
  #
  #   class MyRec < BinData::Record
  #     bit4 :a
  #     resume_byte_alignment
  #     bit4 :b
  #   end
  #
  #   MyRec.read("\x12\x34") #=> {"a" => 1, "b" => 3}
  #
  class ResumeByteAlignment < BinData::Base
    def clear?; true; end
    def assign(val); end
    def snapshot; nil; end
    def do_num_bytes; 0; end

    def do_read(io)
      io.reset_read_bits
    end

    def do_write(io)
      io.flushbits
    end
  end

  # A monkey patch to force byte-aligned primitives to
  # become bit-aligned. This allows them to be used at
  # non byte based boundaries.
  #
  #   class BitString < BinData::String
  #     bit_aligned
  #   end
  #
  #   class MyRecord < BinData::Record
  #     bit4 :preamble
  #     bit_string :str, length: 2
  #   end
  #
  module BitAligned
    class BitAlignedIO
      def initialize(io)
        @io = io
      end
      def readbytes(n)
        n.times.inject("") do |bytes, _|
          bytes += @io.readbits(8, :big).chr
        end
      end
    end

    def bit_aligned?
      true
    end

    def read_and_return_value(io)
      super(BitAlignedIO.new(io))
    end

    def do_num_bytes
      super.to_f
    end

    def do_write(io)
      value_to_binary_string(_value).each_byte { |v| io.writebits(v, 8, :big) }
    end
  end

  def BasePrimitive.bit_aligned
    include BitAligned
  end

  def Primitive.bit_aligned
    fail "'bit_aligned' is not needed for BinData::Primitives"
  end
end
344
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/array.rb
vendored
Normal file
@ -0,0 +1,344 @@
|
|||||||
|
require 'bindata/base'
|
||||||
|
require 'bindata/dsl'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# An Array is a list of data objects of the same type.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# data = "\x03\x04\x05\x06\x07\x08\x09"
|
||||||
|
#
|
||||||
|
# obj = BinData::Array.new(type: :int8, initial_length: 6)
|
||||||
|
# obj.read(data) #=> [3, 4, 5, 6, 7, 8]
|
||||||
|
#
|
||||||
|
# obj = BinData::Array.new(type: :int8,
|
||||||
|
# read_until: -> { index == 1 })
|
||||||
|
# obj.read(data) #=> [3, 4]
|
||||||
|
#
|
||||||
|
# obj = BinData::Array.new(type: :int8,
|
||||||
|
# read_until: -> { element >= 6 })
|
||||||
|
# obj.read(data) #=> [3, 4, 5, 6]
|
||||||
|
#
|
||||||
|
# obj = BinData::Array.new(type: :int8,
|
||||||
|
# read_until: -> { array[index] + array[index - 1] == 13 })
|
||||||
|
# obj.read(data) #=> [3, 4, 5, 6, 7]
|
||||||
|
#
|
||||||
|
# obj = BinData::Array.new(type: :int8, read_until: :eof)
|
||||||
|
# obj.read(data) #=> [3, 4, 5, 6, 7, 8, 9]
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These params are:
|
||||||
|
#
|
||||||
|
# <tt>:type</tt>:: The symbol representing the data type of the
|
||||||
|
# array elements. If the type is to have params
|
||||||
|
# passed to it, then it should be provided as
|
||||||
|
# <tt>[type_symbol, hash_params]</tt>.
|
||||||
|
# <tt>:initial_length</tt>:: The initial length of the array.
|
||||||
|
# <tt>:read_until</tt>:: While reading, elements are read until this
|
||||||
|
# condition is true. This is typically used to
|
||||||
|
# read an array until a sentinel value is found.
|
||||||
|
# The variables +index+, +element+ and +array+
|
||||||
|
# are made available to any lambda assigned to
|
||||||
|
# this parameter. If the value of this parameter
|
||||||
|
# is the symbol :eof, then the array will read
|
||||||
|
# as much data from the stream as possible.
|
||||||
|
#
|
||||||
|
# Each data object in an array has the variable +index+ made available
|
||||||
|
# to any lambda evaluated as a parameter of that data object.
|
||||||
|
class Array < BinData::Base
|
||||||
|
extend DSLMixin
|
||||||
|
include Enumerable
|
||||||
|
|
||||||
|
dsl_parser :array
|
||||||
|
arg_processor :array
|
||||||
|
|
||||||
|
mandatory_parameter :type
|
||||||
|
optional_parameters :initial_length, :read_until
|
||||||
|
mutually_exclusive_parameters :initial_length, :read_until
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
@element_prototype = get_parameter(:type)
|
||||||
|
if get_parameter(:read_until) == :eof
|
||||||
|
extend ReadUntilEOFPlugin
|
||||||
|
elsif has_parameter?(:read_until)
|
||||||
|
extend ReadUntilPlugin
|
||||||
|
elsif has_parameter?(:initial_length)
|
||||||
|
extend InitialLengthPlugin
|
||||||
|
end
|
||||||
|
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
@element_list = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def clear?
|
||||||
|
@element_list.nil? || elements.all?(&:clear?)
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(array)
|
||||||
|
return if self.equal?(array) # prevent self assignment
|
||||||
|
raise ArgumentError, "can't set a nil value for #{debug_name}" if array.nil?
|
||||||
|
|
||||||
|
@element_list = []
|
||||||
|
concat(array)
|
||||||
|
end
|
||||||
|
|
||||||
|
def snapshot
|
||||||
|
elements.collect(&:snapshot)
|
||||||
|
end
|
||||||
|
|
||||||
|
def find_index(obj)
|
||||||
|
elements.index(obj)
|
||||||
|
end
|
||||||
|
alias index find_index
|
||||||
|
|
||||||
|
# Returns the first index of +obj+ in self.
|
||||||
|
#
|
||||||
|
# Uses equal? for the comparator.
|
||||||
|
def find_index_of(obj)
|
||||||
|
elements.index { |el| el.equal?(obj) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def push(*args)
|
||||||
|
insert(-1, *args)
|
||||||
|
self
|
||||||
|
end
|
||||||
|
alias << push
|
||||||
|
|
||||||
|
def unshift(*args)
|
||||||
|
insert(0, *args)
|
||||||
|
self
|
||||||
|
end
|
||||||
|
|
||||||
|
def concat(array)
|
||||||
|
insert(-1, *array.to_ary)
|
||||||
|
self
|
||||||
|
end
|
||||||
|
|
||||||
|
def insert(index, *objs)
|
||||||
|
extend_array(index - 1)
|
||||||
|
abs_index = (index >= 0) ? index : index + 1 + length
|
||||||
|
|
||||||
|
# insert elements before...
|
||||||
|
new_elements = objs.map { new_element }
|
||||||
|
elements.insert(index, *new_elements)
|
||||||
|
|
||||||
|
# ...assigning values
|
||||||
|
objs.each_with_index do |obj, i|
|
||||||
|
self[abs_index + i] = obj
|
||||||
|
end
|
||||||
|
|
||||||
|
self
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the element at +index+.
|
||||||
|
def [](arg1, arg2 = nil)
|
||||||
|
if arg1.respond_to?(:to_int) && arg2.nil?
|
||||||
|
slice_index(arg1.to_int)
|
||||||
|
elsif arg1.respond_to?(:to_int) && arg2.respond_to?(:to_int)
|
||||||
|
slice_start_length(arg1.to_int, arg2.to_int)
|
||||||
|
elsif arg1.is_a?(Range) && arg2.nil?
|
||||||
|
slice_range(arg1)
|
||||||
|
else
|
||||||
|
raise TypeError, "can't convert #{arg1} into Integer" unless arg1.respond_to?(:to_int)
|
||||||
|
raise TypeError, "can't convert #{arg2} into Integer" unless arg2.respond_to?(:to_int)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
alias slice []
|
||||||
|
|
||||||
|
def slice_index(index)
|
||||||
|
extend_array(index)
|
||||||
|
at(index)
|
||||||
|
end
|
||||||
|
|
||||||
|
def slice_start_length(start, length)
|
||||||
|
elements[start, length]
|
||||||
|
end
|
||||||
|
|
||||||
|
def slice_range(range)
|
||||||
|
elements[range]
|
||||||
|
end
|
||||||
|
private :slice_index, :slice_start_length, :slice_range
|
||||||
|
|
||||||
|
# Returns the element at +index+. Unlike +slice+, if +index+ is out
|
||||||
|
# of range the array will not be automatically extended.
|
||||||
|
def at(index)
|
||||||
|
elements[index]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Sets the element at +index+.
|
||||||
|
def []=(index, value)
|
||||||
|
extend_array(index)
|
||||||
|
elements[index].assign(value)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the first element, or the first +n+ elements, of the array.
|
||||||
|
# If the array is empty, the first form returns nil, and the second
|
||||||
|
# form returns an empty array.
|
||||||
|
def first(n = nil)
|
||||||
|
if n.nil? && empty?
|
||||||
|
# explicitly return nil as arrays grow automatically
|
||||||
|
nil
|
||||||
|
elsif n.nil?
|
||||||
|
self[0]
|
||||||
|
else
|
||||||
|
self[0, n]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the last element, or the last +n+ elements, of the array.
|
||||||
|
# If the array is empty, the first form returns nil, and the second
|
||||||
|
# form returns an empty array.
|
||||||
|
def last(n = nil)
|
||||||
|
if n.nil?
|
||||||
|
self[-1]
|
||||||
|
else
|
||||||
|
n = length if n > length
|
||||||
|
self[-n, n]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def length
|
||||||
|
elements.length
|
||||||
|
end
|
||||||
|
alias size length
|
||||||
|
|
||||||
|
def empty?
|
||||||
|
length.zero?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Allow this object to be used in array context.
|
||||||
|
def to_ary
|
||||||
|
collect { |el| el }
|
||||||
|
end
|
||||||
|
|
||||||
|
def each
|
||||||
|
elements.each { |el| yield el }
|
||||||
|
end
|
||||||
|
|
||||||
|
def debug_name_of(child) #:nodoc:
|
||||||
|
index = find_index_of(child)
|
||||||
|
"#{debug_name}[#{index}]"
|
||||||
|
end
|
||||||
|
|
||||||
|
def offset_of(child) #:nodoc:
|
||||||
|
index = find_index_of(child)
|
||||||
|
sum = sum_num_bytes_below_index(index)
|
||||||
|
|
||||||
|
child.bit_aligned? ? sum.floor : sum.ceil
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io) #:nodoc:
|
||||||
|
elements.each { |el| el.do_write(io) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes #:nodoc:
|
||||||
|
sum_num_bytes_for_all_elements
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def extend_array(max_index)
|
||||||
|
max_length = max_index + 1
|
||||||
|
while elements.length < max_length
|
||||||
|
append_new_element
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def elements
|
||||||
|
@element_list ||= []
|
||||||
|
end
|
||||||
|
|
||||||
|
def append_new_element
|
||||||
|
element = new_element
|
||||||
|
elements << element
|
||||||
|
element
|
||||||
|
end
|
||||||
|
|
||||||
|
def new_element
|
||||||
|
@element_prototype.instantiate(nil, self)
|
||||||
|
end
|
||||||
|
|
||||||
|
def sum_num_bytes_for_all_elements
|
||||||
|
sum_num_bytes_below_index(length)
|
||||||
|
end
|
||||||
|
|
||||||
|
def sum_num_bytes_below_index(index)
|
||||||
|
(0...index).inject(0) do |sum, i|
|
||||||
|
nbytes = elements[i].do_num_bytes
|
||||||
|
|
||||||
|
if nbytes.is_a?(Integer)
|
||||||
|
sum.ceil + nbytes
|
||||||
|
else
|
||||||
|
sum + nbytes
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class ArrayArgProcessor < BaseArgProcessor
|
||||||
|
def sanitize_parameters!(obj_class, params) #:nodoc:
|
||||||
|
# ensure one of :initial_length and :read_until exists
|
||||||
|
unless params.has_at_least_one_of?(:initial_length, :read_until)
|
||||||
|
params[:initial_length] = 0
|
||||||
|
end
|
||||||
|
|
||||||
|
params.warn_replacement_parameter(:length, :initial_length)
|
||||||
|
params.warn_replacement_parameter(:read_length, :initial_length)
|
||||||
|
params.must_be_integer(:initial_length)
|
||||||
|
|
||||||
|
params.merge!(obj_class.dsl_params)
|
||||||
|
params.sanitize_object_prototype(:type)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :read_until parameter
|
||||||
|
module ReadUntilPlugin
|
||||||
|
def do_read(io)
|
||||||
|
loop do
|
||||||
|
element = append_new_element
|
||||||
|
element.do_read(io)
|
||||||
|
variables = { index: self.length - 1, element: self.last, array: self }
|
||||||
|
break if eval_parameter(:read_until, variables)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the read_until: :eof parameter
|
||||||
|
module ReadUntilEOFPlugin
|
||||||
|
def do_read(io)
|
||||||
|
loop do
|
||||||
|
element = append_new_element
|
||||||
|
begin
|
||||||
|
element.do_read(io)
|
||||||
|
rescue EOFError, IOError
|
||||||
|
elements.pop
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :initial_length parameter
|
||||||
|
module InitialLengthPlugin
|
||||||
|
def do_read(io)
|
||||||
|
elements.each { |el| el.do_read(io) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def elements
|
||||||
|
if @element_list.nil?
|
||||||
|
@element_list = []
|
||||||
|
eval_parameter(:initial_length).times do
|
||||||
|
@element_list << new_element
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@element_list
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
335
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/base.rb
vendored
Normal file
@ -0,0 +1,335 @@
|
|||||||
|
require 'bindata/framework'
|
||||||
|
require 'bindata/io'
|
||||||
|
require 'bindata/lazy'
|
||||||
|
require 'bindata/name'
|
||||||
|
require 'bindata/params'
|
||||||
|
require 'bindata/registry'
|
||||||
|
require 'bindata/sanitize'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# This is the abstract base class for all data objects.
|
||||||
|
class Base
|
||||||
|
extend AcceptedParametersPlugin
|
||||||
|
include Framework
|
||||||
|
include RegisterNamePlugin
|
||||||
|
|
||||||
|
class << self
|
||||||
|
# Instantiates this class and reads from +io+, returning the newly
|
||||||
|
# created data object. +args+ will be used when instantiating.
|
||||||
|
def read(io, *args, &block)
|
||||||
|
obj = self.new(*args)
|
||||||
|
obj.read(io, &block)
|
||||||
|
obj
|
||||||
|
end
|
||||||
|
|
||||||
|
# The arg processor for this class.
|
||||||
|
def arg_processor(name = nil)
|
||||||
|
@arg_processor ||= nil
|
||||||
|
|
||||||
|
if name
|
||||||
|
@arg_processor = "#{name}_arg_processor".gsub(/(?:^|_)(.)/) { $1.upcase }.to_sym
|
||||||
|
elsif @arg_processor.is_a? Symbol
|
||||||
|
@arg_processor = BinData.const_get(@arg_processor).new
|
||||||
|
elsif @arg_processor.nil?
|
||||||
|
@arg_processor = superclass.arg_processor
|
||||||
|
else
|
||||||
|
@arg_processor
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# The name of this class as used by Records, Arrays etc.
|
||||||
|
def bindata_name
|
||||||
|
RegisteredClasses.underscore_name(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Call this method if this class is abstract and not to be used.
|
||||||
|
def unregister_self
|
||||||
|
RegisteredClasses.unregister(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Registers all subclasses of this class for use
|
||||||
|
def register_subclasses #:nodoc:
|
||||||
|
singleton_class.send(:undef_method, :inherited)
|
||||||
|
define_singleton_method(:inherited) do |subclass|
|
||||||
|
RegisteredClasses.register(subclass.name, subclass)
|
||||||
|
register_subclasses
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
private :unregister_self, :register_subclasses
|
||||||
|
end
|
||||||
|
|
||||||
|
# Register all subclasses of this class.
|
||||||
|
register_subclasses
|
||||||
|
|
||||||
|
# Set the initial arg processor.
|
||||||
|
arg_processor :base
|
||||||
|
|
||||||
|
# Creates a new data object.
|
||||||
|
#
|
||||||
|
# Args are optional, but if present, must be in the following order.
|
||||||
|
#
|
||||||
|
# +value+ is a value that is +assign+ed immediately after initialization.
|
||||||
|
#
|
||||||
|
# +parameters+ is a hash containing symbol keys. Some parameters may
|
||||||
|
# reference callable objects (methods or procs).
|
||||||
|
#
|
||||||
|
# +parent+ is the parent data object (e.g. struct, array, choice) this
|
||||||
|
# object resides under.
|
||||||
|
#
|
||||||
|
def initialize(*args)
|
||||||
|
value, @params, @parent = extract_args(args)
|
||||||
|
|
||||||
|
initialize_shared_instance
|
||||||
|
initialize_instance
|
||||||
|
assign(value) if value
|
||||||
|
end
|
||||||
|
|
||||||
|
attr_accessor :parent
|
||||||
|
protected :parent=
|
||||||
|
|
||||||
|
# Creates a new data object based on this instance.
|
||||||
|
#
|
||||||
|
# All parameters will be be duplicated. Use this method
|
||||||
|
# when creating multiple objects with the same parameters.
|
||||||
|
def new(value = nil, parent = nil)
|
||||||
|
obj = clone
|
||||||
|
obj.parent = parent if parent
|
||||||
|
obj.initialize_instance
|
||||||
|
obj.assign(value) if value
|
||||||
|
|
||||||
|
obj
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the result of evaluating the parameter identified by +key+.
|
||||||
|
#
|
||||||
|
# +overrides+ is an optional +parameters+ like hash that allow the
|
||||||
|
# parameters given at object construction to be overridden.
|
||||||
|
#
|
||||||
|
# Returns nil if +key+ does not refer to any parameter.
|
||||||
|
def eval_parameter(key, overrides = nil)
|
||||||
|
value = get_parameter(key)
|
||||||
|
if value.is_a?(Symbol) || value.respond_to?(:arity)
|
||||||
|
lazy_evaluator.lazy_eval(value, overrides)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns a lazy evaluator for this object.
|
||||||
|
def lazy_evaluator #:nodoc:
|
||||||
|
@lazy ||= LazyEvaluator.new(self)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the parameter referenced by +key+.
|
||||||
|
# Use this method if you are sure the parameter is not to be evaluated.
|
||||||
|
# You most likely want #eval_parameter.
|
||||||
|
def get_parameter(key)
|
||||||
|
@params[key]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns whether +key+ exists in the +parameters+ hash.
|
||||||
|
def has_parameter?(key)
|
||||||
|
@params.has_parameter?(key)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Resets the internal state to that of a newly created object.
|
||||||
|
def clear
|
||||||
|
initialize_instance
|
||||||
|
end
|
||||||
|
|
||||||
|
# Reads data into this data object.
|
||||||
|
def read(io, &block)
|
||||||
|
io = BinData::IO::Read.new(io) unless BinData::IO::Read === io
|
||||||
|
|
||||||
|
start_read do
|
||||||
|
clear
|
||||||
|
do_read(io)
|
||||||
|
end
|
||||||
|
block.call(self) if block_given?
|
||||||
|
|
||||||
|
self
|
||||||
|
end
|
||||||
|
|
||||||
|
# Writes the value for this data object to +io+.
|
||||||
|
def write(io, &block)
|
||||||
|
io = BinData::IO::Write.new(io) unless BinData::IO::Write === io
|
||||||
|
|
||||||
|
do_write(io)
|
||||||
|
io.flush
|
||||||
|
|
||||||
|
block.call(self) if block_given?
|
||||||
|
|
||||||
|
self
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the number of bytes it will take to write this data object.
|
||||||
|
def num_bytes
|
||||||
|
do_num_bytes.ceil
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the string representation of this data object.
|
||||||
|
def to_binary_s(&block)
|
||||||
|
io = BinData::IO.create_string_io
|
||||||
|
write(io, &block)
|
||||||
|
io.string
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the hexadecimal string representation of this data object.
|
||||||
|
def to_hex(&block)
|
||||||
|
to_binary_s(&block).unpack('H*')[0]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return a human readable representation of this data object.
|
||||||
|
def inspect
|
||||||
|
snapshot.inspect
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return a string representing this data object.
|
||||||
|
def to_s
|
||||||
|
snapshot.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
# Work with Ruby's pretty-printer library.
|
||||||
|
def pretty_print(pp) #:nodoc:
|
||||||
|
pp.pp(snapshot)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Override and delegate =~ as it is defined in Object.
|
||||||
|
def =~(other)
|
||||||
|
snapshot =~ other
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns a user friendly name of this object for debugging purposes.
|
||||||
|
def debug_name
|
||||||
|
if @parent
|
||||||
|
@parent.debug_name_of(self)
|
||||||
|
else
|
||||||
|
"obj"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the offset (in bytes) of this object with respect to its most
|
||||||
|
# distant ancestor.
|
||||||
|
def abs_offset
|
||||||
|
if @parent
|
||||||
|
@parent.abs_offset + @parent.offset_of(self)
|
||||||
|
else
|
||||||
|
0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the offset (in bytes) of this object with respect to its parent.
|
||||||
|
def rel_offset
|
||||||
|
if @parent
|
||||||
|
@parent.offset_of(self)
|
||||||
|
else
|
||||||
|
0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def ==(other) #:nodoc:
|
||||||
|
# double dispatch
|
||||||
|
other == snapshot
|
||||||
|
end
|
||||||
|
|
||||||
|
# A version of +respond_to?+ used by the lazy evaluator. It doesn't
|
||||||
|
# reinvoke the evaluator so as to avoid infinite evaluation loops.
|
||||||
|
def safe_respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
base_respond_to?(symbol, include_private)
|
||||||
|
end
|
||||||
|
|
||||||
|
alias base_respond_to? respond_to?
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def extract_args(args)
|
||||||
|
self.class.arg_processor.extract_args(self.class, args)
|
||||||
|
end
|
||||||
|
|
||||||
|
def start_read
|
||||||
|
top_level_set(:in_read, true)
|
||||||
|
yield
|
||||||
|
ensure
|
||||||
|
top_level_set(:in_read, false)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Is this object tree currently being read? Used by BasePrimitive.
|
||||||
|
def reading?
|
||||||
|
top_level_get(:in_read)
|
||||||
|
end
|
||||||
|
|
||||||
|
def top_level_set(sym, value)
|
||||||
|
top_level.instance_variable_set("@tl_#{sym}", value)
|
||||||
|
end
|
||||||
|
|
||||||
|
def top_level_get(sym)
|
||||||
|
tl = top_level
|
||||||
|
tl.instance_variable_defined?("@tl_#{sym}") &&
|
||||||
|
tl.instance_variable_get("@tl_#{sym}")
|
||||||
|
end
|
||||||
|
|
||||||
|
def top_level
|
||||||
|
if parent.nil?
|
||||||
|
tl = self
|
||||||
|
else
|
||||||
|
tl = parent
|
||||||
|
tl = tl.parent while tl.parent
|
||||||
|
end
|
||||||
|
|
||||||
|
tl
|
||||||
|
end
|
||||||
|
|
||||||
|
def binary_string(str)
|
||||||
|
str.to_s.dup.force_encoding(Encoding::BINARY)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# ArgProcessors process the arguments passed to BinData::Base.new into
|
||||||
|
# the form required to initialise the BinData object.
|
||||||
|
#
|
||||||
|
# Any passed parameters are sanitized so the BinData object doesn't
|
||||||
|
# need to perform error checking on the parameters.
|
||||||
|
class BaseArgProcessor
|
||||||
|
@@empty_hash = Hash.new.freeze
|
||||||
|
|
||||||
|
# Takes the arguments passed to BinData::Base.new and
|
||||||
|
# extracts [value, sanitized_parameters, parent].
|
||||||
|
def extract_args(obj_class, obj_args)
|
||||||
|
value, params, parent = separate_args(obj_class, obj_args)
|
||||||
|
sanitized_params = SanitizedParameters.sanitize(params, obj_class)
|
||||||
|
|
||||||
|
[value, sanitized_params, parent]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Separates the arguments passed to BinData::Base.new into
|
||||||
|
# [value, parameters, parent]. Called by #extract_args.
|
||||||
|
def separate_args(_obj_class, obj_args)
|
||||||
|
args = obj_args.dup
|
||||||
|
value = parameters = parent = nil
|
||||||
|
|
||||||
|
if args.length > 1 && args.last.is_a?(BinData::Base)
|
||||||
|
parent = args.pop
|
||||||
|
end
|
||||||
|
|
||||||
|
if args.length > 0 && args.last.is_a?(Hash)
|
||||||
|
parameters = args.pop
|
||||||
|
end
|
||||||
|
|
||||||
|
if args.length > 0
|
||||||
|
value = args.pop
|
||||||
|
end
|
||||||
|
|
||||||
|
parameters ||= @@empty_hash
|
||||||
|
|
||||||
|
[value, parameters, parent]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Performs sanity checks on the given parameters.
|
||||||
|
# This method converts the parameters to the form expected
|
||||||
|
# by the data object.
|
||||||
|
def sanitize_parameters!(obj_class, obj_params)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
248
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/base_primitive.rb
vendored
Normal file
@ -0,0 +1,248 @@
|
|||||||
|
require 'bindata/base'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A BinData::BasePrimitive object is a container for a value that has a
|
||||||
|
# particular binary representation. A value corresponds to a primitive type
|
||||||
|
# such as as integer, float or string. Only one value can be contained by
|
||||||
|
# this object. This value can be read from or written to an IO stream.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# obj = BinData::Uint8.new(initial_value: 42)
|
||||||
|
# obj #=> 42
|
||||||
|
# obj.assign(5)
|
||||||
|
# obj #=> 5
|
||||||
|
# obj.clear
|
||||||
|
# obj #=> 42
|
||||||
|
#
|
||||||
|
# obj = BinData::Uint8.new(value: 42)
|
||||||
|
# obj #=> 42
|
||||||
|
# obj.assign(5)
|
||||||
|
# obj #=> 42
|
||||||
|
#
|
||||||
|
# obj = BinData::Uint8.new(assert: 3)
|
||||||
|
# obj.read("\005") #=> BinData::ValidityError: value is '5' but expected '3'
|
||||||
|
#
|
||||||
|
# obj = BinData::Uint8.new(assert: -> { value < 5 })
|
||||||
|
# obj.read("\007") #=> BinData::ValidityError: value not as expected
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These params include those for BinData::Base as well as:
|
||||||
|
#
|
||||||
|
# [<tt>:initial_value</tt>] This is the initial value to use before one is
|
||||||
|
# either #read or explicitly set with #value=.
|
||||||
|
# [<tt>:value</tt>] The object will always have this value.
|
||||||
|
# Calls to #value= are ignored when
|
||||||
|
# using this param. While reading, #value
|
||||||
|
# will return the value of the data read from the
|
||||||
|
# IO, not the result of the <tt>:value</tt> param.
|
||||||
|
# [<tt>:assert</tt>] Raise an error unless the value read or assigned
|
||||||
|
# meets this criteria. The variable +value+ is
|
||||||
|
# made available to any lambda assigned to this
|
||||||
|
# parameter. A boolean return indicates success
|
||||||
|
# or failure. Any other return is compared to
|
||||||
|
# the value just read in.
|
||||||
|
# [<tt>:asserted_value</tt>] Equivalent to <tt>:assert</tt> and <tt>:value</tt>.
|
||||||
|
#
|
||||||
|
class BasePrimitive < BinData::Base
|
||||||
|
unregister_self
|
||||||
|
|
||||||
|
optional_parameters :initial_value, :value, :assert, :asserted_value
|
||||||
|
mutually_exclusive_parameters :initial_value, :value
|
||||||
|
mutually_exclusive_parameters :asserted_value, :value, :assert
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
extend InitialValuePlugin if has_parameter?(:initial_value)
|
||||||
|
extend ValuePlugin if has_parameter?(:value)
|
||||||
|
extend AssertPlugin if has_parameter?(:assert)
|
||||||
|
extend AssertedValuePlugin if has_parameter?(:asserted_value)
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
@value = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def clear? #:nodoc:
|
||||||
|
@value.nil?
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil?
|
||||||
|
|
||||||
|
raw_val = val.respond_to?(:snapshot) ? val.snapshot : val
|
||||||
|
@value =
|
||||||
|
begin
|
||||||
|
raw_val.dup
|
||||||
|
rescue TypeError
|
||||||
|
# can't dup Fixnums
|
||||||
|
raw_val
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def snapshot
|
||||||
|
_value
|
||||||
|
end
|
||||||
|
|
||||||
|
def value
|
||||||
|
snapshot
|
||||||
|
end
|
||||||
|
|
||||||
|
def value=(val)
|
||||||
|
assign(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
child = snapshot
|
||||||
|
child.respond_to?(symbol, include_private) || super
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
child = snapshot
|
||||||
|
if child.respond_to?(symbol)
|
||||||
|
self.class.class_eval \
|
||||||
|
"def #{symbol}(*args, &block);" \
|
||||||
|
" snapshot.#{symbol}(*args, &block);" \
|
||||||
|
"end"
|
||||||
|
child.__send__(symbol, *args, &block)
|
||||||
|
else
|
||||||
|
super
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def <=>(other)
|
||||||
|
snapshot <=> other
|
||||||
|
end
|
||||||
|
|
||||||
|
def eql?(other)
|
||||||
|
# double dispatch
|
||||||
|
other.eql?(snapshot)
|
||||||
|
end
|
||||||
|
|
||||||
|
def hash
|
||||||
|
snapshot.hash
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
@value = read_and_return_value(io)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io) #:nodoc:
|
||||||
|
io.writebytes(value_to_binary_string(_value))
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes #:nodoc:
|
||||||
|
value_to_binary_string(_value).length
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
# The unmodified value of this data object. Note that #snapshot calls this
|
||||||
|
# method. This indirection is so that #snapshot can be overridden in
|
||||||
|
# subclasses to modify the presentation value.
|
||||||
|
def _value
|
||||||
|
@value != nil ? @value : sensible_default
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :value parameter
|
||||||
|
module ValuePlugin
|
||||||
|
def assign(val)
|
||||||
|
# Ignored
|
||||||
|
end
|
||||||
|
|
||||||
|
def _value
|
||||||
|
reading? ? @value : eval_parameter(:value)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :initial_value parameter
|
||||||
|
module InitialValuePlugin
|
||||||
|
def _value
|
||||||
|
@value != nil ? @value : eval_parameter(:initial_value)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :assert parameter
|
||||||
|
module AssertPlugin
|
||||||
|
def assign(val)
|
||||||
|
super(val)
|
||||||
|
assert!
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
super(io)
|
||||||
|
assert!
|
||||||
|
end
|
||||||
|
|
||||||
|
def assert!
|
||||||
|
current_value = snapshot
|
||||||
|
expected = eval_parameter(:assert, value: current_value)
|
||||||
|
|
||||||
|
msg =
|
||||||
|
if !expected
|
||||||
|
"value '#{current_value}' not as expected"
|
||||||
|
elsif expected != true && current_value != expected
|
||||||
|
"value is '#{current_value}' but expected '#{expected}'"
|
||||||
|
else
|
||||||
|
nil
|
||||||
|
end
|
||||||
|
|
||||||
|
raise ValidityError, "#{msg} for #{debug_name}" if msg
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :asserted_value parameter
|
||||||
|
module AssertedValuePlugin
|
||||||
|
def assign(val)
|
||||||
|
assert_value(val)
|
||||||
|
super(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def _value
|
||||||
|
reading? ? @value : eval_parameter(:asserted_value)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
super(io)
|
||||||
|
assert!
|
||||||
|
end
|
||||||
|
|
||||||
|
def assert!
|
||||||
|
assert_value(snapshot)
|
||||||
|
end
|
||||||
|
|
||||||
|
def assert_value(current_value)
|
||||||
|
expected = eval_parameter(:asserted_value, value: current_value)
|
||||||
|
if current_value != expected
|
||||||
|
raise ValidityError,
|
||||||
|
"value is '#{current_value}' but " \
|
||||||
|
"expected '#{expected}' for #{debug_name}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
###########################################################################
|
||||||
|
# To be implemented by subclasses
|
||||||
|
|
||||||
|
# Return the string representation that +val+ will take when written.
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Read a number of bytes from +io+ and return the value they represent.
|
||||||
|
def read_and_return_value(io)
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return a sensible default for this data.
|
||||||
|
def sensible_default
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# To be implemented by subclasses
|
||||||
|
###########################################################################
|
||||||
|
end
|
||||||
|
end
|
||||||
186
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/bits.rb
vendored
Normal file
186
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/bits.rb
vendored
Normal file
@ -0,0 +1,186 @@
|
|||||||
|
require 'thread'
|
||||||
|
require 'bindata/base_primitive'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Defines a number of classes that contain a bit based integer.
|
||||||
|
# The integer is defined by endian and number of bits.
|
||||||
|
|
||||||
|
module BitField #:nodoc: all
|
||||||
|
@@mutex = Mutex.new
|
||||||
|
|
||||||
|
class << self
|
||||||
|
def define_class(name, nbits, endian, signed = :unsigned)
|
||||||
|
@@mutex.synchronize do
|
||||||
|
unless BinData.const_defined?(name)
|
||||||
|
new_class = Class.new(BinData::BasePrimitive)
|
||||||
|
BitField.define_methods(new_class, nbits, endian.to_sym, signed.to_sym)
|
||||||
|
RegisteredClasses.register(name, new_class)
|
||||||
|
|
||||||
|
BinData.const_set(name, new_class)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
BinData.const_get(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def define_methods(bit_class, nbits, endian, signed)
|
||||||
|
bit_class.module_eval <<-END
|
||||||
|
#{create_params_code(nbits)}
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
#{create_nbits_code(nbits)}
|
||||||
|
#{create_clamp_code(nbits, signed)}
|
||||||
|
super(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io)
|
||||||
|
#{create_nbits_code(nbits)}
|
||||||
|
val = _value
|
||||||
|
#{create_int2uint_code(nbits, signed)}
|
||||||
|
io.writebits(val, #{nbits}, :#{endian})
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes
|
||||||
|
#{create_nbits_code(nbits)}
|
||||||
|
#{create_do_num_bytes_code(nbits)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def bit_aligned?
|
||||||
|
true
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
#{create_nbits_code(nbits)}
|
||||||
|
val = io.readbits(#{nbits}, :#{endian})
|
||||||
|
#{create_uint2int_code(nbits, signed)}
|
||||||
|
val
|
||||||
|
end
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
0
|
||||||
|
end
|
||||||
|
END
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_params_code(nbits)
|
||||||
|
if nbits == :nbits
|
||||||
|
"mandatory_parameter :nbits"
|
||||||
|
else
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_nbits_code(nbits)
|
||||||
|
if nbits == :nbits
|
||||||
|
"nbits = eval_parameter(:nbits)"
|
||||||
|
else
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_do_num_bytes_code(nbits)
|
||||||
|
if nbits == :nbits
|
||||||
|
"nbits / 8.0"
|
||||||
|
else
|
||||||
|
nbits / 8.0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_clamp_code(nbits, signed)
|
||||||
|
if nbits == :nbits
|
||||||
|
create_dynamic_clamp_code(signed)
|
||||||
|
else
|
||||||
|
create_fixed_clamp_code(nbits, signed)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_dynamic_clamp_code(signed)
|
||||||
|
if signed == :signed
|
||||||
|
max = "(1 << (nbits - 1)) - 1"
|
||||||
|
min = "-((#{max}) + 1)"
|
||||||
|
else
|
||||||
|
max = "(1 << nbits) - 1"
|
||||||
|
min = "0"
|
||||||
|
end
|
||||||
|
|
||||||
|
"val = val.clamp(#{min}, #{max})"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_fixed_clamp_code(nbits, signed)
|
||||||
|
if nbits == 1 && signed == :signed
|
||||||
|
raise "signed bitfield must have more than one bit"
|
||||||
|
end
|
||||||
|
|
||||||
|
if signed == :signed
|
||||||
|
max = "(1 << (#{nbits} - 1)) - 1"
|
||||||
|
min = "-((#{max}) + 1)"
|
||||||
|
else
|
||||||
|
min = "0"
|
||||||
|
max = "(1 << #{nbits}) - 1"
|
||||||
|
end
|
||||||
|
|
||||||
|
clamp = "(val = val.clamp(#{min}, #{max}))"
|
||||||
|
|
||||||
|
if nbits == 1
|
||||||
|
# allow single bits to be used as booleans
|
||||||
|
clamp = "(val == true) ? 1 : (not val) ? 0 : #{clamp}"
|
||||||
|
end
|
||||||
|
|
||||||
|
"val = #{clamp}"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_int2uint_code(nbits, signed)
|
||||||
|
if signed != :signed
|
||||||
|
""
|
||||||
|
elsif nbits == :nbits
|
||||||
|
"val &= (1 << nbits) - 1"
|
||||||
|
else
|
||||||
|
"val &= #{(1 << nbits) - 1}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_uint2int_code(nbits, signed)
|
||||||
|
if signed != :signed
|
||||||
|
""
|
||||||
|
elsif nbits == :nbits
|
||||||
|
"val -= (1 << nbits) if (val >= (1 << (nbits - 1)))"
|
||||||
|
else
|
||||||
|
"val -= #{1 << nbits} if (val >= #{1 << (nbits - 1)})"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Create classes for dynamic bitfields
|
||||||
|
{
|
||||||
|
"Bit" => :big,
|
||||||
|
"BitLe" => :little,
|
||||||
|
"Sbit" => [:big, :signed],
|
||||||
|
"SbitLe" => [:little, :signed],
|
||||||
|
}.each_pair { |name, args| BitField.define_class(name, :nbits, *args) }
|
||||||
|
|
||||||
|
# Create classes on demand
|
||||||
|
module BitFieldFactory
|
||||||
|
def const_missing(name)
|
||||||
|
mappings = {
|
||||||
|
/^Bit(\d+)$/ => :big,
|
||||||
|
/^Bit(\d+)le$/ => :little,
|
||||||
|
/^Sbit(\d+)$/ => [:big, :signed],
|
||||||
|
/^Sbit(\d+)le$/ => [:little, :signed]
|
||||||
|
}
|
||||||
|
|
||||||
|
mappings.each_pair do |regex, args|
|
||||||
|
if regex =~ name.to_s
|
||||||
|
nbits = $1.to_i
|
||||||
|
return BitField.define_class(name, nbits, *args)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
super(name)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
BinData.extend BitFieldFactory
|
||||||
|
end
|
||||||
117
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/buffer.rb
vendored
Normal file
117
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/buffer.rb
vendored
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
require 'bindata/base'
|
||||||
|
require 'bindata/dsl'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A Buffer is conceptually a substream within a data stream. It has a
|
||||||
|
# defined size and it will always read or write the exact number of bytes to
|
||||||
|
# fill the buffer. Short reads will skip over unused bytes and short writes
|
||||||
|
# will pad the substream with "\0" bytes.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# obj = BinData::Buffer.new(length: 5, type: [:string, {value: "abc"}])
|
||||||
|
# obj.to_binary_s #=> "abc\000\000"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# class MyBuffer < BinData::Buffer
|
||||||
|
# default_parameter length: 8
|
||||||
|
#
|
||||||
|
# endian :little
|
||||||
|
#
|
||||||
|
# uint16 :num1
|
||||||
|
# uint16 :num2
|
||||||
|
# # padding occurs here
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# obj = MyBuffer.read("\001\000\002\000\000\000\000\000")
|
||||||
|
# obj.num1 #=> 1
|
||||||
|
# obj.num1 #=> 2
|
||||||
|
# obj.raw_num_bytes #=> 4
|
||||||
|
# obj.num_bytes #=> 8
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# class StringTable < BinData::Record
|
||||||
|
# endian :little
|
||||||
|
#
|
||||||
|
# uint16 :table_size_in_bytes
|
||||||
|
# buffer :strings, length: :table_size_in_bytes do
|
||||||
|
# array read_until: :eof do
|
||||||
|
# uint8 :len
|
||||||
|
# string :str, length: :len
|
||||||
|
# end
|
||||||
|
# end
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These params are:
|
||||||
|
#
|
||||||
|
# <tt>:length</tt>:: The number of bytes in the buffer.
|
||||||
|
# <tt>:type</tt>:: The single type inside the buffer. Use a struct if
|
||||||
|
# multiple fields are required.
|
||||||
|
class Buffer < BinData::Base
|
||||||
|
extend DSLMixin
|
||||||
|
|
||||||
|
dsl_parser :buffer
|
||||||
|
arg_processor :buffer
|
||||||
|
|
||||||
|
mandatory_parameters :length, :type
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
@type = get_parameter(:type).instantiate(nil, self)
|
||||||
|
end
|
||||||
|
|
||||||
|
# The number of bytes used, ignoring the padding imposed by the buffer.
|
||||||
|
def raw_num_bytes
|
||||||
|
@type.num_bytes
|
||||||
|
end
|
||||||
|
|
||||||
|
def clear?
|
||||||
|
@type.clear?
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
@type.assign(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def snapshot
|
||||||
|
@type.snapshot
|
||||||
|
end
|
||||||
|
|
||||||
|
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
@type.respond_to?(symbol, include_private) || super
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
@type.__send__(symbol, *args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
io.with_buffer(eval_parameter(:length)) do
|
||||||
|
@type.do_read(io)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io) #:nodoc:
|
||||||
|
io.with_buffer(eval_parameter(:length)) do
|
||||||
|
@type.do_write(io)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes #:nodoc:
|
||||||
|
eval_parameter(:length)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class BufferArgProcessor < BaseArgProcessor
|
||||||
|
include MultiFieldArgSeparator
|
||||||
|
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
params.merge!(obj_class.dsl_params)
|
||||||
|
params.must_be_integer(:length)
|
||||||
|
params.sanitize_object_prototype(:type)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
186
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/choice.rb
vendored
Normal file
186
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/choice.rb
vendored
Normal file
@ -0,0 +1,186 @@
|
|||||||
|
require 'bindata/base'
|
||||||
|
require 'bindata/dsl'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A Choice is a collection of data objects of which only one is active
|
||||||
|
# at any particular time. Method calls will be delegated to the active
|
||||||
|
# choice.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# type1 = [:string, {value: "Type1"}]
|
||||||
|
# type2 = [:string, {value: "Type2"}]
|
||||||
|
#
|
||||||
|
# choices = {5 => type1, 17 => type2}
|
||||||
|
# a = BinData::Choice.new(choices: choices, selection: 5)
|
||||||
|
# a # => "Type1"
|
||||||
|
#
|
||||||
|
# choices = [ type1, type2 ]
|
||||||
|
# a = BinData::Choice.new(choices: choices, selection: 1)
|
||||||
|
# a # => "Type2"
|
||||||
|
#
|
||||||
|
# choices = [ nil, nil, nil, type1, nil, type2 ]
|
||||||
|
# a = BinData::Choice.new(choices: choices, selection: 3)
|
||||||
|
# a # => "Type1"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# Chooser = Struct.new(:choice)
|
||||||
|
# mychoice = Chooser.new
|
||||||
|
# mychoice.choice = 'big'
|
||||||
|
#
|
||||||
|
# choices = {'big' => :uint16be, 'little' => :uint16le}
|
||||||
|
# a = BinData::Choice.new(choices: choices, copy_on_change: true,
|
||||||
|
# selection: -> { mychoice.choice })
|
||||||
|
# a.assign(256)
|
||||||
|
# a.to_binary_s #=> "\001\000"
|
||||||
|
#
|
||||||
|
# mychoice.choice = 'little'
|
||||||
|
# a.to_binary_s #=> "\000\001"
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These params are:
|
||||||
|
#
|
||||||
|
# <tt>:choices</tt>:: Either an array or a hash specifying the possible
|
||||||
|
# data objects. The format of the
|
||||||
|
# array/hash.values is a list of symbols
|
||||||
|
# representing the data object type. If a choice
|
||||||
|
# is to have params passed to it, then it should
|
||||||
|
# be provided as [type_symbol, hash_params]. An
|
||||||
|
# implementation constraint is that the hash may
|
||||||
|
# not contain symbols as keys, with the exception
|
||||||
|
# of :default. :default is to be used when then
|
||||||
|
# :selection does not exist in the :choices hash.
|
||||||
|
# <tt>:selection</tt>:: An index/key into the :choices array/hash which
|
||||||
|
# specifies the currently active choice.
|
||||||
|
# <tt>:copy_on_change</tt>:: If set to true, copy the value of the previous
|
||||||
|
# selection to the current selection whenever the
|
||||||
|
# selection changes. Default is false.
|
||||||
|
class Choice < BinData::Base
|
||||||
|
extend DSLMixin
|
||||||
|
|
||||||
|
dsl_parser :choice
|
||||||
|
arg_processor :choice
|
||||||
|
|
||||||
|
mandatory_parameters :choices, :selection
|
||||||
|
optional_parameter :copy_on_change
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
extend CopyOnChangePlugin if eval_parameter(:copy_on_change) == true
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
@choices = {}
|
||||||
|
@last_selection = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the current selection.
|
||||||
|
def selection
|
||||||
|
selection = eval_parameter(:selection)
|
||||||
|
if selection.nil?
|
||||||
|
raise IndexError, ":selection returned nil for #{debug_name}"
|
||||||
|
end
|
||||||
|
selection
|
||||||
|
end
|
||||||
|
|
||||||
|
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
current_choice.respond_to?(symbol, include_private) || super
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
current_choice.__send__(symbol, *args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
%w(clear? assign snapshot do_read do_write do_num_bytes).each do |m|
|
||||||
|
module_eval <<-END
|
||||||
|
def #{m}(*args)
|
||||||
|
current_choice.#{m}(*args)
|
||||||
|
end
|
||||||
|
END
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def current_choice
|
||||||
|
current_selection = selection
|
||||||
|
@choices[current_selection] ||= instantiate_choice(current_selection)
|
||||||
|
end
|
||||||
|
|
||||||
|
def instantiate_choice(selection)
|
||||||
|
prototype = get_parameter(:choices)[selection]
|
||||||
|
if prototype.nil?
|
||||||
|
raise IndexError, "selection '#{selection}' does not exist in :choices for #{debug_name}"
|
||||||
|
end
|
||||||
|
prototype.instantiate(nil, self)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class ChoiceArgProcessor < BaseArgProcessor
|
||||||
|
def sanitize_parameters!(obj_class, params) #:nodoc:
|
||||||
|
params.merge!(obj_class.dsl_params)
|
||||||
|
|
||||||
|
params.sanitize_choices(:choices) do |choices|
|
||||||
|
hash_choices = choices_as_hash(choices)
|
||||||
|
ensure_valid_keys(hash_choices)
|
||||||
|
hash_choices
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
#-------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def choices_as_hash(choices)
|
||||||
|
if choices.respond_to?(:to_ary)
|
||||||
|
key_array_by_index(choices.to_ary)
|
||||||
|
else
|
||||||
|
choices
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def key_array_by_index(array)
|
||||||
|
result = {}
|
||||||
|
array.each_with_index do |el, i|
|
||||||
|
result[i] = el unless el.nil?
|
||||||
|
end
|
||||||
|
result
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_valid_keys(choices)
|
||||||
|
if choices.key?(nil)
|
||||||
|
raise ArgumentError, ":choices hash may not have nil key"
|
||||||
|
end
|
||||||
|
if choices.keys.detect { |key| key.is_a?(Symbol) && key != :default }
|
||||||
|
raise ArgumentError, ":choices hash may not have symbols for keys"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :copy_on_change parameter
|
||||||
|
module CopyOnChangePlugin
|
||||||
|
def current_choice
|
||||||
|
obj = super
|
||||||
|
copy_previous_value(obj)
|
||||||
|
obj
|
||||||
|
end
|
||||||
|
|
||||||
|
def copy_previous_value(obj)
|
||||||
|
current_selection = selection
|
||||||
|
prev = get_previous_choice(current_selection)
|
||||||
|
obj.assign(prev) unless prev.nil?
|
||||||
|
remember_current_selection(current_selection)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_previous_choice(selection)
|
||||||
|
if @last_selection && selection != @last_selection
|
||||||
|
@choices[@last_selection]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def remember_current_selection(selection)
|
||||||
|
@last_selection = selection
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
@ -0,0 +1,34 @@
|
|||||||
|
require "bindata/base_primitive"
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Counts the number of bytes remaining in the input stream from the current
|
||||||
|
# position to the end of the stream. This only makes sense for seekable
|
||||||
|
# streams.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# class A < BinData::Record
|
||||||
|
# count_bytes_remaining :bytes_remaining
|
||||||
|
# string :all_data, read_length: :bytes_remaining
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# obj = A.read("abcdefghij")
|
||||||
|
# obj.all_data #=> "abcdefghij"
|
||||||
|
#
|
||||||
|
class CountBytesRemaining < BinData::BasePrimitive
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
""
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
io.num_bytes_remaining
|
||||||
|
end
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
198
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/delayed_io.rb
vendored
Normal file
198
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/delayed_io.rb
vendored
Normal file
@ -0,0 +1,198 @@
|
|||||||
|
require 'bindata/base'
|
||||||
|
require 'bindata/dsl'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# BinData declarations are evaluated in a single pass.
|
||||||
|
# However, some binary formats require multi pass processing. A common
|
||||||
|
# reason is seeking backwards in the input stream.
|
||||||
|
#
|
||||||
|
# DelayedIO supports multi pass processing. It works by ignoring the normal
|
||||||
|
# #read or #write calls. The user must explicitly call the #read_now! or
|
||||||
|
# #write_now! methods to process an additional pass. This additional pass
|
||||||
|
# must specify the abs_offset of the I/O operation.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# obj = BinData::DelayedIO.new(read_abs_offset: 3, type: :uint16be)
|
||||||
|
# obj.read("\x00\x00\x00\x11\x12")
|
||||||
|
# obj #=> 0
|
||||||
|
#
|
||||||
|
# obj.read_now!
|
||||||
|
# obj #=> 0x1112
|
||||||
|
#
|
||||||
|
# - OR -
|
||||||
|
#
|
||||||
|
# obj.read("\x00\x00\x00\x11\x12") { obj.read_now! } #=> 0x1122
|
||||||
|
#
|
||||||
|
# obj.to_binary_s { obj.write_now! } #=> "\x00\x00\x00\x11\x12"
|
||||||
|
#
|
||||||
|
# You can use the +auto_call_delayed_io+ keyword to cause #read and #write to
|
||||||
|
# automatically perform the extra passes.
|
||||||
|
#
|
||||||
|
# class ReversePascalString < BinData::Record
|
||||||
|
# auto_call_delayed_io
|
||||||
|
#
|
||||||
|
# delayed_io :str, read_abs_offset: 0 do
|
||||||
|
# string read_length: :len
|
||||||
|
# end
|
||||||
|
# count_bytes_remaining :total_size
|
||||||
|
# skip to_abs_offset: -> { total_size - 1 }
|
||||||
|
# uint8 :len, value: -> { str.length }
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# s = ReversePascalString.read("hello\x05")
|
||||||
|
# s.to_binary_s #=> "hello\x05"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These params are:
|
||||||
|
#
|
||||||
|
# <tt>:read_abs_offset</tt>:: The abs_offset to start reading at.
|
||||||
|
# <tt>:type</tt>:: The single type inside the delayed io. Use
|
||||||
|
# a struct if multiple fields are required.
|
||||||
|
class DelayedIO < BinData::Base
|
||||||
|
extend DSLMixin
|
||||||
|
|
||||||
|
dsl_parser :delayed_io
|
||||||
|
arg_processor :delayed_io
|
||||||
|
|
||||||
|
mandatory_parameters :read_abs_offset, :type
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
@type = get_parameter(:type).instantiate(nil, self)
|
||||||
|
@abs_offset = nil
|
||||||
|
@read_io = nil
|
||||||
|
@write_io = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def clear?
|
||||||
|
@type.clear?
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
@type.assign(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def snapshot
|
||||||
|
@type.snapshot
|
||||||
|
end
|
||||||
|
|
||||||
|
def num_bytes
|
||||||
|
@type.num_bytes
|
||||||
|
end
|
||||||
|
|
||||||
|
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
@type.respond_to?(symbol, include_private) || super
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
@type.__send__(symbol, *args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
def abs_offset
|
||||||
|
@abs_offset || eval_parameter(:read_abs_offset)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Sets the +abs_offset+ to use when writing this object.
|
||||||
|
def abs_offset=(offset)
|
||||||
|
@abs_offset = offset
|
||||||
|
end
|
||||||
|
|
||||||
|
def rel_offset
|
||||||
|
abs_offset
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
@read_io = io
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io) #:nodoc:
|
||||||
|
@write_io = io
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes #:nodoc:
|
||||||
|
0
|
||||||
|
end
|
||||||
|
|
||||||
|
def include_obj?
|
||||||
|
! has_parameter?(:onlyif) || eval_parameter(:onlyif)
|
||||||
|
end
|
||||||
|
|
||||||
|
# DelayedIO objects aren't read when #read is called.
|
||||||
|
# The reading is delayed until this method is called.
|
||||||
|
def read_now!
|
||||||
|
return unless include_obj?
|
||||||
|
raise IOError, "read from where?" unless @read_io
|
||||||
|
|
||||||
|
@read_io.seekbytes(abs_offset - @read_io.offset)
|
||||||
|
start_read do
|
||||||
|
@type.do_read(@read_io)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# DelayedIO objects aren't written when #write is called.
|
||||||
|
# The writing is delayed until this method is called.
|
||||||
|
def write_now!
|
||||||
|
return unless include_obj?
|
||||||
|
raise IOError, "write to where?" unless @write_io
|
||||||
|
|
||||||
|
@write_io.seekbytes(abs_offset - @write_io.offset)
|
||||||
|
@type.do_write(@write_io)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class DelayedIoArgProcessor < BaseArgProcessor
|
||||||
|
include MultiFieldArgSeparator
|
||||||
|
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
params.merge!(obj_class.dsl_params)
|
||||||
|
params.must_be_integer(:read_abs_offset)
|
||||||
|
params.sanitize_object_prototype(:type)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Add +auto_call_delayed_io+ keyword to BinData::Base.
|
||||||
|
class Base
|
||||||
|
class << self
|
||||||
|
# The +auto_call_delayed_io+ keyword sets a data object tree to perform
|
||||||
|
# multi pass I/O automatically.
|
||||||
|
def auto_call_delayed_io
|
||||||
|
include AutoCallDelayedIO
|
||||||
|
|
||||||
|
return if DelayedIO.method_defined? :initialize_instance_without_record_io
|
||||||
|
|
||||||
|
DelayedIO.send(:alias_method, :initialize_instance_without_record_io, :initialize_instance)
|
||||||
|
DelayedIO.send(:define_method, :initialize_instance) do
|
||||||
|
if @parent && !defined? @delayed_io_recorded
|
||||||
|
@delayed_io_recorded = true
|
||||||
|
list = top_level_get(:delayed_ios)
|
||||||
|
list << self if list
|
||||||
|
end
|
||||||
|
|
||||||
|
initialize_instance_without_record_io
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
module AutoCallDelayedIO
|
||||||
|
def initialize_shared_instance
|
||||||
|
top_level_set(:delayed_ios, [])
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def read(io)
|
||||||
|
super(io) { top_level_get(:delayed_ios).each(&:read_now!) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def write(io, *_)
|
||||||
|
super(io) { top_level_get(:delayed_ios).each(&:write_now!) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def num_bytes
|
||||||
|
to_binary_s.size
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
484
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/dsl.rb
vendored
Normal file
484
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/dsl.rb
vendored
Normal file
@ -0,0 +1,484 @@
|
|||||||
|
module BinData
|
||||||
|
# Extracts args for Records and Buffers.
|
||||||
|
#
|
||||||
|
# Foo.new(bar: "baz) is ambiguous as to whether :bar is a value or parameter.
|
||||||
|
#
|
||||||
|
# BaseArgExtractor always assumes :bar is parameter. This extractor correctly
|
||||||
|
# identifies it as value or parameter.
|
||||||
|
module MultiFieldArgSeparator
|
||||||
|
def separate_args(obj_class, obj_args)
|
||||||
|
value, parameters, parent = super(obj_class, obj_args)
|
||||||
|
|
||||||
|
if parameters_is_value?(obj_class, value, parameters)
|
||||||
|
value = parameters
|
||||||
|
parameters = {}
|
||||||
|
end
|
||||||
|
|
||||||
|
[value, parameters, parent]
|
||||||
|
end
|
||||||
|
|
||||||
|
def parameters_is_value?(obj_class, value, parameters)
|
||||||
|
if value.nil? && !parameters.empty?
|
||||||
|
field_names_in_parameters?(obj_class, parameters)
|
||||||
|
else
|
||||||
|
false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def field_names_in_parameters?(obj_class, parameters)
|
||||||
|
field_names = obj_class.fields.field_names
|
||||||
|
param_keys = parameters.keys
|
||||||
|
|
||||||
|
!(field_names & param_keys).empty?
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# BinData classes that are part of the DSL must be extended by this.
|
||||||
|
module DSLMixin
|
||||||
|
def dsl_parser(parser_type = nil)
|
||||||
|
@dsl_parser ||= begin
|
||||||
|
parser_type ||= superclass.dsl_parser.parser_type
|
||||||
|
DSLParser.new(self, parser_type)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
dsl_parser.__send__(symbol, *args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Assert object is not an array or string.
|
||||||
|
def to_ary; nil; end
|
||||||
|
def to_str; nil; end
|
||||||
|
|
||||||
|
# A DSLParser parses and accumulates field definitions of the form
|
||||||
|
#
|
||||||
|
# type name, params
|
||||||
|
#
|
||||||
|
# where:
|
||||||
|
# * +type+ is the under_scored name of a registered type
|
||||||
|
# * +name+ is the (possible optional) name of the field
|
||||||
|
# * +params+ is a hash containing any parameters
|
||||||
|
#
|
||||||
|
class DSLParser
|
||||||
|
def initialize(the_class, parser_type)
|
||||||
|
raise "unknown parser type #{parser_type}" unless parser_abilities[parser_type]
|
||||||
|
|
||||||
|
@the_class = the_class
|
||||||
|
@parser_type = parser_type
|
||||||
|
@validator = DSLFieldValidator.new(the_class, self)
|
||||||
|
@endian = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
attr_reader :parser_type
|
||||||
|
|
||||||
|
def endian(endian = nil)
|
||||||
|
if endian
|
||||||
|
set_endian(endian)
|
||||||
|
elsif @endian.nil?
|
||||||
|
set_endian(parent_attribute(:endian))
|
||||||
|
end
|
||||||
|
@endian
|
||||||
|
end
|
||||||
|
|
||||||
|
def search_prefix(*args)
|
||||||
|
@search_prefix ||= parent_attribute(:search_prefix, []).dup
|
||||||
|
|
||||||
|
prefix = args.collect(&:to_sym).compact
|
||||||
|
unless prefix.empty?
|
||||||
|
if fields?
|
||||||
|
dsl_raise SyntaxError, "search_prefix must be called before defining fields"
|
||||||
|
end
|
||||||
|
|
||||||
|
@search_prefix = prefix.concat(@search_prefix)
|
||||||
|
end
|
||||||
|
|
||||||
|
@search_prefix
|
||||||
|
end
|
||||||
|
|
||||||
|
def hide(*args)
|
||||||
|
if option?(:hidden_fields)
|
||||||
|
@hide ||= parent_attribute(:hide, []).dup
|
||||||
|
|
||||||
|
hidden = args.collect(&:to_sym).compact
|
||||||
|
@hide.concat(hidden)
|
||||||
|
|
||||||
|
@hide
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def fields
|
||||||
|
@fields ||= SanitizedFields.new(hints, parent_fields)
|
||||||
|
end
|
||||||
|
|
||||||
|
def dsl_params
|
||||||
|
abilities = parser_abilities[@parser_type]
|
||||||
|
send(abilities.at(0), abilities.at(1))
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(*args, &block)
|
||||||
|
ensure_hints
|
||||||
|
parse_and_append_field(*args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
#-------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def parser_abilities
|
||||||
|
@abilities ||= {
|
||||||
|
struct: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||||
|
array: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames]],
|
||||||
|
buffer: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||||
|
choice: [:to_choice_params, :choices, [:multiple_fields, :all_or_none_fieldnames, :fieldnames_are_values]],
|
||||||
|
delayed_io: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
|
||||||
|
primitive: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames]],
|
||||||
|
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]],
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
def option?(opt)
|
||||||
|
parser_abilities[@parser_type].at(2).include?(opt)
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_hints
|
||||||
|
endian
|
||||||
|
search_prefix
|
||||||
|
end
|
||||||
|
|
||||||
|
def hints
|
||||||
|
{ endian: endian, search_prefix: search_prefix }
|
||||||
|
end
|
||||||
|
|
||||||
|
def set_endian(endian)
|
||||||
|
if endian
|
||||||
|
if fields?
|
||||||
|
dsl_raise SyntaxError, "endian must be called before defining fields"
|
||||||
|
end
|
||||||
|
if !valid_endian?(endian)
|
||||||
|
dsl_raise ArgumentError, "unknown value for endian '#{endian}'"
|
||||||
|
end
|
||||||
|
|
||||||
|
if endian == :big_and_little
|
||||||
|
DSLBigAndLittleEndianHandler.handle(@the_class)
|
||||||
|
end
|
||||||
|
|
||||||
|
@endian = endian
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def valid_endian?(endian)
|
||||||
|
[:big, :little, :big_and_little].include?(endian)
|
||||||
|
end
|
||||||
|
|
||||||
|
def parent_fields
|
||||||
|
parent_attribute(:fields)
|
||||||
|
end
|
||||||
|
|
||||||
|
def fields?
|
||||||
|
defined?(@fields) && !@fields.empty?
|
||||||
|
end
|
||||||
|
|
||||||
|
def parse_and_append_field(*args, &block)
|
||||||
|
parser = DSLFieldParser.new(hints, *args, &block)
|
||||||
|
begin
|
||||||
|
@validator.validate_field(parser.name)
|
||||||
|
append_field(parser.type, parser.name, parser.params)
|
||||||
|
rescue Exception => err
|
||||||
|
dsl_raise err.class, err.message
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def append_field(type, name, params)
|
||||||
|
fields.add_field(type, name, params)
|
||||||
|
rescue BinData::UnRegisteredTypeError => err
|
||||||
|
raise TypeError, "unknown type '#{err.message}'"
|
||||||
|
end
|
||||||
|
|
||||||
|
def parent_attribute(attr, default = nil)
|
||||||
|
parent = @the_class.superclass
|
||||||
|
parser = parent.respond_to?(:dsl_parser) ? parent.dsl_parser : nil
|
||||||
|
if parser && parser.respond_to?(attr)
|
||||||
|
parser.send(attr)
|
||||||
|
else
|
||||||
|
default
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def dsl_raise(exception, msg)
|
||||||
|
backtrace = caller
|
||||||
|
backtrace.shift while %r{bindata/dsl.rb} =~ backtrace.first
|
||||||
|
|
||||||
|
raise exception, "#{msg} in #{@the_class}", backtrace
|
||||||
|
end
|
||||||
|
|
||||||
|
def to_object_params(key)
|
||||||
|
case fields.length
|
||||||
|
when 0
|
||||||
|
{}
|
||||||
|
when 1
|
||||||
|
{key => fields[0].prototype}
|
||||||
|
else
|
||||||
|
{key=> [:struct, to_struct_params]}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def to_choice_params(key)
|
||||||
|
if fields.empty?
|
||||||
|
{}
|
||||||
|
elsif fields.all_field_names_blank?
|
||||||
|
{key => fields.collect(&:prototype)}
|
||||||
|
else
|
||||||
|
choices = {}
|
||||||
|
fields.each { |f| choices[f.name] = f.prototype }
|
||||||
|
{key => choices}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def to_struct_params(*unused)
|
||||||
|
result = {fields: fields}
|
||||||
|
if !endian.nil?
|
||||||
|
result[:endian] = endian
|
||||||
|
end
|
||||||
|
if !search_prefix.empty?
|
||||||
|
result[:search_prefix] = search_prefix
|
||||||
|
end
|
||||||
|
if option?(:hidden_fields) && !hide.empty?
|
||||||
|
result[:hide] = hide
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Handles the :big_and_little endian option.
|
||||||
|
# This option creates two subclasses, each handling
|
||||||
|
# :big or :little endian.
|
||||||
|
class DSLBigAndLittleEndianHandler
|
||||||
|
class << self
|
||||||
|
def handle(bnl_class)
|
||||||
|
make_class_abstract(bnl_class)
|
||||||
|
create_subclasses_with_endian(bnl_class)
|
||||||
|
override_new_in_class(bnl_class)
|
||||||
|
delegate_field_creation(bnl_class)
|
||||||
|
fixup_subclass_hierarchy(bnl_class)
|
||||||
|
end
|
||||||
|
|
||||||
|
def make_class_abstract(bnl_class)
|
||||||
|
bnl_class.send(:unregister_self)
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_subclasses_with_endian(bnl_class)
|
||||||
|
instance_eval "class ::#{bnl_class}Be < ::#{bnl_class}; endian :big; end"
|
||||||
|
instance_eval "class ::#{bnl_class}Le < ::#{bnl_class}; endian :little; end"
|
||||||
|
end
|
||||||
|
|
||||||
|
def override_new_in_class(bnl_class)
|
||||||
|
endian_classes = {
|
||||||
|
big: class_with_endian(bnl_class, :big),
|
||||||
|
little: class_with_endian(bnl_class, :little),
|
||||||
|
}
|
||||||
|
bnl_class.define_singleton_method(:new) do |*args|
|
||||||
|
if self == bnl_class
|
||||||
|
_, options, _ = arg_processor.separate_args(self, args)
|
||||||
|
delegate = endian_classes[options[:endian]]
|
||||||
|
return delegate.new(*args) if delegate
|
||||||
|
end
|
||||||
|
|
||||||
|
super(*args)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def delegate_field_creation(bnl_class)
|
||||||
|
endian_classes = {
|
||||||
|
big: class_with_endian(bnl_class, :big),
|
||||||
|
little: class_with_endian(bnl_class, :little),
|
||||||
|
}
|
||||||
|
|
||||||
|
parser = bnl_class.dsl_parser
|
||||||
|
parser.define_singleton_method(:parse_and_append_field) do |*args, &block|
|
||||||
|
endian_classes[:big].send(*args, &block)
|
||||||
|
endian_classes[:little].send(*args, &block)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def fixup_subclass_hierarchy(bnl_class)
|
||||||
|
parent = bnl_class.superclass
|
||||||
|
if obj_attribute(parent, :endian) == :big_and_little
|
||||||
|
be_subclass = class_with_endian(bnl_class, :big)
|
||||||
|
be_parent = class_with_endian(parent, :big)
|
||||||
|
be_fields = obj_attribute(be_parent, :fields)
|
||||||
|
|
||||||
|
le_subclass = class_with_endian(bnl_class, :little)
|
||||||
|
le_parent = class_with_endian(parent, :little)
|
||||||
|
le_fields = obj_attribute(le_parent, :fields)
|
||||||
|
|
||||||
|
be_subclass.dsl_parser.define_singleton_method(:parent_fields) do
|
||||||
|
be_fields
|
||||||
|
end
|
||||||
|
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
|
||||||
|
le_fields
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def class_with_endian(class_name, endian)
|
||||||
|
hints = {
|
||||||
|
endian: endian,
|
||||||
|
search_prefix: class_name.dsl_parser.search_prefix,
|
||||||
|
}
|
||||||
|
RegisteredClasses.lookup(class_name, hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
def obj_attribute(obj, attr)
|
||||||
|
obj.dsl_parser.send(attr)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Extracts the details from a field declaration.
|
||||||
|
class DSLFieldParser
|
||||||
|
def initialize(hints, symbol, *args, &block)
|
||||||
|
@hints = hints
|
||||||
|
@type = symbol
|
||||||
|
@name = name_from_field_declaration(args)
|
||||||
|
@params = params_from_field_declaration(args, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
attr_reader :type, :name, :params
|
||||||
|
|
||||||
|
def name_from_field_declaration(args)
|
||||||
|
name, _ = args
|
||||||
|
if name == "" || name.is_a?(Hash)
|
||||||
|
nil
|
||||||
|
else
|
||||||
|
name
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def params_from_field_declaration(args, &block)
|
||||||
|
params = params_from_args(args)
|
||||||
|
|
||||||
|
if block_given?
|
||||||
|
params.merge(params_from_block(&block))
|
||||||
|
else
|
||||||
|
params
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def params_from_args(args)
|
||||||
|
name, params = args
|
||||||
|
params = name if name.is_a?(Hash)
|
||||||
|
|
||||||
|
params || {}
|
||||||
|
end
|
||||||
|
|
||||||
|
def params_from_block(&block)
|
||||||
|
bindata_classes = {
|
||||||
|
array: BinData::Array,
|
||||||
|
buffer: BinData::Buffer,
|
||||||
|
choice: BinData::Choice,
|
||||||
|
delayed_io: BinData::DelayedIO,
|
||||||
|
skip: BinData::Skip,
|
||||||
|
struct: BinData::Struct,
|
||||||
|
}
|
||||||
|
|
||||||
|
if bindata_classes.include?(@type)
|
||||||
|
parser = DSLParser.new(bindata_classes[@type], @type)
|
||||||
|
parser.endian(@hints[:endian])
|
||||||
|
parser.search_prefix(*@hints[:search_prefix])
|
||||||
|
parser.instance_eval(&block)
|
||||||
|
|
||||||
|
parser.dsl_params
|
||||||
|
else
|
||||||
|
{}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Validates a field defined in a DSLMixin.
|
||||||
|
class DSLFieldValidator
|
||||||
|
def initialize(the_class, parser)
|
||||||
|
@the_class = the_class
|
||||||
|
@dsl_parser = parser
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_field(name)
|
||||||
|
if must_not_have_a_name_failed?(name)
|
||||||
|
raise SyntaxError, "field must not have a name"
|
||||||
|
end
|
||||||
|
|
||||||
|
if all_or_none_names_failed?(name)
|
||||||
|
raise SyntaxError, "fields must either all have names, or none must have names"
|
||||||
|
end
|
||||||
|
|
||||||
|
if must_have_a_name_failed?(name)
|
||||||
|
raise SyntaxError, "field must have a name"
|
||||||
|
end
|
||||||
|
|
||||||
|
ensure_valid_name(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_valid_name(name)
|
||||||
|
if name && !option?(:fieldnames_are_values)
|
||||||
|
if malformed_name?(name)
|
||||||
|
raise SyntaxError, "field '#{name}' is an illegal fieldname"
|
||||||
|
end
|
||||||
|
|
||||||
|
if duplicate_name?(name)
|
||||||
|
raise SyntaxError, "duplicate field '#{name}'"
|
||||||
|
end
|
||||||
|
|
||||||
|
if name_shadows_method?(name)
|
||||||
|
raise SyntaxError, "field '#{name}' shadows an existing method"
|
||||||
|
end
|
||||||
|
|
||||||
|
if name_is_reserved?(name)
|
||||||
|
raise SyntaxError, "field '#{name}' is a reserved name"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def must_not_have_a_name_failed?(name)
|
||||||
|
option?(:no_fieldnames) && !name.nil?
|
||||||
|
end
|
||||||
|
|
||||||
|
def must_have_a_name_failed?(name)
|
||||||
|
option?(:mandatory_fieldnames) && name.nil?
|
||||||
|
end
|
||||||
|
|
||||||
|
def all_or_none_names_failed?(name)
|
||||||
|
if option?(:all_or_none_fieldnames) && !fields.empty?
|
||||||
|
all_names_blank = fields.all_field_names_blank?
|
||||||
|
no_names_blank = fields.no_field_names_blank?
|
||||||
|
|
||||||
|
(!name.nil? && all_names_blank) || (name.nil? && no_names_blank)
|
||||||
|
else
|
||||||
|
false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def malformed_name?(name)
|
||||||
|
/^[a-z_]\w*$/ !~ name.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
def duplicate_name?(name)
|
||||||
|
fields.field_name?(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def name_shadows_method?(name)
|
||||||
|
@the_class.method_defined?(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def name_is_reserved?(name)
|
||||||
|
BinData::Struct::RESERVED.include?(name.to_sym)
|
||||||
|
end
|
||||||
|
|
||||||
|
def fields
|
||||||
|
@dsl_parser.fields
|
||||||
|
end
|
||||||
|
|
||||||
|
def option?(opt)
|
||||||
|
@dsl_parser.send(:option?, opt)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
83
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/float.rb
vendored
Normal file
83
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/float.rb
vendored
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
require 'bindata/base_primitive'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Defines a number of classes that contain a floating point number.
|
||||||
|
# The float is defined by precision and endian.
|
||||||
|
|
||||||
|
module FloatingPoint #:nodoc: all
|
||||||
|
class << self
|
||||||
|
PRECISION = {
|
||||||
|
single: 4,
|
||||||
|
double: 8,
|
||||||
|
}
|
||||||
|
|
||||||
|
PACK_CODE = {
|
||||||
|
[:single, :little] => 'e',
|
||||||
|
[:single, :big] => 'g',
|
||||||
|
[:double, :little] => 'E',
|
||||||
|
[:double, :big] => 'G',
|
||||||
|
}
|
||||||
|
|
||||||
|
def define_methods(float_class, precision, endian)
|
||||||
|
float_class.module_eval <<-END
|
||||||
|
def do_num_bytes
|
||||||
|
#{create_num_bytes_code(precision)}
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
0.0
|
||||||
|
end
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
#{create_to_binary_s_code(precision, endian)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
#{create_read_code(precision, endian)}
|
||||||
|
end
|
||||||
|
END
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_num_bytes_code(precision)
|
||||||
|
PRECISION[precision]
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_read_code(precision, endian)
|
||||||
|
nbytes = PRECISION[precision]
|
||||||
|
unpack = PACK_CODE[[precision, endian]]
|
||||||
|
|
||||||
|
"io.readbytes(#{nbytes}).unpack('#{unpack}').at(0)"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_to_binary_s_code(precision, endian)
|
||||||
|
pack = PACK_CODE[[precision, endian]]
|
||||||
|
|
||||||
|
"[val].pack('#{pack}')"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
# Single precision floating point number in little endian format
|
||||||
|
class FloatLe < BinData::BasePrimitive
|
||||||
|
FloatingPoint.define_methods(self, :single, :little)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Single precision floating point number in big endian format
|
||||||
|
class FloatBe < BinData::BasePrimitive
|
||||||
|
FloatingPoint.define_methods(self, :single, :big)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Double precision floating point number in little endian format
|
||||||
|
class DoubleLe < BinData::BasePrimitive
|
||||||
|
FloatingPoint.define_methods(self, :double, :little)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Double precision floating point number in big endian format
|
||||||
|
class DoubleBe < BinData::BasePrimitive
|
||||||
|
FloatingPoint.define_methods(self, :double, :big)
|
||||||
|
end
|
||||||
|
end
|
||||||
75
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/framework.rb
vendored
Normal file
75
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/framework.rb
vendored
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
module BinData
|
||||||
|
# Error raised when unexpected results occur when reading data from IO.
|
||||||
|
class ValidityError < StandardError ; end
|
||||||
|
|
||||||
|
# All methods provided by the framework are to be implemented or overridden
|
||||||
|
# by subclasses of BinData::Base.
|
||||||
|
module Framework
|
||||||
|
# Initializes the state of the object. All instance variables that
|
||||||
|
# are used by the object must be initialized here.
|
||||||
|
def initialize_instance
|
||||||
|
end
|
||||||
|
|
||||||
|
# Initialises state that is shared by objects with the same parameters.
|
||||||
|
#
|
||||||
|
# This should only be used when optimising for performance. Instance
|
||||||
|
# variables set here, and changes to the singleton class will be shared
|
||||||
|
# between all objects that are initialized with the same parameters.
|
||||||
|
# This method is called only once for a particular set of parameters.
|
||||||
|
def initialize_shared_instance
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns true if the object has not been changed since creation.
|
||||||
|
def clear?
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Assigns the value of +val+ to this data object. Note that +val+ must
|
||||||
|
# always be deep copied to ensure no aliasing problems can occur.
|
||||||
|
def assign(val)
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns a snapshot of this data object.
|
||||||
|
def snapshot
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the debug name of +child+. This only needs to be implemented
|
||||||
|
# by objects that contain child objects.
|
||||||
|
def debug_name_of(child) #:nodoc:
|
||||||
|
debug_name
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the offset of +child+. This only needs to be implemented
|
||||||
|
# by objects that contain child objects.
|
||||||
|
def offset_of(child) #:nodoc:
|
||||||
|
0
|
||||||
|
end
|
||||||
|
|
||||||
|
# Is this object aligned on non-byte boundaries?
|
||||||
|
def bit_aligned?
|
||||||
|
false
|
||||||
|
end
|
||||||
|
|
||||||
|
# Reads the data for this data object from +io+.
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Writes the value for this data to +io+.
|
||||||
|
def do_write(io) #:nodoc:
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the number of bytes it will take to write this data.
|
||||||
|
def do_num_bytes #:nodoc:
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Set visibility requirements of methods to implement
|
||||||
|
public :clear?, :assign, :snapshot, :debug_name_of, :offset_of
|
||||||
|
protected :initialize_instance, :initialize_shared_instance
|
||||||
|
protected :do_read, :do_write, :do_num_bytes
|
||||||
|
end
|
||||||
|
end
|
||||||
212
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/int.rb
vendored
Normal file
212
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/int.rb
vendored
Normal file
@ -0,0 +1,212 @@
|
|||||||
|
require 'thread'
|
||||||
|
require 'bindata/base_primitive'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Defines a number of classes that contain an integer. The integer
|
||||||
|
# is defined by endian, signedness and number of bytes.
|
||||||
|
|
||||||
|
module Int #:nodoc: all
|
||||||
|
@@mutex = Mutex.new
|
||||||
|
|
||||||
|
class << self
|
||||||
|
def define_class(name, nbits, endian, signed)
|
||||||
|
@@mutex.synchronize do
|
||||||
|
unless BinData.const_defined?(name)
|
||||||
|
new_class = Class.new(BinData::BasePrimitive)
|
||||||
|
Int.define_methods(new_class, nbits, endian.to_sym, signed.to_sym)
|
||||||
|
RegisteredClasses.register(name, new_class)
|
||||||
|
|
||||||
|
BinData.const_set(name, new_class)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
BinData.const_get(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def define_methods(int_class, nbits, endian, signed)
|
||||||
|
raise "nbits must be divisible by 8" unless (nbits % 8).zero?
|
||||||
|
|
||||||
|
int_class.module_eval <<-END
|
||||||
|
def assign(val)
|
||||||
|
#{create_clamp_code(nbits, signed)}
|
||||||
|
super(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes
|
||||||
|
#{nbits / 8}
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
0
|
||||||
|
end
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
#{create_clamp_code(nbits, signed)}
|
||||||
|
#{create_to_binary_s_code(nbits, endian, signed)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
#{create_read_code(nbits, endian, signed)}
|
||||||
|
end
|
||||||
|
END
|
||||||
|
end
|
||||||
|
|
||||||
|
#-------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def create_clamp_code(nbits, signed)
|
||||||
|
if signed == :signed
|
||||||
|
max = "(1 << (#{nbits} - 1)) - 1"
|
||||||
|
min = "-((#{max}) + 1)"
|
||||||
|
else
|
||||||
|
max = "(1 << #{nbits}) - 1"
|
||||||
|
min = "0"
|
||||||
|
end
|
||||||
|
|
||||||
|
"val = val.clamp(#{min}, #{max})"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_read_code(nbits, endian, signed)
|
||||||
|
read_str = create_raw_read_code(nbits, endian, signed)
|
||||||
|
|
||||||
|
if need_signed_conversion_code?(nbits, signed)
|
||||||
|
"val = #{read_str} ; #{create_uint2int_code(nbits)}"
|
||||||
|
else
|
||||||
|
read_str
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_raw_read_code(nbits, endian, signed)
|
||||||
|
# special case 8bit integers for speed
|
||||||
|
if nbits == 8
|
||||||
|
"io.readbytes(1).ord"
|
||||||
|
else
|
||||||
|
unpack_str = create_read_unpack_code(nbits, endian, signed)
|
||||||
|
assemble_str = create_read_assemble_code(nbits, endian, signed)
|
||||||
|
|
||||||
|
"(#{unpack_str} ; #{assemble_str})"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_read_unpack_code(nbits, endian, signed)
|
||||||
|
nbytes = nbits / 8
|
||||||
|
pack_directive = pack_directive(nbits, endian, signed)
|
||||||
|
|
||||||
|
"ints = io.readbytes(#{nbytes}).unpack('#{pack_directive}')"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_read_assemble_code(nbits, endian, signed)
|
||||||
|
nwords = nbits / bits_per_word(nbits)
|
||||||
|
|
||||||
|
idx = (0...nwords).to_a
|
||||||
|
idx.reverse! if endian == :big
|
||||||
|
|
||||||
|
parts = (0...nwords).collect do |i|
|
||||||
|
"(ints.at(#{idx[i]}) << #{bits_per_word(nbits) * i})"
|
||||||
|
end
|
||||||
|
parts[0] = parts[0].sub(/ << 0\b/, "") # Remove " << 0" for optimisation
|
||||||
|
|
||||||
|
parts.join(" + ")
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_to_binary_s_code(nbits, endian, signed)
|
||||||
|
# special case 8bit integers for speed
|
||||||
|
return "(val & 0xff).chr" if nbits == 8
|
||||||
|
|
||||||
|
pack_directive = pack_directive(nbits, endian, signed)
|
||||||
|
words = val_as_packed_words(nbits, endian, signed)
|
||||||
|
pack_str = "[#{words}].pack('#{pack_directive}')"
|
||||||
|
|
||||||
|
if need_signed_conversion_code?(nbits, signed)
|
||||||
|
"#{create_int2uint_code(nbits)} ; #{pack_str}"
|
||||||
|
else
|
||||||
|
pack_str
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def val_as_packed_words(nbits, endian, signed)
|
||||||
|
nwords = nbits / bits_per_word(nbits)
|
||||||
|
mask = (1 << bits_per_word(nbits)) - 1
|
||||||
|
|
||||||
|
vals = (0...nwords).collect { |i| "val >> #{bits_per_word(nbits) * i}" }
|
||||||
|
vals[0] = vals[0].sub(/ >> 0\b/, "") # Remove " >> 0" for optimisation
|
||||||
|
vals.reverse! if (endian == :big)
|
||||||
|
|
||||||
|
vals = vals.collect { |val| "#{val} & #{mask}" } # TODO: "& mask" is needed to work around jruby bug. Remove this line when fixed.
|
||||||
|
vals.join(",")
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_int2uint_code(nbits)
|
||||||
|
"val &= #{(1 << nbits) - 1}"
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_uint2int_code(nbits)
|
||||||
|
"(val >= #{1 << (nbits - 1)}) ? val - #{1 << nbits} : val"
|
||||||
|
end
|
||||||
|
|
||||||
|
def bits_per_word(nbits)
|
||||||
|
(nbits % 64).zero? ? 64 :
|
||||||
|
(nbits % 32).zero? ? 32 :
|
||||||
|
(nbits % 16).zero? ? 16 :
|
||||||
|
8
|
||||||
|
end
|
||||||
|
|
||||||
|
def pack_directive(nbits, endian, signed)
|
||||||
|
nwords = nbits / bits_per_word(nbits)
|
||||||
|
|
||||||
|
directives = { 8 => "C", 16 => "S", 32 => "L", 64 => "Q" }
|
||||||
|
|
||||||
|
d = directives[bits_per_word(nbits)]
|
||||||
|
d += ((endian == :big) ? ">" : "<") unless d == "C"
|
||||||
|
|
||||||
|
if signed == :signed && directives.key?(nbits)
|
||||||
|
(d * nwords).downcase
|
||||||
|
else
|
||||||
|
d * nwords
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def need_signed_conversion_code?(nbits, signed)
|
||||||
|
signed == :signed && ![64, 32, 16].include?(nbits)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
# Unsigned 1 byte integer.
|
||||||
|
class Uint8 < BinData::BasePrimitive
|
||||||
|
Int.define_methods(self, 8, :little, :unsigned)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Signed 1 byte integer.
|
||||||
|
class Int8 < BinData::BasePrimitive
|
||||||
|
Int.define_methods(self, 8, :little, :signed)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Create classes on demand
|
||||||
|
module IntFactory
|
||||||
|
def const_missing(name)
|
||||||
|
mappings = {
|
||||||
|
/^Uint(\d+)be$/ => [:big, :unsigned],
|
||||||
|
/^Uint(\d+)le$/ => [:little, :unsigned],
|
||||||
|
/^Int(\d+)be$/ => [:big, :signed],
|
||||||
|
/^Int(\d+)le$/ => [:little, :signed],
|
||||||
|
}
|
||||||
|
|
||||||
|
mappings.each_pair do |regex, args|
|
||||||
|
if regex =~ name.to_s
|
||||||
|
nbits = $1.to_i
|
||||||
|
if nbits > 0 && (nbits % 8).zero?
|
||||||
|
return Int.define_class(name, nbits, *args)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
super
|
||||||
|
end
|
||||||
|
end
|
||||||
|
BinData.extend IntFactory
|
||||||
|
end
|
||||||
496
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/io.rb
vendored
Normal file
496
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/io.rb
vendored
Normal file
@ -0,0 +1,496 @@
|
|||||||
|
require 'stringio'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A wrapper around an IO object. The wrapper provides a consistent
|
||||||
|
# interface for BinData objects to use when accessing the IO.
|
||||||
|
module IO
|
||||||
|
|
||||||
|
# Common operations for both Read and Write.
|
||||||
|
module Common
|
||||||
|
def initialize(io)
|
||||||
|
if self.class === io
|
||||||
|
raise ArgumentError, "io must not be a #{self.class}"
|
||||||
|
end
|
||||||
|
|
||||||
|
# wrap strings in a StringIO
|
||||||
|
if io.respond_to?(:to_str)
|
||||||
|
io = BinData::IO.create_string_io(io.to_str)
|
||||||
|
end
|
||||||
|
|
||||||
|
@raw_io = io
|
||||||
|
@buffer_end_points = nil
|
||||||
|
|
||||||
|
extend seekable? ? SeekableStream : UnSeekableStream
|
||||||
|
stream_init
|
||||||
|
end
|
||||||
|
|
||||||
|
#-------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def seekable?
|
||||||
|
@raw_io.pos
|
||||||
|
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
|
||||||
|
nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def seek(n)
|
||||||
|
seek_raw(buffer_limited_n(n))
|
||||||
|
end
|
||||||
|
|
||||||
|
def buffer_limited_n(n)
|
||||||
|
if @buffer_end_points
|
||||||
|
if n.nil? || n > 0
|
||||||
|
max = @buffer_end_points[1] - offset
|
||||||
|
n = max if n.nil? || n > max
|
||||||
|
else
|
||||||
|
min = @buffer_end_points[0] - offset
|
||||||
|
n = min if n < min
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
n
|
||||||
|
end
|
||||||
|
|
||||||
|
def with_buffer_common(n)
|
||||||
|
prev = @buffer_end_points
|
||||||
|
if prev
|
||||||
|
avail = prev[1] - offset
|
||||||
|
n = avail if n > avail
|
||||||
|
end
|
||||||
|
@buffer_end_points = [offset, offset + n]
|
||||||
|
begin
|
||||||
|
yield(*@buffer_end_points)
|
||||||
|
ensure
|
||||||
|
@buffer_end_points = prev
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Use #seek and #pos on seekable streams
|
||||||
|
module SeekableStream
|
||||||
|
# The number of bytes remaining in the input stream.
|
||||||
|
def num_bytes_remaining
|
||||||
|
start_mark = @raw_io.pos
|
||||||
|
@raw_io.seek(0, ::IO::SEEK_END)
|
||||||
|
end_mark = @raw_io.pos
|
||||||
|
|
||||||
|
if @buffer_end_points
|
||||||
|
if @buffer_end_points[1] < end_mark
|
||||||
|
end_mark = @buffer_end_points[1]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
bytes_remaining = end_mark - start_mark
|
||||||
|
@raw_io.seek(start_mark, ::IO::SEEK_SET)
|
||||||
|
|
||||||
|
bytes_remaining
|
||||||
|
end
|
||||||
|
|
||||||
|
# All io calls in +block+ are rolled back after this
|
||||||
|
# method completes.
|
||||||
|
def with_readahead
|
||||||
|
mark = @raw_io.pos
|
||||||
|
begin
|
||||||
|
yield
|
||||||
|
ensure
|
||||||
|
@raw_io.seek(mark, ::IO::SEEK_SET)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
#-----------
|
||||||
|
private
|
||||||
|
|
||||||
|
def stream_init
|
||||||
|
@initial_pos = @raw_io.pos
|
||||||
|
end
|
||||||
|
|
||||||
|
def offset_raw
|
||||||
|
@raw_io.pos - @initial_pos
|
||||||
|
end
|
||||||
|
|
||||||
|
def seek_raw(n)
|
||||||
|
@raw_io.seek(n, ::IO::SEEK_CUR)
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_raw(n)
|
||||||
|
@raw_io.read(n)
|
||||||
|
end
|
||||||
|
|
||||||
|
def write_raw(data)
|
||||||
|
@raw_io.write(data)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Manually keep track of offset for unseekable streams.
|
||||||
|
module UnSeekableStream
|
||||||
|
def offset_raw
|
||||||
|
@offset
|
||||||
|
end
|
||||||
|
|
||||||
|
# The number of bytes remaining in the input stream.
|
||||||
|
def num_bytes_remaining
|
||||||
|
raise IOError, "stream is unseekable"
|
||||||
|
end
|
||||||
|
|
||||||
|
# All io calls in +block+ are rolled back after this
|
||||||
|
# method completes.
|
||||||
|
def with_readahead
|
||||||
|
mark = @offset
|
||||||
|
@read_data = ""
|
||||||
|
@in_readahead = true
|
||||||
|
|
||||||
|
class << self
|
||||||
|
alias_method :read_raw_without_readahead, :read_raw
|
||||||
|
alias_method :read_raw, :read_raw_with_readahead
|
||||||
|
end
|
||||||
|
|
||||||
|
begin
|
||||||
|
yield
|
||||||
|
ensure
|
||||||
|
@offset = mark
|
||||||
|
@in_readahead = false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
#-----------
|
||||||
|
private
|
||||||
|
|
||||||
|
def stream_init
|
||||||
|
@offset = 0
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_raw(n)
|
||||||
|
data = @raw_io.read(n)
|
||||||
|
@offset += data.size if data
|
||||||
|
data
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_raw_with_readahead(n)
|
||||||
|
data = ""
|
||||||
|
|
||||||
|
unless @read_data.empty? || @in_readahead
|
||||||
|
bytes_to_consume = [n, @read_data.length].min
|
||||||
|
data += @read_data.slice!(0, bytes_to_consume)
|
||||||
|
n -= bytes_to_consume
|
||||||
|
|
||||||
|
if @read_data.empty?
|
||||||
|
class << self
|
||||||
|
alias_method :read_raw, :read_raw_without_readahead
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
raw_data = @raw_io.read(n)
|
||||||
|
data += raw_data if raw_data
|
||||||
|
|
||||||
|
if @in_readahead
|
||||||
|
@read_data += data
|
||||||
|
end
|
||||||
|
|
||||||
|
@offset += data.size
|
||||||
|
|
||||||
|
data
|
||||||
|
end
|
||||||
|
|
||||||
|
def write_raw(data)
|
||||||
|
@offset += data.size
|
||||||
|
@raw_io.write(data)
|
||||||
|
end
|
||||||
|
|
||||||
|
def seek_raw(n)
|
||||||
|
raise IOError, "stream is unseekable" if n < 0
|
||||||
|
|
||||||
|
# NOTE: how do we seek on a writable stream?
|
||||||
|
|
||||||
|
# skip over data in 8k blocks
|
||||||
|
while n > 0
|
||||||
|
bytes_to_read = [n, 8192].min
|
||||||
|
read_raw(bytes_to_read)
|
||||||
|
n -= bytes_to_read
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Creates a StringIO around +str+.
|
||||||
|
def self.create_string_io(str = "")
|
||||||
|
s = StringIO.new(str.dup.force_encoding(Encoding::BINARY))
|
||||||
|
s.binmode
|
||||||
|
s
|
||||||
|
end
|
||||||
|
|
||||||
|
# Create a new IO Read wrapper around +io+. +io+ must provide #read,
|
||||||
|
# #pos if reading the current stream position and #seek if setting the
|
||||||
|
# current stream position. If +io+ is a string it will be automatically
|
||||||
|
# wrapped in a StringIO object.
|
||||||
|
#
|
||||||
|
# The IO can handle bitstreams in either big or little endian format.
|
||||||
|
#
|
||||||
|
# M byte1 L M byte2 L
|
||||||
|
# S 76543210 S S fedcba98 S
|
||||||
|
# B B B B
|
||||||
|
#
|
||||||
|
# In big endian format:
|
||||||
|
# readbits(6), readbits(5) #=> [765432, 10fed]
|
||||||
|
#
|
||||||
|
# In little endian format:
|
||||||
|
# readbits(6), readbits(5) #=> [543210, a9876]
|
||||||
|
#
|
||||||
|
class Read
|
||||||
|
include Common
|
||||||
|
|
||||||
|
def initialize(io)
|
||||||
|
super(io)
|
||||||
|
|
||||||
|
# bits when reading
|
||||||
|
@rnbits = 0
|
||||||
|
@rval = 0
|
||||||
|
@rendian = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
# Sets a buffer of +n+ bytes on the io stream. Any reading or seeking
|
||||||
|
# calls inside the +block+ will be contained within this buffer.
|
||||||
|
def with_buffer(n)
|
||||||
|
with_buffer_common(n) do
|
||||||
|
yield
|
||||||
|
read
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the current offset of the io stream. Offset will be rounded
|
||||||
|
# up when reading bitfields.
|
||||||
|
def offset
|
||||||
|
offset_raw
|
||||||
|
end
|
||||||
|
|
||||||
|
# Seek +n+ bytes from the current position in the io stream.
|
||||||
|
def seekbytes(n)
|
||||||
|
reset_read_bits
|
||||||
|
seek(n)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Reads exactly +n+ bytes from +io+.
|
||||||
|
#
|
||||||
|
# If the data read is nil an EOFError is raised.
|
||||||
|
#
|
||||||
|
# If the data read is too short an IOError is raised.
|
||||||
|
def readbytes(n)
|
||||||
|
reset_read_bits
|
||||||
|
read(n)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Reads all remaining bytes from the stream.
|
||||||
|
def read_all_bytes
|
||||||
|
reset_read_bits
|
||||||
|
read
|
||||||
|
end
|
||||||
|
|
||||||
|
# Reads exactly +nbits+ bits from the stream. +endian+ specifies whether
|
||||||
|
# the bits are stored in +:big+ or +:little+ endian format.
|
||||||
|
def readbits(nbits, endian)
|
||||||
|
if @rendian != endian
|
||||||
|
# don't mix bits of differing endian
|
||||||
|
reset_read_bits
|
||||||
|
@rendian = endian
|
||||||
|
end
|
||||||
|
|
||||||
|
if endian == :big
|
||||||
|
read_big_endian_bits(nbits)
|
||||||
|
else
|
||||||
|
read_little_endian_bits(nbits)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Discards any read bits so the stream becomes aligned at the
|
||||||
|
# next byte boundary.
|
||||||
|
def reset_read_bits
|
||||||
|
@rnbits = 0
|
||||||
|
@rval = 0
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def read(n = nil)
|
||||||
|
str = read_raw(buffer_limited_n(n))
|
||||||
|
if n
|
||||||
|
raise EOFError, "End of file reached" if str.nil?
|
||||||
|
raise IOError, "data truncated" if str.size < n
|
||||||
|
end
|
||||||
|
str
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_big_endian_bits(nbits)
|
||||||
|
while @rnbits < nbits
|
||||||
|
accumulate_big_endian_bits
|
||||||
|
end
|
||||||
|
|
||||||
|
val = (@rval >> (@rnbits - nbits)) & mask(nbits)
|
||||||
|
@rnbits -= nbits
|
||||||
|
@rval &= mask(@rnbits)
|
||||||
|
|
||||||
|
val
|
||||||
|
end
|
||||||
|
|
||||||
|
def accumulate_big_endian_bits
|
||||||
|
byte = read(1).unpack('C').at(0) & 0xff
|
||||||
|
@rval = (@rval << 8) | byte
|
||||||
|
@rnbits += 8
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_little_endian_bits(nbits)
|
||||||
|
while @rnbits < nbits
|
||||||
|
accumulate_little_endian_bits
|
||||||
|
end
|
||||||
|
|
||||||
|
val = @rval & mask(nbits)
|
||||||
|
@rnbits -= nbits
|
||||||
|
@rval >>= nbits
|
||||||
|
|
||||||
|
val
|
||||||
|
end
|
||||||
|
|
||||||
|
def accumulate_little_endian_bits
|
||||||
|
byte = read(1).unpack('C').at(0) & 0xff
|
||||||
|
@rval = @rval | (byte << @rnbits)
|
||||||
|
@rnbits += 8
|
||||||
|
end
|
||||||
|
|
||||||
|
def mask(nbits)
|
||||||
|
(1 << nbits) - 1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Create a new IO Write wrapper around +io+. +io+ must provide #write.
|
||||||
|
# If +io+ is a string it will be automatically wrapped in a StringIO
|
||||||
|
# object.
|
||||||
|
#
|
||||||
|
# The IO can handle bitstreams in either big or little endian format.
|
||||||
|
#
|
||||||
|
# See IO::Read for more information.
|
||||||
|
class Write
|
||||||
|
include Common
|
||||||
|
def initialize(io)
|
||||||
|
super(io)
|
||||||
|
|
||||||
|
@wnbits = 0
|
||||||
|
@wval = 0
|
||||||
|
@wendian = nil
|
||||||
|
end
|
||||||
|
|
||||||
|
# Sets a buffer of +n+ bytes on the io stream. Any writes inside the
|
||||||
|
# +block+ will be contained within this buffer. If less than +n+ bytes
|
||||||
|
# are written inside the block, the remainder will be padded with '\0'
|
||||||
|
# bytes.
|
||||||
|
def with_buffer(n)
|
||||||
|
with_buffer_common(n) do |_buf_start, buf_end|
|
||||||
|
yield
|
||||||
|
write("\0" * (buf_end - offset))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the current offset of the io stream. Offset will be rounded
|
||||||
|
# up when writing bitfields.
|
||||||
|
def offset
|
||||||
|
offset_raw + (@wnbits > 0 ? 1 : 0)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Seek +n+ bytes from the current position in the io stream.
|
||||||
|
def seekbytes(n)
|
||||||
|
flushbits
|
||||||
|
seek(n)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Writes the given string of bytes to the io stream.
|
||||||
|
def writebytes(str)
|
||||||
|
flushbits
|
||||||
|
write(str)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Writes +nbits+ bits from +val+ to the stream. +endian+ specifies whether
|
||||||
|
# the bits are to be stored in +:big+ or +:little+ endian format.
|
||||||
|
def writebits(val, nbits, endian)
|
||||||
|
if @wendian != endian
|
||||||
|
# don't mix bits of differing endian
|
||||||
|
flushbits
|
||||||
|
@wendian = endian
|
||||||
|
end
|
||||||
|
|
||||||
|
clamped_val = val & mask(nbits)
|
||||||
|
|
||||||
|
if endian == :big
|
||||||
|
write_big_endian_bits(clamped_val, nbits)
|
||||||
|
else
|
||||||
|
write_little_endian_bits(clamped_val, nbits)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# To be called after all +writebits+ have been applied.
|
||||||
|
def flushbits
|
||||||
|
raise "Internal state error nbits = #{@wnbits}" if @wnbits >= 8
|
||||||
|
|
||||||
|
if @wnbits > 0
|
||||||
|
writebits(0, 8 - @wnbits, @wendian)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
alias flush flushbits
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def write(data)
|
||||||
|
n = buffer_limited_n(data.size)
|
||||||
|
if n < data.size
|
||||||
|
data = data[0, n]
|
||||||
|
end
|
||||||
|
|
||||||
|
write_raw(data)
|
||||||
|
end
|
||||||
|
|
||||||
|
def write_big_endian_bits(val, nbits)
|
||||||
|
while nbits > 0
|
||||||
|
bits_req = 8 - @wnbits
|
||||||
|
if nbits >= bits_req
|
||||||
|
msb_bits = (val >> (nbits - bits_req)) & mask(bits_req)
|
||||||
|
nbits -= bits_req
|
||||||
|
val &= mask(nbits)
|
||||||
|
|
||||||
|
@wval = (@wval << bits_req) | msb_bits
|
||||||
|
write(@wval.chr)
|
||||||
|
|
||||||
|
@wval = 0
|
||||||
|
@wnbits = 0
|
||||||
|
else
|
||||||
|
@wval = (@wval << nbits) | val
|
||||||
|
@wnbits += nbits
|
||||||
|
nbits = 0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def write_little_endian_bits(val, nbits)
|
||||||
|
while nbits > 0
|
||||||
|
bits_req = 8 - @wnbits
|
||||||
|
if nbits >= bits_req
|
||||||
|
lsb_bits = val & mask(bits_req)
|
||||||
|
nbits -= bits_req
|
||||||
|
val >>= bits_req
|
||||||
|
|
||||||
|
@wval = @wval | (lsb_bits << @wnbits)
|
||||||
|
write(@wval.chr)
|
||||||
|
|
||||||
|
@wval = 0
|
||||||
|
@wnbits = 0
|
||||||
|
else
|
||||||
|
@wval = @wval | (val << @wnbits)
|
||||||
|
@wnbits += nbits
|
||||||
|
nbits = 0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def mask(nbits)
|
||||||
|
(1 << nbits) - 1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
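BinData::IO::Read is the internal wrapper that data objects drive; it is not normally called directly. A small sketch of the big-endian bit-reading behaviour described in the comments above:

require 'bindata'

io = BinData::IO::Read.new("\x76\x54")  # bytes 0b01110110, 0b01010100
io.readbits(6, :big)   #=> 29 (0b011101, the six most significant bits)
io.readbits(5, :big)   #=> 18 (0b10010, crossing into the second byte)
io.reset_read_bits     # discard the leftover bits; the next read is byte aligned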
109
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/lazy.rb
vendored
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
module BinData
|
||||||
|
# A LazyEvaluator is bound to a data object. The evaluator will evaluate
|
||||||
|
# lambdas in the context of this data object. These lambdas
|
||||||
|
# are those that are passed to data objects as parameters, e.g.:
|
||||||
|
#
|
||||||
|
# BinData::String.new(value: -> { %w(a test message).join(" ") })
|
||||||
|
#
|
||||||
|
# As a shortcut, :foo is the equivalent of lambda { foo }.
|
||||||
|
#
|
||||||
|
# When evaluating lambdas, unknown methods are resolved in the context of the
|
||||||
|
# parent of the bound data object. Resolution is attempted firstly as keys
|
||||||
|
# in #parameters, and secondly as methods in this parent. This
|
||||||
|
# resolution propagates up the chain of parent data objects.
|
||||||
|
#
|
||||||
|
# An evaluation will recurse until it returns a result that is not
|
||||||
|
# a lambda or a symbol.
|
||||||
|
#
|
||||||
|
# This resolution process makes the lambda easier to read as we just write
|
||||||
|
# <tt>field</tt> instead of <tt>obj.field</tt>.
|
||||||
|
class LazyEvaluator
|
||||||
|
|
||||||
|
# Creates a new evaluator. All lazy evaluation is performed in the
|
||||||
|
# context of +obj+.
|
||||||
|
def initialize(obj)
|
||||||
|
@obj = obj
|
||||||
|
end
|
||||||
|
|
||||||
|
def lazy_eval(val, overrides = nil)
|
||||||
|
@overrides = overrides if overrides
|
||||||
|
if val.is_a? Symbol
|
||||||
|
__send__(val)
|
||||||
|
elsif callable?(val)
|
||||||
|
instance_exec(&val)
|
||||||
|
else
|
||||||
|
val
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns a LazyEvaluator for the parent of this data object.
|
||||||
|
def parent
|
||||||
|
if @obj.parent
|
||||||
|
@obj.parent.lazy_evaluator
|
||||||
|
else
|
||||||
|
nil
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Returns the index of this data object inside its nearest container
|
||||||
|
# array.
|
||||||
|
def index
|
||||||
|
return @overrides[:index] if defined?(@overrides) && @overrides.key?(:index)
|
||||||
|
|
||||||
|
child = @obj
|
||||||
|
parent = @obj.parent
|
||||||
|
while parent
|
||||||
|
if parent.respond_to?(:find_index_of)
|
||||||
|
return parent.find_index_of(child)
|
||||||
|
end
|
||||||
|
child = parent
|
||||||
|
parent = parent.parent
|
||||||
|
end
|
||||||
|
raise NoMethodError, "no index found"
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args)
|
||||||
|
return @overrides[symbol] if defined?(@overrides) && @overrides.key?(symbol)
|
||||||
|
|
||||||
|
if @obj.parent
|
||||||
|
eval_symbol_in_parent_context(symbol, args)
|
||||||
|
else
|
||||||
|
super
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def eval_symbol_in_parent_context(symbol, args)
|
||||||
|
result = resolve_symbol_in_parent_context(symbol, args)
|
||||||
|
recursively_eval(result, args)
|
||||||
|
end
|
||||||
|
|
||||||
|
def resolve_symbol_in_parent_context(symbol, args)
|
||||||
|
obj_parent = @obj.parent
|
||||||
|
|
||||||
|
if obj_parent.has_parameter?(symbol)
|
||||||
|
obj_parent.get_parameter(symbol)
|
||||||
|
elsif obj_parent.safe_respond_to?(symbol, true)
|
||||||
|
obj_parent.__send__(symbol, *args)
|
||||||
|
else
|
||||||
|
symbol
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def recursively_eval(val, args)
|
||||||
|
if val.is_a?(Symbol)
|
||||||
|
parent.__send__(val, *args)
|
||||||
|
elsif callable?(val)
|
||||||
|
parent.instance_exec(&val)
|
||||||
|
else
|
||||||
|
val
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def callable?(obj)
|
||||||
|
Proc === obj || Method === obj || UnboundMethod === obj
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
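This lazy evaluation is what lets field parameters refer to sibling fields by symbol or lambda. A short sketch (the Packet record is illustrative, not part of the gem):

require 'bindata'

class Packet < BinData::Record
  uint8  :len, value: -> { payload.length }  # lambda, resolved against the parent
  string :payload, read_length: :len         # symbol, equivalent to -> { len }
end

pkt = Packet.new
pkt.payload = "abc"
pkt.len          #=> 3
pkt.to_binary_s  #=> "\x03abc"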
28
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/name.rb
vendored
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
module BinData
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These parameters are:
|
||||||
|
#
|
||||||
|
# <tt>:name</tt>:: The name that this object can be referred to may be
|
||||||
|
# set explicitly. This is only useful when dynamically
|
||||||
|
# generating types.
|
||||||
|
# <code><pre>
|
||||||
|
# BinData::Struct.new(name: :my_struct, fields: ...)
|
||||||
|
# array = BinData::Array.new(type: :my_struct)
|
||||||
|
# </pre></code>
|
||||||
|
module RegisterNamePlugin
|
||||||
|
|
||||||
|
def self.included(base) #:nodoc:
|
||||||
|
# The registered name may be provided explicitly.
|
||||||
|
base.optional_parameter :name
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
if has_parameter?(:name)
|
||||||
|
RegisteredClasses.register(get_parameter(:name), self)
|
||||||
|
end
|
||||||
|
super
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
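Following the comment above, a sketch of registering a dynamically built struct under an explicit name and then referring to it by that name (the :point type is illustrative):

require 'bindata'

point = BinData::Struct.new(name:   :point,
                            fields: [[:uint8, :x], [:uint8, :y]])

points = BinData::Array.new(type: :point, initial_length: 2)
points.read("\x01\x02\x03\x04")
points[1].y  #=> 4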
94
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/offset.rb
vendored
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
module BinData
|
||||||
|
# WARNING: THIS IS UNSUPPORTED!!
|
||||||
|
#
|
||||||
|
# This was a (failed) experimental feature that allowed seeking within the
|
||||||
|
# input stream. It remains here for backwards compatibility for the few
|
||||||
|
# people that used it.
|
||||||
|
#
|
||||||
|
# The official way to skip around the stream is to use BinData::Skip with
|
||||||
|
# the `:to_abs_offset` parameter.
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Parameters may be provided at initialisation to control the behaviour of
|
||||||
|
# an object. These parameters are:
|
||||||
|
#
|
||||||
|
# [<tt>:check_offset</tt>] Raise an error if the current IO offset doesn't
|
||||||
|
# meet this criterion. A boolean return indicates
|
||||||
|
# success or failure. Any other return is compared
|
||||||
|
# to the current offset. The variable +offset+
|
||||||
|
# is made available to any lambda assigned to
|
||||||
|
# this parameter. This parameter is only checked
|
||||||
|
# before reading.
|
||||||
|
# [<tt>:adjust_offset</tt>] Ensures that the current IO offset is at this
|
||||||
|
# position before reading. This is like
|
||||||
|
# <tt>:check_offset</tt>, except that it will
|
||||||
|
# adjust the IO offset instead of raising an error.
|
||||||
|
module CheckOrAdjustOffsetPlugin
|
||||||
|
|
||||||
|
def self.included(base) #:nodoc:
|
||||||
|
base.optional_parameters :check_offset, :adjust_offset
|
||||||
|
base.mutually_exclusive_parameters :check_offset, :adjust_offset
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
extend CheckOffsetMixin if has_parameter?(:check_offset)
|
||||||
|
extend AdjustOffsetMixin if has_parameter?(:adjust_offset)
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
module CheckOffsetMixin
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
check_offset(io)
|
||||||
|
super(io)
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def check_offset(io)
|
||||||
|
actual_offset = io.offset
|
||||||
|
expected = eval_parameter(:check_offset, offset: actual_offset)
|
||||||
|
|
||||||
|
if !expected
|
||||||
|
raise ValidityError, "offset not as expected for #{debug_name}"
|
||||||
|
elsif actual_offset != expected && expected != true
|
||||||
|
raise ValidityError,
|
||||||
|
"offset is '#{actual_offset}' but " +
|
||||||
|
"expected '#{expected}' for #{debug_name}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
module AdjustOffsetMixin
|
||||||
|
def do_read(io) #:nodoc:
|
||||||
|
adjust_offset(io)
|
||||||
|
super(io)
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def adjust_offset(io)
|
||||||
|
actual_offset = io.offset
|
||||||
|
expected = eval_parameter(:adjust_offset)
|
||||||
|
if actual_offset != expected
|
||||||
|
begin
|
||||||
|
seek = expected - actual_offset
|
||||||
|
io.seekbytes(seek)
|
||||||
|
warn "adjusting stream position by #{seek} bytes" if $VERBOSE
|
||||||
|
rescue
|
||||||
|
raise ValidityError,
|
||||||
|
"offset is '#{actual_offset}' but couldn't seek to " +
|
||||||
|
"expected '#{expected}' for #{debug_name}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Add these offset options to Base
|
||||||
|
class Base
|
||||||
|
include CheckOrAdjustOffsetPlugin
|
||||||
|
end
|
||||||
|
end
|
||||||
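Since the plugin above is unsupported, the documented alternative is BinData::Skip with :to_abs_offset. A minimal sketch (the Header record and its fields are illustrative):

require 'bindata'

class Header < BinData::Record
  string   :magic, read_length: 4
  skip     to_abs_offset: 16      # seek (or zero-pad on write) to absolute offset 16
  uint32le :body_len
end

h = Header.read("MAGI" + "\x00" * 12 + "\x2A\x00\x00\x00")
h.body_len  #=> 42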
128
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/params.rb
vendored
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
require 'bindata/lazy'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
module AcceptedParametersPlugin
|
||||||
|
# Mandatory parameters must be present when instantiating a data object.
|
||||||
|
def mandatory_parameters(*args)
|
||||||
|
accepted_parameters.mandatory(*args)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Optional parameters may be present when instantiating a data object.
|
||||||
|
def optional_parameters(*args)
|
||||||
|
accepted_parameters.optional(*args)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Default parameters can be overridden when instantiating a data object.
|
||||||
|
def default_parameters(*args)
|
||||||
|
accepted_parameters.default(*args)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Mutually exclusive parameters may not all be present when
|
||||||
|
# instantiating a data object.
|
||||||
|
def mutually_exclusive_parameters(*args)
|
||||||
|
accepted_parameters.mutually_exclusive(*args)
|
||||||
|
end
|
||||||
|
|
||||||
|
alias mandatory_parameter mandatory_parameters
|
||||||
|
alias optional_parameter optional_parameters
|
||||||
|
alias default_parameter default_parameters
|
||||||
|
|
||||||
|
def accepted_parameters #:nodoc:
|
||||||
|
@accepted_parameters ||= begin
|
||||||
|
ancestor_params = superclass.respond_to?(:accepted_parameters) ?
|
||||||
|
superclass.accepted_parameters : nil
|
||||||
|
AcceptedParameters.new(ancestor_params)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# BinData objects accept parameters when initializing. AcceptedParameters
|
||||||
|
# allow a BinData class to declaratively identify accepted parameters as
|
||||||
|
# mandatory, optional, default or mutually exclusive.
|
||||||
|
class AcceptedParameters
|
||||||
|
def initialize(ancestor_parameters = nil)
|
||||||
|
if ancestor_parameters
|
||||||
|
@mandatory = ancestor_parameters.mandatory.dup
|
||||||
|
@optional = ancestor_parameters.optional.dup
|
||||||
|
@default = ancestor_parameters.default.dup
|
||||||
|
@mutually_exclusive = ancestor_parameters.mutually_exclusive.dup
|
||||||
|
else
|
||||||
|
@mandatory = []
|
||||||
|
@optional = []
|
||||||
|
@default = Hash.new
|
||||||
|
@mutually_exclusive = []
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def mandatory(*args)
|
||||||
|
unless args.empty?
|
||||||
|
@mandatory.concat(to_syms(args))
|
||||||
|
@mandatory.uniq!
|
||||||
|
end
|
||||||
|
@mandatory
|
||||||
|
end
|
||||||
|
|
||||||
|
def optional(*args)
|
||||||
|
unless args.empty?
|
||||||
|
@optional.concat(to_syms(args))
|
||||||
|
@optional.uniq!
|
||||||
|
end
|
||||||
|
@optional
|
||||||
|
end
|
||||||
|
|
||||||
|
def default(args = nil)
|
||||||
|
if args
|
||||||
|
to_syms(args.keys) # call for side effect of validating names
|
||||||
|
args.each_pair do |param, value|
|
||||||
|
@default[param.to_sym] = value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
@default
|
||||||
|
end
|
||||||
|
|
||||||
|
def mutually_exclusive(*args)
|
||||||
|
arg1 = args.shift
|
||||||
|
until args.empty?
|
||||||
|
args.each do |arg2|
|
||||||
|
@mutually_exclusive.push([arg1.to_sym, arg2.to_sym])
|
||||||
|
@mutually_exclusive.uniq!
|
||||||
|
end
|
||||||
|
arg1 = args.shift
|
||||||
|
end
|
||||||
|
@mutually_exclusive
|
||||||
|
end
|
||||||
|
|
||||||
|
def all
|
||||||
|
(@mandatory + @optional + @default.keys).uniq
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def to_syms(args)
|
||||||
|
syms = args.collect(&:to_sym)
|
||||||
|
ensure_valid_names(syms)
|
||||||
|
syms
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_valid_names(names)
|
||||||
|
invalid_names = self.class.invalid_parameter_names
|
||||||
|
names.each do |name|
|
||||||
|
if invalid_names.include?(name)
|
||||||
|
raise NameError.new("Rename parameter '#{name}' " \
|
||||||
|
"as it shadows an existing method.", name)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def self.invalid_parameter_names
|
||||||
|
@invalid_names ||= begin
|
||||||
|
all_names = LazyEvaluator.instance_methods(true)
|
||||||
|
allowed_names = [:name, :type]
|
||||||
|
invalid_names = (all_names - allowed_names).uniq
|
||||||
|
|
||||||
|
Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
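These declarations are how custom types describe their parameters. A sketch of a primitive using them (FixedString and its behaviour are illustrative, not part of the gem):

require 'bindata'

class FixedString < BinData::BasePrimitive
  mandatory_parameter :length
  default_parameter   pad_byte: "\0"

  private

  def value_to_binary_string(val)
    len = eval_parameter(:length)
    val.to_s.ljust(len, eval_parameter(:pad_byte))[0, len]
  end

  def read_and_return_value(io)
    io.readbytes(eval_parameter(:length))
  end

  def sensible_default
    ""
  end
end

FixedString.new(length: 4).to_binary_s  #=> "\x00\x00\x00\x00"
# FixedString.new  # would raise ArgumentError: parameter 'length' must be specified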
143
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/primitive.rb
vendored
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
require 'bindata/base_primitive'
|
||||||
|
require 'bindata/dsl'
|
||||||
|
require 'bindata/struct'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A Primitive is a declarative way to define a new BinData data type.
|
||||||
|
# The data type must contain a primitive value only, i.e. numbers or strings.
|
||||||
|
# For new data types that contain multiple values see BinData::Record.
|
||||||
|
#
|
||||||
|
# To define a new data type, set fields as if for Record and add a
|
||||||
|
# #get and #set method to extract / convert the data between the fields
|
||||||
|
# and the #value of the object.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# class PascalString < BinData::Primitive
|
||||||
|
# uint8 :len, value: -> { data.length }
|
||||||
|
# string :data, read_length: :len
|
||||||
|
#
|
||||||
|
# def get
|
||||||
|
# self.data
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# def set(v)
|
||||||
|
# self.data = v
|
||||||
|
# end
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# ps = PascalString.new(initial_value: "hello")
|
||||||
|
# ps.to_binary_s #=> "\005hello"
|
||||||
|
# ps.read("\003abcde")
|
||||||
|
# ps #=> "abc"
|
||||||
|
#
|
||||||
|
# # Unsigned 24 bit big endian integer
|
||||||
|
# class Uint24be < BinData::Primitive
|
||||||
|
# uint8 :byte1
|
||||||
|
# uint8 :byte2
|
||||||
|
# uint8 :byte3
|
||||||
|
#
|
||||||
|
# def get
|
||||||
|
# (self.byte1 << 16) | (self.byte2 << 8) | self.byte3
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# def set(v)
|
||||||
|
# v = 0 if v < 0
|
||||||
|
# v = 0xffffff if v > 0xffffff
|
||||||
|
#
|
||||||
|
# self.byte1 = (v >> 16) & 0xff
|
||||||
|
# self.byte2 = (v >> 8) & 0xff
|
||||||
|
# self.byte3 = v & 0xff
|
||||||
|
# end
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# u24 = Uint24be.new
|
||||||
|
# u24.read("\x12\x34\x56")
|
||||||
|
# "0x%x" % u24 #=> 0x123456
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Primitive objects accept all the parameters that BinData::BasePrimitive do.
|
||||||
|
#
|
||||||
|
class Primitive < BasePrimitive
|
||||||
|
extend DSLMixin
|
||||||
|
|
||||||
|
unregister_self
|
||||||
|
dsl_parser :primitive
|
||||||
|
arg_processor :primitive
|
||||||
|
|
||||||
|
mandatory_parameter :struct_params
|
||||||
|
|
||||||
|
def initialize_instance
|
||||||
|
super
|
||||||
|
@struct = BinData::Struct.new(get_parameter(:struct_params), self)
|
||||||
|
end
|
||||||
|
|
||||||
|
def respond_to?(symbol, include_private = false) #:nodoc:
|
||||||
|
@struct.respond_to?(symbol, include_private) || super
|
||||||
|
end
|
||||||
|
|
||||||
|
def method_missing(symbol, *args, &block) #:nodoc:
|
||||||
|
if @struct.respond_to?(symbol)
|
||||||
|
@struct.__send__(symbol, *args, &block)
|
||||||
|
else
|
||||||
|
super
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
super(val)
|
||||||
|
set(_value)
|
||||||
|
@value = get
|
||||||
|
end
|
||||||
|
|
||||||
|
def debug_name_of(child) #:nodoc:
|
||||||
|
debug_name + "-internal-"
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_write(io)
|
||||||
|
set(_value)
|
||||||
|
@struct.do_write(io)
|
||||||
|
end
|
||||||
|
|
||||||
|
def do_num_bytes
|
||||||
|
set(_value)
|
||||||
|
@struct.do_num_bytes
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
get
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
@struct.do_read(io)
|
||||||
|
get
|
||||||
|
end
|
||||||
|
|
||||||
|
###########################################################################
|
||||||
|
# To be implemented by subclasses
|
||||||
|
|
||||||
|
# Extracts the value for this data object from the fields of the
|
||||||
|
# internal struct.
|
||||||
|
def get
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# Sets the fields of the internal struct to represent +v+.
|
||||||
|
def set(v)
|
||||||
|
raise NotImplementedError
|
||||||
|
end
|
||||||
|
|
||||||
|
# To be implemented by subclasses
|
||||||
|
###########################################################################
|
||||||
|
end
|
||||||
|
|
||||||
|
class PrimitiveArgProcessor < BaseArgProcessor
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
params[:struct_params] = params.create_sanitized_params(obj_class.dsl_params, BinData::Struct)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
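A round trip for the PascalString example from the comment above (assuming that class is defined exactly as shown):

require 'bindata'

ps  = PascalString.new(initial_value: "hello world")
raw = ps.to_binary_s    #=> "\x0Bhello world"
PascalString.read(raw)  #=> "hello world"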
23
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/record.rb
vendored
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
require 'bindata/dsl'
|
||||||
|
require 'bindata/struct'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A Record is a declarative wrapper around Struct.
|
||||||
|
#
|
||||||
|
# See +Struct+ for more info.
|
||||||
|
class Record < BinData::Struct
|
||||||
|
extend DSLMixin
|
||||||
|
|
||||||
|
unregister_self
|
||||||
|
dsl_parser :struct
|
||||||
|
arg_processor :record
|
||||||
|
end
|
||||||
|
|
||||||
|
class RecordArgProcessor < StructArgProcessor
|
||||||
|
include MultiFieldArgSeparator
|
||||||
|
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
super(obj_class, params.merge!(obj_class.dsl_params))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
134
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/registry.rb
vendored
Normal file
@ -0,0 +1,134 @@
|
|||||||
|
module BinData
|
||||||
|
|
||||||
|
class UnRegisteredTypeError < StandardError ; end
|
||||||
|
|
||||||
|
# This registry contains a register of name -> class mappings.
|
||||||
|
#
|
||||||
|
# Numerics (integers and floating point numbers) have an endian property as
|
||||||
|
# part of their name (e.g. int32be, float_le).
|
||||||
|
#
|
||||||
|
# Classes can be looked up based on their full name or an abbreviated +name+
|
||||||
|
# with +hints+.
|
||||||
|
#
|
||||||
|
# There are two hints supported, :endian and :search_prefix.
|
||||||
|
#
|
||||||
|
# #lookup("int32", { endian: :big }) will return Int32Be.
|
||||||
|
#
|
||||||
|
# #lookup("my_type", { search_prefix: :ns }) will return NsMyType.
|
||||||
|
#
|
||||||
|
# Names are stored in under_score_style, not camelCase.
|
||||||
|
class Registry
|
||||||
|
|
||||||
|
def initialize
|
||||||
|
@registry = {}
|
||||||
|
end
|
||||||
|
|
||||||
|
def register(name, class_to_register)
|
||||||
|
return if name.nil? || class_to_register.nil?
|
||||||
|
|
||||||
|
formatted_name = underscore_name(name)
|
||||||
|
warn_if_name_is_already_registered(formatted_name, class_to_register)
|
||||||
|
|
||||||
|
@registry[formatted_name] = class_to_register
|
||||||
|
end
|
||||||
|
|
||||||
|
def unregister(name)
|
||||||
|
@registry.delete(underscore_name(name))
|
||||||
|
end
|
||||||
|
|
||||||
|
def lookup(name, hints = {})
|
||||||
|
the_class = @registry[normalize_name(name, hints)]
|
||||||
|
if the_class
|
||||||
|
the_class
|
||||||
|
elsif @registry[normalize_name(name, hints.merge(endian: :big))]
|
||||||
|
raise(UnRegisteredTypeError, "#{name}, do you need to specify endian?")
|
||||||
|
else
|
||||||
|
raise(UnRegisteredTypeError, name)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Convert CamelCase +name+ to underscore style.
|
||||||
|
def underscore_name(name)
|
||||||
|
name.
|
||||||
|
to_s.
|
||||||
|
sub(/.*::/, "").
|
||||||
|
gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
|
||||||
|
gsub(/([a-z\d])([A-Z])/, '\1_\2').
|
||||||
|
tr("-", "_").
|
||||||
|
downcase
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def normalize_name(name, hints)
|
||||||
|
name = underscore_name(name)
|
||||||
|
|
||||||
|
if !registered?(name)
|
||||||
|
search_prefix = [""].concat(Array(hints[:search_prefix]))
|
||||||
|
search_prefix.each do |prefix|
|
||||||
|
nwp = name_with_prefix(name, prefix)
|
||||||
|
if registered?(nwp)
|
||||||
|
name = nwp
|
||||||
|
break
|
||||||
|
end
|
||||||
|
|
||||||
|
nwe = name_with_endian(nwp, hints[:endian])
|
||||||
|
if registered?(nwe)
|
||||||
|
name = nwe
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
name
|
||||||
|
end
|
||||||
|
|
||||||
|
def name_with_prefix(name, prefix)
|
||||||
|
prefix = prefix.to_s.chomp("_")
|
||||||
|
if prefix == ""
|
||||||
|
name
|
||||||
|
else
|
||||||
|
"#{prefix}_#{name}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def name_with_endian(name, endian)
|
||||||
|
return name if endian.nil?
|
||||||
|
|
||||||
|
suffix = (endian == :little) ? "le" : "be"
|
||||||
|
if /^u?int\d+$/ =~ name
|
||||||
|
name + suffix
|
||||||
|
else
|
||||||
|
name + "_" + suffix
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def registered?(name)
|
||||||
|
register_dynamic_class(name) unless @registry.key?(name)
|
||||||
|
|
||||||
|
@registry.key?(name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def register_dynamic_class(name)
|
||||||
|
if /^u?int\d+(le|be)$/ =~ name || /^s?bit\d+(le)?$/ =~ name
|
||||||
|
class_name = name.gsub(/(?:^|_)(.)/) { $1.upcase }
|
||||||
|
begin
|
||||||
|
BinData.const_get(class_name)
|
||||||
|
rescue NameError
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def warn_if_name_is_already_registered(name, class_to_register)
|
||||||
|
prev_class = @registry[name]
|
||||||
|
if $VERBOSE && prev_class && prev_class != class_to_register
|
||||||
|
warn "warning: replacing registered class #{prev_class} " \
|
||||||
|
"with #{class_to_register}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# A singleton registry of all registered classes.
|
||||||
|
RegisteredClasses = Registry.new
|
||||||
|
end
|
||||||
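The registry is what turns the DSL symbols used elsewhere in this gem into classes. A short sketch of looking classes up directly, following the examples in the comment above:

require 'bindata'

BinData::RegisteredClasses.lookup("uint8")                    #=> BinData::Uint8
BinData::RegisteredClasses.lookup("int32", { endian: :big })  #=> BinData::Int32be
BinData::RegisteredClasses.underscore_name("MyCustomType")    #=> "my_custom_type"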
34
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/rest.rb
vendored
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
require "bindata/base_primitive"
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Rest will consume the input stream from the current position to the end of
|
||||||
|
# the stream. This will mainly be useful for debugging and developing.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# class A < BinData::Record
|
||||||
|
# string :a, read_length: 5
|
||||||
|
# rest :rest
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# obj = A.read("abcdefghij")
|
||||||
|
# obj.a #=> "abcde"
|
||||||
|
# obj.rest #=" "fghij"
|
||||||
|
#
|
||||||
|
class Rest < BinData::BasePrimitive
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
val
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
io.read_all_bytes
|
||||||
|
end
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
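Rest can also be used on its own to slurp whatever remains of a stream, for example:

require 'bindata'

BinData::Rest.read("anything left in the stream")  #=> "anything left in the stream"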
372
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/sanitize.rb
vendored
Normal file
@ -0,0 +1,372 @@
|
|||||||
|
require 'bindata/registry'
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
|
||||||
|
# Subclasses of this are sanitized
|
||||||
|
class SanitizedParameter; end
|
||||||
|
|
||||||
|
class SanitizedPrototype < SanitizedParameter
|
||||||
|
def initialize(obj_type, obj_params, hints)
|
||||||
|
raw_hints = hints.dup
|
||||||
|
if raw_hints[:endian].respond_to?(:endian)
|
||||||
|
raw_hints[:endian] = raw_hints[:endian].endian
|
||||||
|
end
|
||||||
|
obj_params ||= {}
|
||||||
|
|
||||||
|
if BinData::Base === obj_type
|
||||||
|
obj_class = obj_type
|
||||||
|
else
|
||||||
|
obj_class = RegisteredClasses.lookup(obj_type, raw_hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
if BinData::Base === obj_class
|
||||||
|
@factory = obj_class
|
||||||
|
else
|
||||||
|
@obj_class = obj_class
|
||||||
|
@obj_params = SanitizedParameters.new(obj_params, @obj_class, hints)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def has_parameter?(param)
|
||||||
|
if defined? @factory
|
||||||
|
@factory.has_parameter?(param)
|
||||||
|
else
|
||||||
|
@obj_params.has_parameter?(param)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def instantiate(value = nil, parent = nil)
|
||||||
|
@factory ||= @obj_class.new(@obj_params)
|
||||||
|
|
||||||
|
@factory.new(value, parent)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class SanitizedField < SanitizedParameter
|
||||||
|
def initialize(name, field_type, field_params, hints)
|
||||||
|
@name = name
|
||||||
|
@prototype = SanitizedPrototype.new(field_type, field_params, hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
attr_reader :prototype
|
||||||
|
|
||||||
|
def name_as_sym
|
||||||
|
@name.nil? ? nil : @name.to_sym
|
||||||
|
end
|
||||||
|
|
||||||
|
def name
|
||||||
|
@name
|
||||||
|
end
|
||||||
|
|
||||||
|
def has_parameter?(param)
|
||||||
|
@prototype.has_parameter?(param)
|
||||||
|
end
|
||||||
|
|
||||||
|
def instantiate(value = nil, parent = nil)
|
||||||
|
@prototype.instantiate(value, parent)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class SanitizedFields < SanitizedParameter
|
||||||
|
include Enumerable
|
||||||
|
|
||||||
|
def initialize(hints, base_fields = nil)
|
||||||
|
@hints = hints
|
||||||
|
if base_fields
|
||||||
|
@fields = base_fields.raw_fields
|
||||||
|
else
|
||||||
|
@fields = []
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def add_field(type, name, params)
|
||||||
|
name = nil if name == ""
|
||||||
|
|
||||||
|
@fields << SanitizedField.new(name, type, params, @hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
def raw_fields
|
||||||
|
@fields.dup
|
||||||
|
end
|
||||||
|
|
||||||
|
def [](idx)
|
||||||
|
@fields[idx]
|
||||||
|
end
|
||||||
|
|
||||||
|
def empty?
|
||||||
|
@fields.empty?
|
||||||
|
end
|
||||||
|
|
||||||
|
def length
|
||||||
|
@fields.length
|
||||||
|
end
|
||||||
|
|
||||||
|
def each(&block)
|
||||||
|
@fields.each(&block)
|
||||||
|
end
|
||||||
|
|
||||||
|
def field_names
|
||||||
|
@fields.collect(&:name_as_sym)
|
||||||
|
end
|
||||||
|
|
||||||
|
def field_name?(name)
|
||||||
|
@fields.detect { |f| f.name_as_sym == name.to_sym }
|
||||||
|
end
|
||||||
|
|
||||||
|
def all_field_names_blank?
|
||||||
|
@fields.all? { |f| f.name.nil? }
|
||||||
|
end
|
||||||
|
|
||||||
|
def no_field_names_blank?
|
||||||
|
@fields.all? { |f| f.name != nil }
|
||||||
|
end
|
||||||
|
|
||||||
|
def any_field_has_parameter?(parameter)
|
||||||
|
@fields.any? { |f| f.has_parameter?(parameter) }
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class SanitizedChoices < SanitizedParameter
|
||||||
|
def initialize(choices, hints)
|
||||||
|
@choices = {}
|
||||||
|
choices.each_pair do |key, val|
|
||||||
|
if SanitizedParameter === val
|
||||||
|
prototype = val
|
||||||
|
else
|
||||||
|
type, param = val
|
||||||
|
prototype = SanitizedPrototype.new(type, param, hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
if key == :default
|
||||||
|
@choices.default = prototype
|
||||||
|
else
|
||||||
|
@choices[key] = prototype
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def [](key)
|
||||||
|
@choices[key]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class SanitizedBigEndian < SanitizedParameter
|
||||||
|
def endian
|
||||||
|
:big
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class SanitizedLittleEndian < SanitizedParameter
|
||||||
|
def endian
|
||||||
|
:little
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# BinData objects are instantiated with parameters to determine their
|
||||||
|
# behaviour. These parameters must be sanitized to ensure their values
|
||||||
|
# are valid. When instantiating many objects with identical parameters,
|
||||||
|
# such as an array of records, there is much duplicated sanitizing.
|
||||||
|
#
|
||||||
|
# The purpose of the sanitizing code is to eliminate the duplicated
|
||||||
|
# validation.
|
||||||
|
#
|
||||||
|
# SanitizedParameters is a hash-like collection of parameters. Its purpose
|
||||||
|
# is to recursively sanitize the parameters of an entire BinData object chain
|
||||||
|
# at a single time.
|
||||||
|
class SanitizedParameters < Hash
|
||||||
|
|
||||||
|
# Memoized constants
|
||||||
|
BIG_ENDIAN = SanitizedBigEndian.new
|
||||||
|
LITTLE_ENDIAN = SanitizedLittleEndian.new
|
||||||
|
|
||||||
|
class << self
|
||||||
|
def sanitize(parameters, the_class)
|
||||||
|
if SanitizedParameters === parameters
|
||||||
|
parameters
|
||||||
|
else
|
||||||
|
SanitizedParameters.new(parameters, the_class, {})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def initialize(parameters, the_class, hints)
|
||||||
|
parameters.each_pair { |key, value| self[key.to_sym] = value }
|
||||||
|
|
||||||
|
@the_class = the_class
|
||||||
|
|
||||||
|
if hints[:endian]
|
||||||
|
self[:endian] ||= hints[:endian]
|
||||||
|
end
|
||||||
|
|
||||||
|
if hints[:search_prefix] && !hints[:search_prefix].empty?
|
||||||
|
self[:search_prefix] = Array(self[:search_prefix]).concat(Array(hints[:search_prefix]))
|
||||||
|
end
|
||||||
|
|
||||||
|
sanitize!
|
||||||
|
end
|
||||||
|
|
||||||
|
alias_method :has_parameter?, :key?
|
||||||
|
|
||||||
|
def has_at_least_one_of?(*keys)
|
||||||
|
keys.each do |key|
|
||||||
|
return true if has_parameter?(key)
|
||||||
|
end
|
||||||
|
|
||||||
|
false
|
||||||
|
end
|
||||||
|
|
||||||
|
def warn_replacement_parameter(bad_key, suggested_key)
|
||||||
|
if has_parameter?(bad_key)
|
||||||
|
Kernel.warn ":#{bad_key} is not used with #{@the_class}. " \
|
||||||
|
"You probably want to change this to :#{suggested_key}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# def warn_renamed_parameter(old_key, new_key)
|
||||||
|
# val = delete(old_key)
|
||||||
|
# if val
|
||||||
|
# self[new_key] = val
|
||||||
|
# Kernel.warn ":#{old_key} has been renamed to :#{new_key} in #{@the_class}. " \
|
||||||
|
# "Using :#{old_key} is now deprecated and will be removed in the future"
|
||||||
|
# end
|
||||||
|
# end
|
||||||
|
|
||||||
|
def must_be_integer(*keys)
|
||||||
|
keys.each do |key|
|
||||||
|
if has_parameter?(key)
|
||||||
|
parameter = self[key]
|
||||||
|
unless Symbol === parameter ||
|
||||||
|
parameter.respond_to?(:arity) ||
|
||||||
|
parameter.respond_to?(:to_int)
|
||||||
|
raise ArgumentError, "parameter '#{key}' in #{@the_class} must " \
|
||||||
|
"evaluate to an integer, got #{parameter.class}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def rename_parameter(old_key, new_key)
|
||||||
|
if has_parameter?(old_key)
|
||||||
|
self[new_key] = delete(old_key)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def sanitize_object_prototype(key)
|
||||||
|
sanitize(key) { |obj_type, obj_params| create_sanitized_object_prototype(obj_type, obj_params) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def sanitize_fields(key, &block)
|
||||||
|
sanitize(key) do |fields|
|
||||||
|
sanitized_fields = create_sanitized_fields
|
||||||
|
yield(fields, sanitized_fields)
|
||||||
|
sanitized_fields
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def sanitize_choices(key, &block)
|
||||||
|
sanitize(key) do |obj|
|
||||||
|
create_sanitized_choices(yield(obj))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def sanitize_endian(key)
|
||||||
|
sanitize(key) { |endian| create_sanitized_endian(endian) }
|
||||||
|
end
|
||||||
|
|
||||||
|
def sanitize(key, &block)
|
||||||
|
if needs_sanitizing?(key)
|
||||||
|
self[key] = yield(self[key])
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_sanitized_params(params, the_class)
|
||||||
|
SanitizedParameters.new(params, the_class, hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
def hints
|
||||||
|
{ endian: self[:endian], search_prefix: self[:search_prefix] }
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def sanitize!
|
||||||
|
ensure_no_nil_values
|
||||||
|
merge_default_parameters!
|
||||||
|
|
||||||
|
@the_class.arg_processor.sanitize_parameters!(@the_class, self)
|
||||||
|
|
||||||
|
ensure_mandatory_parameters_exist
|
||||||
|
ensure_mutual_exclusion_of_parameters
|
||||||
|
end
|
||||||
|
|
||||||
|
def needs_sanitizing?(key)
|
||||||
|
has_key?(key) && ! self[key].is_a?(SanitizedParameter)
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_no_nil_values
|
||||||
|
each do |key, value|
|
||||||
|
if value.nil?
|
||||||
|
raise ArgumentError,
|
||||||
|
"parameter '#{key}' has nil value in #{@the_class}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def merge_default_parameters!
|
||||||
|
@the_class.default_parameters.each do |key, value|
|
||||||
|
self[key] = value unless has_key?(key)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_mandatory_parameters_exist
|
||||||
|
@the_class.mandatory_parameters.each do |key|
|
||||||
|
unless has_parameter?(key)
|
||||||
|
raise ArgumentError,
|
||||||
|
"parameter '#{key}' must be specified in #{@the_class}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def ensure_mutual_exclusion_of_parameters
|
||||||
|
return if length < 2
|
||||||
|
|
||||||
|
@the_class.mutually_exclusive_parameters.each do |key1, key2|
|
||||||
|
if has_parameter?(key1) && has_parameter?(key2)
|
||||||
|
raise ArgumentError, "params '#{key1}' and '#{key2}' " \
|
||||||
|
"are mutually exclusive in #{@the_class}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_sanitized_endian(endian)
|
||||||
|
if endian == :big
|
||||||
|
BIG_ENDIAN
|
||||||
|
elsif endian == :little
|
||||||
|
LITTLE_ENDIAN
|
||||||
|
elsif endian == :big_and_little
|
||||||
|
raise ArgumentError, "endian: :big or endian: :little is required"
|
||||||
|
else
|
||||||
|
raise ArgumentError, "unknown value for endian '#{endian}'"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_sanitized_choices(choices)
|
||||||
|
SanitizedChoices.new(choices, hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_sanitized_fields
|
||||||
|
SanitizedFields.new(hints)
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_sanitized_object_prototype(obj_type, obj_params)
|
||||||
|
SanitizedPrototype.new(obj_type, obj_params, hints)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
#----------------------------------------------------------------------------
|
||||||
|
end
|
||||||
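A sketch of the intended usage pattern: sanitize a parameter set once and reuse it for many identical objects, which mirrors what SanitizedPrototype#instantiate does internally:

require 'bindata'

params = BinData::SanitizedParameters.sanitize({ length: 4, pad_byte: "." },
                                               BinData::String)
params.has_parameter?(:pad_byte)  #=> true

# Re-sanitizing is a no-op, which is the whole point:
BinData::SanitizedParameters.sanitize(params, BinData::String).equal?(params)  #=> true

a = BinData::String.new(params)
b = BinData::String.new(params)
a.assign("hi")
a.to_binary_s  #=> "hi.."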
133
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/skip.rb
vendored
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
require "bindata/base_primitive"
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# Skip will skip over bytes from the input stream. If the stream is not
|
||||||
|
# seekable, then the bytes are consumed and discarded.
|
||||||
|
#
|
||||||
|
# When writing, skip will write the appropriate number of zero bytes.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# class A < BinData::Record
|
||||||
|
# skip length: 5
|
||||||
|
# string :a, read_length: 5
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# obj = A.read("abcdefghij")
|
||||||
|
# obj.a #=> "fghij"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# class B < BinData::Record
|
||||||
|
# skip until_valid: [:string, {read_length: 2, assert: "ef"} ]
|
||||||
|
# string :b, read_length: 5
|
||||||
|
# end
|
||||||
|
#
|
||||||
|
# obj = B.read("abcdefghij")
|
||||||
|
# obj.b #=> "efghi"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# Skip objects accept all the params that BinData::BasePrimitive
|
||||||
|
# does, as well as the following:
|
||||||
|
#
|
||||||
|
# <tt>:length</tt>:: The number of bytes to skip.
|
||||||
|
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
|
||||||
|
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
|
||||||
|
# This parameter contains a type that will raise
|
||||||
|
# a BinData::ValidityError unless an acceptable byte
|
||||||
|
# sequence is found. The type is represented by a
|
||||||
|
# Symbol, or if the type is to have params
|
||||||
|
# passed to it, then it should be provided as
|
||||||
|
# <tt>[type_symbol, hash_params]</tt>.
|
||||||
|
#
|
||||||
|
class Skip < BinData::BasePrimitive
|
||||||
|
arg_processor :skip
|
||||||
|
|
||||||
|
optional_parameters :length, :to_abs_offset, :until_valid
|
||||||
|
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
extend SkipLengthPlugin if has_parameter?(:length)
|
||||||
|
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
|
||||||
|
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
len = skip_length
|
||||||
|
if len < 0
|
||||||
|
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
|
||||||
|
end
|
||||||
|
|
||||||
|
"\000" * skip_length
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
len = skip_length
|
||||||
|
if len < 0
|
||||||
|
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
|
||||||
|
end
|
||||||
|
|
||||||
|
io.seekbytes(len)
|
||||||
|
""
|
||||||
|
end
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class SkipArgProcessor < BaseArgProcessor
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
|
||||||
|
raise ArgumentError,
|
||||||
|
"#{obj_class} requires either :length, :to_abs_offset or :until_valid"
|
||||||
|
end
|
||||||
|
params.must_be_integer(:to_abs_offset, :length)
|
||||||
|
params.sanitize_object_prototype(:until_valid)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :length parameter
|
||||||
|
module SkipLengthPlugin
|
||||||
|
def skip_length
|
||||||
|
eval_parameter(:length)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :to_abs_offset parameter
|
||||||
|
module SkipToAbsOffsetPlugin
|
||||||
|
def skip_length
|
||||||
|
eval_parameter(:to_abs_offset) - abs_offset
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logic for the :until_valid parameter
|
||||||
|
module SkipUntilValidPlugin
|
||||||
|
def skip_length
|
||||||
|
# no skipping when writing
|
||||||
|
0
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
prototype = get_parameter(:until_valid)
|
||||||
|
validator = prototype.instantiate(nil, self)
|
||||||
|
|
||||||
|
valid = false
|
||||||
|
until valid
|
||||||
|
begin
|
||||||
|
io.with_readahead do
|
||||||
|
validator.read(io)
|
||||||
|
valid = true
|
||||||
|
end
|
||||||
|
rescue ValidityError
|
||||||
|
io.readbytes(1)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
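When writing, an unnamed skip emits zero bytes, which makes it handy for fixed padding. A minimal sketch (the Padded record is illustrative):

require 'bindata'

class Padded < BinData::Record
  string :tag, length: 2
  skip   length: 3
  uint8  :flags
end

rec = Padded.new
rec.tag   = "OK"
rec.flags = 7
rec.to_binary_s  #=> "OK\x00\x00\x00\x07"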
153
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/string.rb
vendored
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
require "bindata/base_primitive"
|
||||||
|
|
||||||
|
module BinData
|
||||||
|
# A String is a sequence of bytes. This is the same as strings in Ruby 1.8.
|
||||||
|
# The issue of character encoding is ignored by this class.
|
||||||
|
#
|
||||||
|
# require 'bindata'
|
||||||
|
#
|
||||||
|
# data = "abcdefghij"
|
||||||
|
#
|
||||||
|
# obj = BinData::String.new(read_length: 5)
|
||||||
|
# obj.read(data)
|
||||||
|
# obj #=> "abcde"
|
||||||
|
#
|
||||||
|
# obj = BinData::String.new(length: 6)
|
||||||
|
# obj.read(data)
|
||||||
|
# obj #=> "abcdef"
|
||||||
|
# obj.assign("abcdefghij")
|
||||||
|
# obj #=> "abcdef"
|
||||||
|
# obj.assign("abcd")
|
||||||
|
# obj #=> "abcd\000\000"
|
||||||
|
#
|
||||||
|
# obj = BinData::String.new(length: 6, trim_padding: true)
|
||||||
|
# obj.assign("abcd")
|
||||||
|
# obj #=> "abcd"
|
||||||
|
# obj.to_binary_s #=> "abcd\000\000"
|
||||||
|
#
|
||||||
|
# obj = BinData::String.new(length: 6, pad_byte: 'A')
|
||||||
|
# obj.assign("abcd")
|
||||||
|
# obj #=> "abcdAA"
|
||||||
|
# obj.to_binary_s #=> "abcdAA"
|
||||||
|
#
|
||||||
|
# == Parameters
|
||||||
|
#
|
||||||
|
# String objects accept all the params that BinData::BasePrimitive
|
||||||
|
# does, as well as the following:
|
||||||
|
#
|
||||||
|
# <tt>:read_length</tt>:: The length in bytes to use when reading a value.
|
||||||
|
# <tt>:length</tt>:: The fixed length of the string. If a shorter
|
||||||
|
# string is set, it will be padded to this length.
|
||||||
|
# <tt>:pad_byte</tt>:: The byte to use when padding a string to a
|
||||||
|
# set length. Valid values are Integers and
|
||||||
|
# Strings of length 1. "\0" is the default.
|
||||||
|
# <tt>:pad_front</tt>:: Signifies that the padding occurs at the front
|
||||||
|
# of the string rather than the end. Default
|
||||||
|
# is false.
|
||||||
|
# <tt>:trim_padding</tt>:: Boolean, default false. If set, #value will
|
||||||
|
# return the value with all pad_bytes trimmed
|
||||||
|
# from the end of the string. The value will
|
||||||
|
# not be trimmed when writing.
|
||||||
|
class String < BinData::BasePrimitive
|
||||||
|
arg_processor :string
|
||||||
|
|
||||||
|
optional_parameters :read_length, :length, :trim_padding, :pad_front, :pad_left
|
||||||
|
default_parameters pad_byte: "\0"
|
||||||
|
mutually_exclusive_parameters :read_length, :length
|
||||||
|
mutually_exclusive_parameters :length, :value
|
||||||
|
|
||||||
|
def initialize_shared_instance
|
||||||
|
if (has_parameter?(:value) || has_parameter?(:asserted_value)) &&
|
||||||
|
!has_parameter?(:read_length)
|
||||||
|
extend WarnNoReadLengthPlugin
|
||||||
|
end
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def assign(val)
|
||||||
|
super(binary_string(val))
|
||||||
|
end
|
||||||
|
|
||||||
|
def snapshot
|
||||||
|
# override to trim padding
|
||||||
|
snap = super
|
||||||
|
snap = clamp_to_length(snap)
|
||||||
|
|
||||||
|
if get_parameter(:trim_padding)
|
||||||
|
trim_padding(snap)
|
||||||
|
else
|
||||||
|
snap
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
#---------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def clamp_to_length(str)
|
||||||
|
str = binary_string(str)
|
||||||
|
|
||||||
|
len = eval_parameter(:length) || str.length
|
||||||
|
if str.length == len
|
||||||
|
str
|
||||||
|
elsif str.length > len
|
||||||
|
str.slice(0, len)
|
||||||
|
else
|
||||||
|
padding = (eval_parameter(:pad_byte) * (len - str.length))
|
||||||
|
if get_parameter(:pad_front)
|
||||||
|
padding + str
|
||||||
|
else
|
||||||
|
str + padding
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def trim_padding(str)
|
||||||
|
if get_parameter(:pad_front)
|
||||||
|
str.sub(/\A#{eval_parameter(:pad_byte)}*/, "")
|
||||||
|
else
|
||||||
|
str.sub(/#{eval_parameter(:pad_byte)}*\z/, "")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def value_to_binary_string(val)
|
||||||
|
clamp_to_length(val)
|
||||||
|
end
|
||||||
|
|
||||||
|
def read_and_return_value(io)
|
||||||
|
len = eval_parameter(:read_length) || eval_parameter(:length) || 0
|
||||||
|
io.readbytes(len)
|
||||||
|
end
|
||||||
|
|
||||||
|
def sensible_default
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class StringArgProcessor < BaseArgProcessor
|
||||||
|
def sanitize_parameters!(obj_class, params)
|
||||||
|
params.warn_replacement_parameter(:initial_length, :read_length)
|
||||||
|
params.must_be_integer(:read_length, :length)
|
||||||
|
params.rename_parameter(:pad_left, :pad_front)
|
||||||
|
params.sanitize(:pad_byte) { |byte| sanitized_pad_byte(byte) }
|
||||||
|
end
|
||||||
|
|
||||||
|
#-------------
|
||||||
|
private
|
||||||
|
|
||||||
|
def sanitized_pad_byte(byte)
|
||||||
|
pad_byte = byte.is_a?(Integer) ? byte.chr : byte.to_s
|
||||||
|
if pad_byte.bytesize > 1
|
||||||
|
raise ArgumentError, ":pad_byte must not contain more than 1 byte"
|
||||||
|
end
|
||||||
|
pad_byte
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Warns when reading if :value && no :read_length
|
||||||
|
module WarnNoReadLengthPlugin
|
||||||
|
def read_and_return_value(io)
|
||||||
|
warn "#{debug_name} does not have a :read_length parameter - returning empty string"
|
||||||
|
""
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
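The :pad_front parameter documented above has no example in the original comment; a small sketch of its effect, derived from the padding branch in #clamp_to_length:

  obj = BinData::String.new(length: 6, pad_byte: "0", pad_front: true)
  obj.assign("1234")
  obj.to_binary_s #=> "001234"
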
96
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/stringz.rb
vendored
Normal file
@ -0,0 +1,96 @@
require "bindata/base_primitive"

module BinData
  # A BinData::Stringz object is a container for a zero ("\0") terminated
  # string.
  #
  # For convenience, the zero terminator is not necessary when setting the
  # value. Likewise, the returned value will not be zero terminated.
  #
  #   require 'bindata'
  #
  #   data = "abcd\x00efgh"
  #
  #   obj = BinData::Stringz.new
  #   obj.read(data)
  #   obj.snapshot #=> "abcd"
  #   obj.num_bytes #=> 5
  #   obj.to_binary_s #=> "abcd\000"
  #
  # == Parameters
  #
  # Stringz objects accept all the params that BinData::BasePrimitive
  # does, as well as the following:
  #
  # <tt>:max_length</tt>:: The maximum length of the string including the zero
  #                        byte.
  class Stringz < BinData::BasePrimitive

    optional_parameters :max_length

    def assign(val)
      super(binary_string(val))
    end

    def snapshot
      # override to always remove trailing zero bytes
      result = super
      trim_and_zero_terminate(result).chomp("\0")
    end

    #---------------
    private

    def value_to_binary_string(val)
      trim_and_zero_terminate(val)
    end

    def read_and_return_value(io)
      max_length = eval_parameter(:max_length)
      str = ""
      i = 0
      ch = nil

      # read until zero byte or we have read in the max number of bytes
      while ch != "\0" && i != max_length
        ch = io.readbytes(1)
        str += ch
        i += 1
      end

      trim_and_zero_terminate(str)
    end

    def sensible_default
      ""
    end

    def trim_and_zero_terminate(str)
      result = binary_string(str)
      truncate_after_first_zero_byte!(result)
      trim_to!(result, eval_parameter(:max_length))
      append_zero_byte_if_needed!(result)
      result
    end

    def truncate_after_first_zero_byte!(str)
      str.sub!(/([^\0]*\0).*/, '\1')
    end

    def trim_to!(str, max_length = nil)
      if max_length
        max_length = 1 if max_length < 1
        str.slice!(max_length..-1)
        if str.length == max_length && str[-1, 1] != "\0"
          str[-1, 1] = "\0"
        end
      end
    end

    def append_zero_byte_if_needed!(str)
      if str.length == 0 || str[-1, 1] != "\0"
        str << "\0"
      end
    end
  end
end

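:max_length is described above without an example; a short sketch of how it bounds both reading and writing (behaviour follows #read_and_return_value and #trim_to!):

  obj = BinData::Stringz.new(max_length: 4)
  obj.read("abcdefg")
  obj           #=> "abc"  (three chars plus the terminating zero fill the 4 bytes)
  obj.num_bytes #=> 4
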
422
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/struct.rb
vendored
Normal file
@ -0,0 +1,422 @@
require 'bindata/base'
require 'bindata/delayed_io'

module BinData

  class Base
    optional_parameter :onlyif, :byte_align # Used by Struct
  end

  # A Struct is an ordered collection of named data objects.
  #
  #    require 'bindata'
  #
  #    class Tuple < BinData::Record
  #      int8 :x
  #      int8 :y
  #      int8 :z
  #    end
  #
  #    obj = BinData::Struct.new(hide: :a,
  #                              fields: [ [:int32le, :a],
  #                                        [:int16le, :b],
  #                                        [:tuple, :s] ])
  #    obj.field_names   =># [:b, :s]
  #
  #
  # == Parameters
  #
  # Parameters may be provided at initialisation to control the behaviour of
  # an object. These params are:
  #
  # <tt>:fields</tt>:: An array specifying the fields for this struct.
  #                    Each element of the array is of the form [type, name,
  #                    params]. Type is a symbol representing a registered
  #                    type. Name is the name of this field. Params is an
  #                    optional hash of parameters to pass to this field
  #                    when instantiating it. If name is "" or nil, then
  #                    that field is anonymous and behaves as a hidden field.
  # <tt>:hide</tt>::   A list of the names of fields that are to be hidden
  #                    from the outside world. Hidden fields don't appear
  #                    in #snapshot or #field_names but are still accessible
  #                    by name.
  # <tt>:endian</tt>:: Either :little or :big. This specifies the default
  #                    endian of any numerics in this struct, or in any
  #                    nested data objects.
  # <tt>:search_prefix</tt>:: Allows abbreviated type names. If a type is
  #                           unrecognised, then each prefix is applied until
  #                           a match is found.
  #
  # == Field Parameters
  #
  # Fields may have have extra parameters as listed below:
  #
  # [<tt>:onlyif</tt>]     Used to indicate a data object is optional.
  #                        if +false+, this object will not be included in any
  #                        calls to #read, #write, #num_bytes or #snapshot.
  # [<tt>:byte_align</tt>] This field's rel_offset must be a multiple of
  #                        <tt>:byte_align</tt>.
  class Struct < BinData::Base
    arg_processor :struct

    mandatory_parameter :fields
    optional_parameters :endian, :search_prefix, :hide

    # These reserved words may not be used as field names
    RESERVED =
      Hash[*
        (Hash.instance_methods +
         %w{alias and begin break case class def defined do else elsif
            end ensure false for if in module next nil not or redo
            rescue retry return self super then true undef unless until
            when while yield} +
         %w{array element index value} +
         %w{type initial_length read_until} +
         %w{fields endian search_prefix hide only_if byte_align} +
         %w{choices selection copy_on_change} +
         %w{read_abs_offset struct_params}).collect(&:to_sym).
        uniq.collect { |key| [key, true] }.flatten
      ]

    def initialize_shared_instance
      fields = get_parameter(:fields)
      @field_names = fields.field_names.freeze
      extend ByteAlignPlugin if fields.any_field_has_parameter?(:byte_align)
      define_field_accessors
      super
    end

    def initialize_instance
      @field_objs = []
    end

    def clear #:nodoc:
      @field_objs.each { |f| f.clear unless f.nil? }
    end

    def clear? #:nodoc:
      @field_objs.all? { |f| f.nil? || f.clear? }
    end

    def assign(val)
      clear
      assign_fields(val)
    end

    def snapshot
      snapshot = Snapshot.new
      field_names.each do |name|
        obj = find_obj_for_name(name)
        snapshot[name] = obj.snapshot if include_obj?(obj)
      end
      snapshot
    end

    # Returns a list of the names of all fields accessible through this
    # object. +include_hidden+ specifies whether to include hidden names
    # in the listing.
    def field_names(include_hidden = false)
      if include_hidden
        @field_names.compact
      else
        hidden = get_parameter(:hide) || []
        @field_names.compact - hidden
      end
    end

    def debug_name_of(child) #:nodoc:
      field_name = @field_names[find_index_of(child)]
      "#{debug_name}.#{field_name}"
    end

    def offset_of(child) #:nodoc:
      instantiate_all_objs
      sum = sum_num_bytes_below_index(find_index_of(child))
      child.bit_aligned? ? sum.floor : sum.ceil
    end

    def do_read(io) #:nodoc:
      instantiate_all_objs
      @field_objs.each { |f| f.do_read(io) if include_obj_for_io?(f) }
    end

    def do_write(io) #:nodoc
      instantiate_all_objs
      @field_objs.each { |f| f.do_write(io) if include_obj_for_io?(f) }
    end

    def do_num_bytes #:nodoc:
      instantiate_all_objs
      sum_num_bytes_for_all_fields
    end

    def [](key)
      find_obj_for_name(key)
    end

    def []=(key, value)
      obj = find_obj_for_name(key)
      if obj
        obj.assign(value)
      end
    end

    def key?(key)
      @field_names.index(base_field_name(key))
    end

    def each_pair
      @field_names.compact.each do |name|
        yield [name, find_obj_for_name(name)]
      end
    end

    #---------------
    private

    def define_field_accessors
      get_parameter(:fields).each_with_index do |field, i|
        name = field.name_as_sym
        define_field_accessors_for(name, i) if name
      end
    end

    def define_field_accessors_for(name, index)
      define_singleton_method(name) do
        instantiate_obj_at(index) if @field_objs[index].nil?
        @field_objs[index]
      end
      define_singleton_method("#{name}=") do |*vals|
        instantiate_obj_at(index) if @field_objs[index].nil?
        @field_objs[index].assign(*vals)
      end
      define_singleton_method("#{name}?") do
        instantiate_obj_at(index) if @field_objs[index].nil?
        include_obj?(@field_objs[index])
      end
    end

    def find_index_of(obj)
      @field_objs.index { |el| el.equal?(obj) }
    end

    def find_obj_for_name(name)
      index = @field_names.index(base_field_name(name))
      if index
        instantiate_obj_at(index)
        @field_objs[index]
      else
        nil
      end
    end

    def base_field_name(name)
      name.to_s.sub(/(=|\?)\z/, "").to_sym
    end

    def instantiate_all_objs
      @field_names.each_index { |i| instantiate_obj_at(i) }
    end

    def instantiate_obj_at(index)
      if @field_objs[index].nil?
        field = get_parameter(:fields)[index]
        @field_objs[index] = field.instantiate(nil, self)
      end
    end

    def assign_fields(val)
      src = as_stringified_hash(val)

      @field_names.compact.each do |name|
        obj = find_obj_for_name(name)
        if obj && src.key?(name)
          obj.assign(src[name])
        end
      end
    end

    def as_stringified_hash(val)
      if BinData::Struct === val
        val
      elsif val.nil?
        {}
      else
        hash = Snapshot.new
        val.each_pair { |k,v| hash[k] = v }
        hash
      end
    end

    def sum_num_bytes_for_all_fields
      sum_num_bytes_below_index(@field_objs.length)
    end

    def sum_num_bytes_below_index(index)
      (0...index).inject(0) do |sum, i|
        obj = @field_objs[i]
        if include_obj?(obj)
          nbytes = obj.do_num_bytes
          (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
        else
          sum
        end
      end
    end

    def include_obj_for_io?(obj)
      # Used by #do_read and #do_write, to ensure the stream is passed to
      # DelayedIO objects for delayed processing.
      include_obj?(obj) || DelayedIO === obj
    end

    def include_obj?(obj)
      !obj.has_parameter?(:onlyif) || obj.eval_parameter(:onlyif)
    end

    # A hash that can be accessed via attributes.
    class Snapshot < ::Hash #:nodoc:
      def []=(key, value)
        super unless value.nil?
      end

      def respond_to?(symbol, include_private = false)
        key?(symbol) || super
      end

      def method_missing(symbol, *args)
        key?(symbol) ? self[symbol] : super
      end
    end
  end

  # Align fields to a multiple of :byte_align
  module ByteAlignPlugin
    def do_read(io)
      initial_offset = io.offset
      instantiate_all_objs
      @field_objs.each do |f|
        if include_obj?(f)
          if align_obj?(f)
            io.seekbytes(bytes_to_align(f, io.offset - initial_offset))
          end
          f.do_read(io)
        end
      end
    end

    def do_write(io)
      initial_offset = io.offset
      instantiate_all_objs
      @field_objs.each do |f|
        if include_obj?(f)
          if align_obj?(f)
            io.writebytes("\x00" * bytes_to_align(f, io.offset - initial_offset))
          end
          f.do_write(io)
        end
      end
    end

    def sum_num_bytes_below_index(index)
      sum = 0
      (0...@field_objs.length).each do |i|
        obj = @field_objs[i]
        if include_obj?(obj)
          sum = sum.ceil + bytes_to_align(obj, sum.ceil) if align_obj?(obj)

          break if i >= index

          nbytes = obj.do_num_bytes
          sum = (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
        end
      end

      sum
    end

    def bytes_to_align(obj, rel_offset)
      align = obj.eval_parameter(:byte_align)
      (align - (rel_offset % align)) % align
    end

    def align_obj?(obj)
      obj.has_parameter?(:byte_align)
    end
  end

  class StructArgProcessor < BaseArgProcessor
    def sanitize_parameters!(obj_class, params)
      sanitize_endian(params)
      sanitize_search_prefix(params)
      sanitize_fields(obj_class, params)
      sanitize_hide(params)
    end

    #-------------
    private

    def sanitize_endian(params)
      params.sanitize_endian(:endian)
    end

    def sanitize_search_prefix(params)
      params.sanitize(:search_prefix) do |sprefix|
        search_prefix = []
        Array(sprefix).each do |prefix|
          prefix = prefix.to_s.chomp("_")
          search_prefix << prefix if prefix != ""
        end

        search_prefix
      end
    end

    def sanitize_fields(obj_class, params)
      params.sanitize_fields(:fields) do |fields, sanitized_fields|
        fields.each do |ftype, fname, fparams|
          sanitized_fields.add_field(ftype, fname, fparams)
        end

        field_names = sanitized_field_names(sanitized_fields)
        ensure_field_names_are_valid(obj_class, field_names)
      end
    end

    def sanitize_hide(params)
      params.sanitize(:hide) do |hidden|
        field_names = sanitized_field_names(params[:fields])
        hfield_names = hidden_field_names(hidden)

        hfield_names & field_names
      end
    end

    def sanitized_field_names(sanitized_fields)
      sanitized_fields.field_names.compact
    end

    def hidden_field_names(hidden)
      (hidden || []).collect(&:to_sym)
    end

    def ensure_field_names_are_valid(obj_class, field_names)
      reserved_names = BinData::Struct::RESERVED

      field_names.each do |name|
        if obj_class.method_defined?(name)
          raise NameError.new("Rename field '#{name}' in #{obj_class}, " \
                              "as it shadows an existing method.", name)
        end
        if reserved_names.include?(name)
          raise NameError.new("Rename field '#{name}' in #{obj_class}, " \
                              "as it is a reserved name.", name)
        end
        if field_names.count(name) != 1
          raise NameError.new("field '#{name}' in #{obj_class}, " \
                              "is defined multiple times.", name)
        end
      end
    end
  end
end

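The :onlyif and :byte_align field parameters documented above have no example in the original comment; a small sketch, with an illustrative record and field names not taken from this diff:

  class PacketSketch < BinData::Record
    endian :little
    uint8  :has_extra
    uint32 :extra, onlyif: -> { has_extra.nonzero? }  # skipped by read/write/num_bytes when has_extra is zero
    uint16 :tail, byte_align: 4                       # rel_offset padded to a multiple of 4 (see ByteAlignPlugin)
  end
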
95
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/trace.rb
vendored
Normal file
@ -0,0 +1,95 @@
module BinData
  # reference to the current tracer
  @tracer ||= nil

  class Tracer #:nodoc:
    def initialize(io)
      @trace_io = io
    end

    def trace(msg)
      @trace_io.puts(msg)
    end

    def trace_obj(obj_name, val)
      if val.length > 30
        val = val.slice(0..30) + "..."
      end

      trace "#{obj_name} => #{val}"
    end
  end

  # Turn on trace information when reading a BinData object.
  # If +block+ is given then the tracing only occurs for that block.
  # This is useful for debugging a BinData declaration.
  def trace_reading(io = STDERR)
    @tracer = Tracer.new(io)
    [BasePrimitive, Choice].each(&:turn_on_tracing)

    if block_given?
      begin
        yield
      ensure
        [BasePrimitive, Choice].each(&:turn_off_tracing)
        @tracer = nil
      end
    end
  end

  def trace_message #:nodoc:
    yield @tracer if @tracer
  end

  module_function :trace_reading, :trace_message

  class BasePrimitive < BinData::Base
    class << self
      def turn_on_tracing
        alias_method :do_read_without_hook, :do_read
        alias_method :do_read, :do_read_with_hook
      end

      def turn_off_tracing
        alias_method :do_read, :do_read_without_hook
      end
    end

    def do_read_with_hook(io)
      do_read_without_hook(io)
      trace_value
    end

    def trace_value
      BinData.trace_message do |tracer|
        value_string = _value.inspect
        tracer.trace_obj(debug_name, value_string)
      end
    end
  end

  class Choice < BinData::Base
    class << self
      def turn_on_tracing
        alias_method :do_read_without_hook, :do_read
        alias_method :do_read, :do_read_with_hook
      end

      def turn_off_tracing
        alias_method :do_read, :do_read_without_hook
      end
    end

    def do_read_with_hook(io)
      trace_selection
      do_read_without_hook(io)
    end

    def trace_selection
      BinData.trace_message do |tracer|
        selection_string = eval_parameter(:selection).inspect
        tracer.trace_obj("#{debug_name}-selection-", selection_string)
      end
    end
  end
end

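A short sketch of the tracing hook defined above; SomeRecord and io are placeholders for your own record class and input stream:

  # Prints one "name => value" line per primitive read to STDERR
  # while the block runs, then restores the untraced #do_read.
  BinData.trace_reading do
    SomeRecord.read(io)
  end
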
62
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/uint8_array.rb
vendored
Normal file
@ -0,0 +1,62 @@
require "bindata/base_primitive"

module BinData
  # Uint8Array is a specialised type of array that only contains
  # bytes (Uint8). It is a faster and more memory efficient version
  # of `BinData::Array.new(:type => :uint8)`.
  #
  #   require 'bindata'
  #
  #   obj = BinData::Uint8Array.new(initial_length: 5)
  #   obj.read("abcdefg") #=> [97, 98, 99, 100, 101]
  #   obj[2] #=> 99
  #   obj.collect { |x| x.chr }.join #=> "abcde"
  #
  # == Parameters
  #
  # Parameters may be provided at initialisation to control the behaviour of
  # an object. These params are:
  #
  # <tt>:initial_length</tt>:: The initial length of the array.
  # <tt>:read_until</tt>::     May only have a value of `:eof`. This parameter
  #                            instructs the array to read as much data from
  #                            the stream as possible.
  class Uint8Array < BinData::BasePrimitive
    optional_parameters :initial_length, :read_until
    mutually_exclusive_parameters :initial_length, :read_until
    arg_processor :uint8_array

    #---------------
    private

    def value_to_binary_string(val)
      val.pack("C*")
    end

    def read_and_return_value(io)
      if has_parameter?(:initial_length)
        data = io.readbytes(eval_parameter(:initial_length))
      else
        data = io.read_all_bytes
      end

      data.unpack("C*")
    end

    def sensible_default
      []
    end
  end

  class Uint8ArrayArgProcessor < BaseArgProcessor
    def sanitize_parameters!(obj_class, params) #:nodoc:
      # ensure one of :initial_length and :read_until exists
      unless params.has_at_least_one_of?(:initial_length, :read_until)
        params[:initial_length] = 0
      end

      msg = "Parameter :read_until must have a value of :eof"
      params.sanitize(:read_until) { |val| raise ArgumentError, msg unless val == :eof }
    end
  end
end

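The :read_until parameter above only accepts :eof; a one-line sketch of that mode, following #read_and_return_value:

  BinData::Uint8Array.new(read_until: :eof).read("abc") #=> [97, 98, 99]
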
3
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/version.rb
vendored
Normal file
@ -0,0 +1,3 @@
module BinData
  VERSION = "2.4.15"
end

47
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/virtual.rb
vendored
Normal file
@ -0,0 +1,47 @@
require "bindata/base"

module BinData
  # A virtual field is one that is neither read, written nor occupies space in
  # the data stream. It is used to make assertions or as a convenient label
  # for determining offsets or storing values.
  #
  #   require 'bindata'
  #
  #   class A < BinData::Record
  #     string  :a, read_length: 5
  #     string  :b, read_length: 5
  #     virtual :c, assert: -> { a == b }
  #   end
  #
  #   obj = A.read("abcdeabcde")
  #   obj.a #=> "abcde"
  #   obj.c.offset #=> 10
  #
  #   obj = A.read("abcdeABCDE") #=> BinData::ValidityError: assertion failed for obj.c
  #
  # == Parameters
  #
  # Parameters may be provided at initialisation to control the behaviour of
  # an object. These params include those for BinData::Base as well as:
  #
  # [<tt>:assert</tt>] Raise an error when reading or assigning if the value
  #                    of this evaluated parameter is false.
  # [<tt>:value</tt>]  The virtual object will always have this value.
  #
  class Virtual < BinData::BasePrimitive

    def do_read(io)
    end

    def do_write(io)
    end

    def do_num_bytes
      0.0
    end

    def sensible_default
      nil
    end
  end
end

36
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/bindata-2.4.15/lib/bindata/warnings.rb
vendored
Normal file
@ -0,0 +1,36 @@
module BinData
  class Base
    # Don't override initialize. If you are defining a new kind of datatype
    # (list, array, choice etc) then put your initialization code in
    # #initialize_instance. BinData objects might be initialized as prototypes
    # and your initialization code may not be called.
    #
    # If you're subclassing BinData::Record, you are definitely doing the wrong
    # thing. Read the documentation on how to use BinData.
    # http://github.com/dmendel/bindata/wiki/Records
    alias_method :initialize_without_warning, :initialize
    def initialize_with_warning(*args)
      owner = method(:initialize).owner
      if owner != BinData::Base
        msg = "Don't override #initialize on #{owner}."
        if %w(BinData::Base BinData::BasePrimitive).include? self.class.superclass.name
          msg += "\nrename #initialize to #initialize_instance."
        end
        fail msg
      end
      initialize_without_warning(*args)
    end
    alias initialize initialize_with_warning

    def initialize_instance(*args)
      unless args.empty?
        fail "#{caller[0]} remove the call to super in #initialize_instance"
      end
    end
  end

  class Struct
    # has_key? is deprecated
    alias has_key? key?
  end
end

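A minimal sketch of the convention the guard above enforces; the type name and instance variable are illustrative only:

  class Counter < BinData::BasePrimitive
    def initialize_instance
      @reads = 0   # per-instance setup goes here, not in #initialize
    end
  end
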
11
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools.rb
vendored
Normal file
@ -0,0 +1,11 @@
# frozen_string_literal: true

require 'elftools/constants'
require 'elftools/elf_file'
require 'elftools/version'

# The ELF parsing tools!
# Main entry point is {ELFTools::ELFFile}, see it
# for more information.
module ELFTools
end

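A minimal sketch of the entry point named above; the path is illustrative and the output depends on the binary inspected:

  require 'elftools'

  elf = ELFTools::ELFFile.new(File.open('/bin/ls'))
  elf.machine #=> e.g. 'Advanced Micro Devices X86-64'
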
722
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/constants.rb
vendored
Normal file
722
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/constants.rb
vendored
Normal file
@ -0,0 +1,722 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# Define constants from elf.h.
|
||||||
|
# Mostly refer from https://github.com/torvalds/linux/blob/master/include/uapi/linux/elf.h
|
||||||
|
# and binutils/elfcpp/elfcpp.h.
|
||||||
|
module Constants
|
||||||
|
# ELF magic header
|
||||||
|
ELFMAG = "\x7FELF"
|
||||||
|
|
||||||
|
# Values of `d_un.d_val' in the DT_FLAGS and DT_FLAGS_1 entry.
|
||||||
|
module DF
|
||||||
|
DF_ORIGIN = 0x00000001 # Object may use DF_ORIGIN
|
||||||
|
DF_SYMBOLIC = 0x00000002 # Symbol resolutions starts here
|
||||||
|
DF_TEXTREL = 0x00000004 # Object contains text relocations
|
||||||
|
DF_BIND_NOW = 0x00000008 # No lazy binding for this object
|
||||||
|
DF_STATIC_TLS = 0x00000010 # Module uses the static TLS model
|
||||||
|
|
||||||
|
DF_1_NOW = 0x00000001 # Set RTLD_NOW for this object.
|
||||||
|
DF_1_GLOBAL = 0x00000002 # Set RTLD_GLOBAL for this object.
|
||||||
|
DF_1_GROUP = 0x00000004 # Set RTLD_GROUP for this object.
|
||||||
|
DF_1_NODELETE = 0x00000008 # Set RTLD_NODELETE for this object.
|
||||||
|
DF_1_LOADFLTR = 0x00000010 # Trigger filtee loading at runtime.
|
||||||
|
DF_1_INITFIRST = 0x00000020 # Set RTLD_INITFIRST for this object
|
||||||
|
DF_1_NOOPEN = 0x00000040 # Set RTLD_NOOPEN for this object.
|
||||||
|
DF_1_ORIGIN = 0x00000080 # $ORIGIN must be handled.
|
||||||
|
DF_1_DIRECT = 0x00000100 # Direct binding enabled.
|
||||||
|
DF_1_TRANS = 0x00000200 # :nodoc:
|
||||||
|
DF_1_INTERPOSE = 0x00000400 # Object is used to interpose.
|
||||||
|
DF_1_NODEFLIB = 0x00000800 # Ignore default lib search path.
|
||||||
|
DF_1_NODUMP = 0x00001000 # Object can't be dldump'ed.
|
||||||
|
DF_1_CONFALT = 0x00002000 # Configuration alternative created.
|
||||||
|
DF_1_ENDFILTEE = 0x00004000 # Filtee terminates filters search.
|
||||||
|
DF_1_DISPRELDNE = 0x00008000 # Disp reloc applied at build time.
|
||||||
|
DF_1_DISPRELPND = 0x00010000 # Disp reloc applied at run-time.
|
||||||
|
DF_1_NODIRECT = 0x00020000 # Object has no-direct binding.
|
||||||
|
DF_1_IGNMULDEF = 0x00040000 # :nodoc:
|
||||||
|
DF_1_NOKSYMS = 0x00080000 # :nodoc:
|
||||||
|
DF_1_NOHDR = 0x00100000 # :nodoc:
|
||||||
|
DF_1_EDITED = 0x00200000 # Object is modified after built.
|
||||||
|
DF_1_NORELOC = 0x00400000 # :nodoc:
|
||||||
|
DF_1_SYMINTPOSE = 0x00800000 # Object has individual interposers.
|
||||||
|
DF_1_GLOBAUDIT = 0x01000000 # Global auditing required.
|
||||||
|
DF_1_SINGLETON = 0x02000000 # Singleton symbols are used.
|
||||||
|
DF_1_STUB = 0x04000000 # :nodoc:
|
||||||
|
DF_1_PIE = 0x08000000 # Object is a position-independent executable.
|
||||||
|
DF_1_KMOD = 0x10000000 # :nodoc:
|
||||||
|
DF_1_WEAKFILTER = 0x20000000 # :nodoc:
|
||||||
|
DF_1_NOCOMMON = 0x40000000 # :nodoc:
|
||||||
|
end
|
||||||
|
include DF
|
||||||
|
|
||||||
|
# Dynamic table types, records in +d_tag+.
|
||||||
|
module DT
|
||||||
|
DT_NULL = 0 # marks the end of the _DYNAMIC array
|
||||||
|
DT_NEEDED = 1 # libraries need to be linked by loader
|
||||||
|
DT_PLTRELSZ = 2 # total size of relocation entries
|
||||||
|
DT_PLTGOT = 3 # address of procedure linkage table or global offset table
|
||||||
|
DT_HASH = 4 # address of symbol hash table
|
||||||
|
DT_STRTAB = 5 # address of string table
|
||||||
|
DT_SYMTAB = 6 # address of symbol table
|
||||||
|
DT_RELA = 7 # address of a relocation table
|
||||||
|
DT_RELASZ = 8 # total size of the {DT_RELA} table
|
||||||
|
DT_RELAENT = 9 # size of each entry in the {DT_RELA} table
|
||||||
|
DT_STRSZ = 10 # total size of {DT_STRTAB}
|
||||||
|
DT_SYMENT = 11 # size of each entry in {DT_SYMTAB}
|
||||||
|
DT_INIT = 12 # where the initialization function is
|
||||||
|
DT_FINI = 13 # where the termination function is
|
||||||
|
DT_SONAME = 14 # the shared object name
|
||||||
|
DT_RPATH = 15 # has been superseded by {DT_RUNPATH}
|
||||||
|
DT_SYMBOLIC = 16 # has been superseded by the DF_SYMBOLIC flag
|
||||||
|
DT_REL = 17 # similar to {DT_RELA}
|
||||||
|
DT_RELSZ = 18 # total size of the {DT_REL} table
|
||||||
|
DT_RELENT = 19 # size of each entry in the {DT_REL} table
|
||||||
|
DT_PLTREL = 20 # type of relocation entry, either {DT_REL} or {DT_RELA}
|
||||||
|
DT_DEBUG = 21 # for debugging
|
||||||
|
DT_TEXTREL = 22 # has been superseded by the DF_TEXTREL flag
|
||||||
|
DT_JMPREL = 23 # address of relocation entries associated solely with procedure linkage table
|
||||||
|
DT_BIND_NOW = 24 # if the loader needs to do relocate now, superseded by the DF_BIND_NOW flag
|
||||||
|
DT_INIT_ARRAY = 25 # address init array
|
||||||
|
DT_FINI_ARRAY = 26 # address of fini array
|
||||||
|
DT_INIT_ARRAYSZ = 27 # total size of init array
|
||||||
|
DT_FINI_ARRAYSZ = 28 # total size of fini array
|
||||||
|
DT_RUNPATH = 29 # path of libraries for searching
|
||||||
|
DT_FLAGS = 30 # flags
|
||||||
|
DT_ENCODING = 32 # just a lower bound
|
||||||
|
DT_PREINIT_ARRAY = 32 # pre-initialization functions array
|
||||||
|
DT_PREINIT_ARRAYSZ = 33 # pre-initialization functions array size (bytes)
|
||||||
|
DT_SYMTAB_SHNDX = 34 # address of the +SHT_SYMTAB_SHNDX+ section associated with {DT_SYMTAB} table
|
||||||
|
DT_RELRSZ = 35 # :nodoc:
|
||||||
|
DT_RELR = 36 # :nodoc:
|
||||||
|
DT_RELRENT = 37 # :nodoc:
|
||||||
|
|
||||||
|
# Values between {DT_LOOS} and {DT_HIOS} are reserved for operating system-specific semantics.
|
||||||
|
DT_LOOS = 0x6000000d
|
||||||
|
DT_HIOS = 0x6ffff000 # see {DT_LOOS}
|
||||||
|
|
||||||
|
# Values between {DT_VALRNGLO} and {DT_VALRNGHI} use the +d_un.d_val+ field of the dynamic structure.
|
||||||
|
DT_VALRNGLO = 0x6ffffd00
|
||||||
|
DT_VALRNGHI = 0x6ffffdff # see {DT_VALRNGLO}
|
||||||
|
|
||||||
|
# Values between {DT_ADDRRNGLO} and {DT_ADDRRNGHI} use the +d_un.d_ptr+ field of the dynamic structure.
|
||||||
|
DT_ADDRRNGLO = 0x6ffffe00
|
||||||
|
DT_GNU_HASH = 0x6ffffef5 # the gnu hash
|
||||||
|
DT_TLSDESC_PLT = 0x6ffffef6 # :nodoc:
|
||||||
|
DT_TLSDESC_GOT = 0x6ffffef7 # :nodoc:
|
||||||
|
DT_GNU_CONFLICT = 0x6ffffef8 # :nodoc:
|
||||||
|
DT_GNU_LIBLIST = 0x6ffffef9 # :nodoc:
|
||||||
|
DT_CONFIG = 0x6ffffefa # :nodoc:
|
||||||
|
DT_DEPAUDIT = 0x6ffffefb # :nodoc:
|
||||||
|
DT_AUDIT = 0x6ffffefc # :nodoc:
|
||||||
|
DT_PLTPAD = 0x6ffffefd # :nodoc:
|
||||||
|
DT_MOVETAB = 0x6ffffefe # :nodoc:
|
||||||
|
DT_SYMINFO = 0x6ffffeff # :nodoc:
|
||||||
|
DT_ADDRRNGHI = 0x6ffffeff # see {DT_ADDRRNGLO}
|
||||||
|
|
||||||
|
DT_VERSYM = 0x6ffffff0 # section address of .gnu.version
|
||||||
|
DT_RELACOUNT = 0x6ffffff9 # relative relocation count
|
||||||
|
DT_RELCOUNT = 0x6ffffffa # relative relocation count
|
||||||
|
DT_FLAGS_1 = 0x6ffffffb # flags
|
||||||
|
DT_VERDEF = 0x6ffffffc # address of version definition table
|
||||||
|
DT_VERDEFNUM = 0x6ffffffd # number of entries in {DT_VERDEF}
|
||||||
|
DT_VERNEED = 0x6ffffffe # address of version dependency table
|
||||||
|
DT_VERNEEDNUM = 0x6fffffff # number of entries in {DT_VERNEED}
|
||||||
|
|
||||||
|
# Values between {DT_LOPROC} and {DT_HIPROC} are reserved for processor-specific semantics.
|
||||||
|
DT_LOPROC = 0x70000000
|
||||||
|
|
||||||
|
DT_PPC_GOT = 0x70000000 # global offset table
|
||||||
|
DT_PPC_OPT = 0x70000001 # whether various optimisations are possible
|
||||||
|
|
||||||
|
DT_PPC64_GLINK = 0x70000000 # start of the .glink section
|
||||||
|
DT_PPC64_OPD = 0x70000001 # start of the .opd section
|
||||||
|
DT_PPC64_OPDSZ = 0x70000002 # size of the .opd section
|
||||||
|
DT_PPC64_OPT = 0x70000003 # whether various optimisations are possible
|
||||||
|
|
||||||
|
DT_SPARC_REGISTER = 0x70000000 # index of an +STT_SPARC_REGISTER+ symbol within the {DT_SYMTAB} table
|
||||||
|
|
||||||
|
DT_MIPS_RLD_VERSION = 0x70000001 # 32 bit version number for runtime linker interface
|
||||||
|
DT_MIPS_TIME_STAMP = 0x70000002 # time stamp
|
||||||
|
DT_MIPS_ICHECKSUM = 0x70000003 # checksum of external strings and common sizes
|
||||||
|
DT_MIPS_IVERSION = 0x70000004 # index of version string in string table
|
||||||
|
DT_MIPS_FLAGS = 0x70000005 # 32 bits of flags
|
||||||
|
DT_MIPS_BASE_ADDRESS = 0x70000006 # base address of the segment
|
||||||
|
DT_MIPS_MSYM = 0x70000007 # :nodoc:
|
||||||
|
DT_MIPS_CONFLICT = 0x70000008 # address of +.conflict+ section
|
||||||
|
DT_MIPS_LIBLIST = 0x70000009 # address of +.liblist+ section
|
||||||
|
DT_MIPS_LOCAL_GOTNO = 0x7000000a # number of local global offset table entries
|
||||||
|
DT_MIPS_CONFLICTNO = 0x7000000b # number of entries in the +.conflict+ section
|
||||||
|
DT_MIPS_LIBLISTNO = 0x70000010 # number of entries in the +.liblist+ section
|
||||||
|
DT_MIPS_SYMTABNO = 0x70000011 # number of entries in the +.dynsym+ section
|
||||||
|
DT_MIPS_UNREFEXTNO = 0x70000012 # index of first external dynamic symbol not referenced locally
|
||||||
|
DT_MIPS_GOTSYM = 0x70000013 # index of first dynamic symbol in global offset table
|
||||||
|
DT_MIPS_HIPAGENO = 0x70000014 # number of page table entries in global offset table
|
||||||
|
DT_MIPS_RLD_MAP = 0x70000016 # address of run time loader map, used for debugging
|
||||||
|
DT_MIPS_DELTA_CLASS = 0x70000017 # delta C++ class definition
|
||||||
|
DT_MIPS_DELTA_CLASS_NO = 0x70000018 # number of entries in {DT_MIPS_DELTA_CLASS}
|
||||||
|
DT_MIPS_DELTA_INSTANCE = 0x70000019 # delta C++ class instances
|
||||||
|
DT_MIPS_DELTA_INSTANCE_NO = 0x7000001a # number of entries in {DT_MIPS_DELTA_INSTANCE}
|
||||||
|
DT_MIPS_DELTA_RELOC = 0x7000001b # delta relocations
|
||||||
|
DT_MIPS_DELTA_RELOC_NO = 0x7000001c # number of entries in {DT_MIPS_DELTA_RELOC}
|
||||||
|
DT_MIPS_DELTA_SYM = 0x7000001d # delta symbols that Delta relocations refer to
|
||||||
|
DT_MIPS_DELTA_SYM_NO = 0x7000001e # number of entries in {DT_MIPS_DELTA_SYM}
|
||||||
|
DT_MIPS_DELTA_CLASSSYM = 0x70000020 # delta symbols that hold class declarations
|
||||||
|
DT_MIPS_DELTA_CLASSSYM_NO = 0x70000021 # number of entries in {DT_MIPS_DELTA_CLASSSYM}
|
||||||
|
DT_MIPS_CXX_FLAGS = 0x70000022 # flags indicating information about C++ flavor
|
||||||
|
DT_MIPS_PIXIE_INIT = 0x70000023 # :nodoc:
|
||||||
|
DT_MIPS_SYMBOL_LIB = 0x70000024 # address of +.MIPS.symlib+
|
||||||
|
DT_MIPS_LOCALPAGE_GOTIDX = 0x70000025 # GOT index of the first PTE for a segment
|
||||||
|
DT_MIPS_LOCAL_GOTIDX = 0x70000026 # GOT index of the first PTE for a local symbol
|
||||||
|
DT_MIPS_HIDDEN_GOTIDX = 0x70000027 # GOT index of the first PTE for a hidden symbol
|
||||||
|
DT_MIPS_PROTECTED_GOTIDX = 0x70000028 # GOT index of the first PTE for a protected symbol
|
||||||
|
DT_MIPS_OPTIONS = 0x70000029 # address of +.MIPS.options+
|
||||||
|
DT_MIPS_INTERFACE = 0x7000002a # address of +.interface+
|
||||||
|
DT_MIPS_DYNSTR_ALIGN = 0x7000002b # :nodoc:
|
||||||
|
DT_MIPS_INTERFACE_SIZE = 0x7000002c # size of the +.interface+ section
|
||||||
|
DT_MIPS_RLD_TEXT_RESOLVE_ADDR = 0x7000002d # size of +rld_text_resolve+ function stored in the GOT
|
||||||
|
DT_MIPS_PERF_SUFFIX = 0x7000002e # default suffix of DSO to be added by rld on +dlopen()+ calls
|
||||||
|
DT_MIPS_COMPACT_SIZE = 0x7000002f # size of compact relocation section (O32)
|
||||||
|
DT_MIPS_GP_VALUE = 0x70000030 # GP value for auxiliary GOTs
|
||||||
|
DT_MIPS_AUX_DYNAMIC = 0x70000031 # address of auxiliary +.dynamic+
|
||||||
|
DT_MIPS_PLTGOT = 0x70000032 # address of the base of the PLTGOT
|
||||||
|
DT_MIPS_RWPLT = 0x70000034 # base of a writable PLT
|
||||||
|
DT_MIPS_RLD_MAP_REL = 0x70000035 # relative offset of run time loader map
|
||||||
|
DT_MIPS_XHASH = 0x70000036 # GNU-style hash table with xlat
|
||||||
|
|
||||||
|
DT_AUXILIARY = 0x7ffffffd # :nodoc:
|
||||||
|
DT_USED = 0x7ffffffe # :nodoc:
|
||||||
|
DT_FILTER = 0x7ffffffe # :nodoc:
|
||||||
|
|
||||||
|
DT_HIPROC = 0x7fffffff # see {DT_LOPROC}
|
||||||
|
end
|
||||||
|
include DT
|
||||||
|
|
||||||
|
# These constants define the various ELF target machines.
|
||||||
|
module EM
|
||||||
|
EM_NONE = 0 # none
|
||||||
|
EM_M32 = 1 # AT&T WE 32100
|
||||||
|
EM_SPARC = 2 # SPARC
|
||||||
|
EM_386 = 3 # Intel 80386
|
||||||
|
EM_68K = 4 # Motorola 68000
|
||||||
|
EM_88K = 5 # Motorola 88000
|
||||||
|
EM_486 = 6 # Intel 80486
|
||||||
|
EM_860 = 7 # Intel 80860
|
||||||
|
EM_MIPS = 8 # MIPS R3000 (officially, big-endian only)
|
||||||
|
EM_S370 = 9 # IBM System/370
|
||||||
|
|
||||||
|
# Next two are historical and binaries and
|
||||||
|
# modules of these types will be rejected by Linux.
|
||||||
|
EM_MIPS_RS3_LE = 10 # MIPS R3000 little-endian
|
||||||
|
EM_MIPS_RS4_BE = 10 # MIPS R4000 big-endian
|
||||||
|
|
||||||
|
EM_PARISC = 15 # HPPA
|
||||||
|
EM_VPP500 = 17 # Fujitsu VPP500 (also some older versions of PowerPC)
|
||||||
|
EM_SPARC32PLUS = 18 # Sun's "v8plus"
|
||||||
|
EM_960 = 19 # Intel 80960
|
||||||
|
EM_PPC = 20 # PowerPC
|
||||||
|
EM_PPC64 = 21 # PowerPC64
|
||||||
|
EM_S390 = 22 # IBM S/390
|
||||||
|
EM_SPU = 23 # Cell BE SPU
|
||||||
|
EM_V800 = 36 # NEC V800 series
|
||||||
|
EM_FR20 = 37 # Fujitsu FR20
|
||||||
|
EM_RH32 = 38 # TRW RH32
|
||||||
|
EM_RCE = 39 # Motorola M*Core
|
||||||
|
EM_ARM = 40 # ARM 32 bit
|
||||||
|
EM_SH = 42 # SuperH
|
||||||
|
EM_SPARCV9 = 43 # SPARC v9 64-bit
|
||||||
|
EM_TRICORE = 44 # Siemens Tricore embedded processor
|
||||||
|
EM_ARC = 45 # ARC Cores
|
||||||
|
EM_H8_300 = 46 # Renesas H8/300
|
||||||
|
EM_H8_300H = 47 # Renesas H8/300H
|
||||||
|
EM_H8S = 48 # Renesas H8S
|
||||||
|
EM_H8_500 = 49 # Renesas H8/500H
|
||||||
|
EM_IA_64 = 50 # HP/Intel IA-64
|
||||||
|
EM_MIPS_X = 51 # Stanford MIPS-X
|
||||||
|
EM_COLDFIRE = 52 # Motorola Coldfire
|
||||||
|
EM_68HC12 = 53 # Motorola M68HC12
|
||||||
|
EM_MMA = 54 # Fujitsu Multimedia Accelerator
|
||||||
|
EM_PCP = 55 # Siemens PCP
|
||||||
|
EM_NCPU = 56 # Sony nCPU embedded RISC processor
|
||||||
|
EM_NDR1 = 57 # Denso NDR1 microprocessor
|
||||||
|
EM_STARCORE = 58 # Motorola Star*Core processor
|
||||||
|
EM_ME16 = 59 # Toyota ME16 processor
|
||||||
|
EM_ST100 = 60 # STMicroelectronics ST100 processor
|
||||||
|
EM_TINYJ = 61 # Advanced Logic Corp. TinyJ embedded processor
|
||||||
|
EM_X86_64 = 62 # AMD x86-64
|
||||||
|
EM_PDSP = 63 # Sony DSP Processor
|
||||||
|
EM_PDP10 = 64 # Digital Equipment Corp. PDP-10
|
||||||
|
EM_PDP11 = 65 # Digital Equipment Corp. PDP-11
|
||||||
|
EM_FX66 = 66 # Siemens FX66 microcontroller
|
||||||
|
EM_ST9PLUS = 67 # STMicroelectronics ST9+ 8/16 bit microcontroller
|
||||||
|
EM_ST7 = 68 # STMicroelectronics ST7 8-bit microcontroller
|
||||||
|
EM_68HC16 = 69 # Motorola MC68HC16 Microcontroller
|
||||||
|
EM_68HC11 = 70 # Motorola MC68HC11 Microcontroller
|
||||||
|
EM_68HC08 = 71 # Motorola MC68HC08 Microcontroller
|
||||||
|
EM_68HC05 = 72 # Motorola MC68HC05 Microcontroller
|
||||||
|
EM_SVX = 73 # Silicon Graphics SVx
|
||||||
|
EM_ST19 = 74 # STMicroelectronics ST19 8-bit cpu
|
||||||
|
EM_VAX = 75 # Digital VAX
|
||||||
|
EM_CRIS = 76 # Axis Communications 32-bit embedded processor
|
||||||
|
EM_JAVELIN = 77 # Infineon Technologies 32-bit embedded cpu
|
||||||
|
EM_FIREPATH = 78 # Element 14 64-bit DSP processor
|
||||||
|
EM_ZSP = 79 # LSI Logic's 16-bit DSP processor
|
||||||
|
EM_MMIX = 80 # Donald Knuth's educational 64-bit processor
|
||||||
|
EM_HUANY = 81 # Harvard's machine-independent format
|
||||||
|
EM_PRISM = 82 # SiTera Prism
|
||||||
|
EM_AVR = 83 # Atmel AVR 8-bit microcontroller
|
||||||
|
EM_FR30 = 84 # Fujitsu FR30
|
||||||
|
EM_D10V = 85 # Mitsubishi D10V
|
||||||
|
EM_D30V = 86 # Mitsubishi D30V
|
||||||
|
EM_V850 = 87 # Renesas V850
|
||||||
|
EM_M32R = 88 # Renesas M32R
|
||||||
|
EM_MN10300 = 89 # Matsushita MN10300
|
||||||
|
EM_MN10200 = 90 # Matsushita MN10200
|
||||||
|
EM_PJ = 91 # picoJava
|
||||||
|
EM_OPENRISC = 92 # OpenRISC 32-bit embedded processor
|
||||||
|
EM_ARC_COMPACT = 93 # ARC International ARCompact processor
|
||||||
|
EM_XTENSA = 94 # Tensilica Xtensa Architecture
|
||||||
|
EM_VIDEOCORE = 95 # Alphamosaic VideoCore processor
|
||||||
|
EM_TMM_GPP = 96 # Thompson Multimedia General Purpose Processor
|
||||||
|
EM_NS32K = 97 # National Semiconductor 32000 series
|
||||||
|
EM_TPC = 98 # Tenor Network TPC processor
|
||||||
|
EM_SNP1K = 99 # Trebia SNP 1000 processor
|
||||||
|
EM_ST200 = 100 # STMicroelectronics ST200 microcontroller
|
||||||
|
EM_IP2K = 101 # Ubicom IP2022 micro controller
|
||||||
|
EM_MAX = 102 # MAX Processor
|
||||||
|
EM_CR = 103 # National Semiconductor CompactRISC
|
||||||
|
EM_F2MC16 = 104 # Fujitsu F2MC16
|
||||||
|
EM_MSP430 = 105 # TI msp430 micro controller
|
||||||
|
EM_BLACKFIN = 106 # ADI Blackfin Processor
|
||||||
|
EM_SE_C33 = 107 # S1C33 Family of Seiko Epson processors
|
||||||
|
EM_SEP = 108 # Sharp embedded microprocessor
|
||||||
|
EM_ARCA = 109 # Arca RISC Microprocessor
|
||||||
|
EM_UNICORE = 110 # Microprocessor series from PKU-Unity Ltd. and MPRC of Peking University
|
||||||
|
EM_EXCESS = 111 # eXcess: 16/32/64-bit configurable embedded CPU
|
||||||
|
EM_DXP = 112 # Icera Semiconductor Inc. Deep Execution Processor
|
||||||
|
EM_ALTERA_NIOS2 = 113 # Altera Nios II soft-core processor
|
||||||
|
EM_CRX = 114 # National Semiconductor CRX
|
||||||
|
EM_XGATE = 115 # Motorola XGATE embedded processor
|
||||||
|
EM_C116 = 116 # Infineon C16x/XC16x processor
|
||||||
|
EM_M16C = 117 # Renesas M16C series microprocessors
|
||||||
|
EM_DSPIC30F = 118 # Microchip Technology dsPIC30F Digital Signal Controller
|
||||||
|
EM_CE = 119 # Freescale Communication Engine RISC core
|
||||||
|
EM_M32C = 120 # Freescale Communication Engine RISC core
|
||||||
|
EM_TSK3000 = 131 # Altium TSK3000 core
|
||||||
|
EM_RS08 = 132 # Freescale RS08 embedded processor
|
||||||
|
EM_SHARC = 133 # Analog Devices SHARC family of 32-bit DSP processors
|
||||||
|
EM_ECOG2 = 134 # Cyan Technology eCOG2 microprocessor
|
||||||
|
EM_SCORE7 = 135 # Sunplus S+core7 RISC processor
|
||||||
|
EM_DSP24 = 136 # New Japan Radio (NJR) 24-bit DSP Processor
|
||||||
|
EM_VIDEOCORE3 = 137 # Broadcom VideoCore III processor
|
||||||
|
EM_LATTICEMICO32 = 138 # RISC processor for Lattice FPGA architecture
|
||||||
|
EM_SE_C17 = 139 # Seiko Epson C17 family
|
||||||
|
EM_TI_C6000 = 140 # The Texas Instruments TMS320C6000 DSP family
|
||||||
|
EM_TI_C2000 = 141 # The Texas Instruments TMS320C2000 DSP family
|
||||||
|
EM_TI_C5500 = 142 # The Texas Instruments TMS320C55x DSP family
|
||||||
|
EM_TI_ARP32 = 143 # Texas Instruments Application Specific RISC Processor, 32bit fetch
|
||||||
|
EM_TI_PRU = 144 # Texas Instruments Programmable Realtime Unit
|
||||||
|
EM_MMDSP_PLUS = 160 # STMicroelectronics 64bit VLIW Data Signal Processor
|
||||||
|
EM_CYPRESS_M8C = 161 # Cypress M8C microprocessor
|
||||||
|
EM_R32C = 162 # Renesas R32C series microprocessors
|
||||||
|
EM_TRIMEDIA = 163 # NXP Semiconductors TriMedia architecture family
|
||||||
|
EM_QDSP6 = 164 # QUALCOMM DSP6 Processor
|
||||||
|
EM_8051 = 165 # Intel 8051 and variants
|
||||||
|
EM_STXP7X = 166 # STMicroelectronics STxP7x family
|
||||||
|
EM_NDS32 = 167 # Andes Technology compact code size embedded RISC processor family
|
||||||
|
EM_ECOG1 = 168 # Cyan Technology eCOG1X family
|
||||||
|
EM_ECOG1X = 168 # Cyan Technology eCOG1X family
|
||||||
|
EM_MAXQ30 = 169 # Dallas Semiconductor MAXQ30 Core Micro-controllers
|
||||||
|
EM_XIMO16 = 170 # New Japan Radio (NJR) 16-bit DSP Processor
|
||||||
|
EM_MANIK = 171 # M2000 Reconfigurable RISC Microprocessor
|
||||||
|
EM_CRAYNV2 = 172 # Cray Inc. NV2 vector architecture
|
||||||
|
EM_RX = 173 # Renesas RX family
|
||||||
|
EM_METAG = 174 # Imagination Technologies Meta processor architecture
|
||||||
|
EM_MCST_ELBRUS = 175 # MCST Elbrus general purpose hardware architecture
|
||||||
|
EM_ECOG16 = 176 # Cyan Technology eCOG16 family
|
||||||
|
EM_CR16 = 177 # National Semiconductor CompactRISC 16-bit processor
|
||||||
|
EM_ETPU = 178 # Freescale Extended Time Processing Unit
|
||||||
|
EM_SLE9X = 179 # Infineon Technologies SLE9X core
|
||||||
|
EM_L1OM = 180 # Intel L1OM
|
||||||
|
EM_K1OM = 181 # Intel K1OM
|
||||||
|
EM_AARCH64 = 183 # ARM 64 bit
|
||||||
|
EM_AVR32 = 185 # Atmel Corporation 32-bit microprocessor family
|
||||||
|
EM_STM8 = 186 # STMicroeletronics STM8 8-bit microcontroller
|
||||||
|
EM_TILE64 = 187 # Tilera TILE64 multicore architecture family
|
||||||
|
EM_TILEPRO = 188 # Tilera TILEPro
|
||||||
|
EM_MICROBLAZE = 189 # Xilinx MicroBlaze
|
||||||
|
EM_CUDA = 190 # NVIDIA CUDA architecture
|
||||||
|
EM_TILEGX = 191 # Tilera TILE-Gx
|
||||||
|
EM_CLOUDSHIELD = 192 # CloudShield architecture family
|
||||||
|
EM_COREA_1ST = 193 # KIPO-KAIST Core-A 1st generation processor family
|
||||||
|
EM_COREA_2ND = 194 # KIPO-KAIST Core-A 2nd generation processor family
|
||||||
|
EM_ARC_COMPACT2 = 195 # Synopsys ARCompact V2
|
||||||
|
EM_OPEN8 = 196 # Open8 8-bit RISC soft processor core
|
||||||
|
EM_RL78 = 197 # Renesas RL78 family
|
||||||
|
EM_VIDEOCORE5 = 198 # Broadcom VideoCore V processor
|
||||||
|
EM_78K0R = 199 # Renesas 78K0R
|
||||||
|
EM_56800EX = 200 # Freescale 56800EX Digital Signal Controller (DSC)
|
||||||
|
EM_BA1 = 201 # Beyond BA1 CPU architecture
|
||||||
|
EM_BA2 = 202 # Beyond BA2 CPU architecture
|
||||||
|
EM_XCORE = 203 # XMOS xCORE processor family
|
||||||
|
EM_MCHP_PIC = 204 # Microchip 8-bit PIC(r) family
|
||||||
|
EM_INTELGT = 205 # Intel Graphics Technology
|
||||||
|
EM_KM32 = 210 # KM211 KM32 32-bit processor
|
||||||
|
EM_KMX32 = 211 # KM211 KMX32 32-bit processor
|
||||||
|
EM_KMX16 = 212 # KM211 KMX16 16-bit processor
|
||||||
|
EM_KMX8 = 213 # KM211 KMX8 8-bit processor
|
||||||
|
EM_KVARC = 214 # KM211 KVARC processor
|
||||||
|
EM_CDP = 215 # Paneve CDP architecture family
|
||||||
|
EM_COGE = 216 # Cognitive Smart Memory Processor
|
||||||
|
EM_COOL = 217 # Bluechip Systems CoolEngine
|
||||||
|
EM_NORC = 218 # Nanoradio Optimized RISC
|
||||||
|
EM_CSR_KALIMBA = 219 # CSR Kalimba architecture family
|
||||||
|
EM_Z80 = 220 # Zilog Z80
|
||||||
|
EM_VISIUM = 221 # Controls and Data Services VISIUMcore processor
|
||||||
|
EM_FT32 = 222 # FTDI Chip FT32 high performance 32-bit RISC architecture
|
||||||
|
EM_MOXIE = 223 # Moxie processor family
|
||||||
|
EM_AMDGPU = 224 # AMD GPU architecture
|
||||||
|
EM_LANAI = 244 # Lanai 32-bit processor
|
||||||
|
EM_CEVA = 245 # CEVA Processor Architecture Family
|
||||||
|
EM_CEVA_X2 = 246 # CEVA X2 Processor Family
|
||||||
|
EM_BPF = 247 # Linux BPF - in-kernel virtual machine
|
||||||
|
EM_GRAPHCORE_IPU = 248 # Graphcore Intelligent Processing Unit
|
||||||
|
EM_IMG1 = 249 # Imagination Technologies
|
||||||
|
EM_NFP = 250 # Netronome Flow Processor (NFP)
|
||||||
|
EM_VE = 251 # NEC Vector Engine
|
||||||
|
EM_CSKY = 252 # C-SKY processor family
|
||||||
|
EM_ARC_COMPACT3_64 = 253 # Synopsys ARCv2.3 64-bit
|
||||||
|
EM_MCS6502 = 254 # MOS Technology MCS 6502 processor
|
||||||
|
EM_ARC_COMPACT3 = 255 # Synopsys ARCv2.3 32-bit
|
||||||
|
EM_KVX = 256 # Kalray VLIW core of the MPPA processor family
|
||||||
|
EM_65816 = 257 # WDC 65816/65C816
|
||||||
|
EM_LOONGARCH = 258 # LoongArch
|
||||||
|
EM_KF32 = 259 # ChipON KungFu32
|
||||||
|
EM_U16_U8CORE = 260 # LAPIS nX-U16/U8
|
||||||
|
EM_TACHYUM = 261 # Tachyum
|
||||||
|
EM_56800EF = 262 # NXP 56800EF Digital Signal Controller (DSC)
|
||||||
|
|
||||||
|
EM_FRV = 0x5441 # Fujitsu FR-V
|
||||||
|
|
||||||
|
# This is an interim value that we will use until the committee comes up with a final number.
|
||||||
|
EM_ALPHA = 0x9026
|
||||||
|
|
||||||
|
# Bogus old m32r magic number, used by old tools.
|
||||||
|
EM_CYGNUS_M32R = 0x9041
|
||||||
|
# This is the old interim value for S/390 architecture
|
||||||
|
EM_S390_OLD = 0xA390
|
||||||
|
# Also Panasonic/MEI MN10300, AM33
|
||||||
|
EM_CYGNUS_MN10300 = 0xbeef
|
||||||
|
|
||||||
|
# Return the architecture name according to +val+.
|
||||||
|
# Used by {ELFTools::ELFFile#machine}.
|
||||||
|
#
|
||||||
|
# Only supports famous archs.
|
||||||
|
# @param [Integer] val Value of +e_machine+.
|
||||||
|
# @return [String]
|
||||||
|
# Name of architecture.
|
||||||
|
# @example
|
||||||
|
# mapping(3)
|
||||||
|
# #=> 'Intel 80386'
|
||||||
|
# mapping(6)
|
||||||
|
# #=> 'Intel 80386'
|
||||||
|
# mapping(62)
|
||||||
|
# #=> 'Advanced Micro Devices X86-64'
|
||||||
|
# mapping(1337)
|
||||||
|
# #=> '<unknown>: 0x539'
|
||||||
|
def self.mapping(val)
|
||||||
|
case val
|
||||||
|
when EM_NONE then 'None'
|
||||||
|
when EM_386, EM_486 then 'Intel 80386'
|
||||||
|
when EM_860 then 'Intel 80860'
|
||||||
|
when EM_MIPS then 'MIPS R3000'
|
||||||
|
when EM_PPC then 'PowerPC'
|
||||||
|
when EM_PPC64 then 'PowerPC64'
|
||||||
|
when EM_ARM then 'ARM'
|
||||||
|
when EM_IA_64 then 'Intel IA-64'
|
||||||
|
when EM_AARCH64 then 'AArch64'
|
||||||
|
when EM_X86_64 then 'Advanced Micro Devices X86-64'
|
||||||
|
else format('<unknown>: 0x%x', val)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
include EM
|
||||||
|
|
||||||
|
# This module defines ELF file types.
|
||||||
|
module ET
|
||||||
|
ET_NONE = 0 # no file type
|
||||||
|
ET_REL = 1 # relocatable file
|
||||||
|
ET_EXEC = 2 # executable file
|
||||||
|
ET_DYN = 3 # shared object
|
||||||
|
ET_CORE = 4 # core file
|
||||||
|
# Return the type name according to +e_type+ in ELF file header.
|
||||||
|
# @return [String] Type in string format.
|
||||||
|
def self.mapping(type)
|
||||||
|
case type
|
||||||
|
when Constants::ET_NONE then 'NONE'
|
||||||
|
when Constants::ET_REL then 'REL'
|
||||||
|
when Constants::ET_EXEC then 'EXEC'
|
||||||
|
when Constants::ET_DYN then 'DYN'
|
||||||
|
when Constants::ET_CORE then 'CORE'
|
||||||
|
else '<unknown>'
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
include ET
|
||||||
|
|
||||||
|
# Program header permission flags, records bitwise OR value in +p_flags+.
|
||||||
|
module PF
|
||||||
|
PF_X = 1 # executable
|
||||||
|
PF_W = 2 # writable
|
||||||
|
PF_R = 4 # readable
|
||||||
|
end
|
||||||
|
include PF
|
||||||
|
|
||||||
|
# Program header types, records in +p_type+.
|
||||||
|
module PT
|
||||||
|
PT_NULL = 0 # null segment
|
||||||
|
PT_LOAD = 1 # segment to be load
|
||||||
|
PT_DYNAMIC = 2 # dynamic tags
|
||||||
|
PT_INTERP = 3 # interpreter, same as .interp section
|
||||||
|
PT_NOTE = 4 # same as .note* section
|
||||||
|
PT_SHLIB = 5 # reserved
|
||||||
|
PT_PHDR = 6 # where program header starts
|
||||||
|
PT_TLS = 7 # thread local storage segment
|
||||||
|
|
||||||
|
PT_LOOS = 0x60000000 # OS-specific
|
||||||
|
PT_GNU_EH_FRAME = 0x6474e550 # for exception handler
|
||||||
|
PT_GNU_STACK = 0x6474e551 # permission of stack
|
||||||
|
PT_GNU_RELRO = 0x6474e552 # read only after relocation
|
||||||
|
PT_GNU_PROPERTY = 0x6474e553 # GNU property
|
||||||
|
PT_GNU_MBIND_HI = 0x6474f554 # Mbind segments (upper bound)
|
||||||
|
PT_GNU_MBIND_LO = 0x6474e555 # Mbind segments (lower bound)
|
||||||
|
PT_OPENBSD_RANDOMIZE = 0x65a3dbe6 # Fill with random data
|
||||||
|
PT_OPENBSD_WXNEEDED = 0x65a3dbe7 # Program does W^X violations
|
||||||
|
PT_OPENBSD_BOOTDATA = 0x65a41be6 # Section for boot arguments
|
||||||
|
PT_HIOS = 0x6fffffff # OS-specific
|
||||||
|
|
||||||
|
# Values between {PT_LOPROC} and {PT_HIPROC} are reserved for processor-specific semantics.
|
||||||
|
PT_LOPROC = 0x70000000
|
||||||
|
|
||||||
|
PT_ARM_ARCHEXT = 0x70000000 # platform architecture compatibility information
|
||||||
|
PT_ARM_EXIDX = 0x70000001 # exception unwind tables
|
||||||
|
|
||||||
|
PT_MIPS_REGINFO = 0x70000000 # register usage information
|
||||||
|
PT_MIPS_RTPROC = 0x70000001 # runtime procedure table
|
||||||
|
PT_MIPS_OPTIONS = 0x70000002 # +.MIPS.options+ section
|
||||||
|
PT_MIPS_ABIFLAGS = 0x70000003 # +.MIPS.abiflags+ section
|
||||||
|
|
||||||
|
PT_AARCH64_ARCHEXT = 0x70000000 # platform architecture compatibility information
|
||||||
|
PT_AARCH64_UNWIND = 0x70000001 # exception unwind tables
|
||||||
|
|
||||||
|
PT_S390_PGSTE = 0x70000000 # 4k page table size
|
||||||
|
|
||||||
|
PT_HIPROC = 0x7fffffff # see {PT_LOPROC}
|
||||||
|
end
|
||||||
|
include PT
|
||||||
|
|
||||||
|
# Special indices to section. These are used when there is no valid index to section header.
|
||||||
|
# The meaning of these values is left up to the embedding header.
|
||||||
|
module SHN
|
||||||
|
SHN_UNDEF = 0 # undefined section
|
||||||
|
SHN_LORESERVE = 0xff00 # start of reserved indices
|
||||||
|
|
||||||
|
# Values between {SHN_LOPROC} and {SHN_HIPROC} are reserved for processor-specific semantics.
|
||||||
|
SHN_LOPROC = 0xff00
|
||||||
|
|
||||||
|
SHN_MIPS_ACOMMON = 0xff00 # defined and allocated common symbol
|
||||||
|
SHN_MIPS_TEXT = 0xff01 # defined and allocated text symbol
|
||||||
|
SHN_MIPS_DATA = 0xff02 # defined and allocated data symbol
|
||||||
|
SHN_MIPS_SCOMMON = 0xff03 # small common symbol
|
||||||
|
SHN_MIPS_SUNDEFINED = 0xff04 # small undefined symbol
|
||||||
|
|
||||||
|
SHN_X86_64_LCOMMON = 0xff02 # large common symbol
|
||||||
|
|
||||||
|
SHN_HIPROC = 0xff1f # see {SHN_LOPROC}
|
||||||
|
|
||||||
|
# Values between {SHN_LOOS} and {SHN_HIOS} are reserved for operating system-specific semantics.
|
||||||
|
SHN_LOOS = 0xff20
|
||||||
|
SHN_HIOS = 0xff3f # see {SHN_LOOS}
|
||||||
|
SHN_ABS = 0xfff1 # specifies absolute values for the corresponding reference
|
||||||
|
SHN_COMMON = 0xfff2 # symbols defined relative to this section are common symbols
|
||||||
|
SHN_XINDEX = 0xffff # escape value indicating that the actual section header index is too large to fit
|
||||||
|
SHN_HIRESERVE = 0xffff # end of reserved indices
|
||||||
|
end
|
||||||
|
include SHN
|
||||||
|
|
||||||
|
# Section flag mask types, records in +sh_flag+.
|
||||||
|
module SHF
|
||||||
|
SHF_WRITE = (1 << 0) # Writable
|
||||||
|
SHF_ALLOC = (1 << 1) # Occupies memory during execution
|
||||||
|
SHF_EXECINSTR = (1 << 2) # Executable
|
||||||
|
SHF_MERGE = (1 << 4) # Might be merged
|
||||||
|
SHF_STRINGS = (1 << 5) # Contains nul-terminated strings
|
||||||
|
SHF_INFO_LINK = (1 << 6) # `sh_info' contains SHT index
|
||||||
|
SHF_LINK_ORDER = (1 << 7) # Preserve order after combining
|
||||||
|
SHF_OS_NONCONFORMING = (1 << 8) # Non-standard OS specific handling required
|
||||||
|
SHF_GROUP = (1 << 9) # Section is member of a group.
|
||||||
|
SHF_TLS = (1 << 10) # Section holds thread-local data.
|
||||||
|
SHF_COMPRESSED = (1 << 11) # Section with compressed data.
|
||||||
|
SHF_MASKOS = 0x0ff00000 # OS-specific.
|
||||||
|
SHF_MASKPROC = 0xf0000000 # Processor-specific
|
||||||
|
SHF_GNU_RETAIN = (1 << 21) # Not to be GCed by linker.
|
||||||
|
SHF_GNU_MBIND = (1 << 24) # Mbind section
|
||||||
|
SHF_ORDERED = (1 << 30) # Special ordering requirement
|
||||||
|
SHF_EXCLUDE = (1 << 31) # Section is excluded unless referenced or allocated (Solaris).
|
||||||
|
end
|
||||||
|
include SHF
|
||||||
|
|
||||||
|
# Section header types, records in +sh_type+.
|
||||||
|
module SHT
|
||||||
|
SHT_NULL = 0 # null section
|
||||||
|
SHT_PROGBITS = 1 # information defined by program itself
|
||||||
|
SHT_SYMTAB = 2 # symbol table section
|
||||||
|
SHT_STRTAB = 3 # string table section
|
||||||
|
SHT_RELA = 4 # relocation with addends
|
||||||
|
SHT_HASH = 5 # symbol hash table
|
||||||
|
SHT_DYNAMIC = 6 # information of dynamic linking
|
||||||
|
SHT_NOTE = 7 # section for notes
|
||||||
|
SHT_NOBITS = 8 # section occupies no space
|
||||||
|
SHT_REL = 9 # relocation
|
||||||
|
SHT_SHLIB = 10 # reserved
|
||||||
|
SHT_DYNSYM = 11 # symbols for dynamic
|
||||||
|
SHT_INIT_ARRAY = 14 # array of initialization functions
|
||||||
|
SHT_FINI_ARRAY = 15 # array of termination functions
|
||||||
|
SHT_PREINIT_ARRAY = 16 # array of functions that are invoked before all other initialization functions
|
||||||
|
SHT_GROUP = 17 # section group
|
||||||
|
SHT_SYMTAB_SHNDX = 18 # indices for SHN_XINDEX entries
|
||||||
|
SHT_RELR = 19 # RELR relative relocations
|
||||||
|
|
||||||
|
# Values between {SHT_LOOS} and {SHT_HIOS} are reserved for operating system-specific semantics.
|
||||||
|
SHT_LOOS = 0x60000000
|
||||||
|
SHT_GNU_INCREMENTAL_INPUTS = 0x6fff4700 # incremental build data
|
||||||
|
SHT_GNU_INCREMENTAL_SYMTAB = 0x6fff4701 # incremental build data
|
||||||
|
SHT_GNU_INCREMENTAL_RELOCS = 0x6fff4702 # incremental build data
|
||||||
|
SHT_GNU_INCREMENTAL_GOT_PLT = 0x6fff4703 # incremental build data
|
||||||
|
SHT_GNU_ATTRIBUTES = 0x6ffffff5 # object attributes
|
||||||
|
SHT_GNU_HASH = 0x6ffffff6 # GNU style symbol hash table
|
||||||
|
SHT_GNU_LIBLIST = 0x6ffffff7 # list of prelink dependencies
|
||||||
|
SHT_SUNW_verdef = 0x6ffffffd # versions defined by file
|
||||||
|
SHT_GNU_verdef = 0x6ffffffd # versions defined by file
|
||||||
|
SHT_SUNW_verneed = 0x6ffffffe # versions needed by file
|
||||||
|
SHT_GNU_verneed = 0x6ffffffe # versions needed by file
|
||||||
|
SHT_SUNW_versym = 0x6fffffff # symbol versions
|
||||||
|
SHT_GNU_versym = 0x6fffffff # symbol versions
|
||||||
|
SHT_HIOS = 0x6fffffff # see {SHT_LOOS}
|
||||||
|
|
||||||
|
# Values between {SHT_LOPROC} and {SHT_HIPROC} are reserved for processor-specific semantics.
|
||||||
|
SHT_LOPROC = 0x70000000
|
||||||
|
|
||||||
|
SHT_SPARC_GOTDATA = 0x70000000 # :nodoc:
|
||||||
|
|
||||||
|
SHT_ARM_EXIDX = 0x70000001 # exception index table
|
||||||
|
SHT_ARM_PREEMPTMAP = 0x70000002 # BPABI DLL dynamic linking pre-emption map
|
||||||
|
SHT_ARM_ATTRIBUTES = 0x70000003 # object file compatibility attributes
|
||||||
|
SHT_ARM_DEBUGOVERLAY = 0x70000004 # support for debugging overlaid programs
|
||||||
|
SHT_ARM_OVERLAYSECTION = 0x70000005 # support for debugging overlaid programs
|
||||||
|
|
||||||
|
SHT_X86_64_UNWIND = 0x70000001 # x86_64 unwind information
|
||||||
|
|
||||||
|
SHT_MIPS_LIBLIST = 0x70000000 # set of dynamic shared objects
|
||||||
|
SHT_MIPS_MSYM = 0x70000001 # :nodoc:
|
||||||
|
SHT_MIPS_CONFLICT = 0x70000002 # list of symbols whose definitions conflict with shared objects
|
||||||
|
SHT_MIPS_GPTAB = 0x70000003 # global pointer table
|
||||||
|
SHT_MIPS_UCODE = 0x70000004 # microcode information
|
||||||
|
SHT_MIPS_DEBUG = 0x70000005 # register usage information
|
||||||
|
SHT_MIPS_REGINFO = 0x70000006 # section contains register usage information
|
||||||
|
SHT_MIPS_PACKAGE = 0x70000007 # :nodoc:
|
||||||
|
SHT_MIPS_PACKSYM = 0x70000008 # :nodoc:
|
||||||
|
SHT_MIPS_RELD = 0x70000009 # :nodoc:
|
||||||
|
SHT_MIPS_IFACE = 0x7000000b # interface information
|
||||||
|
SHT_MIPS_CONTENT = 0x7000000c # description of contents of another section
|
||||||
|
SHT_MIPS_OPTIONS = 0x7000000d # miscellaneous options
|
||||||
|
SHT_MIPS_SHDR = 0x70000010 # :nodoc:
|
||||||
|
SHT_MIPS_FDESC = 0x70000011 # :nodoc:
|
||||||
|
SHT_MIPS_EXTSYM = 0x70000012 # :nodoc:
|
||||||
|
SHT_MIPS_DENSE = 0x70000013 # :nodoc:
|
||||||
|
SHT_MIPS_PDESC = 0x70000014 # :nodoc:
|
||||||
|
SHT_MIPS_LOCSYM = 0x70000015 # :nodoc:
|
||||||
|
SHT_MIPS_AUXSYM = 0x70000016 # :nodoc:
|
||||||
|
SHT_MIPS_OPTSYM = 0x70000017 # :nodoc:
|
||||||
|
SHT_MIPS_LOCSTR = 0x70000018 # :nodoc:
|
||||||
|
SHT_MIPS_LINE = 0x70000019 # :nodoc:
|
||||||
|
SHT_MIPS_RFDESC = 0x7000001a # :nodoc:
|
||||||
|
SHT_MIPS_DELTASYM = 0x7000001b # delta C++ symbol table
|
||||||
|
SHT_MIPS_DELTAINST = 0x7000001c # delta C++ instance table
|
||||||
|
SHT_MIPS_DELTACLASS = 0x7000001d # delta C++ class table
|
||||||
|
SHT_MIPS_DWARF = 0x7000001e # DWARF debugging section
|
||||||
|
SHT_MIPS_DELTADECL = 0x7000001f # delta C++ declarations
|
||||||
|
SHT_MIPS_SYMBOL_LIB = 0x70000020 # list of libraries the binary depends on
|
||||||
|
SHT_MIPS_EVENTS = 0x70000021 # events section
|
||||||
|
SHT_MIPS_TRANSLATE = 0x70000022 # :nodoc:
|
||||||
|
SHT_MIPS_PIXIE = 0x70000023 # :nodoc:
|
||||||
|
SHT_MIPS_XLATE = 0x70000024 # address translation table
|
||||||
|
SHT_MIPS_XLATE_DEBUG = 0x70000025 # SGI internal address translation table
|
||||||
|
SHT_MIPS_WHIRL = 0x70000026 # intermediate code
|
||||||
|
SHT_MIPS_EH_REGION = 0x70000027 # C++ exception handling region info
|
||||||
|
SHT_MIPS_PDR_EXCEPTION = 0x70000029 # runtime procedure descriptor table exception information
|
||||||
|
SHT_MIPS_ABIFLAGS = 0x7000002a # ABI related flags
|
||||||
|
SHT_MIPS_XHASH = 0x7000002b # GNU style symbol hash table with xlat
|
||||||
|
|
||||||
|
SHT_AARCH64_ATTRIBUTES = 0x70000003 # :nodoc:
|
||||||
|
|
||||||
|
SHT_CSKY_ATTRIBUTES = 0x70000001 # object file compatibility attributes
|
||||||
|
|
||||||
|
SHT_ORDERED = 0x7fffffff # :nodoc:
|
||||||
|
|
||||||
|
SHT_HIPROC = 0x7fffffff # see {SHT_LOPROC}
|
||||||
|
|
||||||
|
# Values between {SHT_LOUSER} and {SHT_HIUSER} are reserved for application programs.
|
||||||
|
SHT_LOUSER = 0x80000000
|
||||||
|
SHT_HIUSER = 0xffffffff # see {SHT_LOUSER}
|
||||||
|
end
|
||||||
|
include SHT
|
||||||
|
|
||||||
|
# Symbol binding from Sym st_info field.
|
||||||
|
module STB
|
||||||
|
STB_LOCAL = 0 # Local symbol
|
||||||
|
STB_GLOBAL = 1 # Global symbol
|
||||||
|
STB_WEAK = 2 # Weak symbol
|
||||||
|
STB_NUM = 3 # Number of defined types.
|
||||||
|
STB_LOOS = 10 # Start of OS-specific
|
||||||
|
STB_GNU_UNIQUE = 10 # Unique symbol.
|
||||||
|
STB_HIOS = 12 # End of OS-specific
|
||||||
|
STB_LOPROC = 13 # Start of processor-specific
|
||||||
|
STB_HIPROC = 15 # End of processor-specific
|
||||||
|
end
|
||||||
|
include STB
|
||||||
|
|
||||||
|
# Symbol types from Sym st_info field.
|
||||||
|
module STT
|
||||||
|
STT_NOTYPE = 0 # Symbol type is unspecified
|
||||||
|
STT_OBJECT = 1 # Symbol is a data object
|
||||||
|
STT_FUNC = 2 # Symbol is a code object
|
||||||
|
STT_SECTION = 3 # Symbol associated with a section
|
||||||
|
STT_FILE = 4 # Symbol's name is file name
|
||||||
|
STT_COMMON = 5 # Symbol is a common data object
|
||||||
|
STT_TLS = 6 # Symbol is thread-local data object
|
||||||
|
STT_NUM = 7 # Deprecated.
|
||||||
|
STT_RELC = 8 # Complex relocation expression
|
||||||
|
STT_SRELC = 9 # Signed Complex relocation expression
|
||||||
|
|
||||||
|
# GNU extension: symbol value points to a function which is called
|
||||||
|
# at runtime to determine the final value of the symbol.
|
||||||
|
STT_GNU_IFUNC = 10
|
||||||
|
|
||||||
|
STT_LOOS = 10 # Start of OS-specific
|
||||||
|
STT_HIOS = 12 # End of OS-specific
|
||||||
|
STT_LOPROC = 13 # Start of processor-specific
|
||||||
|
STT_HIPROC = 15 # End of processor-specific
|
||||||
|
|
||||||
|
# The section type that must be used for register symbols on
|
||||||
|
# Sparc. These symbols initialize a global register.
|
||||||
|
STT_SPARC_REGISTER = 13
|
||||||
|
|
||||||
|
# ARM: a THUMB function. This is not defined in ARM ELF Specification but
|
||||||
|
# used by the GNU tool-chain.
|
||||||
|
STT_ARM_TFUNC = 13
|
||||||
|
STT_ARM_16BIT = 15 # ARM: a THUMB label.
|
||||||
|
end
|
||||||
|
include STT
|
||||||
|
end
|
||||||
|
end
|
||||||
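For orientation, a minimal sketch of the mapping helpers defined above; the constants and return strings come straight from this file, and only the calls themselves are illustrative:

    # Illustrative use of the constant-mapping helpers (not part of the vendored diff).
    require 'elftools/constants'

    ELFTools::Constants::EM.mapping(62)   #=> 'Advanced Micro Devices X86-64'
    ELFTools::Constants::EM.mapping(1337) #=> '<unknown>: 0x539'
    ELFTools::Constants::ET.mapping(3)    #=> 'DYN'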
178
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/dynamic.rb
vendored
Normal file
@ -0,0 +1,178 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# Define common methods for dynamic sections and dynamic segments.
|
||||||
|
#
|
||||||
|
# @note
|
||||||
|
# This module can only be included by {ELFTools::Sections::DynamicSection}
|
||||||
|
# and {ELFTools::Segments::DynamicSegment} because methods here assume some
|
||||||
|
# attributes exist.
|
||||||
|
module Dynamic
|
||||||
|
# Iterate all tags.
|
||||||
|
#
|
||||||
|
# @note
|
||||||
|
# This method assumes the following methods already exist:
|
||||||
|
# header
|
||||||
|
# tag_start
|
||||||
|
# @yieldparam [ELFTools::Dynamic::Tag] tag
|
||||||
|
# @return [Enumerator<ELFTools::Dynamic::Tag>, Array<ELFTools::Dynamic::Tag>]
|
||||||
|
# If block is not given, an enumerator will be returned.
|
||||||
|
# Otherwise, return array of tags.
|
||||||
|
def each_tags(&block)
|
||||||
|
return enum_for(:each_tags) unless block_given?
|
||||||
|
|
||||||
|
arr = []
|
||||||
|
0.step do |i|
|
||||||
|
tag = tag_at(i).tap(&block)
|
||||||
|
arr << tag
|
||||||
|
break if tag.header.d_tag == ELFTools::Constants::DT_NULL
|
||||||
|
end
|
||||||
|
arr
|
||||||
|
end
|
||||||
|
|
||||||
|
# Use {#tags} to get all tags.
|
||||||
|
# @return [Array<ELFTools::Dynamic::Tag>]
|
||||||
|
# Array of tags.
|
||||||
|
def tags
|
||||||
|
@tags ||= each_tags.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get a tag of specific type.
|
||||||
|
# @param [Integer, Symbol, String] type
|
||||||
|
# Constant value, symbol, or string of type
|
||||||
|
# is acceptable. See examples for more information.
|
||||||
|
# @return [ELFTools::Dynamic::Tag] The desired tag.
|
||||||
|
# @example
|
||||||
|
# dynamic = elf.segment_by_type(:dynamic)
|
||||||
|
# # type as integer
|
||||||
|
# dynamic.tag_by_type(0) # the null tag
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||||
|
# dynamic.tag_by_type(ELFTools::Constants::DT_NULL)
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||||
|
#
|
||||||
|
# # symbol
|
||||||
|
# dynamic.tag_by_type(:null)
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||||
|
# dynamic.tag_by_type(:pltgot)
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055d3d2d91b28 @header={:d_tag=>3, :d_val=>6295552}>
|
||||||
|
#
|
||||||
|
# # string
|
||||||
|
# dynamic.tag_by_type('null')
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055b5a5ecad28 @header={:d_tag=>0, :d_val=>0}>
|
||||||
|
# dynamic.tag_by_type('DT_PLTGOT')
|
||||||
|
# #=> #<ELFTools::Dynamic::Tag:0x0055d3d2d91b28 @header={:d_tag=>3, :d_val=>6295552}>
|
||||||
|
def tag_by_type(type)
|
||||||
|
type = Util.to_constant(Constants::DT, type)
|
||||||
|
each_tags.find { |tag| tag.header.d_tag == type }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get tags of specific type.
|
||||||
|
# @param [Integer, Symbol, String] type
|
||||||
|
# Constant value, symbol, or string of type
|
||||||
|
# is acceptable. See examples for more information.
|
||||||
|
# @return [Array<ELFTools::Dynamic::Tag>] The desired tags.
|
||||||
|
#
|
||||||
|
# @see #tag_by_type
|
||||||
|
def tags_by_type(type)
|
||||||
|
type = Util.to_constant(Constants::DT, type)
|
||||||
|
each_tags.select { |tag| tag.header.d_tag == type }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get the +n+-th tag.
|
||||||
|
#
|
||||||
|
# Tags are lazy loaded.
|
||||||
|
# @note
|
||||||
|
# This method assume the following methods already exist:
|
||||||
|
# header
|
||||||
|
# tag_start
|
||||||
|
# @note
|
||||||
|
# We cannot do bound checking of +n+ here since the only way to get size
|
||||||
|
# of tags is calling +tags.size+.
|
||||||
|
# @param [Integer] n The index.
|
||||||
|
# @return [ELFTools::Dynamic::Tag] The desired tag.
|
||||||
|
def tag_at(n)
|
||||||
|
return if n.negative?
|
||||||
|
|
||||||
|
@tag_at_map ||= {}
|
||||||
|
return @tag_at_map[n] if @tag_at_map[n]
|
||||||
|
|
||||||
|
dyn = Structs::ELF_Dyn.new(endian: endian)
|
||||||
|
dyn.elf_class = header.elf_class
|
||||||
|
stream.pos = tag_start + n * dyn.num_bytes
|
||||||
|
dyn.offset = stream.pos
|
||||||
|
@tag_at_map[n] = Tag.new(dyn.read(stream), stream, method(:str_offset))
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def endian
|
||||||
|
header.class.self_endian
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get the DT_STRTAB's +d_val+ offset related to file.
|
||||||
|
def str_offset
|
||||||
|
# TODO: handle the case where DT_STRTAB does not exist.
|
||||||
|
@str_offset ||= @offset_from_vma.call(tag_by_type(:strtab).header.d_val.to_i)
|
||||||
|
end
|
||||||
|
|
||||||
|
# A tag class.
|
||||||
|
class Tag
|
||||||
|
attr_reader :header # @return [ELFTools::Structs::ELF_Dyn] The dynamic tag header.
|
||||||
|
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||||
|
|
||||||
|
# Instantiate a {ELFTools::Dynamic::Tag} object.
|
||||||
|
# @param [ELF_Dyn] header The dynamic tag header.
|
||||||
|
# @param [#pos=, #read] stream Streaming object.
|
||||||
|
# @param [Method] str_offset
|
||||||
|
# Call this method to get the string offset related
|
||||||
|
# to file.
|
||||||
|
def initialize(header, stream, str_offset)
|
||||||
|
@header = header
|
||||||
|
@stream = stream
|
||||||
|
@str_offset = str_offset
|
||||||
|
end
|
||||||
|
|
||||||
|
# Some dynamic tags have a name.
|
||||||
|
TYPE_WITH_NAME = [Constants::DT_NEEDED,
|
||||||
|
Constants::DT_SONAME,
|
||||||
|
Constants::DT_RPATH,
|
||||||
|
Constants::DT_RUNPATH].freeze
|
||||||
|
# Return the content of this tag records.
|
||||||
|
#
|
||||||
|
# For normal tags, this method just returns +header.d_val+.
# For tags whose +header.d_val+ is a string offset (e.g. DT_NEEDED),
# this method returns the string it refers to.
|
||||||
|
# Tags with type in {TYPE_WITH_NAME} are those tags with name.
|
||||||
|
# @return [Integer, String] The content this tag records.
|
||||||
|
# @example
|
||||||
|
# dynamic = elf.segment_by_type(:dynamic)
|
||||||
|
# dynamic.tag_by_type(:init).value
|
||||||
|
# #=> 4195600 # 0x400510
|
||||||
|
# dynamic.tag_by_type(:needed).value
|
||||||
|
# #=> 'libc.so.6'
|
||||||
|
def value
|
||||||
|
name || header.d_val.to_i
|
||||||
|
end
|
||||||
|
|
||||||
|
# Does this tag have a name?
|
||||||
|
#
|
||||||
|
# The criterion here is whether this tag's type is in {TYPE_WITH_NAME}.
|
||||||
|
# @return [Boolean] Whether this tag has a name.
|
||||||
|
def name?
|
||||||
|
TYPE_WITH_NAME.include?(header.d_tag)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the name of this tag.
|
||||||
|
#
|
||||||
|
# Only tags with a name return a name.
|
||||||
|
# Others would return +nil+.
|
||||||
|
# @return [String, nil] The name.
|
||||||
|
def name
|
||||||
|
return nil unless name?
|
||||||
|
|
||||||
|
Util.cstring(stream, @str_offset.call + header.d_val.to_i)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
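A hedged usage sketch for the Dynamic module above, relying on {ELFTools::ELFFile} from the next file; the binary path is an assumption for illustration only:

    # Illustrative only: list the shared libraries a binary declares via DT_NEEDED.
    require 'elftools/elf_file'

    elf = ELFTools::ELFFile.new(File.open('/bin/ls')) # assumed example path
    dynamic = elf.segment_by_type(:dynamic)
    dynamic.tags_by_type(:needed).each { |tag| puts tag.value } # e.g. 'libc.so.6'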
377
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/elf_file.rb
vendored
Normal file
@ -0,0 +1,377 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/constants'
|
||||||
|
require 'elftools/exceptions'
|
||||||
|
require 'elftools/lazy_array'
|
||||||
|
require 'elftools/sections/sections'
|
||||||
|
require 'elftools/segments/segments'
|
||||||
|
require 'elftools/structs'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# The main class for using elftools.
|
||||||
|
class ELFFile
|
||||||
|
attr_reader :stream # @return [#pos=, #read] The +File+ object.
|
||||||
|
attr_reader :elf_class # @return [Integer] 32 or 64.
|
||||||
|
attr_reader :endian # @return [Symbol] +:little+ or +:big+.
|
||||||
|
|
||||||
|
# Instantiate an {ELFFile} object.
|
||||||
|
#
|
||||||
|
# @param [#pos=, #read] stream
|
||||||
|
# The +File+ object to fetch information from.
|
||||||
|
# @example
|
||||||
|
# ELFFile.new(File.open('/bin/cat'))
|
||||||
|
# #=> #<ELFTools::ELFFile:0x00564b106c32a0 @elf_class=64, @endian=:little, @stream=#<File:/bin/cat>>
|
||||||
|
def initialize(stream)
|
||||||
|
@stream = stream
|
||||||
|
# always set binmode if stream is an IO object.
|
||||||
|
@stream.binmode if @stream.respond_to?(:binmode)
|
||||||
|
identify # fetch the most basic information
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the file header.
|
||||||
|
#
|
||||||
|
# Lazy loading.
|
||||||
|
# @return [ELFTools::Structs::ELF_Ehdr] The header.
|
||||||
|
def header
|
||||||
|
return @header if defined?(@header)
|
||||||
|
|
||||||
|
stream.pos = 0
|
||||||
|
@header = Structs::ELF_Ehdr.new(endian: endian, offset: stream.pos)
|
||||||
|
@header.elf_class = elf_class
|
||||||
|
@header.read(stream)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the BuildID of ELF.
|
||||||
|
# @return [String, nil]
|
||||||
|
# BuildID in hex form will be returned.
|
||||||
|
# +nil+ is returned if the .note.gnu.build-id section
|
||||||
|
# is not found.
|
||||||
|
# @example
|
||||||
|
# elf.build_id
|
||||||
|
# #=> '73ab62cb7bc9959ce053c2b711322158708cdc07'
|
||||||
|
def build_id
|
||||||
|
section = section_by_name('.note.gnu.build-id')
|
||||||
|
return nil if section.nil?
|
||||||
|
|
||||||
|
note = section.notes.first
|
||||||
|
return nil if note.nil?
|
||||||
|
|
||||||
|
note.desc.unpack1('H*')
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get machine architecture.
|
||||||
|
#
|
||||||
|
# Mappings of architecture can be found
|
||||||
|
# in {ELFTools::Constants::EM.mapping}.
|
||||||
|
# @return [String]
|
||||||
|
# Name of architecture.
|
||||||
|
# @example
|
||||||
|
# elf.machine
|
||||||
|
# #=> 'Advanced Micro Devices X86-64'
|
||||||
|
def machine
|
||||||
|
ELFTools::Constants::EM.mapping(header.e_machine)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the ELF type according to +e_type+.
|
||||||
|
# @return [String] Type in string format.
|
||||||
|
# @example
|
||||||
|
# ELFFile.new(File.open('spec/files/libc.so.6')).elf_type
|
||||||
|
# #=> 'DYN'
|
||||||
|
# ELFFile.new(File.open('spec/files/amd64.elf')).elf_type
|
||||||
|
# #=> 'EXEC'
|
||||||
|
def elf_type
|
||||||
|
ELFTools::Constants::ET.mapping(header.e_type)
|
||||||
|
end
|
||||||
|
|
||||||
|
#========= method about sections
|
||||||
|
|
||||||
|
# Number of sections in this file.
|
||||||
|
# @return [Integer] The desired number.
|
||||||
|
# @example
|
||||||
|
# elf.num_sections
|
||||||
|
# #=> 29
|
||||||
|
def num_sections
|
||||||
|
header.e_shnum
|
||||||
|
end
|
||||||
|
|
||||||
|
# Acquire the section named as +name+.
|
||||||
|
# @param [String] name The desired section name.
|
||||||
|
# @return [ELFTools::Sections::Section, nil] The target section.
|
||||||
|
# @example
|
||||||
|
# elf.section_by_name('.note.gnu.build-id')
|
||||||
|
# #=> #<ELFTools::Sections::Section:0x005647b1282428>
|
||||||
|
# elf.section_by_name('')
|
||||||
|
# #=> #<ELFTools::Sections::NullSection:0x005647b11da110>
|
||||||
|
# elf.section_by_name('no such section')
|
||||||
|
# #=> nil
|
||||||
|
def section_by_name(name)
|
||||||
|
each_sections.find { |sec| sec.name == name }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Iterate all sections.
|
||||||
|
#
|
||||||
|
# All sections are lazily loaded; a section is
# only created when it is accessed.
|
||||||
|
# This method is useful for {#section_by_name}
|
||||||
|
# since not all sections need to be created.
|
||||||
|
# @yieldparam [ELFTools::Sections::Section] section A section.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Enumerator<ELFTools::Sections::Section>, Array<ELFTools::Sections::Section>]
|
||||||
|
# As with +Array#each+, if a block is not given, an enumerator will be returned;
# otherwise, all sections will be returned.
|
||||||
|
def each_sections(&block)
|
||||||
|
return enum_for(:each_sections) unless block_given?
|
||||||
|
|
||||||
|
Array.new(num_sections) do |i|
|
||||||
|
section_at(i).tap(&block)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simply use {#sections} to get all sections.
|
||||||
|
# @return [Array<ELFTools::Sections::Section>]
|
||||||
|
# Whole sections.
|
||||||
|
def sections
|
||||||
|
each_sections.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
# Acquire the +n+-th section, 0-based.
|
||||||
|
#
|
||||||
|
# Sections are lazy loaded.
|
||||||
|
# @param [Integer] n The index.
|
||||||
|
# @return [ELFTools::Sections::Section, nil]
|
||||||
|
# The target section.
|
||||||
|
# If +n+ is out of bound, +nil+ is returned.
|
||||||
|
def section_at(n)
|
||||||
|
@sections ||= LazyArray.new(num_sections, &method(:create_section))
|
||||||
|
@sections[n]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Fetch all sections with specific type.
|
||||||
|
#
|
||||||
|
# The available types are listed in {ELFTools::Constants::PT}.
|
||||||
|
# This method accepts a block.
|
||||||
|
# @param [Integer, Symbol, String] type
|
||||||
|
# The type needed, similar format as {#segment_by_type}.
|
||||||
|
# @yieldparam [ELFTools::Sections::Section] section A section in specific type.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Array<ELFTools::Sections::section>] The target sections.
|
||||||
|
# @example
|
||||||
|
# elf = ELFTools::ELFFile.new(File.open('spec/files/amd64.elf'))
|
||||||
|
# elf.sections_by_type(:rela)
|
||||||
|
# #=> [#<ELFTools::Sections::RelocationSection:0x00563cd3219970>,
|
||||||
|
# # #<ELFTools::Sections::RelocationSection:0x00563cd3b89d70>]
|
||||||
|
def sections_by_type(type, &block)
|
||||||
|
type = Util.to_constant(Constants::SHT, type)
|
||||||
|
Util.select_by_type(each_sections, type, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get the string table section.
|
||||||
|
#
|
||||||
|
# This section is acquired by using the +e_shstrndx+
|
||||||
|
# in ELF header.
|
||||||
|
# @return [ELFTools::Sections::StrTabSection] The desired section.
|
||||||
|
def strtab_section
|
||||||
|
section_at(header.e_shstrndx)
|
||||||
|
end
|
||||||
|
|
||||||
|
#========= method about segments
|
||||||
|
|
||||||
|
# Number of segments in this file.
|
||||||
|
# @return [Integer] The desired number.
|
||||||
|
def num_segments
|
||||||
|
header.e_phnum
|
||||||
|
end
|
||||||
|
|
||||||
|
# Iterate all segments.
|
||||||
|
#
|
||||||
|
# All segments are lazily loaded; a segment is
# only created when it is accessed.
|
||||||
|
# This method is useful for {#segment_by_type}
|
||||||
|
# since not all segments need to be created.
|
||||||
|
# @yieldparam [ELFTools::Segments::Segment] segment A segment.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Array<ELFTools::Segments::Segment>]
|
||||||
|
# Whole segments will be returned.
|
||||||
|
def each_segments(&block)
|
||||||
|
return enum_for(:each_segments) unless block_given?
|
||||||
|
|
||||||
|
Array.new(num_segments) do |i|
|
||||||
|
segment_at(i).tap(&block)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simply use {#segments} to get all segments.
|
||||||
|
# @return [Array<ELFTools::Segments::Segment>]
|
||||||
|
# Whole segments.
|
||||||
|
def segments
|
||||||
|
each_segments.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get the first segment with +p_type=type+.
|
||||||
|
# The available types are listed in {ELFTools::Constants::PT}.
|
||||||
|
#
|
||||||
|
# @note
|
||||||
|
# This method will return the first segment found;
# to find all segments of a specific type, use {#segments_by_type}.
|
||||||
|
# @param [Integer, Symbol, String] type
|
||||||
|
# See examples for clear usage.
|
||||||
|
# @return [ELFTools::Segments::Segment] The target segment.
|
||||||
|
# @example
|
||||||
|
# # type as an integer
|
||||||
|
# elf.segment_by_type(ELFTools::Constants::PT_NOTE)
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
#
|
||||||
|
# elf.segment_by_type(4) # PT_NOTE
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
#
|
||||||
|
# # type as a symbol
|
||||||
|
# elf.segment_by_type(:PT_NOTE)
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
#
|
||||||
|
# # you can do this
|
||||||
|
# elf.segment_by_type(:note) # will be transformed into `PT_NOTE`
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
#
|
||||||
|
# # type as a string
|
||||||
|
# elf.segment_by_type('PT_NOTE')
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
#
|
||||||
|
# # this is ok
|
||||||
|
# elf.segment_by_type('note') # will be transformed into `PT_NOTE`
|
||||||
|
# #=> #<ELFTools::Segments::NoteSegment:0x005629dda1e4f8>
|
||||||
|
# @example
|
||||||
|
# elf.segment_by_type(1337)
|
||||||
|
# # ArgumentError: No constants in Constants::PT is 1337
|
||||||
|
#
|
||||||
|
# elf.segment_by_type('oao')
|
||||||
|
# # ArgumentError: No constants in Constants::PT named "PT_OAO"
|
||||||
|
# @example
|
||||||
|
# elf.segment_by_type(0)
|
||||||
|
# #=> nil # no such segment exists
|
||||||
|
def segment_by_type(type)
|
||||||
|
type = Util.to_constant(Constants::PT, type)
|
||||||
|
each_segments.find { |seg| seg.header.p_type == type }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Fetch all segments with specific type.
|
||||||
|
#
|
||||||
|
# If you want to find only one segment,
|
||||||
|
# use {#segment_by_type} instead.
|
||||||
|
# This method accepts a block.
|
||||||
|
# @param [Integer, Symbol, String] type
|
||||||
|
# The type needed, same format as {#segment_by_type}.
|
||||||
|
# @yieldparam [ELFTools::Segments::Segment] segment A segment in specific type.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Array<ELFTools::Segments::Segment>] The target segments.
|
||||||
|
def segments_by_type(type, &block)
|
||||||
|
type = Util.to_constant(Constants::PT, type)
|
||||||
|
Util.select_by_type(each_segments, type, &block)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Acquire the +n+-th segment, 0-based.
|
||||||
|
#
|
||||||
|
# Segments are lazy loaded.
|
||||||
|
# @param [Integer] n The index.
|
||||||
|
# @return [ELFTools::Segments::Segment, nil]
|
||||||
|
# The target segment.
|
||||||
|
# If +n+ is out of bound, +nil+ is returned.
|
||||||
|
def segment_at(n)
|
||||||
|
@segments ||= LazyArray.new(num_segments, &method(:create_segment))
|
||||||
|
@segments[n]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get the offset related to file, given virtual memory address.
|
||||||
|
#
|
||||||
|
# This method should work whether or not the ELF is a PIE.
# It follows (and is effectively equivalent to) binutils/readelf.c#offset_from_vma.
|
||||||
|
# @param [Integer] vma The virtual address to be queried.
|
||||||
|
# @return [Integer] Related file offset.
|
||||||
|
# @example
|
||||||
|
# elf = ELFTools::ELFFile.new(File.open('/bin/cat'))
|
||||||
|
# elf.offset_from_vma(0x401337)
|
||||||
|
# #=> 4919 # 0x1337
|
||||||
|
def offset_from_vma(vma, size = 0)
|
||||||
|
segments_by_type(:load) do |seg|
|
||||||
|
return seg.vma_to_offset(vma) if seg.vma_in?(vma, size)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# The patch status.
|
||||||
|
# @return [Hash{Integer => String}]
|
||||||
|
def patches
|
||||||
|
patch = {}
|
||||||
|
loaded_headers.each do |header|
|
||||||
|
header.patches.each do |key, val|
|
||||||
|
patch[key + header.offset] = val
|
||||||
|
end
|
||||||
|
end
|
||||||
|
patch
|
||||||
|
end
|
||||||
|
|
||||||
|
# Apply patches and save as +filename+.
|
||||||
|
#
|
||||||
|
# @param [String] filename
|
||||||
|
# @return [void]
|
||||||
|
def save(filename)
|
||||||
|
stream.pos = 0
|
||||||
|
all = stream.read.force_encoding('ascii-8bit')
|
||||||
|
patches.each do |pos, val|
|
||||||
|
all[pos, val.size] = val
|
||||||
|
end
|
||||||
|
File.binwrite(filename, all)
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
# bad idea..
|
||||||
|
def loaded_headers
|
||||||
|
explore = lambda do |obj|
|
||||||
|
return obj if obj.is_a?(::ELFTools::Structs::ELFStruct)
|
||||||
|
return obj.map(&explore) if obj.is_a?(Array)
|
||||||
|
|
||||||
|
obj.instance_variables.map do |s|
|
||||||
|
explore.call(obj.instance_variable_get(s))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
explore.call(self).flatten
|
||||||
|
end
|
||||||
|
|
||||||
|
def identify
|
||||||
|
stream.pos = 0
|
||||||
|
magic = stream.read(4)
|
||||||
|
raise ELFMagicError, "Invalid magic number #{magic.inspect}" unless magic == Constants::ELFMAG
|
||||||
|
|
||||||
|
ei_class = stream.read(1).ord
|
||||||
|
@elf_class = {
|
||||||
|
1 => 32,
|
||||||
|
2 => 64
|
||||||
|
}[ei_class]
|
||||||
|
raise ELFClassError, format('Invalid EI_CLASS "\x%02x"', ei_class) if elf_class.nil?
|
||||||
|
|
||||||
|
ei_data = stream.read(1).ord
|
||||||
|
@endian = {
|
||||||
|
1 => :little,
|
||||||
|
2 => :big
|
||||||
|
}[ei_data]
|
||||||
|
raise ELFDataError, format('Invalid EI_DATA "\x%02x"', ei_data) if endian.nil?
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_section(n)
|
||||||
|
stream.pos = header.e_shoff + n * header.e_shentsize
|
||||||
|
shdr = Structs::ELF_Shdr.new(endian: endian, offset: stream.pos)
|
||||||
|
shdr.elf_class = elf_class
|
||||||
|
shdr.read(stream)
|
||||||
|
Sections::Section.create(shdr, stream,
|
||||||
|
offset_from_vma: method(:offset_from_vma),
|
||||||
|
strtab: method(:strtab_section),
|
||||||
|
section_at: method(:section_at))
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_segment(n)
|
||||||
|
stream.pos = header.e_phoff + n * header.e_phentsize
|
||||||
|
phdr = Structs::ELF_Phdr[elf_class].new(endian: endian, offset: stream.pos)
|
||||||
|
phdr.elf_class = elf_class
|
||||||
|
Segments::Segment.create(phdr.read(stream), stream, offset_from_vma: method(:offset_from_vma))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
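A minimal sketch of {ELFFile} in use, assuming an arbitrary dynamically linked binary at an illustrative path:

    # Illustrative only: basic queries against an ELF file.
    require 'elftools/elf_file'

    elf = ELFTools::ELFFile.new(File.open('/bin/cat')) # path as in the examples above
    puts elf.machine   # e.g. 'Advanced Micro Devices X86-64'
    puts elf.elf_type  # e.g. 'DYN' or 'EXEC'
    puts elf.build_id  # hex string, or nil if .note.gnu.build-id is absent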
15
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/exceptions.rb
vendored
Normal file
@ -0,0 +1,15 @@
# frozen_string_literal: true

module ELFTools
  # Raised when a parsing error occurs.
  class ELFError < StandardError; end

  # Raised on invalid ELF magic.
  class ELFMagicError < ELFError; end

  # Raised on invalid ELF class (EI_CLASS).
  class ELFClassError < ELFError; end

  # Raised on invalid ELF data encoding (EI_DATA).
  class ELFDataError < ELFError; end
end
49
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/lazy_array.rb
vendored
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'delegate'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# A helper class for {ELFTools} to easily implement
# lazily-loaded objects.
# Mainly used when loading sections, segments, and
# symbols.
|
||||||
|
class LazyArray < SimpleDelegator
|
||||||
|
# Instantiate a {LazyArray} object.
|
||||||
|
# @param [Integer] size
|
||||||
|
# The size of array.
|
||||||
|
# @yieldparam [Integer] i
|
||||||
|
# Needs the +i+-th element.
|
||||||
|
# @yieldreturn [Object]
|
||||||
|
# Value of the +i+-th element.
|
||||||
|
# @example
|
||||||
|
# arr = LazyArray.new(10) { |i| p "calc #{i}"; i * i }
|
||||||
|
# p arr[2]
|
||||||
|
# # "calc 2"
|
||||||
|
# # 4
|
||||||
|
#
|
||||||
|
# p arr[3]
|
||||||
|
# # "calc 3"
|
||||||
|
# # 9
|
||||||
|
#
|
||||||
|
# p arr[3]
|
||||||
|
# # 9
|
||||||
|
def initialize(size, &block)
|
||||||
|
super(Array.new(size))
|
||||||
|
@block = block
|
||||||
|
end
|
||||||
|
|
||||||
|
# To access elements like a normal array.
|
||||||
|
#
|
||||||
|
# Elements are lazily loaded the first time
# they are accessed.
|
||||||
|
# @return [Object]
|
||||||
|
# The element, returned type is the
|
||||||
|
# return type of block given in {#initialize}.
|
||||||
|
def [](i)
|
||||||
|
# XXX: support negative index?
|
||||||
|
return nil unless i.between?(0, __getobj__.size - 1)
|
||||||
|
|
||||||
|
__getobj__[i] ||= @block.call(i)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
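A short sketch of {LazyArray}'s memoization behaviour, following the example in its own documentation:

    # Illustrative only: elements are computed once and cached; out-of-bound reads return nil.
    require 'elftools/lazy_array'

    arr = ELFTools::LazyArray.new(3) { |i| puts "calc #{i}"; i * i }
    arr[2] # prints "calc 2", returns 4
    arr[2] # returns 4 again without recomputing
    arr[9] # returns nil (out of bound)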
125
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/note.rb
vendored
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/structs'
|
||||||
|
require 'elftools/util'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# Since both note sections and note segments refer to notes, this module
|
||||||
|
# defines common methods for {ELFTools::Sections::NoteSection} and
|
||||||
|
# {ELFTools::Segments::NoteSegment}.
|
||||||
|
#
|
||||||
|
# @note
|
||||||
|
# This module can only be included in {ELFTools::Sections::NoteSection} and
|
||||||
|
# {ELFTools::Segments::NoteSegment} since some methods here assume some
|
||||||
|
# attributes already exist.
|
||||||
|
module Note
|
||||||
|
# Since the size of {ELFTools::Structs::ELF_Nhdr} does not change regardless
# of endianness or architecture, we can compute it here. This value should
# equal 12.
|
||||||
|
SIZE_OF_NHDR = Structs::ELF_Nhdr.new(endian: :little).num_bytes
|
||||||
|
|
||||||
|
# Iterate all notes in a note section or segment.
|
||||||
|
#
|
||||||
|
# The structure of notes is:
|
||||||
|
# +---------------+
|
||||||
|
# | Note 1 header |
|
||||||
|
# +---------------+
|
||||||
|
# | Note 1 name |
|
||||||
|
# +---------------+
|
||||||
|
# | Note 1 desc |
|
||||||
|
# +---------------+
|
||||||
|
# | Note 2 header |
|
||||||
|
# +---------------+
|
||||||
|
# | ... |
|
||||||
|
# +---------------+
|
||||||
|
#
|
||||||
|
# @note
|
||||||
|
# This method assumes the following methods exist:
|
||||||
|
# stream
|
||||||
|
# note_start
|
||||||
|
# note_total_size
|
||||||
|
# @return [Enumerator<ELFTools::Note::Note>, Array<ELFTools::Note::Note>]
|
||||||
|
# If block is not given, an enumerator will be returned.
|
||||||
|
# Otherwise, return the array of notes.
|
||||||
|
def each_notes
|
||||||
|
return enum_for(:each_notes) unless block_given?
|
||||||
|
|
||||||
|
@notes_offset_map ||= {}
|
||||||
|
cur = note_start
|
||||||
|
notes = []
|
||||||
|
while cur < note_start + note_total_size
|
||||||
|
stream.pos = cur
|
||||||
|
@notes_offset_map[cur] ||= create_note(cur)
|
||||||
|
note = @notes_offset_map[cur]
|
||||||
|
# name and desc sizes need to be 4-byte aligned
|
||||||
|
name_size = Util.align(note.header.n_namesz, 2)
|
||||||
|
desc_size = Util.align(note.header.n_descsz, 2)
|
||||||
|
cur += SIZE_OF_NHDR + name_size + desc_size
|
||||||
|
notes << note
|
||||||
|
yield note
|
||||||
|
end
|
||||||
|
notes
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simply use +#notes+ to get all notes.
|
||||||
|
# @return [Array<ELFTools::Note::Note>]
|
||||||
|
# Whole notes.
|
||||||
|
def notes
|
||||||
|
each_notes.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
# Get the endian.
|
||||||
|
#
|
||||||
|
# @note This method assumes the method +header+ exists.
|
||||||
|
# @return [Symbol] +:little+ or +:big+.
|
||||||
|
def endian
|
||||||
|
header.class.self_endian
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_note(cur)
|
||||||
|
nhdr = Structs::ELF_Nhdr.new(endian: endian, offset: stream.pos).read(stream)
|
||||||
|
ELFTools::Note::Note.new(nhdr, stream, cur)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Class of a note.
|
||||||
|
class Note
|
||||||
|
attr_reader :header # @return [ELFTools::Structs::ELF_Nhdr] Note header.
|
||||||
|
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||||
|
attr_reader :offset # @return [Integer] Address of this note start, includes note header.
|
||||||
|
|
||||||
|
# Instantiate a {ELFTools::Note::Note} object.
|
||||||
|
# @param [ELF_Nhdr] header The note header.
|
||||||
|
# @param [#pos=, #read] stream Streaming object.
|
||||||
|
# @param [Integer] offset
|
||||||
|
# Start address of this note, includes the header.
|
||||||
|
def initialize(header, stream, offset)
|
||||||
|
@header = header
|
||||||
|
@stream = stream
|
||||||
|
@offset = offset
|
||||||
|
end
|
||||||
|
|
||||||
|
# Name of this note.
|
||||||
|
# @return [String] The name.
|
||||||
|
def name
|
||||||
|
return @name if defined?(@name)
|
||||||
|
|
||||||
|
stream.pos = @offset + SIZE_OF_NHDR
|
||||||
|
@name = stream.read(header.n_namesz)[0..-2]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Description of this note.
|
||||||
|
# @return [String] The description.
|
||||||
|
def desc
|
||||||
|
return @desc if instance_variable_defined?(:@desc)
|
||||||
|
|
||||||
|
stream.pos = @offset + SIZE_OF_NHDR + Util.align(header.n_namesz, 2)
|
||||||
|
@desc = stream.read(header.n_descsz)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Alias for those who prefer the full name.
|
||||||
|
alias description desc
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
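A hedged sketch of reading notes through the Note module above; the binary path is an illustrative assumption:

    # Illustrative only: dump the .note.gnu.build-id note, mirroring ELFFile#build_id.
    require 'elftools/elf_file'

    elf = ELFTools::ELFFile.new(File.open('/bin/cat')) # assumed example path
    section = elf.section_by_name('.note.gnu.build-id')
    section&.notes&.each do |note|
      puts note.name               # e.g. 'GNU'
      puts note.desc.unpack1('H*') # the build id in hex
    end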
@ -0,0 +1,22 @@
# frozen_string_literal: true

require 'elftools/dynamic'
require 'elftools/sections/section'

module ELFTools
  module Sections
    # Class for dynamic table section.
    #
    # This section should always be named .dynamic.
    # This class knows how to get the list of dynamic tags.
    class DynamicSection < Section
      include ELFTools::Dynamic

      # Get the start address of tags.
      # @return [Integer] Start address of tags.
      def tag_start
        header.sh_offset
      end
    end
  end
end
@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'elftools/note'
require 'elftools/sections/section'

module ELFTools
  module Sections
    # Class of note section.
    # A note section records notes.
    class NoteSection < Section
      # Load note related methods.
      include ELFTools::Note

      # Address offset of notes start.
      # @return [Integer] The offset.
      def note_start
        header.sh_offset
      end

      # The total size of notes in this section.
      # @return [Integer] The size.
      def note_total_size
        header.sh_size
      end
    end
  end
end
@ -0,0 +1,18 @@
# frozen_string_literal: true

require 'elftools/sections/section'

module ELFTools
  module Sections
    # Class of null section.
    # The null section marks the end of the linked list
    # (+sh_link+) between sections.
    class NullSection < Section
      # Is this a null section?
      # @return [Boolean] Yes it is.
      def null?
        true
      end
    end
  end
end
@ -0,0 +1,109 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/constants'
|
||||||
|
require 'elftools/sections/section'
|
||||||
|
require 'elftools/structs'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
module Sections
|
||||||
|
# Class of relocation section.
# A relocation section records relocation entries.
|
||||||
|
class RelocationSection < Section
|
||||||
|
# Whether this is a RELA (rather than REL) relocation section.
# @return [Boolean] True if RELA.
|
||||||
|
def rela?
|
||||||
|
header.sh_type == Constants::SHT_RELA
|
||||||
|
end
|
||||||
|
|
||||||
|
# Number of relocations in this section.
|
||||||
|
# @return [Integer] The number.
|
||||||
|
def num_relocations
|
||||||
|
header.sh_size / header.sh_entsize
|
||||||
|
end
|
||||||
|
|
||||||
|
# Acquire the +n+-th relocation, 0-based.
|
||||||
|
#
|
||||||
|
# relocations are lazy loaded.
|
||||||
|
# @param [Integer] n The index.
|
||||||
|
# @return [ELFTools::Relocation, nil]
|
||||||
|
# The target relocation.
|
||||||
|
# If +n+ is out of bound, +nil+ is returned.
|
||||||
|
def relocation_at(n)
|
||||||
|
@relocations ||= LazyArray.new(num_relocations, &method(:create_relocation))
|
||||||
|
@relocations[n]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Iterate all relocations.
|
||||||
|
#
|
||||||
|
# All relocations are lazily loaded; a relocation is
# only created when it is accessed.
|
||||||
|
# @yieldparam [ELFTools::Relocation] rel A relocation object.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Enumerator<ELFTools::Relocation>, Array<ELFTools::Relocation>]
|
||||||
|
# If block is not given, an enumerator will be returned.
|
||||||
|
# Otherwise, the whole relocations will be returned.
|
||||||
|
def each_relocations(&block)
|
||||||
|
return enum_for(:each_relocations) unless block_given?
|
||||||
|
|
||||||
|
Array.new(num_relocations) do |i|
|
||||||
|
relocation_at(i).tap(&block)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simply use {#relocations} to get all relocations.
|
||||||
|
# @return [Array<ELFTools::Relocation>]
|
||||||
|
# Whole relocations.
|
||||||
|
def relocations
|
||||||
|
each_relocations.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def create_relocation(n)
|
||||||
|
stream.pos = header.sh_offset + n * header.sh_entsize
|
||||||
|
klass = rela? ? Structs::ELF_Rela : Structs::ELF_Rel
|
||||||
|
rel = klass.new(endian: header.class.self_endian, offset: stream.pos)
|
||||||
|
rel.elf_class = header.elf_class
|
||||||
|
rel.read(stream)
|
||||||
|
Relocation.new(rel, stream)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# A relocation entry.
|
||||||
|
#
|
||||||
|
# Can be either a REL or RELA relocation.
|
||||||
|
# XXX: move this to an independent file?
|
||||||
|
class Relocation
|
||||||
|
attr_reader :header # @return [ELFTools::Structs::ELF_Rel, ELFTools::Structs::ELF_Rela] Rel(a) header.
|
||||||
|
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||||
|
|
||||||
|
# Instantiate a {Relocation} object.
|
||||||
|
def initialize(header, stream)
|
||||||
|
@header = header
|
||||||
|
@stream = stream
|
||||||
|
end
|
||||||
|
|
||||||
|
# +r_info+ contains both sym and type; use these two methods
# to access them more easily.
|
||||||
|
# @return [Integer] sym info.
|
||||||
|
def r_info_sym
|
||||||
|
header.r_info >> mask_bit
|
||||||
|
end
|
||||||
|
alias symbol_index r_info_sym
|
||||||
|
|
||||||
|
# +r_info+ contains both sym and type; use these two methods
# to access them more easily.
|
||||||
|
# @return [Integer] type info.
|
||||||
|
def r_info_type
|
||||||
|
header.r_info & ((1 << mask_bit) - 1)
|
||||||
|
end
|
||||||
|
alias type r_info_type
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def mask_bit
|
||||||
|
header.elf_class == 32 ? 8 : 32
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
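A hedged sketch of walking relocation sections with the {Relocation} accessors above; the sample file path is taken from the gem's own documentation examples:

    # Illustrative only: print the symbol index and type of every RELA relocation.
    require 'elftools/elf_file'

    elf = ELFTools::ELFFile.new(File.open('spec/files/amd64.elf')) # path from the docs' example
    elf.sections_by_type(:rela).each do |sec|
      sec.relocations.each do |rel|
        puts "#{rel.symbol_index} #{rel.type}"
      end
    end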
56
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/sections/section.rb
vendored
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/constants'
|
||||||
|
module ELFTools
|
||||||
|
module Sections
|
||||||
|
# Base class of sections.
|
||||||
|
class Section
|
||||||
|
attr_reader :header # @return [ELFTools::Structs::ELF_Shdr] Section header.
|
||||||
|
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||||
|
|
||||||
|
# Instantiate a {Section} object.
|
||||||
|
# @param [ELFTools::Structs::ELF_Shdr] header
|
||||||
|
# The section header object.
|
||||||
|
# @param [#pos=, #read] stream
|
||||||
|
# The streaming object for further dump.
|
||||||
|
# @param [ELFTools::Sections::StrTabSection, Proc] strtab
|
||||||
|
# The string table object. For fetching section names.
|
||||||
|
# If a +Proc+ is given, it will be called the first
# time +#name+ is accessed.
|
||||||
|
# @param [Method] offset_from_vma
|
||||||
|
# The method to get offset of file, given virtual memory address.
|
||||||
|
def initialize(header, stream, offset_from_vma: nil, strtab: nil, **_kwargs)
|
||||||
|
@header = header
|
||||||
|
@stream = stream
|
||||||
|
@strtab = strtab
|
||||||
|
@offset_from_vma = offset_from_vma
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return +header.sh_type+ in a simpler way.
|
||||||
|
# @return [Integer]
|
||||||
|
# The type, meaning of types are defined in {Constants::SHT}.
|
||||||
|
def type
|
||||||
|
header.sh_type.to_i
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get name of this section.
|
||||||
|
# @return [String] The name.
|
||||||
|
def name
|
||||||
|
@name ||= @strtab.call.name_at(header.sh_name)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Fetch data of this section.
|
||||||
|
# @return [String] Data.
|
||||||
|
def data
|
||||||
|
stream.pos = header.sh_offset
|
||||||
|
stream.read(header.sh_size)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Is this a null section?
|
||||||
|
# @return [Boolean] No it's not.
|
||||||
|
def null?
|
||||||
|
false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
38
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/sections/sections.rb
vendored
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
# Require this file to load all sections classes.
|
||||||
|
|
||||||
|
require 'elftools/sections/section'
|
||||||
|
|
||||||
|
require 'elftools/sections/dynamic_section'
|
||||||
|
require 'elftools/sections/note_section'
|
||||||
|
require 'elftools/sections/null_section'
|
||||||
|
require 'elftools/sections/relocation_section'
|
||||||
|
require 'elftools/sections/str_tab_section'
|
||||||
|
require 'elftools/sections/sym_tab_section'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
# Defines different types of sections in this module.
|
||||||
|
module Sections
|
||||||
|
# Class methods of {Sections::Section}.
|
||||||
|
class << Section
|
||||||
|
# Use different class according to +header.sh_type+.
|
||||||
|
# @param [ELFTools::Structs::ELF_Shdr] header Section header.
|
||||||
|
# @param [#pos=, #read] stream Streaming object.
|
||||||
|
# @return [ELFTools::Sections::Section]
|
||||||
|
# The returned object depends on +header.sh_type+.
|
||||||
|
def create(header, stream, *args, **kwargs)
|
||||||
|
klass = case header.sh_type
|
||||||
|
when Constants::SHT_DYNAMIC then DynamicSection
|
||||||
|
when Constants::SHT_NULL then NullSection
|
||||||
|
when Constants::SHT_NOTE then NoteSection
|
||||||
|
when Constants::SHT_RELA, Constants::SHT_REL then RelocationSection
|
||||||
|
when Constants::SHT_STRTAB then StrTabSection
|
||||||
|
when Constants::SHT_SYMTAB, Constants::SHT_DYNSYM then SymTabSection
|
||||||
|
else Section
|
||||||
|
end
|
||||||
|
klass.new(header, stream, *args, **kwargs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
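A short sketch showing that the dispatcher above returns type-specific section subclasses; the binary path is an assumption:

    # Illustrative only: sections come back as the subclass matching sh_type.
    require 'elftools/elf_file'

    elf = ELFTools::ELFFile.new(File.open('/bin/cat'))  # assumed example path
    elf.section_by_name('.dynamic').class               #=> ELFTools::Sections::DynamicSection (if present)
    elf.section_by_name('.note.gnu.build-id').class     #=> ELFTools::Sections::NoteSection (if present)
    elf.section_by_name('').class                       #=> ELFTools::Sections::NullSection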
@ -0,0 +1,21 @@
# frozen_string_literal: true

require 'elftools/sections/section'
require 'elftools/util'

module ELFTools
  module Sections
    # Class of string table section.
    # Usually for section .strtab and .dynstr,
    # which record names.
    class StrTabSection < Section
      # Return the section or symbol name.
      # @param [Integer] offset
      #   Usually from +shdr.sh_name+ or +sym.st_name+.
      # @return [String] The name without null bytes.
      def name_at(offset)
        Util.cstring(stream, header.sh_offset + offset)
      end
    end
  end
end
@ -0,0 +1,127 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/sections/section'
|
||||||
|
|
||||||
|
module ELFTools
|
||||||
|
module Sections
|
||||||
|
# Class of symbol table section.
|
||||||
|
# Usually for section .symtab and .dynsym,
|
||||||
|
# which will refer to symbols in ELF file.
|
||||||
|
class SymTabSection < Section
|
||||||
|
# Instantiate a {SymTabSection} object.
|
||||||
|
# There's a +section_at+ lambda for {SymTabSection}
|
||||||
|
# to easily fetch other sections.
|
||||||
|
# @param [ELFTools::Structs::ELF_Shdr] header
|
||||||
|
# See {Section#initialize} for more information.
|
||||||
|
# @param [#pos=, #read] stream
|
||||||
|
# See {Section#initialize} for more information.
|
||||||
|
# @param [Proc] section_at
|
||||||
|
# The method for fetching other sections by index.
|
||||||
|
# This lambda should be {ELFTools::ELFFile#section_at}.
|
||||||
|
def initialize(header, stream, section_at: nil, **_kwargs)
|
||||||
|
@section_at = section_at
|
||||||
|
# For faster #symbol_by_name
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
# Number of symbols.
|
||||||
|
# @return [Integer] The number.
|
||||||
|
# @example
|
||||||
|
# symtab.num_symbols
|
||||||
|
# #=> 75
|
||||||
|
def num_symbols
|
||||||
|
header.sh_size / header.sh_entsize
|
||||||
|
end
|
||||||
|
|
||||||
|
# Acquire the +n+-th symbol, 0-based.
|
||||||
|
#
|
||||||
|
# Symbols are lazy loaded.
|
||||||
|
# @param [Integer] n The index.
|
||||||
|
# @return [ELFTools::Sections::Symbol, nil]
|
||||||
|
# The target symbol.
|
||||||
|
# If +n+ is out of bound, +nil+ is returned.
|
||||||
|
def symbol_at(n)
|
||||||
|
@symbols ||= LazyArray.new(num_symbols, &method(:create_symbol))
|
||||||
|
@symbols[n]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Iterate all symbols.
|
||||||
|
#
|
||||||
|
# All symbols are lazily loaded; a symbol is
# only created when it is accessed.
|
||||||
|
# This method is useful for {#symbol_by_name}
|
||||||
|
# since not all symbols need to be created.
|
||||||
|
# @yieldparam [ELFTools::Sections::Symbol] sym A symbol object.
|
||||||
|
# @yieldreturn [void]
|
||||||
|
# @return [Enumerator<ELFTools::Sections::Symbol>, Array<ELFTools::Sections::Symbol>]
|
||||||
|
# If block is not given, an enumerator will be returned.
|
||||||
|
# Otherwise return array of symbols.
|
||||||
|
def each_symbols(&block)
|
||||||
|
return enum_for(:each_symbols) unless block_given?
|
||||||
|
|
||||||
|
Array.new(num_symbols) do |i|
|
||||||
|
symbol_at(i).tap(&block)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simply use {#symbols} to get all symbols.
|
||||||
|
# @return [Array<ELFTools::Sections::Symbol>]
|
||||||
|
# The whole symbols.
|
||||||
|
def symbols
|
||||||
|
each_symbols.to_a
|
||||||
|
end
|
||||||
|
|
||||||
|
# Get symbol by its name.
|
||||||
|
# @param [String] name
|
||||||
|
# The name of symbol.
|
||||||
|
# @return [ELFTools::Sections::Symbol] Desired symbol.
|
||||||
|
def symbol_by_name(name)
|
||||||
|
each_symbols.find { |symbol| symbol.name == name }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the symbol string section.
|
||||||
|
# Lazy loaded.
|
||||||
|
# @return [ELFTools::Sections::StrTabSection] The string table section.
|
||||||
|
def symstr
|
||||||
|
@symstr ||= @section_at.call(header.sh_link)
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def create_symbol(n)
|
||||||
|
stream.pos = header.sh_offset + n * header.sh_entsize
|
||||||
|
sym = Structs::ELF_sym[header.elf_class].new(endian: header.class.self_endian, offset: stream.pos)
|
||||||
|
sym.read(stream)
|
||||||
|
Symbol.new(sym, stream, symstr: method(:symstr))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Class of symbol.
|
||||||
|
#
|
||||||
|
# XXX: Should this class be defined in an independent file?
|
||||||
|
class Symbol
|
||||||
|
attr_reader :header # @return [ELFTools::Structs::ELF32_sym, ELFTools::Structs::ELF64_sym] Section header.
|
||||||
|
attr_reader :stream # @return [#pos=, #read] Streaming object.
|
||||||
|
|
||||||
|
# Instantiate a {ELFTools::Sections::Symbol} object.
|
||||||
|
# @param [ELFTools::Structs::ELF32_sym, ELFTools::Structs::ELF64_sym] header
|
||||||
|
# The symbol header.
|
||||||
|
# @param [#pos=, #read] stream The streaming object.
|
||||||
|
# @param [ELFTools::Sections::StrTabSection, Proc] symstr
|
||||||
|
# The symbol string section.
|
||||||
|
# If +Proc+ is given, it will be called at the first time
|
||||||
|
# access {Symbol#name}.
|
||||||
|
def initialize(header, stream, symstr: nil)
|
||||||
|
@header = header
|
||||||
|
@stream = stream
|
||||||
|
@symstr = symstr
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the symbol name.
|
||||||
|
# @return [String] The name.
|
||||||
|
def name
|
||||||
|
@name ||= @symstr.call.name_at(header.st_name)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
@ -0,0 +1,20 @@
# frozen_string_literal: true

require 'elftools/segments/segment'
require 'elftools/dynamic'

module ELFTools
  module Segments
    # Class for dynamic table segment.
    #
    # This class knows how to get the list of dynamic tags.
    class DynamicSegment < Segment
      include Dynamic # rock!
      # Get the start address of tags.
      # @return [Integer] Start address of tags.
      def tag_start
        header.p_offset
      end
    end
  end
end
@ -0,0 +1,20 @@
# frozen_string_literal: true

require 'elftools/segments/segment'

module ELFTools
  module Segments
    # For DT_INTERP segment, knows how to get path of
    # ELF interpreter.
    class InterpSegment < Segment
      # Get the path of interpreter.
      # @return [String] Path to the interpreter.
      # @example
      #   interp_segment.interp_name
      #   #=> '/lib64/ld-linux-x86-64.so.2'
      def interp_name
        data[0..-2] # remove last null byte
      end
    end
  end
end
@ -0,0 +1,91 @@
# frozen_string_literal: true

require 'elftools/segments/segment'

module ELFTools
  module Segments
    # For DT_LOAD segment.
    # Able to query between file offset and virtual memory address.
    class LoadSegment < Segment
      # Returns the start of this segment.
      # @return [Integer]
      #   The file offset.
      def file_head
        header.p_offset.to_i
      end

      # Returns size in file.
      # @return [Integer]
      #   The size.
      def size
        header.p_filesz.to_i
      end

      # Returns the end of this segment.
      # @return [Integer]
      #   The file offset.
      def file_tail
        file_head + size
      end

      # Returns the start virtual address of this segment.
      # @return [Integer]
      #   The vma.
      def mem_head
        header.p_vaddr.to_i
      end

      # Returns size in memory.
      # @return [Integer]
      #   The size.
      def mem_size
        header.p_memsz.to_i
      end

      # Returns the end virtual address of this segment.
      # @return [Integer]
      #   The vma.
      def mem_tail
        mem_head + mem_size
      end

      # Query if the given file offset located in this segment.
      # @param [Integer] offset
      #   File offset.
      # @param [Integer] size
      #   Size.
      # @return [Boolean]
      def offset_in?(offset, size = 0)
        file_head <= offset && offset + size < file_tail
      end

      # Convert file offset into virtual memory address.
      # @param [Integer] offset
      #   File offset.
      # @return [Integer]
      def offset_to_vma(offset)
        # XXX: What if file_head is not aligned with p_vaddr (which is invalid according to ELF spec)?
        offset - file_head + header.p_vaddr
      end

      # Query if the given virtual memory address located in this segment.
      # @param [Integer] vma
      #   Virtual memory address.
      # @param [Integer] size
      #   Size.
      # @return [Boolean]
      def vma_in?(vma, size = 0)
        vma >= (header.p_vaddr & -header.p_align) &&
          vma + size <= mem_tail
      end

      # Convert virtual memory address into file offset.
      # @param [Integer] vma
      #   Virtual memory address.
      # @return [Integer]
      def vma_to_offset(vma)
        vma - header.p_vaddr + header.p_offset
      end
    end
  end
end
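# Not part of the vendored gem: a minimal usage sketch of the LoadSegment API
# above, assuming `elf` is an ELFTools::ELFFile that was opened elsewhere.
load = elf.segments_by_type(:load).first   # first PT_LOAD segment
vma  = load.offset_to_vma(load.file_head)  # equals load.mem_head
off  = load.vma_to_offset(vma)             # back to load.file_head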
@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'elftools/note'
require 'elftools/segments/segment'

module ELFTools
  module Segments
    # Class of note segment.
    class NoteSegment < Segment
      # Load note related methods.
      include ELFTools::Note

      # Address offset of notes start.
      # @return [Integer] The offset.
      def note_start
        header.p_offset
      end

      # The total size of notes in this segment.
      # @return [Integer] The size.
      def note_total_size
        header.p_filesz
      end
    end
  end
end
56
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/segments/segment.rb
vendored
Normal file
@ -0,0 +1,56 @@
# frozen_string_literal: true

module ELFTools
  module Segments
    # Base class of segments.
    class Segment
      attr_reader :header # @return [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] Program header.
      attr_reader :stream # @return [#pos=, #read] Streaming object.

      # Instantiate a {Segment} object.
      # @param [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] header
      #   Program header.
      # @param [#pos=, #read] stream
      #   Streaming object.
      # @param [Method] offset_from_vma
      #   The method to get offset of file, given virtual memory address.
      def initialize(header, stream, offset_from_vma: nil)
        @header = header
        @stream = stream
        @offset_from_vma = offset_from_vma
      end

      # Return +header.p_type+ in a simplier way.
      # @return [Integer]
      #   The type, meaning of types are defined in {Constants::PT}.
      def type
        header.p_type
      end

      # The content in this segment.
      # @return [String] The content.
      def data
        stream.pos = header.p_offset
        stream.read(header.p_filesz)
      end

      # Is this segment readable?
      # @return [Boolean] Ture or false.
      def readable?
        (header.p_flags & 4) == 4
      end

      # Is this segment writable?
      # @return [Boolean] Ture or false.
      def writable?
        (header.p_flags & 2) == 2
      end

      # Is this segment executable?
      # @return [Boolean] Ture or false.
      def executable?
        (header.p_flags & 1) == 1
      end
    end
  end
end
34
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/segments/segments.rb
vendored
Normal file
@ -0,0 +1,34 @@
# frozen_string_literal: true

# Require this file to load all segment classes.

require 'elftools/segments/segment'

require 'elftools/segments/dynamic_segment'
require 'elftools/segments/interp_segment'
require 'elftools/segments/load_segment'
require 'elftools/segments/note_segment'

module ELFTools
  # Module for defining different types of segments.
  module Segments
    # Class methods of {Segments::Segment}.
    class << Segment
      # Use different class according to +header.p_type+.
      # @param [ELFTools::Structs::ELF32_Phdr, ELFTools::Structs::ELF64_Phdr] header Program header of a segment.
      # @param [#pos=, #read] stream Streaming object.
      # @return [ELFTools::Segments::Segment]
      #   Return object dependes on +header.p_type+.
      def create(header, stream, *args, **kwargs)
        klass = case header.p_type
                when Constants::PT_DYNAMIC then DynamicSegment
                when Constants::PT_INTERP then InterpSegment
                when Constants::PT_LOAD then LoadSegment
                when Constants::PT_NOTE then NoteSegment
                else Segment
                end
        klass.new(header, stream, *args, **kwargs)
      end
    end
  end
end
219
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/structs.rb
vendored
Normal file
@ -0,0 +1,219 @@
# frozen_string_literal: true

require 'bindata'

module ELFTools
  # Define ELF related structures in this module.
  #
  # Structures are fetched from https://github.com/torvalds/linux/blob/master/include/uapi/linux/elf.h.
  # Use gem +bindata+ to have these structures support 32/64 bits and little/big endian simultaneously.
  module Structs
    # The base structure to define common methods.
    class ELFStruct < BinData::Record
      # DRY. Many fields have different type in different arch.
      CHOICE_SIZE_T = proc do |t = 'uint'|
        { selection: :elf_class, choices: { 32 => :"#{t}32", 64 => :"#{t}64" }, copy_on_change: true }
      end

      attr_accessor :elf_class # @return [Integer] 32 or 64.
      attr_accessor :offset # @return [Integer] The file offset of this header.

      # Records which fields have been patched.
      # @return [Hash{Integer => Integer}] Patches.
      def patches
        @patches ||= {}
      end

      # BinData hash(Snapshot) that behaves like HashWithIndifferentAccess
      alias to_h snapshot

      class << self
        # Hooks the constructor.
        #
        # +BinData::Record+ doesn't allow us to override +#initialize+, so we hack +new+ here.
        def new(*args)
          # XXX: The better implementation is +new(*args, **kwargs)+, but we can't do this unless bindata changed
          # lib/bindata/dsl.rb#override_new_in_class to invoke +new+ with both +args+ and +kwargs+.
          kwargs = args.last.is_a?(Hash) ? args.last : {}
          offset = kwargs.delete(:offset)
          super.tap do |obj|
            obj.offset = offset
            obj.field_names.each do |f|
              m = "#{f}=".to_sym
              old_method = obj.singleton_method(m)
              obj.singleton_class.send(:undef_method, m)
              obj.define_singleton_method(m) do |val|
                org = obj.send(f)
                obj.patches[org.abs_offset] = ELFStruct.pack(val, org.num_bytes)
                old_method.call(val)
              end
            end
          end
        end

        # Gets the endianness of current class.
        # @return [:little, :big] The endianness.
        def self_endian
          bindata_name[-2..] == 'be' ? :big : :little
        end

        # Packs an integer to string.
        # @param [Integer] val
        # @param [Integer] bytes
        # @return [String]
        def pack(val, bytes)
          raise ArgumentError, "Not supported assign type #{val.class}" unless val.is_a?(Integer)

          number = val & ((1 << (8 * bytes)) - 1)
          out = []
          bytes.times do
            out << (number & 0xff)
            number >>= 8
          end
          out = out.pack('C*')
          self_endian == :little ? out : out.reverse
        end
      end
    end

    # ELF header structure.
    class ELF_Ehdr < ELFStruct
      endian :big_and_little
      struct :e_ident do
        string :magic, read_length: 4
        int8 :ei_class
        int8 :ei_data
        int8 :ei_version
        int8 :ei_osabi
        int8 :ei_abiversion
        string :ei_padding, read_length: 7 # no use
      end
      uint16 :e_type
      uint16 :e_machine
      uint32 :e_version
      # entry point
      choice :e_entry, **CHOICE_SIZE_T['uint']
      choice :e_phoff, **CHOICE_SIZE_T['uint']
      choice :e_shoff, **CHOICE_SIZE_T['uint']
      uint32 :e_flags
      uint16 :e_ehsize # size of this header
      uint16 :e_phentsize # size of each segment
      uint16 :e_phnum # number of segments
      uint16 :e_shentsize # size of each section
      uint16 :e_shnum # number of sections
      uint16 :e_shstrndx # index of string table section
    end

    # Section header structure.
    class ELF_Shdr < ELFStruct
      endian :big_and_little
      uint32 :sh_name
      uint32 :sh_type
      choice :sh_flags, **CHOICE_SIZE_T['uint']
      choice :sh_addr, **CHOICE_SIZE_T['uint']
      choice :sh_offset, **CHOICE_SIZE_T['uint']
      choice :sh_size, **CHOICE_SIZE_T['uint']
      uint32 :sh_link
      uint32 :sh_info
      choice :sh_addralign, **CHOICE_SIZE_T['uint']
      choice :sh_entsize, **CHOICE_SIZE_T['uint']
    end

    # Program header structure for 32-bit.
    class ELF32_Phdr < ELFStruct
      endian :big_and_little
      uint32 :p_type
      uint32 :p_offset
      uint32 :p_vaddr
      uint32 :p_paddr
      uint32 :p_filesz
      uint32 :p_memsz
      uint32 :p_flags
      uint32 :p_align
    end

    # Program header structure for 64-bit.
    class ELF64_Phdr < ELFStruct
      endian :big_and_little
      uint32 :p_type
      uint32 :p_flags
      uint64 :p_offset
      uint64 :p_vaddr
      uint64 :p_paddr
      uint64 :p_filesz
      uint64 :p_memsz
      uint64 :p_align
    end

    # Gets the class of program header according to bits.
    ELF_Phdr = {
      32 => ELF32_Phdr,
      64 => ELF64_Phdr
    }.freeze

    # Symbol structure for 32-bit.
    class ELF32_sym < ELFStruct
      endian :big_and_little
      uint32 :st_name
      uint32 :st_value
      uint32 :st_size
      uint8 :st_info
      uint8 :st_other
      uint16 :st_shndx
    end

    # Symbol structure for 64-bit.
    class ELF64_sym < ELFStruct
      endian :big_and_little
      uint32 :st_name # Symbol name, index in string tbl
      uint8 :st_info # Type and binding attributes
      uint8 :st_other # No defined meaning, 0
      uint16 :st_shndx # Associated section index
      uint64 :st_value # Value of the symbol
      uint64 :st_size # Associated symbol size
    end

    # Get symbol header class according to bits.
    ELF_sym = {
      32 => ELF32_sym,
      64 => ELF64_sym
    }.freeze

    # Note header.
    class ELF_Nhdr < ELFStruct
      endian :big_and_little
      uint32 :n_namesz # Name size
      uint32 :n_descsz # Content size
      uint32 :n_type # Content type
    end

    # Dynamic tag header.
    class ELF_Dyn < ELFStruct
      endian :big_and_little
      choice :d_tag, **CHOICE_SIZE_T['int']
      # This is an union type named +d_un+ in original source,
      # simplify it to be +d_val+ here.
      choice :d_val, **CHOICE_SIZE_T['uint']
    end

    # Rel header in .rel section.
    class ELF_Rel < ELFStruct
      endian :big_and_little
      choice :r_offset, **CHOICE_SIZE_T['uint']
      choice :r_info, **CHOICE_SIZE_T['uint']

      # Compatibility with ELF_Rela, both can be used interchangeably
      def r_addend
        nil
      end
    end

    # Rela header in .rela section.
    class ELF_Rela < ELFStruct
      endian :big_and_little
      choice :r_offset, **CHOICE_SIZE_T['uint']
      choice :r_info, **CHOICE_SIZE_T['uint']
      choice :r_addend, **CHOICE_SIZE_T['int']
    end
  end
end
99
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/util.rb
vendored
Normal file
@ -0,0 +1,99 @@
# frozen_string_literal: true

module ELFTools
  # Define some util methods.
  module Util
    # Class methods.
    module ClassMethods
      # Round up the number to be mulitple of
      # +2**bit+.
      # @param [Integer] num Number to be rounded-up.
      # @param [Integer] bit How many bit to be aligned.
      # @return [Integer] See examples.
      # @example
      #   align(10, 1) #=> 10
      #   align(10, 2) #=> 12
      #   align(10, 3) #=> 16
      #   align(10, 4) #=> 16
      #   align(10, 5) #=> 32
      def align(num, bit)
        n = 2**bit
        return num if (num % n).zero?

        (num + n) & ~(n - 1)
      end

      # Fetch the correct value from module +mod+.
      #
      # See {ELFTools::ELFFile#segment_by_type} for how to
      # use this method.
      # @param [Module] mod The module defined constant numbers.
      # @param [Integer, Symbol, String] val
      #   Desired value.
      # @return [Integer]
      #   Currently this method always return a value
      #   from {ELFTools::Constants}.
      def to_constant(mod, val)
        # Ignore the outest name.
        module_name = mod.name.sub('ELFTools::', '')
        # if val is an integer, check if exists in mod
        if val.is_a?(Integer)
          return val if mod.constants.any? { |c| mod.const_get(c) == val }

          raise ArgumentError, "No constants in #{module_name} is #{val}"
        end
        val = val.to_s.upcase
        prefix = module_name.split('::')[-1]
        val = "#{prefix}_#{val}" unless val.start_with?(prefix)
        val = val.to_sym
        raise ArgumentError, "No constants in #{module_name} named \"#{val}\"" unless mod.const_defined?(val)

        mod.const_get(val)
      end

      # Read from stream until reach a null-byte.
      # @param [#pos=, #read] stream Streaming object
      # @param [Integer] offset Start from here.
      # @return [String] Result string will never contain null byte.
      # @example
      #   Util.cstring(File.open('/bin/cat'), 0)
      #   #=> "\x7FELF\x02\x01\x01"
      def cstring(stream, offset)
        stream.pos = offset
        # read until "\x00"
        ret = ''
        loop do
          c = stream.read(1)
          return nil if c.nil? # reach EOF
          break if c == "\x00"

          ret += c
        end
        ret
      end

      # Select objects from enumerator with +.type+ property
      # equals to +type+.
      #
      # Different from naive +Array#select+ is this method
      # will yield block whenever find a desired object.
      #
      # This method is used to simplify the same logic in methods
      # {ELFFile#sections_by_type}, {ELFFile#segments_by_type}, etc.
      # @param [Enumerator] enum An enumerator for further select.
      # @param [Object] type The type you want.
      # @return [Array<Object>]
      #   The return value will be objects in +enum+ with attribute
      #   +.type+ equals to +type+.
      def select_by_type(enum, type)
        enum.select do |obj|
          if obj.type == type
            yield obj if block_given?
            true
          end
        end
      end
    end
    extend ClassMethods
  end
end
6
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/elftools-1.2.0/lib/elftools/version.rb
vendored
Normal file
@ -0,0 +1,6 @@
# frozen_string_literal: true

module ELFTools
  # Current gem version
  VERSION = '1.2.0'
end
10
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf.rb
vendored
Normal file
@ -0,0 +1,10 @@
# frozen_string_literal: true

# Main module of patchelf.
#
# @author david942j
module PatchELF
end

require 'patchelf/patcher'
require 'patchelf/version'
1052
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/alt_saver.rb
vendored
Normal file
File diff suppressed because it is too large
148
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/cli.rb
vendored
Normal file
@ -0,0 +1,148 @@
# frozen_string_literal: true

require 'optparse'

require 'patchelf/patcher'
require 'patchelf/version'

module PatchELF
  # For command line interface to parsing arguments.
  module CLI
    # Name of binary.
    SCRIPT_NAME = 'patchelf.rb'.freeze
    # CLI usage string.
    USAGE = format('Usage: %s <commands> FILENAME [OUTPUT_FILE]', SCRIPT_NAME).freeze

    module_function

    # Main method of CLI.
    # @param [Array<String>] argv
    #   Command line arguments.
    # @return [void]
    # @example
    #   PatchELF::CLI.work(%w[--help])
    #   # usage message to stdout
    #   PatchELF::CLI.work(%w[--version])
    #   # version message to stdout
    def work(argv)
      @options = {
        set: {},
        print: [],
        needed: []
      }
      return $stdout.puts "PatchELF Version #{PatchELF::VERSION}" if argv.include?('--version')
      return $stdout.puts option_parser unless parse(argv)

      # Now the options are (hopefully) valid, let's process the ELF file.
      begin
        @patcher = PatchELF::Patcher.new(@options[:in_file])
      rescue ELFTools::ELFError, Errno::ENOENT => e
        return PatchELF::Logger.error(e.message)
      end
      patcher.use_rpath! if @options[:force_rpath]
      readonly
      patch_requests
      patcher.save(@options[:out_file])
    end

    private

    def patcher
      @patcher
    end

    def readonly
      @options[:print].uniq.each do |s|
        content = patcher.__send__(s)
        next if content.nil?

        s = :rpath if @options[:force_rpath] && s == :runpath
        $stdout.puts "#{s}: #{Array(content).join(' ')}"
      end
    end

    def patch_requests
      @options[:set].each do |sym, val|
        patcher.__send__("#{sym}=".to_sym, val)
      end

      @options[:needed].each do |type, val|
        patcher.__send__("#{type}_needed".to_sym, *val)
      end
    end

    def parse(argv)
      remain = option_parser.permute(argv)
      return false if remain.first.nil?

      @options[:in_file] = remain.first
      @options[:out_file] = remain[1] # can be nil
      true
    end

    def option_parser
      @option_parser ||= OptionParser.new do |opts|
        opts.banner = USAGE

        opts.on('--print-interpreter', '--pi', 'Show interpreter\'s name.') do
          @options[:print] << :interpreter
        end

        opts.on('--print-needed', '--pn', 'Show needed libraries specified in DT_NEEDED.') do
          @options[:print] << :needed
        end

        opts.on('--print-runpath', '--pr', 'Show the path specified in DT_RUNPATH.') do
          @options[:print] << :runpath
        end

        opts.on('--print-soname', '--ps', 'Show soname specified in DT_SONAME.') do
          @options[:print] << :soname
        end

        opts.on('--set-interpreter INTERP', '--interp INTERP', 'Set interpreter\'s name.') do |interp|
          @options[:set][:interpreter] = interp
        end

        opts.on('--set-needed LIB1,LIB2,LIB3', '--needed LIB1,LIB2,LIB3', Array,
                'Set needed libraries, this will remove all existent needed libraries.') do |needs|
          @options[:set][:needed] = needs
        end

        opts.on('--add-needed LIB', 'Append a new needed library.') do |lib|
          @options[:needed] << [:add, lib]
        end

        opts.on('--remove-needed LIB', 'Remove a needed library.') do |lib|
          @options[:needed] << [:remove, lib]
        end

        opts.on('--replace-needed LIB1,LIB2', Array, 'Replace needed library LIB1 as LIB2.') do |libs|
          @options[:needed] << [:replace, libs]
        end

        opts.on('--set-runpath PATH', '--runpath PATH', 'Set the path of runpath.') do |path|
          @options[:set][:runpath] = path
        end

        opts.on(
          '--force-rpath',
          'According to the ld.so docs, DT_RPATH is obsolete,',
          "#{SCRIPT_NAME} will always try to get/set DT_RUNPATH first.",
          'Use this option to force every operations related to runpath (e.g. --runpath)',
          'to consider \'DT_RPATH\' instead of \'DT_RUNPATH\'.'
        ) do
          @options[:force_rpath] = true
        end

        opts.on('--set-soname SONAME', '--so SONAME', 'Set name of a shared library.') do |soname|
          @options[:set][:soname] = soname
        end

        opts.on('--version', 'Show current gem\'s version.')
      end
    end

    extend self
  end
end
15
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/exceptions.rb
vendored
Normal file
@ -0,0 +1,15 @@
# encoding: ascii-8bit
# frozen_string_literal: true

require 'elftools/exceptions'

module PatchELF
  # Raised on an error during ELF modification.
  class PatchError < ELFTools::ELFError; end

  # Raised when Dynamic Tag is missing
  class MissingTagError < PatchError; end

  # Raised on missing Program Header(segment)
  class MissingSegmentError < PatchError; end
end
84
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/helper.rb
vendored
Normal file
@ -0,0 +1,84 @@
# frozen_string_literal: true

module PatchELF
  # Helper methods for internal usage.
  module Helper
    module_function

    # Color codes for pretty print.
    COLOR_CODE = {
      esc_m: "\e[0m",
      info: "\e[38;5;82m", # light green
      warn: "\e[38;5;230m", # light yellow
      error: "\e[38;5;196m" # heavy red
    }.freeze

    # The size of one page.
    def page_size(e_machine = nil)
      # Different architectures have different minimum section alignments.
      case e_machine
      when ELFTools::Constants::EM_SPARC,
           ELFTools::Constants::EM_MIPS,
           ELFTools::Constants::EM_PPC,
           ELFTools::Constants::EM_PPC64,
           ELFTools::Constants::EM_AARCH64,
           ELFTools::Constants::EM_TILEGX,
           ELFTools::Constants::EM_LOONGARCH
        0x10000
      else
        0x1000
      end
    end

    # For wrapping string with color codes for prettier inspect.
    # @param [String] str
    #   Content to colorize.
    # @param [Symbol] type
    #   Specify which kind of color to use, valid symbols are defined in {.COLOR_CODE}.
    # @return [String]
    #   String that wrapped with color codes.
    def colorize(str, type)
      return str unless color_enabled?

      cc = COLOR_CODE
      color = cc.key?(type) ? cc[type] : ''
      "#{color}#{str.sub(COLOR_CODE[:esc_m], color)}#{cc[:esc_m]}"
    end

    # For {#colorize} to decide if need add color codes.
    # @return [Boolean]
    def color_enabled?
      $stderr.tty?
    end

    # @param [Integer] val
    # @param [Integer] align
    # @return [Integer]
    #   Aligned result.
    # @example
    #   aligndown(0x1234)
    #   #=> 4096
    #   aligndown(0x33, 0x20)
    #   #=> 32
    #   aligndown(0x10, 0x8)
    #   #=> 16
    def aligndown(val, align = page_size)
      val - (val & (align - 1))
    end

    # @param [Integer] val
    # @param [Integer] align
    # @return [Integer]
    #   Aligned result.
    # @example
    #   alignup(0x1234)
    #   #=> 8192
    #   alignup(0x33, 0x20)
    #   #=> 64
    #   alignup(0x10, 0x8)
    #   #=> 16
    def alignup(val, align = page_size)
      (val & (align - 1)).zero? ? val : (aligndown(val, align) + align)
    end
  end
end
25
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/logger.rb
vendored
Normal file
@ -0,0 +1,25 @@
# frozen_string_literal: true

require 'logger'

require 'patchelf/helper'

module PatchELF
  # A logger for internal usage.
  module Logger
    module_function

    @logger = ::Logger.new($stderr).tap do |log|
      log.formatter = proc do |severity, _datetime, _progname, msg|
        "[#{PatchELF::Helper.colorize(severity, severity.downcase.to_sym)}] #{msg}\n"
      end
    end

    %i[debug info warn error level=].each do |sym|
      define_method(sym) do |msg|
        @logger.__send__(sym, msg)
        nil
      end
    end
  end
end
186
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/mm.rb
vendored
Normal file
@ -0,0 +1,186 @@
# frozen_string_literal: true

require 'patchelf/helper'

module PatchELF
  # Memory management, provides malloc/free to allocate LOAD segments.
  # @private
  class MM
    attr_reader :extend_size # @return [Integer] The size extended.
    attr_reader :threshold # @return [Integer] Where the file start to be extended.

    # Instantiate a {MM} object.
    # @param [ELFTools::ELFFile] elf
    def initialize(elf)
      @elf = elf
      @request = []
    end

    # @param [Integer] size
    # @return [void]
    # @yieldparam [Integer] off
    # @yieldparam [Integer] vaddr
    # @yieldreturn [void]
    #   One can only do the following things in the block:
    #   1. Set ELF headers' attributes (with ELFTools)
    #   2. Invoke {Saver#inline_patch}
    def malloc(size, &block)
      raise ArgumentError, 'malloc\'s size most be positive.' if size <= 0

      @request << [size, block]
    end

    # Let the malloc / free requests be effective.
    # @return [void]
    def dispatch!
      return if @request.empty?

      @request_size = @request.map(&:first).inject(0, :+)
      # The malloc-ed area must be 'rw-' since the dynamic table will be modified during runtime.
      # Find all LOADs and calculate their f-gaps and m-gaps.
      # We prefer f-gap since it doesn't need move the whole binaries.
      # 1. Find if any f-gap has enough size, and one of the LOAD next to it is 'rw-'.
      #   - expand (forwardlly), only need to change the attribute of LOAD.
      # 2. Do 1. again but consider m-gaps instead.
      #   - expand (forwardlly), need to modify all section headers.
      # 3. We have to create a new LOAD, now we need to expand the first LOAD for putting new segment header.

      # First of all we check if there're less than two LOADs.
      abnormal_elf('No LOAD segment found, not an executable.') if load_segments.empty?
      # TODO: Handle only one LOAD. (be careful if memsz > filesz)

      fgap_method || mgap_method || new_load_method
    end

    # Query if extended.
    # @return [Boolean]
    def extended?
      defined?(@threshold)
    end

    # Get correct offset after the extension.
    #
    # @param [Integer] off
    # @return [Integer]
    #   Shifted offset.
    def extended_offset(off)
      return off unless defined?(@threshold)
      return off if off < @threshold

      off + @extend_size
    end

    private

    def fgap_method
      idx = find_gap { |prv, nxt| nxt.file_head - prv.file_tail }
      return false if idx.nil?

      loads = load_segments
      # prefer extend backwardly
      return extend_backward(loads[idx - 1]) if writable?(loads[idx - 1])

      extend_forward(loads[idx])
    end

    def extend_backward(seg, size = @request_size)
      invoke_callbacks(seg, seg.file_tail)
      seg.header.p_filesz += size
      seg.header.p_memsz += size
      true
    end

    def extend_forward(seg, size = @request_size)
      seg.header.p_offset -= size
      seg.header.p_vaddr -= size
      seg.header.p_filesz += size
      seg.header.p_memsz += size
      invoke_callbacks(seg, seg.file_head)
      true
    end

    def mgap_method
      # | 1 | | 2 |
      # | 1 | | 2 |
      #=>
      # | 1 | | 2 |
      # | 1 | | 2 |
      idx = find_gap(check_sz: false) { |prv, nxt| PatchELF::Helper.aligndown(nxt.mem_head) - prv.mem_tail }
      return false if idx.nil?

      loads = load_segments
      @threshold = loads[idx].file_head
      @extend_size = PatchELF::Helper.alignup(@request_size)
      shift_attributes
      # prefer backward than forward
      return extend_backward(loads[idx - 1]) if writable?(loads[idx - 1])

      # NOTE: loads[idx].file_head has been changed in shift_attributes
      extend_forward(loads[idx], @extend_size)
    end

    def find_gap(check_sz: true)
      loads = load_segments
      loads.each_with_index do |l, i|
        next if i.zero?
        next unless writable?(l) || writable?(loads[i - 1])

        sz = yield(loads[i - 1], l)
        abnormal_elf('LOAD segments are out of order.') if check_sz && sz.negative?
        next unless sz >= @request_size

        return i
      end
      nil
    end

    # TODO
    def new_load_method
      raise NotImplementedError
    end

    def writable?(seg)
      seg.readable? && seg.writable?
    end

    # For all attributes >= threshold, += offset
    def shift_attributes
      # ELFHeader->section_header
      # Sections:
      #   all
      # Segments:
      #   all
      # XXX: will be buggy if someday the number of segments can be changed.

      # Bottom-up
      @elf.each_sections do |sec|
        sec.header.sh_offset += extend_size if sec.header.sh_offset >= threshold
      end
      @elf.each_segments do |seg|
        next unless seg.header.p_offset >= threshold

        seg.header.p_offset += extend_size
        # We have to change align of LOAD segment since ld.so checks it.
        seg.header.p_align = Helper.page_size if seg.is_a?(ELFTools::Segments::LoadSegment)
      end

      @elf.header.e_shoff += extend_size if @elf.header.e_shoff >= threshold
    end

    def load_segments
      @elf.segments_by_type(:load)
    end

    def invoke_callbacks(seg, start)
      cur = start
      @request.each do |sz, block|
        block.call(cur, seg.offset_to_vma(cur))
        cur += sz
      end
    end

    def abnormal_elf(msg)
      raise ArgumentError, msg
    end
  end
end
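# Not part of the vendored gem: a sketch of how {Saver} drives the MM class
# above, assuming `elf` is an ELFTools::ELFFile opened elsewhere. Blocks passed
# to #malloc only run once #dispatch! has found (or made) room inside a
# writable LOAD segment.
mm = PatchELF::MM.new(elf)
mm.malloc(16) do |off, vaddr|
  # record where the 16 allocated bytes will live in the patched file
  allocated_at = [off, vaddr]
end
mm.dispatch!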
250
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/patcher.rb
vendored
Normal file
@ -0,0 +1,250 @@
# encoding: ascii-8bit
# frozen_string_literal: true

require 'elftools/elf_file'

require 'patchelf/exceptions'
require 'patchelf/logger'
require 'patchelf/saver'

module PatchELF
  # Class to handle all patching things.
  class Patcher
    # @!macro [new] note_apply
    #   @note This setting will be saved after {#save} being invoked.

    attr_reader :elf # @return [ELFTools::ELFFile] ELF parser object.

    # Instantiate a {Patcher} object.
    # @param [String] filename
    #   Filename of input ELF.
    # @param [Boolean] logging
    #   *deprecated*: use +on_error+ instead
    # @param [:log, :silent, :exception] on_error
    #   action when the desired segment/tag field isn't present
    #   :log = logs to stderr
    #   :exception = raise exception related to the error
    #   :silent = ignore the errors
    def initialize(filename, on_error: :log, logging: true)
      @in_file = filename
      @elf = ELFTools::ELFFile.new(File.open(filename))
      @set = {}
      @rpath_sym = :runpath
      @on_error = logging ? on_error : :exception

      on_error_syms = %i[exception log silent]
      raise ArgumentError, "on_error must be one of #{on_error_syms}" unless on_error_syms.include?(@on_error)
    end

    # @return [String?]
    #   Get interpreter's name.
    # @example
    #   PatchELF::Patcher.new('/bin/ls').interpreter
    #   #=> "/lib64/ld-linux-x86-64.so.2"
    def interpreter
      @set[:interpreter] || interpreter_
    end

    # Set interpreter's name.
    #
    # If the input ELF has no existent interpreter,
    # this method will show a warning and has no effect.
    # @param [String] interp
    # @macro note_apply
    def interpreter=(interp)
      return if interpreter_.nil? # will also show warning if there's no interp segment.

      @set[:interpreter] = interp
    end

    # Get needed libraries.
    # @return [Array<String>]
    # @example
    #   patcher = PatchELF::Patcher.new('/bin/ls')
    #   patcher.needed
    #   #=> ["libselinux.so.1", "libc.so.6"]
    def needed
      @set[:needed] || needed_
    end

    # Set needed libraries.
    # @param [Array<String>] needs
    # @macro note_apply
    def needed=(needs)
      @set[:needed] = needs
    end

    # Add the needed library.
    # @param [String] need
    # @return [void]
    # @macro note_apply
    def add_needed(need)
      @set[:needed] ||= needed_
      @set[:needed] << need
    end

    # Remove the needed library.
    # @param [String] need
    # @return [void]
    # @macro note_apply
    def remove_needed(need)
      @set[:needed] ||= needed_
      @set[:needed].delete(need)
    end

    # Replace needed library +src+ with +tar+.
    #
    # @param [String] src
    #   Library to be replaced.
    # @param [String] tar
    #   Library replace with.
    # @return [void]
    # @macro note_apply
    def replace_needed(src, tar)
      @set[:needed] ||= needed_
      @set[:needed].map! { |v| v == src ? tar : v }
    end

    # Get the soname of a shared library.
    # @return [String?] The name.
    # @example
    #   patcher = PatchELF::Patcher.new('/bin/ls')
    #   patcher.soname
    #   # [WARN] Entry DT_SONAME not found, not a shared library?
    #   #=> nil
    # @example
    #   PatchELF::Patcher.new('/lib/x86_64-linux-gnu/libc.so.6').soname
    #   #=> "libc.so.6"
    def soname
      @set[:soname] || soname_
    end

    # Set soname.
    #
    # If the input ELF is not a shared library with a soname,
    # this method will show a warning and has no effect.
    # @param [String] name
    # @macro note_apply
    def soname=(name)
      return if soname_.nil?

      @set[:soname] = name
    end

    # Get runpath.
    # @return [String?]
    def runpath
      @set[@rpath_sym] || runpath_(@rpath_sym)
    end

    # Get rpath
    # return [String?]
    def rpath
      @set[:rpath] || runpath_(:rpath)
    end

    # Set rpath
    #
    # Modify / set DT_RPATH of the given ELF.
    # similar to runpath= except DT_RPATH is modifed/created in DYNAMIC segment.
    # @param [String] rpath
    # @macro note_apply
    def rpath=(rpath)
      @set[:rpath] = rpath
    end

    # Set runpath.
    #
    # If DT_RUNPATH is not presented in the input ELF,
    # a new DT_RUNPATH attribute will be inserted into the DYNAMIC segment.
    # @param [String] runpath
    # @macro note_apply
    def runpath=(runpath)
      @set[@rpath_sym] = runpath
    end

    # Set all operations related to DT_RUNPATH to use DT_RPATH.
    # @return [self]
    def use_rpath!
      @rpath_sym = :rpath
      self
    end

    # Save the patched ELF as +out_file+.
    # @param [String?] out_file
    #   If +out_file+ is +nil+, the original input file will be modified.
    # @param [Boolean] patchelf_compatible
    #   When +patchelf_compatible+ is true, tries to produce same ELF as the one produced by NixOS/patchelf.
    # @return [void]
    def save(out_file = nil, patchelf_compatible: false)
      # If nothing is modified, return directly.
      return if out_file.nil? && !dirty?

      out_file ||= @in_file
      saver = if patchelf_compatible
                require 'patchelf/alt_saver'
                PatchELF::AltSaver.new(@in_file, out_file, @set)
              else
                PatchELF::Saver.new(@in_file, out_file, @set)
              end

      saver.save!
    end

    private

    def log_or_raise(msg, exception = PatchELF::PatchError)
      raise exception, msg if @on_error == :exception

      PatchELF::Logger.warn(msg) if @on_error == :log
    end

    def interpreter_
      segment = @elf.segment_by_type(:interp)
      return log_or_raise 'No interpreter found.', PatchELF::MissingSegmentError if segment.nil?

      segment.interp_name
    end

    # @return [Array<String>]
    def needed_
      segment = dynamic_or_log
      return if segment.nil?

      segment.tags_by_type(:needed).map(&:name)
    end

    # @return [String?]
    def runpath_(rpath_sym = :runpath)
      tag_name_or_log(rpath_sym, "Entry DT_#{rpath_sym.to_s.upcase} not found.")
    end

    # @return [String?]
    def soname_
      tag_name_or_log(:soname, 'Entry DT_SONAME not found, not a shared library?')
    end

    # @return [Boolean]
    def dirty?
      @set.any?
    end

    def tag_name_or_log(type, log_msg)
      segment = dynamic_or_log
      return if segment.nil?

      tag = segment.tag_by_type(type)
      return log_or_raise log_msg, PatchELF::MissingTagError if tag.nil?

      tag.name
    end

    def dynamic_or_log
      @elf.segment_by_type(:dynamic).tap do |s|
        if s.nil?
          log_or_raise 'DYNAMIC segment not found, might be a statically-linked ELF?', PatchELF::MissingSegmentError
        end
      end
    end
  end
end
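# Not part of the vendored gem: an end-to-end sketch of the public Patcher API
# above, mirroring its YARD examples (file paths here are placeholders).
patcher = PatchELF::Patcher.new('a.out')
patcher.interpreter               #=> e.g. "/lib64/ld-linux-x86-64.so.2"
patcher.runpath = '$ORIGIN/../lib'
patcher.add_needed('libfoo.so.1')
patcher.save('a.out.patched')     # pass no argument to modify a.out in place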
282
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/saver.rb
vendored
Normal file
@ -0,0 +1,282 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'elftools/constants'
|
||||||
|
require 'elftools/elf_file'
|
||||||
|
require 'elftools/structs'
|
||||||
|
require 'elftools/util'
|
||||||
|
require 'fileutils'
|
||||||
|
|
||||||
|
require 'patchelf/mm'
|
||||||
|
|
||||||
|
module PatchELF
|
||||||
|
# Internal use only.
|
||||||
|
#
|
||||||
|
# For {Patcher} to do patching things and save to file.
|
||||||
|
# @private
|
||||||
|
class Saver
|
||||||
|
attr_reader :in_file # @return [String] Input filename.
|
||||||
|
attr_reader :out_file # @return [String] Output filename.
|
||||||
|
|
||||||
|
# Instantiate a {Saver} object.
|
||||||
|
# @param [String] in_file
|
||||||
|
# @param [String] out_file
|
||||||
|
# @param [{Symbol => String, Array}] set
|
||||||
|
def initialize(in_file, out_file, set)
|
||||||
|
@in_file = in_file
|
||||||
|
@out_file = out_file
|
||||||
|
@set = set
|
||||||
|
# [{Integer => String}]
|
||||||
|
@inline_patch = {}
|
||||||
|
@elf = ELFTools::ELFFile.new(File.open(in_file))
|
||||||
|
@mm = PatchELF::MM.new(@elf)
|
||||||
|
@strtab_extend_requests = []
|
||||||
|
@append_dyn = []
|
||||||
|
end
|
||||||
|
|
||||||
|
# @return [void]
|
||||||
|
def save!
|
||||||
|
# In this method we assume all attributes that should exist do exist.
|
||||||
|
# e.g. DT_INTERP, DT_DYNAMIC. These should have been checked in the patcher.
|
||||||
|
patch_interpreter
|
||||||
|
patch_dynamic
|
||||||
|
|
||||||
|
@mm.dispatch!
|
||||||
|
|
||||||
|
FileUtils.cp(in_file, out_file) if out_file != in_file
|
||||||
|
patch_out(@out_file)
|
||||||
|
# Let output file have the same permission as input.
|
||||||
|
FileUtils.chmod(File.stat(in_file).mode, out_file)
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def patch_interpreter
|
||||||
|
return if @set[:interpreter].nil?
|
||||||
|
|
||||||
|
new_interp = "#{@set[:interpreter]}\x00"
|
||||||
|
old_interp = "#{@elf.segment_by_type(:interp).interp_name}\x00"
|
||||||
|
return if old_interp == new_interp
|
||||||
|
|
||||||
|
# These headers must be found here but not in the proc.
|
||||||
|
seg_header = @elf.segment_by_type(:interp).header
|
||||||
|
sec_header = section_header('.interp')
|
||||||
|
|
||||||
|
patch = proc do |off, vaddr|
|
||||||
|
# Register an inline patching
|
||||||
|
inline_patch(off, new_interp)
|
||||||
|
|
||||||
|
# The patching feature of ELFTools
|
||||||
|
seg_header.p_offset = off
|
||||||
|
seg_header.p_vaddr = seg_header.p_paddr = vaddr
|
||||||
|
seg_header.p_filesz = seg_header.p_memsz = new_interp.size
|
||||||
|
|
||||||
|
if sec_header
|
||||||
|
sec_header.sh_offset = off
|
||||||
|
sec_header.sh_size = new_interp.size
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if new_interp.size <= old_interp.size
|
||||||
|
# easy case
|
||||||
|
patch.call(seg_header.p_offset.to_i, seg_header.p_vaddr.to_i)
|
||||||
|
else
|
||||||
|
# hard case, we have to request a new LOAD area
|
||||||
|
@mm.malloc(new_interp.size, &patch)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def patch_dynamic
|
||||||
|
# We never do inline patching on strtab's string.
|
||||||
|
# 1. Search if there's useful string exists
|
||||||
|
# - only need header patching
|
||||||
|
# 2. Append a new string to the strtab.
|
||||||
|
# - register strtab extension
|
||||||
|
dynamic.tags # HACK, force @tags to be defined
|
||||||
|
patch_soname if @set[:soname]
|
||||||
|
patch_runpath if @set[:runpath]
|
||||||
|
patch_runpath(:rpath) if @set[:rpath]
|
||||||
|
patch_needed if @set[:needed]
|
||||||
|
malloc_strtab!
|
||||||
|
expand_dynamic!
|
||||||
|
end
|
||||||
|
|
||||||
|
def patch_soname
|
||||||
|
# The tag must exist.
|
||||||
|
so_tag = dynamic.tag_by_type(:soname)
|
||||||
|
reg_str_table(@set[:soname]) do |idx|
|
||||||
|
so_tag.header.d_val = idx
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def patch_runpath(sym = :runpath)
|
||||||
|
tag = dynamic.tag_by_type(sym)
|
||||||
|
tag = tag.nil? ? lazy_dyn(sym) : tag.header
|
||||||
|
reg_str_table(@set[sym]) do |idx|
|
||||||
|
tag.d_val = idx
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# To mark a not-using tag
|
||||||
|
IGNORE = ELFTools::Constants::DT_LOOS
|
||||||
|
def patch_needed
|
||||||
|
      original_needs = dynamic.tags_by_type(:needed)
      @set[:needed].uniq!
      original = original_needs.map(&:name)
      replace = @set[:needed]

      # 3 sets:
      # 1. in original and in needs - remain unchanged
      # 2. in original but not in needs - remove
      # 3. not in original and in needs - append
      append = replace - original
      remove = original - replace

      ignored_dyns = remove.each_with_object([]) do |name, ignored|
        dyn = original_needs.find { |n| n.name == name }.header
        dyn.d_tag = IGNORE
        ignored << dyn
      end

      append.zip(ignored_dyns) do |name, ignored_dyn|
        dyn = ignored_dyn || lazy_dyn(:needed)
        dyn.d_tag = ELFTools::Constants::DT_NEEDED
        reg_str_table(name) { |idx| dyn.d_val = idx }
      end
    end

    # Create a temp tag header.
    # @return [ELFTools::Structs::ELF_Dyn]
    def lazy_dyn(sym)
      ELFTools::Structs::ELF_Dyn.new(endian: @elf.endian).tap do |dyn|
        @append_dyn << dyn
        dyn.elf_class = @elf.elf_class
        dyn.d_tag = ELFTools::Util.to_constant(ELFTools::Constants::DT, sym)
      end
    end

    def expand_dynamic!
      return if @append_dyn.empty?

      dyn_sec = section_header('.dynamic')
      total = dynamic.tags.map(&:header)
      # the last must be a null-tag
      total = total[0..-2] + @append_dyn + [total.last]
      bytes = total.first.num_bytes * total.size
      @mm.malloc(bytes) do |off, vaddr|
        inline_patch(off, total.map(&:to_binary_s).join)
        dynamic.header.p_offset = off
        dynamic.header.p_vaddr = dynamic.header.p_paddr = vaddr
        dynamic.header.p_filesz = dynamic.header.p_memsz = bytes
        if dyn_sec
          dyn_sec.sh_offset = off
          dyn_sec.sh_addr = vaddr
          dyn_sec.sh_size = bytes
        end
      end
    end

    def malloc_strtab!
      return if @strtab_extend_requests.empty?

      strtab = dynamic.tag_by_type(:strtab)
      # Process registered requests
      need_size = strtab_string.size + @strtab_extend_requests.reduce(0) { |sum, (str, _)| sum + str.size + 1 }
      dynstr = section_header('.dynstr')
      @mm.malloc(need_size) do |off, vaddr|
        new_str = "#{strtab_string}#{@strtab_extend_requests.map(&:first).join("\x00")}\x00"
        inline_patch(off, new_str)
        cur = strtab_string.size
        @strtab_extend_requests.each do |str, block|
          block.call(cur)
          cur += str.size + 1
        end
        # Now patching strtab header
        strtab.header.d_val = vaddr
        # We also need to patch dynstr to let readelf have correct output.
        if dynstr
          dynstr.sh_size = new_str.size
          dynstr.sh_offset = off
          dynstr.sh_addr = vaddr
        end
      end
    end

    # @param [String] str
    # @yieldparam [Integer] idx
    # @yieldreturn [void]
    def reg_str_table(str, &block)
      idx = strtab_string.index("#{str}\x00")
      # Request string is already exist
      return yield idx if idx

      # Record the request
      @strtab_extend_requests << [str, block]
    end

    def strtab_string
      return @strtab_string if defined?(@strtab_string)

      # TODO: handle no strtab exists..
      offset = @elf.offset_from_vma(dynamic.tag_by_type(:strtab).value)
      # This is a little tricky since no length information is stored in the tag.
      # We first get the file offset of the string then 'guess' where the end is.
      @elf.stream.pos = offset
      @strtab_string = +''
      loop do
        c = @elf.stream.read(1)
        break unless c =~ /\x00|[[:print:]]/

        @strtab_string << c
      end
      @strtab_string
    end

    # This can only be used for patching interpreter's name
    # or set strings in a malloc-ed area.
    # i.e. NEVER intend to change the string defined in strtab
    def inline_patch(off, str)
      @inline_patch[off] = str
    end

    # Modify the out_file according to registered patches.
    def patch_out(out_file)
      File.open(out_file, 'r+') do |f|
        if @mm.extended?
          original_head = @mm.threshold
          extra = {}
          # Copy all data after the second load
          @elf.stream.pos = original_head
          extra[original_head + @mm.extend_size] = @elf.stream.read # read to end
          # zero out the 'gap' we created
          extra[original_head] = "\x00" * @mm.extend_size
          extra.each do |pos, str|
            f.pos = pos
            f.write(str)
          end
        end
        @elf.patches.each do |pos, str|
          f.pos = @mm.extended_offset(pos)
          f.write(str)
        end

        @inline_patch.each do |pos, str|
          f.pos = pos
          f.write(str)
        end
      end
    end

    # @return [ELFTools::Sections::Section?]
    def section_header(name)
      sec = @elf.section_by_name(name)
      return if sec.nil?

      sec.header
    end

    def dynamic
      @dynamic ||= @elf.segment_by_type(:dynamic)
    end
  end
end
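The needed-library rewrite above is, at its core, set arithmetic over the DT_NEEDED names, with removed entries recycled as slots for appended ones. A minimal standalone Ruby sketch of that bookkeeping (not part of this diff; the library names are invented for illustration):

original = ['libcrypto.so.1.1', 'libssl.so.1.1', 'libz.so.1']
replace  = ['libcrypto.so.3', 'libssl.so.3', 'libz.so.1']

append = replace - original   # => ["libcrypto.so.3", "libssl.so.3"]
remove = original - replace   # => ["libcrypto.so.1.1", "libssl.so.1.1"]

# Removed entries free dynamic-tag slots that the appended names can reuse;
# any surplus appends would need a freshly allocated tag (lazy_dyn above).
append.zip(remove) do |new_name, recycled_slot|
  action = recycled_slot ? "reuse slot of #{recycled_slot}" : 'allocate a new DT_NEEDED tag'
  puts "#{new_name}: #{action}"
end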
6
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/patchelf-1.4.0/lib/patchelf/version.rb
vendored
Normal file
@ -0,0 +1,6 @@
# frozen_string_literal: true

module PatchELF
  # Current gem version.
  VERSION = '1.4.0'.freeze
end
20
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/plist-3.7.0/LICENSE.txt
vendored
Normal file
@ -0,0 +1,20 @@
Copyright (c) 2006-2010, Ben Bleything and Patrick May

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
18
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/plist-3.7.0/lib/plist.rb
vendored
Normal file
@ -0,0 +1,18 @@
# encoding: utf-8

# = plist
#
# This is the main file for plist. Everything interesting happens in
# Plist and Plist::Emit.
#
# Copyright 2006-2010 Ben Bleything and Patrick May
# Distributed under the MIT License
#

require 'base64'
require 'cgi'
require 'stringio'

require_relative 'plist/generator'
require_relative 'plist/parser'
require_relative 'plist/version'
180
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/plist-3.7.0/lib/plist/generator.rb
vendored
Normal file
@ -0,0 +1,180 @@
# encoding: utf-8

# = plist
#
# Copyright 2006-2010 Ben Bleything and Patrick May
# Distributed under the MIT License
#

module Plist
  # === Create a plist
  # You can dump an object to a plist in one of two ways:
  #
  # * <tt>Plist::Emit.dump(obj)</tt>
  # * <tt>obj.to_plist</tt>
  #   * This requires that you mixin the <tt>Plist::Emit</tt> module, which is already done for +Array+ and +Hash+.
  #
  # The following Ruby classes are converted into native plist types:
  #   Array, Bignum, Date, DateTime, Fixnum, Float, Hash, Integer, String, Symbol, Time, true, false
  # * +Array+ and +Hash+ are both recursive; their elements will be converted into plist nodes inside the <array> and <dict> containers (respectively).
  # * +IO+ (and its descendants) and +StringIO+ objects are read from and their contents placed in a <data> element.
  # * User classes may implement +to_plist_node+ to dictate how they should be serialized; otherwise the object will be passed to <tt>Marshal.dump</tt> and the result placed in a <data> element.
  #
  # For detailed usage instructions, refer to USAGE[link:files/docs/USAGE.html] and the methods documented below.
  module Emit
    DEFAULT_INDENT = "\t"

    # Helper method for injecting into classes. Calls <tt>Plist::Emit.dump</tt> with +self+.
    def to_plist(envelope = true, options = {})
      Plist::Emit.dump(self, envelope, options)
    end

    # Helper method for injecting into classes. Calls <tt>Plist::Emit.save_plist</tt> with +self+.
    def save_plist(filename, options = {})
      Plist::Emit.save_plist(self, filename, options)
    end

    # The following Ruby classes are converted into native plist types:
    #   Array, Bignum, Date, DateTime, Fixnum, Float, Hash, Integer, String, Symbol, Time
    #
    # Write us (via RubyForge) if you think another class can be coerced safely into one of the expected plist classes.
    #
    # +IO+ and +StringIO+ objects are encoded and placed in <data> elements; other objects are <tt>Marshal.dump</tt>'ed unless they implement +to_plist_node+.
    #
    # The +envelope+ parameters dictates whether or not the resultant plist fragment is wrapped in the normal XML/plist header and footer. Set it to false if you only want the fragment.
    def self.dump(obj, envelope = true, options = {})
      options = { :indent => DEFAULT_INDENT }.merge(options)

      output = PlistBuilder.new(options[:indent]).build(obj)
      output = wrap(output) if envelope

      output
    end

    # Writes the serialized object's plist to the specified filename.
    def self.save_plist(obj, filename, options = {})
      File.open(filename, 'wb') do |f|
        f.write(obj.to_plist(true, options))
      end
    end

    private

    class PlistBuilder
      def initialize(indent_str)
        @indent_str = indent_str.to_s
      end

      def build(element, level=0)
        if element.respond_to? :to_plist_node
          element.to_plist_node
        else
          case element
          when Array
            if element.empty?
              tag('array', nil, level)
            else
              tag('array', nil, level) {
                element.collect {|e| build(e, level + 1) }.join
              }
            end
          when Hash
            if element.empty?
              tag('dict', nil, level)
            else
              tag('dict', '', level) do
                element.sort_by{|k,v| k.to_s }.collect do |k,v|
                  tag('key', CGI.escapeHTML(k.to_s), level + 1) +
                    build(v, level + 1)
                end.join
              end
            end
          when true, false
            tag(element, nil, level)
          when Time
            tag('date', element.utc.strftime('%Y-%m-%dT%H:%M:%SZ'), level)
          when Date # also catches DateTime
            tag('date', element.strftime('%Y-%m-%dT%H:%M:%SZ'), level)
          when String, Symbol, Integer, Float
            tag(element_type(element), CGI.escapeHTML(element.to_s), level)
          when IO, StringIO
            data = element.tap(&:rewind).read
            data_tag(data, level)
          else
            data = Marshal.dump(element)
            comment_tag('The <data> element below contains a Ruby object which has been serialized with Marshal.dump.') +
              data_tag(data, level)
          end
        end
      end

      private

      def tag(type, contents, level, &block)
        if block_given?
          indent("<#{type}>\n", level) +
            block.call +
            indent("</#{type}>\n", level)
        elsif contents.to_s.empty?
          indent("<#{type}/>\n", level)
        else
          indent("<#{type}>#{contents.to_s}</#{type}>\n", level)
        end
      end

      def data_tag(data, level)
        # note that apple plists are wrapped at a different length then
        # what ruby's base64 wraps by default.
        # I used #encode64 instead of #b64encode (which allows a length arg)
        # because b64encode is b0rked and ignores the length arg.
        tag('data', nil, level) do
          Base64.encode64(data)
                .gsub(/\s+/, '')
                .scan(/.{1,68}/o)
                .collect { |line| indent(line, level) }
                .join("\n")
                .concat("\n")
        end
      end

      def indent(str, level)
        @indent_str.to_s * level + str
      end

      def element_type(item)
        case item
        when String, Symbol
          'string'
        when Integer
          'integer'
        when Float
          'real'
        else
          raise "Don't know about this data type... something must be wrong!"
        end
      end

      def comment_tag(content)
        return "<!-- #{content} -->\n"
      end
    end

    def self.wrap(contents)
      output = '<?xml version="1.0" encoding="UTF-8"?>' + "\n"
      output << '<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">' + "\n"
      output << '<plist version="1.0">' + "\n"
      output << contents
      output << '</plist>' + "\n"

      output
    end
  end
end

class Array #:nodoc:
  include Plist::Emit
end

class Hash #:nodoc:
  include Plist::Emit
end
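For reviewers unfamiliar with the gem, a short usage sketch of the emitter vendored above; the hash contents and file name are invented, not taken from this diff:

require 'plist'

# Plist::Emit is mixed into Array and Hash (see the bottom of generator.rb),
# so #to_plist is available directly on plain Ruby collections.
settings = { 'Label' => 'com.example.demo', 'RunAtLoad' => true, 'Nice' => 5 }

xml      = settings.to_plist         # full document, wrapped in the XML/plist envelope
fragment = settings.to_plist(false)  # just the <dict>...</dict> fragment

# Equivalent module-level call, plus writing straight to disk:
Plist::Emit.dump(settings)
settings.save_plist('demo.plist')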
263
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/plist-3.7.0/lib/plist/parser.rb
vendored
Executable file
@ -0,0 +1,263 @@
# encoding: utf-8

# = plist
#
# Copyright 2006-2010 Ben Bleything and Patrick May
# Distributed under the MIT License
#

# Plist parses Mac OS X xml property list files into ruby data structures.
#
# === Load a plist file
# This is the main point of the library:
#
#   r = Plist.parse_xml(filename_or_xml)
module Plist
  # Raised when an element is not implemented
  class UnimplementedElementError < RuntimeError; end

  # Note that I don't use these two elements much:
  #
  # + Date elements are returned as DateTime objects.
  # + Data elements are implemented as Tempfiles
  #
  # Plist.parse_xml will blow up if it encounters a Date element.
  # If you encounter such an error, or if you have a Date element which
  # can't be parsed into a Time object, please create an issue
  # attaching your plist file at https://github.com/patsplat/plist/issues
  # so folks can implement the proper support.
  #
  # By default, <data> will be assumed to be a marshaled Ruby object and
  # interpreted with <tt>Marshal.load</tt>. Pass <tt>marshal: false</tt>
  # to disable this behavior and return the raw binary data as an IO
  # object instead.
  def self.parse_xml(filename_or_xml, options={})
    listener = Listener.new(options)
    # parser = REXML::Parsers::StreamParser.new(File.new(filename), listener)
    parser = StreamParser.new(filename_or_xml, listener)
    parser.parse
    listener.result
  end

  class Listener
    # include REXML::StreamListener

    attr_accessor :result, :open

    def initialize(options={})
      @result = nil
      @open = []
      @options = { :marshal => true }.merge(options).freeze
    end

    def tag_start(name, attributes)
      @open.push PTag.mappings[name].new(@options)
    end

    def text(contents)
      if @open.last
        @open.last.text ||= ''
        @open.last.text.concat(contents)
      end
    end

    def tag_end(name)
      last = @open.pop
      if @open.empty?
        @result = last.to_ruby
      else
        @open.last.children.push last
      end
    end
  end

  class StreamParser
    def initialize(plist_data_or_file, listener)
      if plist_data_or_file.respond_to? :read
        @xml = plist_data_or_file.read
      elsif File.exist? plist_data_or_file
        @xml = File.read(plist_data_or_file)
      else
        @xml = plist_data_or_file
      end

      @listener = listener
    end

    TEXT = /([^<]+)/
    CDATA = /<!\[CDATA\[(.*?)\]\]>/
    XMLDECL_PATTERN = /<\?xml\s+(.*?)\?>*/m
    DOCTYPE_PATTERN = /\s*<!DOCTYPE\s+(.*?)(\[|>)/m
    COMMENT_START = /\A<!--/
    COMMENT_END = /.*?-->/m
    UNIMPLEMENTED_ERROR = 'Unimplemented element. ' \
                          'Consider reporting via https://github.com/patsplat/plist/issues'

    def parse
      plist_tags = PTag.mappings.keys.join('|')
      start_tag = /<(#{plist_tags})([^>]*)>/i
      end_tag = /<\/(#{plist_tags})[^>]*>/i

      require 'strscan'

      @scanner = StringScanner.new(@xml)
      until @scanner.eos?
        if @scanner.scan(COMMENT_START)
          @scanner.scan(COMMENT_END)
        elsif @scanner.scan(XMLDECL_PATTERN)
          encoding = parse_encoding_from_xml_declaration(@scanner[1])
          next if encoding.nil?

          # use the specified encoding for the rest of the file
          next unless String.method_defined?(:force_encoding)
          @scanner.string = @scanner.rest.force_encoding(encoding)
        elsif @scanner.scan(DOCTYPE_PATTERN)
          next
        elsif @scanner.scan(start_tag)
          @listener.tag_start(@scanner[1], nil)
          if (@scanner[2] =~ /\/$/)
            @listener.tag_end(@scanner[1])
          end
        elsif @scanner.scan(TEXT)
          @listener.text(@scanner[1])
        elsif @scanner.scan(CDATA)
          @listener.text(@scanner[1])
        elsif @scanner.scan(end_tag)
          @listener.tag_end(@scanner[1])
        else
          raise UnimplementedElementError.new(UNIMPLEMENTED_ERROR)
        end
      end
    end

    private

    def parse_encoding_from_xml_declaration(xml_declaration)
      return unless defined?(Encoding)

      xml_encoding = xml_declaration.match(/(?:\A|\s)encoding=(?:"(.*?)"|'(.*?)')(?:\s|\Z)/)

      return if xml_encoding.nil?

      begin
        Encoding.find(xml_encoding[1])
      rescue ArgumentError
        nil
      end
    end
  end

  class PTag
    def self.mappings
      @mappings ||= {}
    end

    def self.inherited(sub_class)
      key = sub_class.to_s.downcase
      key.gsub!(/^plist::/, '')
      key.gsub!(/^p/, '') unless key == "plist"

      mappings[key] = sub_class
    end

    attr_accessor :text, :children, :options
    def initialize(options)
      @children = []
      @options = options
    end

    def to_ruby
      raise "Unimplemented: " + self.class.to_s + "#to_ruby on #{self.inspect}"
    end
  end

  class PList < PTag
    def to_ruby
      children.first.to_ruby if children.first
    end
  end

  class PDict < PTag
    def to_ruby
      dict = {}
      key = nil

      children.each do |c|
        if key.nil?
          key = c.to_ruby
        else
          dict[key] = c.to_ruby
          key = nil
        end
      end

      dict
    end
  end

  class PKey < PTag
    def to_ruby
      CGI.unescapeHTML(text || '')
    end
  end

  class PString < PTag
    def to_ruby
      CGI.unescapeHTML(text || '')
    end
  end

  class PArray < PTag
    def to_ruby
      children.collect do |c|
        c.to_ruby
      end
    end
  end

  class PInteger < PTag
    def to_ruby
      text.to_i
    end
  end

  class PTrue < PTag
    def to_ruby
      true
    end
  end

  class PFalse < PTag
    def to_ruby
      false
    end
  end

  class PReal < PTag
    def to_ruby
      text.to_f
    end
  end

  require 'date'
  class PDate < PTag
    def to_ruby
      DateTime.parse(text)
    end
  end

  require 'base64'
  class PData < PTag
    def to_ruby
      data = Base64.decode64(text.gsub(/\s+/, '')) unless text.nil?
      begin
        return Marshal.load(data) if options[:marshal]
      rescue Exception
      end
      io = StringIO.new
      io.write data
      io.rewind
      io
    end
  end
end
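A matching sketch for the parser vendored above, again with an invented plist that only uses element types the stream parser implements:

require 'plist'

xml = <<~XML
  <?xml version="1.0" encoding="UTF-8"?>
  <plist version="1.0">
    <dict>
      <key>Label</key><string>com.example.demo</string>
      <key>RunAtLoad</key><true/>
      <key>Nice</key><integer>5</integer>
    </dict>
  </plist>
XML

# Accepts a file path, an IO, or (as here) the XML itself.
result = Plist.parse_xml(xml)
# => {"Label"=>"com.example.demo", "RunAtLoad"=>true, "Nice"=>5}

# <data> elements are Marshal.load-ed by default; pass marshal: false to get
# the raw bytes back wrapped in a StringIO instead.
Plist.parse_xml(xml, marshal: false)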
5
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/plist-3.7.0/lib/plist/version.rb
vendored
Normal file
@ -0,0 +1,5 @@
# encoding: utf-8

module Plist
  VERSION = '3.7.0'.freeze
end
22
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/public_suffix-5.0.4/LICENSE.txt
vendored
Normal file
@ -0,0 +1,22 @@
Copyright (c) 2009-2023 Simone Carletti <weppos@weppos.net>

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
15431
Library/Homebrew/vendor/bundle/ruby/3.1.0/gems/public_suffix-5.0.4/data/list.txt
vendored
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.