Merge pull request #16687 from Homebrew/dependabot/bundler/Library/Homebrew/bindata-2.5.0

This commit is contained in:
Patrick Linnane 2024-02-16 09:40:29 -08:00 committed by GitHub
commit fa23636c44
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
47 changed files with 2103 additions and 1403 deletions

View File

@ -4,7 +4,7 @@ GEM
addressable (2.8.6) addressable (2.8.6)
public_suffix (>= 2.0.2, < 6.0) public_suffix (>= 2.0.2, < 6.0)
ast (2.4.2) ast (2.4.2)
bindata (2.4.15) bindata (2.5.0)
bootsnap (1.18.3) bootsnap (1.18.3)
msgpack (~> 1.2) msgpack (~> 1.2)
byebug (11.1.3) byebug (11.1.3)

View File

@ -30,7 +30,7 @@ end
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/public_suffix-5.0.4/lib") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/public_suffix-5.0.4/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/addressable-2.8.6/lib") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/addressable-2.8.6/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/ast-2.4.2/lib") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/ast-2.4.2/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/bindata-2.4.15/lib") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/bindata-2.5.0/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/msgpack-1.7.2") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/msgpack-1.7.2")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/msgpack-1.7.2/lib") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/msgpack-1.7.2/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/bootsnap-1.18.3") $:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/bootsnap-1.18.3")

View File

@ -1,94 +0,0 @@
module BinData
# WARNING: THIS IS UNSUPPORTED!!
#
# This was a (failed) experimental feature that allowed seeking within the
# input stream. It remains here for backwards compatibility for the few
# people that used it.
#
# The official way to skip around the stream is to use BinData::Skip with
# the `:to_abs_offset` parameter.
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These parameters are:
#
# [<tt>:check_offset</tt>] Raise an error if the current IO offset doesn't
# meet this criteria. A boolean return indicates
# success or failure. Any other return is compared
# to the current offset. The variable +offset+
# is made available to any lambda assigned to
# this parameter. This parameter is only checked
# before reading.
# [<tt>:adjust_offset</tt>] Ensures that the current IO offset is at this
# position before reading. This is like
# <tt>:check_offset</tt>, except that it will
# adjust the IO offset instead of raising an error.
module CheckOrAdjustOffsetPlugin
def self.included(base) #:nodoc:
base.optional_parameters :check_offset, :adjust_offset
base.mutually_exclusive_parameters :check_offset, :adjust_offset
end
def initialize_shared_instance
extend CheckOffsetMixin if has_parameter?(:check_offset)
extend AdjustOffsetMixin if has_parameter?(:adjust_offset)
super
end
module CheckOffsetMixin
def do_read(io) #:nodoc:
check_offset(io)
super(io)
end
#---------------
private
def check_offset(io)
actual_offset = io.offset
expected = eval_parameter(:check_offset, offset: actual_offset)
if !expected
raise ValidityError, "offset not as expected for #{debug_name}"
elsif actual_offset != expected && expected != true
raise ValidityError,
"offset is '#{actual_offset}' but " +
"expected '#{expected}' for #{debug_name}"
end
end
end
module AdjustOffsetMixin
def do_read(io) #:nodoc:
adjust_offset(io)
super(io)
end
#---------------
private
def adjust_offset(io)
actual_offset = io.offset
expected = eval_parameter(:adjust_offset)
if actual_offset != expected
begin
seek = expected - actual_offset
io.seekbytes(seek)
warn "adjusting stream position by #{seek} bytes" if $VERBOSE
rescue
raise ValidityError,
"offset is '#{actual_offset}' but couldn't seek to " +
"expected '#{expected}' for #{debug_name}"
end
end
end
end
end
# Add these offset options to Base
class Base
include CheckOrAdjustOffsetPlugin
end
end
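For orientation, a minimal sketch contrasting the removed :check_offset parameter with the BinData::Skip replacement recommended above; class and field names here are hypothetical.

class OldStyle < BinData::Record
  string :magic, read_length: 4
  uint8  :flags, check_offset: 4     # removed plugin: raise unless the IO offset is 4 here
end

class NewStyle < BinData::Record
  string :magic, read_length: 4
  skip   to_abs_offset: 4            # supported way: seek to absolute offset 4
  uint8  :flags
end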

View File

@ -1,133 +0,0 @@
require "bindata/base_primitive"
module BinData
# Skip will skip over bytes from the input stream. If the stream is not
# seekable, then the bytes are consumed and discarded.
#
# When writing, skip will write the appropriate number of zero bytes.
#
# require 'bindata'
#
# class A < BinData::Record
# skip length: 5
# string :a, read_length: 5
# end
#
# obj = A.read("abcdefghij")
# obj.a #=> "fghij"
#
#
# class B < BinData::Record
# skip until_valid: [:string, {read_length: 2, assert: "ef"} ]
# string :b, read_length: 5
# end
#
# obj = B.read("abcdefghij")
# obj.b #=> "efghi"
#
#
# == Parameters
#
# Skip objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:length</tt>:: The number of bytes to skip.
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
# This parameter contains a type that will raise
# a BinData::ValidityError unless an acceptable byte
# sequence is found. The type is represented by a
# Symbol, or if the type is to have params
# passed to it, then it should be provided as
# <tt>[type_symbol, hash_params]</tt>.
#
class Skip < BinData::BasePrimitive
arg_processor :skip
optional_parameters :length, :to_abs_offset, :until_valid
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
def initialize_shared_instance
extend SkipLengthPlugin if has_parameter?(:length)
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
super
end
#---------------
private
def value_to_binary_string(val)
len = skip_length
if len < 0
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
"\000" * skip_length
end
def read_and_return_value(io)
len = skip_length
if len < 0
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
io.seekbytes(len)
""
end
def sensible_default
""
end
end
class SkipArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params)
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
raise ArgumentError,
"#{obj_class} requires either :length, :to_abs_offset or :until_valid"
end
params.must_be_integer(:to_abs_offset, :length)
params.sanitize_object_prototype(:until_valid)
end
end
# Logic for the :length parameter
module SkipLengthPlugin
def skip_length
eval_parameter(:length)
end
end
# Logic for the :to_abs_offset parameter
module SkipToAbsOffsetPlugin
def skip_length
eval_parameter(:to_abs_offset) - abs_offset
end
end
# Logic for the :until_valid parameter
module SkipUntilValidPlugin
def skip_length
# no skipping when writing
0
end
def read_and_return_value(io)
prototype = get_parameter(:until_valid)
validator = prototype.instantiate(nil, self)
valid = false
until valid
begin
io.with_readahead do
validator.read(io)
valid = true
end
rescue ValidityError
io.readbytes(1)
end
end
end
end
end
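The :length and :until_valid parameters are covered by the examples above; a comparable sketch for :to_abs_offset (class and field names are hypothetical):

class C < BinData::Record
  uint8  :header_len
  skip   to_abs_offset: :header_len   # seek to the absolute offset named by header_len
  string :body, read_length: 4
end

C.read("\x06xxxxxabcd").body  #=> "abcd"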

View File

@ -1,3 +0,0 @@
module BinData
VERSION = "2.4.15"
end

View File

@ -13,6 +13,7 @@ require 'bindata/int'
require 'bindata/primitive' require 'bindata/primitive'
require 'bindata/record' require 'bindata/record'
require 'bindata/rest' require 'bindata/rest'
require 'bindata/section'
require 'bindata/skip' require 'bindata/skip'
require 'bindata/string' require 'bindata/string'
require 'bindata/stringz' require 'bindata/stringz'

View File

@ -19,11 +19,11 @@ module BinData
def do_num_bytes; 0; end def do_num_bytes; 0; end
def do_read(io) def do_read(io)
io.reset_read_bits io.readbytes(0)
end end
def do_write(io) def do_write(io)
io.flushbits io.writebytes("")
end end
end end
@ -45,18 +45,26 @@ module BinData
def initialize(io) def initialize(io)
@io = io @io = io
end end
def binary_string(str)
str.to_s.dup.force_encoding(Encoding::BINARY)
end
def readbytes(n) def readbytes(n)
n.times.inject("") do |bytes, _| n.times.inject(binary_string("")) do |bytes, _|
bytes += @io.readbits(8, :big).chr bytes + @io.readbits(8, :big).chr
end end
end end
def writebytes(str)
str.each_byte { |v| @io.writebits(v, 8, :big) }
end
end end
def bit_aligned? def bit_aligned?
true true
end end
def read_and_return_value(io) def do_read(io)
super(BitAlignedIO.new(io)) super(BitAlignedIO.new(io))
end end
@ -65,7 +73,7 @@ module BinData
end end
def do_write(io) def do_write(io)
value_to_binary_string(_value).each_byte { |v| io.writebits(v, 8, :big) } super(BitAlignedIO.new(io))
end end
end end
@ -74,6 +82,6 @@ module BinData
end end
def Primitive.bit_aligned def Primitive.bit_aligned
fail "'bit_aligned' is not needed for BinData::Primitives" fail "'bit_aligned' is not supported for BinData::Primitives"
end end
end end
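As context for the BitAlignedIO change above, bit_aligned is declared on a BasePrimitive subclass so that its bytes are routed through the wrapper; a rough sketch, assuming the usual DSL registration by underscored class name (names are hypothetical):

class BitAlignedString < BinData::String
  bit_aligned
end

class Frame < BinData::Record
  bit4               :prefix
  bit_aligned_string :name, read_length: 2   # starts mid-byte, read via BitAlignedIO
  bit4               :suffix
end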

View File

@ -72,18 +72,18 @@ module BinData
end end
def initialize_instance def initialize_instance
@element_list = nil @elements = nil
end end
def clear? def clear?
@element_list.nil? || elements.all?(&:clear?) @elements.nil? || elements.all?(&:clear?)
end end
def assign(array) def assign(array)
return if self.equal?(array) # prevent self assignment return if self.equal?(array) # prevent self assignment
raise ArgumentError, "can't set a nil value for #{debug_name}" if array.nil? raise ArgumentError, "can't set a nil value for #{debug_name}" if array.nil?
@element_list = [] @elements = []
concat(array) concat(array)
end end
@ -220,23 +220,23 @@ module BinData
elements.each { |el| yield el } elements.each { |el| yield el }
end end
def debug_name_of(child) #:nodoc: def debug_name_of(child) # :nodoc:
index = find_index_of(child) index = find_index_of(child)
"#{debug_name}[#{index}]" "#{debug_name}[#{index}]"
end end
def offset_of(child) #:nodoc: def offset_of(child) # :nodoc:
index = find_index_of(child) index = find_index_of(child)
sum = sum_num_bytes_below_index(index) sum = sum_num_bytes_below_index(index)
child.bit_aligned? ? sum.floor : sum.ceil child.bit_aligned? ? sum.floor : sum.ceil
end end
def do_write(io) #:nodoc: def do_write(io) # :nodoc:
elements.each { |el| el.do_write(io) } elements.each { |el| el.do_write(io) }
end end
def do_num_bytes #:nodoc: def do_num_bytes # :nodoc:
sum_num_bytes_for_all_elements sum_num_bytes_for_all_elements
end end
@ -251,7 +251,7 @@ module BinData
end end
def elements def elements
@element_list ||= [] @elements ||= []
end end
def append_new_element def append_new_element
@ -279,10 +279,55 @@ module BinData
end end
end end
end end
# Logic for the :read_until parameter
module ReadUntilPlugin
def do_read(io)
loop do
element = append_new_element
element.do_read(io)
variables = { index: self.length - 1, element: self.last, array: self }
break if eval_parameter(:read_until, variables)
end
end
end
# Logic for the read_until: :eof parameter
module ReadUntilEOFPlugin
def do_read(io)
loop do
element = append_new_element
begin
element.do_read(io)
rescue EOFError, IOError
elements.pop
break
end
end
end
end
# Logic for the :initial_length parameter
module InitialLengthPlugin
def do_read(io)
elements.each { |el| el.do_read(io) }
end
def elements
if @elements.nil?
@elements = []
eval_parameter(:initial_length).times do
@elements << new_element
end
end
@elements
end
end
end end
class ArrayArgProcessor < BaseArgProcessor class ArrayArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params) #:nodoc: def sanitize_parameters!(obj_class, params) # :nodoc:
# ensure one of :initial_length and :read_until exists # ensure one of :initial_length and :read_until exists
unless params.has_at_least_one_of?(:initial_length, :read_until) unless params.has_at_least_one_of?(:initial_length, :read_until)
params[:initial_length] = 0 params[:initial_length] = 0
@ -296,49 +341,4 @@ module BinData
params.sanitize_object_prototype(:type) params.sanitize_object_prototype(:type)
end end
end end
# Logic for the :read_until parameter
module ReadUntilPlugin
def do_read(io)
loop do
element = append_new_element
element.do_read(io)
variables = { index: self.length - 1, element: self.last, array: self }
break if eval_parameter(:read_until, variables)
end
end
end
# Logic for the read_until: :eof parameter
module ReadUntilEOFPlugin
def do_read(io)
loop do
element = append_new_element
begin
element.do_read(io)
rescue EOFError, IOError
elements.pop
break
end
end
end
end
# Logic for the :initial_length parameter
module InitialLengthPlugin
def do_read(io)
elements.each { |el| el.do_read(io) }
end
def elements
if @element_list.nil?
@element_list = []
eval_parameter(:initial_length).times do
@element_list << new_element
end
end
@element_list
end
end
end end
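For reference, the three read parameters whose plugin modules were relocated above are used like this (values are illustrative):

BinData::Array.new(type: :uint8, initial_length: 3).read("\x01\x02\x03")  #=> [1, 2, 3]
BinData::Array.new(type: :uint8, read_until: :eof).read("\x01\x02")       #=> [1, 2]
BinData::Array.new(type: :uint8, read_until: -> { element == 0 }).read("\x05\x00\x09")
#=> [5, 0]   (stops once the lambda sees a zero element)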

View File

@ -17,7 +17,7 @@ module BinData
# Instantiates this class and reads from +io+, returning the newly # Instantiates this class and reads from +io+, returning the newly
# created data object. +args+ will be used when instantiating. # created data object. +args+ will be used when instantiating.
def read(io, *args, &block) def read(io, *args, &block)
obj = self.new(*args) obj = new(*args)
obj.read(io, &block) obj.read(io, &block)
obj obj
end end
@ -48,7 +48,7 @@ module BinData
end end
# Registers all subclasses of this class for use # Registers all subclasses of this class for use
def register_subclasses #:nodoc: def register_subclasses # :nodoc:
singleton_class.send(:undef_method, :inherited) singleton_class.send(:undef_method, :inherited)
define_singleton_method(:inherited) do |subclass| define_singleton_method(:inherited) do |subclass|
RegisteredClasses.register(subclass.name, subclass) RegisteredClasses.register(subclass.name, subclass)
@ -90,6 +90,8 @@ module BinData
# Creates a new data object based on this instance. # Creates a new data object based on this instance.
# #
# This implements the prototype design pattern.
#
# All parameters will be duplicated. Use this method # All parameters will be duplicated. Use this method
# when creating multiple objects with the same parameters. # when creating multiple objects with the same parameters.
def new(value = nil, parent = nil) def new(value = nil, parent = nil)
@ -117,8 +119,8 @@ module BinData
end end
# Returns a lazy evaluator for this object. # Returns a lazy evaluator for this object.
def lazy_evaluator #:nodoc: def lazy_evaluator # :nodoc:
@lazy ||= LazyEvaluator.new(self) @lazy_evaluator ||= LazyEvaluator.new(self)
end end
# Returns the parameter referenced by +key+. # Returns the parameter referenced by +key+.
@ -177,7 +179,7 @@ module BinData
# Returns the hexadecimal string representation of this data object. # Returns the hexadecimal string representation of this data object.
def to_hex(&block) def to_hex(&block)
to_binary_s(&block).unpack('H*')[0] to_binary_s(&block).unpack1('H*')
end end
# Return a human readable representation of this data object. # Return a human readable representation of this data object.
@ -191,7 +193,7 @@ module BinData
end end
# Work with Ruby's pretty-printer library. # Work with Ruby's pretty-printer library.
def pretty_print(pp) #:nodoc: def pretty_print(pp) # :nodoc:
pp.pp(snapshot) pp.pp(snapshot)
end end
@ -202,40 +204,28 @@ module BinData
# Returns a user friendly name of this object for debugging purposes. # Returns a user friendly name of this object for debugging purposes.
def debug_name def debug_name
if @parent @parent ? @parent.debug_name_of(self) : 'obj'
@parent.debug_name_of(self)
else
"obj"
end
end end
# Returns the offset (in bytes) of this object with respect to its most # Returns the offset (in bytes) of this object with respect to its most
# distant ancestor. # distant ancestor.
def abs_offset def abs_offset
if @parent @parent ? @parent.abs_offset + @parent.offset_of(self) : 0
@parent.abs_offset + @parent.offset_of(self)
else
0
end
end end
# Returns the offset (in bytes) of this object with respect to its parent. # Returns the offset (in bytes) of this object with respect to its parent.
def rel_offset def rel_offset
if @parent @parent ? @parent.offset_of(self) : 0
@parent.offset_of(self)
else
0
end
end end
def ==(other) #:nodoc: def ==(other) # :nodoc:
# double dispatch # double dispatch
other == snapshot other == snapshot
end end
# A version of +respond_to?+ used by the lazy evaluator. It doesn't # A version of +respond_to?+ used by the lazy evaluator. It doesn't
# reinvoke the evaluator so as to avoid infinite evaluation loops. # reinvoke the evaluator so as to avoid infinite evaluation loops.
def safe_respond_to?(symbol, include_private = false) #:nodoc: def safe_respond_to?(symbol, include_private = false) # :nodoc:
base_respond_to?(symbol, include_private) base_respond_to?(symbol, include_private)
end end
@ -329,7 +319,6 @@ module BinData
# Performs sanity checks on the given parameters. # Performs sanity checks on the given parameters.
# This method converts the parameters to the form expected # This method converts the parameters to the form expected
# by the data object. # by the data object.
def sanitize_parameters!(obj_class, obj_params) def sanitize_parameters!(obj_class, obj_params); end
end
end end
end end
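A short usage sketch of the class-level read, to_hex and debug_name touched above; the record layout is hypothetical:

class Header < BinData::Record
  uint16be :len
  string   :tag, read_length: 2
end

h = Header.read("\x00\x05AB")
h.len        #=> 5
h.to_hex     #=> "00054142"
h.debug_name #=> "obj"   (no parent)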

View File

@ -65,7 +65,7 @@ module BinData
@value = nil @value = nil
end end
def clear? #:nodoc: def clear? # :nodoc:
@value.nil? @value.nil?
end end
@ -73,13 +73,7 @@ module BinData
raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil? raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil?
raw_val = val.respond_to?(:snapshot) ? val.snapshot : val raw_val = val.respond_to?(:snapshot) ? val.snapshot : val
@value = @value = raw_val.dup
begin
raw_val.dup
rescue TypeError
# can't dup Fixnums
raw_val
end
end end
def snapshot def snapshot
@ -94,18 +88,19 @@ module BinData
assign(val) assign(val)
end end
def respond_to?(symbol, include_private = false) #:nodoc: def respond_to_missing?(symbol, include_all = false) # :nodoc:
child = snapshot child = snapshot
child.respond_to?(symbol, include_private) || super child.respond_to?(symbol, include_all) || super
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
child = snapshot child = snapshot
if child.respond_to?(symbol) if child.respond_to?(symbol)
self.class.class_eval \ self.class.class_eval <<-END, __FILE__, __LINE__ + 1
"def #{symbol}(*args, &block);" \ def #{symbol}(*args, &block) # def clamp(*args, &block)
" snapshot.#{symbol}(*args, &block);" \ snapshot.#{symbol}(*args, &block) # snapshot.clamp(*args, &block)
"end" end # end
END
child.__send__(symbol, *args, &block) child.__send__(symbol, *args, &block)
else else
super super
@ -125,15 +120,15 @@ module BinData
snapshot.hash snapshot.hash
end end
def do_read(io) #:nodoc: def do_read(io) # :nodoc:
@value = read_and_return_value(io) @value = read_and_return_value(io)
end end
def do_write(io) #:nodoc: def do_write(io) # :nodoc:
io.writebytes(value_to_binary_string(_value)) io.writebytes(value_to_binary_string(_value))
end end
def do_num_bytes #:nodoc: def do_num_bytes # :nodoc:
value_to_binary_string(_value).length value_to_binary_string(_value).length
end end
@ -172,7 +167,7 @@ module BinData
assert! assert!
end end
def do_read(io) #:nodoc: def do_read(io) # :nodoc:
super(io) super(io)
assert! assert!
end end
@ -205,7 +200,16 @@ module BinData
reading? ? @value : eval_parameter(:asserted_value) reading? ? @value : eval_parameter(:asserted_value)
end end
def do_read(io) #:nodoc: # The asserted value as a binary string.
#
# Rationale: while reading, +#to_binary_s+ will use the
# value read in, rather than the +:asserted_value+.
# This feature is used by Skip.
def asserted_binary_s
value_to_binary_string(eval_parameter(:asserted_value))
end
def do_read(io) # :nodoc:
super(io) super(io)
assert! assert!
end end

View File

@ -5,7 +5,7 @@ module BinData
# Defines a number of classes that contain a bit based integer. # Defines a number of classes that contain a bit based integer.
# The integer is defined by endian and number of bits. # The integer is defined by endian and number of bits.
module BitField #:nodoc: all module BitField # :nodoc: all
@@mutex = Mutex.new @@mutex = Mutex.new
class << self class << self
@ -156,10 +156,10 @@ module BinData
# Create classes for dynamic bitfields # Create classes for dynamic bitfields
{ {
"Bit" => :big, 'Bit' => :big,
"BitLe" => :little, 'BitLe' => :little,
"Sbit" => [:big, :signed], 'Sbit' => [:big, :signed],
"SbitLe" => [:little, :signed], 'SbitLe' => [:little, :signed]
}.each_pair { |name, args| BitField.define_class(name, :nbits, *args) } }.each_pair { |name, args| BitField.define_class(name, :nbits, *args) }
# Create classes on demand # Create classes on demand
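The dynamic classes defined above are used as field types via their underscored names, e.g. (field names are illustrative):

class Flags < BinData::Record
  bit1  :compressed
  bit1  :encrypted
  bit6  :reserved
  sbit8 :delta          # signed, big endian bit field
end

f = Flags.read("\x80\xFF")
f.compressed  #=> 1
f.delta       #=> -1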

View File

@ -41,7 +41,7 @@ module BinData
# end # end
# end # end
# end # end
# #
# #
# == Parameters # == Parameters
# #
@ -80,29 +80,107 @@ module BinData
@type.snapshot @type.snapshot
end end
def respond_to?(symbol, include_private = false) #:nodoc: def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_private) || super @type.respond_to?(symbol, include_all) || super
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block) @type.__send__(symbol, *args, &block)
end end
def do_read(io) #:nodoc: def do_read(io) # :nodoc:
io.with_buffer(eval_parameter(:length)) do buf_len = eval_parameter(:length)
@type.do_read(io) io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_read(transformed_io)
end end
end end
def do_write(io) #:nodoc: def do_write(io) # :nodoc:
io.with_buffer(eval_parameter(:length)) do buf_len = eval_parameter(:length)
@type.do_write(io) io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_write(transformed_io)
end end
end end
def do_num_bytes #:nodoc: def do_num_bytes # :nodoc:
eval_parameter(:length) eval_parameter(:length)
end end
# Transforms the IO stream to restrict access inside
# a buffer of specified length.
class BufferIO < IO::Transform
def initialize(length)
super()
@bytes_remaining = length
end
def before_transform
@buf_start = offset
@buf_end = @buf_start + @bytes_remaining
end
def num_bytes_remaining
[@bytes_remaining, super].min
rescue IOError
@bytes_remaining
end
def skip(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_skip(nbytes)
end
def seek_abs(n)
if n < @buf_start || n >= @buf_end
raise IOError, "can not seek to abs_offset outside of buffer"
end
@bytes_remaining -= (n - offset)
chain_seek_abs(n)
end
def read(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_read(nbytes)
end
def write(data)
nbytes = buffer_limited_n(data.size)
@bytes_remaining -= nbytes
if nbytes < data.size
data = data[0, nbytes]
end
chain_write(data)
end
def after_read_transform
read(nil)
end
def after_write_transform
write("\x00" * @bytes_remaining)
end
def buffer_limited_n(n)
if n.nil?
@bytes_remaining
elsif n.positive?
limit = @bytes_remaining
n > limit ? limit : n
# uncomment if we decide to allow backwards skipping
# elsif n.negative?
# limit = @bytes_remaining + @buf_start - @buf_end
# n < limit ? limit : n
else
0
end
end
end
end end
class BufferArgProcessor < BaseArgProcessor class BufferArgProcessor < BaseArgProcessor
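The buffering behaviour implemented by BufferIO above backs BinData::Buffer usage such as the following; the layout is illustrative and the block form follows the BinData manual:

class Packet < BinData::Record
  uint8  :len
  buffer :payload, length: 8 do       # exactly 8 bytes are consumed on read, padded on write
    array type: :uint16be, read_until: :eof
  end
  uint8  :trailer
end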

View File

@ -82,18 +82,19 @@ module BinData
if selection.nil? if selection.nil?
raise IndexError, ":selection returned nil for #{debug_name}" raise IndexError, ":selection returned nil for #{debug_name}"
end end
selection selection
end end
def respond_to?(symbol, include_private = false) #:nodoc: def respond_to?(symbol, include_all = false) # :nodoc:
current_choice.respond_to?(symbol, include_private) || super current_choice.respond_to?(symbol, include_all) || super
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
current_choice.__send__(symbol, *args, &block) current_choice.__send__(symbol, *args, &block)
end end
%w(clear? assign snapshot do_read do_write do_num_bytes).each do |m| %w[clear? assign snapshot do_read do_write do_num_bytes].each do |m|
module_eval <<-END module_eval <<-END
def #{m}(*args) def #{m}(*args)
current_choice.#{m}(*args) current_choice.#{m}(*args)
@ -112,14 +113,16 @@ module BinData
def instantiate_choice(selection) def instantiate_choice(selection)
prototype = get_parameter(:choices)[selection] prototype = get_parameter(:choices)[selection]
if prototype.nil? if prototype.nil?
raise IndexError, "selection '#{selection}' does not exist in :choices for #{debug_name}" msg = "selection '#{selection}' does not exist in :choices for #{debug_name}"
raise IndexError, msg
end end
prototype.instantiate(nil, self) prototype.instantiate(nil, self)
end end
end end
class ChoiceArgProcessor < BaseArgProcessor class ChoiceArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params) #:nodoc: def sanitize_parameters!(obj_class, params) # :nodoc:
params.merge!(obj_class.dsl_params) params.merge!(obj_class.dsl_params)
params.sanitize_choices(:choices) do |choices| params.sanitize_choices(:choices) do |choices|

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive" require 'bindata/base_primitive'
module BinData module BinData
# Counts the number of bytes remaining in the input stream from the current # Counts the number of bytes remaining in the input stream from the current

View File

@ -83,11 +83,11 @@ module BinData
@type.num_bytes @type.num_bytes
end end
def respond_to?(symbol, include_private = false) #:nodoc: def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_private) || super @type.respond_to?(symbol, include_all) || super
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block) @type.__send__(symbol, *args, &block)
end end
@ -104,20 +104,20 @@ module BinData
abs_offset abs_offset
end end
def do_read(io) #:nodoc: def do_read(io) # :nodoc:
@read_io = io @read_io = io
end end
def do_write(io) #:nodoc: def do_write(io) # :nodoc:
@write_io = io @write_io = io
end end
def do_num_bytes #:nodoc: def do_num_bytes # :nodoc:
0 0
end end
def include_obj? def include_obj?
! has_parameter?(:onlyif) || eval_parameter(:onlyif) !has_parameter?(:onlyif) || eval_parameter(:onlyif)
end end
# DelayedIO objects aren't read when #read is called. # DelayedIO objects aren't read when #read is called.
@ -126,7 +126,7 @@ module BinData
return unless include_obj? return unless include_obj?
raise IOError, "read from where?" unless @read_io raise IOError, "read from where?" unless @read_io
@read_io.seekbytes(abs_offset - @read_io.offset) @read_io.seek_to_abs_offset(abs_offset)
start_read do start_read do
@type.do_read(@read_io) @type.do_read(@read_io)
end end
@ -138,7 +138,7 @@ module BinData
return unless include_obj? return unless include_obj?
raise IOError, "write to where?" unless @write_io raise IOError, "write to where?" unless @write_io
@write_io.seekbytes(abs_offset - @write_io.offset) @write_io.seek_to_abs_offset(abs_offset)
@type.do_write(@write_io) @type.do_write(@write_io)
end end
end end
@ -153,8 +153,8 @@ module BinData
end end
end end
# Add +auto_call_delayed_io+ keyword to BinData::Base.
class Base class Base
# Add +auto_call_delayed_io+ keyword to BinData::Base.
class << self class << self
# The +auto_call_delayed_io+ keyword sets a data object tree to perform # The +auto_call_delayed_io+ keyword sets a data object tree to perform
# multi pass I/O automatically. # multi pass I/O automatically.
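A sketch of the auto_call_delayed_io keyword mentioned above combined with a delayed_io field; the read_abs_offset parameter name is taken from the BinData manual and is an assumption in this excerpt:

class Doc < BinData::Record
  auto_call_delayed_io                 # perform multi pass I/O automatically
  uint8 :body_offset
  delayed_io :body, read_abs_offset: :body_offset do
    string read_length: 4
  end
end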

View File

@ -42,7 +42,7 @@ module BinData
end end
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
dsl_parser.__send__(symbol, *args, &block) dsl_parser.__send__(symbol, *args, &block)
end end
@ -131,7 +131,8 @@ module BinData
choice: [:to_choice_params, :choices, [:multiple_fields, :all_or_none_fieldnames, :fieldnames_are_values]], choice: [:to_choice_params, :choices, [:multiple_fields, :all_or_none_fieldnames, :fieldnames_are_values]],
delayed_io: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]], delayed_io: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
primitive: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames]], primitive: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames]],
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]], section: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames]],
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]]
} }
end end
@ -182,21 +183,21 @@ module BinData
begin begin
@validator.validate_field(parser.name) @validator.validate_field(parser.name)
append_field(parser.type, parser.name, parser.params) append_field(parser.type, parser.name, parser.params)
rescue Exception => err rescue Exception => e
dsl_raise err.class, err.message dsl_raise e.class, e.message
end end
end end
def append_field(type, name, params) def append_field(type, name, params)
fields.add_field(type, name, params) fields.add_field(type, name, params)
rescue BinData::UnRegisteredTypeError => err rescue BinData::UnRegisteredTypeError => e
raise TypeError, "unknown type '#{err.message}'" raise TypeError, "unknown type '#{e.message}'"
end end
def parent_attribute(attr, default = nil) def parent_attribute(attr, default = nil)
parent = @the_class.superclass parent = @the_class.superclass
parser = parent.respond_to?(:dsl_parser) ? parent.dsl_parser : nil parser = parent.respond_to?(:dsl_parser) ? parent.dsl_parser : nil
if parser && parser.respond_to?(attr) if parser&.respond_to?(attr)
parser.send(attr) parser.send(attr)
else else
default default
@ -205,7 +206,7 @@ module BinData
def dsl_raise(exception, msg) def dsl_raise(exception, msg)
backtrace = caller backtrace = caller
backtrace.shift while %r{bindata/dsl.rb} =~ backtrace.first backtrace.shift while %r{bindata/dsl.rb}.match?(backtrace.first)
raise exception, "#{msg} in #{@the_class}", backtrace raise exception, "#{msg} in #{@the_class}", backtrace
end end
@ -215,9 +216,9 @@ module BinData
when 0 when 0
{} {}
when 1 when 1
{key => fields[0].prototype} { key => fields[0].prototype }
else else
{key=> [:struct, to_struct_params]} { key => [:struct, to_struct_params] }
end end
end end
@ -225,16 +226,16 @@ module BinData
if fields.empty? if fields.empty?
{} {}
elsif fields.all_field_names_blank? elsif fields.all_field_names_blank?
{key => fields.collect(&:prototype)} { key => fields.collect(&:prototype) }
else else
choices = {} choices = {}
fields.each { |f| choices[f.name] = f.prototype } fields.each { |f| choices[f.name] = f.prototype }
{key => choices} { key => choices }
end end
end end
def to_struct_params(*unused) def to_struct_params(*_)
result = {fields: fields} result = { fields: fields }
if !endian.nil? if !endian.nil?
result[:endian] = endian result[:endian] = endian
end end
@ -274,7 +275,7 @@ module BinData
def override_new_in_class(bnl_class) def override_new_in_class(bnl_class)
endian_classes = { endian_classes = {
big: class_with_endian(bnl_class, :big), big: class_with_endian(bnl_class, :big),
little: class_with_endian(bnl_class, :little), little: class_with_endian(bnl_class, :little)
} }
bnl_class.define_singleton_method(:new) do |*args| bnl_class.define_singleton_method(:new) do |*args|
if self == bnl_class if self == bnl_class
@ -290,7 +291,7 @@ module BinData
def delegate_field_creation(bnl_class) def delegate_field_creation(bnl_class)
endian_classes = { endian_classes = {
big: class_with_endian(bnl_class, :big), big: class_with_endian(bnl_class, :big),
little: class_with_endian(bnl_class, :little), little: class_with_endian(bnl_class, :little)
} }
parser = bnl_class.dsl_parser parser = bnl_class.dsl_parser
@ -302,28 +303,28 @@ module BinData
def fixup_subclass_hierarchy(bnl_class) def fixup_subclass_hierarchy(bnl_class)
parent = bnl_class.superclass parent = bnl_class.superclass
if obj_attribute(parent, :endian) == :big_and_little return if obj_attribute(parent, :endian) != :big_and_little
be_subclass = class_with_endian(bnl_class, :big)
be_parent = class_with_endian(parent, :big)
be_fields = obj_attribute(be_parent, :fields)
le_subclass = class_with_endian(bnl_class, :little) be_subclass = class_with_endian(bnl_class, :big)
le_parent = class_with_endian(parent, :little) be_parent = class_with_endian(parent, :big)
le_fields = obj_attribute(le_parent, :fields) be_fields = obj_attribute(be_parent, :fields)
be_subclass.dsl_parser.define_singleton_method(:parent_fields) do le_subclass = class_with_endian(bnl_class, :little)
be_fields le_parent = class_with_endian(parent, :little)
end le_fields = obj_attribute(le_parent, :fields)
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
le_fields be_subclass.dsl_parser.define_singleton_method(:parent_fields) do
end be_fields
end
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
le_fields
end end
end end
def class_with_endian(class_name, endian) def class_with_endian(class_name, endian)
hints = { hints = {
endian: endian, endian: endian,
search_prefix: class_name.dsl_parser.search_prefix, search_prefix: class_name.dsl_parser.search_prefix
} }
RegisteredClasses.lookup(class_name, hints) RegisteredClasses.lookup(class_name, hints)
end end
@ -377,8 +378,9 @@ module BinData
buffer: BinData::Buffer, buffer: BinData::Buffer,
choice: BinData::Choice, choice: BinData::Choice,
delayed_io: BinData::DelayedIO, delayed_io: BinData::DelayedIO,
section: BinData::Section,
skip: BinData::Skip, skip: BinData::Skip,
struct: BinData::Struct, struct: BinData::Struct
} }
if bindata_classes.include?(@type) if bindata_classes.include?(@type)
@ -457,7 +459,7 @@ module BinData
end end
def malformed_name?(name) def malformed_name?(name)
/^[a-z_]\w*$/ !~ name.to_s !/^[a-z_]\w*$/.match?(name.to_s)
end end
def duplicate_name?(name) def duplicate_name?(name)

View File

@ -4,7 +4,7 @@ module BinData
# Defines a number of classes that contain a floating point number. # Defines a number of classes that contain a floating point number.
# The float is defined by precision and endian. # The float is defined by precision and endian.
module FloatingPoint #:nodoc: all module FloatingPoint # :nodoc: all
class << self class << self
PRECISION = { PRECISION = {
single: 4, single: 4,
@ -15,7 +15,7 @@ module BinData
[:single, :little] => 'e', [:single, :little] => 'e',
[:single, :big] => 'g', [:single, :big] => 'g',
[:double, :little] => 'E', [:double, :little] => 'E',
[:double, :big] => 'G', [:double, :big] => 'G'
} }
def define_methods(float_class, precision, endian) def define_methods(float_class, precision, endian)
@ -49,7 +49,7 @@ module BinData
nbytes = PRECISION[precision] nbytes = PRECISION[precision]
unpack = PACK_CODE[[precision, endian]] unpack = PACK_CODE[[precision, endian]]
"io.readbytes(#{nbytes}).unpack('#{unpack}').at(0)" "io.readbytes(#{nbytes}).unpack1('#{unpack}')"
end end
def create_to_binary_s_code(precision, endian) def create_to_binary_s_code(precision, endian)
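The generated read code above reduces to a plain unpack1 call; for a little endian single precision float, for example:

"\xDB\x0F\x49\x40".unpack1('e')            #=> 3.14159... (:single, :little)
BinData::FloatLe.read("\xDB\x0F\x49\x40")  #=> 3.14159...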

View File

@ -1,14 +1,13 @@
module BinData module BinData
# Error raised when unexpected results occur when reading data from IO. # Error raised when unexpected results occur when reading data from IO.
class ValidityError < StandardError ; end class ValidityError < StandardError; end
# All methods provided by the framework are to be implemented or overridden # All methods provided by the framework are to be implemented or overridden
# by subclasses of BinData::Base. # by subclasses of BinData::Base.
module Framework module Framework
# Initializes the state of the object. All instance variables that # Initializes the state of the object. All instance variables that
# are used by the object must be initialized here. # are used by the object must be initialized here.
def initialize_instance def initialize_instance; end
end
# Initialises state that is shared by objects with the same parameters. # Initialises state that is shared by objects with the same parameters.
# #
@ -16,8 +15,7 @@ module BinData
# variables set here, and changes to the singleton class will be shared # variables set here, and changes to the singleton class will be shared
# between all objects that are initialized with the same parameters. # between all objects that are initialized with the same parameters.
# This method is called only once for a particular set of parameters. # This method is called only once for a particular set of parameters.
def initialize_shared_instance def initialize_shared_instance; end
end
# Returns true if the object has not been changed since creation. # Returns true if the object has not been changed since creation.
def clear? def clear?
@ -37,13 +35,13 @@ module BinData
# Returns the debug name of +child+. This only needs to be implemented # Returns the debug name of +child+. This only needs to be implemented
# by objects that contain child objects. # by objects that contain child objects.
def debug_name_of(child) #:nodoc: def debug_name_of(child) # :nodoc:
debug_name debug_name
end end
# Returns the offset of +child+. This only needs to be implemented # Returns the offset of +child+. This only needs to be implemented
# by objects that contain child objects. # by objects that contain child objects.
def offset_of(child) #:nodoc: def offset_of(child) # :nodoc:
0 0
end end
@ -53,17 +51,17 @@ module BinData
end end
# Reads the data for this data object from +io+. # Reads the data for this data object from +io+.
def do_read(io) #:nodoc: def do_read(io) # :nodoc:
raise NotImplementedError raise NotImplementedError
end end
# Writes the value for this data to +io+. # Writes the value for this data to +io+.
def do_write(io) #:nodoc: def do_write(io) # :nodoc:
raise NotImplementedError raise NotImplementedError
end end
# Returns the number of bytes it will take to write this data. # Returns the number of bytes it will take to write this data.
def do_num_bytes #:nodoc: def do_num_bytes # :nodoc:
raise NotImplementedError raise NotImplementedError
end end

View File

@ -5,7 +5,7 @@ module BinData
# Defines a number of classes that contain an integer. The integer # Defines a number of classes that contain an integer. The integer
# is defined by endian, signedness and number of bytes. # is defined by endian, signedness and number of bytes.
module Int #:nodoc: all module Int # :nodoc: all
@@mutex = Mutex.new @@mutex = Mutex.new
class << self class << self
@ -85,7 +85,7 @@ module BinData
"io.readbytes(1).ord" "io.readbytes(1).ord"
else else
unpack_str = create_read_unpack_code(nbits, endian, signed) unpack_str = create_read_unpack_code(nbits, endian, signed)
assemble_str = create_read_assemble_code(nbits, endian, signed) assemble_str = create_read_assemble_code(nbits, endian)
"(#{unpack_str} ; #{assemble_str})" "(#{unpack_str} ; #{assemble_str})"
end end
@ -98,7 +98,7 @@ module BinData
"ints = io.readbytes(#{nbytes}).unpack('#{pack_directive}')" "ints = io.readbytes(#{nbytes}).unpack('#{pack_directive}')"
end end
def create_read_assemble_code(nbits, endian, signed) def create_read_assemble_code(nbits, endian)
nwords = nbits / bits_per_word(nbits) nwords = nbits / bits_per_word(nbits)
idx = (0...nwords).to_a idx = (0...nwords).to_a
@ -117,7 +117,7 @@ module BinData
return "(val & 0xff).chr" if nbits == 8 return "(val & 0xff).chr" if nbits == 8
pack_directive = pack_directive(nbits, endian, signed) pack_directive = pack_directive(nbits, endian, signed)
words = val_as_packed_words(nbits, endian, signed) words = val_as_packed_words(nbits, endian)
pack_str = "[#{words}].pack('#{pack_directive}')" pack_str = "[#{words}].pack('#{pack_directive}')"
if need_signed_conversion_code?(nbits, signed) if need_signed_conversion_code?(nbits, signed)
@ -127,7 +127,7 @@ module BinData
end end
end end
def val_as_packed_words(nbits, endian, signed) def val_as_packed_words(nbits, endian)
nwords = nbits / bits_per_word(nbits) nwords = nbits / bits_per_word(nbits)
mask = (1 << bits_per_word(nbits)) - 1 mask = (1 << bits_per_word(nbits)) - 1
@ -136,7 +136,7 @@ module BinData
vals.reverse! if (endian == :big) vals.reverse! if (endian == :big)
vals = vals.collect { |val| "#{val} & #{mask}" } # TODO: "& mask" is needed to work around jruby bug. Remove this line when fixed. vals = vals.collect { |val| "#{val} & #{mask}" } # TODO: "& mask" is needed to work around jruby bug. Remove this line when fixed.
vals.join(",") vals.join(',')
end end
def create_int2uint_code(nbits) def create_int2uint_code(nbits)
@ -157,10 +157,10 @@ module BinData
def pack_directive(nbits, endian, signed) def pack_directive(nbits, endian, signed)
nwords = nbits / bits_per_word(nbits) nwords = nbits / bits_per_word(nbits)
directives = { 8 => "C", 16 => "S", 32 => "L", 64 => "Q" } directives = { 8 => 'C', 16 => 'S', 32 => 'L', 64 => 'Q' }
d = directives[bits_per_word(nbits)] d = directives[bits_per_word(nbits)]
d += ((endian == :big) ? ">" : "<") unless d == "C" d += ((endian == :big) ? '>' : '<') unless d == 'C'
if signed == :signed && directives.key?(nbits) if signed == :signed && directives.key?(nbits)
(d * nwords).downcase (d * nwords).downcase
@ -193,7 +193,7 @@ module BinData
/^Uint(\d+)be$/ => [:big, :unsigned], /^Uint(\d+)be$/ => [:big, :unsigned],
/^Uint(\d+)le$/ => [:little, :unsigned], /^Uint(\d+)le$/ => [:little, :unsigned],
/^Int(\d+)be$/ => [:big, :signed], /^Int(\d+)be$/ => [:big, :signed],
/^Int(\d+)le$/ => [:little, :signed], /^Int(\d+)le$/ => [:little, :signed]
} }
mappings.each_pair do |regex, args| mappings.each_pair do |regex, args|
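The mappings above (plus the on-demand registry lookup) make the integer classes available directly; typical usage, with illustrative values:

BinData::Uint16be.read("\x01\x02")            #=> 258
BinData::Int8.read("\xFF")                    #=> -1
BinData::Uint24le.new(0x010203).to_binary_s   #=> "\x03\x02\x01"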

View File

@ -5,217 +5,10 @@ module BinData
# interface for BinData objects to use when accessing the IO. # interface for BinData objects to use when accessing the IO.
module IO module IO
# Common operations for both Read and Write.
module Common
def initialize(io)
if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@raw_io = io
@buffer_end_points = nil
extend seekable? ? SeekableStream : UnSeekableStream
stream_init
end
#-------------
private
def seekable?
@raw_io.pos
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
nil
end
def seek(n)
seek_raw(buffer_limited_n(n))
end
def buffer_limited_n(n)
if @buffer_end_points
if n.nil? || n > 0
max = @buffer_end_points[1] - offset
n = max if n.nil? || n > max
else
min = @buffer_end_points[0] - offset
n = min if n < min
end
end
n
end
def with_buffer_common(n)
prev = @buffer_end_points
if prev
avail = prev[1] - offset
n = avail if n > avail
end
@buffer_end_points = [offset, offset + n]
begin
yield(*@buffer_end_points)
ensure
@buffer_end_points = prev
end
end
# Use #seek and #pos on seekable streams
module SeekableStream
# The number of bytes remaining in the input stream.
def num_bytes_remaining
start_mark = @raw_io.pos
@raw_io.seek(0, ::IO::SEEK_END)
end_mark = @raw_io.pos
if @buffer_end_points
if @buffer_end_points[1] < end_mark
end_mark = @buffer_end_points[1]
end
end
bytes_remaining = end_mark - start_mark
@raw_io.seek(start_mark, ::IO::SEEK_SET)
bytes_remaining
end
# All io calls in +block+ are rolled back after this
# method completes.
def with_readahead
mark = @raw_io.pos
begin
yield
ensure
@raw_io.seek(mark, ::IO::SEEK_SET)
end
end
#-----------
private
def stream_init
@initial_pos = @raw_io.pos
end
def offset_raw
@raw_io.pos - @initial_pos
end
def seek_raw(n)
@raw_io.seek(n, ::IO::SEEK_CUR)
end
def read_raw(n)
@raw_io.read(n)
end
def write_raw(data)
@raw_io.write(data)
end
end
# Manually keep track of offset for unseekable streams.
module UnSeekableStream
def offset_raw
@offset
end
# The number of bytes remaining in the input stream.
def num_bytes_remaining
raise IOError, "stream is unseekable"
end
# All io calls in +block+ are rolled back after this
# method completes.
def with_readahead
mark = @offset
@read_data = ""
@in_readahead = true
class << self
alias_method :read_raw_without_readahead, :read_raw
alias_method :read_raw, :read_raw_with_readahead
end
begin
yield
ensure
@offset = mark
@in_readahead = false
end
end
#-----------
private
def stream_init
@offset = 0
end
def read_raw(n)
data = @raw_io.read(n)
@offset += data.size if data
data
end
def read_raw_with_readahead(n)
data = ""
unless @read_data.empty? || @in_readahead
bytes_to_consume = [n, @read_data.length].min
data += @read_data.slice!(0, bytes_to_consume)
n -= bytes_to_consume
if @read_data.empty?
class << self
alias_method :read_raw, :read_raw_without_readahead
end
end
end
raw_data = @raw_io.read(n)
data += raw_data if raw_data
if @in_readahead
@read_data += data
end
@offset += data.size
data
end
def write_raw(data)
@offset += data.size
@raw_io.write(data)
end
def seek_raw(n)
raise IOError, "stream is unseekable" if n < 0
# NOTE: how do we seek on a writable stream?
# skip over data in 8k blocks
while n > 0
bytes_to_read = [n, 8192].min
read_raw(bytes_to_read)
n -= bytes_to_read
end
end
end
end
# Creates a StringIO around +str+. # Creates a StringIO around +str+.
def self.create_string_io(str = "") def self.create_string_io(str = "")
s = StringIO.new(str.dup.force_encoding(Encoding::BINARY)) bin_str = str.dup.force_encoding(Encoding::BINARY)
s.binmode StringIO.new(bin_str).tap(&:binmode)
s
end end
# Create a new IO Read wrapper around +io+. +io+ must provide #read, # Create a new IO Read wrapper around +io+. +io+ must provide #read,
@ -236,10 +29,17 @@ module BinData
# readbits(6), readbits(5) #=> [543210, a9876] # readbits(6), readbits(5) #=> [543210, a9876]
# #
class Read class Read
include Common
def initialize(io) def initialize(io)
super(io) if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@io = RawIO.new(io)
# bits when reading # bits when reading
@rnbits = 0 @rnbits = 0
@ -247,25 +47,38 @@ module BinData
@rendian = nil @rendian = nil
end end
# Sets a buffer of +n+ bytes on the io stream. Any reading or seeking # Allow transforming data in the input stream.
# calls inside the +block+ will be contained within this buffer. # See +BinData::Buffer+ as an example.
def with_buffer(n) #
with_buffer_common(n) do # +io+ must be an instance of +Transform+.
yield #
read # yields +self+ and +io+ to the given block
end def transform(io)
reset_read_bits
saved = @io
@io = io.prepend_to_chain(@io)
yield(self, io)
io.after_read_transform
ensure
@io = saved
end end
# Returns the current offset of the io stream. Offset will be rounded # The number of bytes remaining in the io stream.
# up when reading bitfields. def num_bytes_remaining
def offset @io.num_bytes_remaining
offset_raw
end end
# Seek +n+ bytes from the current position in the io stream. # Seek +n+ bytes from the current position in the io stream.
def seekbytes(n) def skipbytes(n)
reset_read_bits reset_read_bits
seek(n) @io.skip(n)
end
# Seek to an absolute offset within the io stream.
def seek_to_abs_offset(n)
reset_read_bits
@io.seek_abs(n)
end end
# Reads exactly +n+ bytes from +io+. # Reads exactly +n+ bytes from +io+.
@ -311,7 +124,7 @@ module BinData
private private
def read(n = nil) def read(n = nil)
str = read_raw(buffer_limited_n(n)) str = @io.read(n)
if n if n
raise EOFError, "End of file reached" if str.nil? raise EOFError, "End of file reached" if str.nil?
raise IOError, "data truncated" if str.size < n raise IOError, "data truncated" if str.size < n
@ -332,7 +145,7 @@ module BinData
end end
def accumulate_big_endian_bits def accumulate_big_endian_bits
byte = read(1).unpack('C').at(0) & 0xff byte = read(1).unpack1('C') & 0xff
@rval = (@rval << 8) | byte @rval = (@rval << 8) | byte
@rnbits += 8 @rnbits += 8
end end
@ -350,7 +163,7 @@ module BinData
end end
def accumulate_little_endian_bits def accumulate_little_endian_bits
byte = read(1).unpack('C').at(0) & 0xff byte = read(1).unpack1('C') & 0xff
@rval = @rval | (byte << @rnbits) @rval = @rval | (byte << @rnbits)
@rnbits += 8 @rnbits += 8
end end
@ -368,36 +181,46 @@ module BinData
# #
# See IO::Read for more information. # See IO::Read for more information.
class Write class Write
include Common
def initialize(io) def initialize(io)
super(io) if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@io = RawIO.new(io)
@wnbits = 0 @wnbits = 0
@wval = 0 @wval = 0
@wendian = nil @wendian = nil
end end
# Sets a buffer of +n+ bytes on the io stream. Any writes inside the # Allow transforming data in the output stream.
# +block+ will be contained within this buffer. If less than +n+ bytes # See +BinData::Buffer+ as an example.
# are written inside the block, the remainder will be padded with '\0' #
# bytes. # +io+ must be an instance of +Transform+.
def with_buffer(n) #
with_buffer_common(n) do |_buf_start, buf_end| # yields +self+ and +io+ to the given block
yield def transform(io)
write("\0" * (buf_end - offset))
end
end
# Returns the current offset of the io stream. Offset will be rounded
# up when writing bitfields.
def offset
offset_raw + (@wnbits > 0 ? 1 : 0)
end
# Seek +n+ bytes from the current position in the io stream.
def seekbytes(n)
flushbits flushbits
seek(n)
saved = @io
@io = io.prepend_to_chain(@io)
yield(self, io)
io.after_write_transform
ensure
@io = saved
end
# Seek to an absolute offset within the io stream.
def seek_to_abs_offset(n)
raise IOError, "stream is unseekable" unless @io.seekable?
flushbits
@io.seek_abs(n)
end end
# Writes the given string of bytes to the io stream. # Writes the given string of bytes to the io stream.
@ -438,12 +261,7 @@ module BinData
private private
def write(data) def write(data)
n = buffer_limited_n(data.size) @io.write(data)
if n < data.size
data = data[0, n]
end
write_raw(data)
end end
def write_big_endian_bits(val, nbits) def write_big_endian_bits(val, nbits)
@ -492,5 +310,210 @@ module BinData
(1 << nbits) - 1 (1 << nbits) - 1
end end
end end
# API used to access the raw data stream.
class RawIO
def initialize(io)
@io = io
@pos = 0
if is_seekable?(io)
@initial_pos = io.pos
else
singleton_class.prepend(UnSeekableIO)
end
end
def is_seekable?(io)
io.pos
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
nil
end
def seekable?
true
end
def num_bytes_remaining
start_mark = @io.pos
@io.seek(0, ::IO::SEEK_END)
end_mark = @io.pos
@io.seek(start_mark, ::IO::SEEK_SET)
end_mark - start_mark
end
def offset
@pos
end
def skip(n)
raise IOError, "can not skip backwards" if n.negative?
@io.seek(n, ::IO::SEEK_CUR)
@pos += n
end
def seek_abs(n)
@io.seek(n + @initial_pos, ::IO::SEEK_SET)
@pos = n
end
def read(n)
@io.read(n).tap { |data| @pos += (data&.size || 0) }
end
def write(data)
@io.write(data)
end
end
# An IO stream may be transformed before processing.
# e.g. encoding, compression, buffered.
#
# Multiple transforms can be chained together.
#
# To create a new transform layer, subclass +Transform+.
# Override the public methods +#read+ and +#write+ at a minimum.
# Additionally the hook, +#before_transform+, +#after_read_transform+
# and +#after_write_transform+ are available as well.
#
# IMPORTANT! If your transform changes the size of the underlying
# data stream (e.g. compression), then call
# +::transform_changes_stream_length!+ in your subclass.
class Transform
class << self
# Indicates that this transform changes the length of the
# underlying data. e.g. performs compression or error correction
def transform_changes_stream_length!
prepend(UnSeekableIO)
end
end
def initialize
@chain_io = nil
end
# Initialises this transform.
#
# Called before any IO operations.
def before_transform; end
# Flushes the input stream.
#
# Called after the final read operation.
def after_read_transform; end
# Flushes the output stream.
#
# Called after the final write operation.
def after_write_transform; end
# Prepends this transform to the given +chain+.
#
# Returns self (the new head of chain).
def prepend_to_chain(chain)
@chain_io = chain
before_transform
self
end
# Is the IO seekable?
def seekable?
@chain_io.seekable?
end
# How many bytes are available for reading?
def num_bytes_remaining
chain_num_bytes_remaining
end
# The current offset within the stream.
def offset
chain_offset
end
# Skips forward +n+ bytes in the input stream.
def skip(n)
chain_skip(n)
end
# Seeks to the given absolute position.
def seek_abs(n)
chain_seek_abs(n)
end
# Reads +n+ bytes from the stream.
def read(n)
chain_read(n)
end
# Writes +data+ to the stream.
def write(data)
chain_write(data)
end
#-------------
private
def create_empty_binary_string
"".force_encoding(Encoding::BINARY)
end
def chain_seekable?
@chain_io.seekable?
end
def chain_num_bytes_remaining
@chain_io.num_bytes_remaining
end
def chain_offset
@chain_io.offset
end
def chain_skip(n)
@chain_io.skip(n)
end
def chain_seek_abs(n)
@chain_io.seek_abs(n)
end
def chain_read(n)
@chain_io.read(n)
end
def chain_write(data)
@chain_io.write(data)
end
end
# A module to be prepended to +RawIO+ or +Transform+ when the data
# stream is not seekable. This is either due to underlying stream
# being unseekable or the transform changes the number of bytes.
module UnSeekableIO
def seekable?
false
end
def num_bytes_remaining
raise IOError, "stream is unseekable"
end
def skip(n)
raise IOError, "can not skip backwards" if n.negative?
# skip over data in 8k blocks
while n > 0
bytes_to_read = [n, 8192].min
read(bytes_to_read)
n -= bytes_to_read
end
end
def seek_abs(n)
skip(n - offset)
end
end
end end
end end
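A minimal sketch of a custom transform built on the chain API introduced above. The chain_read/chain_write hooks come from the diff; the XorTransform name, the key, and the BinData::Section usage (that file is added by this commit but not shown in this excerpt) follow the upstream 2.5.0 example and should be treated as assumptions here:

class XorTransform < BinData::IO::Transform
  def initialize(xor)
    super()
    @xor = xor
  end

  def read(n)
    chain_read(n).bytes.map { |b| (b ^ @xor).chr }.join
  end

  def write(data)
    chain_write(data.bytes.map { |b| (b ^ @xor).chr }.join)
  end
end

obj = BinData::Section.new(transform: -> { XorTransform.new(0xff) },
                           type: [:string, read_length: 5])
obj.read("\x97\x9A\x93\x93\x90")  #=> "hello"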

View File

@ -13,7 +13,7 @@ module BinData
# </pre></code> # </pre></code>
module RegisterNamePlugin module RegisterNamePlugin
def self.included(base) #:nodoc: def self.included(base) # :nodoc:
# The registered name may be provided explicitly. # The registered name may be provided explicitly.
base.optional_parameter :name base.optional_parameter :name
end end

View File

@ -27,7 +27,7 @@ module BinData
alias optional_parameter optional_parameters alias optional_parameter optional_parameters
alias default_parameter default_parameters alias default_parameter default_parameters
def accepted_parameters #:nodoc: def accepted_parameters # :nodoc:
@accepted_parameters ||= begin @accepted_parameters ||= begin
ancestor_params = superclass.respond_to?(:accepted_parameters) ? ancestor_params = superclass.respond_to?(:accepted_parameters) ?
superclass.accepted_parameters : nil superclass.accepted_parameters : nil
@ -114,13 +114,15 @@ module BinData
end end
end end
def self.invalid_parameter_names class << self
@invalid_names ||= begin def invalid_parameter_names
all_names = LazyEvaluator.instance_methods(true) @invalid_parameter_names ||= begin
allowed_names = [:name, :type] all_names = LazyEvaluator.instance_methods(true)
invalid_names = (all_names - allowed_names).uniq allowed_names = [:name, :type]
invalid_names = (all_names - allowed_names).uniq
Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten] Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten]
end
end end
end end
end end

View File

@ -73,11 +73,11 @@ module BinData
@struct = BinData::Struct.new(get_parameter(:struct_params), self) @struct = BinData::Struct.new(get_parameter(:struct_params), self)
end end
def respond_to?(symbol, include_private = false) #:nodoc: def respond_to?(symbol, include_private = false) # :nodoc:
@struct.respond_to?(symbol, include_private) || super @struct.respond_to?(symbol, include_private) || super
end end
def method_missing(symbol, *args, &block) #:nodoc: def method_missing(symbol, *args, &block) # :nodoc:
if @struct.respond_to?(symbol) if @struct.respond_to?(symbol)
@struct.__send__(symbol, *args, &block) @struct.__send__(symbol, *args, &block)
else else
@ -91,7 +91,7 @@ module BinData
@value = get @value = get
end end
def debug_name_of(child) #:nodoc: def debug_name_of(child) # :nodoc:
debug_name + "-internal-" debug_name + "-internal-"
end end
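For context, a user-defined Primitive (whose delegation to its internal struct is adjusted above) typically looks like the PascalString example from the BinData manual:

class PascalString < BinData::Primitive
  uint8  :len,  value: -> { data.length }
  string :data, read_length: :len

  def get;    self.data;     end
  def set(v); self.data = v; end
end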

View File

@ -1,6 +1,6 @@
module BinData
- class UnRegisteredTypeError < StandardError ; end
+ # Raised when #lookup fails.
+ class UnRegisteredTypeError < StandardError; end
# This registry contains a register of name -> class mappings.
#
@ -18,7 +18,6 @@ module BinData
#
# Names are stored in under_score_style, not camelCase.
class Registry
-
def initialize
@registry = {}
end
@ -49,13 +48,13 @@ module BinData
# Convert CamelCase +name+ to underscore style.
def underscore_name(name)
- name.
-   to_s.
-   sub(/.*::/, "").
-   gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
-   gsub(/([a-z\d])([A-Z])/, '\1_\2').
-   tr("-", "_").
-   downcase
+ name
+   .to_s
+   .sub(/.*::/, "")
+   .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
+   .gsub(/([a-z\d])([A-Z])/, '\1_\2')
+   .tr('-', '_')
+   .downcase
end
#---------------
@ -65,7 +64,7 @@ module BinData
name = underscore_name(name)
if !registered?(name)
- search_prefix = [""].concat(Array(hints[:search_prefix]))
+ search_prefix = [""] + Array(hints[:search_prefix])
search_prefix.each do |prefix|
nwp = name_with_prefix(name, prefix)
if registered?(nwp)
@ -85,7 +84,7 @@ module BinData
end
def name_with_prefix(name, prefix)
- prefix = prefix.to_s.chomp("_")
+ prefix = prefix.to_s.chomp('_')
if prefix == ""
name
else
@ -96,11 +95,11 @@ module BinData
def name_with_endian(name, endian)
return name if endian.nil?
- suffix = (endian == :little) ? "le" : "be"
- if /^u?int\d+$/ =~ name
+ suffix = (endian == :little) ? 'le' : 'be'
+ if /^u?int\d+$/.match?(name)
name + suffix
else
- name + "_" + suffix
+ name + '_' + suffix
end
end
@ -111,9 +110,10 @@ module BinData
end
def register_dynamic_class(name)
- if /^u?int\d+(le|be)$/ =~ name || /^s?bit\d+(le)?$/ =~ name
+ if /^u?int\d+(le|be)$/.match?(name) || /^s?bit\d+(le)?$/.match?(name)
class_name = name.gsub(/(?:^|_)(.)/) { $1.upcase }
begin
+ # call const_get for side effects
BinData.const_get(class_name)
rescue NameError
end
@ -122,8 +122,8 @@ module BinData
def warn_if_name_is_already_registered(name, class_to_register)
prev_class = @registry[name]
- if $VERBOSE && prev_class && prev_class != class_to_register
-   warn "warning: replacing registered class #{prev_class} " \
+ if prev_class && prev_class != class_to_register
+   Kernel.warn "warning: replacing registered class #{prev_class} " \
"with #{class_to_register}"
end
end

View File

@ -1,4 +1,4 @@
- require "bindata/base_primitive"
+ require 'bindata/base_primitive'
module BinData
# Rest will consume the input stream from the current position to the end of

View File

@ -49,14 +49,10 @@ module BinData
@prototype = SanitizedPrototype.new(field_type, field_params, hints)
end
- attr_reader :prototype
+ attr_reader :prototype, :name
def name_as_sym
- @name.nil? ? nil : @name.to_sym
- end
-
- def name
-   @name
+ @name&.to_sym
end
def has_parameter?(param)
@ -74,11 +70,7 @@ module BinData
def initialize(hints, base_fields = nil)
@hints = hints
- if base_fields
-   @fields = base_fields.raw_fields
- else
-   @fields = []
- end
+ @fields = base_fields ? base_fields.raw_fields : []
end
def add_field(type, name, params)
@ -179,7 +171,6 @@ module BinData
# is to recursively sanitize the parameters of an entire BinData object chain
# at a single time.
class SanitizedParameters < Hash
-
# Memoized constants
BIG_ENDIAN = SanitizedBigEndian.new
LITTLE_ENDIAN = SanitizedLittleEndian.new
@ -210,7 +201,7 @@ module BinData
sanitize!
end
- alias_method :has_parameter?, :key?
+ alias has_parameter? key?
def has_at_least_one_of?(*keys)
keys.each do |key|
@ -257,7 +248,9 @@ module BinData
end
def sanitize_object_prototype(key)
- sanitize(key) { |obj_type, obj_params| create_sanitized_object_prototype(obj_type, obj_params) }
+ sanitize(key) do |obj_type, obj_params|
+   create_sanitized_object_prototype(obj_type, obj_params)
+ end
end
def sanitize_fields(key, &block)
@ -306,7 +299,7 @@ module BinData
end
def needs_sanitizing?(key)
- has_key?(key) && ! self[key].is_a?(SanitizedParameter)
+ has_parameter?(key) && !self[key].is_a?(SanitizedParameter)
end
def ensure_no_nil_values
@ -320,7 +313,7 @@ module BinData
def merge_default_parameters!
@the_class.default_parameters.each do |key, value|
- self[key] = value unless has_key?(key)
+ self[key] = value unless has_parameter?(key)
end
end

View File

@ -0,0 +1,97 @@
require 'bindata/base'
require 'bindata/dsl'
module BinData
# A Section is a layer on top of a stream that transforms the underlying
# data. This allows BinData to process a stream that has multiple
# encodings, e.g. some data is compressed or encrypted.
#
# require 'bindata'
#
# class XorTransform < BinData::IO::Transform
# def initialize(xor)
# super()
# @xor = xor
# end
#
# def read(n)
# chain_read(n).bytes.map { |byte| (byte ^ @xor).chr }.join
# end
#
# def write(data)
# chain_write(data.bytes.map { |byte| (byte ^ @xor).chr }.join)
# end
# end
#
# obj = BinData::Section.new(transform: -> { XorTransform.new(0xff) },
# type: [:string, read_length: 5])
#
# obj.read("\x97\x9A\x93\x93\x90") #=> "hello"
#
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params are:
#
# <tt>:transform</tt>:: A callable that returns a new BinData::IO::Transform.
# <tt>:type</tt>:: The single type inside the buffer. Use a struct if
# multiple fields are required.
class Section < BinData::Base
extend DSLMixin
dsl_parser :section
arg_processor :section
mandatory_parameters :transform, :type
def initialize_instance
@type = get_parameter(:type).instantiate(nil, self)
end
def clear?
@type.clear?
end
def assign(val)
@type.assign(val)
end
def snapshot
@type.snapshot
end
def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block)
end
def do_read(io) # :nodoc:
io.transform(eval_parameter(:transform)) do |transformed_io, _raw_io|
@type.do_read(transformed_io)
end
end
def do_write(io) # :nodoc:
io.transform(eval_parameter(:transform)) do |transformed_io, _raw_io|
@type.do_write(transformed_io)
end
end
def do_num_bytes # :nodoc:
to_binary_s.size
end
end
class SectionArgProcessor < BaseArgProcessor
include MultiFieldArgSeparator
def sanitize_parameters!(obj_class, params)
params.merge!(obj_class.dsl_params)
params.sanitize_object_prototype(:type)
end
end
end
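Beyond the XorTransform example in the documentation above, the compression transforms added later in this diff can be plugged straight into a Section. A minimal sketch, assuming the new Zlib transform is loaded via a 'bindata/transform/zlib' require path (the actual file path is not shown in this diff):

require 'zlib'
require 'bindata'
require 'bindata/transform/zlib' # assumed require path for the transform added below

compressed = Zlib::Deflate.deflate("hello")

obj = BinData::Section.new(
  transform: -> { BinData::Transform::Zlib.new(compressed.size) },
  type: [:string, read_length: 5]
)

obj.read(compressed) #=> "hello"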

View File

@ -0,0 +1,222 @@
require 'bindata/base_primitive'
require 'bindata/dsl'
module BinData
# Skip will skip over bytes from the input stream. If the stream is not
# seekable, then the bytes are consumed and discarded.
#
# When writing, skip will write the appropriate number of zero bytes.
#
# require 'bindata'
#
# class A < BinData::Record
# skip length: 5
# string :a, read_length: 5
# end
#
# obj = A.read("abcdefghij")
# obj.a #=> "fghij"
#
#
# class B < BinData::Record
# skip do
# string read_length: 2, assert: 'ef'
# end
# string :s, read_length: 5
# end
#
# obj = B.read("abcdefghij")
# obj.s #=> "efghi"
#
#
# == Parameters
#
# Skip objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:length</tt>:: The number of bytes to skip.
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
# This parameter contains a type that will raise
# a BinData::ValidityError unless an acceptable byte
# sequence is found. The type is represented by a
# Symbol, or if the type is to have params
# passed to it, then it should be provided as
# <tt>[type_symbol, hash_params]</tt>.
#
class Skip < BinData::BasePrimitive
extend DSLMixin
dsl_parser :skip
arg_processor :skip
optional_parameters :length, :to_abs_offset, :until_valid
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
def initialize_shared_instance
extend SkipLengthPlugin if has_parameter?(:length)
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
super
end
#---------------
private
def value_to_binary_string(_)
len = skip_length
if len.negative?
raise ArgumentError,
"#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
"\000" * skip_length
end
def read_and_return_value(io)
len = skip_length
if len.negative?
raise ArgumentError,
"#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
io.skipbytes(len)
""
end
def sensible_default
""
end
# Logic for the :length parameter
module SkipLengthPlugin
def skip_length
eval_parameter(:length)
end
end
# Logic for the :to_abs_offset parameter
module SkipToAbsOffsetPlugin
def skip_length
eval_parameter(:to_abs_offset) - abs_offset
end
end
# Logic for the :until_valid parameter
module SkipUntilValidPlugin
def skip_length
@skip_length ||= 0
end
def read_and_return_value(io)
prototype = get_parameter(:until_valid)
validator = prototype.instantiate(nil, self)
fs = fast_search_for_obj(validator)
io.transform(ReadaheadIO.new) do |transformed_io, raw_io|
pos = 0
loop do
seek_to_pos(pos, raw_io)
validator.clear
validator.do_read(transformed_io)
break
rescue ValidityError
pos += 1
if fs
seek_to_pos(pos, raw_io)
pos += next_search_index(raw_io, fs)
end
end
seek_to_pos(pos, raw_io)
@skip_length = pos
end
end
def seek_to_pos(pos, io)
io.rollback
io.skip(pos)
end
# A fast search has a pattern string at a specific offset.
FastSearch = ::Struct.new('FastSearch', :pattern, :offset)
def fast_search_for(obj)
if obj.respond_to?(:asserted_binary_s)
FastSearch.new(obj.asserted_binary_s, obj.rel_offset)
else
nil
end
end
# If a search object has an +asserted_value+ field then we
# perform a faster search for a valid object.
def fast_search_for_obj(obj)
if BinData::Struct === obj
obj.each_pair(true) do |_, field|
fs = fast_search_for(field)
return fs if fs
end
elsif BinData::BasePrimitive === obj
return fast_search_for(obj)
end
nil
end
SEARCH_SIZE = 100_000
def next_search_index(io, fs)
buffer = binary_string("")
# start searching at fast_search offset
pos = fs.offset
io.skip(fs.offset)
loop do
data = io.read(SEARCH_SIZE)
raise EOFError, "no match" if data.nil?
buffer << data
index = buffer.index(fs.pattern)
if index
return pos + index - fs.offset
end
# advance buffer
searched = buffer.slice!(0..-fs.pattern.size)
pos += searched.size
end
end
class ReadaheadIO < BinData::IO::Transform
def before_transform
if !seekable?
raise IOError, "readahead is not supported on unseekable streams"
end
@mark = offset
end
def rollback
seek_abs(@mark)
end
end
end
end
class SkipArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params)
params.merge!(obj_class.dsl_params)
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
raise ArgumentError,
"#{obj_class} requires :length, :to_abs_offset or :until_valid"
end
params.must_be_integer(:to_abs_offset, :length)
params.sanitize_object_prototype(:until_valid)
end
end
end
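As a usage sketch of the [type_symbol, hash_params] form of :until_valid described above (the record name and byte values are illustrative only):

require 'bindata'

class FindMagic < BinData::Record
  # scan forward until the four bytes "MAGI" are found
  skip until_valid: [:string, { read_length: 4, assert: "MAGI" }]
  string :magic, read_length: 4
end

obj = FindMagic.read("garbage bytes MAGI payload")
obj.magic #=> "MAGI"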

View File

@ -1,4 +1,4 @@
- require "bindata/base_primitive"
+ require 'bindata/base_primitive'
module BinData
# A String is a sequence of bytes. This is the same as strings in Ruby 1.8.
@ -121,6 +121,14 @@ module BinData
def sensible_default
""
end
+
+ # Warns when reading if :value && no :read_length
+ module WarnNoReadLengthPlugin
+   def read_and_return_value(io)
+     Kernel.warn "#{debug_name} does not have a :read_length parameter - returning empty string"
+     ""
+   end
+ end
end
class StringArgProcessor < BaseArgProcessor
@ -142,12 +150,4 @@ module BinData
pad_byte
end
end
-
- # Warns when reading if :value && no :read_length
- module WarnNoReadLengthPlugin
-   def read_and_return_value(io)
-     warn "#{debug_name} does not have a :read_length parameter - returning empty string"
-     ""
-   end
- end
end
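For context on the module moved above: it is mixed in when a string field has a :value but no :read_length, so reading such a field only emits a warning and yields an empty string. A rough sketch (hypothetical record):

require 'bindata'

class Header < BinData::Record
  # :value without :read_length - reading warns and returns ""
  string :tag, value: "TAG!"
end

Header.read("any input") # warns that the tag field has no :read_length parameter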

View File

@ -1,4 +1,4 @@
- require "bindata/base_primitive"
+ require 'bindata/base_primitive'
module BinData
# A BinData::Stringz object is a container for a zero ("\0") terminated
@ -25,7 +25,6 @@ module BinData
# <tt>:max_length</tt>:: The maximum length of the string including the zero
#                        byte.
class Stringz < BinData::BasePrimitive
-
optional_parameters :max_length
def assign(val)
@ -47,14 +46,14 @@ module BinData
def read_and_return_value(io)
max_length = eval_parameter(:max_length)
- str = ""
+ str = binary_string("")
i = 0
ch = nil
# read until zero byte or we have read in the max number of bytes
while ch != "\0" && i != max_length
ch = io.readbytes(1)
- str += ch
+ str << ch
i += 1
end
@ -66,9 +65,15 @@ module BinData
end
def trim_and_zero_terminate(str)
+ max_length = eval_parameter(:max_length)
+ if max_length && max_length < 1
+   msg = "max_length must be >= 1 in #{debug_name} (got #{max_length})"
+   raise ArgumentError, msg
+ end
+
result = binary_string(str)
truncate_after_first_zero_byte!(result)
- trim_to!(result, eval_parameter(:max_length))
+ trim_to!(result, max_length)
append_zero_byte_if_needed!(result)
result
end
@ -79,16 +84,13 @@ module BinData
def trim_to!(str, max_length = nil)
if max_length
- max_length = 1 if max_length < 1
str.slice!(max_length..-1)
- if str.length == max_length && str[-1, 1] != "\0"
-   str[-1, 1] = "\0"
- end
+ str[-1, 1] = "\0" if str.length == max_length
end
end
def append_zero_byte_if_needed!(str)
- if str.length == 0 || str[-1, 1] != "\0"
+ if str.empty? || str[-1, 1] != "\0"
str << "\0"
end
end
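The guard added above turns what used to be a silent clamp (max_length forced up to 1 in trim_to!) into an explicit error. A rough sketch of the behaviour change, with illustrative values:

require 'bindata'

obj = BinData::Stringz.new(max_length: 0)
obj.assign("hi")
obj.to_binary_s
# bindata 2.4.x silently clamped max_length to 1 here; with this change
# an ArgumentError ("max_length must be >= 1 ...") is raised instead.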

View File

@ -2,7 +2,6 @@ require 'bindata/base'
require 'bindata/delayed_io'
module BinData
class Base
-
optional_parameter :onlyif, :byte_align # Used by Struct
end
@ -66,16 +65,18 @@ module BinData
RESERVED =
Hash[*
(Hash.instance_methods +
- %w{alias and begin break case class def defined do else elsif
+ %w[alias and begin break case class def defined do else elsif
end ensure false for if in module next nil not or redo
rescue retry return self super then true undef unless until
- when while yield} +
- %w{array element index value} +
- %w{type initial_length read_until} +
- %w{fields endian search_prefix hide only_if byte_align} +
- %w{choices selection copy_on_change} +
- %w{read_abs_offset struct_params}).collect(&:to_sym).
-   uniq.collect { |key| [key, true] }.flatten
+ when while yield] +
+ %w[array element index value] +
+ %w[type initial_length read_until] +
+ %w[fields endian search_prefix hide onlyif byte_align] +
+ %w[choices selection copy_on_change] +
+ %w[read_abs_offset struct_params])
+   .collect(&:to_sym)
+   .uniq.collect { |key| [key, true] }
+   .flatten
]
def initialize_shared_instance
@ -90,11 +91,11 @@ module BinData
@field_objs = []
end
- def clear #:nodoc:
-   @field_objs.each { |f| f.clear unless f.nil? }
+ def clear # :nodoc:
+   @field_objs.each { |f| f.nil? || f.clear }
end
- def clear? #:nodoc:
+ def clear? # :nodoc:
@field_objs.all? { |f| f.nil? || f.clear? }
end
@ -124,28 +125,28 @@ module BinData
end
end
- def debug_name_of(child) #:nodoc:
+ def debug_name_of(child) # :nodoc:
field_name = @field_names[find_index_of(child)]
"#{debug_name}.#{field_name}"
end
- def offset_of(child) #:nodoc:
+ def offset_of(child) # :nodoc:
instantiate_all_objs
sum = sum_num_bytes_below_index(find_index_of(child))
child.bit_aligned? ? sum.floor : sum.ceil
end
- def do_read(io) #:nodoc:
+ def do_read(io) # :nodoc:
instantiate_all_objs
@field_objs.each { |f| f.do_read(io) if include_obj_for_io?(f) }
end
- def do_write(io) #:nodoc
+ def do_write(io) # :nodoc:
instantiate_all_objs
@field_objs.each { |f| f.do_write(io) if include_obj_for_io?(f) }
end
- def do_num_bytes #:nodoc:
+ def do_num_bytes # :nodoc:
instantiate_all_objs
sum_num_bytes_for_all_fields
end
@ -155,19 +156,28 @@ module BinData
end
def []=(key, value)
- obj = find_obj_for_name(key)
- if obj
-   obj.assign(value)
- end
+ find_obj_for_name(key)&.assign(value)
end
def key?(key)
@field_names.index(base_field_name(key))
end
- def each_pair
-   @field_names.compact.each do |name|
-     yield [name, find_obj_for_name(name)]
-   end
+ # Calls the given block for each field_name-field_obj pair.
+ #
+ # Does not include anonymous or hidden fields unless
+ # +include_all+ is true.
+ def each_pair(include_all = false)
+   instantiate_all_objs
+
+   pairs = @field_names.zip(@field_objs).select do |name, _obj|
+     name || include_all
+   end
+
+   if block_given?
+     pairs.each { |el| yield(el) }
+   else
+     pairs.each
+   end
end
end
@ -205,8 +215,6 @@ module BinData
if index
instantiate_obj_at(index)
@field_objs[index]
- else
-   nil
end
end
@ -243,7 +251,7 @@ module BinData
{}
else
hash = Snapshot.new
- val.each_pair { |k,v| hash[k] = v }
+ val.each_pair { |k, v| hash[k] = v }
hash
end
end
@ -275,12 +283,12 @@ module BinData
end
# A hash that can be accessed via attributes.
- class Snapshot < ::Hash #:nodoc:
+ class Snapshot < ::Hash # :nodoc:
def []=(key, value)
super unless value.nil?
end
- def respond_to?(symbol, include_private = false)
+ def respond_to_missing?(symbol, include_all = false)
key?(symbol) || super
end
@ -288,60 +296,71 @@ module BinData
key?(symbol) ? self[symbol] : super key?(symbol) ? self[symbol] : super
end end
end end
end
# Align fields to a multiple of :byte_align # Align fields to a multiple of :byte_align
module ByteAlignPlugin module ByteAlignPlugin
def do_read(io) def do_read(io)
initial_offset = io.offset offset = 0
instantiate_all_objs instantiate_all_objs
@field_objs.each do |f| @field_objs.each do |f|
if include_obj?(f) next unless include_obj?(f)
if align_obj?(f) if align_obj?(f)
io.seekbytes(bytes_to_align(f, io.offset - initial_offset)) nbytes = bytes_to_align(f, offset.ceil)
offset = offset.ceil + nbytes
io.readbytes(nbytes)
end end
f.do_read(io) f.do_read(io)
nbytes = f.do_num_bytes
offset = (nbytes.is_a?(Integer) ? offset.ceil : offset) + nbytes
end end
end end
end
def do_write(io) def do_write(io)
initial_offset = io.offset offset = 0
instantiate_all_objs instantiate_all_objs
@field_objs.each do |f| @field_objs.each do |f|
if include_obj?(f) next unless include_obj?(f)
if align_obj?(f) if align_obj?(f)
io.writebytes("\x00" * bytes_to_align(f, io.offset - initial_offset)) nbytes = bytes_to_align(f, offset.ceil)
offset = offset.ceil + nbytes
io.writebytes("\x00" * nbytes)
end end
f.do_write(io) f.do_write(io)
nbytes = f.do_num_bytes
offset = (nbytes.is_a?(Integer) ? offset.ceil : offset) + nbytes
end end
end end
end
def sum_num_bytes_below_index(index) def sum_num_bytes_below_index(index)
sum = 0 sum = 0
(0...@field_objs.length).each do |i| @field_objs.each_with_index do |obj, i|
obj = @field_objs[i] next unless include_obj?(obj)
if include_obj?(obj)
sum = sum.ceil + bytes_to_align(obj, sum.ceil) if align_obj?(obj) if align_obj?(obj)
sum = sum.ceil + bytes_to_align(obj, sum.ceil)
end
break if i >= index break if i >= index
nbytes = obj.do_num_bytes nbytes = obj.do_num_bytes
sum = (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes sum = (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
end end
sum
end end
sum def bytes_to_align(obj, rel_offset)
end align = obj.eval_parameter(:byte_align)
(align - (rel_offset % align)) % align
end
def bytes_to_align(obj, rel_offset) def align_obj?(obj)
align = obj.eval_parameter(:byte_align) obj.has_parameter?(:byte_align)
(align - (rel_offset % align)) % align end
end
def align_obj?(obj)
obj.has_parameter?(:byte_align)
end end
end end
@ -362,13 +381,11 @@ module BinData
def sanitize_search_prefix(params)
params.sanitize(:search_prefix) do |sprefix|
- search_prefix = []
- Array(sprefix).each do |prefix|
-   prefix = prefix.to_s.chomp("_")
-   search_prefix << prefix if prefix != ""
- end
-
- search_prefix
+ search_prefix = Array(sprefix).collect do |prefix|
+   prefix.to_s.chomp("_")
+ end
+
+ search_prefix - [""]
end
end

View File

@ -1,24 +1,4 @@
module BinData module BinData
# reference to the current tracer
@tracer ||= nil
class Tracer #:nodoc:
def initialize(io)
@trace_io = io
end
def trace(msg)
@trace_io.puts(msg)
end
def trace_obj(obj_name, val)
if val.length > 30
val = val.slice(0..30) + "..."
end
trace "#{obj_name} => #{val}"
end
end
# Turn on trace information when reading a BinData object. # Turn on trace information when reading a BinData object.
# If +block+ is given then the tracing only occurs for that block. # If +block+ is given then the tracing only occurs for that block.
@ -37,30 +17,55 @@ module BinData
end end
end end
def trace_message #:nodoc: # reference to the current tracer
yield @tracer if @tracer @tracer ||= nil
class Tracer # :nodoc:
def initialize(io)
@trace_io = io
end
def trace(msg)
@trace_io.puts(msg)
end
def trace_obj(obj_name, val)
if val.length > 30
val = val.slice(0..30) + "..."
end
trace "#{obj_name} => #{val}"
end
end
def trace_message # :nodoc:
yield @tracer
end end
module_function :trace_reading, :trace_message module_function :trace_reading, :trace_message
class BasePrimitive < BinData::Base module TraceHook
class << self def turn_on_tracing
def turn_on_tracing if !method_defined? :do_read_without_hook
alias_method :do_read_without_hook, :do_read alias_method :do_read_without_hook, :do_read
alias_method :do_read, :do_read_with_hook alias_method :do_read, :do_read_with_hook
end end
end
def turn_off_tracing def turn_off_tracing
if method_defined? :do_read_without_hook
alias_method :do_read, :do_read_without_hook alias_method :do_read, :do_read_without_hook
remove_method :do_read_without_hook
end end
end end
end
class BasePrimitive < BinData::Base
extend TraceHook
def do_read_with_hook(io) def do_read_with_hook(io)
do_read_without_hook(io) do_read_without_hook(io)
trace_value
end
def trace_value
BinData.trace_message do |tracer| BinData.trace_message do |tracer|
value_string = _value.inspect value_string = _value.inspect
tracer.trace_obj(debug_name, value_string) tracer.trace_obj(debug_name, value_string)
@ -69,27 +74,15 @@ module BinData
end end
class Choice < BinData::Base class Choice < BinData::Base
class << self extend TraceHook
def turn_on_tracing
alias_method :do_read_without_hook, :do_read
alias_method :do_read, :do_read_with_hook
end
def turn_off_tracing
alias_method :do_read, :do_read_without_hook
end
end
def do_read_with_hook(io) def do_read_with_hook(io)
trace_selection
do_read_without_hook(io)
end
def trace_selection
BinData.trace_message do |tracer| BinData.trace_message do |tracer|
selection_string = eval_parameter(:selection).inspect selection_string = eval_parameter(:selection).inspect
tracer.trace_obj("#{debug_name}-selection-", selection_string) tracer.trace_obj("#{debug_name}-selection-", selection_string)
end end
do_read_without_hook(io)
end end
end end
end end

View File

@ -0,0 +1,35 @@
require 'brotli'
module BinData
module Transform
# Transforms a brotli compressed data stream.
#
# gem install brotli
class Brotli < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Brotli::inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Brotli::deflate(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'extlz4'
module BinData
module Transform
# Transforms a LZ4 compressed data stream.
#
# gem install extlz4
class LZ4 < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::LZ4::decode(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::LZ4::encode(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'xz'
module BinData
module Transform
# Transforms a lzma compressed data stream.
#
# gem install ruby-xz
class Lzma < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end

View File

@ -0,0 +1,19 @@
module BinData
module Transform
# Transforms the data stream by xoring each byte.
class Xor < BinData::IO::Transform
def initialize(xor)
super()
@xor = xor
end
def read(n)
chain_read(n).bytes.map { |byte| (byte ^ @xor).chr }.join
end
def write(data)
chain_write(data.bytes.map { |byte| (byte ^ @xor).chr }.join)
end
end
end
end
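This ships the XorTransform from the Section documentation as a built-in class. A usage sketch, assuming the class is loaded via a 'bindata/transform/xor' require path (the path is not shown in this diff):

require 'bindata'
require 'bindata/transform/xor' # assumed require path for the file above

obj = BinData::Section.new(transform: -> { BinData::Transform::Xor.new(0xff) },
                           type: [:string, read_length: 5])

obj.read("\x97\x9A\x93\x93\x90") #=> "hello"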

View File

@ -0,0 +1,35 @@
require 'xz'
module BinData
module Transform
# Transforms a xz compressed data stream.
#
# gem install ruby-xz
class XZ < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end

View File

@ -0,0 +1,33 @@
require 'zlib'
module BinData
module Transform
# Transforms a zlib compressed data stream.
class Zlib < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zlib::Inflate.inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zlib::Deflate.deflate(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'zstd-ruby'
module BinData
module Transform
# Transforms a zstd compressed data stream.
#
# gem install zstd-ruby
class Zstd < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zstd::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zstd::compress(@write))
end
end
end
end

View File

@ -1,4 +1,4 @@
- require "bindata/base_primitive"
+ require 'bindata/base_primitive'
module BinData
# Uint8Array is a specialised type of array that only contains
@ -49,7 +49,7 @@ module BinData
end
class Uint8ArrayArgProcessor < BaseArgProcessor
- def sanitize_parameters!(obj_class, params) #:nodoc:
+ def sanitize_parameters!(obj_class, params) # :nodoc:
# ensure one of :initial_length and :read_until exists
unless params.has_at_least_one_of?(:initial_length, :read_until)
params[:initial_length] = 0

View File

@ -0,0 +1,3 @@
module BinData
VERSION = '2.5.0'
end

View File

@ -1,4 +1,4 @@
- require "bindata/base"
+ require 'bindata/base'
module BinData
# A virtual field is one that is neither read, written nor occupies space in
@ -15,7 +15,7 @@ module BinData
#
# obj = A.read("abcdeabcde")
# obj.a #=> "abcde"
- # obj.c.offset #=> 10
+ # obj.c.rel_offset #=> 10
#
# obj = A.read("abcdeABCDE") #=> BinData::ValidityError: assertion failed for obj.c
#
@ -29,12 +29,9 @@ module BinData
# [<tt>:value</tt>] The virtual object will always have this value.
#
class Virtual < BinData::BasePrimitive
-
- def do_read(io)
- end
-
- def do_write(io)
- end
+ def do_read(io); end
+ def do_write(io); end
def do_num_bytes
0.0

View File

@ -13,7 +13,7 @@ module BinData
owner = method(:initialize).owner
if owner != BinData::Base
msg = "Don't override #initialize on #{owner}."
- if %w(BinData::Base BinData::BasePrimitive).include? self.class.superclass.name
+ if %w[BinData::Base BinData::BasePrimitive].include? self.class.superclass.name
msg += "\nrename #initialize to #initialize_instance."
end
fail msg