Merge pull request #16687 from Homebrew/dependabot/bundler/Library/Homebrew/bindata-2.5.0

This commit is contained in:
Patrick Linnane 2024-02-16 09:40:29 -08:00 committed by GitHub
commit fa23636c44
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
47 changed files with 2103 additions and 1403 deletions

View File

@ -4,7 +4,7 @@ GEM
addressable (2.8.6)
public_suffix (>= 2.0.2, < 6.0)
ast (2.4.2)
bindata (2.4.15)
bindata (2.5.0)
bootsnap (1.18.3)
msgpack (~> 1.2)
byebug (11.1.3)

View File

@ -30,7 +30,7 @@ end
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/public_suffix-5.0.4/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/addressable-2.8.6/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/ast-2.4.2/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/bindata-2.4.15/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/bindata-2.5.0/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/msgpack-1.7.2")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/msgpack-1.7.2/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/extensions/arm64-darwin-20/#{Gem.extension_api_version}/bootsnap-1.18.3")

View File

@ -1,94 +0,0 @@
module BinData
# WARNING: THIS IS UNSUPPORTED!!
#
# This was a (failed) experimental feature that allowed seeking within the
# input stream. It remains here for backwards compatibility for the few
# people that used it.
#
# The official way to skip around the stream is to use BinData::Skip with
# the `:to_abs_offset` parameter.
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These parameters are:
#
# [<tt>:check_offset</tt>] Raise an error if the current IO offset doesn't
# meet this criteria. A boolean return indicates
# success or failure. Any other return is compared
# to the current offset. The variable +offset+
# is made available to any lambda assigned to
# this parameter. This parameter is only checked
# before reading.
# [<tt>:adjust_offset</tt>] Ensures that the current IO offset is at this
# position before reading. This is like
# <tt>:check_offset</tt>, except that it will
# adjust the IO offset instead of raising an error.
module CheckOrAdjustOffsetPlugin
# Registers both offset parameters on the host class; they may not be
# used together.
def self.included(base) #:nodoc:
base.optional_parameters :check_offset, :adjust_offset
base.mutually_exclusive_parameters :check_offset, :adjust_offset
end
# Mixes in the offset behaviour only when the matching parameter was
# supplied, so objects without these parameters pay no cost.
def initialize_shared_instance
extend CheckOffsetMixin if has_parameter?(:check_offset)
extend AdjustOffsetMixin if has_parameter?(:adjust_offset)
super
end
# Validates the stream offset before delegating the read.
module CheckOffsetMixin
def do_read(io) #:nodoc:
check_offset(io)
super(io)
end
#---------------
private
# Evaluates :check_offset with +offset+ bound to the actual IO offset.
# Raises ValidityError when the result is falsy, or when it is a
# non-true value that differs from the actual offset.
def check_offset(io)
actual_offset = io.offset
expected = eval_parameter(:check_offset, offset: actual_offset)
if !expected
raise ValidityError, "offset not as expected for #{debug_name}"
elsif actual_offset != expected && expected != true
raise ValidityError,
"offset is '#{actual_offset}' but " +
"expected '#{expected}' for #{debug_name}"
end
end
end
# Seeks the stream to the expected offset before delegating the read.
module AdjustOffsetMixin
def do_read(io) #:nodoc:
adjust_offset(io)
super(io)
end
#---------------
private
# Seeks forward (or backward) by the difference between the actual and
# the :adjust_offset position; warns when $VERBOSE is set. A failed
# seek is reported as a ValidityError.
def adjust_offset(io)
actual_offset = io.offset
expected = eval_parameter(:adjust_offset)
if actual_offset != expected
begin
seek = expected - actual_offset
io.seekbytes(seek)
warn "adjusting stream position by #{seek} bytes" if $VERBOSE
rescue
raise ValidityError,
"offset is '#{actual_offset}' but couldn't seek to " +
"expected '#{expected}' for #{debug_name}"
end
end
end
end
end
# Add these offset options to Base
class Base
include CheckOrAdjustOffsetPlugin
end
end

View File

@ -1,133 +0,0 @@
require "bindata/base_primitive"
module BinData
# Skip will skip over bytes from the input stream. If the stream is not
# seekable, then the bytes are consumed and discarded.
#
# When writing, skip will write the appropriate number of zero bytes.
#
# require 'bindata'
#
# class A < BinData::Record
# skip length: 5
# string :a, read_length: 5
# end
#
# obj = A.read("abcdefghij")
# obj.a #=> "fghij"
#
#
# class B < BinData::Record
# skip until_valid: [:string, {read_length: 2, assert: "ef"} ]
# string :b, read_length: 5
# end
#
# obj = B.read("abcdefghij")
# obj.b #=> "efghi"
#
#
# == Parameters
#
# Skip objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:length</tt>:: The number of bytes to skip.
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
# This parameter contains a type that will raise
# a BinData::ValidityError unless an acceptable byte
# sequence is found. The type is represented by a
# Symbol, or if the type is to have params
# passed to it, then it should be provided as
# <tt>[type_symbol, hash_params]</tt>.
#
class Skip < BinData::BasePrimitive
arg_processor :skip
optional_parameters :length, :to_abs_offset, :until_valid
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
# Mixes in the implementation matching whichever mutually exclusive
# skip parameter was supplied.
def initialize_shared_instance
extend SkipLengthPlugin if has_parameter?(:length)
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
super
end
#---------------
private
# Writes +skip_length+ zero bytes. A negative length (a backwards seek)
# raises ValidityError.
def value_to_binary_string(val)
len = skip_length
if len < 0
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
"\000" * skip_length
end
# Skips +skip_length+ bytes of the stream and returns the empty string.
# A negative length (a backwards seek) raises ValidityError.
def read_and_return_value(io)
len = skip_length
if len < 0
raise ValidityError, "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
io.seekbytes(len)
""
end
def sensible_default
""
end
end
# Ensures exactly the supported skip parameters are present and sanitizes
# them before object creation.
class SkipArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params)
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
raise ArgumentError,
"#{obj_class} requires either :length, :to_abs_offset or :until_valid"
end
params.must_be_integer(:to_abs_offset, :length)
params.sanitize_object_prototype(:until_valid)
end
end
# Logic for the :length parameter
module SkipLengthPlugin
def skip_length
eval_parameter(:length)
end
end
# Logic for the :to_abs_offset parameter
module SkipToAbsOffsetPlugin
def skip_length
eval_parameter(:to_abs_offset) - abs_offset
end
end
# Logic for the :until_valid parameter
module SkipUntilValidPlugin
def skip_length
# no skipping when writing
0
end
# Advances the stream one byte at a time until the :until_valid
# prototype reads without raising ValidityError; the matched bytes are
# left unconsumed via the readahead rollback.
def read_and_return_value(io)
prototype = get_parameter(:until_valid)
validator = prototype.instantiate(nil, self)
valid = false
until valid
begin
io.with_readahead do
validator.read(io)
valid = true
end
rescue ValidityError
io.readbytes(1)
end
end
end
end
end

View File

@ -1,3 +0,0 @@
module BinData
  # The released version of the BinData gem.
  VERSION = "2.4.15"
end

View File

@ -13,6 +13,7 @@ require 'bindata/int'
require 'bindata/primitive'
require 'bindata/record'
require 'bindata/rest'
require 'bindata/section'
require 'bindata/skip'
require 'bindata/string'
require 'bindata/stringz'

View File

@ -19,11 +19,11 @@ module BinData
def do_num_bytes; 0; end
def do_read(io)
io.reset_read_bits
io.readbytes(0)
end
def do_write(io)
io.flushbits
io.writebytes("")
end
end
@ -45,18 +45,26 @@ module BinData
def initialize(io)
@io = io
end
def binary_string(str)
str.to_s.dup.force_encoding(Encoding::BINARY)
end
def readbytes(n)
n.times.inject("") do |bytes, _|
bytes += @io.readbits(8, :big).chr
n.times.inject(binary_string("")) do |bytes, _|
bytes + @io.readbits(8, :big).chr
end
end
def writebytes(str)
str.each_byte { |v| @io.writebits(v, 8, :big) }
end
end
def bit_aligned?
true
end
def read_and_return_value(io)
def do_read(io)
super(BitAlignedIO.new(io))
end
@ -65,7 +73,7 @@ module BinData
end
def do_write(io)
value_to_binary_string(_value).each_byte { |v| io.writebits(v, 8, :big) }
super(BitAlignedIO.new(io))
end
end
@ -74,6 +82,6 @@ module BinData
end
def Primitive.bit_aligned
fail "'bit_aligned' is not needed for BinData::Primitives"
fail "'bit_aligned' is not supported for BinData::Primitives"
end
end

View File

@ -72,18 +72,18 @@ module BinData
end
def initialize_instance
@element_list = nil
@elements = nil
end
def clear?
@element_list.nil? || elements.all?(&:clear?)
@elements.nil? || elements.all?(&:clear?)
end
def assign(array)
return if self.equal?(array) # prevent self assignment
raise ArgumentError, "can't set a nil value for #{debug_name}" if array.nil?
@element_list = []
@elements = []
concat(array)
end
@ -220,23 +220,23 @@ module BinData
elements.each { |el| yield el }
end
def debug_name_of(child) #:nodoc:
def debug_name_of(child) # :nodoc:
index = find_index_of(child)
"#{debug_name}[#{index}]"
end
def offset_of(child) #:nodoc:
def offset_of(child) # :nodoc:
index = find_index_of(child)
sum = sum_num_bytes_below_index(index)
child.bit_aligned? ? sum.floor : sum.ceil
end
def do_write(io) #:nodoc:
def do_write(io) # :nodoc:
elements.each { |el| el.do_write(io) }
end
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
sum_num_bytes_for_all_elements
end
@ -251,7 +251,7 @@ module BinData
end
def elements
@element_list ||= []
@elements ||= []
end
def append_new_element
@ -279,10 +279,55 @@ module BinData
end
end
end
# Logic for the :read_until parameter
module ReadUntilPlugin
def do_read(io)
loop do
element = append_new_element
element.do_read(io)
variables = { index: self.length - 1, element: self.last, array: self }
break if eval_parameter(:read_until, variables)
end
end
end
# Logic for the read_until: :eof parameter
module ReadUntilEOFPlugin
def do_read(io)
loop do
element = append_new_element
begin
element.do_read(io)
rescue EOFError, IOError
elements.pop
break
end
end
end
end
# Logic for the :initial_length parameter
module InitialLengthPlugin
def do_read(io)
elements.each { |el| el.do_read(io) }
end
def elements
if @elements.nil?
@elements = []
eval_parameter(:initial_length).times do
@elements << new_element
end
end
@elements
end
end
end
class ArrayArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params) #:nodoc:
def sanitize_parameters!(obj_class, params) # :nodoc:
# ensure one of :initial_length and :read_until exists
unless params.has_at_least_one_of?(:initial_length, :read_until)
params[:initial_length] = 0
@ -296,49 +341,4 @@ module BinData
params.sanitize_object_prototype(:type)
end
end
# Logic for the :read_until parameter
module ReadUntilPlugin
def do_read(io)
loop do
element = append_new_element
element.do_read(io)
variables = { index: self.length - 1, element: self.last, array: self }
break if eval_parameter(:read_until, variables)
end
end
end
# Logic for the read_until: :eof parameter
module ReadUntilEOFPlugin
def do_read(io)
loop do
element = append_new_element
begin
element.do_read(io)
rescue EOFError, IOError
elements.pop
break
end
end
end
end
# Logic for the :initial_length parameter
module InitialLengthPlugin
def do_read(io)
elements.each { |el| el.do_read(io) }
end
def elements
if @element_list.nil?
@element_list = []
eval_parameter(:initial_length).times do
@element_list << new_element
end
end
@element_list
end
end
end

View File

@ -17,7 +17,7 @@ module BinData
# Instantiates this class and reads from +io+, returning the newly
# created data object. +args+ will be used when instantiating.
def read(io, *args, &block)
obj = self.new(*args)
obj = new(*args)
obj.read(io, &block)
obj
end
@ -48,7 +48,7 @@ module BinData
end
# Registers all subclasses of this class for use
def register_subclasses #:nodoc:
def register_subclasses # :nodoc:
singleton_class.send(:undef_method, :inherited)
define_singleton_method(:inherited) do |subclass|
RegisteredClasses.register(subclass.name, subclass)
@ -90,6 +90,8 @@ module BinData
# Creates a new data object based on this instance.
#
# This implements the prototype design pattern.
#
# All parameters will be be duplicated. Use this method
# when creating multiple objects with the same parameters.
def new(value = nil, parent = nil)
@ -117,8 +119,8 @@ module BinData
end
# Returns a lazy evaluator for this object.
def lazy_evaluator #:nodoc:
@lazy ||= LazyEvaluator.new(self)
def lazy_evaluator # :nodoc:
@lazy_evaluator ||= LazyEvaluator.new(self)
end
# Returns the parameter referenced by +key+.
@ -177,7 +179,7 @@ module BinData
# Returns the hexadecimal string representation of this data object.
def to_hex(&block)
to_binary_s(&block).unpack('H*')[0]
to_binary_s(&block).unpack1('H*')
end
# Return a human readable representation of this data object.
@ -191,7 +193,7 @@ module BinData
end
# Work with Ruby's pretty-printer library.
def pretty_print(pp) #:nodoc:
def pretty_print(pp) # :nodoc:
pp.pp(snapshot)
end
@ -202,40 +204,28 @@ module BinData
# Returns a user friendly name of this object for debugging purposes.
def debug_name
if @parent
@parent.debug_name_of(self)
else
"obj"
end
@parent ? @parent.debug_name_of(self) : 'obj'
end
# Returns the offset (in bytes) of this object with respect to its most
# distant ancestor.
def abs_offset
if @parent
@parent.abs_offset + @parent.offset_of(self)
else
0
end
@parent ? @parent.abs_offset + @parent.offset_of(self) : 0
end
# Returns the offset (in bytes) of this object with respect to its parent.
def rel_offset
if @parent
@parent.offset_of(self)
else
0
end
@parent ? @parent.offset_of(self) : 0
end
def ==(other) #:nodoc:
def ==(other) # :nodoc:
# double dispatch
other == snapshot
end
# A version of +respond_to?+ used by the lazy evaluator. It doesn't
# reinvoke the evaluator so as to avoid infinite evaluation loops.
def safe_respond_to?(symbol, include_private = false) #:nodoc:
def safe_respond_to?(symbol, include_private = false) # :nodoc:
base_respond_to?(symbol, include_private)
end
@ -329,7 +319,6 @@ module BinData
# Performs sanity checks on the given parameters.
# This method converts the parameters to the form expected
# by the data object.
def sanitize_parameters!(obj_class, obj_params)
end
def sanitize_parameters!(obj_class, obj_params); end
end
end

View File

@ -65,7 +65,7 @@ module BinData
@value = nil
end
def clear? #:nodoc:
def clear? # :nodoc:
@value.nil?
end
@ -73,13 +73,7 @@ module BinData
raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil?
raw_val = val.respond_to?(:snapshot) ? val.snapshot : val
@value =
begin
raw_val.dup
rescue TypeError
# can't dup Fixnums
raw_val
end
@value = raw_val.dup
end
def snapshot
@ -94,18 +88,19 @@ module BinData
assign(val)
end
def respond_to?(symbol, include_private = false) #:nodoc:
def respond_to_missing?(symbol, include_all = false) # :nodoc:
child = snapshot
child.respond_to?(symbol, include_private) || super
child.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
child = snapshot
if child.respond_to?(symbol)
self.class.class_eval \
"def #{symbol}(*args, &block);" \
" snapshot.#{symbol}(*args, &block);" \
"end"
self.class.class_eval <<-END, __FILE__, __LINE__ + 1
def #{symbol}(*args, &block) # def clamp(*args, &block)
snapshot.#{symbol}(*args, &block) # snapshot.clamp(*args, &block)
end # end
END
child.__send__(symbol, *args, &block)
else
super
@ -125,15 +120,15 @@ module BinData
snapshot.hash
end
def do_read(io) #:nodoc:
def do_read(io) # :nodoc:
@value = read_and_return_value(io)
end
def do_write(io) #:nodoc:
def do_write(io) # :nodoc:
io.writebytes(value_to_binary_string(_value))
end
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
value_to_binary_string(_value).length
end
@ -172,7 +167,7 @@ module BinData
assert!
end
def do_read(io) #:nodoc:
def do_read(io) # :nodoc:
super(io)
assert!
end
@ -205,7 +200,16 @@ module BinData
reading? ? @value : eval_parameter(:asserted_value)
end
def do_read(io) #:nodoc:
# The asserted value as a binary string.
#
# Rationale: while reading, +#to_binary_s+ will use the
# value read in, rather than the +:asserted_value+.
# This feature is used by Skip.
def asserted_binary_s
value_to_binary_string(eval_parameter(:asserted_value))
end
def do_read(io) # :nodoc:
super(io)
assert!
end

View File

@ -5,7 +5,7 @@ module BinData
# Defines a number of classes that contain a bit based integer.
# The integer is defined by endian and number of bits.
module BitField #:nodoc: all
module BitField # :nodoc: all
@@mutex = Mutex.new
class << self
@ -156,10 +156,10 @@ module BinData
# Create classes for dynamic bitfields
{
"Bit" => :big,
"BitLe" => :little,
"Sbit" => [:big, :signed],
"SbitLe" => [:little, :signed],
'Bit' => :big,
'BitLe' => :little,
'Sbit' => [:big, :signed],
'SbitLe' => [:little, :signed]
}.each_pair { |name, args| BitField.define_class(name, :nbits, *args) }
# Create classes on demand

View File

@ -41,7 +41,7 @@ module BinData
# end
# end
# end
#
#
#
# == Parameters
#
@ -80,29 +80,107 @@ module BinData
@type.snapshot
end
def respond_to?(symbol, include_private = false) #:nodoc:
@type.respond_to?(symbol, include_private) || super
def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block)
end
def do_read(io) #:nodoc:
io.with_buffer(eval_parameter(:length)) do
@type.do_read(io)
def do_read(io) # :nodoc:
buf_len = eval_parameter(:length)
io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_read(transformed_io)
end
end
def do_write(io) #:nodoc:
io.with_buffer(eval_parameter(:length)) do
@type.do_write(io)
def do_write(io) # :nodoc:
buf_len = eval_parameter(:length)
io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_write(transformed_io)
end
end
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
eval_parameter(:length)
end
# Transforms the IO stream to restrict access inside
# a buffer of specified length.
class BufferIO < IO::Transform
def initialize(length)
super()
@bytes_remaining = length
end
def before_transform
@buf_start = offset
@buf_end = @buf_start + @bytes_remaining
end
def num_bytes_remaining
[@bytes_remaining, super].min
rescue IOError
@bytes_remaining
end
def skip(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_skip(nbytes)
end
def seek_abs(n)
if n < @buf_start || n >= @buf_end
raise IOError, "can not seek to abs_offset outside of buffer"
end
@bytes_remaining -= (n - offset)
chain_seek_abs(n)
end
def read(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_read(nbytes)
end
def write(data)
nbytes = buffer_limited_n(data.size)
@bytes_remaining -= nbytes
if nbytes < data.size
data = data[0, nbytes]
end
chain_write(data)
end
def after_read_transform
read(nil)
end
def after_write_transform
write("\x00" * @bytes_remaining)
end
def buffer_limited_n(n)
if n.nil?
@bytes_remaining
elsif n.positive?
limit = @bytes_remaining
n > limit ? limit : n
# uncomment if we decide to allow backwards skipping
# elsif n.negative?
# limit = @bytes_remaining + @buf_start - @buf_end
# n < limit ? limit : n
else
0
end
end
end
end
class BufferArgProcessor < BaseArgProcessor

View File

@ -82,18 +82,19 @@ module BinData
if selection.nil?
raise IndexError, ":selection returned nil for #{debug_name}"
end
selection
end
def respond_to?(symbol, include_private = false) #:nodoc:
current_choice.respond_to?(symbol, include_private) || super
def respond_to?(symbol, include_all = false) # :nodoc:
current_choice.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
current_choice.__send__(symbol, *args, &block)
end
%w(clear? assign snapshot do_read do_write do_num_bytes).each do |m|
%w[clear? assign snapshot do_read do_write do_num_bytes].each do |m|
module_eval <<-END
def #{m}(*args)
current_choice.#{m}(*args)
@ -112,14 +113,16 @@ module BinData
def instantiate_choice(selection)
prototype = get_parameter(:choices)[selection]
if prototype.nil?
raise IndexError, "selection '#{selection}' does not exist in :choices for #{debug_name}"
msg = "selection '#{selection}' does not exist in :choices for #{debug_name}"
raise IndexError, msg
end
prototype.instantiate(nil, self)
end
end
class ChoiceArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params) #:nodoc:
def sanitize_parameters!(obj_class, params) # :nodoc:
params.merge!(obj_class.dsl_params)
params.sanitize_choices(:choices) do |choices|

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive"
require 'bindata/base_primitive'
module BinData
# Counts the number of bytes remaining in the input stream from the current

View File

@ -83,11 +83,11 @@ module BinData
@type.num_bytes
end
def respond_to?(symbol, include_private = false) #:nodoc:
@type.respond_to?(symbol, include_private) || super
def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block)
end
@ -104,20 +104,20 @@ module BinData
abs_offset
end
def do_read(io) #:nodoc:
def do_read(io) # :nodoc:
@read_io = io
end
def do_write(io) #:nodoc:
def do_write(io) # :nodoc:
@write_io = io
end
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
0
end
def include_obj?
! has_parameter?(:onlyif) || eval_parameter(:onlyif)
!has_parameter?(:onlyif) || eval_parameter(:onlyif)
end
# DelayedIO objects aren't read when #read is called.
@ -126,7 +126,7 @@ module BinData
return unless include_obj?
raise IOError, "read from where?" unless @read_io
@read_io.seekbytes(abs_offset - @read_io.offset)
@read_io.seek_to_abs_offset(abs_offset)
start_read do
@type.do_read(@read_io)
end
@ -138,7 +138,7 @@ module BinData
return unless include_obj?
raise IOError, "write to where?" unless @write_io
@write_io.seekbytes(abs_offset - @write_io.offset)
@write_io.seek_to_abs_offset(abs_offset)
@type.do_write(@write_io)
end
end
@ -153,8 +153,8 @@ module BinData
end
end
# Add +auto_call_delayed_io+ keyword to BinData::Base.
class Base
# Add +auto_call_delayed_io+ keyword to BinData::Base.
class << self
# The +auto_call_delayed_io+ keyword sets a data object tree to perform
# multi pass I/O automatically.

View File

@ -42,7 +42,7 @@ module BinData
end
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
dsl_parser.__send__(symbol, *args, &block)
end
@ -131,7 +131,8 @@ module BinData
choice: [:to_choice_params, :choices, [:multiple_fields, :all_or_none_fieldnames, :fieldnames_are_values]],
delayed_io: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames, :hidden_fields]],
primitive: [:to_struct_params, :struct, [:multiple_fields, :optional_fieldnames]],
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]],
section: [:to_object_params, :type, [:multiple_fields, :optional_fieldnames]],
skip: [:to_object_params, :until_valid, [:multiple_fields, :optional_fieldnames]]
}
end
@ -182,21 +183,21 @@ module BinData
begin
@validator.validate_field(parser.name)
append_field(parser.type, parser.name, parser.params)
rescue Exception => err
dsl_raise err.class, err.message
rescue Exception => e
dsl_raise e.class, e.message
end
end
def append_field(type, name, params)
fields.add_field(type, name, params)
rescue BinData::UnRegisteredTypeError => err
raise TypeError, "unknown type '#{err.message}'"
rescue BinData::UnRegisteredTypeError => e
raise TypeError, "unknown type '#{e.message}'"
end
def parent_attribute(attr, default = nil)
parent = @the_class.superclass
parser = parent.respond_to?(:dsl_parser) ? parent.dsl_parser : nil
if parser && parser.respond_to?(attr)
if parser&.respond_to?(attr)
parser.send(attr)
else
default
@ -205,7 +206,7 @@ module BinData
def dsl_raise(exception, msg)
backtrace = caller
backtrace.shift while %r{bindata/dsl.rb} =~ backtrace.first
backtrace.shift while %r{bindata/dsl.rb}.match?(backtrace.first)
raise exception, "#{msg} in #{@the_class}", backtrace
end
@ -215,9 +216,9 @@ module BinData
when 0
{}
when 1
{key => fields[0].prototype}
{ key => fields[0].prototype }
else
{key=> [:struct, to_struct_params]}
{ key => [:struct, to_struct_params] }
end
end
@ -225,16 +226,16 @@ module BinData
if fields.empty?
{}
elsif fields.all_field_names_blank?
{key => fields.collect(&:prototype)}
{ key => fields.collect(&:prototype) }
else
choices = {}
fields.each { |f| choices[f.name] = f.prototype }
{key => choices}
{ key => choices }
end
end
def to_struct_params(*unused)
result = {fields: fields}
def to_struct_params(*_)
result = { fields: fields }
if !endian.nil?
result[:endian] = endian
end
@ -274,7 +275,7 @@ module BinData
def override_new_in_class(bnl_class)
endian_classes = {
big: class_with_endian(bnl_class, :big),
little: class_with_endian(bnl_class, :little),
little: class_with_endian(bnl_class, :little)
}
bnl_class.define_singleton_method(:new) do |*args|
if self == bnl_class
@ -290,7 +291,7 @@ module BinData
def delegate_field_creation(bnl_class)
endian_classes = {
big: class_with_endian(bnl_class, :big),
little: class_with_endian(bnl_class, :little),
little: class_with_endian(bnl_class, :little)
}
parser = bnl_class.dsl_parser
@ -302,28 +303,28 @@ module BinData
def fixup_subclass_hierarchy(bnl_class)
parent = bnl_class.superclass
if obj_attribute(parent, :endian) == :big_and_little
be_subclass = class_with_endian(bnl_class, :big)
be_parent = class_with_endian(parent, :big)
be_fields = obj_attribute(be_parent, :fields)
return if obj_attribute(parent, :endian) != :big_and_little
le_subclass = class_with_endian(bnl_class, :little)
le_parent = class_with_endian(parent, :little)
le_fields = obj_attribute(le_parent, :fields)
be_subclass = class_with_endian(bnl_class, :big)
be_parent = class_with_endian(parent, :big)
be_fields = obj_attribute(be_parent, :fields)
be_subclass.dsl_parser.define_singleton_method(:parent_fields) do
be_fields
end
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
le_fields
end
le_subclass = class_with_endian(bnl_class, :little)
le_parent = class_with_endian(parent, :little)
le_fields = obj_attribute(le_parent, :fields)
be_subclass.dsl_parser.define_singleton_method(:parent_fields) do
be_fields
end
le_subclass.dsl_parser.define_singleton_method(:parent_fields) do
le_fields
end
end
def class_with_endian(class_name, endian)
hints = {
endian: endian,
search_prefix: class_name.dsl_parser.search_prefix,
search_prefix: class_name.dsl_parser.search_prefix
}
RegisteredClasses.lookup(class_name, hints)
end
@ -377,8 +378,9 @@ module BinData
buffer: BinData::Buffer,
choice: BinData::Choice,
delayed_io: BinData::DelayedIO,
section: BinData::Section,
skip: BinData::Skip,
struct: BinData::Struct,
struct: BinData::Struct
}
if bindata_classes.include?(@type)
@ -457,7 +459,7 @@ module BinData
end
def malformed_name?(name)
/^[a-z_]\w*$/ !~ name.to_s
!/^[a-z_]\w*$/.match?(name.to_s)
end
def duplicate_name?(name)

View File

@ -4,7 +4,7 @@ module BinData
# Defines a number of classes that contain a floating point number.
# The float is defined by precision and endian.
module FloatingPoint #:nodoc: all
module FloatingPoint # :nodoc: all
class << self
PRECISION = {
single: 4,
@ -15,7 +15,7 @@ module BinData
[:single, :little] => 'e',
[:single, :big] => 'g',
[:double, :little] => 'E',
[:double, :big] => 'G',
[:double, :big] => 'G'
}
def define_methods(float_class, precision, endian)
@ -49,7 +49,7 @@ module BinData
nbytes = PRECISION[precision]
unpack = PACK_CODE[[precision, endian]]
"io.readbytes(#{nbytes}).unpack('#{unpack}').at(0)"
"io.readbytes(#{nbytes}).unpack1('#{unpack}')"
end
def create_to_binary_s_code(precision, endian)

View File

@ -1,14 +1,13 @@
module BinData
# Error raised when unexpected results occur when reading data from IO.
class ValidityError < StandardError ; end
class ValidityError < StandardError; end
# All methods provided by the framework are to be implemented or overridden
# by subclasses of BinData::Base.
module Framework
# Initializes the state of the object. All instance variables that
# are used by the object must be initialized here.
def initialize_instance
end
def initialize_instance; end
# Initialises state that is shared by objects with the same parameters.
#
@ -16,8 +15,7 @@ module BinData
# variables set here, and changes to the singleton class will be shared
# between all objects that are initialized with the same parameters.
# This method is called only once for a particular set of parameters.
def initialize_shared_instance
end
def initialize_shared_instance; end
# Returns true if the object has not been changed since creation.
def clear?
@ -37,13 +35,13 @@ module BinData
# Returns the debug name of +child+. This only needs to be implemented
# by objects that contain child objects.
def debug_name_of(child) #:nodoc:
def debug_name_of(child) # :nodoc:
debug_name
end
# Returns the offset of +child+. This only needs to be implemented
# by objects that contain child objects.
def offset_of(child) #:nodoc:
def offset_of(child) # :nodoc:
0
end
@ -53,17 +51,17 @@ module BinData
end
# Reads the data for this data object from +io+.
def do_read(io) #:nodoc:
def do_read(io) # :nodoc:
raise NotImplementedError
end
# Writes the value for this data to +io+.
def do_write(io) #:nodoc:
def do_write(io) # :nodoc:
raise NotImplementedError
end
# Returns the number of bytes it will take to write this data.
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
raise NotImplementedError
end

View File

@ -5,7 +5,7 @@ module BinData
# Defines a number of classes that contain an integer. The integer
# is defined by endian, signedness and number of bytes.
module Int #:nodoc: all
module Int # :nodoc: all
@@mutex = Mutex.new
class << self
@ -85,7 +85,7 @@ module BinData
"io.readbytes(1).ord"
else
unpack_str = create_read_unpack_code(nbits, endian, signed)
assemble_str = create_read_assemble_code(nbits, endian, signed)
assemble_str = create_read_assemble_code(nbits, endian)
"(#{unpack_str} ; #{assemble_str})"
end
@ -98,7 +98,7 @@ module BinData
"ints = io.readbytes(#{nbytes}).unpack('#{pack_directive}')"
end
def create_read_assemble_code(nbits, endian, signed)
def create_read_assemble_code(nbits, endian)
nwords = nbits / bits_per_word(nbits)
idx = (0...nwords).to_a
@ -117,7 +117,7 @@ module BinData
return "(val & 0xff).chr" if nbits == 8
pack_directive = pack_directive(nbits, endian, signed)
words = val_as_packed_words(nbits, endian, signed)
words = val_as_packed_words(nbits, endian)
pack_str = "[#{words}].pack('#{pack_directive}')"
if need_signed_conversion_code?(nbits, signed)
@ -127,7 +127,7 @@ module BinData
end
end
def val_as_packed_words(nbits, endian, signed)
def val_as_packed_words(nbits, endian)
nwords = nbits / bits_per_word(nbits)
mask = (1 << bits_per_word(nbits)) - 1
@ -136,7 +136,7 @@ module BinData
vals.reverse! if (endian == :big)
vals = vals.collect { |val| "#{val} & #{mask}" } # TODO: "& mask" is needed to work around jruby bug. Remove this line when fixed.
vals.join(",")
vals.join(',')
end
def create_int2uint_code(nbits)
@ -157,10 +157,10 @@ module BinData
def pack_directive(nbits, endian, signed)
nwords = nbits / bits_per_word(nbits)
directives = { 8 => "C", 16 => "S", 32 => "L", 64 => "Q" }
directives = { 8 => 'C', 16 => 'S', 32 => 'L', 64 => 'Q' }
d = directives[bits_per_word(nbits)]
d += ((endian == :big) ? ">" : "<") unless d == "C"
d += ((endian == :big) ? '>' : '<') unless d == 'C'
if signed == :signed && directives.key?(nbits)
(d * nwords).downcase
@ -193,7 +193,7 @@ module BinData
/^Uint(\d+)be$/ => [:big, :unsigned],
/^Uint(\d+)le$/ => [:little, :unsigned],
/^Int(\d+)be$/ => [:big, :signed],
/^Int(\d+)le$/ => [:little, :signed],
/^Int(\d+)le$/ => [:little, :signed]
}
mappings.each_pair do |regex, args|

View File

@ -5,217 +5,10 @@ module BinData
# interface for BinData objects to use when accessing the IO.
module IO
# Common operations for both Read and Write.
module Common
def initialize(io)
if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@raw_io = io
@buffer_end_points = nil
extend seekable? ? SeekableStream : UnSeekableStream
stream_init
end
#-------------
private
def seekable?
@raw_io.pos
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
nil
end
def seek(n)
seek_raw(buffer_limited_n(n))
end
def buffer_limited_n(n)
if @buffer_end_points
if n.nil? || n > 0
max = @buffer_end_points[1] - offset
n = max if n.nil? || n > max
else
min = @buffer_end_points[0] - offset
n = min if n < min
end
end
n
end
def with_buffer_common(n)
prev = @buffer_end_points
if prev
avail = prev[1] - offset
n = avail if n > avail
end
@buffer_end_points = [offset, offset + n]
begin
yield(*@buffer_end_points)
ensure
@buffer_end_points = prev
end
end
# Use #seek and #pos on seekable streams
module SeekableStream
# The number of bytes remaining in the input stream.
def num_bytes_remaining
start_mark = @raw_io.pos
@raw_io.seek(0, ::IO::SEEK_END)
end_mark = @raw_io.pos
if @buffer_end_points
if @buffer_end_points[1] < end_mark
end_mark = @buffer_end_points[1]
end
end
bytes_remaining = end_mark - start_mark
@raw_io.seek(start_mark, ::IO::SEEK_SET)
bytes_remaining
end
# All io calls in +block+ are rolled back after this
# method completes.
def with_readahead
mark = @raw_io.pos
begin
yield
ensure
@raw_io.seek(mark, ::IO::SEEK_SET)
end
end
#-----------
private
def stream_init
@initial_pos = @raw_io.pos
end
def offset_raw
@raw_io.pos - @initial_pos
end
def seek_raw(n)
@raw_io.seek(n, ::IO::SEEK_CUR)
end
def read_raw(n)
@raw_io.read(n)
end
def write_raw(data)
@raw_io.write(data)
end
end
# Manually keep track of offset for unseekable streams.
module UnSeekableStream
def offset_raw
@offset
end
# The number of bytes remaining in the input stream.
def num_bytes_remaining
raise IOError, "stream is unseekable"
end
# All io calls in +block+ are rolled back after this
# method completes.
def with_readahead
mark = @offset
@read_data = ""
@in_readahead = true
class << self
alias_method :read_raw_without_readahead, :read_raw
alias_method :read_raw, :read_raw_with_readahead
end
begin
yield
ensure
@offset = mark
@in_readahead = false
end
end
#-----------
private
def stream_init
@offset = 0
end
def read_raw(n)
data = @raw_io.read(n)
@offset += data.size if data
data
end
def read_raw_with_readahead(n)
data = ""
unless @read_data.empty? || @in_readahead
bytes_to_consume = [n, @read_data.length].min
data += @read_data.slice!(0, bytes_to_consume)
n -= bytes_to_consume
if @read_data.empty?
class << self
alias_method :read_raw, :read_raw_without_readahead
end
end
end
raw_data = @raw_io.read(n)
data += raw_data if raw_data
if @in_readahead
@read_data += data
end
@offset += data.size
data
end
def write_raw(data)
@offset += data.size
@raw_io.write(data)
end
def seek_raw(n)
raise IOError, "stream is unseekable" if n < 0
# NOTE: how do we seek on a writable stream?
# skip over data in 8k blocks
while n > 0
bytes_to_read = [n, 8192].min
read_raw(bytes_to_read)
n -= bytes_to_read
end
end
end
end
# Creates a StringIO around +str+.
def self.create_string_io(str = "")
s = StringIO.new(str.dup.force_encoding(Encoding::BINARY))
s.binmode
s
bin_str = str.dup.force_encoding(Encoding::BINARY)
StringIO.new(bin_str).tap(&:binmode)
end
# Create a new IO Read wrapper around +io+. +io+ must provide #read,
@ -236,10 +29,17 @@ module BinData
# readbits(6), readbits(5) #=> [543210, a9876]
#
class Read
include Common
def initialize(io)
super(io)
if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@io = RawIO.new(io)
# bits when reading
@rnbits = 0
@ -247,25 +47,38 @@ module BinData
@rendian = nil
end
# Sets a buffer of +n+ bytes on the io stream. Any reading or seeking
# calls inside the +block+ will be contained within this buffer.
def with_buffer(n)
with_buffer_common(n) do
yield
read
end
# Allow transforming data in the input stream.
# See +BinData::Buffer+ as an example.
#
# +io+ must be an instance of +Transform+.
#
# yields +self+ and +io+ to the given block
def transform(io)
reset_read_bits
saved = @io
@io = io.prepend_to_chain(@io)
yield(self, io)
io.after_read_transform
ensure
@io = saved
end
# Returns the current offset of the io stream. Offset will be rounded
# up when reading bitfields.
def offset
offset_raw
# The number of bytes remaining in the io steam.
def num_bytes_remaining
@io.num_bytes_remaining
end
# Seek +n+ bytes from the current position in the io stream.
def seekbytes(n)
def skipbytes(n)
reset_read_bits
seek(n)
@io.skip(n)
end
# Seek to an absolute offset within the io stream.
def seek_to_abs_offset(n)
reset_read_bits
@io.seek_abs(n)
end
# Reads exactly +n+ bytes from +io+.
@ -311,7 +124,7 @@ module BinData
private
def read(n = nil)
str = read_raw(buffer_limited_n(n))
str = @io.read(n)
if n
raise EOFError, "End of file reached" if str.nil?
raise IOError, "data truncated" if str.size < n
@ -332,7 +145,7 @@ module BinData
end
def accumulate_big_endian_bits
byte = read(1).unpack('C').at(0) & 0xff
byte = read(1).unpack1('C') & 0xff
@rval = (@rval << 8) | byte
@rnbits += 8
end
@ -350,7 +163,7 @@ module BinData
end
def accumulate_little_endian_bits
byte = read(1).unpack('C').at(0) & 0xff
byte = read(1).unpack1('C') & 0xff
@rval = @rval | (byte << @rnbits)
@rnbits += 8
end
@ -368,36 +181,46 @@ module BinData
#
# See IO::Read for more information.
class Write
include Common
def initialize(io)
super(io)
if self.class === io
raise ArgumentError, "io must not be a #{self.class}"
end
# wrap strings in a StringIO
if io.respond_to?(:to_str)
io = BinData::IO.create_string_io(io.to_str)
end
@io = RawIO.new(io)
@wnbits = 0
@wval = 0
@wendian = nil
end
# Sets a buffer of +n+ bytes on the io stream. Any writes inside the
# +block+ will be contained within this buffer. If less than +n+ bytes
# are written inside the block, the remainder will be padded with '\0'
# bytes.
def with_buffer(n)
with_buffer_common(n) do |_buf_start, buf_end|
yield
write("\0" * (buf_end - offset))
end
end
# Returns the current offset of the io stream. Offset will be rounded
# up when writing bitfields.
def offset
offset_raw + (@wnbits > 0 ? 1 : 0)
end
# Seek +n+ bytes from the current position in the io stream.
def seekbytes(n)
# Allow transforming data in the output stream.
# See +BinData::Buffer+ as an example.
#
# +io+ must be an instance of +Transform+.
#
# yields +self+ and +io+ to the given block
def transform(io)
flushbits
seek(n)
saved = @io
@io = io.prepend_to_chain(@io)
yield(self, io)
io.after_write_transform
ensure
@io = saved
end
# Seek to an absolute offset within the io stream.
def seek_to_abs_offset(n)
raise IOError, "stream is unseekable" unless @io.seekable?
flushbits
@io.seek_abs(n)
end
# Writes the given string of bytes to the io stream.
@ -438,12 +261,7 @@ module BinData
private
def write(data)
n = buffer_limited_n(data.size)
if n < data.size
data = data[0, n]
end
write_raw(data)
@io.write(data)
end
def write_big_endian_bits(val, nbits)
@ -492,5 +310,210 @@ module BinData
(1 << nbits) - 1
end
end
# API used to access the raw data stream.
class RawIO
def initialize(io)
@io = io
@pos = 0
if is_seekable?(io)
@initial_pos = io.pos
else
singleton_class.prepend(UnSeekableIO)
end
end
def is_seekable?(io)
io.pos
rescue NoMethodError, Errno::ESPIPE, Errno::EPIPE, Errno::EINVAL
nil
end
def seekable?
true
end
def num_bytes_remaining
start_mark = @io.pos
@io.seek(0, ::IO::SEEK_END)
end_mark = @io.pos
@io.seek(start_mark, ::IO::SEEK_SET)
end_mark - start_mark
end
def offset
@pos
end
def skip(n)
raise IOError, "can not skip backwards" if n.negative?
@io.seek(n, ::IO::SEEK_CUR)
@pos += n
end
def seek_abs(n)
@io.seek(n + @initial_pos, ::IO::SEEK_SET)
@pos = n
end
def read(n)
@io.read(n).tap { |data| @pos += (data&.size || 0) }
end
def write(data)
@io.write(data)
end
end
# An IO stream may be transformed before processing.
# e.g. encoding, compression, buffered.
#
# Multiple transforms can be chained together.
#
# To create a new transform layer, subclass +Transform+.
# Override the public methods +#read+ and +#write+ at a minimum.
# Additionally the hook, +#before_transform+, +#after_read_transform+
# and +#after_write_transform+ are available as well.
#
# IMPORTANT! If your transform changes the size of the underlying
# data stream (e.g. compression), then call
# +::transform_changes_stream_length!+ in your subclass.
class Transform
class << self
# Indicates that this transform changes the length of the
# underlying data. e.g. performs compression or error correction
def transform_changes_stream_length!
prepend(UnSeekableIO)
end
end
def initialize
@chain_io = nil
end
# Initialises this transform.
#
# Called before any IO operations.
def before_transform; end
# Flushes the input stream.
#
# Called after the final read operation.
def after_read_transform; end
# Flushes the output stream.
#
# Called after the final write operation.
def after_write_transform; end
# Prepends this transform to the given +chain+.
#
# Returns self (the new head of chain).
def prepend_to_chain(chain)
@chain_io = chain
before_transform
self
end
# Is the IO seekable?
def seekable?
@chain_io.seekable?
end
# How many bytes are available for reading?
def num_bytes_remaining
chain_num_bytes_remaining
end
# The current offset within the stream.
def offset
chain_offset
end
# Skips forward +n+ bytes in the input stream.
def skip(n)
chain_skip(n)
end
# Seeks to the given absolute position.
def seek_abs(n)
chain_seek_abs(n)
end
# Reads +n+ bytes from the stream.
def read(n)
chain_read(n)
end
# Writes +data+ to the stream.
def write(data)
chain_write(data)
end
#-------------
private
def create_empty_binary_string
"".force_encoding(Encoding::BINARY)
end
def chain_seekable?
@chain_io.seekable?
end
def chain_num_bytes_remaining
@chain_io.num_bytes_remaining
end
def chain_offset
@chain_io.offset
end
def chain_skip(n)
@chain_io.skip(n)
end
def chain_seek_abs(n)
@chain_io.seek_abs(n)
end
def chain_read(n)
@chain_io.read(n)
end
def chain_write(data)
@chain_io.write(data)
end
end
# A module to be prepended to +RawIO+ or +Transform+ when the data
# stream is not seekable. This is either due to underlying stream
# being unseekable or the transform changes the number of bytes.
module UnSeekableIO
def seekable?
false
end
def num_bytes_remaining
raise IOError, "stream is unseekable"
end
def skip(n)
raise IOError, "can not skip backwards" if n.negative?
# skip over data in 8k blocks
while n > 0
bytes_to_read = [n, 8192].min
read(bytes_to_read)
n -= bytes_to_read
end
end
def seek_abs(n)
skip(n - offset)
end
end
end
end

View File

@ -13,7 +13,7 @@ module BinData
# </pre></code>
module RegisterNamePlugin
def self.included(base) #:nodoc:
def self.included(base) # :nodoc:
# The registered name may be provided explicitly.
base.optional_parameter :name
end

View File

@ -27,7 +27,7 @@ module BinData
alias optional_parameter optional_parameters
alias default_parameter default_parameters
def accepted_parameters #:nodoc:
def accepted_parameters # :nodoc:
@accepted_parameters ||= begin
ancestor_params = superclass.respond_to?(:accepted_parameters) ?
superclass.accepted_parameters : nil
@ -114,13 +114,15 @@ module BinData
end
end
def self.invalid_parameter_names
@invalid_names ||= begin
all_names = LazyEvaluator.instance_methods(true)
allowed_names = [:name, :type]
invalid_names = (all_names - allowed_names).uniq
class << self
def invalid_parameter_names
@invalid_parameter_names ||= begin
all_names = LazyEvaluator.instance_methods(true)
allowed_names = [:name, :type]
invalid_names = (all_names - allowed_names).uniq
Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten]
Hash[*invalid_names.collect { |key| [key.to_sym, true] }.flatten]
end
end
end
end

View File

@ -73,11 +73,11 @@ module BinData
@struct = BinData::Struct.new(get_parameter(:struct_params), self)
end
def respond_to?(symbol, include_private = false) #:nodoc:
def respond_to?(symbol, include_private = false) # :nodoc:
@struct.respond_to?(symbol, include_private) || super
end
def method_missing(symbol, *args, &block) #:nodoc:
def method_missing(symbol, *args, &block) # :nodoc:
if @struct.respond_to?(symbol)
@struct.__send__(symbol, *args, &block)
else
@ -91,7 +91,7 @@ module BinData
@value = get
end
def debug_name_of(child) #:nodoc:
def debug_name_of(child) # :nodoc:
debug_name + "-internal-"
end

View File

@ -1,6 +1,6 @@
module BinData
class UnRegisteredTypeError < StandardError ; end
# Raised when #lookup fails.
class UnRegisteredTypeError < StandardError; end
# This registry contains a register of name -> class mappings.
#
@ -18,7 +18,6 @@ module BinData
#
# Names are stored in under_score_style, not camelCase.
class Registry
def initialize
@registry = {}
end
@ -49,13 +48,13 @@ module BinData
# Convert CamelCase +name+ to underscore style.
def underscore_name(name)
name.
to_s.
sub(/.*::/, "").
gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
gsub(/([a-z\d])([A-Z])/, '\1_\2').
tr("-", "_").
downcase
name
.to_s
.sub(/.*::/, "")
.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
.gsub(/([a-z\d])([A-Z])/, '\1_\2')
.tr('-', '_')
.downcase
end
#---------------
@ -65,7 +64,7 @@ module BinData
name = underscore_name(name)
if !registered?(name)
search_prefix = [""].concat(Array(hints[:search_prefix]))
search_prefix = [""] + Array(hints[:search_prefix])
search_prefix.each do |prefix|
nwp = name_with_prefix(name, prefix)
if registered?(nwp)
@ -85,7 +84,7 @@ module BinData
end
def name_with_prefix(name, prefix)
prefix = prefix.to_s.chomp("_")
prefix = prefix.to_s.chomp('_')
if prefix == ""
name
else
@ -96,11 +95,11 @@ module BinData
def name_with_endian(name, endian)
return name if endian.nil?
suffix = (endian == :little) ? "le" : "be"
if /^u?int\d+$/ =~ name
suffix = (endian == :little) ? 'le' : 'be'
if /^u?int\d+$/.match?(name)
name + suffix
else
name + "_" + suffix
name + '_' + suffix
end
end
@ -111,9 +110,10 @@ module BinData
end
def register_dynamic_class(name)
if /^u?int\d+(le|be)$/ =~ name || /^s?bit\d+(le)?$/ =~ name
if /^u?int\d+(le|be)$/.match?(name) || /^s?bit\d+(le)?$/.match?(name)
class_name = name.gsub(/(?:^|_)(.)/) { $1.upcase }
begin
# call const_get for side effects
BinData.const_get(class_name)
rescue NameError
end
@ -122,8 +122,8 @@ module BinData
def warn_if_name_is_already_registered(name, class_to_register)
prev_class = @registry[name]
if $VERBOSE && prev_class && prev_class != class_to_register
warn "warning: replacing registered class #{prev_class} " \
if prev_class && prev_class != class_to_register
Kernel.warn "warning: replacing registered class #{prev_class} " \
"with #{class_to_register}"
end
end

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive"
require 'bindata/base_primitive'
module BinData
# Rest will consume the input stream from the current position to the end of

View File

@ -49,14 +49,10 @@ module BinData
@prototype = SanitizedPrototype.new(field_type, field_params, hints)
end
attr_reader :prototype
attr_reader :prototype, :name
def name_as_sym
@name.nil? ? nil : @name.to_sym
end
def name
@name
@name&.to_sym
end
def has_parameter?(param)
@ -74,11 +70,7 @@ module BinData
def initialize(hints, base_fields = nil)
@hints = hints
if base_fields
@fields = base_fields.raw_fields
else
@fields = []
end
@fields = base_fields ? base_fields.raw_fields : []
end
def add_field(type, name, params)
@ -179,7 +171,6 @@ module BinData
# is to recursively sanitize the parameters of an entire BinData object chain
# at a single time.
class SanitizedParameters < Hash
# Memoized constants
BIG_ENDIAN = SanitizedBigEndian.new
LITTLE_ENDIAN = SanitizedLittleEndian.new
@ -210,7 +201,7 @@ module BinData
sanitize!
end
alias_method :has_parameter?, :key?
alias has_parameter? key?
def has_at_least_one_of?(*keys)
keys.each do |key|
@ -257,7 +248,9 @@ module BinData
end
def sanitize_object_prototype(key)
sanitize(key) { |obj_type, obj_params| create_sanitized_object_prototype(obj_type, obj_params) }
sanitize(key) do |obj_type, obj_params|
create_sanitized_object_prototype(obj_type, obj_params)
end
end
def sanitize_fields(key, &block)
@ -306,7 +299,7 @@ module BinData
end
def needs_sanitizing?(key)
has_key?(key) && ! self[key].is_a?(SanitizedParameter)
has_parameter?(key) && !self[key].is_a?(SanitizedParameter)
end
def ensure_no_nil_values
@ -320,7 +313,7 @@ module BinData
def merge_default_parameters!
@the_class.default_parameters.each do |key, value|
self[key] = value unless has_key?(key)
self[key] = value unless has_parameter?(key)
end
end

View File

@ -0,0 +1,97 @@
require 'bindata/base'
require 'bindata/dsl'
module BinData
# A Section is a layer on top of a stream that transforms the underlying
# data. This allows BinData to process a stream that has multiple
# encodings. e.g. Some data data is compressed or encrypted.
#
# require 'bindata'
#
# class XorTransform < BinData::IO::Transform
# def initialize(xor)
# super()
# @xor = xor
# end
#
# def read(n)
# chain_read(n).bytes.map { |byte| (byte ^ @xor).chr }.join
# end
#
# def write(data)
# chain_write(data.bytes.map { |byte| (byte ^ @xor).chr }.join)
# end
# end
#
# obj = BinData::Section.new(transform: -> { XorTransform.new(0xff) },
# type: [:string, read_length: 5])
#
# obj.read("\x97\x9A\x93\x93\x90") #=> "hello"
#
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params are:
#
# <tt>:transform</tt>:: A callable that returns a new BinData::IO::Transform.
# <tt>:type</tt>:: The single type inside the buffer. Use a struct if
# multiple fields are required.
class Section < BinData::Base
extend DSLMixin
dsl_parser :section
arg_processor :section
mandatory_parameters :transform, :type
def initialize_instance
@type = get_parameter(:type).instantiate(nil, self)
end
def clear?
@type.clear?
end
def assign(val)
@type.assign(val)
end
def snapshot
@type.snapshot
end
def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block)
end
def do_read(io) # :nodoc:
io.transform(eval_parameter(:transform)) do |transformed_io, _raw_io|
@type.do_read(transformed_io)
end
end
def do_write(io) # :nodoc:
io.transform(eval_parameter(:transform)) do |transformed_io, _raw_io|
@type.do_write(transformed_io)
end
end
def do_num_bytes # :nodoc:
to_binary_s.size
end
end
class SectionArgProcessor < BaseArgProcessor
include MultiFieldArgSeparator
def sanitize_parameters!(obj_class, params)
params.merge!(obj_class.dsl_params)
params.sanitize_object_prototype(:type)
end
end
end

View File

@ -0,0 +1,222 @@
require 'bindata/base_primitive'
require 'bindata/dsl'
module BinData
# Skip will skip over bytes from the input stream. If the stream is not
# seekable, then the bytes are consumed and discarded.
#
# When writing, skip will write the appropriate number of zero bytes.
#
# require 'bindata'
#
# class A < BinData::Record
# skip length: 5
# string :a, read_length: 5
# end
#
# obj = A.read("abcdefghij")
# obj.a #=> "fghij"
#
#
# class B < BinData::Record
# skip do
# string read_length: 2, assert: 'ef'
# end
# string :s, read_length: 5
# end
#
# obj = B.read("abcdefghij")
# obj.s #=> "efghi"
#
#
# == Parameters
#
# Skip objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:length</tt>:: The number of bytes to skip.
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
# This parameter contains a type that will raise
# a BinData::ValidityError unless an acceptable byte
# sequence is found. The type is represented by a
# Symbol, or if the type is to have params
# passed to it, then it should be provided as
# <tt>[type_symbol, hash_params]</tt>.
#
class Skip < BinData::BasePrimitive
extend DSLMixin
dsl_parser :skip
arg_processor :skip
optional_parameters :length, :to_abs_offset, :until_valid
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid
def initialize_shared_instance
extend SkipLengthPlugin if has_parameter?(:length)
extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
extend SkipUntilValidPlugin if has_parameter?(:until_valid)
super
end
#---------------
private
def value_to_binary_string(_)
len = skip_length
if len.negative?
raise ArgumentError,
"#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
"\000" * skip_length
end
def read_and_return_value(io)
len = skip_length
if len.negative?
raise ArgumentError,
"#{debug_name} attempted to seek backwards by #{len.abs} bytes"
end
io.skipbytes(len)
""
end
def sensible_default
""
end
# Logic for the :length parameter
module SkipLengthPlugin
def skip_length
eval_parameter(:length)
end
end
# Logic for the :to_abs_offset parameter
module SkipToAbsOffsetPlugin
def skip_length
eval_parameter(:to_abs_offset) - abs_offset
end
end
# Logic for the :until_valid parameter
module SkipUntilValidPlugin
def skip_length
@skip_length ||= 0
end
def read_and_return_value(io)
prototype = get_parameter(:until_valid)
validator = prototype.instantiate(nil, self)
fs = fast_search_for_obj(validator)
io.transform(ReadaheadIO.new) do |transformed_io, raw_io|
pos = 0
loop do
seek_to_pos(pos, raw_io)
validator.clear
validator.do_read(transformed_io)
break
rescue ValidityError
pos += 1
if fs
seek_to_pos(pos, raw_io)
pos += next_search_index(raw_io, fs)
end
end
seek_to_pos(pos, raw_io)
@skip_length = pos
end
end
def seek_to_pos(pos, io)
io.rollback
io.skip(pos)
end
# A fast search has a pattern string at a specific offset.
FastSearch = ::Struct.new('FastSearch', :pattern, :offset)
def fast_search_for(obj)
if obj.respond_to?(:asserted_binary_s)
FastSearch.new(obj.asserted_binary_s, obj.rel_offset)
else
nil
end
end
# If a search object has an +asserted_value+ field then we
# perform a faster search for a valid object.
def fast_search_for_obj(obj)
if BinData::Struct === obj
obj.each_pair(true) do |_, field|
fs = fast_search_for(field)
return fs if fs
end
elsif BinData::BasePrimitive === obj
return fast_search_for(obj)
end
nil
end
SEARCH_SIZE = 100_000
def next_search_index(io, fs)
buffer = binary_string("")
# start searching at fast_search offset
pos = fs.offset
io.skip(fs.offset)
loop do
data = io.read(SEARCH_SIZE)
raise EOFError, "no match" if data.nil?
buffer << data
index = buffer.index(fs.pattern)
if index
return pos + index - fs.offset
end
# advance buffer
searched = buffer.slice!(0..-fs.pattern.size)
pos += searched.size
end
end
class ReadaheadIO < BinData::IO::Transform
def before_transform
if !seekable?
raise IOError, "readahead is not supported on unseekable streams"
end
@mark = offset
end
def rollback
seek_abs(@mark)
end
end
end
end
class SkipArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params)
params.merge!(obj_class.dsl_params)
unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
raise ArgumentError,
"#{obj_class} requires :length, :to_abs_offset or :until_valid"
end
params.must_be_integer(:to_abs_offset, :length)
params.sanitize_object_prototype(:until_valid)
end
end
end

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive"
require 'bindata/base_primitive'
module BinData
# A String is a sequence of bytes. This is the same as strings in Ruby 1.8.
@ -121,6 +121,14 @@ module BinData
def sensible_default
""
end
# Warns when reading if :value && no :read_length
module WarnNoReadLengthPlugin
def read_and_return_value(io)
Kernel.warn "#{debug_name} does not have a :read_length parameter - returning empty string"
""
end
end
end
class StringArgProcessor < BaseArgProcessor
@ -142,12 +150,4 @@ module BinData
pad_byte
end
end
# Warns when reading if :value && no :read_length
module WarnNoReadLengthPlugin
def read_and_return_value(io)
warn "#{debug_name} does not have a :read_length parameter - returning empty string"
""
end
end
end

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive"
require 'bindata/base_primitive'
module BinData
# A BinData::Stringz object is a container for a zero ("\0") terminated
@ -25,7 +25,6 @@ module BinData
# <tt>:max_length</tt>:: The maximum length of the string including the zero
# byte.
class Stringz < BinData::BasePrimitive
optional_parameters :max_length
def assign(val)
@ -47,14 +46,14 @@ module BinData
def read_and_return_value(io)
max_length = eval_parameter(:max_length)
str = ""
str = binary_string("")
i = 0
ch = nil
# read until zero byte or we have read in the max number of bytes
while ch != "\0" && i != max_length
ch = io.readbytes(1)
str += ch
str << ch
i += 1
end
@ -66,9 +65,15 @@ module BinData
end
def trim_and_zero_terminate(str)
max_length = eval_parameter(:max_length)
if max_length && max_length < 1
msg = "max_length must be >= 1 in #{debug_name} (got #{max_length})"
raise ArgumentError, msg
end
result = binary_string(str)
truncate_after_first_zero_byte!(result)
trim_to!(result, eval_parameter(:max_length))
trim_to!(result, max_length)
append_zero_byte_if_needed!(result)
result
end
@ -79,16 +84,13 @@ module BinData
def trim_to!(str, max_length = nil)
if max_length
max_length = 1 if max_length < 1
str.slice!(max_length..-1)
if str.length == max_length && str[-1, 1] != "\0"
str[-1, 1] = "\0"
end
str[-1, 1] = "\0" if str.length == max_length
end
end
def append_zero_byte_if_needed!(str)
if str.length == 0 || str[-1, 1] != "\0"
if str.empty? || str[-1, 1] != "\0"
str << "\0"
end
end

View File

@ -2,7 +2,6 @@ require 'bindata/base'
require 'bindata/delayed_io'
module BinData
class Base
optional_parameter :onlyif, :byte_align # Used by Struct
end
@ -66,16 +65,18 @@ module BinData
RESERVED =
Hash[*
(Hash.instance_methods +
%w{alias and begin break case class def defined do else elsif
%w[alias and begin break case class def defined do else elsif
end ensure false for if in module next nil not or redo
rescue retry return self super then true undef unless until
when while yield} +
%w{array element index value} +
%w{type initial_length read_until} +
%w{fields endian search_prefix hide only_if byte_align} +
%w{choices selection copy_on_change} +
%w{read_abs_offset struct_params}).collect(&:to_sym).
uniq.collect { |key| [key, true] }.flatten
when while yield] +
%w[array element index value] +
%w[type initial_length read_until] +
%w[fields endian search_prefix hide onlyif byte_align] +
%w[choices selection copy_on_change] +
%w[read_abs_offset struct_params])
.collect(&:to_sym)
.uniq.collect { |key| [key, true] }
.flatten
]
def initialize_shared_instance
@ -90,11 +91,11 @@ module BinData
@field_objs = []
end
def clear #:nodoc:
@field_objs.each { |f| f.clear unless f.nil? }
def clear # :nodoc:
@field_objs.each { |f| f.nil? || f.clear }
end
def clear? #:nodoc:
def clear? # :nodoc:
@field_objs.all? { |f| f.nil? || f.clear? }
end
@ -124,28 +125,28 @@ module BinData
end
end
def debug_name_of(child) #:nodoc:
def debug_name_of(child) # :nodoc:
field_name = @field_names[find_index_of(child)]
"#{debug_name}.#{field_name}"
end
def offset_of(child) #:nodoc:
def offset_of(child) # :nodoc:
instantiate_all_objs
sum = sum_num_bytes_below_index(find_index_of(child))
child.bit_aligned? ? sum.floor : sum.ceil
end
def do_read(io) #:nodoc:
def do_read(io) # :nodoc:
instantiate_all_objs
@field_objs.each { |f| f.do_read(io) if include_obj_for_io?(f) }
end
def do_write(io) #:nodoc
def do_write(io) # :nodoc:
instantiate_all_objs
@field_objs.each { |f| f.do_write(io) if include_obj_for_io?(f) }
end
def do_num_bytes #:nodoc:
def do_num_bytes # :nodoc:
instantiate_all_objs
sum_num_bytes_for_all_fields
end
@ -155,19 +156,28 @@ module BinData
end
def []=(key, value)
obj = find_obj_for_name(key)
if obj
obj.assign(value)
end
find_obj_for_name(key)&.assign(value)
end
def key?(key)
@field_names.index(base_field_name(key))
end
def each_pair
@field_names.compact.each do |name|
yield [name, find_obj_for_name(name)]
# Calls the given block for each field_name-field_obj pair.
#
# Does not include anonymous or hidden fields unless
# +include_all+ is true.
def each_pair(include_all = false)
instantiate_all_objs
pairs = @field_names.zip(@field_objs).select do |name, _obj|
name || include_all
end
if block_given?
pairs.each { |el| yield(el) }
else
pairs.each
end
end
@ -205,8 +215,6 @@ module BinData
if index
instantiate_obj_at(index)
@field_objs[index]
else
nil
end
end
@ -243,7 +251,7 @@ module BinData
{}
else
hash = Snapshot.new
val.each_pair { |k,v| hash[k] = v }
val.each_pair { |k, v| hash[k] = v }
hash
end
end
@ -275,12 +283,12 @@ module BinData
end
# A hash that can be accessed via attributes.
class Snapshot < ::Hash #:nodoc:
class Snapshot < ::Hash # :nodoc:
def []=(key, value)
super unless value.nil?
end
def respond_to?(symbol, include_private = false)
def respond_to_missing?(symbol, include_all = false)
key?(symbol) || super
end
@ -288,60 +296,71 @@ module BinData
key?(symbol) ? self[symbol] : super
end
end
end
# Align fields to a multiple of :byte_align
module ByteAlignPlugin
def do_read(io)
initial_offset = io.offset
instantiate_all_objs
@field_objs.each do |f|
if include_obj?(f)
# Align fields to a multiple of :byte_align
module ByteAlignPlugin
def do_read(io)
offset = 0
instantiate_all_objs
@field_objs.each do |f|
next unless include_obj?(f)
if align_obj?(f)
io.seekbytes(bytes_to_align(f, io.offset - initial_offset))
nbytes = bytes_to_align(f, offset.ceil)
offset = offset.ceil + nbytes
io.readbytes(nbytes)
end
f.do_read(io)
nbytes = f.do_num_bytes
offset = (nbytes.is_a?(Integer) ? offset.ceil : offset) + nbytes
end
end
end
def do_write(io)
initial_offset = io.offset
instantiate_all_objs
@field_objs.each do |f|
if include_obj?(f)
def do_write(io)
offset = 0
instantiate_all_objs
@field_objs.each do |f|
next unless include_obj?(f)
if align_obj?(f)
io.writebytes("\x00" * bytes_to_align(f, io.offset - initial_offset))
nbytes = bytes_to_align(f, offset.ceil)
offset = offset.ceil + nbytes
io.writebytes("\x00" * nbytes)
end
f.do_write(io)
nbytes = f.do_num_bytes
offset = (nbytes.is_a?(Integer) ? offset.ceil : offset) + nbytes
end
end
end
def sum_num_bytes_below_index(index)
sum = 0
(0...@field_objs.length).each do |i|
obj = @field_objs[i]
if include_obj?(obj)
sum = sum.ceil + bytes_to_align(obj, sum.ceil) if align_obj?(obj)
def sum_num_bytes_below_index(index)
sum = 0
@field_objs.each_with_index do |obj, i|
next unless include_obj?(obj)
if align_obj?(obj)
sum = sum.ceil + bytes_to_align(obj, sum.ceil)
end
break if i >= index
nbytes = obj.do_num_bytes
sum = (nbytes.is_a?(Integer) ? sum.ceil : sum) + nbytes
end
sum
end
sum
end
def bytes_to_align(obj, rel_offset)
align = obj.eval_parameter(:byte_align)
(align - (rel_offset % align)) % align
end
def bytes_to_align(obj, rel_offset)
align = obj.eval_parameter(:byte_align)
(align - (rel_offset % align)) % align
end
def align_obj?(obj)
obj.has_parameter?(:byte_align)
def align_obj?(obj)
obj.has_parameter?(:byte_align)
end
end
end
@ -362,13 +381,11 @@ module BinData
def sanitize_search_prefix(params)
params.sanitize(:search_prefix) do |sprefix|
search_prefix = []
Array(sprefix).each do |prefix|
prefix = prefix.to_s.chomp("_")
search_prefix << prefix if prefix != ""
search_prefix = Array(sprefix).collect do |prefix|
prefix.to_s.chomp("_")
end
search_prefix
search_prefix - [""]
end
end

View File

@ -1,24 +1,4 @@
module BinData
# reference to the current tracer
@tracer ||= nil
class Tracer #:nodoc:
def initialize(io)
@trace_io = io
end
def trace(msg)
@trace_io.puts(msg)
end
def trace_obj(obj_name, val)
if val.length > 30
val = val.slice(0..30) + "..."
end
trace "#{obj_name} => #{val}"
end
end
# Turn on trace information when reading a BinData object.
# If +block+ is given then the tracing only occurs for that block.
@ -37,30 +17,55 @@ module BinData
end
end
def trace_message #:nodoc:
yield @tracer if @tracer
# reference to the current tracer
@tracer ||= nil
class Tracer # :nodoc:
def initialize(io)
@trace_io = io
end
def trace(msg)
@trace_io.puts(msg)
end
def trace_obj(obj_name, val)
if val.length > 30
val = val.slice(0..30) + "..."
end
trace "#{obj_name} => #{val}"
end
end
def trace_message # :nodoc:
yield @tracer
end
module_function :trace_reading, :trace_message
class BasePrimitive < BinData::Base
class << self
def turn_on_tracing
module TraceHook
def turn_on_tracing
if !method_defined? :do_read_without_hook
alias_method :do_read_without_hook, :do_read
alias_method :do_read, :do_read_with_hook
end
end
def turn_off_tracing
def turn_off_tracing
if method_defined? :do_read_without_hook
alias_method :do_read, :do_read_without_hook
remove_method :do_read_without_hook
end
end
end
class BasePrimitive < BinData::Base
extend TraceHook
def do_read_with_hook(io)
do_read_without_hook(io)
trace_value
end
def trace_value
BinData.trace_message do |tracer|
value_string = _value.inspect
tracer.trace_obj(debug_name, value_string)
@ -69,27 +74,15 @@ module BinData
end
class Choice < BinData::Base
class << self
def turn_on_tracing
alias_method :do_read_without_hook, :do_read
alias_method :do_read, :do_read_with_hook
end
def turn_off_tracing
alias_method :do_read, :do_read_without_hook
end
end
extend TraceHook
def do_read_with_hook(io)
trace_selection
do_read_without_hook(io)
end
def trace_selection
BinData.trace_message do |tracer|
selection_string = eval_parameter(:selection).inspect
tracer.trace_obj("#{debug_name}-selection-", selection_string)
end
do_read_without_hook(io)
end
end
end

View File

@ -0,0 +1,35 @@
require 'brotli'
module BinData
module Transform
# Transforms a brotli compressed data stream.
#
# gem install brotli
class Brotli < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Brotli::inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Brotli::deflate(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'extlz4'
module BinData
module Transform
# Transforms a LZ4 compressed data stream.
#
# gem install extlz4
class LZ4 < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::LZ4::decode(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::LZ4::encode(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'xz'
module BinData
module Transform
# Transforms a lzma compressed data stream.
#
# gem install ruby-xz
class Lzma < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end

View File

@ -0,0 +1,19 @@
module BinData
module Transform
# Transforms the data stream by xoring each byte.
class Xor < BinData::IO::Transform
def initialize(xor)
super()
@xor = xor
end
def read(n)
chain_read(n).bytes.map { |byte| (byte ^ @xor).chr }.join
end
def write(data)
chain_write(data.bytes.map { |byte| (byte ^ @xor).chr }.join)
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'xz'
module BinData
module Transform
# Transforms a xz compressed data stream.
#
# gem install ruby-xz
class XZ < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end

View File

@ -0,0 +1,33 @@
require 'zlib'
module BinData
module Transform
# Transforms a zlib compressed data stream.
class Zlib < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zlib::Inflate.inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zlib::Deflate.deflate(@write))
end
end
end
end

View File

@ -0,0 +1,35 @@
require 'zstd-ruby'
module BinData
module Transform
# Transforms a zstd compressed data stream.
#
# gem install zstd-ruby
class Zstd < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zstd::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zstd::compress(@write))
end
end
end
end

View File

@ -1,4 +1,4 @@
require "bindata/base_primitive"
require 'bindata/base_primitive'
module BinData
# Uint8Array is a specialised type of array that only contains
@ -49,7 +49,7 @@ module BinData
end
class Uint8ArrayArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params) #:nodoc:
def sanitize_parameters!(obj_class, params) # :nodoc:
# ensure one of :initial_length and :read_until exists
unless params.has_at_least_one_of?(:initial_length, :read_until)
params[:initial_length] = 0

View File

@ -0,0 +1,3 @@
module BinData
VERSION = '2.5.0'
end

View File

@ -1,4 +1,4 @@
require "bindata/base"
require 'bindata/base'
module BinData
# A virtual field is one that is neither read, written nor occupies space in
@ -15,7 +15,7 @@ module BinData
#
# obj = A.read("abcdeabcde")
# obj.a #=> "abcde"
# obj.c.offset #=> 10
# obj.c.rel_offset #=> 10
#
# obj = A.read("abcdeABCDE") #=> BinData::ValidityError: assertion failed for obj.c
#
@ -29,12 +29,9 @@ module BinData
# [<tt>:value</tt>] The virtual object will always have this value.
#
class Virtual < BinData::BasePrimitive
def do_read(io); end
def do_read(io)
end
def do_write(io)
end
def do_write(io); end
def do_num_bytes
0.0

View File

@ -13,7 +13,7 @@ module BinData
owner = method(:initialize).owner
if owner != BinData::Base
msg = "Don't override #initialize on #{owner}."
if %w(BinData::Base BinData::BasePrimitive).include? self.class.superclass.name
if %w[BinData::Base BinData::BasePrimitive].include? self.class.superclass.name
msg += "\nrename #initialize to #initialize_instance."
end
fail msg