brew vendor-gems: commit updates.

BrewTestBot 2023-01-24 18:07:34 +00:00
parent d09c079d58
commit 4d822cb771
No known key found for this signature in database
GPG Key ID: 82D7D104050B0F0F
137 changed files with 1261 additions and 1573 deletions

View File

@@ -23,7 +23,7 @@ kernel = (class << ::Kernel; self; end)
k.send(:private, :require) if private_require
end
end
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/concurrent-ruby-1.1.10/lib/concurrent-ruby")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/concurrent-ruby-1.2.0/lib/concurrent-ruby")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/i18n-1.12.0/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/minitest-5.17.0/lib")
$:.unshift File.expand_path("#{__dir__}/../#{RUBY_ENGINE}/#{Gem.ruby_api_version}/gems/tzinfo-2.0.5/lib")

View File

@@ -1,66 +0,0 @@
require 'concurrent/constants'
module Concurrent
# @!macro thread_local_var
# @!macro internal_implementation_note
# @!visibility private
class AbstractThreadLocalVar
# @!macro thread_local_var_method_initialize
def initialize(default = nil, &default_block)
if default && block_given?
raise ArgumentError, "Cannot use both value and block as default value"
end
if block_given?
@default_block = default_block
@default = nil
else
@default_block = nil
@default = default
end
allocate_storage
end
# @!macro thread_local_var_method_get
def value
raise NotImplementedError
end
# @!macro thread_local_var_method_set
def value=(value)
raise NotImplementedError
end
# @!macro thread_local_var_method_bind
def bind(value, &block)
if block_given?
old_value = self.value
begin
self.value = value
yield
ensure
self.value = old_value
end
end
end
protected
# @!visibility private
def allocate_storage
raise NotImplementedError
end
# @!visibility private
def default
if @default_block
self.value = @default_block.call
else
@default
end
end
end
end

View File

@@ -1,205 +0,0 @@
require 'concurrent/synchronization'
require 'concurrent/utility/engine'
require 'concurrent/atomic_reference/numeric_cas_wrapper'
# Shim for TruffleRuby::AtomicReference
if Concurrent.on_truffleruby? && !defined?(TruffleRuby::AtomicReference)
# @!visibility private
module TruffleRuby
AtomicReference = Truffle::AtomicReference
end
end
module Concurrent
# Define update methods that use direct paths
#
# @!visibility private
# @!macro internal_implementation_note
module AtomicDirectUpdate
# @!macro atomic_reference_method_update
#
# Pass the current value to the given block, replacing it
# with the block's result. May retry if the value changes
# during the block's execution.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @return [Object] the new value
def update
true until compare_and_set(old_value = get, new_value = yield(old_value))
new_value
end
# @!macro atomic_reference_method_try_update
#
# Pass the current value to the given block, replacing it
# with the block's result. Return nil if the update fails.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @note This method was altered to avoid raising an exception by default.
# Instead, this method now returns `nil` in case of failure. For more info,
# please see: https://github.com/ruby-concurrency/concurrent-ruby/pull/336
# @return [Object] the new value, or nil if update failed
def try_update
old_value = get
new_value = yield old_value
return unless compare_and_set old_value, new_value
new_value
end
# @!macro atomic_reference_method_try_update!
#
# Pass the current value to the given block, replacing it
# with the block's result. Raise an exception if the update
# fails.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @note This behavior mimics the behavior of the original
# `AtomicReference#try_update` API. The reason this was changed was to
# avoid raising exceptions (which are inherently slow) by default. For more
# info: https://github.com/ruby-concurrency/concurrent-ruby/pull/336
# @return [Object] the new value
# @raise [Concurrent::ConcurrentUpdateError] if the update fails
def try_update!
old_value = get
new_value = yield old_value
unless compare_and_set(old_value, new_value)
if $VERBOSE
raise ConcurrentUpdateError, "Update failed"
else
raise ConcurrentUpdateError, "Update failed", ConcurrentUpdateError::CONC_UP_ERR_BACKTRACE
end
end
new_value
end
end
require 'concurrent/atomic_reference/mutex_atomic'
# @!macro atomic_reference
#
# An object reference that may be updated atomically. All read and write
# operations have java volatile semantic.
#
# @!macro thread_safe_variable_comparison
#
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/package-summary.html
#
# @!method initialize(value = nil)
# @!macro atomic_reference_method_initialize
# @param [Object] value The initial value.
#
# @!method get
# @!macro atomic_reference_method_get
# Gets the current value.
# @return [Object] the current value
#
# @!method set(new_value)
# @!macro atomic_reference_method_set
# Sets to the given value.
# @param [Object] new_value the new value
# @return [Object] the new value
#
# @!method get_and_set(new_value)
# @!macro atomic_reference_method_get_and_set
# Atomically sets to the given value and returns the old value.
# @param [Object] new_value the new value
# @return [Object] the old value
#
# @!method compare_and_set(old_value, new_value)
# @!macro atomic_reference_method_compare_and_set
#
# Atomically sets the value to the given updated value if
# the current value == the expected value.
#
# @param [Object] old_value the expected value
# @param [Object] new_value the new value
#
# @return [Boolean] `true` if successful. A `false` return indicates
# that the actual value was not equal to the expected value.
#
# @!method update
# @!macro atomic_reference_method_update
#
# @!method try_update
# @!macro atomic_reference_method_try_update
#
# @!method try_update!
# @!macro atomic_reference_method_try_update!
# @!macro internal_implementation_note
class ConcurrentUpdateError < ThreadError
# frozen pre-allocated backtrace to speed ConcurrentUpdateError
CONC_UP_ERR_BACKTRACE = ['backtrace elided; set verbose to enable'].freeze
end
# @!macro internal_implementation_note
AtomicReferenceImplementation = case
when Concurrent.on_cruby? && Concurrent.c_extensions_loaded?
# @!visibility private
# @!macro internal_implementation_note
class CAtomicReference
include AtomicDirectUpdate
include AtomicNumericCompareAndSetWrapper
alias_method :compare_and_swap, :compare_and_set
end
CAtomicReference
when Concurrent.on_jruby?
# @!visibility private
# @!macro internal_implementation_note
class JavaAtomicReference
include AtomicDirectUpdate
end
JavaAtomicReference
when Concurrent.on_truffleruby?
class TruffleRubyAtomicReference < TruffleRuby::AtomicReference
include AtomicDirectUpdate
alias_method :value, :get
alias_method :value=, :set
alias_method :compare_and_swap, :compare_and_set
alias_method :swap, :get_and_set
end
TruffleRubyAtomicReference
when Concurrent.on_rbx?
# @note Extends `Rubinius::AtomicReference` version adding aliases
# and numeric logic.
#
# @!visibility private
# @!macro internal_implementation_note
class RbxAtomicReference < Rubinius::AtomicReference
alias_method :_compare_and_set, :compare_and_set
include AtomicDirectUpdate
include AtomicNumericCompareAndSetWrapper
alias_method :value, :get
alias_method :value=, :set
alias_method :swap, :get_and_set
alias_method :compare_and_swap, :compare_and_set
end
RbxAtomicReference
else
MutexAtomicReference
end
private_constant :AtomicReferenceImplementation
# @!macro atomic_reference
class AtomicReference < AtomicReferenceImplementation
# @return [String] Short string representation.
def to_s
format '%s value:%s>', super[0..-2], get
end
alias_method :inspect, :to_s
end
end

View File

@@ -1,37 +0,0 @@
require 'concurrent/atomic/abstract_thread_local_var'
if Concurrent.on_jruby?
module Concurrent
# @!visibility private
# @!macro internal_implementation_note
class JavaThreadLocalVar < AbstractThreadLocalVar
# @!macro thread_local_var_method_get
def value
value = @var.get
if value.nil?
default
elsif value == NULL
nil
else
value
end
end
# @!macro thread_local_var_method_set
def value=(value)
@var.set(value)
end
protected
# @!visibility private
def allocate_storage
@var = java.lang.ThreadLocal.new
end
end
end
end

View File

@@ -1,181 +0,0 @@
require 'thread'
require 'concurrent/atomic/abstract_thread_local_var'
module Concurrent
# @!visibility private
# @!macro internal_implementation_note
class RubyThreadLocalVar < AbstractThreadLocalVar
# Each thread has a (lazily initialized) array of thread-local variable values
# Each time a new thread-local var is created, we allocate an "index" for it
# For example, if the allocated index is 1, that means slot #1 in EVERY
# thread's thread-local array will be used for the value of that TLV
#
# The good thing about using a per-THREAD structure to hold values, rather
# than a per-TLV structure, is that no synchronization is needed when
# reading and writing those values (since the structure is only ever
# accessed by a single thread)
#
# Of course, when a TLV is GC'd, 1) we need to recover its index for use
# by other new TLVs (otherwise the thread-local arrays could get bigger
# and bigger with time), and 2) we need to null out all the references
# held in the now-unused slots (both to avoid blocking GC of those objects,
# and also to prevent "stale" values from being passed on to a new TLV
# when the index is reused)
# Because we need to null out freed slots, we need to keep references to
# ALL the thread-local arrays -- ARRAYS is for that
# But when a Thread is GC'd, we need to drop the reference to its thread-local
# array, so we don't leak memory
FREE = []
LOCK = Mutex.new
THREAD_LOCAL_ARRAYS = {} # used as a hash set
# synchronize when not on MRI
# on MRI using lock in finalizer leads to "can't be called from trap context" error
# so the code is carefully written to be thread-safe on MRI, relying on the GIL
if Concurrent.on_cruby?
# @!visibility private
def self.semi_sync(&block)
block.call
end
else
# @!visibility private
def self.semi_sync(&block)
LOCK.synchronize(&block)
end
end
private_constant :FREE, :LOCK, :THREAD_LOCAL_ARRAYS
# @!macro thread_local_var_method_get
def value
if (array = get_threadlocal_array)
value = array[@index]
if value.nil?
default
elsif value.equal?(NULL)
nil
else
value
end
else
default
end
end
# @!macro thread_local_var_method_set
def value=(value)
me = Thread.current
# We could keep the thread-local arrays in a hash, keyed by Thread
# But why? That would require locking
# Using Ruby's built-in thread-local storage is faster
unless (array = get_threadlocal_array(me))
array = set_threadlocal_array([], me)
self.class.semi_sync { THREAD_LOCAL_ARRAYS[array.object_id] = array }
ObjectSpace.define_finalizer(me, self.class.thread_finalizer(array.object_id))
end
array[@index] = (value.nil? ? NULL : value)
value
end
protected
# @!visibility private
def allocate_storage
@index = FREE.pop || next_index
ObjectSpace.define_finalizer(self, self.class.thread_local_finalizer(@index))
end
# @!visibility private
def self.thread_local_finalizer(index)
proc do
semi_sync do
# The cost of GC'ing a TLV is linear in the number of threads using TLVs
# But that is natural! More threads means more storage is used per TLV
# So naturally more CPU time is required to free more storage
#
# DO NOT use each_value which might conflict with new pair assignment
# into the hash in #value= method
THREAD_LOCAL_ARRAYS.values.each { |array| array[index] = nil }
# free index has to be published after the arrays are cleared
FREE.push(index)
end
end
end
# @!visibility private
def self.thread_finalizer(id)
proc do
semi_sync do
# The thread which used this thread-local array is now gone
# So don't hold onto a reference to the array (thus blocking GC)
THREAD_LOCAL_ARRAYS.delete(id)
end
end
end
private
# noinspection RubyClassVariableUsageInspection
@@next = 0
# noinspection RubyClassVariableUsageInspection
def next_index
LOCK.synchronize do
result = @@next
@@next += 1
result
end
end
if Thread.instance_methods.include?(:thread_variable_get)
def get_threadlocal_array(thread = Thread.current)
thread.thread_variable_get(:__threadlocal_array__)
end
def set_threadlocal_array(array, thread = Thread.current)
thread.thread_variable_set(:__threadlocal_array__, array)
end
else
def get_threadlocal_array(thread = Thread.current)
thread[:__threadlocal_array__]
end
def set_threadlocal_array(array, thread = Thread.current)
thread[:__threadlocal_array__] = array
end
end
# This exists only for use in testing
# @!visibility private
def value_for(thread)
if (array = get_threadlocal_array(thread))
value = array[@index]
if value.nil?
get_default
elsif value.equal?(NULL)
nil
else
value
end
else
get_default
end
end
# @!visibility private
def get_default
if @default_block
raise "Cannot use default_for with default block"
else
@default
end
end
end
end

View File

@@ -1,104 +0,0 @@
require 'concurrent/utility/engine'
require 'concurrent/atomic/ruby_thread_local_var'
require 'concurrent/atomic/java_thread_local_var'
module Concurrent
###################################################################
# @!macro thread_local_var_method_initialize
#
# Creates a thread local variable.
#
# @param [Object] default the default value when otherwise unset
# @param [Proc] default_block Optional block that gets called to obtain the
# default value for each thread
# @!macro thread_local_var_method_get
#
# Returns the value in the current thread's copy of this thread-local variable.
#
# @return [Object] the current value
# @!macro thread_local_var_method_set
#
# Sets the current thread's copy of this thread-local variable to the specified value.
#
# @param [Object] value the value to set
# @return [Object] the new value
# @!macro thread_local_var_method_bind
#
# Bind the given value to thread local storage during
# execution of the given block.
#
# @param [Object] value the value to bind
# @yield the operation to be performed with the bound variable
# @return [Object] the value
###################################################################
# @!macro thread_local_var_public_api
#
# @!method initialize(default = nil, &default_block)
# @!macro thread_local_var_method_initialize
#
# @!method value
# @!macro thread_local_var_method_get
#
# @!method value=(value)
# @!macro thread_local_var_method_set
#
# @!method bind(value, &block)
# @!macro thread_local_var_method_bind
###################################################################
# @!visibility private
# @!macro internal_implementation_note
ThreadLocalVarImplementation = case
when Concurrent.on_jruby?
JavaThreadLocalVar
else
RubyThreadLocalVar
end
private_constant :ThreadLocalVarImplementation
# @!macro thread_local_var
#
# A `ThreadLocalVar` is a variable where the value is different for each thread.
# Each variable may have a default value, but when you modify the variable only
# the current thread will ever see that change.
#
# @!macro thread_safe_variable_comparison
#
# @example
# v = ThreadLocalVar.new(14)
# v.value #=> 14
# v.value = 2
# v.value #=> 2
#
# @example
# v = ThreadLocalVar.new(14)
#
# t1 = Thread.new do
# v.value #=> 14
# v.value = 1
# v.value #=> 1
# end
#
# t2 = Thread.new do
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# end
#
# v.value #=> 14
#
# @see https://docs.oracle.com/javase/7/docs/api/java/lang/ThreadLocal.html Java ThreadLocal
#
# @!macro thread_local_var_public_api
class ThreadLocalVar < ThreadLocalVarImplementation
end
end

View File

@@ -1,32 +0,0 @@
require 'logger'
module Concurrent
module Concern
# Include where logging is needed
#
# @!visibility private
module Logging
include Logger::Severity
# Logs through {Concurrent.global_logger}; it can be overridden by setting @logger
# @param [Integer] level one of Logger::Severity constants
# @param [String] progname e.g. a path of an Actor
# @param [String, nil] message when nil block is used to generate the message
# @yieldreturn [String] a message
def log(level, progname, message = nil, &block)
#NOTE: Cannot require 'concurrent/configuration' above due to circular references.
# Assume that the gem has been initialized if we've gotten this far.
logger = if defined?(@logger) && @logger
@logger
else
Concurrent.global_logger
end
logger.call level, progname, message, &block
rescue => error
$stderr.puts "`Concurrent.configuration.logger` failed to log #{[level, progname, message, block]}\n" +
"#{error.message} (#{error.class})\n#{error.backtrace.join "\n"}"
end
end
end
end
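A hedged usage sketch of the private Concurrent::Concern::Logging mix-in shown above (this commit moves it into the expanded concurrent/concern/logging.rb later in the diff). MyWorker is a hypothetical example class; an including class logs through Concurrent.global_logger unless it sets @logger itself.

require 'concurrent'
require 'logger'

class MyWorker
  include Concurrent::Concern::Logging

  def run
    # WARN is visible with the default global logger (created at Logger::WARN)
    log(Logger::WARN, 'MyWorker#run', 'falling back to the slow path')
  end
end

MyWorker.new.run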

View File

@@ -1,30 +0,0 @@
require 'concurrent/utility/engine'
require 'concurrent/synchronization/abstract_object'
require 'concurrent/utility/native_extension_loader' # load native parts first
Concurrent.load_native_extensions
require 'concurrent/synchronization/mri_object'
require 'concurrent/synchronization/jruby_object'
require 'concurrent/synchronization/rbx_object'
require 'concurrent/synchronization/truffleruby_object'
require 'concurrent/synchronization/object'
require 'concurrent/synchronization/volatile'
require 'concurrent/synchronization/abstract_lockable_object'
require 'concurrent/synchronization/mutex_lockable_object'
require 'concurrent/synchronization/jruby_lockable_object'
require 'concurrent/synchronization/rbx_lockable_object'
require 'concurrent/synchronization/lockable_object'
require 'concurrent/synchronization/condition'
require 'concurrent/synchronization/lock'
module Concurrent
# {include:file:docs-source/synchronization.md}
# {include:file:docs-source/synchronization-notes.md}
module Synchronization
end
end

View File

@@ -1,45 +0,0 @@
module Concurrent
module Synchronization
if Concurrent.on_jruby? && Concurrent.java_extensions_loaded?
# @!visibility private
module JRubyAttrVolatile
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def attr_volatile(*names)
names.each do |name|
ivar = :"@volatile_#{name}"
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{name}
instance_variable_get_volatile(:#{ivar})
end
def #{name}=(value)
instance_variable_set_volatile(:#{ivar}, value)
end
RUBY
end
names.map { |n| [n, :"#{n}="] }.flatten
end
end
end
# @!visibility private
# @!macro internal_implementation_note
class JRubyObject < AbstractObject
include JRubyAttrVolatile
def initialize
# nothing to do
end
end
end
end
end

View File

@@ -1,44 +0,0 @@
module Concurrent
module Synchronization
# @!visibility private
module MriAttrVolatile
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def attr_volatile(*names)
names.each do |name|
ivar = :"@volatile_#{name}"
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{name}
#{ivar}
end
def #{name}=(value)
#{ivar} = value
end
RUBY
end
names.map { |n| [n, :"#{n}="] }.flatten
end
end
def full_memory_barrier
# relying on undocumented behavior of CRuby, GVL acquire has lock which ensures visibility of ivars
# https://github.com/ruby/ruby/blob/ruby_2_2/thread_pthread.c#L204-L211
end
end
# @!visibility private
# @!macro internal_implementation_note
class MriObject < AbstractObject
include MriAttrVolatile
def initialize
# nothing to do
end
end
end
end

View File

@@ -1,71 +0,0 @@
module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
class RbxLockableObject < AbstractLockableObject
safe_initialization!
def initialize(*defaults)
super(*defaults)
@__Waiters__ = []
@__owner__ = nil
end
def initialize_copy(other)
super
@__Waiters__ = []
@__owner__ = nil
end
protected
def synchronize(&block)
if @__owner__ == Thread.current
yield
else
result = nil
Rubinius.synchronize(self) do
begin
@__owner__ = Thread.current
result = yield
ensure
@__owner__ = nil
end
end
result
end
end
def ns_wait(timeout = nil)
wchan = Rubinius::Channel.new
begin
@__Waiters__.push wchan
Rubinius.unlock(self)
signaled = wchan.receive_timeout timeout
ensure
Rubinius.lock(self)
if !signaled && !@__Waiters__.delete(wchan)
# we timed out, but got signaled afterwards,
# so pass that signal on to the next waiter
@__Waiters__.shift << true unless @__Waiters__.empty?
end
end
self
end
def ns_signal
@__Waiters__.shift << true unless @__Waiters__.empty?
self
end
def ns_broadcast
@__Waiters__.shift << true until @__Waiters__.empty?
self
end
end
end
end

View File

@@ -1,49 +0,0 @@
module Concurrent
module Synchronization
# @!visibility private
module RbxAttrVolatile
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def attr_volatile(*names)
names.each do |name|
ivar = :"@volatile_#{name}"
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{name}
Rubinius.memory_barrier
#{ivar}
end
def #{name}=(value)
#{ivar} = value
Rubinius.memory_barrier
end
RUBY
end
names.map { |n| [n, :"#{n}="] }.flatten
end
end
def full_memory_barrier
# Rubinius instance variables are not volatile so we need to insert barrier
# TODO (pitr 26-Nov-2015): check comments like ^
Rubinius.memory_barrier
end
end
# @!visibility private
# @!macro internal_implementation_note
class RbxObject < AbstractObject
include RbxAttrVolatile
def initialize
# nothing to do
end
end
end
end

View File

@@ -1,47 +0,0 @@
module Concurrent
module Synchronization
# @!visibility private
module TruffleRubyAttrVolatile
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def attr_volatile(*names)
names.each do |name|
ivar = :"@volatile_#{name}"
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{name}
full_memory_barrier
#{ivar}
end
def #{name}=(value)
#{ivar} = value
full_memory_barrier
end
RUBY
end
names.map { |n| [n, :"#{n}="] }.flatten
end
end
def full_memory_barrier
TruffleRuby.full_memory_barrier
end
end
# @!visibility private
# @!macro internal_implementation_note
class TruffleRubyObject < AbstractObject
include TruffleRubyAttrVolatile
def initialize
# nothing to do
end
end
end
end

View File

@@ -1,36 +0,0 @@
module Concurrent
module Synchronization
# Volatile adds the attr_volatile class method when included.
#
# @example
# class Foo
# include Concurrent::Synchronization::Volatile
#
# attr_volatile :bar
#
# def initialize
# self.bar = 1
# end
# end
#
# foo = Foo.new
# foo.bar
# => 1
# foo.bar = 2
# => 2
Volatile = case
when Concurrent.on_cruby?
MriAttrVolatile
when Concurrent.on_jruby?
JRubyAttrVolatile
when Concurrent.on_rbx?
RbxAttrVolatile
when Concurrent.on_truffleruby?
TruffleRubyAttrVolatile
else
MriAttrVolatile
end
end
end

View File

@@ -1,50 +0,0 @@
require 'delegate'
require 'monitor'
module Concurrent
unless defined?(SynchronizedDelegator)
# This class provides a trivial way to synchronize all calls to a given object
# by wrapping it with a `Delegator` that performs `Monitor#enter/exit` calls
# around the delegated `#send`. Example:
#
# array = [] # not thread-safe on many impls
# array = SynchronizedDelegator.new([]) # thread-safe
#
# A simple `Monitor` provides a very coarse-grained way to synchronize a given
# object, in that it will cause synchronization for methods that have no need
# for it, but this is a trivial way to get thread-safety where none may exist
# currently on some implementations.
#
# This class is currently being considered for inclusion into stdlib, via
# https://bugs.ruby-lang.org/issues/8556
#
# @!visibility private
class SynchronizedDelegator < SimpleDelegator
def setup
@old_abort = Thread.abort_on_exception
Thread.abort_on_exception = true
end
def teardown
Thread.abort_on_exception = @old_abort
end
def initialize(obj)
__setobj__(obj)
@monitor = Monitor.new
end
def method_missing(method, *args, &block)
monitor = @monitor
begin
monitor.enter
super
ensure
monitor.exit
end
end
end
end
end
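The class above is marked @!visibility private, so rather than relying on it being loadable, here is a minimal standalone sketch of the same pattern it describes: a SimpleDelegator subclass whose method_missing wraps every delegated call in a Monitor. MonitorWrapped is a hypothetical name used only for illustration.

require 'delegate'
require 'monitor'

class MonitorWrapped < SimpleDelegator
  def initialize(obj)
    super(obj)
    @monitor = Monitor.new
  end

  def method_missing(method, *args, &block)
    @monitor.synchronize { super }  # coarse-grained: every call is serialized
  end

  def respond_to_missing?(method, include_private = false)
    __getobj__.respond_to?(method, include_private) || super
  end
end

array = MonitorWrapped.new([])  # a monitor-guarded wrapper around a plain Array
4.times.map { Thread.new { 1_000.times { array << 1 } } }.each(&:join)
puts array.size  #=> 4000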

View File

@@ -1,90 +0,0 @@
require 'concurrent/synchronization'
module Concurrent
# @!macro monotonic_get_time
#
# Returns the current time as tracked by the application monotonic clock.
#
# @param [Symbol] unit the time unit to be returned, can be either
# :float_second, :float_millisecond, :float_microsecond, :second,
# :millisecond, :microsecond, or :nanosecond. Defaults to :float_second.
#
# @return [Float] The current monotonic time since some unspecified
# starting point
#
# @!macro monotonic_clock_warning
if defined?(Process::CLOCK_MONOTONIC)
def monotonic_time(unit = :float_second)
Process.clock_gettime(Process::CLOCK_MONOTONIC, unit)
end
elsif Concurrent.on_jruby?
# @!visibility private
TIME_UNITS = Hash.new { |_hash, key| raise ArgumentError, "unexpected unit: #{key}" }.compare_by_identity
TIME_UNITS.merge!(
second: 1_000_000_000,
millisecond: 1_000_000,
microsecond: 1_000,
nanosecond: 1,
float_second: 1_000_000_000.0,
float_millisecond: 1_000_000.0,
float_microsecond: 1_000.0,
)
TIME_UNITS.freeze
private_constant :TIME_UNITS
def monotonic_time(unit = :float_second)
java.lang.System.nanoTime() / TIME_UNITS[unit]
end
else
class_definition = Class.new(Synchronization::LockableObject) do
def initialize
@last_time = Time.now.to_f
@time_units = Hash.new { |_hash, key| raise ArgumentError, "unexpected unit: #{key}" }.compare_by_identity
@time_units.merge!(
second: [nil, true],
millisecond: [1_000, true],
microsecond: [1_000_000, true],
nanosecond: [1_000_000_000, true],
float_second: [nil, false],
float_millisecond: [1_000.0, false],
float_microsecond: [1_000_000.0, false],
)
super()
end
# @!visibility private
def get_time(unit)
synchronize do
now = Time.now.to_f
if @last_time < now
@last_time = now
else # clock has moved back in time
@last_time += 0.000_001
end
scale, to_int = @time_units[unit]
now *= scale if scale
now = now.to_i if to_int
now
end
end
end
# Clock that cannot be set and represents monotonic time since
# some unspecified starting point.
#
# @!visibility private
GLOBAL_MONOTONIC_CLOCK = class_definition.new
private_constant :GLOBAL_MONOTONIC_CLOCK
def monotonic_time(unit = :float_second)
GLOBAL_MONOTONIC_CLOCK.get_time(unit)
end
end
module_function :monotonic_time
end
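A brief usage sketch of Concurrent.monotonic_time as documented above, assuming the method remains available after this update (it does in concurrent-ruby 1.2.0); the unit argument mirrors Process.clock_gettime.

require 'concurrent'

t0 = Concurrent.monotonic_time                # Float seconds by default
sleep 0.1
puts format('took %.3fs', Concurrent.monotonic_time - t0)

puts Concurrent.monotonic_time(:millisecond)  # Integer milliseconds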

View File

@@ -1,130 +0,0 @@
require 'etc'
require 'rbconfig'
require 'concurrent/delay'
module Concurrent
module Utility
# @!visibility private
class ProcessorCounter
def initialize
@processor_count = Delay.new { compute_processor_count }
@physical_processor_count = Delay.new { compute_physical_processor_count }
end
# Number of processors seen by the OS and used for process scheduling. For
# performance reasons the calculated value will be memoized on the first
# call.
#
# When running under JRuby the Java runtime call
# `java.lang.Runtime.getRuntime.availableProcessors` will be used. According
# to the Java documentation this "value may change during a particular
# invocation of the virtual machine... [applications] should therefore
# occasionally poll this property." Consequently the result will NOT be
# memoized under JRuby.
#
# Ruby's Etc.nprocessors will be used if available (MRI 2.2+).
#
# On Windows the Win32 API will be queried for the
# `NumberOfLogicalProcessors from Win32_Processor`. This will return the
# total number "logical processors for the current instance of the
# processor", which taked into account hyperthreading.
#
# * AIX: /usr/sbin/pmcycles (AIX 5+), /usr/sbin/lsdev
# * Alpha: /usr/bin/nproc (/proc/cpuinfo exists but cannot be used)
# * BSD: /sbin/sysctl
# * Cygwin: /proc/cpuinfo
# * Darwin: /usr/bin/hwprefs, /usr/sbin/sysctl
# * HP-UX: /usr/sbin/ioscan
# * IRIX: /usr/sbin/sysconf
# * Linux: /proc/cpuinfo
# * Minix 3+: /proc/cpuinfo
# * Solaris: /usr/sbin/psrinfo
# * Tru64 UNIX: /usr/sbin/psrinfo
# * UnixWare: /usr/sbin/psrinfo
#
# @return [Integer] number of processors seen by the OS or Java runtime
#
# @see https://github.com/grosser/parallel/blob/4fc8b89d08c7091fe0419ca8fba1ec3ce5a8d185/lib/parallel.rb
#
# @see http://docs.oracle.com/javase/6/docs/api/java/lang/Runtime.html#availableProcessors()
# @see http://msdn.microsoft.com/en-us/library/aa394373(v=vs.85).aspx
def processor_count
@processor_count.value
end
# Number of physical processor cores on the current system. For performance
# reasons the calculated value will be memoized on the first call.
#
# On Windows the Win32 API will be queried for the `NumberOfCores from
# Win32_Processor`. This will return the total number "of cores for the
# current instance of the processor." On Unix-like operating systems either
# the `hwprefs` or `sysctl` utility will be called in a subshell and the
# returned value will be used. In the rare case where none of these methods
# work or an exception is raised the function will simply return 1.
#
# @return [Integer] number of physical processor cores on the current system
#
# @see https://github.com/grosser/parallel/blob/4fc8b89d08c7091fe0419ca8fba1ec3ce5a8d185/lib/parallel.rb
#
# @see http://msdn.microsoft.com/en-us/library/aa394373(v=vs.85).aspx
# @see http://www.unix.com/man-page/osx/1/HWPREFS/
# @see http://linux.die.net/man/8/sysctl
def physical_processor_count
@physical_processor_count.value
end
private
def compute_processor_count
if Concurrent.on_jruby?
java.lang.Runtime.getRuntime.availableProcessors
else
Etc.nprocessors
end
end
def compute_physical_processor_count
ppc = case RbConfig::CONFIG["target_os"]
when /darwin\d\d/
IO.popen("/usr/sbin/sysctl -n hw.physicalcpu", &:read).to_i
when /linux/
cores = {} # unique physical ID / core ID combinations
phy = 0
IO.read("/proc/cpuinfo").scan(/^physical id.*|^core id.*/) do |ln|
if ln.start_with?("physical")
phy = ln[/\d+/]
elsif ln.start_with?("core")
cid = phy + ":" + ln[/\d+/]
cores[cid] = true if not cores[cid]
end
end
cores.count
when /mswin|mingw/
require 'win32ole'
result_set = WIN32OLE.connect("winmgmts://").ExecQuery(
"select NumberOfCores from Win32_Processor")
result_set.to_enum.collect(&:NumberOfCores).reduce(:+)
else
processor_count
end
# fall back to logical count if physical info is invalid
ppc > 0 ? ppc : processor_count
rescue
return 1
end
end
end
# create the default ProcessorCounter on load
@processor_counter = Utility::ProcessorCounter.new
singleton_class.send :attr_reader, :processor_counter
def self.processor_count
processor_counter.processor_count
end
def self.physical_processor_count
processor_counter.physical_processor_count
end
end
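Usage sketch for the ProcessorCounter accessors defined above; the public entry points remain Concurrent.processor_count and Concurrent.physical_processor_count.

require 'concurrent'

puts Concurrent.processor_count           # logical CPUs seen by the OS / Java runtime
puts Concurrent.physical_processor_count  # physical cores; falls back to 1 on error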

View File

@@ -1,9 +1,10 @@
require 'concurrent/configuration'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/atomic/count_down_latch'
require 'concurrent/atomic/thread_local_var'
require 'concurrent/collection/copy_on_write_observer_set'
require 'concurrent/concern/observable'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@@ -34,16 +34,6 @@ module Concurrent
end
JRubyArray
when Concurrent.on_rbx?
require 'monitor'
require 'concurrent/thread_safe/util/data_structures'
class RbxArray < ::Array
end
ThreadSafe::Util.make_synchronized_on_rbx RbxArray
RbxArray
when Concurrent.on_truffleruby?
require 'concurrent/thread_safe/util/data_structures'

View File

@@ -1,7 +1,7 @@
require 'concurrent/atomic/atomic_reference'
require 'concurrent/collection/copy_on_notify_observer_set'
require 'concurrent/concern/observable'
require 'concurrent/synchronization'
require 'concurrent/synchronization/object'
# @!macro thread_safe_variable_comparison
#

View File

@@ -1,5 +1,6 @@
require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/atomic/mutex_atomic_boolean'
require 'concurrent/synchronization'
module Concurrent
@@ -79,10 +80,10 @@ module Concurrent
# @!visibility private
# @!macro internal_implementation_note
AtomicBooleanImplementation = case
when defined?(JavaAtomicBoolean)
JavaAtomicBoolean
when defined?(CAtomicBoolean)
when Concurrent.on_cruby? && Concurrent.c_extensions_loaded?
CAtomicBoolean
when Concurrent.on_jruby?
JavaAtomicBoolean
else
MutexAtomicBoolean
end

View File

@@ -1,5 +1,6 @@
require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/atomic/mutex_atomic_fixnum'
require 'concurrent/synchronization'
module Concurrent
@@ -96,10 +97,10 @@ module Concurrent
# @!visibility private
# @!macro internal_implementation_note
AtomicFixnumImplementation = case
when defined?(JavaAtomicFixnum)
JavaAtomicFixnum
when defined?(CAtomicFixnum)
when Concurrent.on_cruby? && Concurrent.c_extensions_loaded?
CAtomicFixnum
when Concurrent.on_jruby?
JavaAtomicFixnum
else
MutexAtomicFixnum
end

View File

@@ -1,3 +1,6 @@
require 'concurrent/errors'
require 'concurrent/synchronization/object'
module Concurrent
# An atomic reference which maintains an object reference along with a mark bit
# that can be updated atomically.

View File

@@ -0,0 +1,135 @@
require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/atomic_reference/atomic_direct_update'
require 'concurrent/atomic_reference/numeric_cas_wrapper'
require 'concurrent/atomic_reference/mutex_atomic'
# Shim for TruffleRuby::AtomicReference
if Concurrent.on_truffleruby? && !defined?(TruffleRuby::AtomicReference)
# @!visibility private
module TruffleRuby
AtomicReference = Truffle::AtomicReference
end
end
module Concurrent
# @!macro internal_implementation_note
AtomicReferenceImplementation = case
when Concurrent.on_cruby? && Concurrent.c_extensions_loaded?
# @!visibility private
# @!macro internal_implementation_note
class CAtomicReference
include AtomicDirectUpdate
include AtomicNumericCompareAndSetWrapper
alias_method :compare_and_swap, :compare_and_set
end
CAtomicReference
when Concurrent.on_jruby?
# @!visibility private
# @!macro internal_implementation_note
class JavaAtomicReference
include AtomicDirectUpdate
end
JavaAtomicReference
when Concurrent.on_truffleruby?
class TruffleRubyAtomicReference < TruffleRuby::AtomicReference
include AtomicDirectUpdate
alias_method :value, :get
alias_method :value=, :set
alias_method :compare_and_swap, :compare_and_set
alias_method :swap, :get_and_set
end
TruffleRubyAtomicReference
else
MutexAtomicReference
end
private_constant :AtomicReferenceImplementation
# An object reference that may be updated atomically. All read and write
# operations have java volatile semantic.
#
# @!macro thread_safe_variable_comparison
#
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/package-summary.html
#
# @!method initialize(value = nil)
# @!macro atomic_reference_method_initialize
# @param [Object] value The initial value.
#
# @!method get
# @!macro atomic_reference_method_get
# Gets the current value.
# @return [Object] the current value
#
# @!method set(new_value)
# @!macro atomic_reference_method_set
# Sets to the given value.
# @param [Object] new_value the new value
# @return [Object] the new value
#
# @!method get_and_set(new_value)
# @!macro atomic_reference_method_get_and_set
# Atomically sets to the given value and returns the old value.
# @param [Object] new_value the new value
# @return [Object] the old value
#
# @!method compare_and_set(old_value, new_value)
# @!macro atomic_reference_method_compare_and_set
#
# Atomically sets the value to the given updated value if
# the current value == the expected value.
#
# @param [Object] old_value the expected value
# @param [Object] new_value the new value
#
# @return [Boolean] `true` if successful. A `false` return indicates
# that the actual value was not equal to the expected value.
#
# @!method update
# Pass the current value to the given block, replacing it
# with the block's result. May retry if the value changes
# during the block's execution.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @return [Object] the new value
#
# @!method try_update
# Pass the current value to the given block, replacing it
# with the block's result. Return nil if the update fails.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @note This method was altered to avoid raising an exception by default.
# Instead, this method now returns `nil` in case of failure. For more info,
# please see: https://github.com/ruby-concurrency/concurrent-ruby/pull/336
# @return [Object] the new value, or nil if update failed
#
# @!method try_update!
# Pass the current value to the given block, replacing it
# with the block's result. Raise an exception if the update
# fails.
#
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam [Object] old_value the starting value of the atomic reference
# @note This behavior mimics the behavior of the original
# `AtomicReference#try_update` API. The reason this was changed was to
# avoid raising exceptions (which are inherently slow) by default. For more
# info: https://github.com/ruby-concurrency/concurrent-ruby/pull/336
# @return [Object] the new value
# @raise [Concurrent::ConcurrentUpdateError] if the update fails
class AtomicReference < AtomicReferenceImplementation
# @return [String] Short string representation.
def to_s
format '%s value:%s>', super[0..-2], get
end
alias_method :inspect, :to_s
end
end
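A short single-threaded sketch of the update family documented above; under contention #update retries and #try_update can return nil, so the inline results hold only in this isolated example.

require 'concurrent'

ref = Concurrent::AtomicReference.new(0)

ref.update { |old| old + 1 }      #=> 1
ref.compare_and_set(1, 10)        #=> true (the current value matched the expected 1)
ref.try_update { |old| old * 2 }  #=> 20, or nil if another thread had won the race
puts ref.get                      #=> 20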

View File

@@ -1,4 +1,4 @@
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
require 'concurrent/utility/native_integer'
module Concurrent

View File

@@ -1,5 +1,5 @@
require 'thread'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@@ -0,0 +1,109 @@
require 'concurrent/constants'
require_relative 'locals'
module Concurrent
# A `FiberLocalVar` is a variable where the value is different for each fiber.
# Each variable may have a default value, but when you modify the variable only
# the current fiber will ever see that change.
#
# This is similar to Ruby's built-in fiber-local variables (`Thread.current[:name]`),
# but with these major advantages:
# * `FiberLocalVar` has its own identity, it doesn't need a Symbol.
# * Each of Ruby's built-in fiber-local variables leaks some memory forever (it's a Symbol held forever on the fiber),
# so it's only OK to create a small number of them.
# `FiberLocalVar` has no such issue and it is fine to create many of them.
# * Ruby's built-in fiber-local variables leak the value set on each fiber forever (unless explicitly set to nil).
# `FiberLocalVar` automatically removes the mapping for each fiber once the `FiberLocalVar` instance is GC'd.
#
# @example
# v = FiberLocalVar.new(14)
# v.value #=> 14
# v.value = 2
# v.value #=> 2
#
# @example
# v = FiberLocalVar.new(14)
#
# Fiber.new do
# v.value #=> 14
# v.value = 1
# v.value #=> 1
# end.resume
#
# Fiber.new do
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# end.resume
#
# v.value #=> 14
class FiberLocalVar
LOCALS = FiberLocals.new
# Creates a fiber local variable.
#
# @param [Object] default the default value when otherwise unset
# @param [Proc] default_block Optional block that gets called to obtain the
# default value for each fiber
def initialize(default = nil, &default_block)
if default && block_given?
raise ArgumentError, "Cannot use both value and block as default value"
end
if block_given?
@default_block = default_block
@default = nil
else
@default_block = nil
@default = default
end
@index = LOCALS.next_index(self)
end
# Returns the value in the current fiber's copy of this fiber-local variable.
#
# @return [Object] the current value
def value
LOCALS.fetch(@index) { default }
end
# Sets the current fiber's copy of this fiber-local variable to the specified value.
#
# @param [Object] value the value to set
# @return [Object] the new value
def value=(value)
LOCALS.set(@index, value)
end
# Bind the given value to fiber local storage during
# execution of the given block.
#
# @param [Object] value the value to bind
# @yield the operation to be performed with the bound variable
# @return [Object] the value
def bind(value)
if block_given?
old_value = self.value
self.value = value
begin
yield
ensure
self.value = old_value
end
end
end
protected
# @!visibility private
def default
if @default_block
self.value = @default_block.call
else
@default
end
end
end
end
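Sketch of the default-block form defined above: the block runs once per fiber, so each fiber lazily gets its own object.

require 'concurrent'

buffer = Concurrent::FiberLocalVar.new { [] }

Fiber.new { buffer.value << 1; p buffer.value }.resume  #=> [1]
Fiber.new { p buffer.value }.resume                     #=> [] (a fresh default)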

View File

@@ -0,0 +1,188 @@
require 'concurrent/utility/engine'
require 'concurrent/constants'
module Concurrent
# @!visibility private
# @!macro internal_implementation_note
#
# An abstract implementation of local storage, with sub-classes for
# per-thread and per-fiber locals.
#
# Each execution context (EC, thread or fiber) has a lazily initialized array
# of local variable values. Each time a new local variable is created, we
# allocate an "index" for it.
#
# For example, if the allocated index is 1, that means slot #1 in EVERY EC's
# locals array will be used for the value of that variable.
#
# The good thing about using a per-EC structure to hold values, rather than
# a global, is that no synchronization is needed when reading and writing
# those values (since the structure is only ever accessed by a single
# thread).
#
# Of course, when a local variable is GC'd, 1) we need to recover its index
# for use by other new local variables (otherwise the locals arrays could
# get bigger and bigger with time), and 2) we need to null out all the
# references held in the now-unused slots (both to avoid blocking GC of those
# objects, and also to prevent "stale" values from being passed on to a new
# local when the index is reused).
#
# Because we need to null out freed slots, we need to keep references to
# ALL the locals arrays, so we can null out the appropriate slots in all of
# them. This is why we need to use a finalizer to clean up the locals array
# when the EC goes out of scope.
class AbstractLocals
def initialize
@free = []
@lock = Mutex.new
@all_arrays = {}
@next = 0
end
def synchronize
@lock.synchronize { yield }
end
if Concurrent.on_cruby?
def weak_synchronize
yield
end
else
alias_method :weak_synchronize, :synchronize
end
def next_index(local)
index = synchronize do
if @free.empty?
@next += 1
else
@free.pop
end
end
# When the local goes out of scope, we should free the associated index
# and all values stored into it.
ObjectSpace.define_finalizer(local, local_finalizer(index))
index
end
def free_index(index)
weak_synchronize do
# The cost of GC'ing a TLV is linear in the number of ECs using local
# variables. But that is natural! More ECs means more storage is used
# per local variable. So naturally more CPU time is required to free
# more storage.
#
# DO NOT use each_value which might conflict with new pair assignment
# into the hash in #set method.
@all_arrays.values.each do |locals|
locals[index] = nil
end
# free index has to be published after the arrays are cleared:
@free << index
end
end
def fetch(index)
locals = self.locals
value = locals ? locals[index] : nil
if nil == value
yield
elsif NULL.equal?(value)
nil
else
value
end
end
def set(index, value)
locals = self.locals!
locals[index] = (nil == value ? NULL : value)
value
end
private
# When the local goes out of scope, clean up that slot across all locals currently assigned.
def local_finalizer(index)
proc do
free_index(index)
end
end
# When a thread/fiber goes out of scope, remove the array from @all_arrays.
def thread_fiber_finalizer(array_object_id)
proc do
weak_synchronize do
@all_arrays.delete(array_object_id)
end
end
end
# Returns the locals for the current scope, or nil if none exist.
def locals
raise NotImplementedError
end
# Returns the locals for the current scope, creating them if necessary.
def locals!
raise NotImplementedError
end
end
# @!visibility private
# @!macro internal_implementation_note
# An array-backed storage of indexed variables per thread.
class ThreadLocals < AbstractLocals
def locals
Thread.current.thread_variable_get(:concurrent_thread_locals)
end
def locals!
thread = Thread.current
locals = thread.thread_variable_get(:concurrent_thread_locals)
unless locals
locals = thread.thread_variable_set(:concurrent_thread_locals, [])
weak_synchronize do
@all_arrays[locals.object_id] = locals
end
# When the thread goes out of scope, we should delete the associated locals:
ObjectSpace.define_finalizer(thread, thread_fiber_finalizer(locals.object_id))
end
locals
end
end
# @!visibility private
# @!macro internal_implementation_note
# An array-backed storage of indexed variables per fiber.
class FiberLocals < AbstractLocals
def locals
Thread.current[:concurrent_fiber_locals]
end
def locals!
thread = Thread.current
locals = thread[:concurrent_fiber_locals]
unless locals
locals = thread[:concurrent_fiber_locals] = []
weak_synchronize do
@all_arrays[locals.object_id] = locals
end
# When the fiber goes out of scope, we should delete the associated locals:
ObjectSpace.define_finalizer(Fiber.current, thread_fiber_finalizer(locals.object_id))
end
locals
end
end
private_constant :AbstractLocals, :ThreadLocals, :FiberLocals
end
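A toy illustration of the scheme described above, not the library's API: each variable owns a fixed index, each thread owns one array indexed by it, so reads and writes need no locking. The real implementation also frees indices and arrays via finalizers and synchronizes index allocation, which this sketch omits. ToyLocal is a hypothetical name.

class ToyLocal
  @next_index = 0
  class << self
    def allocate_index
      @next_index.tap { @next_index += 1 }  # not synchronized; toy only
    end
  end

  def initialize
    @index = self.class.allocate_index
  end

  def value
    array = Thread.current.thread_variable_get(:toy_locals)
    array && array[@index]
  end

  def value=(val)
    array = Thread.current.thread_variable_get(:toy_locals) ||
            Thread.current.thread_variable_set(:toy_locals, [])
    array[@index] = val
  end
end

a = ToyLocal.new  # slot 0 in every thread's array
b = ToyLocal.new  # slot 1
a.value = :x
b.value = :y
p [a.value, b.value]                       #=> [:x, :y]
p Thread.new { [a.value, b.value] }.value  #=> [nil, nil] -- other threads see nothing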

View File

@@ -0,0 +1,28 @@
require 'concurrent/utility/engine'
require_relative 'fiber_local_var'
require_relative 'thread_local_var'
module Concurrent
# @!visibility private
def self.mutex_owned_per_thread?
return false if Concurrent.on_jruby? || Concurrent.on_truffleruby?
mutex = Mutex.new
# Lock the mutex:
mutex.synchronize do
# Check if the mutex is still owned in a child fiber:
Fiber.new { mutex.owned? }.resume
end
end
if mutex_owned_per_thread?
LockLocalVar = ThreadLocalVar
else
LockLocalVar = FiberLocalVar
end
# Either {FiberLocalVar} or {ThreadLocalVar} depending on whether Mutex (and Monitor)
# are held, respectively, per Fiber or per Thread.
class LockLocalVar
end
end

View File

@@ -1,16 +1,18 @@
require 'concurrent/synchronization'
require 'concurrent/synchronization/safe_initialization'
module Concurrent
# @!macro atomic_boolean
# @!visibility private
# @!macro internal_implementation_note
class MutexAtomicBoolean < Synchronization::LockableObject
class MutexAtomicBoolean
extend Concurrent::Synchronization::SafeInitialization
# @!macro atomic_boolean_method_initialize
def initialize(initial = false)
super()
synchronize { ns_initialize(initial) }
@Lock = ::Mutex.new
@value = !!initial
end
# @!macro atomic_boolean_method_value_get
@@ -46,8 +48,12 @@ module Concurrent
protected
# @!visibility private
def ns_initialize(initial)
@value = !!initial
def synchronize
if @Lock.owned?
yield
else
@Lock.synchronize { yield }
end
end
private
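A standalone sketch of the owned?-guarded synchronize this hunk introduces: when the calling thread already holds the lock, re-entering yields directly instead of calling Mutex#synchronize, which would raise ThreadError on recursive locking. ReentrantBox is a hypothetical illustration, not part of the gem.

class ReentrantBox
  def initialize(value)
    @lock  = Mutex.new
    @value = value
  end

  def synchronize
    if @lock.owned?
      yield                        # already inside the critical section
    else
      @lock.synchronize { yield }
    end
  end

  def swap(new_value)
    synchronize do
      old = synchronize { @value } # nested call is safe thanks to owned?
      @value = new_value
      old
    end
  end
end

p ReentrantBox.new(1).swap(2)  #=> 1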

View File

@@ -1,4 +1,4 @@
require 'concurrent/synchronization'
require 'concurrent/synchronization/safe_initialization'
require 'concurrent/utility/native_integer'
module Concurrent
@@ -6,12 +6,14 @@ module Concurrent
# @!macro atomic_fixnum
# @!visibility private
# @!macro internal_implementation_note
class MutexAtomicFixnum < Synchronization::LockableObject
class MutexAtomicFixnum
extend Concurrent::Synchronization::SafeInitialization
# @!macro atomic_fixnum_method_initialize
def initialize(initial = 0)
super()
synchronize { ns_initialize(initial) }
@Lock = ::Mutex.new
ns_set(initial)
end
# @!macro atomic_fixnum_method_value_get
@@ -60,8 +62,12 @@ module Concurrent
protected
# @!visibility private
def ns_initialize(initial)
ns_set(initial)
def synchronize
if @Lock.owned?
yield
else
@Lock.synchronize { yield }
end
end
private

View File

@@ -1,4 +1,4 @@
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
require 'concurrent/utility/native_integer'
module Concurrent

View File

@@ -1,4 +1,4 @@
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
require 'concurrent/utility/native_integer'
module Concurrent

View File

@@ -1,7 +1,8 @@
require 'thread'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/errors'
require 'concurrent/synchronization'
require 'concurrent/synchronization/object'
require 'concurrent/synchronization/lock'
module Concurrent

View File

@@ -1,8 +1,10 @@
require 'thread'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/errors'
require 'concurrent/synchronization'
require 'concurrent/atomic/thread_local_var'
require 'concurrent/synchronization/object'
require 'concurrent/synchronization/lock'
require 'concurrent/atomic/lock_local_var'
module Concurrent
@@ -109,7 +111,7 @@ module Concurrent
@Counter = AtomicFixnum.new(0) # single integer which represents lock state
@ReadQueue = Synchronization::Lock.new # used to queue waiting readers
@WriteQueue = Synchronization::Lock.new # used to queue waiting writers
@HeldCount = ThreadLocalVar.new(0) # indicates # of R & W locks held by this thread
@HeldCount = LockLocalVar.new(0) # indicates # of R & W locks held by this thread
end
# Execute a block operation within a read lock.

View File

@@ -1,5 +1,4 @@
require 'concurrent/atomic/mutex_semaphore'
require 'concurrent/synchronization'
module Concurrent
@@ -11,7 +10,7 @@ module Concurrent
#
# @param [Fixnum] count the initial count
#
# @raise [ArgumentError] if `count` is not an integer or is less than zero
# @raise [ArgumentError] if `count` is not an integer
# @!macro semaphore_method_acquire
#
@@ -21,8 +20,7 @@ module Concurrent
#
# @param [Fixnum] permits Number of permits to acquire
#
# @raise [ArgumentError] if `permits` is not an integer or is less than
# one
# @raise [ArgumentError] if `permits` is not an integer or is less than zero
#
# @return [nil, BasicObject] Without a block, `nil` is returned. If a block
# is given, its return value is returned.
@@ -52,8 +50,7 @@ module Concurrent
# @param [Fixnum] timeout the number of seconds to wait for the counter
# or `nil` to return immediately
#
# @raise [ArgumentError] if `permits` is not an integer or is less than
# one
# @raise [ArgumentError] if `permits` is not an integer or is less than zero
#
# @return [true, false, nil, BasicObject] `false` if no permits are
# available, `true` when acquired a permit. If a block is given, the
@@ -66,7 +63,7 @@ module Concurrent
#
# @param [Fixnum] permits Number of permits to return to the semaphore.
#
# @raise [ArgumentError] if `permits` is not a number or is less than one
# @raise [ArgumentError] if `permits` is not a number or is less than zero
#
# @return [nil]
@@ -96,8 +93,8 @@ module Concurrent
# @!visibility private
# @!macro internal_implementation_note
SemaphoreImplementation = case
when defined?(JavaSemaphore)
SemaphoreImplementation = if Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
JavaSemaphore
else
MutexSemaphore
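A usage sketch for Concurrent::Semaphore as documented in the hunks above; the block form of #acquire is assumed to release the permit when the block returns, per the documented block return values.

require 'concurrent'

db_slots = Concurrent::Semaphore.new(2)  # at most two holders at a time

workers = 4.times.map do |i|
  Thread.new do
    db_slots.acquire do                  # assumed to release on block exit
      puts "worker #{i} holds a slot"
      sleep 0.05
    end
  end
end
workers.each(&:join)

p db_slots.try_acquire  #=> true or false, never blocks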

View File

@@ -0,0 +1,111 @@
require 'concurrent/constants'
require_relative 'locals'
module Concurrent
# A `ThreadLocalVar` is a variable where the value is different for each thread.
# Each variable may have a default value, but when you modify the variable only
# the current thread will ever see that change.
#
# This is similar to Ruby's built-in thread-local variables (`Thread#thread_variable_get`),
# but with these major advantages:
# * `ThreadLocalVar` has its own identity, it doesn't need a Symbol.
# * Each of Ruby's built-in thread-local variables leaks some memory forever (it's a Symbol held forever on the thread),
# so it's only OK to create a small number of them.
# `ThreadLocalVar` has no such issue and it is fine to create many of them.
# * Ruby's built-in thread-local variables leak the value set on each thread forever (unless explicitly set to nil).
# `ThreadLocalVar` automatically removes the mapping for each thread once the `ThreadLocalVar` instance is GC'd.
#
# @!macro thread_safe_variable_comparison
#
# @example
# v = ThreadLocalVar.new(14)
# v.value #=> 14
# v.value = 2
# v.value #=> 2
#
# @example
# v = ThreadLocalVar.new(14)
#
# t1 = Thread.new do
# v.value #=> 14
# v.value = 1
# v.value #=> 1
# end
#
# t2 = Thread.new do
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# end
#
# v.value #=> 14
class ThreadLocalVar
LOCALS = ThreadLocals.new
# Creates a thread local variable.
#
# @param [Object] default the default value when otherwise unset
# @param [Proc] default_block Optional block that gets called to obtain the
# default value for each thread
def initialize(default = nil, &default_block)
if default && block_given?
raise ArgumentError, "Cannot use both value and block as default value"
end
if block_given?
@default_block = default_block
@default = nil
else
@default_block = nil
@default = default
end
@index = LOCALS.next_index(self)
end
# Returns the value in the current thread's copy of this thread-local variable.
#
# @return [Object] the current value
def value
LOCALS.fetch(@index) { default }
end
# Sets the current thread's copy of this thread-local variable to the specified value.
#
# @param [Object] value the value to set
# @return [Object] the new value
def value=(value)
LOCALS.set(@index, value)
end
# Bind the given value to thread local storage during
# execution of the given block.
#
# @param [Object] value the value to bind
# @yield the operation to be performed with the bound variable
# @return [Object] the value
def bind(value)
if block_given?
old_value = self.value
self.value = value
begin
yield
ensure
self.value = old_value
end
end
end
protected
# @!visibility private
def default
if @default_block
self.value = @default_block.call
else
@default
end
end
end
end
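Sketch of #bind as defined above: the value is swapped in only for the duration of the block on the current thread, then restored.

require 'concurrent'

v = Concurrent::ThreadLocalVar.new(:default)

v.bind(:scoped) do
  p v.value  #=> :scoped
end
p v.value    #=> :default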

View File

@@ -0,0 +1,37 @@
require 'concurrent/errors'
module Concurrent
# Define update methods that use direct paths
#
# @!visibility private
# @!macro internal_implementation_note
module AtomicDirectUpdate
def update
true until compare_and_set(old_value = get, new_value = yield(old_value))
new_value
end
def try_update
old_value = get
new_value = yield old_value
return unless compare_and_set old_value, new_value
new_value
end
def try_update!
old_value = get
new_value = yield old_value
unless compare_and_set(old_value, new_value)
if $VERBOSE
raise ConcurrentUpdateError, "Update failed"
else
raise ConcurrentUpdateError, "Update failed", ConcurrentUpdateError::CONC_UP_ERR_BACKTRACE
end
end
new_value
end
end
end

View File

@@ -1,8 +1,13 @@
require 'concurrent/atomic_reference/atomic_direct_update'
require 'concurrent/atomic_reference/numeric_cas_wrapper'
require 'concurrent/synchronization/safe_initialization'
module Concurrent
# @!visibility private
# @!macro internal_implementation_note
class MutexAtomicReference < Synchronization::LockableObject
class MutexAtomicReference
extend Concurrent::Synchronization::SafeInitialization
include AtomicDirectUpdate
include AtomicNumericCompareAndSetWrapper
alias_method :compare_and_swap, :compare_and_set
@@ -10,7 +15,8 @@ module Concurrent
# @!macro atomic_reference_method_initialize
def initialize(value = nil)
super()
synchronize { ns_initialize(value) }
@Lock = ::Mutex.new
@value = value
end
# @!macro atomic_reference_method_get
@@ -49,8 +55,13 @@ module Concurrent
protected
def ns_initialize(value)
@value = value
# @!visibility private
def synchronize
if @Lock.owned?
yield
else
@Lock.synchronize { yield }
end
end
end
end

View File

@@ -0,0 +1,116 @@
require 'logger'
require 'concurrent/atomic/atomic_reference'
module Concurrent
module Concern
# Include where logging is needed
#
# @!visibility private
module Logging
include Logger::Severity
# Logs through {Concurrent.global_logger}; it can be overridden by setting @logger
# @param [Integer] level one of Logger::Severity constants
# @param [String] progname e.g. a path of an Actor
# @param [String, nil] message when nil block is used to generate the message
# @yieldreturn [String] a message
def log(level, progname, message = nil, &block)
logger = if defined?(@logger) && @logger
@logger
else
Concurrent.global_logger
end
logger.call level, progname, message, &block
rescue => error
$stderr.puts "`Concurrent.configuration.logger` failed to log #{[level, progname, message, block]}\n" +
"#{error.message} (#{error.class})\n#{error.backtrace.join "\n"}"
end
end
end
end
module Concurrent
extend Concern::Logging
# @return [Logger] Logger with provided level and output.
def self.create_simple_logger(level = Logger::FATAL, output = $stderr)
# TODO (pitr-ch 24-Dec-2016): figure out why it had to be replaced, stdlogger was deadlocking
lambda do |severity, progname, message = nil, &block|
return false if severity < level
message = block ? block.call : message
formatted_message = case message
when String
message
when Exception
format "%s (%s)\n%s",
message.message, message.class, (message.backtrace || []).join("\n")
else
message.inspect
end
output.print format "[%s] %5s -- %s: %s\n",
Time.now.strftime('%Y-%m-%d %H:%M:%S.%L'),
Logger::SEV_LABEL[severity],
progname,
formatted_message
true
end
end
# Use logger created by #create_simple_logger to log concurrent-ruby messages.
def self.use_simple_logger(level = Logger::FATAL, output = $stderr)
Concurrent.global_logger = create_simple_logger level, output
end
# @return [Logger] Logger with provided level and output.
# @deprecated
def self.create_stdlib_logger(level = Logger::FATAL, output = $stderr)
logger = Logger.new(output)
logger.level = level
logger.formatter = lambda do |severity, datetime, progname, msg|
formatted_message = case msg
when String
msg
when Exception
format "%s (%s)\n%s",
msg.message, msg.class, (msg.backtrace || []).join("\n")
else
msg.inspect
end
format "[%s] %5s -- %s: %s\n",
datetime.strftime('%Y-%m-%d %H:%M:%S.%L'),
severity,
progname,
formatted_message
end
lambda do |loglevel, progname, message = nil, &block|
logger.add loglevel, message, progname, &block
end
end
# Use logger created by #create_stdlib_logger to log concurrent-ruby messages.
# @deprecated
def self.use_stdlib_logger(level = Logger::FATAL, output = $stderr)
Concurrent.global_logger = create_stdlib_logger level, output
end
# TODO (pitr-ch 27-Dec-2016): remove deadlocking stdlib_logger methods
# Suppresses all output when used for logging.
NULL_LOGGER = lambda { |level, progname, message = nil, &block| }
# @!visibility private
GLOBAL_LOGGER = AtomicReference.new(create_simple_logger(Logger::WARN))
private_constant :GLOBAL_LOGGER
def self.global_logger
GLOBAL_LOGGER.value
end
def self.global_logger=(value)
GLOBAL_LOGGER.value = value
end
end
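A configuration sketch for the hooks defined above: concurrent-ruby reports internal warnings and errors through Concurrent.global_logger, which defaults to the simple $stderr logger created at WARN level.
  require 'concurrent'
  require 'logger'

  # Route library messages to stdout and lower the threshold to DEBUG:
  Concurrent.use_simple_logger(Logger::DEBUG, $stdout)

  # Or silence the library entirely with the built-in no-op logger:
  Concurrent.global_logger = Concurrent::NULL_LOGGER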

View File

@ -1,102 +1,19 @@
require 'thread'
require 'concurrent/delay'
require 'concurrent/errors'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/concern/logging'
require 'concurrent/concern/deprecation'
require 'concurrent/executor/immediate_executor'
require 'concurrent/executor/fixed_thread_pool'
require 'concurrent/executor/cached_thread_pool'
require 'concurrent/utility/processor_counter'
module Concurrent
extend Concern::Logging
extend Concern::Deprecation
autoload :Options, 'concurrent/options'
autoload :TimerSet, 'concurrent/executor/timer_set'
autoload :ThreadPoolExecutor, 'concurrent/executor/thread_pool_executor'
# @return [Logger] Logger with provided level and output.
def self.create_simple_logger(level = Logger::FATAL, output = $stderr)
# TODO (pitr-ch 24-Dec-2016): figure out why it had to be replaced, stdlogger was deadlocking
lambda do |severity, progname, message = nil, &block|
return false if severity < level
message = block ? block.call : message
formatted_message = case message
when String
message
when Exception
format "%s (%s)\n%s",
message.message, message.class, (message.backtrace || []).join("\n")
else
message.inspect
end
output.print format "[%s] %5s -- %s: %s\n",
Time.now.strftime('%Y-%m-%d %H:%M:%S.%L'),
Logger::SEV_LABEL[severity],
progname,
formatted_message
true
end
end
# Use logger created by #create_simple_logger to log concurrent-ruby messages.
def self.use_simple_logger(level = Logger::FATAL, output = $stderr)
Concurrent.global_logger = create_simple_logger level, output
end
# @return [Logger] Logger with provided level and output.
# @deprecated
def self.create_stdlib_logger(level = Logger::FATAL, output = $stderr)
logger = Logger.new(output)
logger.level = level
logger.formatter = lambda do |severity, datetime, progname, msg|
formatted_message = case msg
when String
msg
when Exception
format "%s (%s)\n%s",
msg.message, msg.class, (msg.backtrace || []).join("\n")
else
msg.inspect
end
format "[%s] %5s -- %s: %s\n",
datetime.strftime('%Y-%m-%d %H:%M:%S.%L'),
severity,
progname,
formatted_message
end
lambda do |loglevel, progname, message = nil, &block|
logger.add loglevel, message, progname, &block
end
end
# Use logger created by #create_stdlib_logger to log concurrent-ruby messages.
# @deprecated
def self.use_stdlib_logger(level = Logger::FATAL, output = $stderr)
Concurrent.global_logger = create_stdlib_logger level, output
end
# TODO (pitr-ch 27-Dec-2016): remove deadlocking stdlib_logger methods
# Suppresses all output when used for logging.
NULL_LOGGER = lambda { |level, progname, message = nil, &block| }
# @!visibility private
GLOBAL_LOGGER = AtomicReference.new(create_simple_logger(Logger::WARN))
private_constant :GLOBAL_LOGGER
def self.global_logger
GLOBAL_LOGGER.value
end
def self.global_logger=(value)
GLOBAL_LOGGER.value = value
end
# @!visibility private
GLOBAL_FAST_EXECUTOR = Delay.new { Concurrent.new_fast_executor }
private_constant :GLOBAL_FAST_EXECUTOR
@ -136,14 +53,14 @@ module Concurrent
#
# @return [ThreadPoolExecutor] the thread pool
def self.global_fast_executor
GLOBAL_FAST_EXECUTOR.value
GLOBAL_FAST_EXECUTOR.value!
end
# Global thread pool optimized for long, blocking (IO) *tasks*.
#
# @return [ThreadPoolExecutor] the thread pool
def self.global_io_executor
GLOBAL_IO_EXECUTOR.value
GLOBAL_IO_EXECUTOR.value!
end
def self.global_immediate_executor
@ -154,7 +71,7 @@ module Concurrent
#
# @return [Concurrent::TimerSet] the thread pool
def self.global_timer_set
GLOBAL_TIMER_SET.value
GLOBAL_TIMER_SET.value!
end
# General access point to global executors.
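A brief sketch of how the global pools returned by these accessors are used; all three respond to #post, and the timer set additionally takes a delay in seconds (the block bodies here are placeholders):
  require 'concurrent'

  Concurrent.global_io_executor.post   { File.read(__FILE__) }  # long or blocking work
  Concurrent.global_fast_executor.post { 2 + 2 }                # short, CPU-bound work
  Concurrent.global_timer_set.post(5)  { puts 'five seconds later' }

  sleep 6  # keep the main thread alive long enough to see the timer fire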

View File

@ -1,7 +1,7 @@
require 'thread'
require 'concurrent/concern/obligation'
require 'concurrent/executor/immediate_executor'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent
@ -67,7 +67,7 @@ module Concurrent
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method. If the delayed operation
# raised an exception this method will return nil. The execption object
# raised an exception this method will return nil. The exception object
# can be accessed via the `#reason` method.
#
# @param [Numeric] timeout the maximum number of seconds to wait
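The documented behaviour, nil from #value after a failure with the exception surfaced via #reason, can be seen with Concurrent::Delay, one of the Obligation-based classes this doc comment appears to belong to:
  require 'concurrent'

  d = Concurrent::Delay.new { raise ArgumentError, 'boom' }
  d.value      #=> nil; evaluation happens on the first call and the block raised
  d.rejected?  #=> true
  d.reason     #=> #<ArgumentError: boom>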

View File

@ -66,4 +66,9 @@ module Concurrent
end
end
# @!macro internal_implementation_note
class ConcurrentUpdateError < ThreadError
# frozen, pre-allocated backtrace to speed up raising ConcurrentUpdateError
CONC_UP_ERR_BACKTRACE = ['backtrace elided; set verbose to enable'].freeze
end
end
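The frozen CONC_UP_ERR_BACKTRACE exists so the hot failure path in #try_update! can raise without paying for a full backtrace capture. A generic sketch of the same idiom, with hypothetical names:
  # Hypothetical error class, not part of concurrent-ruby:
  class FastFailure < StandardError
    ELIDED_BACKTRACE = ['backtrace elided; set verbose to enable'].freeze
  end

  begin
    # Passing a pre-built backtrace as the third argument to Kernel#raise
    # skips the comparatively expensive caller-stack capture.
    raise FastFailure, 'update failed', FastFailure::ELIDED_BACKTRACE
  rescue FastFailure => e
    e.backtrace  #=> ["backtrace elided; set verbose to enable"]
  end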

View File

@ -289,6 +289,7 @@ module Concurrent
end
if Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
# @!macro internal_implementation_note
# @!visibility private

View File

@ -1,7 +1,7 @@
require 'concurrent/errors'
require 'concurrent/concern/deprecation'
require 'concurrent/executor/executor_service'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@ -1,7 +1,7 @@
if Concurrent.on_jruby?
require 'concurrent/utility/engine'
if Concurrent.on_jruby?
require 'concurrent/errors'
require 'concurrent/utility/engine'
require 'concurrent/executor/abstract_executor_service'
module Concurrent

View File

@ -1,6 +1,6 @@
require 'concurrent/errors'
require 'concurrent/concern/logging'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@ -1,5 +1,8 @@
require 'concurrent/atomics'
require 'concurrent/atomic/atomic_boolean'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/atomic/event'
require 'concurrent/executor/executor_service'
require 'concurrent/executor/ruby_executor_service'
module Concurrent

View File

@ -28,15 +28,6 @@ module Concurrent
end
JRubyHash
when Concurrent.on_rbx?
require 'monitor'
require 'concurrent/thread_safe/util/data_structures'
class RbxHash < ::Hash
end
ThreadSafe::Util.make_synchronized_on_rbx RbxHash
RbxHash
when Concurrent.on_truffleruby?
require 'concurrent/thread_safe/util/data_structures'
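Whichever engine-specific backend the selection above resolves to, the resulting Concurrent::Hash is meant to behave like a thread-safe ::Hash. A quick sketch:
  require 'concurrent'

  h = Concurrent::Hash.new
  10.times.map { |i| Thread.new { h[i] = i * i } }.each(&:join)
  h.size  #=> 10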

View File

@ -1,5 +1,5 @@
require 'concurrent/synchronization/abstract_struct'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@ -3,7 +3,8 @@ require 'concurrent/errors'
require 'concurrent/collection/copy_on_write_observer_set'
require 'concurrent/concern/obligation'
require 'concurrent/concern/observable'
require 'concurrent/synchronization'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@ -1,6 +1,5 @@
require 'thread'
require 'concurrent/constants'
require 'concurrent/synchronization'
require 'concurrent/utility/engine'
module Concurrent
@ -10,17 +9,20 @@ module Concurrent
# @!visibility private
MapImplementation = case
when Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
# noinspection RubyResolve
JRubyMapBackend
when Concurrent.on_cruby?
require 'concurrent/collection/map/mri_map_backend'
MriMapBackend
when Concurrent.on_truffleruby? && defined?(::TruffleRuby::ConcurrentMap)
when Concurrent.on_truffleruby?
if defined?(::TruffleRuby::ConcurrentMap)
require 'concurrent/collection/map/truffleruby_map_backend'
TruffleRubyMapBackend
when Concurrent.on_truffleruby? || Concurrent.on_rbx?
else
require 'concurrent/collection/map/atomic_reference_map_backend'
AtomicReferenceMapBackend
end
else
warn 'Concurrent::Map: unsupported Ruby engine, using a fully synchronized Concurrent::Map implementation'
require 'concurrent/collection/map/synchronized_map_backend'
@ -197,7 +199,6 @@ module Concurrent
# @yieldparam key [Object]
# @yieldreturn [Object] default value
# @return [Object] the value or default value
# @!macro map.atomic_method_with_block
def fetch_or_store(key, default_value = NULL)
fetch(key) do
put(key, block_given? ? yield(key) : (NULL == default_value ? raise_fetch_no_key : default_value))
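A usage sketch for #fetch_or_store as documented above: the block (or the plain default value) is consulted only when the key is absent, and its result is stored under that key; note the hunk also drops the map.atomic_method_with_block macro from this method.
  require 'concurrent'

  m = Concurrent::Map.new
  m.fetch_or_store(:answer) { 6 * 7 }           #=> 42, computed and stored
  m.fetch_or_store(:answer) { raise 'unused' }  #=> 42, block skipped; key already present
  m.fetch_or_store(:missing, :fallback)         #=> :fallback, stored as the plain default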

View File

@ -1,5 +1,5 @@
require 'concurrent/synchronization/abstract_struct'
require 'concurrent/synchronization'
require 'concurrent/synchronization/lockable_object'
module Concurrent

View File

@ -1,5 +1,5 @@
require 'concurrent/concern/dereferenceable'
require 'concurrent/synchronization'
require 'concurrent/synchronization/object'
module Concurrent

Some files were not shown because too many files have changed in this diff.