
# typed: true
# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `concurrent-ruby` gem.
# Please instead update this file by running `bin/tapioca gem concurrent-ruby`.
# {include:file:README.md}
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/version.rb#1
module Concurrent
extend ::Concurrent::Utility::EngineDetector
extend ::Concurrent::Utility::NativeExtensionLoader
extend ::Concurrent::Concern::Logging
extend ::Concurrent::Concern::Deprecation
private
# Abort a currently running transaction - see `Concurrent::atomically`.
#
# @raise [Transaction::AbortError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#139
def abort_transaction; end
# Run a block that reads and writes `TVar`s as a single atomic transaction.
# With respect to the value of `TVar` objects, the transaction is atomic, in
# that it either happens or it does not, consistent, in that the `TVar`
# objects involved will never enter an illegal state, and isolated, in that
# transactions never interfere with each other. You may recognise these
# properties from database transactions.
#
# There are some very important and unusual semantics that you must be aware of:
#
# * Most importantly, the block that you pass to atomically may be executed
# more than once. In most cases your code should be free of
# side-effects, except for those made via `TVar`s.
#
# * If an exception escapes an atomically block it will abort the transaction.
#
# * It is undefined behaviour to use callcc or Fiber with atomically.
#
# * If you create a new thread within an atomically, it will not be part of
# the transaction. Creating a thread counts as a side-effect.
#
# Transactions within transactions are flattened to a single transaction.
#
# @example
# a = Concurrent::TVar.new(100_000)
# b = Concurrent::TVar.new(100)
#
# Concurrent::atomically do
# a.value -= 10
# b.value += 10
# end
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#82
def atomically; end
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#56
def call_dataflow(method, executor, *inputs, &block); end
# Dataflow allows you to create a task that will be scheduled when all of its data dependencies are available.
# {include:file:docs-source/dataflow.md}
#
# @param inputs [Future] zero or more `Future` operations that this dataflow depends upon
# @raise [ArgumentError] if no block is given
# @raise [ArgumentError] if any of the inputs are not `IVar`s
# @return [Object] the result of all the operations
# @yield The operation to perform once all the dependencies are met
# @yieldparam inputs [Future] each of the `Future` inputs to the dataflow
# @yieldreturn [Object] the result of the block operation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#34
def dataflow(*inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#44
def dataflow!(*inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#39
def dataflow_with(executor, *inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#49
def dataflow_with!(executor, *inputs, &block); end
# Leave a transaction without committing or aborting - see `Concurrent::atomically`.
#
# @raise [Transaction::LeaveError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#144
def leave_transaction; end
# Returns the current time as tracked by the application monotonic clock.
#
# @param unit [Symbol] the time unit to be returned; can be either
# :float_second, :float_millisecond, :float_microsecond, :second,
# :millisecond, :microsecond, or :nanosecond. Defaults to :float_second.
# @return [Float] The current monotonic time since some unspecified
# starting point
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/monotonic_time.rb#15
def monotonic_time(unit = T.unsafe(nil)); end
class << self
# Abort a currently running transaction - see `Concurrent::atomically`.
#
# @raise [Transaction::AbortError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#139
def abort_transaction; end
# Run a block that reads and writes `TVar`s as a single atomic transaction.
# With respect to the value of `TVar` objects, the transaction is atomic, in
# that it either happens or it does not, consistent, in that the `TVar`
# objects involved will never enter an illegal state, and isolated, in that
# transactions never interfere with each other. You may recognise these
# properties from database transactions.
#
# There are some very important and unusual semantics that you must be aware of:
#
# * Most importantly, the block that you pass to atomically may be executed
# more than once. In most cases your code should be free of
# side-effects, except for those made via `TVar`s.
#
# * If an exception escapes an atomically block it will abort the transaction.
#
# * It is undefined behaviour to use callcc or Fiber with atomically.
#
# * If you create a new thread within an atomically, it will not be part of
# the transaction. Creating a thread counts as a side-effect.
#
# Transactions within transactions are flattened to a single transaction.
#
# @example
# a = Concurrent::TVar.new(100_000)
# b = Concurrent::TVar.new(100)
#
# Concurrent::atomically do
# a.value -= 10
# b.value += 10
# end
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#82
def atomically; end
# Number of processor cores available for process scheduling.
# This method takes into account the CPU quota if the process is inside a cgroup with a
# dedicated CPU quota (typically Docker).
# Otherwise it returns the same value as #processor_count but as a Float.
#
# For performance reasons the calculated value will be memoized on the first
# call.
#
# @return [Float] number of available processors
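#
# A minimal pool-sizing sketch (assumes the real concurrent-ruby gem is
# loaded, not just this RBI; the sizing rule is illustrative only):
#
# @example
#   workers = [Concurrent.available_processor_count.floor, 1].max
#   pool = Concurrent::FixedThreadPool.new(workers)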
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#194
def available_processor_count; end
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#56
def call_dataflow(method, executor, *inputs, &block); end
# The maximum number of processor cores available for process scheduling.
# Returns `nil` if there is no enforced limit, or a `Float` if the
# process is inside a cgroup with a dedicated CPU quota (typically Docker).
#
# Note that nothing prevents setting a CPU quota higher than the actual number of
# cores on the system.
#
# For performance reasons the calculated value will be memoized on the first
# call.
#
# @return [nil, Float] Maximum number of available processors as set by a cgroup CPU quota, or nil if none set
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#209
def cpu_quota; end
# The CPU shares requested by the process. For performance reasons the calculated
# value will be memoized on the first call.
#
# @return [Float, nil] CPU shares requested by the process, or nil if not set
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#217
def cpu_shares; end
# Create a simple logger with provided level and output.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#38
def create_simple_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# Create a stdlib logger with provided level and output.
# If you use this deprecated method you might need to add the `logger` gem to your Gemfile to avoid warnings from Ruby 3.3.5+.
#
# @deprecated
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#73
def create_stdlib_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# Dataflow allows you to create a task that will be scheduled when all of its data dependencies are available.
# {include:file:docs-source/dataflow.md}
#
# @param inputs [Future] zero or more `Future` operations that this dataflow depends upon
# @raise [ArgumentError] if no block is given
# @raise [ArgumentError] if any of the inputs are not `IVar`s
# @return [Object] the result of all the operations
# @yield The operation to perform once all the dependencies are met
# @yieldparam inputs [Future] each of the `Future` inputs to the dataflow
# @yieldreturn [Object] the result of the block operation
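#
# A minimal composition sketch (assumes the real concurrent-ruby gem is
# loaded, not just this RBI):
#
# @example
#   a = Concurrent.dataflow { 1 }
#   b = Concurrent.dataflow { 2 }
#   sum = Concurrent.dataflow(a, b) { |x, y| x + y }
#   sum.value #=> 3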
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#34
def dataflow(*inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#44
def dataflow!(*inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#39
def dataflow_with(executor, *inputs, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#49
def dataflow_with!(executor, *inputs, &block); end
# Disables AtExit handlers including pool auto-termination handlers.
# When disabled it will be the application programmer's responsibility
# to ensure that the handlers are shut down properly prior to application
# exit by calling the `AtExit.run` method.
#
# @deprecated Has no effect since it is no longer needed, see https://github.com/ruby-concurrency/concurrent-ruby/pull/841.
# @note this option should be needed only because of `at_exit` ordering
# issues which may arise when running some of the testing frameworks.
# E.g. Minitest's test-suite runs itself in `at_exit` callback which
# executes after the pools are already terminated. Then auto termination
# needs to be disabled and called manually after test-suite ends.
# @note This method should *never* be called
# from within a gem. It should *only* be used from within the main
# application and even then it should be used only when necessary.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#48
def disable_at_exit_handlers!; end
# General access point to global executors.
#
# @param executor_identifier [Symbol, Executor] symbols:
# - :fast - {Concurrent.global_fast_executor}
# - :io - {Concurrent.global_io_executor}
# - :immediate - {Concurrent.global_immediate_executor}
# @return [Executor]
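#
# A minimal lookup sketch (assumes the real concurrent-ruby gem is loaded,
# not just this RBI):
#
# @example
#   Concurrent.executor(:io).post { sleep(1) }   # long or blocking work
#   Concurrent.executor(:fast).post { 2 + 2 }    # short CPU-bound work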
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#83
def executor(executor_identifier); end
# Global thread pool optimized for short, fast *operations*.
#
# @return [ThreadPoolExecutor] the thread pool
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#55
def global_fast_executor; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#66
def global_immediate_executor; end
# Global thread pool optimized for long, blocking (IO) *tasks*.
#
# @return [ThreadPoolExecutor] the thread pool
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#62
def global_io_executor; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#114
def global_logger; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#118
def global_logger=(value); end
# Global thread pool used for global *timers*.
#
# @return [Concurrent::TimerSet] the thread pool
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#73
def global_timer_set; end
# Leave a transaction without committing or aborting - see `Concurrent::atomically`.
#
# @raise [Transaction::LeaveError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#144
def leave_transaction; end
# Returns the current time as tracked by the application monotonic clock.
#
# @param unit [Symbol] the time unit to be returned; can be either
# :float_second, :float_millisecond, :float_microsecond, :second,
# :millisecond, :microsecond, or :nanosecond. Defaults to :float_second.
# @return [Float] The current monotonic time since some unspecified
# starting point
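#
# A minimal elapsed-time sketch (assumes the real concurrent-ruby gem is
# loaded, not just this RBI; `do_work` is a hypothetical placeholder):
#
# @example
#   started = Concurrent.monotonic_time
#   do_work
#   elapsed_seconds = Concurrent.monotonic_time - started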
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/monotonic_time.rb#15
def monotonic_time(unit = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/lock_local_var.rb#7
def mutex_owned_per_thread?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#87
def new_fast_executor(opts = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#98
def new_io_executor(opts = T.unsafe(nil)); end
# Number of physical processor cores on the current system. For performance
# reasons the calculated value will be memoized on the first call.
#
# On Windows the Win32 API will be queried for the `NumberOfCores` from
# `Win32_Processor`. This will return the total number "of cores for the
# current instance of the processor." On Unix-like operating systems either
# the `hwprefs` or `sysctl` utility will be called in a subshell and the
# returned value will be used. In the rare case where none of these methods
# work or an exception is raised the function will simply return 1.
#
# @return [Integer] number of physical processor cores on the current system
# @see https://github.com/grosser/parallel/blob/4fc8b89d08c7091fe0419ca8fba1ec3ce5a8d185/lib/parallel.rb
# @see http://msdn.microsoft.com/en-us/library/aa394373(v=vs.85).aspx
# @see http://www.unix.com/man-page/osx/1/HWPREFS/
# @see http://linux.die.net/man/8/sysctl
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#181
def physical_processor_count; end
# Number of processors seen by the OS and used for process scheduling. For
# performance reasons the calculated value will be memoized on the first
# call.
#
# When running under JRuby the Java runtime call
# `java.lang.Runtime.getRuntime.availableProcessors` will be used. According
# to the Java documentation this "value may change during a particular
# invocation of the virtual machine... [applications] should therefore
# occasionally poll this property." We still memoize this value once under
# JRuby.
#
# Otherwise Ruby's Etc.nprocessors will be used.
#
# @return [Integer] number of processors seen by the OS or Java runtime
# @see http://docs.oracle.com/javase/6/docs/api/java/lang/Runtime.html#availableProcessors()
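#
# A minimal sketch contrasting the related counters (assumes the real
# concurrent-ruby gem is loaded; the values shown are illustrative):
#
# @example
#   Concurrent.processor_count           #=> 8    (logical CPUs)
#   Concurrent.physical_processor_count  #=> 4    (physical cores)
#   Concurrent.available_processor_count #=> 8.0  (honours any cgroup CPU quota)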
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#160
def processor_count; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#142
def processor_counter; end
# Use logger created by #create_simple_logger to log concurrent-ruby messages.
#
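# A minimal sketch (assumes the real concurrent-ruby gem is loaded and the
# stdlib `logger` has been required so `Logger::WARN` is defined):
#
# @example
#   Concurrent.use_simple_logger(Logger::WARN, $stderr)
#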
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#66
def use_simple_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# Use logger created by #create_stdlib_logger to log concurrent-ruby messages.
#
# @deprecated
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#101
def use_stdlib_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#38
class Concurrent::AbstractExchanger < ::Concurrent::Synchronization::Object
# @return [AbstractExchanger] a new instance of AbstractExchanger
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#44
def initialize; end
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
#
# In some edge cases when a `timeout` is given a return value of `nil` may be
# ambiguous. Specifically, if `nil` is a valid value in the exchange it will
# be impossible to tell whether `nil` is the actual return value or if it
# signifies timeout. When `nil` is a valid value in the exchange consider
# using {#exchange!} or {#try_exchange} instead.
#
# @param value [Object] the value to exchange with another thread
# @param timeout [Numeric, nil] in seconds, `nil` blocks indefinitely
# @return [Object] the value exchanged by the other thread or `nil` on timeout
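#
# A minimal two-thread pairing sketch (assumes the real concurrent-ruby gem
# is loaded, not just this RBI):
#
# @example
#   exchanger = Concurrent::Exchanger.new
#   other = Thread.new { exchanger.exchange(:bar) } # this thread receives :foo
#   exchanger.exchange(:foo) #=> :bar
#   other.join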
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#69
def exchange(value, timeout = T.unsafe(nil)); end
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
#
# On timeout a {Concurrent::TimeoutError} exception will be raised.
#
# @param value [Object] the value to exchange with another thread
# @param timeout [Numeric, nil] in seconds, `nil` blocks indefinitely
# @raise [Concurrent::TimeoutError] on timeout
# @return [Object] the value exchanged by the other thread
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#80
def exchange!(value, timeout = T.unsafe(nil)); end
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
#
# The return value will be a {Concurrent::Maybe} set to `Just` on success or
# `Nothing` on timeout.
#
# @example
#
# exchanger = Concurrent::Exchanger.new
#
# result = exchanger.try_exchange(:foo, 0.5)
#
# if result.just?
# puts result.value #=> :bar
# else
# puts 'timeout'
# end
# @param value [Object] the value to exchange with another thread
# @param timeout [Numeric, nil] in seconds, `nil` blocks indefinitely
# @return [Concurrent::Maybe] on success a `Just` maybe will be returned with
# the item exchanged by the other thread as `#value`; on timeout a
# `Nothing` maybe will be returned with {Concurrent::TimeoutError} as `#reason`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#109
def try_exchange(value, timeout = T.unsafe(nil)); end
private
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
# @param value [Object] the value to exchange with another thread
# @param timeout [Numeric, nil] in seconds, `nil` blocks indefinitely
# @raise [NotImplementedError]
# @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#122
def do_exchange(value, timeout); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#41
Concurrent::AbstractExchanger::CANCEL = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#10
class Concurrent::AbstractExecutorService < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Concern::Logging
include ::Concurrent::ExecutorService
include ::Concurrent::Concern::Deprecation
# Create a new thread pool.
#
# @return [AbstractExecutorService] a new instance of AbstractExecutorService
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#23
def initialize(opts = T.unsafe(nil), &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#72
def auto_terminate=(value); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#67
def auto_terminate?; end
# Returns the value of attribute fallback_policy.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#18
def fallback_policy; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#42
def kill; end
# Returns the value of attribute name.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#20
def name; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#52
def running?; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#37
def shutdown; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#62
def shutdown?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#57
def shuttingdown?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#32
def to_s; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#47
def wait_for_termination(timeout = T.unsafe(nil)); end
private
# Returns an action which executes the `fallback_policy` once the queue
# size reaches `max_queue`. The reason for the indirection of an action
# is so that the work can be deferred outside of synchronization.
#
# @param args [Array] the arguments to the task which is being handled.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#85
def fallback_action(*args); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#126
def ns_auto_terminate?; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#106
def ns_execute(*args, &task); end
# Callback method called when the executor has been killed.
# The default behavior is to do nothing.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#122
def ns_kill_execution; end
# Callback method called when an orderly shutdown has completed.
# The default behavior is to signal all waiting threads.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#114
def ns_shutdown_execution; end
end
# The set of possible fallback policies that may be set at thread pool creation.
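#
# A minimal sketch of choosing one on a concrete pool (assumes the real
# concurrent-ruby gem is loaded, not just this RBI; the sizes are illustrative):
#
# @example
#   pool = Concurrent::ThreadPoolExecutor.new(
#     max_threads: 2,
#     max_queue: 10,
#     fallback_policy: :caller_runs # one of :abort (default), :discard, :caller_runs
#   )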
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/abstract_executor_service.rb#15
Concurrent::AbstractExecutorService::FALLBACK_POLICIES = T.let(T.unsafe(nil), Array)
# An abstract implementation of local storage, with sub-classes for
# per-thread and per-fiber locals.
#
# Each execution context (EC, thread or fiber) has a lazily initialized array
# of local variable values. Each time a new local variable is created, we
# allocate an "index" for it.
#
# For example, if the allocated index is 1, that means slot #1 in EVERY EC's
# locals array will be used for the value of that variable.
#
# The good thing about using a per-EC structure to hold values, rather than
# a global, is that no synchronization is needed when reading and writing
# those values (since the structure is only ever accessed by a single
# thread).
#
# Of course, when a local variable is GC'd, 1) we need to recover its index
# for use by other new local variables (otherwise the locals arrays could
# get bigger and bigger with time), and 2) we need to null out all the
# references held in the now-unused slots (both to avoid blocking GC of those
# objects, and also to prevent "stale" values from being passed on to a new
# local when the index is reused).
#
# Because we need to null out freed slots, we need to keep references to
# ALL the locals arrays, so we can null out the appropriate slots in all of
# them. This is why we need to use a finalizer to clean up the locals array
# when the EC goes out of scope.
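#
# This machinery is internal; the public entry points built on it are
# `Concurrent::ThreadLocalVar` and `Concurrent::FiberLocalVar`. A minimal
# sketch of the thread-local case (assumes the real gem is loaded, not just
# this RBI):
#
# @example
#   var = Concurrent::ThreadLocalVar.new(0) # 0 is the default seen by every thread
#   var.value = 14                          # visible only to the current thread
#   Thread.new { var.value }.value          #=> 0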
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#35
class Concurrent::AbstractLocals
# @return [AbstractLocals] a new instance of AbstractLocals
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#36
def initialize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#89
def fetch(index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#71
def free_index(index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#55
def next_index(local); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#102
def set(index, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#43
def synchronize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#48
def weak_synchronize; end
private
# When the local goes out of scope, clean up that slot across all locals currently assigned.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#112
def local_finalizer(index); end
# Returns the locals for the current scope, or nil if none exist.
#
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#128
def locals; end
# Returns the locals for the current scope, creating them if necessary.
#
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#133
def locals!; end
# When a thread/fiber goes out of scope, remove the array from @all_arrays.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#119
def thread_fiber_finalizer(array_object_id); end
end
# `Agent` is inspired by Clojure's [agent](http://clojure.org/agents)
# function. An agent is a shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately. `Agent` is (mostly)
# functionally equivalent to Clojure's agent, except where the runtime
# prevents parity.
#
# Agents are reactive, not autonomous - there is no imperative message loop
# and no blocking receive. The state of an Agent should be itself immutable
# and the `#value` of an Agent is always immediately available for reading by
# any thread without any messages, i.e. observation does not require
# cooperation or coordination.
#
# Agent action dispatches are made using the various `#send` methods. These
# methods always return immediately. At some point later, in another thread,
# the following will happen:
#
# 1. The given `action` will be applied to the state of the Agent and the
# `args`, if any were supplied.
# 2. The return value of `action` will be passed to the validator lambda,
# if one has been set on the Agent.
# 3. If the validator succeeds or if no validator was given, the return value
# of the given `action` will become the new `#value` of the Agent. See
# `#initialize` for details.
# 4. If any observers were added to the Agent, they will be notified. See
# `#add_observer` for details.
# 5. If during the `action` execution any other dispatches are made (directly
# or indirectly), they will be held until after the `#value` of the Agent
# has been changed.
#
# If any exceptions are thrown by an action function, no nested dispatches
# will occur, and the exception will be cached in the Agent itself. When an
# Agent has errors cached, any subsequent interactions will immediately throw
# an exception, until the agent's errors are cleared. Agent errors can be
# examined with `#error` and the agent restarted with `#restart`.
#
# The actions of all Agents get interleaved amongst threads in a thread pool.
# At any point in time, at most one action for each Agent is being executed.
# Actions dispatched to an agent from another single agent or thread will
# occur in the order they were sent, potentially interleaved with actions
# dispatched to the same agent from other sources. The `#send` method should
# be used for actions that are CPU limited, while the `#send_off` method is
# appropriate for actions that may block on IO.
#
# Unlike in Clojure, `Agent` cannot participate in `Concurrent::TVar` transactions.
#
# ## Example
#
# ```
# def next_fibonacci(set = nil)
# return [0, 1] if set.nil?
# set + [set[-2..-1].reduce{|sum,x| sum + x }]
# end
#
# # create an agent with an initial value
# agent = Concurrent::Agent.new(next_fibonacci)
#
# # send a few update requests
# 5.times do
# agent.send{|set| next_fibonacci(set) }
# end
#
# # wait for them to complete
# agent.await
#
# # get the current value
# agent.value #=> [0, 1, 1, 2, 3, 5, 8]
# ```
#
# ## Observation
#
# Agents support observers through the {Concurrent::Observable} mixin module.
# Notification of observers occurs every time an action dispatch returns and
# the new value is successfully validated. Observation will *not* occur if the
# action raises an exception, if validation fails, or when a {#restart} occurs.
#
# When notified the observer will receive three arguments: `time`, `old_value`,
# and `new_value`. The `time` argument is the time at which the value change
# occurred. The `old_value` is the value of the Agent when the action began
# processing. The `new_value` is the value to which the Agent was set when the
# action completed. Note that `old_value` and `new_value` may be the same.
# This is not an error. It simply means that the action returned the same
# value.
#
# ## Nested Actions
#
# It is possible for an Agent action to post further actions back to itself.
# The nested actions will be enqueued normally then processed *after* the
# outer action completes, in the order they were sent, possibly interleaved
# with action dispatches from other threads. Nested actions never deadlock
# with one another and a failure in a nested action will never affect the
# outer action.
#
# Nested actions can be called using the Agent reference from the enclosing
# scope or by passing the reference in as a "send" argument. Nested actions
# cannot be posted using `self` from within the action block/proc/lambda; `self`
# in this context will not reference the Agent. The preferred method for
# dispatching nested actions is to pass the Agent as an argument. This allows
# Ruby to more effectively manage the enclosing scope.
#
# Prefer this:
#
# ```
# agent = Concurrent::Agent.new(0)
# agent.send(agent) do |value, this|
# this.send {|v| v + 42 }
# 3.14
# end
# agent.value #=> 45.14
# ```
#
# Over this:
#
# ```
# agent = Concurrent::Agent.new(0)
# agent.send do |value|
# agent.send {|v| v + 42 }
# 3.14
# end
# ```
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @see http://clojure.org/Agents Clojure Agents
# @see http://clojure.org/state Values and Change - Clojure's approach to Identity and State
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#145
class Concurrent::Agent < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Concern::Observable
# Create a new `Agent` with the given initial value and options.
#
# The `:validator` option must be `nil` or a side-effect free proc/lambda
# which takes one argument. On any intended value change the validator, if
# provided, will be called. If the new value is invalid the validator should
# return `false` or raise an error.
#
# The `:error_handler` option must be `nil` or a proc/lambda which takes two
# arguments. When an action raises an error or validation fails, either by
# returning false or raising an error, the error handler will be called. The
# arguments to the error handler will be a reference to the agent itself and
# the error object which was raised.
#
# The `:error_mode` may be either `:continue` (the default if an error
# handler is given) or `:fail` (the default if the error handler is nil or not
# given).
#
# If an action being run by the agent throws an error or doesn't pass
# validation the error handler, if present, will be called. After the
# handler executes if the error mode is `:continue` the Agent will continue
# as if neither the action that caused the error nor the error itself ever
# happened.
#
# If the mode is `:fail` the Agent will become {#failed?} and will stop
# accepting new action dispatches. Any previously queued actions will be
# held until {#restart} is called. The {#value} method will still work,
# returning the value of the Agent before the error.
#
# @option opts
# @option opts
# @option opts
# @param initial [Object] the initial value
# @param opts [Hash] the configuration options
# @return [Agent] a new instance of Agent
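#
# A minimal construction sketch (assumes the real concurrent-ruby gem is
# loaded, not just this RBI):
#
# @example
#   agent = Concurrent::Agent.new(
#     0,
#     validator: ->(new_value) { new_value.is_a?(Integer) },
#     error_handler: ->(agent, error) { warn(error.message) },
#     error_mode: :continue
#   )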
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#220
def initialize(initial, opts = T.unsafe(nil)); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Appropriate for actions that may block on IO.
#
# @param action [Proc] the action dispatch to be enqueued
# @return [Concurrent::Agent] self
# @see #send_off
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#331
def <<(action); end
# Blocks the current thread (indefinitely!) until all actions dispatched
# thus far, from this thread or nested by the Agent, have occurred. Will
# block when {#failed?}. Will never return if a failed Agent is restarted
# via {#restart} with `:clear_actions` true.
#
# Returns a reference to `self` to support method chaining:
#
# ```
# current_value = agent.await.value
# ```
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @return [Agent] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#350
def await; end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param timeout [Float] the maximum number of seconds to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#363
def await_for(timeout); end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param timeout [Float] the maximum number of seconds to wait
# @raise [Concurrent::TimeoutError] when timeout is reached
# @return [Boolean] true if all actions complete before timeout
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#377
def await_for!(timeout); end
# The current value (state) of the Agent, irrespective of any pending or
# in-progress actions. The value is always available and is non-blocking.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#229
def deref; end
# When {#failed?} and {#error_mode} is `:fail`, returns the error object
# which caused the failure, else `nil`. When {#error_mode} is `:continue`
# will *always* return `nil`.
#
# @return [nil, Error] the error which caused the failure when {#failed?}
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#240
def error; end
# The error mode this Agent is operating in. See {#initialize} for details.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#184
def error_mode; end
# Is the Agent in a failed state?
#
# @return [Boolean]
# @see #restart
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#402
def failed?; end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @return [Boolean] true if the action is successfully enqueued, false if
# the Agent is {#failed?}
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#294
def post(*args, &action); end
# When {#failed?} and {#error_mode} is `:fail`, returns the error object
# which caused the failure, else `nil`. When {#error_mode} is `:continue`
# will *always* return `nil`.
#
# @return [nil, Error] the error which caused the failure when {#failed?}
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#240
def reason; end
# When an Agent is {#failed?}, changes the Agent {#value} to `new_value`
# then un-fails the Agent so that action dispatches are allowed again. If
# the `:clear_actions` option is given and true, any actions queued on the
# Agent that were being held while it was failed will be discarded,
# otherwise those held actions will proceed. The `new_value` must pass the
# validator if any, or `restart` will raise an exception and the Agent will
# remain failed with its old {#value} and {#error}. Observers, if any, will
# not be notified of the new state.
#
# @option opts
# @param new_value [Object] the new value for the Agent once restarted
# @param opts [Hash] the configuration options
# @raise [Concurrent::Agent::Error] when not failed
# @return [Boolean] true
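#
# A minimal recovery sketch (assumes the real concurrent-ruby gem is loaded,
# not just this RBI):
#
# @example
#   agent.restart(0, clear_actions: true) if agent.failed?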
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#424
def restart(new_value, opts = T.unsafe(nil)); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @return [Boolean] true if the action is successfully enqueued, false if
# the Agent is {#failed?}
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
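#
# A minimal dispatch sketch (assumes the real concurrent-ruby gem is loaded,
# not just this RBI):
#
# @example
#   counter = Concurrent::Agent.new(0)
#   counter.send(5) { |value, n| value + n }
#   counter.await
#   counter.value #=> 5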
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#278
def send(*args, &action); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @raise [Concurrent::Agent::Error] if the Agent is {#failed?}
# @return [Boolean] true if the action is successfully enqueued
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#287
def send!(*args, &action); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @return [Boolean] true if the action is successfully enqueued, false if
# the Agent is {#failed?}
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#294
def send_off(*args, &action); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @raise [Concurrent::Agent::Error] if the Agent is {#failed?}
# @return [Boolean] true if the action is successfully enqueued
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#302
def send_off!(*args, &action); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @param executor [Concurrent::ExecutorService] the executor on which the
# action is to be dispatched
# @return [Boolean] true if the action is successfully enqueued, false if
# the Agent is {#failed?}
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#311
def send_via(executor, *args, &action); end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param args [Array<Object>] zero or more arguments to be passed to
# the action
# @param action [Proc] the action dispatch to be enqueued
# @param executor [Concurrent::ExecutorService] the executor on which the
# action is to be dispatched
# @raise [Concurrent::Agent::Error] if the Agent is {#failed?}
# @return [Boolean] true if the action is successfully enqueued
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam value [Object] the current {#value} of the Agent
# @yieldparam args [Array<Object>] zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#319
def send_via!(executor, *args, &action); end
# Is the Agent in a failed state?
#
# @return [Boolean]
# @see #restart
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#402
def stopped?; end
# The current value (state) of the Agent, irrespective of any pending or
# in-progress actions. The value is always available and is non-blocking.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#229
def value; end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed. Will block indefinitely when timeout is nil or not given.
#
# Provided mainly for consistency with other classes in this library. Prefer
# the various `await` methods instead.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param timeout [Float] the maximum number of seconds to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#393
def wait(timeout = T.unsafe(nil)); end
private
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#510
def enqueue_action_job(action, args, executor); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#516
def enqueue_await_job(latch); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#543
def execute_next_job; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#576
def handle_error(error); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#529
def ns_enqueue_job(job, index = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#584
def ns_find_last_job_for_thread; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#490
def ns_initialize(initial, opts); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#539
def ns_post_next_job; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#570
def ns_validate(value); end
class << self
# Blocks the current thread (indefinitely!) until all actions dispatched
# thus far to all the given Agents, from this thread or nested by the
# given Agents, have occurred. Will block when any of the agents are
# failed. Will never return if a failed Agent is restarted with
# `:clear_actions` true.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param agents [Array<Concurrent::Agent>] the Agents on which to wait
# @return [Boolean] true
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#449
def await(*agents); end
# Blocks the current thread until all actions dispatched thus far to all
# the given Agents, from this thread or nested by the given Agents, have
# occurred, or the timeout (in seconds) has elapsed.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param timeout [Float] the maximum number of seconds to wait
# @param agents [Array<Concurrent::Agent>] the Agents on which to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#463
def await_for(timeout, *agents); end
# Blocks the current thread until all actions dispatched thus far to all
# the given Agents, from this thread or nested by the given Agents, have
# occurred, or the timeout (in seconds) has elapsed.
#
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @param timeout [Float] the maximum number of seconds to wait
# @param agents [Array<Concurrent::Agent>] the Agents on which to wait
# @raise [Concurrent::TimeoutError] when timeout is reached
# @return [Boolean] true if all actions complete before timeout
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#482
def await_for!(timeout, *agents); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#154
Concurrent::Agent::AWAIT_ACTION = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#151
Concurrent::Agent::AWAIT_FLAG = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#157
Concurrent::Agent::DEFAULT_ERROR_HANDLER = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#160
Concurrent::Agent::DEFAULT_VALIDATOR = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#148
Concurrent::Agent::ERROR_MODES = T.let(T.unsafe(nil), Array)
# Raised during action processing or any other time in an Agent's lifecycle.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#167
class Concurrent::Agent::Error < ::StandardError
# @return [Error] a new instance of Error
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#168
def initialize(message = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#163
class Concurrent::Agent::Job < ::Struct
# Returns the value of attribute action
#
# @return [Object] the current value of action
def action; end
# Sets the attribute action
#
# @param value [Object] the value to set the attribute action to.
# @return [Object] the newly set value
def action=(_); end
# Returns the value of attribute args
#
# @return [Object] the current value of args
def args; end
# Sets the attribute args
#
# @param value [Object] the value to set the attribute args to.
# @return [Object] the newly set value
def args=(_); end
# Returns the value of attribute caller
#
# @return [Object] the current value of caller
def caller; end
# Sets the attribute caller
#
# @param value [Object] the value to set the attribute caller to.
# @return [Object] the newly set value
def caller=(_); end
# Returns the value of attribute executor
#
# @return [Object] the current value of executor
def executor; end
# Sets the attribute executor
#
# @param value [Object] the value to set the attribute executor to.
# @return [Object] the newly set value
def executor=(_); end
class << self
def [](*_arg0); end
def inspect; end
def keyword_init?; end
def members; end
def new(*_arg0); end
end
end
# Raised when a new value obtained during action processing or at `#restart`
# fails validation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#176
class Concurrent::Agent::ValidationError < ::Concurrent::Agent::Error
# @return [ValidationError] a new instance of ValidationError
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/agent.rb#177
def initialize(message = T.unsafe(nil)); end
end
# A thread-safe subclass of Array. This version locks against the object
# itself for every method call, ensuring only one thread can be reading
# or writing at a time. This includes iteration methods like `#each`.
#
# @note `a += b` is **not** a **thread-safe** operation on
# `Concurrent::Array`. It reads array `a`, then it creates a new `Concurrent::Array`
# which is the concatenation of `a` and `b`, then it writes the concatenation to `a`.
# The read and write are independent operations; they do not form a single atomic
# operation, so when two `+=` operations are executed concurrently, updates
# may be lost. Use `#concat` instead.
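# @example A minimal sketch of the safe append pattern (values are illustrative)
#   shared = Concurrent::Array.new
#   threads = 2.times.map do
#     Thread.new { shared.concat([1, 2, 3]) } # atomic; `shared += [1, 2, 3]` could lose updates
#   end
#   threads.each(&:join)
#   shared.length #=> 6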
# @see http://ruby-doc.org/core/Array.html Ruby standard library `Array`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/array.rb#53
class Concurrent::Array < ::Array; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/array.rb#22
Concurrent::ArrayImplementation = Array
# A mixin module that provides simple asynchronous behavior to a class,
# turning it into a simple actor. Loosely based on Erlang's
# [gen_server](http://www.erlang.org/doc/man/gen_server.html), but without
# supervision or linking.
#
# A more feature-rich {Concurrent::Actor} is also available when the
# capabilities of `Async` are too limited.
#
# ```cucumber
# Feature:
# As a stateful, plain old Ruby class
# I want safe, asynchronous behavior
# So my long-running methods don't block the main thread
# ```
#
# The `Async` module is a way to mix simple yet powerful asynchronous
# capabilities into any plain old Ruby object or class, turning each object
# into a simple Actor. Method calls are processed on a background thread. The
# caller is free to perform other actions while processing occurs in the
# background.
#
# Method calls to the asynchronous object are made via two proxy methods:
# `async` (alias `cast`) and `await` (alias `call`). These proxy methods post
# the method call to the object's background thread and return a "future"
# which will eventually contain the result of the method call.
#
# This behavior is loosely patterned after Erlang's `gen_server` behavior.
# When an Erlang module implements the `gen_server` behavior it becomes
# inherently asynchronous. The `start` or `start_link` function spawns a
# process (similar to a thread but much more lightweight and efficient) and
# returns the ID of the process. Using the process ID, other processes can
# send messages to the `gen_server` via the `cast` and `call` methods. Unlike
# Erlang's `gen_server`, however, `Async` classes do not support linking or
# supervision trees.
#
# ## Basic Usage
#
# When this module is mixed into a class, objects of the class become inherently
# asynchronous. Each object gets its own background thread on which to post
# asynchronous method calls. Asynchronous method calls are executed in the
# background one at a time in the order they are received.
#
# To create an asynchronous class, simply mix in the `Concurrent::Async` module:
#
# ```
# class Hello
# include Concurrent::Async
#
# def hello(name)
# "Hello, #{name}!"
# end
# end
# ```
#
# Mixing this module into a class provides each object two proxy methods:
# `async` and `await`. These methods are thread safe with respect to the
# enclosing object. The former proxy allows methods to be called
# asynchronously by posting to the object's internal thread. The latter proxy
# allows a method to be called synchronously but does so safely with respect
# to any pending asynchronous method calls and ensures proper ordering. Both
# methods return a {Concurrent::IVar} which can be inspected for the result
# of the proxied method call. Calling a method with `async` will return a
# `:pending` `IVar` whereas `await` will return a `:complete` `IVar`.
#
# ```
# class Echo
# include Concurrent::Async
#
# def echo(msg)
# print "#{msg}\n"
# end
# end
#
# horn = Echo.new
# horn.echo('zero') # synchronous, not thread-safe
# # returns the actual return value of the method
#
# horn.async.echo('one') # asynchronous, non-blocking, thread-safe
# # returns an IVar in the :pending state
#
# horn.await.echo('two') # synchronous, blocking, thread-safe
# # returns an IVar in the :complete state
# ```
#
# ## Let It Fail
#
# The `async` and `await` proxy methods have built-in error protection based
# on Erlang's famous "let it fail" philosophy. Instance methods should not be
# programmed defensively. When an exception is raised by a delegated method
# the proxy will rescue the exception, expose it to the caller as the `reason`
# attribute of the returned future, then process the next method call.
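#
# A minimal sketch of inspecting a failure (the `Risky` class below is purely
# illustrative):
#
# ```
# class Risky
#   include Concurrent::Async
#
#   def explode
#     raise StandardError, 'boom'
#   end
# end
#
# ivar = Risky.new.async.explode
# ivar.wait
# ivar.rejected? #=> true
# ivar.reason    #=> #<StandardError: boom>
# ```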
#
# ## Calling Methods Internally
#
# External method calls should *always* use the `async` and `await` proxy
# methods. When one method calls another method, the `async` proxy should
# rarely be used and the `await` proxy should *never* be used.
#
# When an object calls one of its own methods using the `await` proxy the
# second call will be enqueued *behind* the currently running method call.
# Any attempt to wait on the result will fail as the second call will never
# run until after the current call completes.
#
# Calling a method using the `await` proxy from within a method that was
# itself called using `async` or `await` will irreversibly deadlock the
# object. Do *not* do this, ever.
#
# ## Instance Variables and Attribute Accessors
#
# Instance variables do not need to be thread-safe so long as they are private.
# Asynchronous method calls are processed in the order they are received and
# are processed one at a time. Therefore private instance variables can only
# be accessed by one thread at a time. This is inherently thread-safe.
#
# When using private instance variables within asynchronous methods, the best
# practice is to read the instance variable into a local variable at the start
# of the method then update the instance variable at the *end* of the method.
# This way, should an exception be raised during method execution the internal
# state of the object will not have been changed.
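#
# A brief sketch of that pattern (the `Logbook` class is illustrative):
#
# ```
# class Logbook
#   include Concurrent::Async
#
#   def record(event)
#     history = (@history || []).dup  # read into a local at the start
#     history << event                # do all the work on the local copy
#     @history = history              # update the instance variable at the end
#     history.length
#   end
# end
#
# book = Logbook.new
# book.async.record(:started) # returns a :pending IVar
# ```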
#
# ### Reader Attributes
#
# The use of `attr_reader` is discouraged. When internal state must be exposed
# externally, it should be done through accessor methods. The instance
# variables exposed by these methods *must* be thread-safe, or they must be
# called using the `async` and `await` proxy methods. These two approaches are
# subtly different.
#
# When internal state is accessed via the `async` and `await` proxy methods,
# the returned value represents the object's state *at the time the call is
# processed*, which may *not* be the state of the object at the time the call
# is made.
#
# To get the state *at the current time*, irrespective of any enqueued method
# calls, a reader method must be called directly. This is inherently unsafe
# unless the instance variable is itself thread-safe, preferably using one
# of the thread-safe classes within this library. Because the thread-safe
# classes within this library are internally-locking or non-locking, they can
# be safely used from within asynchronous methods without causing deadlocks.
#
# Generally speaking, the best practice is to *not* expose internal state via
# reader methods. The best practice is to simply use the method's return value.
#
# ### Writer Attributes
#
# Writer attributes should never be used with asynchronous classes. Changing
# the state externally, even when done in the thread-safe way, is not logically
# consistent. Changes to state need to be timed with respect to all asynchronous
# method calls which may be in-process or enqueued. The only safe practice is to
# pass all necessary data to each method as arguments and let the method update
# the internal state as necessary.
#
# ## Class Constants, Variables, and Methods
#
# ### Class Constants
#
# Class constants do not need to be thread-safe. Since they are read-only and
# immutable they may be safely read both externally and from within
# asynchronous methods.
#
# ### Class Variables
#
# Class variables should be avoided. Class variables represent shared state.
# Shared state is anathema to concurrency. Should there be a need to share
# state using class variables they *must* be thread-safe, preferably
# using the thread-safe classes within this library. When updating class
# variables, never assign a new value/object to the variable itself. Assignment
# is not thread-safe in Ruby. Instead, use the thread-safe update functions
# of the variable itself to change the value.
#
# The best practice is to *never* use class variables with `Async` classes.
#
# ### Class Methods
#
# Class methods which are pure functions are safe. Class methods which modify
# class variables should be avoided, for all the reasons listed above.
#
# ## An Important Note About Thread Safe Guarantees
#
# > Thread safe guarantees can only be made when asynchronous method calls
# > are not mixed with direct method calls. Use only direct method calls
# > when the object is used exclusively on a single thread. Use only
# > `async` and `await` when the object is shared between threads. Once you
# > call a method using `async` or `await`, you should no longer call methods
# > directly on the object. Use `async` and `await` exclusively from then on.
#
# @example
#
# class Echo
# include Concurrent::Async
#
# def echo(msg)
# print "#{msg}\n"
# end
# end
#
# horn = Echo.new
# horn.echo('zero') # synchronous, not thread-safe
# # returns the actual return value of the method
#
# horn.async.echo('one') # asynchronous, non-blocking, thread-safe
# # returns an IVar in the :pending state
#
# horn.await.echo('two') # synchronous, blocking, thread-safe
# # returns an IVar in the :complete state
# @see Concurrent::Actor
# @see https://en.wikipedia.org/wiki/Actor_model "Actor Model" at Wikipedia
# @see http://www.erlang.org/doc/man/gen_server.html Erlang gen_server
# @see http://c2.com/cgi/wiki?LetItCrash "Let It Crash" at http://c2.com/
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#217
module Concurrent::Async
mixes_in_class_methods ::Concurrent::Async::ClassMethods
# Causes the chained method call to be performed asynchronously on the
# object's thread. The delegated method will return a future in the
# `:pending` state and the method call will have been scheduled on the
# object's thread. The final disposition of the method call can be obtained
# by inspecting the returned future.
#
# @note The method call is guaranteed to be thread safe with respect to
# all other method calls against the same object that are called with
# either `async` or `await`. The mutable nature of Ruby references
# (and object orientation in general) prevents any other thread safety
# guarantees. Do NOT mix direct method calls with delegated method calls.
# Use *only* delegated method calls when sharing the object between threads.
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of
# the requested method
# @return [Concurrent::IVar] the pending result of the asynchronous operation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#412
def async; end
# Causes the chained method call to be performed synchronously on the
# current thread. The delegated method will return a future in either the
# `:fulfilled` or `:rejected` state and the delegated method will have
# completed. The final disposition of the delegated method can be obtained
# by inspecting the returned future.
#
# @note The method call is guaranteed to be thread safe with respect to
# all other method calls against the same object that are called with
# either `async` or `await`. The mutable nature of Ruby references
# (and object orientation in general) prevents any other thread safety
# guarantees. Do NOT mix direct method calls with delegated method calls.
# Use *only* delegated method calls when sharing the object between threads.
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of the
# requested method
# @return [Concurrent::IVar] the completed result of the synchronous operation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#430
def await; end
# Causes the chained method call to be performed synchronously on the
# current thread. The delegated method will return a future in either the
# `:fulfilled` or `:rejected` state and the delegated method will have
# completed. The final disposition of the delegated method can be obtained
# by inspecting the returned future.
#
# @note The method call is guaranteed to be thread safe with respect to
# all other method calls against the same object that are called with
# either `async` or `await`. The mutable nature of Ruby references
# (and object orientation in general) prevents any other thread safety
# guarantees. Do NOT mix direct method calls with delegated method calls.
# Use *only* delegated method calls when sharing the object between threads.
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of the
# requested method
# @return [Concurrent::IVar] the completed result of the synchronous operation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#430
def call; end
# Causes the chained method call to be performed asynchronously on the
# object's thread. The delegated method will return a future in the
# `:pending` state and the method call will have been scheduled on the
# object's thread. The final disposition of the method call can be obtained
# by inspecting the returned future.
#
# @note The method call is guaranteed to be thread safe with respect to
# all other method calls against the same object that are called with
# either `async` or `await`. The mutable nature of Ruby references
# (and object orientation in general) prevents any other thread safety
# guarantees. Do NOT mix direct method calls with delegated method calls.
# Use *only* delegated method calls when sharing the object between threads.
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of
# the requested method
# @return [Concurrent::IVar] the pending result of the asynchronous operation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#412
def cast; end
# Initialize the internal serializer and other synchronization mechanisms.
#
# @note This method *must* be called immediately upon object construction.
# This is the only way thread-safe initialization can be guaranteed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#441
def init_synchronization; end
class << self
# @private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#262
def included(base); end
# Check for the presence of a method on an object and determine if a given
# set of arguments matches the required arity.
#
# @note This check is imperfect because of the way Ruby reports the arity of
# methods with a variable number of arguments. It is possible to determine
# if too few arguments are given but impossible to determine if too many
# arguments are given. This check may also fail to recognize dynamic behavior
# of the object, such as methods simulated with `method_missing`.
# @param obj [Object] the object to check against
# @param method [Symbol] the method to check the object for
# @param args [Array] zero or more arguments for the arity check
# @raise [NameError] the object does not respond to `method` method
# @raise [ArgumentError] the given `args` do not match the arity of `method`
# @see http://www.ruby-doc.org/core-2.1.1/Method.html#method-i-arity Method#arity
# @see http://ruby-doc.org/core-2.1.0/Object.html#method-i-respond_to-3F Object#respond_to?
# @see http://www.ruby-doc.org/core-2.1.0/BasicObject.html#method-i-method_missing BasicObject#method_missing
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#250
def validate_argc(obj, method, *args); end
end
end
# Delegates asynchronous, thread-safe method calls to the wrapped object.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#282
class Concurrent::Async::AsyncDelegator < ::Concurrent::Synchronization::LockableObject
# Create a new delegator object wrapping the given delegate.
#
# @param delegate [Object] the object to wrap and delegate method calls to
# @return [AsyncDelegator] a new instance of AsyncDelegator
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#288
def initialize(delegate); end
# Delegates method calls to the wrapped object.
#
# @param method [Symbol] the method being called
# @param args [Array] zero or more arguments to the method
# @raise [NameError] the object does not respond to `method` method
# @raise [ArgumentError] the given `args` do not match the arity of `method`
# @return [IVar] the result of the method call
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#305
def method_missing(method, *args, &block); end
# Perform all enqueued tasks.
#
# This method must be called from within the executor. It must not be
# called while already running. It will loop until the queue is empty.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#330
def perform; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#348
def reset_if_forked; end
private
# Check whether the wrapped object responds to the given method
#
# @param method [Symbol] the method being called
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#322
def respond_to_missing?(method, include_private = T.unsafe(nil)); end
end
# Delegates synchronous, thread-safe method calls to the wrapped object.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#360
class Concurrent::Async::AwaitDelegator
# Create a new delegator object wrapping the given delegate.
#
# @param delegate [AsyncDelegator] the object to wrap and delegate method calls to
# @return [AwaitDelegator] a new instance of AwaitDelegator
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#365
def initialize(delegate); end
# Delegates method calls to the wrapped object.
#
# @param method [Symbol] the method being called
# @param args [Array] zero or more arguments to the method
# @raise [NameError] the object does not respond to `method` method
# @raise [ArgumentError] the given `args` do not match the arity of `method`
# @return [IVar] the result of the method call
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#378
def method_missing(method, *args, &block); end
private
# Check whether the wrapped object responds to the given method
#
# @param method [Symbol] the method being called
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#387
def respond_to_missing?(method, include_private = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#269
module Concurrent::Async::ClassMethods
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/async.rb#270
def new(*args, **_arg1, &block); end
end
# Atoms provide a way to manage shared, synchronous, independent state.
#
# An atom is initialized with an initial value and an optional validation
# proc. At any time the value of the atom can be synchronously and safely
# changed. If a validator is given at construction then any new value
# will be checked against the validator and will be rejected if the
# validator returns false or raises an exception.
#
# There are two ways to change the value of an atom: {#compare_and_set} and
# {#swap}. The former will set the new value if and only if it validates and
# the current value matches the expected old value. The latter will atomically set the
# new value to the result of running the given block if and only if that
# value validates.
#
# ## Example
#
# ```
# def next_fibonacci(set = nil)
# return [0, 1] if set.nil?
# set + [set[-2..-1].reduce{|sum,x| sum + x }]
# end
#
# # create an atom with an initial value
# atom = Concurrent::Atom.new(next_fibonacci)
#
# # send a few update requests
# 5.times do
# atom.swap{|set| next_fibonacci(set) }
# end
#
# # get the current value
# atom.value #=> [0, 1, 1, 2, 3, 5, 8]
# ```
#
# ## Observation
#
# Atoms support observers through the {Concurrent::Observable} mixin module.
# Notification of observers occurs every time the value of the Atom changes.
# When notified the observer will receive three arguments: `time`, `old_value`,
# and `new_value`. The `time` argument is the time at which the value change
# occurred. The `old_value` is the value of the Atom when the change began.
# The `new_value` is the value to which the Atom was set when the change
# completed. Note that `old_value` and `new_value` may be the same. This is
# not an error. It simply means that the change operation returned the same
# value.
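#
# For example (a small sketch; the output format is illustrative):
#
# ```
# atom = Concurrent::Atom.new(0)
# atom.add_observer do |time, old_value, new_value|
#   puts "#{time}: #{old_value} -> #{new_value}"
# end
# atom.swap { |v| v + 1 } # the observer fires after the change completes
# ```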
#
# Unlike in Clojure, `Atom` cannot participate in {Concurrent::TVar} transactions.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together, in an all-or-nothing transaction.
#
# @see http://clojure.org/atoms Clojure Atoms
# @see http://clojure.org/state Values and Change - Clojure's approach to Identity and State
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#95
class Concurrent::Atom < ::Concurrent::Synchronization::Object
include ::Concurrent::Concern::Observable
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new atom with the given initial value.
#
# @option opts
# @param value [Object] The initial value
# @param opts [Hash] The options used to configure the atom
# @raise [ArgumentError] if the validator is not a `Proc` (when given)
# @return [Atom] a new instance of Atom
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#121
def initialize(value, opts = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# Atomically sets the value of atom to the new value if and only if the
# current value of the atom is identical to the old value and the new
# value successfully validates against the (optional) validator given
# at construction.
#
# @param old_value [Object] The expected current value.
# @param new_value [Object] The intended new value.
# @return [Boolean] True if the value is changed else false.
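# @example A minimal sketch
#   atom = Concurrent::Atom.new(:init)
#   atom.compare_and_set(:init, :ready) #=> true
#   atom.compare_and_set(:init, :other) #=> false; the value is already :ready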
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#181
def compare_and_set(old_value, new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def deref; end
# Atomically sets the value of atom to the new value without regard for the
# current value so long as the new value successfully validates against the
# (optional) validator given at construction.
#
# @param new_value [Object] The intended new value.
# @return [Object] The final value of the atom after all operations and
# validations are complete.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#198
def reset(new_value); end
# Atomically swaps the value of atom using the given block. The current
# value will be passed to the block, as will any arguments passed as
# arguments to the function. The new value will be validated against the
# (optional) validator proc given at construction. If validation fails the
# value will not be changed.
#
# Internally, {#swap} reads the current value, applies the block to it, and
# attempts to compare-and-set it in. Since another thread may have changed
# the value in the intervening time, it may have to retry, and does so in a
# spin loop. The net effect is that the value will always be the result of
# the application of the supplied block to a current value, atomically.
# However, because the block might be called multiple times, it must be free
# of side effects.
#
# @note The given block may be called multiple times, and thus should be free
# of side effects.
# @param args [Object] Zero or more arguments passed to the block.
# @raise [ArgumentError] When no block is given.
# @return [Object] The final value of the atom after all operations and
# validations are complete.
# @yield [value, args] Calculates a new value for the atom based on the
# current value and any supplied arguments.
# @yieldparam value [Object] The current value of the atom.
# @yieldparam args [Object] All arguments passed to the function, in order.
# @yieldreturn [Object] The intended new value of the atom.
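# @example A minimal sketch (the block may be retried, so it is side-effect free)
#   balance = Concurrent::Atom.new(100)
#   balance.swap(30) { |current, amount| current - amount } #=> 70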
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#157
def swap(*args); end
# The current value of the atom.
#
# @return [Object] The current value.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def value; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_value(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_value(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_value(&block); end
# Is the new value valid?
#
# @param new_value [Object] The intended new value.
# @return [Boolean] false if the validator function returns false or raises
# an exception else true
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atom.rb#216
def valid?(new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def value=(value); end
end
# A boolean value that can be updated atomically. Reads and writes to an atomic
# boolean are thread-safe and guaranteed to succeed. Reads and writes may block
# briefly but no explicit locking is required.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together, in an all-or-nothing transaction.
# Performance:
#
# ```
# Testing with ruby 2.1.2
# Testing with Concurrent::MutexAtomicBoolean...
# 2.790000 0.000000 2.790000 ( 2.791454)
# Testing with Concurrent::CAtomicBoolean...
# 0.740000 0.000000 0.740000 ( 0.740206)
#
# Testing with jruby 1.9.3
# Testing with Concurrent::MutexAtomicBoolean...
# 5.240000 2.520000 7.760000 ( 3.683000)
# Testing with Concurrent::JavaAtomicBoolean...
# 3.340000 0.010000 3.350000 ( 0.855000)
# ```
#
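# @example A small usage sketch
#   done = Concurrent::AtomicBoolean.new(false)
#   done.make_true #=> true, because this call changed the value
#   done.true?     #=> true
#   done.make_true #=> false, it was already true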
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicBoolean.html java.util.concurrent.atomic.AtomicBoolean
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_boolean.rb#119
class Concurrent::AtomicBoolean < ::Concurrent::MutexAtomicBoolean
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_boolean.rb#121
def inspect; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_boolean.rb#121
def to_s; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_boolean.rb#82
Concurrent::AtomicBooleanImplementation = Concurrent::MutexAtomicBoolean
# Define update methods that use direct paths
#
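# In broad strokes (see the linked source for the exact behavior): `#update`
# retries the compare-and-set until it succeeds and returns the new value,
# `#try_update` makes a single attempt and returns `nil` on failure, and
# `#try_update!` makes a single attempt and raises
# `Concurrent::ConcurrentUpdateError` on failure. Because the block may run
# more than once with `#update`, it should be free of side effects.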
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/atomic_direct_update.rb#9
module Concurrent::AtomicDirectUpdate
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/atomic_direct_update.rb#15
def try_update; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/atomic_direct_update.rb#24
def try_update!; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/atomic_direct_update.rb#10
def update; end
end
# A numeric value that can be updated atomically. Reads and writes to an atomic
# fixnum are thread-safe and guaranteed to succeed. Reads and writes may block
# briefly but no explicit locking is required.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together, in an all-or-nothing transaction.
# Performance:
#
# ```
# Testing with ruby 2.1.2
# Testing with Concurrent::MutexAtomicFixnum...
# 3.130000 0.000000 3.130000 ( 3.136505)
# Testing with Concurrent::CAtomicFixnum...
# 0.790000 0.000000 0.790000 ( 0.785550)
#
# Testing with jruby 1.9.3
# Testing with Concurrent::MutexAtomicFixnum...
# 5.460000 2.460000 7.920000 ( 3.715000)
# Testing with Concurrent::JavaAtomicFixnum...
# 4.520000 0.030000 4.550000 ( 1.187000)
# ```
#
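# @example A small usage sketch
#   counter = Concurrent::AtomicFixnum.new(0)
#   counter.increment    #=> 1
#   counter.increment(5) #=> 6
#   counter.compare_and_set(6, 0) #=> true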
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html java.util.concurrent.atomic.AtomicLong
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_fixnum.rb#136
class Concurrent::AtomicFixnum < ::Concurrent::MutexAtomicFixnum
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_fixnum.rb#138
def inspect; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_fixnum.rb#138
def to_s; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_fixnum.rb#99
Concurrent::AtomicFixnumImplementation = Concurrent::MutexAtomicFixnum
# An atomic reference which maintains an object reference along with a mark bit
# that can be updated atomically.
#
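# @example A brief sketch: a value tagged with a boolean mark
#   ref = Concurrent::AtomicMarkableReference.new(:pending, false)
#   ref.compare_and_set(:pending, :done, false, true) #=> true
#   ref.get #=> [:done, true]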
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicMarkableReference.html java.util.concurrent.atomic.AtomicMarkableReference
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#10
class Concurrent::AtomicMarkableReference < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [AtomicMarkableReference] a new instance of AtomicMarkableReference
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#15
def initialize(value = T.unsafe(nil), mark = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# Atomically sets the value and mark to the given updated value and
# mark given both:
# - the current value == the expected value &&
# - the current mark == the expected mark
#
# @param expected_val [Object] the expected value
# @param new_val [Object] the new value
# @param expected_mark [Boolean] the expected mark
# @param new_mark [Boolean] the new mark
# @return [Boolean] `true` if successful. A `false` return indicates
# that the actual value was not equal to the expected value or the
# actual mark was not equal to the expected mark
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#33
def compare_and_set(expected_val, new_val, expected_mark, new_mark); end
# Atomically sets the value and mark to the given updated value and
# mark given both:
# - the current value == the expected value &&
# - the current mark == the expected mark
#
# @param expected_val [Object] the expected value
# @param new_val [Object] the new value
# @param expected_mark [Boolean] the expected mark
# @param new_mark [Boolean] the new mark
# @return [Boolean] `true` if successful. A `false` return indicates
# that the actual value was not equal to the expected value or the
# actual mark was not equal to the expected mark
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#33
def compare_and_swap(expected_val, new_val, expected_mark, new_mark); end
# Gets the current reference and marked values.
#
# @return [Array] the current reference and marked values
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#64
def get; end
# Gets the current marked value
#
# @return [Boolean] the current marked value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#78
def mark; end
# Gets the current marked value
#
# @return [Boolean] the current marked value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#78
def marked?; end
# _Unconditionally_ sets both the reference and the mark to the given
# values.
#
# @param new_val [Object] the new value
# @param new_mark [Boolean] the new mark
# @return [Array] both the new value and the new mark
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#91
def set(new_val, new_mark); end
# Pass the current value to the given block, replacing it with the
# block's result. Simply return nil if update fails.
#
# @return [Array] the new value and marked state, or nil if
# the update failed
# @yield [Object] Calculate a new value and marked state for the atomic
# reference using the given (old) value and (old) marked state
# @yieldparam old_val [Object] the starting value of the atomic reference
# @yieldparam old_mark [Boolean] the starting state of marked
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#152
def try_update; end
# Pass the current value to the given block, replacing it
# with the block's result. Raise an exception if the update
# fails.
#
# @raise [Concurrent::ConcurrentUpdateError] if the update fails
# @return [Array] the new value and marked state
# @yield [Object] Calculate a new value and marked state for the atomic
# reference using the given (old) value and (old) marked state
# @yieldparam old_val [Object] the starting value of the atomic reference
# @yieldparam old_mark [Boolean] the starting state of marked
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#128
def try_update!; end
# Pass the current value and marked state to the given block, replacing it
# with the block's results. May retry if the value changes during the
# block's execution.
#
# @return [Array] the new value and new mark
# @yield [Object] Calculate a new value and marked state for the atomic
# reference using the given (old) value and (old) marked state
# @yieldparam old_val [Object] the starting value of the atomic reference
# @yieldparam old_mark [Boolean] the starting state of marked
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#105
def update; end
# Gets the current value of the reference
#
# @return [Object] the current value of the reference
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#71
def value; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_reference(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_markable_reference.rb#163
def immutable_array(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def reference; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def reference=(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_reference(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_reference(&block); end
end
# Special "compare and set" handling of numeric values.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/numeric_cas_wrapper.rb#7
module Concurrent::AtomicNumericCompareAndSetWrapper
# Atomically sets the value to the given updated value if
# the current value == the expected value.
#
# @param old_value [Object] the expected value
# @param new_value [Object] the new value
# @return [Boolean] `true` if successful. A `false` return indicates
# that the actual value was not equal to the expected value.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/numeric_cas_wrapper.rb#10
def compare_and_set(old_value, new_value); end
end
# An object reference that may be updated atomically. All read and write
# operations have java volatile semantic.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together, in an all-or-nothing transaction.
#
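# @example A minimal sketch using the retrying `#update` helper
#   ref = Concurrent::AtomicReference.new([])
#   ref.update { |list| list + [:item] } # the block may run more than once, keep it side-effect free
#   ref.get #=> [:item]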
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/package-summary.html
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_reference.rb#126
class Concurrent::AtomicReference < ::Concurrent::MutexAtomicReference
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_reference.rb#129
def inspect; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_reference.rb#129
def to_s; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/atomic_reference.rb#18
Concurrent::AtomicReferenceImplementation = Concurrent::MutexAtomicReference
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/set.rb#30
class Concurrent::CRubySet < ::Set
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#18
def initialize(*args, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def &(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def +(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def -(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def <(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def <<(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def <=(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def <=>(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def ==(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def ===(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def >(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def >=(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def ^(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def add(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def add?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def classify(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def clear(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def collect!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def compare_by_identity(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def compare_by_identity?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def delete(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def delete?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def delete_if(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def difference(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def disjoint?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def divide(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def each(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def empty?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def eql?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def filter!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def flatten(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def flatten!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def flatten_merge(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def freeze(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def hash(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def include?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def inspect(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def intersect?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def intersection(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def join(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def keep_if(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def length(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def map!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def member?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def merge(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def pretty_print(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def pretty_print_cycle(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def proper_subset?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def proper_superset?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def reject!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def replace(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def reset(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def select!(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def size(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def subset?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def subtract(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def superset?(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def to_a(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def to_s(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def to_set(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def union(*args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#32
def |(*args); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#23
def initialize_copy(other); end
end
# A thread pool that dynamically grows and shrinks to fit the current workload.
# New threads are created as needed, existing threads are reused, and threads
# that remain idle for too long are killed and removed from the pool. These
# pools are particularly suited to applications that perform a high volume of
# short-lived tasks.
#
# On creation a `CachedThreadPool` has zero running threads. New threads are
# created as new operations are posted via `#post`. The size of the pool
# will grow until `#max_length` threads are in the pool or until the number
# of threads exceeds the number of running and pending operations. When a new
# operation is posted to the pool, the first available idle thread will be tasked
# with the new operation.
#
# Should a thread crash for any reason the thread will immediately be removed
# from the pool. Similarly, threads which remain idle for an extended period
# of time will be killed and reclaimed. Thus these thread pools are very
# efficient at reclaiming unused resources.
#
# The API and behavior of this class are based on Java's `CachedThreadPool`
#
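# @example A minimal sketch (the posted blocks are illustrative)
#   pool = Concurrent::CachedThreadPool.new
#   10.times { |i| pool.post { i * i } }
#   pool.shutdown
#   pool.wait_for_termination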
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/cached_thread_pool.rb#27
class Concurrent::CachedThreadPool < ::Concurrent::ThreadPoolExecutor
# Create a new thread pool.
#
# @option opts
# @param opts [Hash] the options defining pool behavior.
# @raise [ArgumentError] if `fallback_policy` is not a known policy
# @return [CachedThreadPool] a new instance of CachedThreadPool
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executors.html#newCachedThreadPool--
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/cached_thread_pool.rb#39
def initialize(opts = T.unsafe(nil)); end
private
# Create a new thread pool.
#
# @option opts
# @param opts [Hash] the options defining pool behavior.
# @raise [ArgumentError] if `fallback_policy` is not a known policy
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executors.html#newCachedThreadPool--
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/cached_thread_pool.rb#51
def ns_initialize(opts); end
end
# Raised when an asynchronous operation is cancelled before execution.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#9
class Concurrent::CancelledOperationError < ::Concurrent::Error; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#4
module Concurrent::Collection; end
# A thread safe observer set implemented using copy-on-read approach:
# observers are added and removed from a thread safe collection; every time
# a notification is required the internal data structure is copied to
# prevent concurrency issues
#
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#12
class Concurrent::Collection::CopyOnNotifyObserverSet < ::Concurrent::Synchronization::LockableObject
# @api private
# @return [CopyOnNotifyObserverSet] a new instance of CopyOnNotifyObserverSet
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#14
def initialize; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#20
def add_observer(observer = T.unsafe(nil), func = T.unsafe(nil), &block); end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#55
def count_observers; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#39
def delete_observer(observer); end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#47
def delete_observers; end
# Notifies all registered observers with optional args and deletes them.
#
# @api private
# @param args [Object] arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#72
def notify_and_delete_observers(*args, &block); end
# Notifies all registered observers with optional args
#
# @api private
# @param args [Object] arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#62
def notify_observers(*args, &block); end
protected
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#80
def ns_initialize; end
private
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#86
def duplicate_and_clear_observers; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#94
def duplicate_observers; end
# @api private
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb#98
def notify_to(observers, *args); end
end
# A thread safe observer set implemented using copy-on-write approach:
# every time an observer is added or removed the whole internal data structure is
# duplicated and replaced with a new one.
#
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#11
class Concurrent::Collection::CopyOnWriteObserverSet < ::Concurrent::Synchronization::LockableObject
# @api private
# @return [CopyOnWriteObserverSet] a new instance of CopyOnWriteObserverSet
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#13
def initialize; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#19
def add_observer(observer = T.unsafe(nil), func = T.unsafe(nil), &block); end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#56
def count_observers; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#40
def delete_observer(observer); end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#50
def delete_observers; end
# Notifies all registered observers with optional args and deletes them.
#
# @api private
# @param args [Object] arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#72
def notify_and_delete_observers(*args, &block); end
# Notifies all registered observers with optional args
#
# @api private
# @param args [Object] arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#63
def notify_observers(*args, &block); end
protected
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#80
def ns_initialize; end
private
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#102
def clear_observers_and_return_old; end
# @api private
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#86
def notify_to(observers, *args); end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#94
def observers; end
# @api private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb#98
def observers=(new_set); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#10
Concurrent::Collection::MapImplementation = Concurrent::Collection::MriMapBackend
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#10
class Concurrent::Collection::MriMapBackend < ::Concurrent::Collection::NonConcurrentMapBackend
# @return [MriMapBackend] a new instance of MriMapBackend
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#12
def initialize(options = T.unsafe(nil), &default_proc); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#17
def []=(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#61
def clear; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#33
def compute(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#21
def compute_if_absent(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#29
def compute_if_present(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#53
def delete(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#57
def delete_pair(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#49
def get_and_set(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#37
def merge_pair(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#45
def replace_if_exists(key, new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#41
def replace_pair(key, old_value, new_value); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#9
class Concurrent::Collection::NonConcurrentMapBackend
# WARNING: all public methods of the class must operate on the @backend
# directly without calling each other. This is important because of the
# SynchronizedMapBackend which uses a non-reentrant mutex for performance
# reasons.
#
# @return [NonConcurrentMapBackend] a new instance of NonConcurrentMapBackend
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#15
def initialize(options = T.unsafe(nil), &default_proc); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#21
def [](key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#25
def []=(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#94
def clear; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#59
def compute(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#29
def compute_if_absent(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#53
def compute_if_present(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#81
def delete(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#85
def delete_pair(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#99
def each_pair; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#71
def get_and_set(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#110
def get_or_default(key, default_value); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#77
def key?(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#63
def merge_pair(key, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#46
def replace_if_exists(key, new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#37
def replace_pair(key, old_value, new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#106
def size; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#130
def dupped_backend; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#124
def initialize_copy(other); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#134
def pair?(key, expected_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#116
def set_backend(default_proc); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#138
def store_computed_value(key, new_value); end
end
# A queue collection in which the elements are sorted based on their
# comparison (spaceship) operator `<=>`. Items are added to the queue
# at a position relative to their priority. On removal the element
# with the "highest" priority is removed. By default the sort order is
# from highest to lowest, but a lowest-to-highest sort order can be
# set on construction.
#
# The API is based on the `Queue` class from the Ruby standard library.
#
# The pure Ruby implementation, `RubyNonConcurrentPriorityQueue`, uses a heap algorithm
# stored in an array. The algorithm is based on the work of Robert Sedgewick
# and Kevin Wayne.
#
# The JRuby native implementation is a thin wrapper around the standard
# library `java.util.PriorityQueue`.
#
# When running under JRuby the class `NonConcurrentPriorityQueue` extends `JavaNonConcurrentPriorityQueue`.
# When running under all other interpreters it extends `RubyNonConcurrentPriorityQueue`.
#
# @note This implementation is *not* thread safe.
# @see http://en.wikipedia.org/wiki/Priority_queue
# @see http://ruby-doc.org/stdlib-2.0.0/libdoc/thread/rdoc/Queue.html
# @see http://algs4.cs.princeton.edu/24pq/index.php#2.6
# @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
# @see http://docs.oracle.com/javase/7/docs/api/java/util/PriorityQueue.html
#
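# A brief usage sketch (illustrative only, relying solely on the default
# highest-first ordering described above):
#
# @example
#   queue = Concurrent::Collection::NonConcurrentPriorityQueue.new
#   queue.push(1)
#   queue.push(3)
#   queue.push(2)
#   queue.pop  #=> 3 (the highest-priority item is removed first by default)
#   queue.peek #=> 2
#   queue.size #=> 2
#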
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/non_concurrent_priority_queue.rb#50
class Concurrent::Collection::NonConcurrentPriorityQueue < ::Concurrent::Collection::RubyNonConcurrentPriorityQueue
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#78
def <<(item); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#65
def deq; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#78
def enq(item); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#48
def has_priority?(item); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#65
def shift; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#54
def size; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/non_concurrent_priority_queue.rb#10
Concurrent::Collection::NonConcurrentPriorityQueueImplementation = Concurrent::Collection::RubyNonConcurrentPriorityQueue
# A queue collection in which the elements are sorted based on their
# comparison (spaceship) operator `<=>`. Items are added to the queue
# at a position relative to their priority. On removal the element
# with the "highest" priority is removed. By default the sort order is
# from highest to lowest, but a lowest-to-highest sort order can be
# set on construction.
#
# The API is based on the `Queue` class from the Ruby standard library.
#
# The pure Ruby implementation, `RubyNonConcurrentPriorityQueue`, uses a heap algorithm
# stored in an array. The algorithm is based on the work of Robert Sedgewick
# and Kevin Wayne.
#
# The JRuby native implementation is a thin wrapper around the standard
# library `java.util.PriorityQueue`.
#
# When running under JRuby the class `NonConcurrentPriorityQueue` extends `JavaNonConcurrentPriorityQueue`.
# When running under all other interpreters it extends `RubyNonConcurrentPriorityQueue`.
#
# @note This implementation is *not* thread safe.
# @see http://en.wikipedia.org/wiki/Priority_queue
# @see http://ruby-doc.org/stdlib-2.0.0/libdoc/thread/rdoc/Queue.html
# @see http://algs4.cs.princeton.edu/24pq/index.php#2.6
# @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
# @see http://docs.oracle.com/javase/7/docs/api/java/util/PriorityQueue.html
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#8
class Concurrent::Collection::RubyNonConcurrentPriorityQueue
# Create a new priority queue with no items.
#
# @option opts
# @param opts [Hash] the options for creating the queue
# @return [RubyNonConcurrentPriorityQueue] a new instance of RubyNonConcurrentPriorityQueue
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#11
def initialize(opts = T.unsafe(nil)); end
# Inserts the specified element into this priority queue.
#
# @param item [Object] the item to insert onto the queue
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#78
def <<(item); end
# Removes all of the elements from this priority queue.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#18
def clear; end
# Deletes all items from `self` that are equal to `item`.
#
# @param item [Object] the item to be removed from the queue
# @return [Object] true if the item is found else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#25
def delete(item); end
# Retrieves and removes the head of this queue, or returns `nil` if this
# queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#65
def deq; end
# Returns `true` if `self` contains no elements.
#
# @return [Boolean] true if there are no items in the queue else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#43
def empty?; end
# Inserts the specified element into this priority queue.
#
# @param item [Object] the item to insert onto the queue
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#78
def enq(item); end
# Returns `true` if the given item is present in `self` (that is, if any
# element == `item`), otherwise returns false.
#
# @param item [Object] the item to search for
# @return [Boolean] true if the item is found else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#48
def has_priority?(item); end
# Returns `true` if the given item is present in `self` (that is, if any
# element == `item`), otherwise returns false.
#
# @param item [Object] the item to search for
# @return [Boolean] true if the item is found else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#48
def include?(item); end
# The current length of the queue.
#
# @return [Fixnum] the number of items in the queue
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#54
def length; end
# Retrieves, but does not remove, the head of this queue, or returns `nil`
# if this queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#60
def peek; end
# Retrieves and removes the head of this queue, or returns `nil` if this
# queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#65
def pop; end
# Inserts the specified element into this priority queue.
#
# @param item [Object] the item to insert onto the queue
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#78
def push(item); end
# Retrieves and removes the head of this queue, or returns `nil` if this
# queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#65
def shift; end
# The current length of the queue.
#
# @return [Fixnum] the number of items in the queue
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#54
def size; end
private
# Are the items at the given indexes ordered based on the priority
# order specified at construction?
#
# @param x [Integer] the first index from which to retrieve a comparable value
# @param y [Integer] the second index from which to retrieve a comparable value
# @return [Boolean] true if the two elements are in the correct priority order
# else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#119
def ordered?(x, y); end
# Percolate down to maintain heap invariant.
#
# @param k [Integer] the index at which to start the percolation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#128
def sink(k); end
# Exchange the values at the given indexes within the internal array.
#
# @param x [Integer] the first index to swap
# @param y [Integer] the second index to swap
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#103
def swap(x, y); end
# Percolate up to maintain heap invariant.
#
# @param k [Integer] the index at which to start the percolation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#147
def swim(k); end
class << self
# @!macro priority_queue_method_from_list
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb#89
def from_list(list, opts = T.unsafe(nil)); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#2
module Concurrent::Concern; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/deprecation.rb#8
module Concurrent::Concern::Deprecation
include ::Concurrent::Concern::Logging
extend ::Concurrent::Concern::Logging
extend ::Concurrent::Concern::Deprecation
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/deprecation.rb#12
def deprecated(message, strip = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/deprecation.rb#27
def deprecated_method(old_name, new_name); end
end
# Object references in Ruby are mutable. This can lead to serious problems when
# the `#value` of a concurrent object is a mutable reference, which is always the
# case unless the value is a `Fixnum`, `Symbol`, or similar "primitive" data type.
# Most classes in this library that expose a `#value` getter method do so using the
# `Dereferenceable` mixin module.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#11
module Concurrent::Concern::Dereferenceable
# Return the value this object represents after applying the options specified
# by the `#set_deref_options` method.
#
# @return [Object] the current value of the object
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#21
def deref; end
# Return the value this object represents after applying the options specified
# by the `#set_deref_options` method.
#
# @return [Object] the current value of the object
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#21
def value; end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#63
def apply_deref_options(value); end
# Set the options which define the operations #value performs before
# returning data to the caller (dereferencing).
#
# @note Most classes that include this module will call `#set_deref_options`
# from within the constructor, thus allowing these options to be set at
# object creation.
# @option opts
# @option opts
# @option opts
# @param opts [Hash] the options defining dereference behavior.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#54
def ns_set_deref_options(opts); end
# Set the options which define the operations #value performs before
# returning data to the caller (dereferencing).
#
# @note Most classes that include this module will call `#set_deref_options`
# from within the constructor, thus allowing these options to be set at
# object creation.
# @option opts
# @option opts
# @option opts
# @param opts [Hash] the options defining dereference behavior.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#48
def set_deref_options(opts = T.unsafe(nil)); end
# Set the internal value of this object
#
# @param value [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/dereferenceable.rb#31
def value=(value); end
end
# Include where logging is needed
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#9
module Concurrent::Concern::Logging
# Logs through {Concurrent.global_logger}; it can be overridden by setting @logger
#
# @param level [Integer] one of Concurrent::Concern::Logging constants
# @param progname [String] e.g. a path of an Actor
# @param message [String, nil] when nil block is used to generate the message
# @yieldreturn [String] a message
#
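# A minimal usage sketch (illustrative; the progname and message shown here
# are arbitrary):
#
# @example
#   class Worker
#     include Concurrent::Concern::Logging
#
#     def run
#       log(Concurrent::Concern::Logging::INFO, self.class.name) { 'starting work' }
#     end
#   end
#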
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#19
def log(level, progname, message = T.unsafe(nil), &block); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::DEBUG = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::ERROR = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::FATAL = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::INFO = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#12
Concurrent::Concern::Logging::SEV_LABEL = T.let(T.unsafe(nil), Array)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::UNKNOWN = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#11
Concurrent::Concern::Logging::WARN = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#10
module Concurrent::Concern::Obligation
include ::Concurrent::Concern::Dereferenceable
# Has the obligation completed processing?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#49
def complete?; end
# @example allows an Obligation to be raised
#   rejected_ivar = IVar.new.fail
# raise rejected_ivar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#126
def exception(*args); end
# Has the obligation been fulfilled?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#20
def fulfilled?; end
# Is the obligation still awaiting completion of processing?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#56
def incomplete?; end
# Wait until obligation is complete or the timeout is reached. Will re-raise
# any exceptions raised during processing (but will not raise an exception
# on timeout).
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @raise [Exception] raises the reason when rejected
# @return [Obligation] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#86
def no_error!(timeout = T.unsafe(nil)); end
# Is obligation completion still pending?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#35
def pending?; end
# Has the obligation been fulfilled?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#20
def realized?; end
# If an exception was raised during processing this will return the
# exception object. Will return `nil` when the state is pending or if
# the obligation has been successfully fulfilled.
#
# @return [Exception] the exception raised during processing or `nil`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#119
def reason; end
# Has the obligation been rejected?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#28
def rejected?; end
# The current state of the obligation.
#
# @return [Symbol] the current state
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#110
def state; end
# Is the obligation still unscheduled?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#42
def unscheduled?; end
# The current value of the obligation. Will be `nil` while the state is
# pending or the operation has been rejected.
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [Object] see Dereferenceable#deref
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#65
def value(timeout = T.unsafe(nil)); end
# The current value of the obligation. Will be `nil` while the state is
# pending or the operation has been rejected. Will re-raise any exceptions
# raised during processing (but will not raise an exception on timeout).
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @raise [Exception] raises the reason when rejected
# @return [Object] see Dereferenceable#deref
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#98
def value!(timeout = T.unsafe(nil)); end
# Wait until obligation is complete or the timeout has been reached.
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [Obligation] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#74
def wait(timeout = T.unsafe(nil)); end
# Wait until obligation is complete or the timeout is reached. Will re-raise
# any exceptions raised during processing (but will not raise an exception
# on timeout).
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @raise [Exception] raises the reason when rejected
# @return [Obligation] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#86
def wait!(timeout = T.unsafe(nil)); end
protected
# Atomic compare and set operation
# State is set to `next_state` only if `current state == expected_current`.
#
# @param next_state [Symbol]
# @param expected_current [Symbol]
# @return [Boolean] true if the state was changed, false otherwise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#174
def compare_and_set_state(next_state, *expected_current); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#145
def event; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#134
def get_arguments_from(opts = T.unsafe(nil)); end
# Executes the block within mutex if current state is included in expected_states
#
# @return block value if executed, false otherwise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#190
def if_state(*expected_states); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#139
def init_obligation; end
# Am I in the current state?
#
# @param expected [Symbol] The state to check against
# @return [Boolean] true if in the expected state else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#210
def ns_check_state?(expected); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#215
def ns_set_state(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#150
def set_state(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/obligation.rb#161
def state=(value); end
end
# The [observer pattern](http://en.wikipedia.org/wiki/Observer_pattern) is one
# of the most useful design patterns.
#
# The workflow is very simple:
# - an `observer` can register itself to a `subject` via a callback
# - many `observers` can be registered to the same `subject`
# - the `subject` notifies all registered observers when its status changes
# - an `observer` can deregister itself when it is no longer interested in
# receiving event notifications
#
# In a single threaded environment the whole pattern is very easy: the
# `subject` can use a simple data structure to manage all its subscribed
# `observer`s and every `observer` can react directly to every event without
# caring about synchronization.
#
# In a multi-threaded environment things are more complex: the `subject` must
# synchronize access to its data structure, and to do so it currently uses
# two specialized ObserverSets: {Concurrent::Collection::CopyOnWriteObserverSet}
# and {Concurrent::Collection::CopyOnNotifyObserverSet}.
#
# When implementing an `observer` there's a very important rule to remember:
# **there are no guarantees about the thread that will execute the callback**
#
# Let's take this example
# ```
# class Observer
# def initialize
# @count = 0
# end
#
# def update
# @count += 1
# end
# end
#
# obs = Observer.new
# [obj1, obj2, obj3, obj4].each { |o| o.add_observer(obs) }
# # execute [obj1, obj2, obj3, obj4]
# ```
#
# `obs` is wrong because the variable `@count` can be accessed by different
# threads at the same time, so it should be synchronized (using either a Mutex
# or an AtomicFixnum), as in the corrected sketch below.
#
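# A corrected version of the observer above (an illustrative sketch; an
# AtomicFixnum would work equally well in place of the Mutex):
#
# ```
# class SafeObserver
#   def initialize
#     @count = 0
#     @mutex = Mutex.new
#   end
#
#   def update(*_args)
#     @mutex.synchronize { @count += 1 }
#   end
# end
#
# obs = SafeObserver.new
# [obj1, obj2, obj3, obj4].each { |o| o.add_observer(obs) }
# ```
#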
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#50
module Concurrent::Concern::Observable
# Adds an observer to this set. If a block is passed, the observer will be
# created by this method and no other params should be passed.
#
# @param observer [Object] the observer to add
# @param func [Symbol] the function to call on the observer during notification.
# Default is :update
# @return [Object] the added observer
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#61
def add_observer(observer = T.unsafe(nil), func = T.unsafe(nil), &block); end
# Return the number of observers associated with this object.
#
# @return [Integer] the observers count
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#101
def count_observers; end
# Remove `observer` as an observer on this object so that it will no
# longer receive notifications.
#
# @param observer [Object] the observer to remove
# @return [Object] the deleted observer
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#82
def delete_observer(observer); end
# Remove all observers associated with this object.
#
# @return [Observable] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#91
def delete_observers; end
# As `#add_observer` but can be used for chaining.
#
# @param observer [Object] the observer to add
# @param func [Symbol] the function to call on the observer during notification.
# @return [Observable] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#70
def with_observer(observer = T.unsafe(nil), func = T.unsafe(nil), &block); end
protected
# Returns the value of attribute observers.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#107
def observers; end
# Sets the attribute observers
#
# @param value the value to set the attribute observers to.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/observable.rb#107
def observers=(_arg0); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#70
class Concurrent::ConcurrentUpdateError < ::ThreadError; end
# frozen pre-allocated backtrace to speed ConcurrentUpdateError
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#72
Concurrent::ConcurrentUpdateError::CONC_UP_ERR_BACKTRACE = T.let(T.unsafe(nil), Array)
# Raised when errors occur during configuration.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#6
class Concurrent::ConfigurationError < ::Concurrent::Error; end
# A synchronization object that allows one thread to wait on multiple other threads.
# The thread that will wait creates a `CountDownLatch` and sets the initial value
# (normally equal to the number of other threads). The initiating thread passes the
# latch to the other threads then waits for the other threads by calling the `#wait`
# method. Each of the other threads calls `#count_down` when done with its work.
# When the latch counter reaches zero the waiting thread is unblocked and continues
# with its work. A `CountDownLatch` can be used only once. Its value cannot be reset.
#
# @example Waiter and Decrementer
# latch = Concurrent::CountDownLatch.new(3)
#
# waiter = Thread.new do
# latch.wait()
# puts ("Waiter released")
# end
#
# decrementer = Thread.new do
# sleep(1)
# latch.count_down
# puts latch.count
#
# sleep(1)
# latch.count_down
# puts latch.count
#
# sleep(1)
# latch.count_down
# puts latch.count
# end
#
# [waiter, decrementer].each(&:join)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/count_down_latch.rb#98
class Concurrent::CountDownLatch < ::Concurrent::MutexCountDownLatch; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/count_down_latch.rb#56
Concurrent::CountDownLatchImplementation = Concurrent::MutexCountDownLatch
# A synchronization aid that allows a set of threads to all wait for each
# other to reach a common barrier point.
#
# @example
# barrier = Concurrent::CyclicBarrier.new(3)
# jobs = Array.new(3) { |i| -> { sleep i; p done: i } }
# process = -> (i) do
# # waiting to start at the same time
# barrier.wait
# # execute job
# jobs[i].call
# # wait for others to finish
# barrier.wait
# end
# threads = 2.times.map do |i|
# Thread.new(i, &process)
# end
#
# # use main as well
# process.call 2
#
# # here we can be sure that all jobs are processed
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#27
class Concurrent::CyclicBarrier < ::Concurrent::Synchronization::LockableObject
# Create a new `CyclicBarrier` that waits for `parties` threads
#
# @param parties [Fixnum] the number of parties
# @raise [ArgumentError] if `parties` is not an integer or is less than zero
# @return [CyclicBarrier] a new instance of CyclicBarrier
# @yield an optional block that will be executed after
# the last thread arrives and before the others are released
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#40
def initialize(parties, &block); end
# A barrier can be broken when:
# - a thread called the `reset` method while at least one other thread was waiting
# - at least one thread timed out on `wait` method
#
# A broken barrier can be restored using `reset`, but it's safer to create a new one
#
# @return [Boolean] true if the barrier is broken otherwise false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#105
def broken?; end
# @return [Fixnum] the number of threads currently waiting on the barrier
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#54
def number_waiting; end
# @return [Fixnum] the number of threads needed to pass the barrier
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#49
def parties; end
# Resets the barrier to its initial state.
# If there is at least one waiting thread, it will be woken up, the `wait`
# method will return false, and the barrier will be broken.
# If the barrier is broken, this method restores it to the original state.
#
# @return [nil]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#95
def reset; end
# Blocks on the barrier until the number of waiting threads is equal to
# `parties` or until `timeout` is reached or `reset` is called.
# If a block has been passed to the constructor, it will be executed once by
# the last arrived thread before releasing the others.
#
# @param timeout [Fixnum] the number of seconds to wait for the counter or
# `nil` to block indefinitely
# @return [Boolean] `true` if the `count` reaches zero else false on
# `timeout` or on `reset` or if the barrier is broken
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#66
def wait(timeout = T.unsafe(nil)); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#111
def ns_generation_done(generation, status, continue = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#122
def ns_initialize(parties, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#117
def ns_next_generation; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb#30
class Concurrent::CyclicBarrier::Generation < ::Struct
# Returns the value of attribute status
#
# @return [Object] the current value of status
def status; end
# Sets the attribute status
#
# @param value [Object] the value to set the attribute status to.
# @return [Object] the newly set value
def status=(_); end
class << self
def [](*_arg0); end
def inspect; end
def keyword_init?; end
def members; end
def new(*_arg0); end
end
end
# Lazy evaluation of a block yielding an immutable result. Useful for
# expensive operations that may never be needed. It may be non-blocking,
# supports the `Concern::Obligation` interface, and accepts the injection of
# a custom executor upon which to execute the block. Processing of the
# block will be deferred until the first time `#value` is called.
# At that time the caller can choose to return immediately and let
# the block execute asynchronously, block indefinitely, or block
# with a timeout.
#
# When a `Delay` is created its state is set to `pending`. The value and
# reason are both `nil`. The first time the `#value` method is called the
# enclosed operation will be run and the calling thread will block. Other
# threads attempting to call `#value` will block as well. Once the operation
# is complete the *value* will be set to the result of the operation or the
# *reason* will be set to the raised exception, as appropriate. All threads
# blocked on `#value` will return. Subsequent calls to `#value` will immediately
# return the cached value. The operation will only be run once. This means that
# any side effects created by the operation will only happen once as well.
#
# `Delay` includes the `Concurrent::Concern::Dereferenceable` mixin to support thread
# safety of the reference returned by `#value`.
#
# @note The default behavior of `Delay` is to block indefinitely when
# calling either `value` or `wait`, executing the delayed operation on
# the current thread. This makes the `timeout` value completely
# irrelevant. To enable non-blocking behavior, use the `executor`
# constructor option. This will cause the delayed operation to be
# executed on the given executor, allowing the call to time out.
# @see Concurrent::Concern::Dereferenceable
#
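# A brief usage sketch (illustrative only):
#
# @example
#   delay = Concurrent::Delay.new { :computed } # the block is not run yet
#   delay.pending?   #=> true
#   delay.value      #=> :computed (runs the block on the calling thread and caches the result)
#   delay.fulfilled? #=> true
#   delay.value      #=> :computed (cached; the block is never run again)
#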
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#44
class Concurrent::Delay < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Concern::Dereferenceable
include ::Concurrent::Concern::Obligation
# Create a new `Delay` in the `:pending` state.
#
# @raise [ArgumentError] if no block is given
# @return [Delay] a new instance of Delay
# @yield the delayed operation to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#62
def initialize(opts = T.unsafe(nil), &block); end
# Reconfigures the block that computes the value, provided this Delay is still `#incomplete?`
#
# @return [true, false] if success
# @yield the delayed operation to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#146
def reconfigure(&block); end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method. If the delayed operation
# raised an exception this method will return nil. The exception object
# can be accessed via the `#reason` method.
#
# @note The default behavior of `Delay` is to block indefinitely when
# calling either `value` or `wait`, executing the delayed operation on
# the current thread. This makes the `timeout` value completely
# irrelevant. To enable non-blocking behavior, use the `executor`
# constructor option. This will cause the delayed operation to be
# executed on the given executor, allowing the call to time out.
# @param timeout [Numeric] the maximum number of seconds to wait
# @return [Object] the current value of the object
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#77
def value(timeout = T.unsafe(nil)); end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method. If the delayed operation
# raised an exception, this method will raise that exception (even when
# the operation has already been executed).
#
# @note The default behavior of `Delay` is to block indefinitely when
# calling either `value` or `wait`, executing the delayed operation on
# the current thread. This makes the `timeout` value completely
# irrelevant. To enable non-blocking behavior, use the `executor`
# constructor option. This will cause the delayed operation to be
# executed on the given executor, allowing the call to time out.
# @param timeout [Numeric] the maximum number of seconds to wait
# @raise [Exception] when `#rejected?` raises `#reason`
# @return [Object] the current value of the object
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#113
def value!(timeout = T.unsafe(nil)); end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method.
#
# @note The default behavior of `Delay` is to block indefinitely when
# calling either `value` or `wait`, executing the delayed operation on
# the current thread. This makes the `timeout` value completely
# irrelevant. To enable non-blocking behavior, use the `executor`
# constructor option. This will cause the delayed operation to be
# executed on the given executor, allowing the call to time out.
# @param timeout [Integer] (nil) the maximum number of seconds to wait for
# the value to be computed. When `nil` the caller will block indefinitely.
# @return [Object] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#132
def wait(timeout = T.unsafe(nil)); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#160
def ns_initialize(opts, &block); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/delay.rb#173
def execute_task_once; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#7
class Concurrent::DependencyCounter
# @return [DependencyCounter] a new instance of DependencyCounter
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#9
def initialize(count, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/dataflow.rb#14
def update(time, value, reason); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#3
class Concurrent::Error < ::StandardError; end
# Old school kernel-style event reminiscent of Win32 programming in C++.
#
# When an `Event` is created it is in the `unset` state. Threads can choose to
# `#wait` on the event, blocking until released by another thread. When one
# thread wants to alert all blocking threads it calls the `#set` method which
# will then wake up all listeners. Once an `Event` has been set it remains set.
# New threads calling `#wait` will return immediately. An `Event` may be
# `#reset` at any time once it has been set.
#
# @example
# event = Concurrent::Event.new
#
# t1 = Thread.new do
# puts "t1 is waiting"
# event.wait(1)
# puts "event occurred"
# end
#
# t2 = Thread.new do
# puts "t2 calling set"
# event.set
# end
#
# [t1, t2].each(&:join)
#
# # prints:
# # t1 is waiting
# # t2 calling set
# # event occurred
# @see http://msdn.microsoft.com/en-us/library/windows/desktop/ms682655.aspx
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#36
class Concurrent::Event < ::Concurrent::Synchronization::LockableObject
# Creates a new `Event` in the unset state. Threads calling `#wait` on the
# `Event` will block.
#
# @return [Event] a new instance of Event
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#40
def initialize; end
# Reset a previously set event back to the `unset` state.
# Has no effect if the `Event` has not yet been set.
#
# @return [Boolean] should always return `true`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#68
def reset; end
# Trigger the event, setting the state to `set` and releasing all threads
# waiting on the event. Has no effect if the `Event` has already been set.
#
# @return [Boolean] should always return `true`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#56
def set; end
# Is the object in the set state?
#
# @return [Boolean] indicating whether or not the `Event` has been set
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#48
def set?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#60
def try?; end
# Wait a given number of seconds for the `Event` to be set by another
# thread. Will wait forever when no `timeout` value is given. Returns
# immediately if the `Event` has already been set.
#
# @return [Boolean] true if the `Event` was set before timeout else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#83
def wait(timeout = T.unsafe(nil)); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#104
def ns_initialize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/event.rb#96
def ns_set; end
end
# A synchronization point at which threads can pair and swap elements within
# pairs. Each thread presents some object on entry to the exchange method,
# matches with a partner thread, and receives its partner's object on return.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together, in an all-or-nothing transaction.
#
# This implementation is very simple, using only a single slot for each
# exchanger (unlike more advanced implementations which use an "arena").
# This approach will work perfectly fine when there are only a few threads
# accessing a single `Exchanger`. Beyond a handful of threads the performance
# will degrade rapidly due to contention on the single slot, but the algorithm
# will remain correct.
#
# @example
#
# exchanger = Concurrent::Exchanger.new
#
# threads = [
# Thread.new { puts "first: " << exchanger.exchange('foo', 1) }, #=> "first: bar"
# Thread.new { puts "second: " << exchanger.exchange('bar', 1) } #=> "second: foo"
# ]
# threads.each {|t| t.join(2) }
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Exchanger.html java.util.concurrent.Exchanger
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#336
class Concurrent::Exchanger < ::Concurrent::RubyExchanger; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#327
Concurrent::ExchangerImplementation = Concurrent::RubyExchanger
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#157
module Concurrent::ExecutorService
include ::Concurrent::Concern::Logging
# Submit a task to the executor for asynchronous processing.
#
# @param task [Proc] the asynchronous task to perform
# @return [self] returns itself
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#166
def <<(task); end
# Does the task queue have a maximum size?
#
# @note Always returns `false`
# @return [Boolean] True if the task queue has a maximum size else false.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#174
def can_overflow?; end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
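# A small sketch of submitting work (illustrative; `executor` stands for any
# object implementing this interface, such as a thread pool):
#
# @example
#   executor.post(1, 2) { |a, b| puts(a + b) }  # runs asynchronously
#   executor << -> { puts 'no arguments here' } # `<<` accepts a callable
#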
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#161
def post(*args, &task); end
# Does this executor guarantee serialization of its operations?
#
# @note Always returns `false`
# @return [Boolean] True if the executor guarantees that all operations
# will be processed in the order they are received and no two operations may
# occur simultaneously. Else false.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#181
def serialized?; end
end
# A `FiberLocalVar` is a variable where the value is different for each fiber.
# Each variable may have a default value, but when you modify the variable only
# the current fiber will ever see that change.
#
# This is similar to Ruby's built-in fiber-local variables (`Thread.current[:name]`),
# but with these major advantages:
# * `FiberLocalVar` has its own identity; it doesn't need a Symbol.
# * Each of Ruby's built-in fiber-local variables leaks some memory forever (the Symbol is held forever on the fiber),
# so it's only OK to create a small number of them.
# `FiberLocalVar` has no such issue and it is fine to create many of them.
# * Ruby's built-in fiber-local variables leak the value set on each fiber forever (unless explicitly set to nil).
# `FiberLocalVar` automatically removes the mapping for each fiber once the `FiberLocalVar` instance is GC'd.
#
# @example
# v = FiberLocalVar.new(14)
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# @example
# v = FiberLocalVar.new(14)
#
# Fiber.new do
# v.value #=> 14
# v.value = 1
# v.value #=> 1
# end.resume
#
# Fiber.new do
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# end.resume
#
# v.value #=> 14
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#41
class Concurrent::FiberLocalVar
# Creates a fiber local variable.
#
# @param default [Object] the default value when otherwise unset
# @param default_block [Proc] Optional block that gets called to obtain the
# default value for each fiber
# @return [FiberLocalVar] a new instance of FiberLocalVar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#49
def initialize(default = T.unsafe(nil), &default_block); end
# Bind the given value to fiber local storage during
# execution of the given block.
#
# @param value [Object] the value to bind
# @return [Object] the value
# @yield the operation to be performed with the bound variable
#
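# A brief sketch (illustrative only):
#
# @example
#   v = Concurrent::FiberLocalVar.new(14)
#   v.bind(42) { v.value } #=> 42
#   v.value                #=> 14 (the previous value is restored after the block)
#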
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#86
def bind(value); end
# Returns the value in the current fiber's copy of this fiber-local variable.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#68
def value; end
# Sets the current fiber's copy of this fiber-local variable to the specified value.
#
# @param value [Object] the value to set
# @return [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#76
def value=(value); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#101
def default; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/fiber_local_var.rb#42
Concurrent::FiberLocalVar::LOCALS = T.let(T.unsafe(nil), Concurrent::FiberLocals)
# An array-backed storage of indexed variables per fiber.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#166
class Concurrent::FiberLocals < ::Concurrent::AbstractLocals
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#167
def locals; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#171
def locals!; end
end
# A thread pool that reuses a fixed number of threads operating off an unbounded queue.
# At any point, at most `num_threads` will be active processing tasks. When all threads are busy, new
# tasks `#post`ed to the thread pool are enqueued until a thread becomes available.
# Should a thread crash for any reason the thread will immediately be removed
# from the pool and replaced.
#
# The API and behavior of this class are based on Java's `FixedThreadPool`
#
# **Thread Pool Options**
#
# Thread pools support several configuration options:
#
# * `idletime`: The number of seconds that a thread may be idle before being reclaimed.
# * `name`: The name of the executor (optional). Printed in the executor's `#to_s` output and
# a `<name>-worker-<id>` name is given to its threads if supported by used Ruby
# implementation. `<id>` is unique for each thread.
# * `max_queue`: The maximum number of tasks that may be waiting in the work queue at
# any one time. When the queue size reaches `max_queue` and no new threads can be created,
# subsequent tasks will be rejected in accordance with the configured `fallback_policy`.
# * `auto_terminate`: When true (default), the threads started will be marked as daemon.
# * `fallback_policy`: The policy defining how rejected tasks are handled.
#
# Three fallback policies are supported:
#
# * `:abort`: Raise a `RejectedExecutionError` exception and discard the task.
# * `:discard`: Discard the task and return false.
# * `:caller_runs`: Execute the task on the calling thread.
#
# **Shutting Down Thread Pools**
#
# Killing a thread pool while tasks are still being processed, either by calling
# the `#kill` method or at application exit, will have unpredictable results. There
# is no way for the thread pool to know what resources are being used by the
# in-progress tasks. When those tasks are killed the impact on those resources
# cannot be predicted. The *best* practice is to explicitly shutdown all thread
# pools using the provided methods:
#
# * Call `#shutdown` to initiate an orderly termination of all in-progress tasks
# * Call `#wait_for_termination` with an appropriate timeout interval and allow
# the orderly shutdown to complete
# * Call `#kill` *only when* the thread pool fails to shutdown in the allotted time
#
# On some runtime platforms (most notably the JVM) the application will not
# exit until all thread pools have been shut down. To prevent applications from
# "hanging" on exit, all threads can be marked as daemon according to the
# `:auto_terminate` option.
#
# ```ruby
# pool1 = Concurrent::FixedThreadPool.new(5) # threads will be marked as daemon
# pool2 = Concurrent::FixedThreadPool.new(5, auto_terminate: false) # mark threads as non-daemon
# ```
#
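# A minimal sketch of the recommended lifecycle (`do_work` below is a
# placeholder for real work):
#
# ```ruby
# pool = Concurrent::FixedThreadPool.new(5)
# 10.times { |i| pool.post { do_work(i) } }
# pool.shutdown                                  # stop accepting new tasks
# pool.kill unless pool.wait_for_termination(10) # force-stop only on timeout
# ```
#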
# @note Failure to properly shutdown a thread pool can lead to unpredictable results.
# Please read *Shutting Down Thread Pools* for more information.
# @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html Java Tutorials: Thread Pools
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html Java Executors class
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html Java ExecutorService interface
# @see https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html#setDaemon-boolean-
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/fixed_thread_pool.rb#201
class Concurrent::FixedThreadPool < ::Concurrent::ThreadPoolExecutor
# Create a new thread pool.
#
# @option opts
# @param num_threads [Integer] the number of threads to allocate
# @param opts [Hash] the options defining pool behavior.
# @raise [ArgumentError] if `num_threads` is less than or equal to zero
# @raise [ArgumentError] if `fallback_policy` is not a known policy
# @return [FixedThreadPool] a new instance of FixedThreadPool
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executors.html#newFixedThreadPool-int-
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/fixed_thread_pool.rb#215
def initialize(num_threads, opts = T.unsafe(nil)); end
end
# {include:file:docs-source/future.md}
#
# @see http://ruby-doc.org/stdlib-2.1.1/libdoc/observer/rdoc/Observable.html Ruby Observable module
# @see http://clojuredocs.org/clojure_core/clojure.core/future Clojure's future function
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html java.util.concurrent.Future
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#21
class Concurrent::Future < ::Concurrent::IVar
# Create a new `Future` in the `:unscheduled` state.
#
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Future] a new instance of Future
# @yield the asynchronous operation to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#33
def initialize(opts = T.unsafe(nil), &block); end
# Attempt to cancel the operation if it has not already processed.
# The operation can only be cancelled while still `pending`. It cannot
# be cancelled once it has begun processing or has completed.
#
# @return [Boolean] was the operation successfully cancelled.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#99
def cancel; end
# Has the operation been successfully cancelled?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#111
def cancelled?; end
# Execute an `:unscheduled` `Future`. Immediately sets the state to `:pending` and
# passes the block to a new thread/thread pool for eventual execution.
# Does nothing if the `Future` is in any state other than `:unscheduled`.
#
# @example Instance and execute in separate steps
# future = Concurrent::Future.new{ sleep(1); 42 }
# future.state #=> :unscheduled
# future.execute
# future.state #=> :pending
# @example Instance and execute in one line
# future = Concurrent::Future.new{ sleep(1); 42 }.execute
# future.state #=> :pending
# @return [Future] a reference to `self`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#53
def execute; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#82
def set(value = T.unsafe(nil), &block); end
# Wait the given number of seconds for the operation to complete.
# On timeout attempt to cancel the operation.
#
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [Boolean] true if the operation completed before the timeout
# else false
#
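# @example A sketch of waiting with a timeout (`slow_operation` is a placeholder)
#   future = Concurrent::Future.execute { slow_operation }
#   if future.wait_or_cancel(5)
#     future.value
#   else
#     # timed out; the operation was cancelled if it had not yet started
#   end
#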
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#121
def wait_or_cancel(timeout); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#133
def ns_initialize(value, opts); end
class << self
# Create a new `Future` object with the given block, execute it, and return the
# `:pending` object.
#
# @example
# future = Concurrent::Future.execute{ sleep(1); 42 }
# future.state #=> :pending
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Future] the newly created `Future` in the `:pending` state
# @yield the asynchronous operation to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/future.rb#77
def execute(opts = T.unsafe(nil), &block); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#18
Concurrent::GLOBAL_FAST_EXECUTOR = T.let(T.unsafe(nil), Concurrent::Delay)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#30
Concurrent::GLOBAL_IMMEDIATE_EXECUTOR = T.let(T.unsafe(nil), Concurrent::ImmediateExecutor)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#22
Concurrent::GLOBAL_IO_EXECUTOR = T.let(T.unsafe(nil), Concurrent::Delay)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#111
Concurrent::GLOBAL_LOGGER = T.let(T.unsafe(nil), Concurrent::AtomicReference)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/configuration.rb#26
Concurrent::GLOBAL_TIMER_SET = T.let(T.unsafe(nil), Concurrent::Delay)
# A thread-safe subclass of Hash. This version locks against the object
# itself for every method call, ensuring only one thread can be reading
# or writing at a time. This includes iteration methods like `#each`,
# which takes the lock repeatedly when reading an item.
#
# @see http://ruby-doc.org/core/Hash.html Ruby standard library `Hash`
#
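# A minimal sketch of shared use across threads (values are illustrative):
#
# ```ruby
# hash = Concurrent::Hash.new
# threads = 4.times.map do |i|
#   Thread.new { hash[i] = i * i } # each []= call takes the lock
# end
# threads.each(&:join)
# hash.keys.sort #=> [0, 1, 2, 3]
# ```
#
# Note that compound operations such as `hash[:k] += 1` are two separate
# locked calls and are not atomic as a whole.
#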
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/hash.rb#49
class Concurrent::Hash < ::Hash; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/hash.rb#16
Concurrent::HashImplementation = Hash
# An `IVar` is like a future that you can assign. As a future is a value that
# is being computed that you can wait on, an `IVar` is a value that is waiting
# to be assigned, that you can wait on. `IVars` are single assignment and
# deterministic.
#
# Then, express futures as an asynchronous computation that assigns an `IVar`.
# The `IVar` becomes the primitive on which [futures](Future) and
# [dataflow](Dataflow) are built.
#
# An `IVar` is a single-element container that is normally created empty, and
# can only be set once. The I in `IVar` stands for immutable. Reading an
# `IVar` normally blocks until it is set. It is safe to set and read an `IVar`
# from different threads.
#
# If you want to have some parallel task set the value in an `IVar`, you want
# a `Future`. If you want to create a graph of parallel tasks all executed
# when the values they depend on are ready you want `dataflow`. `IVar` is
# generally a low-level primitive.
#
# ## Examples
#
# Create, set and get an `IVar`
#
# ```ruby
# ivar = Concurrent::IVar.new
# ivar.set 14
# ivar.value #=> 14
# ivar.set 2 # would now be an error
# ```
#
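# Because reads block until the value is set, an `IVar` can hand a value from
# one thread to another. A minimal sketch:
#
# ```ruby
# ivar = Concurrent::IVar.new
# reader = Thread.new { ivar.value } # blocks until the IVar is set
# ivar.set 14
# reader.value    #=> 14
# ivar.try_set(2) #=> false, the IVar is already set
# ```
#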
# ## See Also
#
# 1. For the theory: Arvind, R. Nikhil, and K. Pingali.
# [I-Structures: Data structures for parallel computing](http://dl.acm.org/citation.cfm?id=69562).
# In Proceedings of Workshop on Graph Reduction, 1986.
# 2. For recent application:
# [DataDrivenFuture in Habanero Java from Rice](http://www.cs.rice.edu/~vs3/hjlib/doc/edu/rice/hj/api/HjDataDrivenFuture.html).
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#48
class Concurrent::IVar < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Concern::Dereferenceable
include ::Concurrent::Concern::Obligation
include ::Concurrent::Concern::Observable
# Create a new `IVar` in the `:pending` state with the (optional) initial value.
#
# @option opts
# @option opts
# @option opts
# @param value [Object] the initial value
# @param opts [Hash] the options to create a message with
# @return [IVar] a new instance of IVar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#62
def initialize(value = T.unsafe(nil), opts = T.unsafe(nil), &block); end
# Add an observer on this object that will receive notification on update.
#
# Upon completion the `IVar` will notify all observers in a thread-safe way.
# The `func` method of the observer will be called with three arguments: the
# `Time` at which the `Future` completed the asynchronous operation, the
# final `value` (or `nil` on rejection), and the final `reason` (or `nil` on
# fulfillment).
#
# @param observer [Object] the object that will be notified of changes
# @param func [Symbol] symbol naming the method to call when this
#   `Observable` has changes
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#81
def add_observer(observer = T.unsafe(nil), func = T.unsafe(nil), &block); end
# Set the `IVar` to failed due to some error and wake or notify all threads waiting on it.
#
# @param reason [Object] for the failure
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @return [IVar] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#135
def fail(reason = T.unsafe(nil)); end
# Set the `IVar` to a value and wake or notify all threads waiting on it.
#
# @param value [Object] the value to store in the `IVar`
# @raise [ArgumentError] if both a value and a block are given
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @return [IVar] self
# @yield A block operation to use for setting the value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#113
def set(value = T.unsafe(nil)); end
# Attempt to set the `IVar` with the given value or block. Return a
# boolean indicating the success or failure of the set operation.
#
# @param value [Object] the value to store in the `IVar`
# @raise [ArgumentError] if both a value and a block are given
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @return [Boolean] true if the value was set else false
# @yield A block operation to use for setting the value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#145
def try_set(value = T.unsafe(nil), &block); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#202
def check_for_block_or_value!(block_given, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#177
def complete(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#184
def complete_without_notification(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#190
def notify_observers(value, reason); end
# @raise [MultipleAssignmentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#195
def ns_complete_without_notification(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#155
def ns_initialize(value, opts); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#168
def safe_execute(task, args = T.unsafe(nil)); end
end
# Raised when an operation is attempted which is not legal given the
# receiver's current state
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#20
class Concurrent::IllegalOperationError < ::Concurrent::Error; end
# An executor service which runs all operations on the current thread,
# blocking as necessary. Operations are performed in the order they are
# received and no two operations can be performed simultaneously.
#
# This executor service exists mainly for testing and debugging. When used
# it immediately runs every `#post` operation on the current thread, blocking
# that thread until the operation is complete. This can be very beneficial
# during testing because it makes all operations deterministic.
#
# @note Intended for use primarily in testing and debugging.
#
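# A minimal sketch (output is illustrative):
#
# ```ruby
# executor = Concurrent::ImmediateExecutor.new
# executor.post { puts "runs on the calling thread before #post returns" }
# executor << -> { puts "same, submitted via #<<" }
# ```
#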
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#17
class Concurrent::ImmediateExecutor < ::Concurrent::AbstractExecutorService
include ::Concurrent::SerialExecutorService
# Creates a new executor
#
# @return [ImmediateExecutor] a new instance of ImmediateExecutor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#21
def initialize; end
# Submit a task to the executor for asynchronous processing.
#
# @param task [Proc] the asynchronous task to perform
# @return [self] returns itself
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#34
def <<(task); end
# Begin an orderly shutdown. Tasks already in the queue will be executed,
# but no new tasks will be accepted. Has no additional effect if the
# thread pool is not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#55
def kill; end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#26
def post(*args, &task); end
# Is the executor running?
#
# @return [Boolean] `true` when running, `false` when shutting down or shutdown
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#40
def running?; end
# Begin an orderly shutdown. Tasks already in the queue will be executed,
# but no new tasks will be accepted. Has no additional effect if the
# thread pool is not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#55
def shutdown; end
# Is the executor shutdown?
#
# @return [Boolean] `true` when shutdown, `false` when shutting down or running
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#50
def shutdown?; end
# Is the executor shuttingdown?
#
# @return [Boolean] `true` when not running and not shutdown, else `false`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#45
def shuttingdown?; end
# Block until executor shutdown is complete or until `timeout` seconds have
# passed.
#
# @note Does not initiate shutdown or termination. Either `shutdown` or `kill`
# must be called before this method (or on another thread).
# @param timeout [Integer] the maximum number of seconds to wait for shutdown to complete
# @return [Boolean] `true` if shutdown complete or false on `timeout`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/immediate_executor.rb#62
def wait_for_termination(timeout = T.unsafe(nil)); end
end
# Raised when an attempt is made to violate an immutability guarantee.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#16
class Concurrent::ImmutabilityError < ::Concurrent::Error; end
# A thread-safe, immutable variation of Ruby's standard `Struct`.
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
#
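# A minimal sketch of defining and using an immutable struct (names and values
# are illustrative):
#
# ```ruby
# Point = Concurrent::ImmutableStruct.new(:x, :y)
# point = Point.new(1, 2)
# point.x    #=> 1
# point.to_h #=> {:x => 1, :y => 2}
# # no setter methods are defined, so members cannot be reassigned
# ```
#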
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#9
module Concurrent::ImmutableStruct
include ::Concurrent::Synchronization::AbstractStruct
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#51
def ==(other); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#46
def [](member); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#56
def each(&block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#62
def each_pair(&block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#29
def inspect; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#36
def merge(other, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#68
def select(&block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#17
def to_a; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#41
def to_h; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#29
def to_s; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#17
def values; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#24
def values_at(*indexes); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#76
def initialize_copy(original); end
class << self
# @private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#12
def included(base); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#82
def new(*args, &block); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/immutable_struct.rb#92
Concurrent::ImmutableStruct::FACTORY = T.let(T.unsafe(nil), T.untyped)
# An executor service which runs all operations on a new thread, blocking
# until it completes. Operations are performed in the order they are received
# and no two operations can be performed simultaneously.
#
# This executor service exists mainly for testing and debugging. When used it
# immediately runs every `#post` operation on a new thread, blocking the
# current thread until the operation is complete. This is similar to how the
# ImmediateExecutor works, but the operation has the full stack of the new
# thread at its disposal. This can be helpful when the operations will spawn
# more operations on the same executor and so on - such a situation might
# overflow the single stack in case of an ImmediateExecutor, which is
# inconsistent with how it would behave for a threaded executor.
#
# @note Intended for use primarily in testing and debugging.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/indirect_immediate_executor.rb#19
class Concurrent::IndirectImmediateExecutor < ::Concurrent::ImmediateExecutor
# Creates a new executor
#
# @return [IndirectImmediateExecutor] a new instance of IndirectImmediateExecutor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/indirect_immediate_executor.rb#21
def initialize; end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/indirect_immediate_executor.rb#27
def post(*args, &task); end
end
# Raised when an object's methods are called when it has not been
# properly initialized.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#24
class Concurrent::InitializationError < ::Concurrent::Error; end
# Raised when a lifecycle method (such as `stop`) is called in an improper
# sequence or when the object is in an inappropriate state.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#13
class Concurrent::LifecycleError < ::Concurrent::Error; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#6
class Concurrent::LockFreeStack < ::Concurrent::Synchronization::Object
include ::Enumerable
extend ::Concurrent::Synchronization::SafeInitialization
# @param head [Node]
# @return [LockFreeStack] a new instance of LockFreeStack
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#51
def initialize(head = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#118
def clear; end
# @return [self]
# @yield over the cleared stack
# @yieldparam value [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#142
def clear_each(&block); end
# @param head [Node]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#128
def clear_if(head); end
# @param head [Node]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#99
def compare_and_clear(head); end
# @param head [Node]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#85
def compare_and_pop(head); end
# @param head [Node]
# @param value [Object]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#65
def compare_and_push(head, value); end
# @param head [Node]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#107
def each(head = T.unsafe(nil)); end
# @param head [Node]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#58
def empty?(head = T.unsafe(nil)); end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#154
def inspect; end
# @return [Node]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#79
def peek; end
# @return [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#90
def pop; end
# @param value [Object]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#71
def push(value); end
# @param head [Node]
# @param new_head [Node]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#135
def replace_if(head, new_head); end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#154
def to_s; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_head(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def head; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def head=(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_head(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_head(&block); end
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#41
def of1(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#46
def of2(value1, value2); end
end
end
# The singleton for empty node
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#32
Concurrent::LockFreeStack::EMPTY = T.let(T.unsafe(nil), Concurrent::LockFreeStack::Node)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#10
class Concurrent::LockFreeStack::Node
# @return [Node] a new instance of Node
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#23
def initialize(value, next_node); end
# @return [Node]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#14
def next_node; end
# @return [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#17
def value; end
# Allows the value to be nil-ed out so the GC can reclaim it when the entry is no longer relevant; not synchronised
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb#21
def value=(_arg0); end
class << self
def [](*_arg0); end
end
end
# Either {FiberLocalVar} or {ThreadLocalVar} depending on whether Mutex (and Monitor)
# are held, respectively, per Fiber or per Thread.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/lock_local_var.rb#21
Concurrent::LockLocalVar = Concurrent::FiberLocalVar
# An `MVar` is a synchronized single element container. It is either empty or
# contains one item. Taking a value from an empty `MVar` blocks, as does
# putting a value into a full one. You can either think of them as blocking
# queue of length one, or a special kind of mutable variable.
#
# On top of the fundamental `#put` and `#take` operations, we also provide a
# `#modify` that is atomic with respect to operations on the same instance.
# These operations all support timeouts.
#
# We also support non-blocking operations `#try_put!` and `#try_take!`, a
# `#set!` that ignores existing values, a `#value` that returns the value
# without removing it or returns `MVar::EMPTY`, and a `#modify!` that yields
# `MVar::EMPTY` if the `MVar` is empty and can be used to set `MVar::EMPTY`.
# You shouldn't use these operations in the first instance.
#
# `MVar` is a [Dereferenceable](Dereferenceable).
#
# `MVar` is related to M-structures in Id, `MVar` in Haskell and `SyncVar` in Scala.
#
# Note that unlike the original Haskell paper, our `#take` is blocking. This is how
# Haskell and Scala do it today.
#
# ## See Also
#
# 1. P. Barth, R. Nikhil, and Arvind. [M-Structures: Extending a parallel, non- strict, functional language with state](http://dl.acm.org/citation.cfm?id=652538). In Proceedings of the 5th
# ACM Conference on Functional Programming Languages and Computer Architecture (FPCA), 1991.
#
# 2. S. Peyton Jones, A. Gordon, and S. Finne. [Concurrent Haskell](http://dl.acm.org/citation.cfm?id=237794).
# In Proceedings of the 23rd Symposium on Principles of Programming Languages
# (PoPL), 1996.
#
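# A minimal sketch of handing a value between threads (the value is illustrative):
#
# ```ruby
# mvar = Concurrent::MVar.new            # starts empty
# producer = Thread.new { mvar.put 42 }  # would block if the MVar were full
# consumer = Thread.new { mvar.take }    # blocks until a value is available
# consumer.value #=> 42
# producer.join
# ```
#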
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#38
class Concurrent::MVar < ::Concurrent::Synchronization::Object
include ::Concurrent::Concern::Dereferenceable
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new `MVar`, either empty or with an initial value.
#
# @param opts [Hash] the options controlling how the future will be processed
# @return [MVar] a new instance of MVar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#54
def initialize(value = T.unsafe(nil), opts = T.unsafe(nil)); end
# Acquires the lock on the `MVar`, yields its value to the provided block,
# and releases the lock. A timeout can be set to limit the time spent blocked,
# in which case it returns `TIMEOUT` if the time is exceeded.
#
# @return [Object] the value returned by the block, or `TIMEOUT`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#86
def borrow(timeout = T.unsafe(nil)); end
# Returns if the `MVar` is currently empty.
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#195
def empty?; end
# Returns if the `MVar` currently contains a value.
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#200
def full?; end
# Atomically `take`, yield the value to a block for transformation, and then
# `put` the transformed value. Returns the transformed value. A timeout can
# be set to limit the time spent blocked, in which case it returns `TIMEOUT`
# if the time is exceeded.
#
# @raise [ArgumentError]
# @return [Object] the transformed value, or `TIMEOUT`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#123
def modify(timeout = T.unsafe(nil)); end
# Non-blocking version of `modify` that will yield with `EMPTY` if there is no value yet.
#
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#179
def modify!; end
# Put a value into an `MVar`, blocking if there is already a value until
# it is empty. A timeout can be set to limit the time spent blocked, in
# which case it returns `TIMEOUT` if the time is exceeded.
#
# @return [Object] the value that was put, or `TIMEOUT`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#103
def put(value, timeout = T.unsafe(nil)); end
# Non-blocking version of `put` that will overwrite an existing value.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#169
def set!(value); end
# Remove the value from an `MVar`, leaving it empty, and blocking if there
# isn't a value. A timeout can be set to limit the time spent blocked, in
# which case it returns `TIMEOUT` if the time is exceeded.
#
# @return [Object] the value that was taken, or `TIMEOUT`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#66
def take(timeout = T.unsafe(nil)); end
# Non-blocking version of `put`, that returns whether or not it was successful.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#156
def try_put!(value); end
# Non-blocking version of `take`, that returns `EMPTY` instead of blocking.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#142
def try_take!; end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#206
def synchronize(&block); end
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#212
def unlocked_empty?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#216
def unlocked_full?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#224
def wait_for_empty(timeout); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#220
def wait_for_full(timeout); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#228
def wait_while(condition, timeout); end
end
# Unique value that represents that an `MVar` was empty
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#43
Concurrent::MVar::EMPTY = T.let(T.unsafe(nil), Object)
# Unique value that represents that an `MVar` timed out before it was able
# to produce a value.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mvar.rb#47
Concurrent::MVar::TIMEOUT = T.let(T.unsafe(nil), Object)
# `Concurrent::Map` is a hash-like object and should have much better performance
# characteristics, especially under high concurrency, than `Concurrent::Hash`.
# However, `Concurrent::Map` is not strictly semantically equivalent to a ruby `Hash`
# -- for instance, it does not necessarily retain ordering by insertion time as `Hash`
# does. For most uses it should do fine though, and we recommend you consider
# `Concurrent::Map` instead of `Concurrent::Hash` for your concurrency-safe hash needs.
#
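# A minimal sketch of the atomic and two-step operations (keys and values are
# illustrative):
#
# ```ruby
# map = Concurrent::Map.new
# map.put_if_absent(:a, 1)     #=> nil (key was absent, 1 is stored)
# map.put_if_absent(:a, 2)     #=> 1   (key present, existing value returned)
# map.get(:a)                  #=> 1
# map.fetch_or_store(:b) { 2 } #=> 2   (stored, but as two separate steps)
# ```
#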
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#39
class Concurrent::Map < ::Concurrent::Collection::MriMapBackend
# Iterates over each key value pair.
# This method is atomic.
#
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.
# @return [self]
# @yield for each key value pair in the map
# @yieldparam key [Object]
# @yieldparam value [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#274
def each; end
# Iterates over each key.
# This method is atomic.
#
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.
# @return [self]
# @yield for each key in the map
# @yieldparam key [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#255
def each_key; end
# Iterates over each key value pair.
# This method is atomic.
#
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.
# @return [self]
# @yield for each key value pair in the map
# @yieldparam key [Object]
# @yieldparam value [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#274
def each_pair; end
# Iterates over each value.
# This method is atomic.
#
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.
# @return [self]
# @yield for each value in the map
# @yieldparam value [Object]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#264
def each_value; end
# Is map empty?
#
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#291
def empty?; end
# Get a value with key, or default_value when key is absent,
# or fail when no default_value is given.
#
# @note The "fetch-then-act" methods of `Map` are not atomic. `Map` is intended
#   to be used as a concurrency primitive with strong happens-before
# guarantees. It is not intended to be used as a high-level abstraction
# supporting complex operations. All read and write operations are
# thread safe, but no guarantees are made regarding race conditions
# between the fetch operation and yielding to the block. Additionally,
# this method does not support recursion. This is due to internal
# constraints that are very unlikely to change in the near future.
# @param key [Object]
# @param default_value [Object]
# @raise [KeyError] when key is missing and no default_value is provided
# @return [Object] the value or default value
# @yield default value for a key
# @yieldparam key [Object]
# @yieldreturn [Object] default value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#183
def fetch(key, default_value = T.unsafe(nil)); end
# Fetch value with key, or store default value when key is absent,
# or fail when no default_value is given. This is a two step operation,
# therefore not atomic. The store can overwrite another concurrently
# stored value.
#
# @param key [Object]
# @param default_value [Object]
# @return [Object] the value or default value
# @yield default value for a key
# @yieldparam key [Object]
# @yieldreturn [Object] default value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#205
def fetch_or_store(key, default_value = T.unsafe(nil)); end
# Get a value with key
#
# @param key [Object]
# @return [Object] the value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb#21
def get(key); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#321
def inspect; end
# Find key of a value.
#
# @param value [Object]
# @return [Object, nil] key or nil when not found
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#284
def key(value); end
# All keys
#
# @return [::Array<Object>] keys
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#236
def keys; end
# @raise [TypeError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#305
def marshal_dump; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#313
def marshal_load(hash); end
# Set a value with key
#
# @param key [Object]
# @param value [Object]
# @return [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb#17
def put(key, value); end
# Insert value into map with key if key is absent in one atomic step.
#
# @param key [Object]
# @param value [Object]
# @return [Object, nil] the previous value when key was present or nil when there was no key
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#215
def put_if_absent(key, value); end
# Is the value stored in the map. Iterates over all values.
#
# @param value [Object]
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#227
def value?(value); end
# All values
#
# @return [::Array<Object>] values
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#244
def values; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#331
def initialize_copy(other); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#336
def populate_from(hash); end
# @raise [KeyError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#327
def raise_fetch_no_key; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/map.rb#341
def validate_options_hash!(options); end
end
# Raised when an object with a start/stop lifecycle has been started an
# excessive number of times. Often used in conjunction with a restart
# policy or strategy.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#29
class Concurrent::MaxRestartFrequencyError < ::Concurrent::Error; end
# A `Maybe` encapsulates an optional value. A `Maybe` either contains a value
# (represented as `Just`), or it is empty (represented as `Nothing`). Using
# `Maybe` is a good way to deal with errors or exceptional cases without
# resorting to drastic measures such as exceptions.
#
# `Maybe` is a replacement for the use of `nil` with better type checking.
#
# For compatibility with {Concurrent::Concern::Obligation} the predicate and
# accessor methods are aliased as `fulfilled?`, `rejected?`, `value`, and
# `reason`.
#
# ## Motivation
#
# A common pattern in languages with pattern matching, such as Erlang and
# Haskell, is to return *either* a value *or* an error from a function.
# Consider this Erlang code:
#
# ```erlang
# case file:consult("data.dat") of
# {ok, Terms} -> do_something_useful(Terms);
# {error, Reason} -> lager:error(Reason)
# end.
# ```
#
# In this example the standard library function `file:consult` returns a
# [tuple](http://erlang.org/doc/reference_manual/data_types.html#id69044)
# with two elements: an [atom](http://erlang.org/doc/reference_manual/data_types.html#id64134)
# (similar to a ruby symbol) and a variable containing ancillary data. On
# success it returns the atom `ok` and the data from the file. On failure it
# returns `error` and a string with an explanation of the problem. With this
# pattern there is no ambiguity regarding success or failure. If the file is
# empty the return value cannot be misinterpreted as an error. And when an
# error occurs the return value provides useful information.
#
# In Ruby we tend to return `nil` when an error occurs or else we raise an
# exception. Both of these idioms are problematic. Returning `nil` is
# ambiguous because `nil` may also be a valid value. It also lacks
# information pertaining to the nature of the error. Raising an exception
# is both expensive and usurps the normal flow of control. All of these
# problems can be solved with the use of a `Maybe`.
#
# A `Maybe` is unambiguous with regard to whether or not it contains a value.
# When `Just` it contains a value, when `Nothing` it does not. When `Just`
# the value it contains may be `nil`, which is perfectly valid. When
# `Nothing` the reason for the lack of a value is contained as well. The
# previous Erlang example can be duplicated in Ruby in a principled way by
# having functions return `Maybe` objects:
#
# ```ruby
# result = MyFileUtils.consult("data.dat") # returns a Maybe
# if result.just?
# do_something_useful(result.value) # or result.just
# else
# logger.error(result.reason) # or result.nothing
# end
# ```
#
# @example Returning a Maybe from a Function
# module MyFileUtils
# def self.consult(path)
# file = File.open(path, 'r')
# Concurrent::Maybe.just(file.read)
# rescue => ex
# return Concurrent::Maybe.nothing(ex)
# ensure
# file.close if file
# end
# end
#
# maybe = MyFileUtils.consult('bogus.file')
# maybe.just? #=> false
# maybe.nothing? #=> true
# maybe.reason #=> #<Errno::ENOENT: No such file or directory @ rb_sysopen - bogus.file>
#
# maybe = MyFileUtils.consult('README.md')
# maybe.just? #=> true
# maybe.nothing? #=> false
# maybe.value #=> "# Concurrent Ruby\n[![Gem Version..."
# @example Using Maybe with a Block
# result = Concurrent::Maybe.from do
# Client.find(10) # Client is an ActiveRecord model
# end
#
# # -- if the record was found
# result.just? #=> true
# result.value #=> #<Client id: 10, first_name: "Ryan">
#
# # -- if the record was not found
# result.just? #=> false
# result.reason #=> ActiveRecord::RecordNotFound
# @example Using Maybe with the Null Object Pattern
# # In a Rails controller...
# result = ClientService.new(10).find # returns a Maybe
# render json: result.or(NullClient.new)
# @see https://hackage.haskell.org/package/base-4.2.0.1/docs/Data-Maybe.html Haskell Data.Maybe
# @see https://github.com/purescript/purescript-maybe/blob/master/docs/Data.Maybe.md PureScript Data.Maybe
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#104
class Concurrent::Maybe < ::Concurrent::Synchronization::Object
include ::Comparable
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new `Maybe` with the given attributes.
#
# @param just [Object] The value when `Just` else `NONE`.
# @param nothing [Exception, Object] The exception when `Nothing` else `NONE`.
# @return [Maybe] The new `Maybe`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#224
def initialize(just, nothing); end
# Comparison operator.
#
# @return [Integer] 0 if self and other are both `Nothing`;
# -1 if self is `Nothing` and other is `Just`;
#   1 if self is `Just` and other is `Nothing`;
# `self.just <=> other.just` if both self and other are `Just`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#199
def <=>(other); end
# Is this `Maybe` a `Just` (successfully fulfilled with a value)?
#
# @return [Boolean] True if `Just` or false if `Nothing`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#176
def fulfilled?; end
# The value of a `Maybe` when `Just`. Will be `NONE` when `Nothing`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#114
def just; end
# Is this `Maybe` a `Just` (successfully fulfilled with a value)?
#
# @return [Boolean] True if `Just` or false if `Nothing`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#176
def just?; end
# The reason for the `Maybe` when `Nothing`. Will be `NONE` when `Just`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#117
def nothing; end
# Is this `Maybe` a `nothing` (rejected with an exception upon fulfillment)?
#
# @return [Boolean] True if `Nothing` or false if `Just`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#184
def nothing?; end
# Return either the value of self or the given default value.
#
# @return [Object] The value of self when `Just`; else the given default.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#210
def or(other); end
# The reason for the `Maybe` when `Nothing`. Will be `NONE` when `Just`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#117
def reason; end
# Is this `Maybe` a `nothing` (rejected with an exception upon fulfillment)?
#
# @return [Boolean] True if `Nothing` or false if `Just`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#184
def rejected?; end
# The value of a `Maybe` when `Just`. Will be `NONE` when `Nothing`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#114
def value; end
class << self
# Create a new `Maybe` using the given block.
#
# Runs the given block passing all function arguments to the block as block
# arguments. If the block runs to completion without raising an exception
# a new `Just` is created with the value set to the return value of the
# block. If the block raises an exception a new `Nothing` is created with
# the reason being set to the raised exception.
#
# @param args [Array<Object>] Zero or more arguments to pass to the block.
# @raise [ArgumentError] when no block given.
# @return [Maybe] The newly created object.
# @yield The block from which to create a new `Maybe`.
# @yieldparam args [Array<Object>] Zero or more block arguments passed as
# arguments to the function.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#137
def from(*args); end
# Create a new `Just` with the given value.
#
# @param value [Object] The value to set for the new `Maybe` object.
# @return [Maybe] The newly created object.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#152
def just(value); end
# Create a new `Nothing` with the given (optional) reason.
#
# @param error [Exception] The reason to set for the new `Maybe` object.
# When given a string a new `StandardError` will be created with the
# argument as the message. When no argument is given a new
# `StandardError` with an empty message will be created.
# @return [Maybe] The newly created object.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#164
def nothing(error = T.unsafe(nil)); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#29
def new(*args, &block); end
end
end
# Indicates that the given attribute has not been set.
# When `Just` the {#nothing} getter will return `NONE`.
# When `Nothing` the {#just} getter will return `NONE`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/maybe.rb#111
Concurrent::Maybe::NONE = T.let(T.unsafe(nil), Object)
# Raised when an attempt is made to modify an immutable object
# (such as an `IVar`) after its final state has been set.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#33
class Concurrent::MultipleAssignmentError < ::Concurrent::Error
# @return [MultipleAssignmentError] a new instance of MultipleAssignmentError
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#36
def initialize(message = T.unsafe(nil), inspection_data = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#41
def inspect; end
# Returns the value of attribute inspection_data.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#34
def inspection_data; end
end
# Aggregates multiple exceptions.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#58
class Concurrent::MultipleErrors < ::Concurrent::Error
# @return [MultipleErrors] a new instance of MultipleErrors
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#61
def initialize(errors, message = T.unsafe(nil)); end
# Returns the value of attribute errors.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#59
def errors; end
end
# A thread-safe variation of Ruby's standard `Struct`. Values can be set at
# construction or safely changed at any time during the object's lifecycle.
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#10
module Concurrent::MutableStruct
include ::Concurrent::Synchronization::AbstractStruct
# Equality
#
# @return [Boolean] true if other has the same struct subclass and has
# equal member values (according to `Object#==`)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#128
def ==(other); end
# Attribute Reference
#
# @param member [Symbol, String, Integer] the string or symbol name of the member
# for which to obtain the value or the member's index
# @raise [NameError] if the member does not exist
# @raise [IndexError] if the index is out of range.
# @return [Object] the value of the given struct member or the member at the given index.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#118
def [](member); end
# Attribute Assignment
#
# Sets the value of the given struct member or the member at the given index.
#
# @param member [Symbol, String, Integer] the string or symbol name of the member
# for which to obtain the value or the member's index
# @raise [NameError] if the name does not exist
# @raise [IndexError] if the index is out of range.
# @return [Object] the value of the given struct member or the member at the given index.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#185
def []=(member, value); end
# Yields the value of each struct member in order. If no block is given
# an enumerator is returned.
#
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#139
def each(&block); end
# Yields the name and value of each struct member in order. If no block is
# given an enumerator is returned.
#
# @yield the operation to be performed on each struct member/value pair
# @yieldparam member [Object] each struct member (in order)
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#152
def each_pair(&block); end
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#72
def inspect; end
# Returns a new struct containing the contents of `other` and the contents
# of `self`. If no block is specified, the value for entries with duplicate
# keys will be that of `other`. Otherwise the value for each duplicate key
# is determined by calling the block with the key, its value in `self` and
# its value in `other`.
#
# @param other [Hash] the hash from which to set the new values
# @raise [ArgumentError] if given a member that is not defined in the struct
# @return [Synchronization::AbstractStruct] a new struct with the new values
# @yield an options block for resolving duplicate keys
# @yieldparam member [String, Symbol] the name of the member which is duplicated
# @yieldparam selfvalue [Object] the value of the member in `self`
# @yieldparam othervalue [Object] the value of the member in `other`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#94
def merge(other, &block); end
# Yields each member value from the struct to the block and returns an Array
# containing the member values from the struct for which the given block
# returns a true value (equivalent to `Enumerable#select`).
#
# @return [Array] an array containing each value for which the block returns true
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#167
def select(&block); end
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#51
def to_a; end
# Returns a hash containing the names and values for the struct's members.
#
# @return [Hash] the names and values for the struct's members
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#103
def to_h; end
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#72
def to_s; end
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#51
def values; end
# Returns the struct member values for each selector as an Array.
#
# A selector may be either an Integer offset or a Range of offsets (as in `Array#values_at`).
#
# @param indexes [Fixnum, Range] the index(es) from which to obtain the values (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#63
def values_at(*indexes); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#202
def initialize_copy(original); end
class << self
# Factory for creating new struct classes.
#
# ```
#   new([class_name] [, member_name]+) -> StructClass
#   new([class_name] [, member_name]+) {|StructClass| block } -> StructClass
# new(value, ...) -> obj
# StructClass[value, ...] -> obj
# ```
#
# The first two forms are used to create a new struct subclass `class_name`
# that can contain a value for each member_name. This subclass can be
# used to create instances of the structure like any other Class.
#
# If the `class_name` is omitted an anonymous struct class will be created.
# Otherwise, the name of this struct will appear as a constant in the struct class,
# so it must be unique for all structs under this base class and must start with a
# capital letter. Assigning a struct class to a constant also gives the class
# the name of the constant.
#
# If a block is given it will be evaluated in the context of `StructClass`, passing
# the created class as a parameter. This is the recommended way to customize a struct.
# Subclassing an anonymous struct creates an extra anonymous class that will never be used.
#
# The last two forms create a new instance of a struct subclass. The number of value
# parameters must be less than or equal to the number of attributes defined for the
# struct. Unset parameters default to nil. Passing more parameters than the number of attributes
# will raise an `ArgumentError`.
#
# @see http://ruby-doc.org/core/Struct.html#method-c-new Ruby standard library `Struct#new`
#
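# @example An illustrative sketch of the factory and instance forms
#   Point = Concurrent::MutableStruct.new(:x, :y)
#   point = Point.new(0, 0)
#   point.x = 42 # each getter and setter is synchronized individually
#   point[:x]    #=> 42
#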
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#210
def new(*args, &block); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/mutable_struct.rb#220
Concurrent::MutableStruct::FACTORY = T.let(T.unsafe(nil), T.untyped)
# A boolean value that can be updated atomically. Reads and writes to an atomic
# boolean are thread-safe and guaranteed to succeed. Reads and writes may block
# briefly but no explicit locking is required.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
#   atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
#   *coordinated*, *synchronous* change of *many* states. Used when multiple
#   values must change together, in an all-or-nothing transaction.
# Performance:
#
# ```
# Testing with ruby 2.1.2
# Testing with Concurrent::MutexAtomicBoolean...
# 2.790000 0.000000 2.790000 ( 2.791454)
# Testing with Concurrent::CAtomicBoolean...
# 0.740000 0.000000 0.740000 ( 0.740206)
#
# Testing with jruby 1.9.3
# Testing with Concurrent::MutexAtomicBoolean...
# 5.240000 2.520000 7.760000 ( 3.683000)
# Testing with Concurrent::JavaAtomicBoolean...
# 3.340000 0.010000 3.350000 ( 0.855000)
# ```
#
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicBoolean.html java.util.concurrent.atomic.AtomicBoolean
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#8
class Concurrent::MutexAtomicBoolean
extend ::Concurrent::Synchronization::SafeInitialization
# Creates a new `AtomicBoolean` with the given initial value.
#
# @param initial [Boolean] the initial value
# @return [MutexAtomicBoolean] a new instance of MutexAtomicBoolean
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#12
def initialize(initial = T.unsafe(nil)); end
# Is the current value `false`
#
# @return [Boolean] true if the current value is `false`, else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#34
def false?; end
# Explicitly sets the value to false.
#
# @return [Boolean] true if value has changed, otherwise false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#44
def make_false; end
# Explicitly sets the value to true.
#
# @return [Boolean] true if value has changed, otherwise false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#39
def make_true; end
# Is the current value `true`
#
# @return [Boolean] true if the current value is `true`, else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#29
def true?; end
# Retrieves the current `Boolean` value.
#
# @return [Boolean] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#19
def value; end
# Explicitly sets the value.
#
# @param value [Boolean] the new value to be set
# @return [Boolean] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#24
def value=(value); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#51
def synchronize; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb#62
def ns_make_value(value); end
end
# A numeric value that can be updated atomically. Reads and writes to an atomic
# fixnum are thread-safe and guaranteed to succeed. Reads and writes may block
# briefly but no explicit locking is required.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
# *coordinated*, *synchronous* change of *many* states. Used when multiple
# values must change together in an all-or-nothing transaction.
#
# Performance:
#
# ```
# Testing with ruby 2.1.2
# Testing with Concurrent::MutexAtomicFixnum...
# 3.130000 0.000000 3.130000 ( 3.136505)
# Testing with Concurrent::CAtomicFixnum...
# 0.790000 0.000000 0.790000 ( 0.785550)
#
# Testing with jruby 1.9.3
# Testing with Concurrent::MutexAtomicFixnum...
# 5.460000 2.460000 7.920000 ( 3.715000)
# Testing with Concurrent::JavaAtomicFixnum...
# 4.520000 0.030000 4.550000 ( 1.187000)
# ```
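#
# A minimal usage sketch, assuming the public `Concurrent::AtomicFixnum`
# facade (which falls back to this mutex-based implementation when no native
# extension is available):
#
# ```ruby
# require 'concurrent/atomic/atomic_fixnum'
#
# counter = Concurrent::AtomicFixnum.new(0)
# counter.increment              #=> 1
# counter.increment(5)           #=> 6
# counter.decrement              #=> 5
# counter.compare_and_set(5, 10) #=> true
# counter.update { |v| v * 2 }   #=> 20
# ```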
#
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html java.util.concurrent.atomic.AtomicLong
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#9
class Concurrent::MutexAtomicFixnum
extend ::Concurrent::Synchronization::SafeInitialization
# Creates a new `AtomicFixnum` with the given initial value.
#
# @param initial [Fixnum] the initial value
# @raise [ArgumentError] if the initial value is not a `Fixnum`
# @return [MutexAtomicFixnum] a new instance of MutexAtomicFixnum
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#13
def initialize(initial = T.unsafe(nil)); end
# Atomically sets the value to the given updated value if the current
# value == the expected value.
#
# @param expect [Fixnum] the expected value
# @param update [Fixnum] the new value
# @return [Boolean] true if the value was updated else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#44
def compare_and_set(expect, update); end
# Decreases the current value by the given amount (defaults to 1).
#
# @param delta [Fixnum] the amount by which to decrease the current value
# @return [Fixnum] the current value after decrementation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#37
def decrement(delta = T.unsafe(nil)); end
# Decreases the current value by the given amount (defaults to 1).
#
# @param delta [Fixnum] the amount by which to decrease the current value
# @return [Fixnum] the current value after decrementation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#37
def down(delta = T.unsafe(nil)); end
# Increases the current value by the given amount (defaults to 1).
#
# @param delta [Fixnum] the amount by which to increase the current value
# @return [Fixnum] the current value after incrementation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#30
def increment(delta = T.unsafe(nil)); end
# Increases the current value by the given amount (defaults to 1).
#
# @param delta [Fixnum] the amount by which to increase the current value
# @return [Fixnum] the current value after incrementation
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#30
def up(delta = T.unsafe(nil)); end
# Pass the current value to the given block, replacing it
# with the block's result. May retry if the value changes
# during the block's execution.
#
# @return [Object] the new value
# @yield [Object] Calculate a new value for the atomic reference using
# given (old) value
# @yieldparam old_value [Object] the starting value of the atomic reference
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#56
def update; end
# Retrieves the current `Fixnum` value.
#
# @return [Fixnum] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#20
def value; end
# Explicitly sets the value.
#
# @param value [Fixnum] the new value to be set
# @raise [ArgumentError] if the new value is not a `Fixnum`
# @return [Fixnum] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#25
def value=(value); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#65
def synchronize; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb#76
def ns_set(value); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#9
class Concurrent::MutexAtomicReference
include ::Concurrent::AtomicDirectUpdate
include ::Concurrent::AtomicNumericCompareAndSetWrapper
extend ::Concurrent::Synchronization::SafeInitialization
# @param value [Object] The initial value.
# @return [MutexAtomicReference] a new instance of MutexAtomicReference
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#16
def initialize(value = T.unsafe(nil)); end
# Atomically sets the value to the given updated value if
# the current value == the expected value.
#
# @param old_value [Object] the expected value
# @param new_value [Object] the new value
# @return [Boolean] `true` if successful. A `false` return indicates
#   that the actual value was not equal to the expected value.
#
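# A minimal sketch of the compare-and-set pattern this method supports,
# assuming the public `Concurrent::AtomicReference` facade (note that
# non-numeric values are compared by object identity):
#
# ```ruby
# require 'concurrent/atomic/atomic_reference'
#
# ref = Concurrent::AtomicReference.new(:initial)
# ref.compare_and_set(:initial, :updated) #=> true, expected value matched
# ref.compare_and_set(:initial, :other)   #=> false, value is no longer :initial
# ref.update { |value| "#{value}!" }      # retries the CAS loop as needed
# ref.get                                 #=> "updated!"
# ```
#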
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#45
def _compare_and_set(old_value, new_value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/numeric_cas_wrapper.rb#10
def compare_and_swap(old_value, new_value); end
# Gets the current value.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#23
def get; end
# Atomically sets to the given value and returns the old value.
#
# @param new_value [Object] the new value
# @return [Object] the old value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#35
def get_and_set(new_value); end
# Sets to the given value.
#
# @param new_value [Object] the new value
# @return [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#29
def set(new_value); end
# Atomically sets to the given value and returns the old value.
#
# @param new_value [Object] the new value
# @return [Object] the old value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#35
def swap(new_value); end
# Gets the current value.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#23
def value; end
# Sets to the given value.
#
# @param new_value [Object] the new value
# @return [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#29
def value=(new_value); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb#59
def synchronize; end
end
# A synchronization object that allows one thread to wait on multiple other threads.
# The thread that will wait creates a `CountDownLatch` and sets the initial value
# (normally equal to the number of other threads). The initiating thread passes the
# latch to the other threads then waits for the other threads by calling the `#wait`
# method. Each of the other threads calls `#count_down` when done with its work.
# When the latch counter reaches zero the waiting thread is unblocked and continues
# with its work. A `CountDownLatch` can be used only once. Its value cannot be reset.
#
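# A minimal usage sketch, assuming the public `Concurrent::CountDownLatch`
# facade backed by this class on MRI:
#
# ```ruby
# require 'concurrent/atomic/count_down_latch'
#
# latch = Concurrent::CountDownLatch.new(3)
#
# workers = 3.times.map do
#   Thread.new do
#     # ... perform some unit of work ...
#     latch.count_down
#   end
# end
#
# latch.wait(5) #=> true once all three workers have counted down
# workers.each(&:join)
# ```
#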
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#9
class Concurrent::MutexCountDownLatch < ::Concurrent::Synchronization::LockableObject
# Create a new `CountDownLatch` with the initial `count`.
#
# @param count [Fixnum] the initial count
# @raise [ArgumentError] if `count` is not an integer or is less than zero
# @return [MutexCountDownLatch] a new instance of MutexCountDownLatch
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#12
def initialize(count = T.unsafe(nil)); end
# The current value of the counter.
#
# @return [Fixnum] the current value of the counter
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#34
def count; end
# Signal the latch to decrement the counter. Will signal all blocked threads when
# the `count` reaches zero.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#26
def count_down; end
# Block on the latch until the counter reaches zero or until `timeout` is reached.
#
# @param timeout [Fixnum] the number of seconds to wait for the counter or `nil`
# to block indefinitely
# @return [Boolean] `true` if the `count` reaches zero else false on `timeout`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#21
def wait(timeout = T.unsafe(nil)); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb#40
def ns_initialize(count); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#9
class Concurrent::MutexSemaphore < ::Concurrent::Synchronization::LockableObject
# @return [MutexSemaphore] a new instance of MutexSemaphore
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#12
def initialize(count); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#20
def acquire(permits = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#38
def available_permits; end
# Acquires and returns all permits that are immediately available.
#
# @return [Integer]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#47
def drain_permits; end
# Shrinks the number of available permits by the indicated reduction.
#
# @param reduction [Fixnum] Number of permits to remove.
# @raise [ArgumentError] if `reduction` is not an integer or is negative
# @raise [ArgumentError] if `@free` - `@reduction` is less than zero
# @return [nil]
#
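# A minimal sketch of the effect on the available permits, assuming the
# public `Concurrent::Semaphore` facade backed by this class on MRI:
#
# ```ruby
# require 'concurrent/atomic/semaphore'
#
# semaphore = Concurrent::Semaphore.new(5)
# semaphore.reduce_permits(2)
# semaphore.available_permits #=> 3
# ```
#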
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#99
def reduce_permits(reduction); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#77
def release(permits = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#54
def try_acquire(permits = T.unsafe(nil), timeout = T.unsafe(nil)); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#110
def ns_initialize(count); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#117
def try_acquire_now(permits); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/mutex_semaphore.rb#127
def try_acquire_timed(permits, timeout); end
end
# Various classes within allow for +nil+ values to be stored,
# so a special +NULL+ token is required to indicate the "nil-ness".
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/constants.rb#6
Concurrent::NULL = T.let(T.unsafe(nil), Object)
# Suppresses all output when used for logging.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/concern/logging.rb#108
Concurrent::NULL_LOGGER = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/options.rb#6
module Concurrent::Options
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/options.rb#27
def executor(executor_identifier); end
# Get the requested `Executor` based on the values set in the options hash.
#
# @option opts
# @param opts [Hash] the options defining the requested executor
# @return [Executor, nil] the requested thread pool, or nil when no option specified
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/options.rb#19
def executor_from_options(opts = T.unsafe(nil)); end
end
end
# Promises are inspired by the JavaScript [Promises/A](http://wiki.commonjs.org/wiki/Promises/A)
# and [Promises/A+](http://promises-aplus.github.io/promises-spec/) specifications.
#
# > A promise represents the eventual value returned from the single
# > completion of an operation.
#
# Promises are similar to futures and share many of the same behaviours.
# Promises are far more robust, however. Promises can be chained in a tree
# structure where each promise may have zero or more children. Promises are
# chained using the `then` method. The result of a call to `then` is always
# another promise. Promises are resolved asynchronously (with respect to the
# main thread) but in a strict order: parents are guaranteed to be resolved
# before their children, children before their younger siblings. The `then`
# method takes two parameters: an optional block to be executed upon parent
# resolution and an optional callable to be executed upon parent failure. The
# result of each promise is passed to each of its children upon resolution.
# When a promise is rejected all its children will be summarily rejected and
# will receive the reason.
#
# Promises have several possible states: *:unscheduled*, *:pending*,
# *:processing*, *:rejected*, or *:fulfilled*. These are also aggregated as
# `#incomplete?` and `#complete?`. When a Promise is created it is set to
# *:unscheduled*. Once the `#execute` method is called the state becomes
# *:pending*. Once a job is pulled from the thread pool's queue and is given
# to a thread for processing (often immediately upon `#post`) the state
# becomes *:processing*. The future will remain in this state until processing
# is complete. A future that is in the *:unscheduled*, *:pending*, or
# *:processing* state is considered `#incomplete?`. A `#complete?` Promise is either
# *:rejected*, indicating that an exception was thrown during processing, or
# *:fulfilled*, indicating success. If a Promise is *:fulfilled* its `#value`
# will be updated to reflect the result of the operation. If *:rejected* the
# `reason` will be updated with a reference to the thrown exception. The
# predicate methods `#unscheduled?`, `#pending?`, `#rejected?`, and
# `#fulfilled?` can be called at any time to obtain the state of the Promise,
# as can the `#state` method, which returns a symbol.
#
# Retrieving the value of a promise is done through the `value` (alias:
# `deref`) method. Obtaining the value of a promise is a potentially blocking
# operation. When a promise is *rejected* a call to `value` will return `nil`
# immediately. When a promise is *fulfilled* a call to `value` will
# immediately return the current value. When a promise is *pending* a call to
# `value` will block until the promise is either *rejected* or *fulfilled*. A
# *timeout* value can be passed to `value` to limit how long the call will
# block. If `nil` the call will block indefinitely. If `0` the call will not
# block. Any other integer or float value will indicate the maximum number of
# seconds to block.
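#
# For example, a minimal sketch:
#
# ```ruby
# p = Concurrent::Promise.execute{ sleep(0.1); :done }
#
# p.value(0) #=> nil, returns immediately while the promise is still pending
# p.value    #=> :done, blocks until the promise is fulfilled
# ```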
#
# Promises run on the global thread pool.
#
# ### Examples
#
# Start by requiring promises
#
# ```ruby
# require 'concurrent/promise'
# ```
#
# Then create one
#
# ```ruby
# p = Concurrent::Promise.execute do
# # do something
# 42
# end
# ```
#
# Promises can be chained using the `then` method. The `then` method accepts a
# block and an executor, to be executed on fulfillment, and a callable argument to be executed
# on rejection. The result of each promise is passed as the block argument
# to chained promises.
#
# ```ruby
# p = Concurrent::Promise.new{10}.then{|x| x * 2}.then{|result| result - 10 }.execute
# ```
#
# And so on, and so on, and so on...
#
# ```ruby
# p = Concurrent::Promise.fulfill(20).
# then{|result| result - 10 }.
# then{|result| result * 3 }.
# then(executor: different_executor){|result| result % 5 }.execute
# ```
#
# The initial state of a newly created Promise depends on the state of its parent:
# - if parent is *unscheduled* the child will be *unscheduled*
# - if parent is *pending* the child will be *pending*
# - if parent is *fulfilled* the child will be *pending*
# - if parent is *rejected* the child will be *pending* (but will ultimately be *rejected*)
#
# Promises are executed asynchronously from the main thread. By the time a
# child Promise finishes initialization it may be in a different state than its
# parent (by the time a child is created its parent may have completed
# execution and changed state). Despite being asynchronous, however, the order
# of execution of Promise objects in a chain (or tree) is strictly defined.
#
# There are multiple ways to create and execute a new `Promise`. All of them
# provide identical behavior:
#
# ```ruby
# # create, operate, then execute
# p1 = Concurrent::Promise.new{ "Hello World!" }
# p1.state #=> :unscheduled
# p1.execute
#
# # create and immediately execute
# p2 = Concurrent::Promise.new{ "Hello World!" }.execute
#
# # execute during creation
# p3 = Concurrent::Promise.execute{ "Hello World!" }
# ```
#
# Once the `execute` method is called a `Promise` becomes `pending`:
#
# ```ruby
# p = Concurrent::Promise.execute{ "Hello, world!" }
# p.state #=> :pending
# p.pending? #=> true
# ```
#
# Wait a little bit, and the promise will resolve and provide a value:
#
# ```ruby
# p = Concurrent::Promise.execute{ "Hello, world!" }
# sleep(0.1)
#
# p.state #=> :fulfilled
# p.fulfilled? #=> true
# p.value #=> "Hello, world!"
# ```
#
# If an exception occurs, the promise will be rejected and will provide
# a reason for the rejection:
#
# ```ruby
# p = Concurrent::Promise.execute{ raise StandardError.new("Here comes the Boom!") }
# sleep(0.1)
#
# p.state #=> :rejected
# p.rejected? #=> true
# p.reason #=> "#<StandardError: Here comes the Boom!>"
# ```
#
# #### Rejection
#
# When a promise is rejected all its children will be rejected and will
# receive the rejection `reason` as the rejection callable parameter:
#
# ```ruby
# p = Concurrent::Promise.execute { Thread.pass; raise StandardError }
#
# c1 = p.then(-> reason { 42 })
# c2 = p.then(-> reason { raise 'Boom!' })
#
# c1.wait.state #=> :fulfilled
# c1.value       #=> 42
# c2.wait.state #=> :rejected
# c2.reason #=> #<RuntimeError: Boom!>
# ```
#
# Once a promise is rejected it will continue to accept children that will
# be rejected immediately (they will be executed asynchronously).
#
# #### Aliases
#
# The `then` method is the most generic alias: it accepts a block to be
# executed upon parent fulfillment and a callable to be executed upon parent
# rejection. At least one of them should be passed. The default block is `{
# |result| result }` that fulfills the child with the parent value. The
# default callable is `{ |reason| raise reason }` that rejects the child with
# the parent reason.
#
# - `on_success { |result| ... }` is the same as `then {|result| ... }`
# - `rescue { |reason| ... }` is the same as `then(Proc.new { |reason| ... } )`
# - `rescue` is aliased by `catch` and `on_error`
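#
# For example, a minimal sketch using the `rescue` alias to recover from a
# rejected parent:
#
# ```ruby
# p = Concurrent::Promise.execute{ raise StandardError.new('oops') }
# c = p.rescue{ |reason| "recovered from #{reason.message}" }
#
# c.wait.value #=> "recovered from oops"
# ```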
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#190
class Concurrent::Promise < ::Concurrent::IVar
# Initialize a new Promise with the provided options.
#
# @option opts
# @option opts
# @option opts
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Promise] a new instance of Promise
# @see http://wiki.commonjs.org/wiki/Promises/A
# @see http://promises-aplus.github.io/promises-spec/
# @yield The block operation to be performed asynchronously.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#210
def initialize(opts = T.unsafe(nil), &block); end
# Chain onto this promise an action to be undertaken on failure
# (rejection).
#
# @return [Promise] self
# @yield The block to execute
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#360
def catch(&block); end
# Execute an `:unscheduled` `Promise`. Immediately sets the state to `:pending` and
# passes the block to a new thread/thread pool for eventual execution.
# Does nothing if the `Promise` is in any state other than `:unscheduled`.
#
# @return [Promise] a reference to `self`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#246
def execute; end
# Set the `IVar` to failed due to some error and wake or notify all threads waiting on it.
#
# @param reason [Object] for the failure
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @raise [Concurrent::PromiseExecutionError] if not the root promise
# @return [IVar] self
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#278
def fail(reason = T.unsafe(nil)); end
# Yield the successful result to the block that returns a promise. If that
# promise is also successful the result is the result of the yielded promise.
# If either part fails the whole also fails.
#
# @example
# Promise.execute { 1 }.flat_map { |v| Promise.execute { v + 2 } }.value! #=> 3
# @return [Promise]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#375
def flat_map(&block); end
# Chain onto this promise an action to be undertaken on failure
# (rejection).
#
# @return [Promise] self
# @yield The block to execute
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#360
def on_error(&block); end
# Chain onto this promise an action to be undertaken on success
# (fulfillment).
#
# @raise [ArgumentError]
# @return [Promise] self
# @yield The block to execute
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#349
def on_success(&block); end
# Chain onto this promise an action to be undertaken on failure
# (rejection).
#
# @return [Promise] self
# @yield The block to execute
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#360
def rescue(&block); end
# Set the `IVar` to a value and wake or notify all threads waiting on it.
#
# @param value [Object] the value to store in the `IVar`
# @raise [ArgumentError] if both a value and a block are given
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @raise [Concurrent::PromiseExecutionError] if not the root promise
# @return [IVar] self
# @yield A block operation to use for setting the value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#262
def set(value = T.unsafe(nil), &block); end
# Chain a new promise off the current promise.
#
# @overload then
# @overload then
# @raise [ArgumentError]
# @return [Promise] the new promise
# @yield The block operation to be performed asynchronously.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#314
def then(*args, &block); end
# Builds a promise that produces the result of self and others in an Array
# and fails if any of them fails.
#
# @overload zip
# @overload zip
# @return [Promise<Array>]
#
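# For example, a minimal sketch:
#
# ```ruby
# a = Concurrent::Promise.execute{ 1 }
# b = Concurrent::Promise.execute{ 2 }
#
# a.zip(b).value! #=> [1, 2]
# ```
#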
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#440
def zip(*others); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#551
def complete(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#545
def notify_child(child); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#481
def ns_initialize(value, opts); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#533
def on_fulfill(result); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#539
def on_reject(reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#562
def realize(task); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#528
def root?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#520
def set_pending; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#570
def set_state!(success, value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#576
def synchronized_set_state!(success, value, reason); end
class << self
# Aggregate a collection of zero or more promises under a composite promise,
# execute the aggregated promises and collect them into a standard Ruby array,
# call the given Ruby `Enumerable` predicate (such as `any?`, `all?`, `none?`,
# or `one?`) on the collection checking for the success or failure of each,
# then executing the composite's `#then` handlers if the predicate returns
# `true` or executing the composite's `#rescue` handlers if the predicate
# returns false.
#
#
# The returned promise will not yet have been executed. Additional `#then`
# and `#rescue` handlers may still be provided. Once the returned promise
# is execute the aggregate promises will be also be executed (if they have
# not been executed already). The results of the aggregate promises will
# be checked upon completion. The necessary `#then` and `#rescue` blocks
# on the aggregating promise will then be executed as appropriate. If the
# `#rescue` handlers are executed the raises exception will be
# `Concurrent::PromiseExecutionError`.
#
# @param promises [Array] Zero or more promises to aggregate
# @return [Promise] an unscheduled (not executed) promise that aggregates
# the promises given as arguments
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#505
def aggregate(method, *promises); end
# Aggregates a collection of promises and executes the `then` condition
# if all aggregated promises succeed. Executes the `rescue` handler with
# a `Concurrent::PromiseExecutionError` if any of the aggregated promises
# fail. Upon execution will execute any of the aggregate promises that
# were not already executed.
#
# @return [Promise] an unscheduled (not executed) promise that aggregates
#   the promises given as arguments
#
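# A minimal sketch, assuming both aggregated promises fulfill:
#
# ```ruby
# p1 = Concurrent::Promise.execute{ 1 }
# p2 = Concurrent::Promise.execute{ 2 }
#
# ok = Concurrent::Promise.all?(p1, p2).then{ 'both fulfilled' }.execute
# ok.value #=> "both fulfilled"
# ```
#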
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#464
def all?(*promises); end
# Aggregates a collection of promises and executes the `then` condition
# if any aggregated promises succeed. Executes the `rescue` handler with
# a `Concurrent::PromiseExecutionError` if any of the aggregated promises
# fail. Upon execution will execute any of the aggregate promises that
# were not already executed.
#
#
# The returned promise will not yet have been executed. Additional `#then`
# and `#rescue` handlers may still be provided. Once the returned promise
# is executed the aggregated promises will also be executed (if they have
# not been executed already). The results of the aggregated promises will
# be checked upon completion. The necessary `#then` and `#rescue` blocks
# on the aggregating promise will then be executed as appropriate. If the
# `#rescue` handlers are executed the raised exception will be
# `Concurrent::PromiseExecutionError`.
#
# @param promises [Array] Zero or more promises to aggregate
# @return [Promise] an unscheduled (not executed) promise that aggregates
# the promises given as arguments
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#475
def any?(*promises); end
# Create a new `Promise` object with the given block, execute it, and return the
# `:pending` object.
#
# @example
# promise = Concurrent::Promise.execute{ sleep(1); 42 }
# promise.state #=> :pending
# @option opts
# @option opts
# @option opts
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Promise] the newly created `Promise` in the `:pending` state
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#296
def execute(opts = T.unsafe(nil), &block); end
# Create a new `Promise` and fulfill it immediately.
#
# @option opts
# @option opts
# @option opts
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Promise] the newly created `Promise`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#224
def fulfill(value, opts = T.unsafe(nil)); end
# Create a new `Promise` and reject it immediately.
#
# @option opts
# @option opts
# @option opts
# @option opts
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] if no block is given
# @return [Promise] the newly created `Promise`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#237
def reject(reason, opts = T.unsafe(nil)); end
# Builds a promise that produces the result of promises in an Array
# and fails if any of them fails.
#
# @overload zip
# @overload zip
# @return [Promise<Array>]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#409
def zip(*promises); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promise.rb#11
class Concurrent::PromiseExecutionError < ::StandardError; end
# {include:file:docs-source/promises-main.md}
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#13
module Concurrent::Promises
extend ::Concurrent::Promises::FactoryMethods::Configuration
extend ::Concurrent::Promises::FactoryMethods
end
# @abstract
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2047
class Concurrent::Promises::AbstractAnyPromise < ::Concurrent::Promises::BlockedPromise; end
# Common ancestor of the {Event} and {Future} classes; many shared methods are defined here.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#513
class Concurrent::Promises::AbstractEventFuture < ::Concurrent::Synchronization::Object
include ::Concurrent::Promises::InternalStates
extend ::Concurrent::Synchronization::SafeInitialization
# @return [AbstractEventFuture] a new instance of AbstractEventFuture
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#522
def initialize(promise, default_executor); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#738
def add_callback_clear_delayed_node(node); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#733
def add_callback_notify_blocked(promise, index); end
# For inspection.
#
# @return [Array<AbstractPromise>]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#702
def blocks; end
# For inspection.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#710
def callbacks; end
# Shortcut of {#chain_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #chain_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#596
def chain(*args, &task); end
# Chains the task to be executed asynchronously on executor after it is resolved.
#
# @overload an_event.chain_on
# @overload a_future.chain_on
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [Future]
# @yieldreturn will become result of the returned Future.
# Its returned value becomes {Future#value} fulfilling it,
# raised exception becomes {Future#reason} rejecting it.
#
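#
# A minimal sketch using the `Concurrent::Promises` factory methods:
#
# ```ruby
# require 'concurrent/promises'
#
# future = Concurrent::Promises.future { 6 }
# future.chain_on(:fast) { |fulfilled, value, reason| fulfilled ? value * 7 : raise(reason) }.value!
# #=> 42
# ```
#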
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#614
def chain_on(executor, *args, &task); end
# Resolves the resolvable when receiver is resolved.
#
# @param resolvable [Resolvable]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#629
def chain_resolvable(resolvable); end
# Returns default executor.
#
# @return [Executor] default executor
# @see #with_default_executor
# @see FactoryMethods#future_on
# @see FactoryMethods#resolvable_future
# @see FactoryMethods#any_fulfilled_future_on
# @see similar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#590
def default_executor; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#619
def inspect; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def internal_state; end
# Shortcut of {#on_resolution_using} with default `:io` executor supplied.
#
# @return [self]
# @see #on_resolution_using
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#637
def on_resolution(*args, &callback); end
# Stores the callback to be executed synchronously on the resolving thread after it is
# resolved.
#
# @overload an_event.on_resolution!
# @overload a_future.on_resolution!
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#655
def on_resolution!(*args, &callback); end
# Stores the callback to be executed asynchronously on executor after it is resolved.
#
# @overload an_event.on_resolution_using
# @overload a_future.on_resolution_using
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#673
def on_resolution_using(executor, *args, &callback); end
# Is it in pending state?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#549
def pending?; end
# For inspection.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#716
def promise; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#688
def resolve_with(state, raise_on_reassign = T.unsafe(nil), reserved = T.unsafe(nil)); end
# Is it in resolved state?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#555
def resolved?; end
# Returns its state.
#
# @overload an_event.state
# @overload a_future.state
# @return [Symbol]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#543
def state; end
# Resolves the resolvable when receiver is resolved.
#
# @param resolvable [Resolvable]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#629
def tangle(resolvable); end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#619
def to_s; end
# Propagates touch. Requests all the delayed futures, which it depends on, to be
# executed. This method is called by any other method requiring resolved state, like {#wait}.
#
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#562
def touch; end
# For inspection.
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#722
def touched?; end
# Wait (block the Thread) until receiver is {#resolved?}.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [self, true, false] self implies timeout was not used, true implies timeout was used
# and it was resolved, false implies it was not resolved within timeout.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#578
def wait(timeout = T.unsafe(nil)); end
# For inspection.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#728
def waiting_threads; end
# Creates a new object of the same class with the executor set as its new default executor.
# Any futures depending on it will use the new default executor.
#
# @abstract
# @raise [NotImplementedError]
# @return [AbstractEventFuture]
# @see Event#with_default_executor
# @see Future#with_default_executor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#683
def with_default_executor(executor); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#743
def with_hidden_resolvable; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#750
def add_callback(method, *args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#812
def async_callback_on_resolution(state, executor, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#796
def call_callback(method, state, args); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#800
def call_callbacks(state); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#763
def callback_clear_delayed_node(state, node); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#818
def callback_notify_blocked(state, promise, index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_internal_state(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def internal_state=(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_internal_state(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_internal_state(&block); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#768
def wait_until_resolved(timeout); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#808
def with_async(executor, *args, &block); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1796
class Concurrent::Promises::AbstractFlatPromise < ::Concurrent::Promises::BlockedPromise
# @return [AbstractFlatPromise] a new instance of AbstractFlatPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1798
def initialize(delayed_because, blockers_count, event_or_future); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1808
def touch; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1828
def add_delayed_of(future); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1820
def on_resolvable(resolved_future, index); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1824
def resolvable?(countdown, future, index); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1816
def touched?; end
end
# @abstract
# @private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1549
class Concurrent::Promises::AbstractPromise < ::Concurrent::Synchronization::Object
include ::Concurrent::Promises::InternalStates
extend ::Concurrent::Synchronization::SafeInitialization
# @return [AbstractPromise] a new instance of AbstractPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1553
def initialize(future); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1564
def default_executor; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1581
def delayed_because; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1558
def event; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1558
def future; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1575
def inspect; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1568
def state; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1575
def to_s; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1572
def touch; end
private
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1592
def evaluate_to(*args, block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1587
def resolve_with(new_state, raise_on_reassign = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2084
class Concurrent::Promises::AnyFulfilledFuturePromise < ::Concurrent::Promises::AnyResolvedFuturePromise
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2088
def resolvable?(countdown, event_or_future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2050
class Concurrent::Promises::AnyResolvedEventPromise < ::Concurrent::Promises::AbstractAnyPromise
# @return [AnyResolvedEventPromise] a new instance of AnyResolvedEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2054
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2062
def on_resolvable(resolved_future, index); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2058
def resolvable?(countdown, future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2067
class Concurrent::Promises::AnyResolvedFuturePromise < ::Concurrent::Promises::AbstractAnyPromise
# @return [AnyResolvedFuturePromise] a new instance of AnyResolvedFuturePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2071
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2079
def on_resolvable(resolved_future, index); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2075
def resolvable?(countdown, future, index); end
end
# @abstract
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1619
class Concurrent::Promises::BlockedPromise < ::Concurrent::Promises::InnerPromise
# @return [BlockedPromise] a new instance of BlockedPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1661
def initialize(delayed, blockers_count, future); end
# for inspection only
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1683
def blocked_by; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1674
def delayed_because; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1667
def on_blocker_resolution(future, index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1678
def touch; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1691
def clear_and_propagate_touch(stack_or_element = T.unsafe(nil)); end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1710
def on_resolvable(resolved_future, index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1706
def process_on_blocker_resolution(future, index); end
# @return [true, false] if resolvable
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1702
def resolvable?(countdown, future, index); end
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1652
def add_delayed(delayed1, delayed2); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1645
def new_blocked_by(blockers, *args, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1623
def new_blocked_by1(blocker, *args, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1630
def new_blocked_by2(blocker1, blocker2, *args, &block); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#29
def new(*args, &block); end
end
end
# @abstract
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1716
class Concurrent::Promises::BlockedTaskPromise < ::Concurrent::Promises::BlockedPromise
# @raise [ArgumentError]
# @return [BlockedTaskPromise] a new instance of BlockedTaskPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1717
def initialize(delayed, blockers_count, default_executor, executor, args, &task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1725
def executor; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1766
class Concurrent::Promises::ChainPromise < ::Concurrent::Promises::BlockedTaskPromise
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1769
def on_resolvable(resolved_future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2095
class Concurrent::Promises::DelayPromise < ::Concurrent::Promises::InnerPromise
# @return [DelayPromise] a new instance of DelayPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2097
def initialize(default_executor); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2108
def delayed_because; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2104
def touch; end
end
# Represents an event which will happen in the future (will be resolved). The event is either
# pending or resolved. It should always eventually be resolved. Use {Future} to communicate rejections and
# cancellation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#826
class Concurrent::Promises::Event < ::Concurrent::Promises::AbstractEventFuture
# Creates a new event or a future which will be resolved when receiver and other are.
# Returns an event if receiver and other are events, otherwise returns a future.
# If just one of the parties is Future then the result
# of the returned future is equal to the result of the supplied future. If both are futures
# then the result is as described in {FactoryMethods#zip_futures_on}.
#
# @return [Future, Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#839
def &(other); end
# Creates a new event which will be resolved when the first of receiver, `event_or_future`
# resolves.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#853
def any(event_or_future); end
# Creates a new event dependent on the receiver which will not evaluate until touched, see {#touch}.
# In other words, it inserts a delay into the chain of Futures, making the rest of it lazily evaluated.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#863
def delay; end
# Creates a new event dependent on the receiver, scheduled to execute on/in intended_time.
# The intended time is interpreted from the moment the receiver is resolved, therefore it inserts
# a delay into the chain.
#
# @param intended_time [Numeric, Time] `Numeric` means to run in `intended_time` seconds.
# `Time` means to run on `intended_time`.
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#875
def schedule(intended_time); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#596
def then(*args, &task); end
# Returns self, since this is an event
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#893
def to_event; end
# Converts the event to a future. The future is fulfilled when the event is resolved; the future can never be rejected.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#885
def to_future; end
# Creates a new object of the same class with the executor set as its new default executor.
# Any futures depending on it will use the new default executor.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#899
def with_default_executor(executor); end
# Creates a new event or a future which will be resolved when receiver and other are.
# Returns an event if receiver and other are events, otherwise returns a future.
# If just one of the parties is Future then the result
# of the returned future is equal to the result of the supplied future. If both are futures
# then the result is as described in {FactoryMethods#zip_futures_on}.
#
# @return [Future, Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#839
def zip(other); end
# Creates a new event which will be resolved when the first of receiver, `event_or_future`
# resolves.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#853
def |(event_or_future); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#910
def callback_on_resolution(state, args, callback); end
# @raise [Concurrent::MultipleAssignmentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#905
def rejected_resolution(raise_on_reassign, state); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1972
class Concurrent::Promises::EventWrapperPromise < ::Concurrent::Promises::BlockedPromise
# @return [EventWrapperPromise] a new instance of EventWrapperPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1973
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1979
def on_resolvable(resolved_future, index); end
end
# Container of all {Future}, {Event} factory methods. They are never constructed directly with
# new.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#46
module Concurrent::Promises::FactoryMethods
include ::Concurrent::Promises::FactoryMethods::Configuration
extend ::Concurrent::ReInclude
extend ::Concurrent::Promises::FactoryMethods::Configuration
extend ::Concurrent::Promises::FactoryMethods
# Shortcut of {#any_resolved_future_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #any_resolved_future_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#278
def any(*futures_and_or_events); end
# Shortcut of {#any_event_on} with default `:io` executor supplied.
#
# @return [Event]
# @see #any_event_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#319
def any_event(*futures_and_or_events); end
# Creates a new event which becomes resolved after the first futures_and_or_events resolves.
# If resolved it does not propagate {Concurrent::AbstractEventFuture#touch}, leaving delayed
# futures un-executed if they are not required any more.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param futures_and_or_events [AbstractEventFuture]
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#329
def any_event_on(default_executor, *futures_and_or_events); end
# Shortcut of {#any_fulfilled_future_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #any_fulfilled_future_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#300
def any_fulfilled_future(*futures_and_or_events); end
# Creates a new future which is resolved after the first futures_and_or_events is fulfilled.
# Its result equals the result of the first resolved future or if all futures_and_or_events reject,
# it has reason of the last rejected future.
# If resolved it does not propagate {Concurrent::AbstractEventFuture#touch}, leaving delayed
# futures un-executed if they are not required any more.
# If an event is supplied, which does not have a value and can only be resolved, it is
# represented as `:fulfilled` with value `nil`.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param futures_and_or_events [AbstractEventFuture]
# @return [Future]
#
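# A minimal sketch:
#
# ```ruby
# f1 = Concurrent::Promises.rejected_future(StandardError.new('boom'))
# f2 = Concurrent::Promises.fulfilled_future(:ok)
#
# Concurrent::Promises.any_fulfilled_future(f1, f2).value! #=> :ok
# ```
#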
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#313
def any_fulfilled_future_on(default_executor, *futures_and_or_events); end
# Shortcut of {#any_resolved_future_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #any_resolved_future_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#278
def any_resolved_future(*futures_and_or_events); end
# Creates a new future which is resolved after the first of futures_and_or_events is resolved.
# Its result equals the result of the first resolved future.
# If resolved it does not propagate {Concurrent::AbstractEventFuture#touch}, leaving delayed
# futures un-executed if they are not required any more.
# If an event is supplied, which does not have a value and can only be resolved, it is
# represented as `:fulfilled` with value `nil`.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param futures_and_or_events [AbstractEventFuture]
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#294
def any_resolved_future_on(default_executor, *futures_and_or_events); end
# Shortcut of {#delay_on} with default `:io` executor supplied.
#
# @return [Future, Event]
# @see #delay_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#190
def delay(*args, &task); end
# Creates a new event or future which is resolved only after it is touched,
# see {Concurrent::AbstractEventFuture#touch}.
#
# @overload delay_on
# @overload delay_on
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#207
def delay_on(default_executor, *args, &task); end
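# A minimal usage sketch, assuming `require 'concurrent-ruby'`; `expensive_computation`
# is a hypothetical placeholder for the deferred work. The block does not run until
# the future is touched (here indirectly via #value!):
#
#   lazy = Concurrent::Promises.delay { expensive_computation }
#   lazy.resolved? # => false, nothing has run yet
#   lazy.value!    # touches the future, runs the block, and returns its result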
# Creates a resolved future which will be fulfilled with the given value.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param value [Object]
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#127
def fulfilled_future(value, default_executor = T.unsafe(nil)); end
# Shortcut of {#future_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #future_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#94
def future(*args, &task); end
# Constructs a new Future which will be resolved after the block is evaluated on the default executor.
# Evaluation begins immediately.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [Future]
# @yield [*args] to the task.
# @yieldreturn will become result of the returned Future.
# Its returned value becomes {Future#value} fulfilling it,
# raised exception becomes {Future#reason} rejecting it.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#106
def future_on(default_executor, *args, &task); end
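# A minimal usage sketch, assuming `require 'concurrent-ruby'`; the task starts
# evaluating immediately on the named global executor:
#
#   f = Concurrent::Promises.future_on(:io, 2) { |v| v * 3 }
#   f.value! # => 6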
# General constructor. Behaves differently based on the argument's type. It's provided for convenience
# but it's better to be explicit.
#
# @overload make_future
# @overload make_future
# @overload make_future
# @overload make_future
# @overload make_future
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @return [Event, Future]
# @see rejected_future, resolved_event, fulfilled_future
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#174
def make_future(argument = T.unsafe(nil), default_executor = T.unsafe(nil)); end
# Creates a resolved future which will be rejected with the given reason.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param reason [Object]
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#136
def rejected_future(reason, default_executor = T.unsafe(nil)); end
# Shortcut of {#resolvable_event_on} with default `:io` executor supplied.
#
# @return [ResolvableEvent]
# @see #resolvable_event_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#63
def resolvable_event; end
# Creates a resolvable event; the user is responsible for resolving the event once
# by calling {Promises::ResolvableEvent#resolve}.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @return [ResolvableEvent]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#72
def resolvable_event_on(default_executor = T.unsafe(nil)); end
# Shortcut of {#resolvable_future_on} with default `:io` executor supplied.
#
# @return [ResolvableFuture]
# @see #resolvable_future_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#78
def resolvable_future; end
# Creates a resolvable future; the user is responsible for resolving the future once by
# {Promises::ResolvableFuture#resolve}, {Promises::ResolvableFuture#fulfill},
# or {Promises::ResolvableFuture#reject}
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @return [ResolvableFuture]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#88
def resolvable_future_on(default_executor = T.unsafe(nil)); end
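# A minimal usage sketch, assuming `require 'concurrent-ruby'`; the future stays
# pending until some thread resolves it explicitly:
#
#   f = Concurrent::Promises.resolvable_future
#   Thread.new { sleep 0.1; f.fulfill :done }
#   f.value! # blocks until fulfilled, then => :done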
# Creates a resolved event.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#144
def resolved_event(default_executor = T.unsafe(nil)); end
# Creates a resolved future which will be either fulfilled with the given value or rejected with
# the given reason.
#
# @param fulfilled [true, false]
# @param value [Object]
# @param reason [Object]
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#118
def resolved_future(fulfilled, value, reason, default_executor = T.unsafe(nil)); end
# Shortcut of {#schedule_on} with default `:io` executor supplied.
#
# @return [Future, Event]
# @see #schedule_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#214
def schedule(intended_time, *args, &task); end
# Creates a new event or future which is resolved in intended_time.
#
# @overload schedule_on
# @overload schedule_on
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param intended_time [Numeric, Time] `Numeric` means to run in `intended_time` seconds.
# `Time` means to run on `intended_time`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#233
def schedule_on(default_executor, intended_time, *args, &task); end
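# A minimal usage sketch, assuming `require 'concurrent-ruby'`; a Numeric
# intended_time is a relative delay in seconds:
#
#   s = Concurrent::Promises.schedule(0.1) { :fired }
#   s.value! # resolves roughly 0.1 s after creation, => :fired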
# Shortcut of {#zip_futures_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #zip_futures_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#240
def zip(*futures_and_or_events); end
# Shortcut of {#zip_events_on} with default `:io` executor supplied.
#
# @return [Event]
# @see #zip_events_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#262
def zip_events(*futures_and_or_events); end
# Creates a new event which is resolved after all futures_and_or_events are resolved.
# (Future is resolved when fulfilled or rejected.)
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param futures_and_or_events [AbstractEventFuture]
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#272
def zip_events_on(default_executor, *futures_and_or_events); end
# Shortcut of {#zip_futures_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #zip_futures_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#240
def zip_futures(*futures_and_or_events); end
# Creates a new future which is resolved after all futures_and_or_events are resolved.
# Its value is an array of zipped future values. Its reason is an array of reasons for rejection.
# If any of them rejects, the returned future rejects as well.
# If an event is supplied, which does not have a value and can only be resolved, it is
# represented as `:fulfilled` with value `nil`.
#
# @param default_executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
# @param futures_and_or_events [AbstractEventFuture]
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#254
def zip_futures_on(default_executor, *futures_and_or_events); end
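# A minimal usage sketch, assuming `require 'concurrent-ruby'`; the zipped value
# is the array of the individual futures' values:
#
#   zipped = Concurrent::Promises.zip_futures(
#     Concurrent::Promises.future { 1 },
#     Concurrent::Promises.future { 2 })
#   zipped.value! # => [1, 2]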
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#50
module Concurrent::Promises::FactoryMethods::Configuration
# @return [Executor, :io, :fast] the executor which is used when none is supplied
# to a factory method. The method can be overridden in the receivers of
# `include FactoryMethod`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#54
def default_executor; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1840
class Concurrent::Promises::FlatEventPromise < ::Concurrent::Promises::AbstractFlatPromise
# @return [FlatEventPromise] a new instance of FlatEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1844
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1848
def process_on_blocker_resolution(future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1873
class Concurrent::Promises::FlatFuturePromise < ::Concurrent::Promises::AbstractFlatPromise
# @raise [ArgumentError]
# @return [FlatFuturePromise] a new instance of FlatFuturePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1877
def initialize(delayed, blockers_count, levels, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1884
def process_on_blocker_resolution(future, index); end
end
# Represents a value which will become available in future. May reject with a reason instead,
# e.g. when the tasks raises an exception.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#917
class Concurrent::Promises::Future < ::Concurrent::Promises::AbstractEventFuture
# Creates a new event or a future which will be resolved when receiver and other are.
# Returns an event if receiver and other are events, otherwise returns a future.
# If just one of the parties is a Future then the result
# of the returned future is equal to the result of the supplied future. If both are futures
# then the result is as described in {FactoryMethods#zip_futures_on}.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1070
def &(other); end
# Creates a new future which will be resolved when the first of the receiver and `event_or_future`
# resolves. The returned future will have value nil if event_or_future is an event and resolves
# first.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1085
def any(event_or_future); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1215
def apply(args, block); end
# Creates a new future dependent on the receiver which will not evaluate until touched, see {#touch}.
# In other words, it inserts a delay into the chain of Futures, making the rest of it lazily evaluated.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1095
def delay; end
# Allows a rejected Future to be raised with the `raise` method.
# If the reason is not an exception `Runtime.new(reason)` is returned.
#
# @example
# raise Promises.rejected_future(StandardError.new("boom"))
# raise Promises.rejected_future("or just boom")
# @raise [Concurrent::Error] when raising a future that is not rejected
# @return [Exception]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1013
def exception(*args); end
# Creates a new future which will have the result of the future returned by the receiver. If the receiver
# rejects, it will have its rejection.
#
# @param level [Integer] how many levels of futures should flatten
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1120
def flat(level = T.unsafe(nil)); end
# Creates a new event which will be resolved when the event returned by the receiver is.
# Be careful: if the receiver rejects, it will just resolve, since an Event does not hold a reason.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1130
def flat_event; end
# Creates a new future which will have the result of the future returned by the receiver. If the receiver
# rejects, it will have its rejection.
#
# @param level [Integer] how many levels of futures should flatten
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1120
def flat_future(level = T.unsafe(nil)); end
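# A minimal usage sketch, assuming `require 'concurrent-ruby'`; one level of
# nesting is flattened by default:
#
#   nested = Concurrent::Promises.future { Concurrent::Promises.future { 42 } }
#   nested.flat.value! # => 42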
# Is it in fulfilled state?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#921
def fulfilled?; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1235
def inspect; end
# Shortcut of {#on_fulfillment_using} with default `:io` executor supplied.
#
# @return [self]
# @see #on_fulfillment_using
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1136
def on_fulfillment(*args, &callback); end
# Stores the callback to be executed synchronously on resolving thread after it is
# fulfilled. Does nothing on rejection.
#
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yield [value, *args] to the callback.
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1147
def on_fulfillment!(*args, &callback); end
# Stores the callback to be executed asynchronously on executor after it is
# fulfilled. Does nothing on rejection.
#
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yield [value, *args] to the callback.
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1159
def on_fulfillment_using(executor, *args, &callback); end
# Shortcut of {#on_rejection_using} with default `:io` executor supplied.
#
# @return [self]
# @see #on_rejection_using
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1165
def on_rejection(*args, &callback); end
# Stores the callback to be executed synchronously on resolving thread after it is
# rejected. Does nothing on fulfillment.
#
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yield [reason, *args] to the callback.
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1176
def on_rejection!(*args, &callback); end
# Stores the callback to be executed asynchronously on executor after it is
# rejected. Does nothing on fulfillment.
#
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [self]
# @yield [reason, *args] to the callback.
# @yieldreturn is forgotten.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1188
def on_rejection_using(executor, *args, &callback); end
# Returns reason of future's rejection.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @note Make sure returned `nil` is not confused with timeout, no value when rejected,
# no reason when fulfilled, etc.
# Use more exact methods if needed, like {#wait}, {#value!}, {#result}, etc.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param timeout_value [Object] a value returned by the method when it times out
# @return [Object, timeout_value] the reason, or timeout_value on timeout, or nil on fulfillment.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#966
def reason(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Is it in rejected state?
#
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#928
def rejected?; end
# Shortcut of {#rescue_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #rescue_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1052
def rescue(*args, &task); end
# Chains the task to be executed asynchronously on executor after it rejects. Does not run
# the task if it fulfills. It will resolve though, triggering any dependent futures.
#
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [Future]
# @yield [reason, *args] to the task.
# @yieldreturn will become result of the returned Future.
# Its returned value becomes {Future#value} fulfilling it,
# raised exception becomes {Future#reason} rejecting it.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1064
def rescue_on(executor, *args, &task); end
# Returns triplet fulfilled?, value, reason.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [Array(Boolean, Object, Object), nil] triplet of fulfilled?, value, reason, or nil
# on timeout.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#981
def result(timeout = T.unsafe(nil)); end
# Allows futures to be used as green threads. The receiver has to evaluate to a future which
# represents what should be done next. It basically flattens indefinitely until a non-Future
# value is returned, which becomes the result of the returned future. Any encountered exception
# will become the reason of the returned future.
#
# @example
# body = lambda do |v|
# v += 1
# v < 5 ? Promises.future(v, &body) : v
# end
# Promises.future(0, &body).run.value! # => 5
# @param run_test [#call(value)] an object which when called returns either a Future to keep running with
#     or nil, in which case the run completes with the value.
#     The run_test can be used to extract the Future from a deeper structure,
#     or to distinguish a Future which is a resulting value from a future
#     which is supposed to continue running.
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1210
def run(run_test = T.unsafe(nil)); end
# Creates a new future dependent on the receiver, scheduled to execute on/in intended_time.
# The intended_time is interpreted from the moment the receiver is resolved, therefore it inserts
# a delay into the chain.
#
# @param intended_time [Numeric, Time] `Numeric` means to run in `intended_time` seconds.
# `Time` means to run on `intended_time`.
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1102
def schedule(intended_time); end
# Shortcut of {#then_on} with default `:io` executor supplied.
#
# @return [Future]
# @see #then_on
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1034
def then(*args, &task); end
# Chains the task to be executed asynchronously on executor after it fulfills. Does not run
# the task if it rejects. It will resolve though, triggering any dependent futures.
#
# @param executor [Executor, :io, :fast] Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
# @param args [Object] arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
# @return [Future]
# @yield [value, *args] to the task.
# @yieldreturn will become result of the returned Future.
# Its returned value becomes {Future#value} fulfilling it,
# raised exception becomes {Future#reason} rejecting it.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1046
def then_on(executor, *args, &task); end
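# A minimal chaining sketch, assuming `require 'concurrent-ruby'`; `then` is skipped
# on rejection and `rescue` handles it:
#
#   Concurrent::Promises.future { Integer('nope') }  # raises ArgumentError, so the future rejects
#     .then { |v| v + 1 }                            # skipped because the chain is rejected
#     .rescue { |reason| 0 }                         # handles the rejection, fulfilling with 0
#     .value!                                        # => 0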
# Converts future to event which is resolved when future is resolved by fulfillment or rejection.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1222
def to_event; end
# Returns self, since this is a future
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1230
def to_future; end
# @return [String] Short string representation.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1235
def to_s; end
# Returns the value of the future.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @note Make sure returned `nil` is not confused with timeout, no value when rejected,
# no reason when fulfilled, etc.
# Use more exact methods if needed, like {#wait}, {#value!}, {#result}, etc.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param timeout_value [Object] a value returned by the method when it times out
# @return [Object, nil, timeout_value] the value of the Future when fulfilled,
# timeout_value on timeout,
# nil on rejection.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#950
def value(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Returns the value of the future.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @note Make sure returned `nil` is not confused with timeout, no value when rejected,
# no reason when fulfilled, etc.
# Use more exact methods if needed, like {#wait}, {#value!}, {#result}, etc.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param timeout_value [Object] a value returned by the method when it times out
# @raise [Exception] {#reason} on rejection
# @return [Object, nil, timeout_value] the value of the Future when fulfilled,
# or nil on rejection,
# or timeout_value on timeout.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#997
def value!(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Wait (block the Thread) until receiver is {#resolved?}.
# Calls {Concurrent::AbstractEventFuture#touch}.
#
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @raise [Exception] {#reason} on rejection
# @return [self, true, false] self implies timeout was not used, true implies timeout was used
# and it was resolved, false implies it was not resolved within timeout.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#987
def wait!(timeout = T.unsafe(nil)); end
# Creates a new object of the same class with the executor set as its new default executor.
# Any futures depending on it will use the new default executor.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1111
def with_default_executor(executor); end
# Creates a new event or a future which will be resolved when receiver and other are.
# Returns an event if receiver and other are events, otherwise returns a future.
# If just one of the parties is a Future then the result
# of the returned future is equal to the result of the supplied future. If both are futures
# then the result is as described in {FactoryMethods#zip_futures_on}.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1070
def zip(other); end
# Creates a new future which will be resolved when the first of the receiver and `event_or_future`
# resolves. The returned future will have value nil if event_or_future is an event and resolves
# first.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1085
def |(event_or_future); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1272
def async_callback_on_fulfillment(state, executor, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1278
def async_callback_on_rejection(state, executor, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1284
def callback_on_fulfillment(state, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1288
def callback_on_rejection(state, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1292
def callback_on_resolution(state, args, callback); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1251
def rejected_resolution(raise_on_reassign, state); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1247
def run_test(v); end
# @raise [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1266
def wait_until_resolved!(timeout = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1984
class Concurrent::Promises::FutureWrapperPromise < ::Concurrent::Promises::BlockedPromise
# @return [FutureWrapperPromise] a new instance of FutureWrapperPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1985
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1991
def on_resolvable(resolved_future, index); end
end
# A promise that will be immediately resolved.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1783
class Concurrent::Promises::ImmediateEventPromise < ::Concurrent::Promises::InnerPromise
# @return [ImmediateEventPromise] a new instance of ImmediateEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1784
def initialize(default_executor); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1789
class Concurrent::Promises::ImmediateFuturePromise < ::Concurrent::Promises::InnerPromise
# @return [ImmediateFuturePromise] a new instance of ImmediateFuturePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1790
def initialize(default_executor, fulfilled, value, reason); end
end
# @abstract
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1615
class Concurrent::Promises::InnerPromise < ::Concurrent::Promises::AbstractPromise; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#338
module Concurrent::Promises::InternalStates; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#397
class Concurrent::Promises::InternalStates::Fulfilled < ::Concurrent::Promises::InternalStates::ResolvedWithResult
# @return [Fulfilled] a new instance of Fulfilled
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#399
def initialize(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#407
def apply(args, block); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#403
def fulfilled?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#415
def reason; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#419
def to_sym; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#411
def value; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#425
class Concurrent::Promises::InternalStates::FulfilledArray < ::Concurrent::Promises::InternalStates::Fulfilled
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#426
def apply(args, block); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#488
Concurrent::Promises::InternalStates::PENDING = T.let(T.unsafe(nil), Concurrent::Promises::InternalStates::Pending)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#459
class Concurrent::Promises::InternalStates::PartiallyRejected < ::Concurrent::Promises::InternalStates::ResolvedWithResult
# @return [PartiallyRejected] a new instance of PartiallyRejected
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#460
def initialize(value, reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#482
def apply(args, block); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#466
def fulfilled?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#478
def reason; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#470
def to_sym; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#474
def value; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#351
class Concurrent::Promises::InternalStates::Pending < ::Concurrent::Promises::InternalStates::State
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#352
def resolved?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#356
def to_sym; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#490
Concurrent::Promises::InternalStates::RESERVED = T.let(T.unsafe(nil), Concurrent::Promises::InternalStates::Reserved)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#492
Concurrent::Promises::InternalStates::RESOLVED = T.let(T.unsafe(nil), Concurrent::Promises::InternalStates::Fulfilled)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#432
class Concurrent::Promises::InternalStates::Rejected < ::Concurrent::Promises::InternalStates::ResolvedWithResult
# @return [Rejected] a new instance of Rejected
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#433
def initialize(reason); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#453
def apply(args, block); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#437
def fulfilled?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#445
def reason; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#449
def to_sym; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#441
def value; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#362
class Concurrent::Promises::InternalStates::Reserved < ::Concurrent::Promises::InternalStates::Pending; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#366
class Concurrent::Promises::InternalStates::ResolvedWithResult < ::Concurrent::Promises::InternalStates::State
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#391
def apply; end
# @raise [NotImplementedError]
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#379
def fulfilled?; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#387
def reason; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#367
def resolved?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#375
def result; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#371
def to_sym; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#383
def value; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#340
class Concurrent::Promises::InternalStates::State
# @raise [NotImplementedError]
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#341
def resolved?; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#345
def to_sym; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1748
class Concurrent::Promises::RescuePromise < ::Concurrent::Promises::BlockedTaskPromise
# @return [RescuePromise] a new instance of RescuePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1751
def initialize(delayed, blockers_count, default_executor, executor, args, &task); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1755
def on_resolvable(resolved_future, index); end
end
# Marker module for Future and Event instances resolved manually.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1299
module Concurrent::Promises::Resolvable
include ::Concurrent::Promises::InternalStates
end
# An Event which can be resolved by the user.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1304
class Concurrent::Promises::ResolvableEvent < ::Concurrent::Promises::Event
include ::Concurrent::Promises::Resolvable
# Makes the event resolved, which triggers all dependent futures.
#
# @param raise_on_reassign [Boolean] should method raise exception if already resolved
# @param reserved [true, false] Set to true if the resolvable is {#reserve}d by you,
# marks resolution of reserved resolvable events and futures explicitly.
# Advanced feature, ignore unless you use {Resolvable#reserve} from edge.
# @return [self, false] false is returned when raise_on_reassign is false and the receiver
# is already resolved.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1324
def resolve(raise_on_reassign = T.unsafe(nil), reserved = T.unsafe(nil)); end
# Behaves as {AbstractEventFuture#wait} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [true, false] If it times out and the argument is true it will also resolve the event.
# @return [self, true, false]
# @see AbstractEventFuture#wait
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1342
def wait(timeout = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
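# A minimal usage sketch, assuming `require 'concurrent-ruby'`; on timeout with
# resolve_on_timeout set, the event resolves itself:
#
#   event = Concurrent::Promises.resolvable_event
#   event.wait(0.01, true) # => false (timed out), but the event is resolved as a side effect
#   event.resolved?        # => true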
# Creates new event wrapping receiver, effectively hiding the resolve method.
#
# @return [Event]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1331
def with_hidden_resolvable; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1600
class Concurrent::Promises::ResolvableEventPromise < ::Concurrent::Promises::AbstractPromise
# @return [ResolvableEventPromise] a new instance of ResolvableEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1601
def initialize(default_executor); end
end
# A Future which can be resolved by the user.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1354
class Concurrent::Promises::ResolvableFuture < ::Concurrent::Promises::Future
include ::Concurrent::Promises::Resolvable
# Evaluates the block and sets its result as the future's value, fulfilling it; if the block raises
# an exception the future rejects with it.
#
# @return [self]
# @yield [*args] to the block.
# @yieldreturn [Object] value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1395
def evaluate_to(*args, &block); end
# Evaluates the block and sets its result as the future's value, fulfilling it; if the block raises
# an exception the future rejects with it.
#
# @raise [Exception] also raise reason on rejection.
# @return [self]
# @yield [*args] to the block.
# @yieldreturn [Object] value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1406
def evaluate_to!(*args, &block); end
# Makes the future fulfilled with `value`,
# which triggers all dependent futures.
#
# @param value [Object]
# @param raise_on_reassign [Boolean] should method raise exception if already resolved
# @param reserved [true, false] Set to true if the resolvable is {#reserve}d by you,
# marks resolution of reserved resolvable events and futures explicitly.
# Advanced feature, ignore unless you use {Resolvable#reserve} from edge.
# @return [self, false] false is returned when raise_on_reassign is false and the receiver
# is already resolved.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1375
def fulfill(value, raise_on_reassign = T.unsafe(nil), reserved = T.unsafe(nil)); end
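# A minimal usage sketch, assuming `require 'concurrent-ruby'`; a second resolution
# attempt with raise_on_reassign set to false returns false instead of raising:
#
#   f = Concurrent::Promises.resolvable_future
#   f.fulfill 1         # => f
#   f.fulfill 2, false  # => false, already resolved
#   f.value             # => 1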
# Behaves as {Future#reason} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @return [Exception, timeout_value, nil]
# @see Future#reason
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1503
def reason(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Makes the future rejected with `reason`,
# which triggers all dependent futures.
#
# @param reason [Object]
# @param raise_on_reassign [Boolean] should method raise exception if already resolved
# @param reserved [true, false] Set to true if the resolvable is {#reserve}d by you,
# marks resolution of reserved resolvable events and futures explicitly.
# Advanced feature, ignore unless you use {Resolvable#reserve} from edge.
# @return [self, false] false is returned when raise_on_reassign is false and the receiver
# is already resolved.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1385
def reject(reason, raise_on_reassign = T.unsafe(nil), reserved = T.unsafe(nil)); end
# Makes the future resolved with result of triplet `fulfilled?`, `value`, `reason`,
# which triggers all dependent futures.
#
# @param fulfilled [true, false]
# @param value [Object]
# @param reason [Object]
# @param raise_on_reassign [Boolean] should method raise exception if already resolved
# @param reserved [true, false] Set to true if the resolvable is {#reserve}d by you,
# marks resolution of reserved resolvable events and futures explicitly.
# Advanced feature, ignore unless you use {Resolvable#reserve} from edge.
# @return [self, false] false is returned when raise_on_reassign is false and the receiver
# is already resolved.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1365
def resolve(fulfilled = T.unsafe(nil), value = T.unsafe(nil), reason = T.unsafe(nil), raise_on_reassign = T.unsafe(nil), reserved = T.unsafe(nil)); end
# Behaves as {Future#result} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @return [::Array(Boolean, Object, Exception), nil]
# @see Future#result
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1524
def result(timeout = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Behaves as {Future#value} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @return [Object, timeout_value, nil]
# @see Future#value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1459
def value(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Behaves as {Future#value!} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @raise [Exception] {#reason} on rejection
# @return [Object, timeout_value, nil]
# @see Future#value!
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1481
def value!(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Behaves as {AbstractEventFuture#wait} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @return [self, true, false]
# @see AbstractEventFuture#wait
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1421
def wait(timeout = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Behaves as {Future#wait!} but has one additional optional argument
# resolve_on_timeout.
#
# @param resolve_on_timeout [::Array(true, Object, nil), ::Array(false, nil, Exception), nil] If it times out and the argument is not nil it will also resolve the future
# to the provided resolution.
# @raise [Exception] {#reason} on rejection
# @return [self, true, false]
# @see Future#wait!
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1438
def wait!(timeout = T.unsafe(nil), resolve_on_timeout = T.unsafe(nil)); end
# Creates new future wrapping receiver, effectively hiding the resolve method and similar.
#
# @return [Future]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1542
def with_hidden_resolvable; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1606
class Concurrent::Promises::ResolvableFuturePromise < ::Concurrent::Promises::AbstractPromise
# @return [ResolvableFuturePromise] a new instance of ResolvableFuturePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1607
def initialize(default_executor); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1592
def evaluate_to(*args, block); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1909
class Concurrent::Promises::RunFuturePromise < ::Concurrent::Promises::AbstractFlatPromise
# @return [RunFuturePromise] a new instance of RunFuturePromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1913
def initialize(delayed, blockers_count, default_executor, run_test); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1918
def process_on_blocker_resolution(future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2114
class Concurrent::Promises::ScheduledPromise < ::Concurrent::Promises::InnerPromise
# @return [ScheduledPromise] a new instance of ScheduledPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2125
def initialize(default_executor, intended_time); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2119
def inspect; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2115
def intended_time; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1730
class Concurrent::Promises::ThenPromise < ::Concurrent::Promises::BlockedTaskPromise
# @return [ThenPromise] a new instance of ThenPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1733
def initialize(delayed, blockers_count, default_executor, executor, args, &task); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1737
def on_resolvable(resolved_future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1940
class Concurrent::Promises::ZipEventEventPromise < ::Concurrent::Promises::BlockedPromise
# @return [ZipEventEventPromise] a new instance of ZipEventEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1941
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1947
def on_resolvable(resolved_future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2031
class Concurrent::Promises::ZipEventsPromise < ::Concurrent::Promises::BlockedPromise
# @return [ZipEventsPromise] a new instance of ZipEventsPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2035
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2041
def on_resolvable(resolved_future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1952
class Concurrent::Promises::ZipFutureEventPromise < ::Concurrent::Promises::BlockedPromise
# @return [ZipFutureEventPromise] a new instance of ZipFutureEventPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1953
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1967
def on_resolvable(resolved_future, index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1960
def process_on_blocker_resolution(future, index); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#1996
class Concurrent::Promises::ZipFuturesPromise < ::Concurrent::Promises::BlockedPromise
# @return [ZipFuturesPromise] a new instance of ZipFuturesPromise
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2000
def initialize(delayed, blockers_count, default_executor); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2013
def on_resolvable(resolved_future, index); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/promises.rb#2007
def process_on_blocker_resolution(future, index); end
end
# Methods from module A included into a module B, which is already included into class C,
# will not be visible in the C class. If this module is extended onto B then A's methods
# are correctly made visible to C.
#
# @example
# module A
# def a
# :a
# end
# end
#
# module B1
# end
#
# class C1
# include B1
# end
#
# module B2
# extend Concurrent::ReInclude
# end
#
# class C2
# include B2
# end
#
# B1.send :include, A
# B2.send :include, A
#
# C1.new.respond_to? :a # => false
# C2.new.respond_to? :a # => true
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/re_include.rb#36
module Concurrent::ReInclude
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/re_include.rb#44
def extended(base); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/re_include.rb#50
def include(*modules); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/re_include.rb#38
def included(base); end
end
# Ruby read-write lock implementation
#
# Allows any number of concurrent readers, but only one concurrent writer
# (And if the "write" lock is taken, any readers who come along will have to wait)
#
# If readers are already active when a writer comes along, the writer will wait for
# all the readers to finish before going ahead.
# Any additional readers that come when the writer is already waiting, will also
# wait (so writers are not starved).
#
# This implementation is based on `java.util.concurrent.ReentrantReadWriteLock`.
#
# @example
# lock = Concurrent::ReadWriteLock.new
# lock.with_read_lock { data.retrieve }
# lock.with_write_lock { data.modify! }
# @note Do **not** try to acquire the write lock while already holding a read lock
# **or** try to acquire the write lock while you already have it.
# This will lead to deadlock
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html java.util.concurrent.ReentrantReadWriteLock
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#31
class Concurrent::ReadWriteLock < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new `ReadWriteLock` in the unlocked state.
#
# @return [ReadWriteLock] a new instance of ReadWriteLock
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#59
def initialize; end
# Acquire a read lock. If a write lock has been acquired will block until
# it is released. Will not block if other read locks have been acquired.
#
# @raise [Concurrent::ResourceLimitError] if the maximum number of readers
# is exceeded.
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#111
def acquire_read_lock; end
# Acquire a write lock. Will block and wait for all active readers and writers.
#
# @raise [Concurrent::ResourceLimitError] if the maximum number of writers
# is exceeded.
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#160
def acquire_write_lock; end
# Queries whether any threads are waiting to acquire the read or write lock.
#
# @return [Boolean] true if any threads are waiting for a lock else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#214
def has_waiters?; end
# Release a previously acquired read lock.
#
# @return [Boolean] true if the lock is successfully released
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#140
def release_read_lock; end
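# A minimal manual-locking sketch, assuming `require 'concurrent-ruby'`; `read_shared_state`
# is a hypothetical placeholder. Prefer #with_read_lock / #with_write_lock, which release
# the lock automatically:
#
#   lock = Concurrent::ReadWriteLock.new
#   lock.acquire_read_lock
#   begin
#     read_shared_state
#   ensure
#     lock.release_read_lock
#   end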
# Release a previously acquired write lock.
#
# @return [Boolean] true if the lock is successfully released
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#196
def release_write_lock; end
# Execute a block operation within a read lock.
#
# @raise [ArgumentError] when no block is given.
# @raise [Concurrent::ResourceLimitError] if the maximum number of readers
# is exceeded.
# @return [Object] the result of the block operation.
# @yield the task to be performed within the lock.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#75
def with_read_lock; end
# Execute a block operation within a write lock.
#
# @raise [ArgumentError] when no block is given.
# @raise [Concurrent::ResourceLimitError] if the maximum number of readers
# is exceeded.
# @return [Object] the result of the block operation.
# @yield the task to be performed within the lock.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#94
def with_write_lock; end
# Queries if the write lock is held by any thread.
#
# @return [Boolean] true if the write lock is held else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#207
def write_locked?; end
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#246
def max_readers?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#251
def max_writers?(c = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#221
def running_readers(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#226
def running_readers?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#231
def running_writer?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#241
def waiting_writer?(c = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#236
def waiting_writers(c = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#40
Concurrent::ReadWriteLock::MAX_READERS = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#43
Concurrent::ReadWriteLock::MAX_WRITERS = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#37
Concurrent::ReadWriteLock::RUNNING_WRITER = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/read_write_lock.rb#34
Concurrent::ReadWriteLock::WAITING_WRITER = T.let(T.unsafe(nil), Integer)
# Re-entrant read-write lock implementation
#
# Allows any number of concurrent readers, but only one concurrent writer
# (And while the "write" lock is taken, no read locks can be obtained either.
# Hence, the write lock can also be called an "exclusive" lock.)
#
# If another thread has taken a read lock, any thread which wants a write lock
# will block until all the readers release their locks. However, once a thread
# starts waiting to obtain a write lock, any additional readers that come along
# will also wait (so writers are not starved).
#
# A thread can acquire both a read and write lock at the same time. A thread can
# also acquire a read lock OR a write lock more than once. Only when the read (or
# write) lock is released as many times as it was acquired, will the thread
# actually let it go, allowing other threads which might have been waiting
# to proceed. Therefore the lock can be upgraded by first acquiring the
# read lock and then the write lock, and the lock can be downgraded by first
# holding both the read and write locks and releasing just the write lock.
#
# If both read and write locks are acquired by the same thread, it is not strictly
# necessary to release them in the same order they were acquired. In other words,
# the following code is legal:
#
# This implementation was inspired by `java.util.concurrent.ReentrantReadWriteLock`.
#
# @example
# lock = Concurrent::ReentrantReadWriteLock.new
# lock.acquire_write_lock
# lock.acquire_read_lock
# lock.release_write_lock
# # At this point, the current thread is holding only a read lock, not a write
# # lock. So other threads can take read locks, but not a write lock.
# lock.release_read_lock
# # Now the current thread is not holding either a read or write lock, so
# # another thread could potentially acquire a write lock.
# @example
# lock = Concurrent::ReentrantReadWriteLock.new
# lock.with_read_lock { data.retrieve }
# lock.with_write_lock { data.modify! }
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html java.util.concurrent.ReentrantReadWriteLock
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#53
class Concurrent::ReentrantReadWriteLock < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new `ReentrantReadWriteLock` in the unlocked state.
#
# @return [ReentrantReadWriteLock] a new instance of ReentrantReadWriteLock
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#109
def initialize; end
# Acquire a read lock. If a write lock is held by another thread, will block
# until it is released.
#
# @raise [Concurrent::ResourceLimitError] if the maximum number of readers
# is exceeded.
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#162
def acquire_read_lock; end
# Acquire a write lock. Will block and wait for all active readers and writers.
#
# @raise [Concurrent::ResourceLimitError] if the maximum number of writers
# is exceeded.
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#257
def acquire_write_lock; end
# Release a previously acquired read lock.
#
# @return [Boolean] true if the lock is successfully released
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#236
def release_read_lock; end
# Release a previously acquired write lock.
#
# @return [Boolean] true if the lock is successfully released
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#329
def release_write_lock; end
# Try to acquire a read lock and return true if we succeed. If it cannot be
# acquired immediately, return false.
#
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#215
def try_read_lock; end
# Try to acquire a write lock and return true if we succeed. If it cannot be
# acquired immediately, return false.
#
# @return [Boolean] true if the lock is successfully acquired
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#310
def try_write_lock; end
# Execute a block operation within a read lock.
#
# @raise [ArgumentError] when no block is given.
# @raise [Concurrent::ResourceLimitError] if the maximum number of readers
# is exceeded.
# @return [Object] the result of the block operation.
# @yield the task to be performed within the lock.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#126
def with_read_lock; end
# Execute a block operation within a write lock.
#
# @raise [ArgumentError] when no block is given.
# @raise [Concurrent::ResourceLimitError] if the maximum number of writers
#   is exceeded.
# @return [Object] the result of the block operation.
# @yield the task to be performed within the lock.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#145
def with_write_lock; end
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#370
def max_readers?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#375
def max_writers?(c = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#345
def running_readers(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#350
def running_readers?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#355
def running_writer?(c = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#365
def waiting_or_running_writer?(c = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#360
def waiting_writers(c = T.unsafe(nil)); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#94
Concurrent::ReentrantReadWriteLock::MAX_READERS = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#96
Concurrent::ReentrantReadWriteLock::MAX_WRITERS = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#84
Concurrent::ReentrantReadWriteLock::READER_BITS = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#102
Concurrent::ReentrantReadWriteLock::READ_LOCK_MASK = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#92
Concurrent::ReentrantReadWriteLock::RUNNING_WRITER = T.let(T.unsafe(nil), Integer)
# Used with @Counter:
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#90
Concurrent::ReentrantReadWriteLock::WAITING_WRITER = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#86
Concurrent::ReentrantReadWriteLock::WRITER_BITS = T.let(T.unsafe(nil), Integer)
# Used with @HeldCount:
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#100
Concurrent::ReentrantReadWriteLock::WRITE_LOCK_HELD = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/reentrant_read_write_lock.rb#104
Concurrent::ReentrantReadWriteLock::WRITE_LOCK_MASK = T.let(T.unsafe(nil), Integer)
# Raised by an `Executor` when it is unable to process a given task,
# possibly because of a reject policy or other internal error.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#48
class Concurrent::RejectedExecutionError < ::Concurrent::Error; end
# Raised when any finite resource, such as a lock counter, exceeds its
# maximum limit/threshold.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#52
class Concurrent::ResourceLimitError < ::Concurrent::Error; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#129
class Concurrent::RubyExchanger < ::Concurrent::AbstractExchanger
extend ::Concurrent::Synchronization::SafeInitialization
# @return [RubyExchanger] a new instance of RubyExchanger
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#159
def initialize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_slot(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def slot; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def slot=(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_slot(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_slot(&block); end
private
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
# @param value [Object] the value to exchange with another thread
# @param timeout [Numeric, nil] in seconds, `nil` blocks indefinitely
# @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
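#
# A hedged usage sketch of the public counterpart {Concurrent::Exchanger#exchange};
# the symbols and the one-second timeout are illustrative:
#
# @example
#   exchanger = Concurrent::Exchanger.new
#   t = Thread.new { exchanger.exchange(:bar, 1) }
#   exchanger.exchange(:foo, 1) #=> :bar
#   t.value                     #=> :foo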
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#170
def do_exchange(value, timeout); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#138
class Concurrent::RubyExchanger::Node < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Node] a new instance of Node
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#142
def initialize(item); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_value(expected, value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#153
def item; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/exchanger.rb#149
def latch; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_value(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_value(&block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def value; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def value=(value); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#8
class Concurrent::RubyExecutorService < ::Concurrent::AbstractExecutorService
# @return [RubyExecutorService] a new instance of RubyExecutorService
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#11
def initialize(*args, &block); end
# Begin an immediate shutdown. In-progress tasks will be allowed to
# complete but enqueued tasks will be dismissed and no new tasks
# will be accepted. Has no additional effect if the thread pool is
# not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#42
def kill; end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#17
def post(*args, &task); end
# Begin an orderly shutdown. Tasks already in the queue will be executed,
# but no new tasks will be accepted. Has no additional effect if the
# thread pool is not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#33
def shutdown; end
# Block until executor shutdown is complete or until `timeout` seconds have
# passed.
#
# @note Does not initiate shutdown or termination. Either `shutdown` or `kill`
# must be called before this method (or on another thread).
# @param timeout [Integer] the maximum number of seconds to wait for shutdown to complete
# @return [Boolean] `true` if shutdown complete or false on `timeout`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#52
def wait_for_termination(timeout = T.unsafe(nil)); end
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#70
def ns_running?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#78
def ns_shutdown?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#66
def ns_shutdown_execution; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#74
def ns_shuttingdown?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#58
def stop_event; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#62
def stopped_event; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_single_thread_executor.rb#8
class Concurrent::RubySingleThreadExecutor < ::Concurrent::RubyThreadPoolExecutor
# @return [RubySingleThreadExecutor] a new instance of RubySingleThreadExecutor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_single_thread_executor.rb#11
def initialize(opts = T.unsafe(nil)); end
end
# **Thread Pool Options**
#
# Thread pools support several configuration options:
#
# * `idletime`: The number of seconds that a thread may be idle before being reclaimed.
# * `name`: The name of the executor (optional). Printed in the executor's `#to_s` output and
#   a `<name>-worker-<id>` name is given to its threads if supported by the Ruby
#   implementation in use. `<id>` is unique for each thread.
# * `max_queue`: The maximum number of tasks that may be waiting in the work queue at
# any one time. When the queue size reaches `max_queue` and no new threads can be created,
# subsequent tasks will be rejected in accordance with the configured `fallback_policy`.
# * `auto_terminate`: When true (default), the threads started will be marked as daemon.
# * `fallback_policy`: The policy defining how rejected tasks are handled.
#
# Three fallback policies are supported:
#
# * `:abort`: Raise a `RejectedExecutionError` exception and discard the task.
# * `:discard`: Discard the task and return false.
# * `:caller_runs`: Execute the task on the calling thread.
#
# **Shutting Down Thread Pools**
#
# Killing a thread pool while tasks are still being processed, either by calling
# the `#kill` method or at application exit, will have unpredictable results. There
# is no way for the thread pool to know what resources are being used by the
# in-progress tasks. When those tasks are killed the impact on those resources
# cannot be predicted. The *best* practice is to explicitly shutdown all thread
# pools using the provided methods:
#
# * Call `#shutdown` to initiate an orderly termination of all in-progress tasks
# * Call `#wait_for_termination` with an appropriate timeout interval and allow
# the orderly shutdown to complete
# * Call `#kill` *only when* the thread pool fails to shutdown in the allotted time
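#
# A minimal sketch of this shutdown sequence; the pool size, task count and
# timeout are illustrative, and `do_work` is a hypothetical helper:
#
# ```ruby
# pool = Concurrent::FixedThreadPool.new(5)
# 100.times { |i| pool.post { do_work(i) } }
# pool.shutdown                                   # initiate orderly shutdown
# pool.kill unless pool.wait_for_termination(10)  # kill only if it does not finish in time
# ```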
#
# On some runtime platforms (most notably the JVM) the application will not
# exit until all thread pools have been shutdown. To prevent applications from
# "hanging" on exit, all threads can be marked as daemon according to the
# `:auto_terminate` option.
#
# ```ruby
# pool1 = Concurrent::FixedThreadPool.new(5) # threads will be marked as daemon
# pool2 = Concurrent::FixedThreadPool.new(5, auto_terminate: false) # mark threads as non-daemon
# ```
#
# @note Failure to properly shutdown a thread pool can lead to unpredictable results.
# Please read *Shutting Down Thread Pools* for more information.
# @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html Java Tutorials: Thread Pools
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html Java Executors class
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html Java ExecutorService interface
# @see https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html#setDaemon-boolean-
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#12
class Concurrent::RubyThreadPoolExecutor < ::Concurrent::RubyExecutorService
# @return [RubyThreadPoolExecutor] a new instance of RubyThreadPoolExecutor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#45
def initialize(opts = T.unsafe(nil)); end
# The number of threads that are actively executing tasks.
#
# @return [Integer] The number of threads that are actively executing tasks.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#65
def active_count; end
# Does the task queue have a maximum size?
#
# @return [Boolean] True if the task queue has a maximum size else false.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#72
def can_overflow?; end
# The number of tasks that have been completed by the pool since construction.
#
# @return [Integer] The number of tasks that have been completed by the pool since construction.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#60
def completed_task_count; end
# The number of seconds that a thread may be idle before being reclaimed.
#
# @return [Integer] The number of seconds that a thread may be idle before being reclaimed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#36
def idletime; end
# The largest number of threads that have been created in the pool since construction.
#
# @return [Integer] The largest number of threads that have been created in the pool since construction.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#50
def largest_length; end
# The number of threads currently in the pool.
#
# @return [Integer] The number of threads currently in the pool.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#77
def length; end
# The maximum number of threads that may be created in the pool.
#
# @return [Integer] The maximum number of threads that may be created in the pool.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#30
def max_length; end
# The maximum number of tasks that may be waiting in the work queue at any one time.
# When the queue size reaches `max_queue` subsequent tasks will be rejected in
# accordance with the configured `fallback_policy`.
#
# @return [Integer] The maximum number of tasks that may be waiting in the work queue at any one time.
# When the queue size reaches `max_queue` subsequent tasks will be rejected in
# accordance with the configured `fallback_policy`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#39
def max_queue; end
# The minimum number of threads that may be retained in the pool.
#
# @return [Integer] The minimum number of threads that may be retained in the pool.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#33
def min_length; end
# Prune the thread pool of unneeded threads
#
# What is being pruned is controlled by the min_threads and idletime
# parameters passed at pool creation time
#
# This is a no-op on some pool implementations (e.g. the Java one). The Ruby
# pool will auto-prune each time a new job is posted. You will need to call
# this method explicitly in case your application posts jobs in bursts (a
# lot of jobs and then nothing for long periods).
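#
# A hedged sketch of calling it after such a burst; the pool options are
# illustrative and `do_work` is a hypothetical task:
#
# @example
#   pool = Concurrent::ThreadPoolExecutor.new(min_threads: 1, max_threads: 10, idletime: 5)
#   100.times { pool.post { do_work } }
#   sleep 6           # the burst is over and workers have been idle past `idletime`
#   pool.prune_pool   # reclaim the excess idle workers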
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#118
def prune_pool; end
# The number of tasks in the queue awaiting execution.
#
# @return [Integer] The number of tasks in the queue awaiting execution.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#82
def queue_length; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#103
def ready_worker(worker, last_message); end
# Number of tasks that may be enqueued before reaching `max_queue` and rejecting
# new tasks. A value of -1 indicates that the queue may grow without bound.
#
# @return [Integer] Number of tasks that may be enqueued before reaching `max_queue` and rejecting
# new tasks. A value of -1 indicates that the queue may grow without bound.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#87
def remaining_capacity; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#98
def remove_busy_worker(worker); end
# The number of tasks that have been scheduled for execution on the pool since construction.
#
# @return [Integer] The number of tasks that have been scheduled for execution on the pool since construction.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#55
def scheduled_task_count; end
# Whether or not a value of 0 for the :max_queue option means the queue must perform direct hand-off rather than act as an unbounded queue.
#
# @return [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#42
def synchronous; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#108
def worker_died(worker); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#113
def worker_task_completed; end
private
# creates a new worker which has to receive work to do after it's added
#
# @return [nil, Worker] nil if max capacity is reached
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#241
def ns_add_busy_worker; end
# tries to assign task to a worker, tries to get one from @ready or to create new one
#
# @return [true, false] if task is assigned to a worker
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#201
def ns_assign_worker(*args, &task); end
# tries to enqueue task
#
# @return [true, false] if enqueued
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#219
def ns_enqueue(*args, &task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#160
def ns_execute(*args, &task); end
# @raise [ArgumentError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#125
def ns_initialize(opts); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#189
def ns_kill_execution; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#155
def ns_limited_queue?; end
# tries the oldest worker; if it has been idle for long enough it is pruned, otherwise it is returned back at the start
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#280
def ns_prune_pool; end
# handles a ready worker, giving it a new job or assigning it back to @ready
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#253
def ns_ready_worker(worker, last_message, success = T.unsafe(nil)); end
# removes a worker which is not tracked in @ready
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#271
def ns_remove_busy_worker(worker); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#296
def ns_reset_if_forked; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#174
def ns_shutdown_execution; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#231
def ns_worker_died(worker); end
end
# Default maximum number of threads that will be created in the pool.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#15
Concurrent::RubyThreadPoolExecutor::DEFAULT_MAX_POOL_SIZE = T.let(T.unsafe(nil), Integer)
# Default maximum number of tasks that may be added to the task queue.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#21
Concurrent::RubyThreadPoolExecutor::DEFAULT_MAX_QUEUE_SIZE = T.let(T.unsafe(nil), Integer)
# Default minimum number of threads that will be retained in the pool.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#18
Concurrent::RubyThreadPoolExecutor::DEFAULT_MIN_POOL_SIZE = T.let(T.unsafe(nil), Integer)
# Default value of the :synchronous option.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#27
Concurrent::RubyThreadPoolExecutor::DEFAULT_SYNCHRONOUS = T.let(T.unsafe(nil), FalseClass)
# Default maximum number of seconds a thread in the pool may remain idle
# before being reclaimed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#24
Concurrent::RubyThreadPoolExecutor::DEFAULT_THREAD_IDLETIMEOUT = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#310
class Concurrent::RubyThreadPoolExecutor::Worker
include ::Concurrent::Concern::Logging
# @return [Worker] a new instance of Worker
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#313
def initialize(pool, id); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#324
def <<(message); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#332
def kill; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#328
def stop; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#338
def create_worker(queue, pool, idletime); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_thread_pool_executor.rb#358
def run_task(pool, task, args); end
end
# A simple utility class that executes a callable and returns an array of three elements:
# success - indicating if the callable has been executed without errors
# value - filled by the callable result if it has been executed without errors, nil otherwise
# reason - the error raised by the callable if it has been executed with errors, nil otherwise
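#
# A minimal sketch, assuming the constructor and three-element return
# described above; the lambda is a placeholder task:
#
# @example
#   executor = Concurrent::SafeTaskExecutor.new(-> { 1 + 1 })
#   success, value, reason = executor.execute
#   success #=> true
#   value   #=> 2
#   reason  #=> nil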
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/safe_task_executor.rb#9
class Concurrent::SafeTaskExecutor < ::Concurrent::Synchronization::LockableObject
# @return [SafeTaskExecutor] a new instance of SafeTaskExecutor
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/safe_task_executor.rb#11
def initialize(task, opts = T.unsafe(nil)); end
# @return [Array]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/safe_task_executor.rb#18
def execute(*args); end
end
# `ScheduledTask` is a close relative of `Concurrent::Future` but with one
# important difference: A `Future` is set to execute as soon as possible
# whereas a `ScheduledTask` is set to execute after a specified delay. This
# implementation is loosely based on Java's
# [ScheduledExecutorService](http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ScheduledExecutorService.html).
# It is a more feature-rich variant of {Concurrent.timer}.
#
# The *intended* schedule time of task execution is set on object construction
# with the `delay` argument. The delay is a numeric (floating point or integer)
# representing a number of seconds in the future. Any other value or a numeric
# equal to or less than zero will result in an exception. The *actual* schedule
# time of task execution is set when the `execute` method is called.
#
# The constructor can also be given zero or more processing options. Currently
# the only supported options are those recognized by the
# [Dereferenceable](Dereferenceable) module.
#
# The final constructor argument is a block representing the task to be performed.
# If no block is given an `ArgumentError` will be raised.
#
# **States**
#
# `ScheduledTask` mixes in the [Obligation](Obligation) module thus giving it
# "future" behavior. This includes the expected lifecycle states. `ScheduledTask`
# has one additional state, however. While the task (block) is being executed the
# state of the object will be `:processing`. This additional state is necessary
# because it has implications for task cancellation.
#
# **Cancellation**
#
# A `:pending` task can be cancelled using the `#cancel` method. A task in any
# other state, including `:processing`, cannot be cancelled. The `#cancel`
# method returns a boolean indicating the success of the cancellation attempt.
# A cancelled `ScheduledTask` cannot be restarted. It is immutable.
#
# **Obligation and Observation**
#
# The result of a `ScheduledTask` can be obtained either synchronously or
# asynchronously. `ScheduledTask` mixes in both the [Obligation](Obligation)
# module and the
# [Observable](http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html)
# module from the Ruby standard library. With one exception `ScheduledTask`
# behaves identically to [Future](Future) with regard to these modules.
#
# @example Basic usage
#
# require 'concurrent/scheduled_task'
# require 'csv'
# require 'open-uri'
#
# class Ticker
# def get_year_end_closing(symbol, year, api_key)
# uri = "https://www.alphavantage.co/query?function=TIME_SERIES_MONTHLY&symbol=#{symbol}&apikey=#{api_key}&datatype=csv"
# data = []
# csv = URI.parse(uri).read
# if csv.include?('call frequency')
# return :rate_limit_exceeded
# end
# CSV.parse(csv, headers: true) do |row|
# data << row['close'].to_f if row['timestamp'].include?(year.to_s)
# end
# year_end = data.first
# year_end
# rescue => e
# p e
# end
# end
#
# api_key = ENV['ALPHAVANTAGE_KEY']
# abort(error_message) unless api_key
#
# # Future
# price = Concurrent::Future.execute{ Ticker.new.get_year_end_closing('TWTR', 2013, api_key) }
# price.state #=> :pending
# price.pending? #=> true
# price.value(0) #=> nil (does not block)
#
# sleep(1) # do other stuff
#
# price.value #=> 63.65 (after blocking if necessary)
# price.state #=> :fulfilled
# price.fulfilled? #=> true
# price.value #=> 63.65
# @example Successful task execution
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }
# task.state #=> :unscheduled
# task.execute
# task.state #=> :pending
#
# # wait for it...
# sleep(3)
#
# task.unscheduled? #=> false
# task.pending? #=> false
# task.fulfilled? #=> true
# task.rejected? #=> false
# task.value #=> 'What does the fox say?'
# @example One line creation and execution
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }.execute
# task.state #=> :pending
#
# task = Concurrent::ScheduledTask.execute(2){ 'What do you get when you multiply 6 by 9?' }
# task.state #=> :pending
# @example Failed task execution
#
# task = Concurrent::ScheduledTask.execute(2){ raise StandardError.new('Call me maybe?') }
# task.pending? #=> true
#
# # wait for it...
# sleep(3)
#
# task.unscheduled? #=> false
# task.pending? #=> false
# task.fulfilled? #=> false
# task.rejected? #=> true
# task.value #=> nil
# task.reason #=> #<StandardError: Call me maybe?>
# @example Task execution with observation
#
# observer = Class.new{
# def update(time, value, reason)
# puts "The task completed at #{time} with value '#{value}'"
# end
# }.new
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }
# task.add_observer(observer)
# task.execute
# task.pending? #=> true
#
# # wait for it...
# sleep(3)
#
# #>> The task completed at 2013-11-07 12:26:09 -0500 with value 'What does the fox say?'
# @see Concurrent.timer
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#158
class Concurrent::ScheduledTask < ::Concurrent::IVar
include ::Comparable
# Schedule a task for execution at a specified future time.
#
# @option opts
# @param delay [Float] the number of seconds to wait for before executing the task
# @param opts [Hash] a customizable set of options
# @raise [ArgumentError] When no block is given
# @raise [ArgumentError] When given a time that is in the past
# @return [ScheduledTask] a new instance of ScheduledTask
# @yield the task to be performed
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#178
def initialize(delay, opts = T.unsafe(nil), &task); end
# Comparator which orders by schedule time.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#213
def <=>(other); end
# Cancel this task and prevent it from executing. A task can only be
# cancelled if it is pending or unscheduled.
#
# @return [Boolean] true if successfully cancelled else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#235
def cancel; end
# Has the task been cancelled?
#
# @return [Boolean] true if the task is in the given state else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#220
def cancelled?; end
# Execute an `:unscheduled` `ScheduledTask`. Immediately sets the state to `:pending`
# and starts counting down toward execution. Does nothing if the `ScheduledTask` is
# in any state other than `:unscheduled`.
#
# @return [ScheduledTask] a reference to `self`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#273
def execute; end
# The executor on which to execute the task.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#163
def executor; end
# The `delay` value given at instantiation.
#
# @return [Float] the initial delay.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#199
def initial_delay; end
# Execute the task.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#297
def process_task; end
# Is the task execution in progress?
#
# @return [Boolean] true if the task is in the given state else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#227
def processing?; end
# Reschedule the task using the given delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @param delay [Float] the number of seconds to wait for before executing the task
# @raise [ArgumentError] When given a time that is in the past
# @return [Boolean] true if successfully rescheduled else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#262
def reschedule(delay); end
# Reschedule the task using the original delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @return [Boolean] true if successfully rescheduled else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#250
def reset; end
# The monotonic time at which the task is scheduled to be executed.
#
# @return [Float] the schedule time or nil if `unscheduled`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#206
def schedule_time; end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#135
def fail(reason = T.unsafe(nil)); end
# Reschedule the task using the given delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @param delay [Float] the number of seconds to wait for before executing the task
# @return [Boolean] true if successfully rescheduled else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#326
def ns_reschedule(delay); end
# Schedule the task using the given delay and the current time.
#
# @param delay [Float] the number of seconds to wait for before executing the task
# @return [Boolean] true if successfully rescheduled else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#312
def ns_schedule(delay); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#113
def set(value = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/ivar.rb#145
def try_set(value = T.unsafe(nil), &block); end
class << self
# Create a new `ScheduledTask` object with the given block, execute it, and return the
# `:pending` object.
#
# @param delay [Float] the number of seconds to wait for before executing the task
# @raise [ArgumentError] if no block is given
# @return [ScheduledTask] the newly created `ScheduledTask` in the `:pending` state
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/scheduled_task.rb#290
def execute(delay, opts = T.unsafe(nil), &task); end
end
end
# A counting semaphore. Conceptually, a semaphore maintains a set of
# permits. Each {#acquire} blocks if necessary until a permit is
# available, and then takes it. Each {#release} adds a permit, potentially
# releasing a blocking acquirer.
# However, no actual permit objects are used; the Semaphore just keeps a
# count of the number available and acts accordingly.
# Alternatively, permits may be acquired within a block, and automatically
# released after the block finishes executing.
#
# @example
# semaphore = Concurrent::Semaphore.new(2)
#
# t1 = Thread.new do
# semaphore.acquire
# puts "Thread 1 acquired semaphore"
# end
#
# t2 = Thread.new do
# semaphore.acquire
# puts "Thread 2 acquired semaphore"
# end
#
# t3 = Thread.new do
# semaphore.acquire
# puts "Thread 3 acquired semaphore"
# end
#
# t4 = Thread.new do
# sleep(2)
# puts "Thread 4 releasing semaphore"
# semaphore.release
# end
#
# [t1, t2, t3, t4].each(&:join)
#
# # prints:
# # Thread 3 acquired semaphore
# # Thread 2 acquired semaphore
# # Thread 4 releasing semaphore
# # Thread 1 acquired semaphore
# @example
# semaphore = Concurrent::Semaphore.new(1)
#
# puts semaphore.available_permits
# semaphore.acquire do
# puts semaphore.available_permits
# end
# puts semaphore.available_permits
#
# # prints:
# # 1
# # 0
# # 1
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/semaphore.rb#161
class Concurrent::Semaphore < ::Concurrent::MutexSemaphore; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/semaphore.rb#96
Concurrent::SemaphoreImplementation = Concurrent::MutexSemaphore
# Indicates that the including `ExecutorService` guarantees
# that all operations will occur in the order they are posted and that no
# two operations may occur simultaneously. This module provides no
# functionality and provides no guarantees. That is the responsibility
# of the including class. This module exists solely to allow the including
# object to be interrogated for its serialization status.
#
# @example
# class Foo
# include Concurrent::SerialExecutor
# end
#
# foo = Foo.new
#
# foo.is_a? Concurrent::ExecutorService #=> true
# foo.is_a? Concurrent::SerialExecutor #=> true
# foo.serialized? #=> true
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serial_executor_service.rb#24
module Concurrent::SerialExecutorService
include ::Concurrent::Concern::Logging
include ::Concurrent::ExecutorService
# Does this executor guarantee serialization of its operations?
#
# @note Always returns `true`
# @return [Boolean] True if the executor guarantees that all operations
#   will be processed in the order they are received and no two operations may
# occur simultaneously. Else false.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serial_executor_service.rb#30
def serialized?; end
end
# Ensures that passed jobs are executed in a serialized order, never running at the same time.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#8
class Concurrent::SerializedExecution < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Concern::Logging
# @return [SerializedExecution] a new instance of SerializedExecution
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#11
def initialize; end
# Submit a task to the executor for asynchronous processing.
#
# @param executor [Executor] to be used for this job
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#34
def post(executor, *args, &task); end
# As {#post}, but allows multiple tasks to be submitted at once; it is guaranteed that they will not
# be interleaved by other tasks.
#
# @param posts [Array<Array(ExecutorService, Array<Object>, Proc)>] array of triplets where
# first is a {ExecutorService}, second is array of args for task, third is a task (Proc)
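#
# A hedged sketch of that triplet format; `executor` stands for any running
# executor and the lambdas are placeholder tasks:
#
# @example
#   serializer = Concurrent::SerializedExecution.new
#   serializer.posts([
#     [executor, [1], ->(x) { puts x }],
#     [executor, [2], ->(x) { puts x }]
#   ])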
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#44
def posts(posts); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#75
def call_job(job); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#70
def ns_initialize; end
# ensures next job is executed if any is stashed
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#95
def work(job); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#16
class Concurrent::SerializedExecution::Job < ::Struct
# Returns the value of attribute args
#
# @return [Object] the current value of args
def args; end
# Sets the attribute args
#
# @param value [Object] the value to set the attribute args to.
# @return [Object] the newly set value
def args=(_); end
# Returns the value of attribute block
#
# @return [Object] the current value of block
def block; end
# Sets the attribute block
#
# @param value [Object] the value to set the attribute block to.
# @return [Object] the newly set value
def block=(_); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution.rb#17
def call; end
# Returns the value of attribute executor
#
# @return [Object] the current value of executor
def executor; end
# Sets the attribute executor
#
# @param value [Object] the value to set the attribute executor to.
# @return [Object] the newly set value
def executor=(_); end
class << self
def [](*_arg0); end
def inspect; end
def keyword_init?; end
def members; end
def new(*_arg0); end
end
end
# A wrapper/delegator for any `ExecutorService` that
# guarantees serialized execution of tasks.
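#
# A minimal sketch, assuming any running executor; the posted block is a
# placeholder task:
#
# @example
#   serial = Concurrent::SerializedExecutionDelegator.new(Concurrent::FixedThreadPool.new(2))
#   serial.post { puts 'tasks posted here never run concurrently with each other' }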
#
# @see [SimpleDelegator](http://www.ruby-doc.org/stdlib-2.1.2/libdoc/delegate/rdoc/SimpleDelegator.html)
# @see Concurrent::SerializedExecution
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution_delegator.rb#12
class Concurrent::SerializedExecutionDelegator < ::SimpleDelegator
include ::Concurrent::Concern::Logging
include ::Concurrent::ExecutorService
include ::Concurrent::SerialExecutorService
# @return [SerializedExecutionDelegator] a new instance of SerializedExecutionDelegator
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution_delegator.rb#15
def initialize(executor); end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/serialized_execution_delegator.rb#22
def post(*args, &task); end
end
# A thread-safe subclass of Set. This version locks against the object
# itself for every method call, ensuring only one thread can be reading
# or writing at a time. This includes iteration methods like `#each`.
#
# @note `a += b` is **not** a **thread-safe** operation on
#   `Concurrent::Set`. It reads Set `a`, then it creates a new `Concurrent::Set`
#   which is the union of `a` and `b`, then it writes the union to `a`.
#   The read and write are independent operations; they do not form a single atomic
#   operation, therefore when two `+=` operations are executed concurrently updates
#   may be lost. Use `#merge` instead.
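#
# A short sketch of the safe alternative, with illustrative contents:
#
# @example
#   a = Concurrent::Set.new([1, 2])
#   a.merge([3, 4])    # mutates `a` under its lock, unlike the non-atomic `a += [3, 4]`
#   a.to_a.sort        #=> [1, 2, 3, 4]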
# @see http://ruby-doc.org/stdlib-2.4.0/libdoc/set/rdoc/Set.html Ruby standard library `Set`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/set.rb#61
class Concurrent::Set < ::Concurrent::CRubySet; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/set.rb#23
Concurrent::SetImplementation = Concurrent::CRubySet
# A thread-safe, write-once variation of Ruby's standard `Struct`.
# Each member can have its value set at most once, either at construction
# or any time thereafter. Attempting to assign a value to a member
# that has already been set will result in a `Concurrent::ImmutabilityError`.
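#
# A minimal sketch of that write-once behaviour; the struct name and members
# are illustrative:
#
# @example
#   Point = Concurrent::SettableStruct.new(:x, :y)
#   point = Point.new(1)   # :x is set at construction, :y is left unset
#   point.y = 2            # an unset member may be assigned once
#   point.x = 10           # raises Concurrent::ImmutabilityError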
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
# @see http://en.wikipedia.org/wiki/Final_(Java) Java `final` keyword
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#14
module Concurrent::SettableStruct
include ::Concurrent::Synchronization::AbstractStruct
# Equality
#
# @return [Boolean] true if other has the same struct subclass and has
# equal member values (according to `Object#==`)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#50
def ==(other); end
# Attribute Reference
#
# @param member [Symbol, String, Integer] the string or symbol name of the member
# for which to obtain the value or the member's index
# @raise [NameError] if the member does not exist
# @raise [IndexError] if the index is out of range.
# @return [Object] the value of the given struct member or the member at the given index.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#45
def [](member); end
# Attribute Assignment
#
# Sets the value of the given struct member or the member at the given index.
#
# @param member [Symbol, String, Integer] the string or symbol name of the member
# for which to obtain the value or the member's index
# @raise [NameError] if the name does not exist
# @raise [IndexError] if the index is out of range.
# @raise [Concurrent::ImmutabilityError] if the given member has already been set
# @return [Object] the value of the given struct member or the member at the given index.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#75
def []=(member, value); end
# Yields the value of each struct member in order. If no block is given
# an enumerator is returned.
#
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#55
def each(&block); end
# Yields the name and value of each struct member in order. If no block is
# given an enumerator is returned.
#
# @yield the operation to be performed on each struct member/value pair
# @yieldparam member [Object] each struct member (in order)
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#61
def each_pair(&block); end
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#29
def inspect; end
# Returns a new struct containing the contents of `other` and the contents
# of `self`. If no block is specified, the value for entries with duplicate
# keys will be that of `other`. Otherwise the value for each duplicate key
# is determined by calling the block with the key, its value in `self` and
# its value in `other`.
#
# @param other [Hash] the hash from which to set the new values
# @raise [ArgumentError] of given a member that is not defined in the struct
# @return [Synchronization::AbstractStruct] a new struct with the new values
# @yield an options block for resolving duplicate keys
# @yieldparam member [String, Symbol] the name of the member which is duplicated
# @yieldparam selfvalue [Object] the value of the member in `self`
# @yieldparam othervalue [Object] the value of the member in `other`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#35
def merge(other, &block); end
# Yields each member value from the struct to the block and returns an Array
# containing the member values from the struct for which the given block
# returns a true value (equivalent to `Enumerable#select`).
#
# @return [Array] an array containing each value for which the block returns true
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#67
def select(&block); end
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#18
def to_a; end
# Returns a hash containing the names and values for the struct's members.
#
# @return [Hash] the names and values for the struct's members
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#40
def to_h; end
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#29
def to_s; end
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#18
def values; end
# Returns the struct member values for each selector as an Array.
#
# A selector may be either an Integer offset or a Range of offsets (as in `Array#values_at`).
#
# @param indexes [Fixnum, Range] the index(es) from which to obtain the values (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#24
def values_at(*indexes); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#97
def initialize_copy(original); end
class << self
# Factory for creating new struct classes.
#
# ```
#   new([class_name] [, member_name]+) -> StructClass
#   new([class_name] [, member_name]+) {|StructClass| block } -> StructClass
# new(value, ...) -> obj
# StructClass[value, ...] -> obj
# ```
#
# The first two forms are used to create a new struct subclass `class_name`
# that can contain a value for each member_name . This subclass can be
# used to create instances of the structure like any other Class .
#
# If the `class_name` is omitted an anonymous struct class will be created.
# Otherwise, the name of this struct will appear as a constant in the struct class,
# so it must be unique for all structs under this base class and must start with a
# capital letter. Assigning a struct class to a constant also gives the class
# the name of the constant.
#
# If a block is given it will be evaluated in the context of `StructClass`, passing
# the created class as a parameter. This is the recommended way to customize a struct.
# Subclassing an anonymous struct creates an extra anonymous class that will never be used.
#
# The last two forms create a new instance of a struct subclass. The number of value
# parameters must be less than or equal to the number of attributes defined for the
# struct. Unset parameters default to nil. Passing more parameters than the number of attributes
# will raise an `ArgumentError`.
#
# @see http://ruby-doc.org/core/Struct.html#method-c-new Ruby standard library `Struct#new`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#105
def new(*args, &block); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/settable_struct.rb#115
Concurrent::SettableStruct::FACTORY = T.let(T.unsafe(nil), T.untyped)
# An executor service in which every operation spawns a new,
# independently operating thread.
#
# This is perhaps the most inefficient executor service in this
# library. It exists mainly for testing and debugging. Thread creation
# and management is expensive in Ruby and this executor performs no
# resource pooling. This can be very beneficial during testing and
# debugging because it decouples the using code from the underlying
# executor implementation. In production this executor will likely
# lead to suboptimal performance.
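#
# A minimal sketch; the posted block is a placeholder task:
#
# @example
#   executor = Concurrent::SimpleExecutorService.new
#   executor.post { puts 'runs on its own newly spawned thread' }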
#
# @note Intended for use primarily in testing and debugging.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#21
class Concurrent::SimpleExecutorService < ::Concurrent::RubyExecutorService
# Submit a task to the executor for asynchronous processing.
#
# @param task [Proc] the asynchronous task to perform
# @return [self] returns itself
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#56
def <<(task); end
# Begin an immediate shutdown. In-progress tasks will be allowed to
# complete but enqueued tasks will be dismissed and no new tasks
# will be accepted. Has no additional effect if the thread pool is
# not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#84
def kill; end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#40
def post(*args, &task); end
# Is the executor running?
#
# @return [Boolean] `true` when running, `false` when shutting down or shutdown
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#62
def running?; end
# Begin an orderly shutdown. Tasks already in the queue will be executed,
# but no new tasks will be accepted. Has no additional effect if the
# thread pool is not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#77
def shutdown; end
# Is the executor shutdown?
#
# @return [Boolean] `true` when shutdown, `false` when shutting down or running
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#72
def shutdown?; end
# Is the executor shuttingdown?
#
# @return [Boolean] `true` when not running and not shutdown, else `false`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#67
def shuttingdown?; end
# Block until executor shutdown is complete or until `timeout` seconds have
# passed.
#
# @note Does not initiate shutdown or termination. Either `shutdown` or `kill`
# must be called before this method (or on another thread).
# @param timeout [Integer] the maximum number of seconds to wait for shutdown to complete
# @return [Boolean] `true` if shutdown complete or false on `timeout`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#91
def wait_for_termination(timeout = T.unsafe(nil)); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#97
def ns_initialize(*args); end
class << self
# Submit a task to the executor for asynchronous processing.
#
# @param task [Proc] the asynchronous task to perform
# @return [self] returns itself
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#34
def <<(task); end
# Submit a task to the executor for asynchronous processing.
#
# @param args [Array] zero or more arguments to be passed to the task
# @raise [ArgumentError] if no task is given
# @return [Boolean] `true` if the task is queued, `false` if the executor
# is not running
# @yield the asynchronous task to perform
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/simple_executor_service.rb#24
def post(*args); end
end
end
# A thread pool with a single thread and an unlimited queue. Should the thread
# die for any reason it will be removed and replaced, thus ensuring that
# the executor will always remain viable and available to process jobs.
#
# A common pattern for background processing is to create a single thread
# on which an infinite loop is run. The thread's loop blocks on an input
# source (perhaps blocking I/O or a queue) and processes each input as it
# is received. This pattern has several issues. The thread itself is highly
# susceptible to errors during processing. Also, the thread itself must be
# constantly monitored and restarted should it die. `SingleThreadExecutor`
# encapsulates all these behaviors. The task processor is highly resilient
# to errors from within tasks. Also, should the thread die it will
# automatically be restarted.
#
# The API and behavior of this class are based on Java's `SingleThreadExecutor`.
#
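# @example A minimal usage sketch (the task body and timeout are illustrative)
#   executor = Concurrent::SingleThreadExecutor.new
#   executor.post { puts 'running on the single worker thread' }
#   executor.shutdown
#   executor.wait_for_termination(1)
#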
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/single_thread_executor.rb#37
class Concurrent::SingleThreadExecutor < ::Concurrent::RubySingleThreadExecutor; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/single_thread_executor.rb#10
Concurrent::SingleThreadExecutorImplementation = Concurrent::RubySingleThreadExecutor
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb#2
module Concurrent::Synchronization
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/full_memory_barrier.rb#7
def full_memory_barrier; end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#9
class Concurrent::Synchronization::AbstractLockableObject < ::Concurrent::Synchronization::Object
protected
# Broadcast to all waiting threads.
#
# @note only to be used inside synchronized block
# @note to provide direct access to this method in a descendant, add a method
# ```
# def broadcast
# synchronize { ns_broadcast }
# end
# ```
# @raise [NotImplementedError]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#96
def ns_broadcast; end
# Signal one waiting thread.
#
# @note only to be used inside synchronized block
# @note to provide direct access to this method in a descendant, add a method
# ```
# def signal
# synchronize { ns_signal }
# end
# ```
# @raise [NotImplementedError]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#81
def ns_signal; end
# Wait until another thread calls #signal or #broadcast;
#   spurious wake-ups can happen.
#
# @note only to be used inside synchronized block
# @note to provide direct access to this method in a descendant, add a method
# ```
# def wait(timeout = nil)
# synchronize { ns_wait(timeout) }
# end
# ```
# @param timeout [Numeric, nil] in seconds, `nil` means no timeout
# @raise [NotImplementedError]
# @return [self]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#66
def ns_wait(timeout = T.unsafe(nil)); end
# Wait until the condition is met or the timeout passes;
#   protects against spurious wake-ups.
#
# @note only to be used inside synchronized block
# @note to provide direct access to this method in a descendant, add a method
# ```
# def wait_until(timeout = nil, &condition)
# synchronize { ns_wait_until(timeout, &condition) }
# end
# ```
# @param timeout [Numeric, nil] in seconds, `nil` means no timeout
# @return [true, false] if condition met
# @yield condition to be met
# @yieldreturn [true, false]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#37
def ns_wait_until(timeout = T.unsafe(nil), &condition); end
# @note can be made public in descendants if required by `public :synchronize`
# @raise [NotImplementedError]
# @yield runs the block synchronized against this object,
# equivalent of java's `synchronize(this) {}`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#18
def synchronize; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb#6
class Concurrent::Synchronization::AbstractObject
# @return [AbstractObject] a new instance of AbstractObject
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb#7
def initialize; end
# @abstract
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb#13
def full_memory_barrier; end
class << self
# @raise [NotImplementedError]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb#17
def attr_volatile(*names); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#6
module Concurrent::Synchronization::AbstractStruct
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#9
def initialize(*values); end
# Returns the number of struct members.
#
# @return [Fixnum] the number of struct members
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#19
def length; end
# Returns the struct members as an array of symbols.
#
# @return [Array] the struct members as an array of symbols
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#29
def members; end
# Returns the number of struct members.
#
# @return [Fixnum] the number of struct members
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#19
def size; end
protected
# Yields the value of each struct member in order. If no block is given
# an enumerator is returned.
#
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#82
def ns_each; end
# Yields the name and value of each struct member in order. If no block is
# given an enumerator is returned.
#
# @yield the operation to be performed on each struct member/value pair
# @yieldparam member [Object] each struct member (in order)
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#89
def ns_each_pair; end
# Equality
#
# @return [Boolean] true if other has the same struct subclass and has
# equal member values (according to `Object#==`)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#75
def ns_equality(other); end
# Attribute Reference
#
# @param member [Symbol, String, Integer] the string or symbol name of the member
# for which to obtain the value or the member's index
# @raise [NameError] if the member does not exist
# @raise [IndexError] if the index is out of range.
# @return [Object] the value of the given struct member or the member at the given index.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#59
def ns_get(member); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#119
def ns_initialize_copy; end
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#105
def ns_inspect; end
# Returns a new struct containing the contents of `other` and the contents
# of `self`. If no block is specified, the value for entries with duplicate
# keys will be that of `other`. Otherwise the value for each duplicate key
# is determined by calling the block with the key, its value in `self` and
# its value in `other`.
#
# @param other [Hash] the hash from which to set the new values
# @raise [ArgumentError] if given a member that is not defined in the struct
# @return [Synchronization::AbstractStruct] a new struct with the new values
# @yield an options block for resolving duplicate keys
# @yieldparam member [String, Symbol] the name of the member which is duplicated
# @yieldparam selfvalue [Object] the value of the member in `self`
# @yieldparam othervalue [Object] the value of the member in `other`
#
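# A minimal sketch of the public `merge` provided by the concrete struct
# classes, which delegates to this method (the `Point` struct and values
# are illustrative):
#
#   Point = Concurrent::ImmutableStruct.new(:x, :y)
#   point = Point.new(1, 2)
#   point.merge(y: 10).to_h                                     #=> {:x=>1, :y=>10}
#   point.merge(y: 10) { |_member, old, new| old + new }.to_h   #=> {:x=>1, :y=>12}
#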
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#114
def ns_merge(other, &block); end
# Yields each member value from the struct to the block and returns an Array
# containing the member values from the struct for which the given block
# returns a true value (equivalent to `Enumerable#select`).
#
# @return [Array] an array containing each value for which the block returns true
# @yield the operation to be performed on each struct member
# @yieldparam value [Object] each struct value (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#98
def ns_select; end
# Returns a hash containing the names and values for the struct's members.
#
# @return [Hash] the names and values for the struct's members
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#52
def ns_to_h; end
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#38
def ns_values; end
# Returns the struct member values for each selector as an Array.
#
# A selector may be either an Integer offset or a Range of offsets (as in `Array#values_at`).
#
# @param indexes [Fixnum, Range] the index(es) from which to obtain the values (in order)
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#45
def ns_values_at(indexes); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#130
def pr_underscore(clazz); end
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb#141
def define_struct_class(parent, base, name, members, &block); end
end
end
# TODO (pitr-ch 04-Dec-2016): should be in edge
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#8
class Concurrent::Synchronization::Condition < ::Concurrent::Synchronization::LockableObject
# @return [Condition] a new instance of Condition
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#18
def initialize(lock); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#47
def broadcast; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#51
def ns_broadcast; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#43
def ns_signal; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#27
def ns_wait(timeout = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#35
def ns_wait_until(timeout = T.unsafe(nil), &condition); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#39
def signal; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#23
def wait(timeout = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#31
def wait_until(timeout = T.unsafe(nil), &condition); end
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#29
def private_new(*args, &block); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#29
def new(*args, &block); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#8
module Concurrent::Synchronization::ConditionSignalling
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#16
def ns_broadcast; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#11
def ns_signal; end
end
# TODO (pitr-ch 04-Dec-2016): should be in edge
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lock.rb#8
class Concurrent::Synchronization::Lock < ::Concurrent::Synchronization::LockableObject
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lock.rb#31
def broadcast; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#16
def ns_broadcast; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#11
def ns_signal; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#52
def ns_wait(timeout = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb#37
def ns_wait_until(timeout = T.unsafe(nil), &condition); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lock.rb#25
def signal; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#44
def synchronize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lock.rb#13
def wait(timeout = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lock.rb#19
def wait_until(timeout = T.unsafe(nil), &condition); end
end
# Safe synchronization under any Ruby implementation.
# It provides methods like {#synchronize}, {#wait}, {#signal} and {#broadcast}.
# Provides a single layer which can improve its implementation over time without changes needed to
# the classes using it. Use {Synchronization::Object} not this abstract class.
#
# @note this object does not support usage together with
# [`Thread#wakeup`](http://ruby-doc.org/core/Thread.html#method-i-wakeup)
# and [`Thread#raise`](http://ruby-doc.org/core/Thread.html#method-i-raise).
# `Thread#sleep` and `Thread#wakeup` will work as expected but mixing `Synchronization::Object#wait` and
# `Thread#wakeup` will not work on all platforms.
#
# @see Event implementation as an example of this class use
#
# @example simple
# class AnClass < Synchronization::Object
# def initialize
# super
# synchronize { @value = 'asd' }
# end
#
# def value
# synchronize { @value }
# end
# end
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lockable_object.rb#50
class Concurrent::Synchronization::LockableObject < ::Concurrent::Synchronization::MutexLockableObject
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/condition.rb#57
def new_condition; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/lockable_object.rb#11
Concurrent::Synchronization::LockableObjectImplementation = Concurrent::Synchronization::MutexLockableObject
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#60
class Concurrent::Synchronization::MonitorLockableObject < ::Concurrent::Synchronization::AbstractLockableObject
include ::Concurrent::Synchronization::ConditionSignalling
extend ::Concurrent::Synchronization::SafeInitialization
# @return [MonitorLockableObject] a new instance of MonitorLockableObject
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#65
def initialize; end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#83
def ns_wait(timeout = T.unsafe(nil)); end
# TODO may be a problem with lock.synchronize { lock.wait }
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#79
def synchronize; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#71
def initialize_copy(other); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#25
class Concurrent::Synchronization::MutexLockableObject < ::Concurrent::Synchronization::AbstractLockableObject
include ::Concurrent::Synchronization::ConditionSignalling
extend ::Concurrent::Synchronization::SafeInitialization
# @return [MutexLockableObject] a new instance of MutexLockableObject
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#30
def initialize; end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#52
def ns_wait(timeout = T.unsafe(nil)); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#44
def synchronize; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb#36
def initialize_copy(other); end
end
# Abstract object providing final, volatile, and CAS extensions to build other concurrent abstractions.
# - final instance variables see {Object.safe_initialization!}
# - volatile instance variables see {Object.attr_volatile}
# - atomic instance variables see {Object.attr_atomic}
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#15
class Concurrent::Synchronization::Object < ::Concurrent::Synchronization::AbstractObject
include ::Concurrent::Synchronization::Volatile
extend ::Concurrent::Synchronization::Volatile::ClassMethods
# Has to be called by children.
#
# @return [Object] a new instance of Object
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#28
def initialize; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#146
def __initialize_atomic_fields__; end
class << self
# @return [true, false] is the attribute with the given name atomic?
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#125
def atomic_attribute?(name); end
# @param inherited [true, false] should inherited volatile with CAS fields be returned?
# @return [::Array<Symbol>] Returns defined volatile with CAS fields on this class.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#119
def atomic_attributes(inherited = T.unsafe(nil)); end
# Creates methods for reading and writing to an instance variable with
# volatile (Java) semantic as {.attr_volatile} does.
# The instance variable should be accessed only through generated methods.
# This method generates the following methods: `value`, `value=(new_value) #=> new_value`,
# `swap_value(new_value) #=> old_value`,
# `compare_and_set_value(expected, value) #=> true || false`, `update_value(&block)`.
#
# @param names [::Array<Symbol>] of the instance variables to be volatile with CAS.
# @return [::Array<Symbol>] the names of the defined methods.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#84
def attr_atomic(*names); end
# For testing purposes, quite slow. Injects assert code into the `new` method which will raise if a class instance contains
# any instance variables with CamelCase names and the class isn't {.safe_initialization?}.
#
# @raise when an offence is found
# @return [true]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#45
def ensure_safe_initialization_when_final_fields_are_present; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#33
def safe_initialization!; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#37
def safe_initialization?; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/object.rb#131
def define_initialize_atomic_fields; end
end
end
# By extending this module, a class and all its children are marked to be constructed safely, meaning that
# all writes (ivar initializations) are made visible to all readers of the newly constructed object. It ensures
# the same behaviour as Java's final fields.
#
# Due to using Kernel#extend, the module is not included again if already present in the ancestors,
# which avoids extra overhead.
#
# @example
# class AClass < Concurrent::Synchronization::Object
# extend Concurrent::Synchronization::SafeInitialization
#
# def initialize
# @AFinalValue = 'value' # published safely, #foo will never return nil
# end
#
# def foo
# @AFinalValue
# end
# end
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#28
module Concurrent::Synchronization::SafeInitialization
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb#29
def new(*args, &block); end
end
# Volatile adds the attr_volatile class method when included.
#
# @example
#   class Foo
#     include Concurrent::Synchronization::Volatile
#
#     attr_volatile :bar
#
#     def initialize
#       self.bar = 1
#     end
#   end
#
#   foo = Foo.new
#   foo.bar
#   => 1
#   foo.bar = 2
#   => 2
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/volatile.rb#28
module Concurrent::Synchronization::Volatile
mixes_in_class_methods ::Concurrent::Synchronization::Volatile::ClassMethods
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/volatile.rb#33
def full_memory_barrier; end
class << self
# @private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/volatile.rb#29
def included(base); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/volatile.rb#37
module Concurrent::Synchronization::Volatile::ClassMethods
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/synchronization/volatile.rb#39
def attr_volatile(*names); end
end
# This class provides a trivial way to synchronize all calls to a given object
# by wrapping it with a `Delegator` that performs `Monitor#enter/exit` calls
# around the delegated `#send`. Example:
#
# array = [] # not thread-safe on many impls
# array = SynchronizedDelegator.new([]) # thread-safe
#
# A simple `Monitor` provides a very coarse-grained way to synchronize a given
# object, in that it will cause synchronization for methods that have no need
# for it, but this is a trivial way to get thread-safety where none may exist
# currently on some implementations.
#
# This class is currently being considered for inclusion into stdlib, via
# https://bugs.ruby-lang.org/issues/8556
#
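# A minimal usage sketch (the wrapped Array and the calls on it are illustrative):
#
#   array = Concurrent::SynchronizedDelegator.new([])
#   array << :foo          # executed while holding the monitor
#   array.include?(:foo)   #=> true, also synchronized
#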
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb#21
class Concurrent::SynchronizedDelegator < ::SimpleDelegator
# @return [SynchronizedDelegator] a new instance of SynchronizedDelegator
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb#31
def initialize(obj); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb#36
def method_missing(method, *args, &block); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb#22
def setup; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb#27
def teardown; end
end
# A `TVar` is a transactional variable - a single-element container that
# is used as part of a transaction - see `Concurrent::atomically`.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
#   atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
#   *coordinated*, *synchronous* change of *many* states. Used when multiple
#   values must change together, in an all-or-nothing transaction.
# {include:file:docs-source/tvar.md}
#
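# @example A minimal counter sketch (the initial value and increment are illustrative)
#   counter = Concurrent::TVar.new(0)
#
#   Concurrent::atomically do
#     counter.value += 1
#   end
#
#   counter.value #=> 1
#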
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#12
class Concurrent::TVar < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# Create a new `TVar` with an initial value.
#
# @return [TVar] a new instance of TVar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#16
def initialize(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#46
def unsafe_lock; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#36
def unsafe_value; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#41
def unsafe_value=(value); end
# Get the value of a `TVar`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#22
def value; end
# Set the value of a `TVar`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#29
def value=(value); end
end
# A `ThreadLocalVar` is a variable where the value is different for each thread.
# Each variable may have a default value, but when you modify the variable only
# the current thread will ever see that change.
#
# This is similar to Ruby's built-in thread-local variables (`Thread#thread_variable_get`),
# but with these major advantages:
# * `ThreadLocalVar` has its own identity; it doesn't need a Symbol.
# * Each of Ruby's built-in thread-local variables leaks some memory forever (it's a Symbol held forever on the thread),
#   so it's only OK to create a small number of them.
#   `ThreadLocalVar` has no such issue and it is fine to create many of them.
# * Ruby's built-in thread-local variables leak the value set on each thread forever (unless explicitly set to nil).
# `ThreadLocalVar` automatically removes the mapping for each thread once the `ThreadLocalVar` instance is GC'd.
#
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
#   atomically. Updates are synchronous but fast. Best used when updates are
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
#   *coordinated*, *synchronous* change of *many* states. Used when multiple
#   values must change together, in an all-or-nothing transaction.
#
# @example
# v = ThreadLocalVar.new(14)
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# @example
# v = ThreadLocalVar.new(14)
#
# t1 = Thread.new do
# v.value #=> 14
# v.value = 1
# v.value #=> 1
# end
#
# t2 = Thread.new do
# v.value #=> 14
# v.value = 2
# v.value #=> 2
# end
#
# v.value #=> 14
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#43
class Concurrent::ThreadLocalVar
# Creates a thread local variable.
#
# @param default [Object] the default value when otherwise unset
# @param default_block [Proc] Optional block that gets called to obtain the
# default value for each thread
# @return [ThreadLocalVar] a new instance of ThreadLocalVar
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#51
def initialize(default = T.unsafe(nil), &default_block); end
# Bind the given value to thread local storage during
# execution of the given block.
#
# @param value [Object] the value to bind
# @return [Object] the value
# @yield the operation to be performed with the bound variable
#
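# @example Temporarily binding a value (a minimal sketch; values are illustrative)
#   v = Concurrent::ThreadLocalVar.new(14)
#   v.bind(2) { v.value } #=> 2
#   v.value               #=> 14
#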
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#88
def bind(value); end
# Returns the value in the current thread's copy of this thread-local variable.
#
# @return [Object] the current value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#70
def value; end
# Sets the current thread's copy of this thread-local variable to the specified value.
#
# @param value [Object] the value to set
# @return [Object] the new value
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#78
def value=(value); end
protected
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#103
def default; end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/thread_local_var.rb#44
Concurrent::ThreadLocalVar::LOCALS = T.let(T.unsafe(nil), Concurrent::ThreadLocals)
# An array-backed storage of indexed variables per thread.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#141
class Concurrent::ThreadLocals < ::Concurrent::AbstractLocals
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#142
def locals; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/atomic/locals.rb#146
def locals!; end
end
# An abstraction composed of one or more threads and a task queue. Tasks
# (blocks or `proc` objects) are submitted to the pool and added to the queue.
# The threads in the pool remove the tasks and execute them in the order
# they were received.
#
# A `ThreadPoolExecutor` will automatically adjust the pool size according
# to the bounds set by `min-threads` and `max-threads`. When a new task is
# submitted and fewer than `min-threads` threads are running, a new thread
# is created to handle the request, even if other worker threads are idle.
# If there are more than `min-threads` but fewer than `max-threads` threads
# running, a new thread will be created only if the queue is full.
#
# Threads that are idle for too long will be garbage collected, down to the
# configured minimum. Should a thread crash, it, too, will be garbage collected.
#
# `ThreadPoolExecutor` is based on the Java class of the same name. From
# the official Java documentation:
#
# > Thread pools address two different problems: they usually provide
# > improved performance when executing large numbers of asynchronous tasks,
# > due to reduced per-task invocation overhead, and they provide a means
# > of bounding and managing the resources, including threads, consumed
# > when executing a collection of tasks. Each ThreadPoolExecutor also
# > maintains some basic statistics, such as the number of completed tasks.
# >
# > To be useful across a wide range of contexts, this class provides many
# > adjustable parameters and extensibility hooks. However, programmers are
# > urged to use the more convenient Executors factory methods
# > [CachedThreadPool] (unbounded thread pool, with automatic thread reclamation),
# > [FixedThreadPool] (fixed size thread pool) and [SingleThreadExecutor] (single
# > background thread), that preconfigure settings for the most common usage
# > scenarios.
#
# **Thread Pool Options**
#
# Thread pools support several configuration options:
#
# * `idletime`: The number of seconds that a thread may be idle before being reclaimed.
# * `name`: The name of the executor (optional). Printed in the executor's `#to_s` output and
#   a `<name>-worker-<id>` name is given to its threads if supported by the Ruby
#   implementation in use. `<id>` is unique for each thread.
# * `max_queue`: The maximum number of tasks that may be waiting in the work queue at
# any one time. When the queue size reaches `max_queue` and no new threads can be created,
# subsequent tasks will be rejected in accordance with the configured `fallback_policy`.
# * `auto_terminate`: When true (default), the threads started will be marked as daemon.
# * `fallback_policy`: The policy defining how rejected tasks are handled.
#
# Three fallback policies are supported:
#
# * `:abort`: Raise a `RejectedExecutionError` exception and discard the task.
# * `:discard`: Discard the task and return false.
# * `:caller_runs`: Execute the task on the calling thread.
#
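# A construction sketch using the options above (all values below are illustrative):
#
# ```ruby
# pool = Concurrent::ThreadPoolExecutor.new(
#   min_threads: 2,
#   max_threads: 8,
#   max_queue: 100,
#   fallback_policy: :caller_runs
# )
# pool.post { puts 'work' }
# pool.shutdown
# pool.wait_for_termination(10)
# ```
#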
# **Shutting Down Thread Pools**
#
# Killing a thread pool while tasks are still being processed, either by calling
# the `#kill` method or at application exit, will have unpredictable results. There
# is no way for the thread pool to know what resources are being used by the
# in-progress tasks. When those tasks are killed the impact on those resources
# cannot be predicted. The *best* practice is to explicitly shutdown all thread
# pools using the provided methods:
#
# * Call `#shutdown` to initiate an orderly termination of all in-progress tasks
# * Call `#wait_for_termination` with an appropriate timeout interval and allow
# the orderly shutdown to complete
# * Call `#kill` *only when* the thread pool fails to shutdown in the allotted time
#
# On some runtime platforms (most notably the JVM) the application will not
# exit until all thread pools have been shutdown. To prevent applications from
# "hanging" on exit, all threads can be marked as daemon according to the
# `:auto_terminate` option.
#
# ```ruby
# pool1 = Concurrent::FixedThreadPool.new(5) # threads will be marked as daemon
# pool2 = Concurrent::FixedThreadPool.new(5, auto_terminate: false) # mark threads as non-daemon
# ```
#
# @note Failure to properly shutdown a thread pool can lead to unpredictable results.
# Please read *Shutting Down Thread Pools* for more information.
# @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html Java Tutorials: Thread Pools
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html Java Executors class
# @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html Java ExecutorService interface
# @see https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html#setDaemon-boolean-
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/thread_pool_executor.rb#56
class Concurrent::ThreadPoolExecutor < ::Concurrent::RubyThreadPoolExecutor; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/thread_pool_executor.rb#10
Concurrent::ThreadPoolExecutorImplementation = Concurrent::RubyThreadPoolExecutor
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util.rb#4
module Concurrent::ThreadSafe; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util.rb#7
module Concurrent::ThreadSafe::Util
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#16
def make_synchronized_on_cruby(klass); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb#41
def make_synchronized_on_truffleruby(klass); end
end
end
# TODO (pitr-ch 15-Oct-2016): migrate to Utility::ProcessorCounter
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util.rb#13
Concurrent::ThreadSafe::Util::CPU_COUNT = T.let(T.unsafe(nil), Integer)
# TODO (pitr-ch 15-Oct-2016): migrate to Utility::NativeInteger
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util.rb#10
Concurrent::ThreadSafe::Util::FIXNUM_BIT_SIZE = T.let(T.unsafe(nil), Integer)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/thread_safe/util.rb#11
Concurrent::ThreadSafe::Util::MAX_INT = T.let(T.unsafe(nil), Integer)
# Raised when an operation times out.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/errors.rb#55
class Concurrent::TimeoutError < ::Concurrent::Error; end
# Executes a collection of tasks, each after a given delay. A master task
# monitors the set and schedules each task for execution at the appropriate
# time. Tasks are run on the global thread pool or on the supplied executor.
# Each task is represented as a `ScheduledTask`.
#
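# @example A minimal sketch (the two-second delay and task body are illustrative)
#   timer_set = Concurrent::TimerSet.new
#   timer_set.post(2) { puts 'two seconds later' }
#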
# @see Concurrent::ScheduledTask
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#19
class Concurrent::TimerSet < ::Concurrent::RubyExecutorService
# Create a new set of timed tasks.
#
# @option opts
# @param opts [Hash] the options used to specify the executor on which to perform actions
# @return [TimerSet] a new instance of TimerSet
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#30
def initialize(opts = T.unsafe(nil)); end
# Begin an immediate shutdown. In-progress tasks will be allowed to
# complete but enqueued tasks will be dismissed and no new tasks
# will be accepted. Has no additional effect if the thread pool is
# not running.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#62
def kill; end
# Post a task to be executed after a given delay (in seconds). If the
# delay is less than 1/100th of a second the task will be immediately posted
# to the executor.
#
# @param delay [Float] the number of seconds to wait for before executing the task.
# @param args [Array<Object>] the arguments passed to the task on execution.
# @raise [ArgumentError] if the intended execution time is not in the future.
# @raise [ArgumentError] if no block is given.
# @return [Concurrent::ScheduledTask, false] IVar representing the task if the post
# is successful; false after shutdown.
# @yield the task to be performed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#48
def post(delay, *args, &task); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#166
def <<(task); end
# Initialize the object.
#
# @param opts [Hash] the options to create the object with.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#74
def ns_initialize(opts); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#94
def ns_post_task(task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#129
def ns_reset_if_forked; end
# `ExecutorService` callback called during shutdown.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#122
def ns_shutdown_execution; end
# Post the task to the internal queue.
#
# @note This is intended as a callback method from ScheduledTask
# only. It is not intended to be used directly. Post a task
#   by using the `ScheduledTask#execute` method.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#89
def post_task(task); end
# Run a loop and execute tasks in the scheduled order and at the approximate
# scheduled time. If no tasks remain the thread will exit gracefully so that
# garbage collection can occur. If there are no ready tasks it will sleep
# for up to 60 seconds waiting for the next scheduled task.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#143
def process_tasks; end
# Remove the given task from the queue.
#
# @note This is intended as a callback method from `ScheduledTask`
# only. It is not intended to be used directly. Cancel a task
# by using the `ScheduledTask#cancel` method.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/timer_set.rb#115
def remove_task(task); end
end
# A very common concurrency pattern is to run a thread that performs a task at
# regular intervals. The thread that performs the task sleeps for the given
# interval then wakes up and performs the task. Lather, rinse, repeat... This
# pattern causes two problems. First, it is difficult to test the business
# logic of the task because the task itself is tightly coupled with the
# concurrency logic. Second, an exception raised while performing the task can
# cause the entire thread to abend. In a long-running application where the
# task thread is intended to run for days/weeks/years a crashed task thread
# can pose a significant problem. `TimerTask` alleviates both problems.
#
# When a `TimerTask` is launched it starts a thread for monitoring the
# execution interval. The `TimerTask` thread does not perform the task,
# however. Instead, the TimerTask launches the task on a separate thread.
# Should the task experience an unrecoverable crash only the task thread will
# crash. This makes the `TimerTask` very fault tolerant. Additionally, the
# `TimerTask` thread can respond to the success or failure of the task,
# performing logging or ancillary operations.
#
# One other advantage of `TimerTask` is that it forces the business logic to
# be completely decoupled from the concurrency logic. The business logic can
# be tested separately then passed to the `TimerTask` for scheduling and
# running.
#
# A `TimerTask` supports two different types of interval calculations.
# A fixed delay will always wait the same amount of time between the
# completion of one task and the start of the next. A fixed rate will
# attempt to maintain a constant rate of execution regardless of the
# duration of the task. For example, if a fixed rate task is scheduled
# to run every 60 seconds but the task itself takes 10 seconds to
# complete, the next task will be scheduled to run 50 seconds after
# the start of the previous task. If the task takes 70 seconds to
# complete, the next task will start immediately after the previous
# task completes. Tasks will not be executed concurrently.
#
# In some cases it may be necessary for a `TimerTask` to affect its own
# execution cycle. To facilitate this, a reference to the TimerTask instance
# is passed as an argument to the provided block every time the task is
# executed.
#
# The `TimerTask` class includes the `Dereferenceable` mixin module so the
# result of the last execution is always available via the `#value` method.
# Dereferencing options can be passed to the `TimerTask` during construction or
# at any later time using the `#set_deref_options` method.
#
# `TimerTask` supports notification through the Ruby standard library
# {http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html
# Observable} module. On execution the `TimerTask` will notify the observers
# with three arguments: time of execution, the result of the block (or nil on
# failure), and any raised exceptions (or nil on success).
#
# @example Basic usage
# task = Concurrent::TimerTask.new{ puts 'Boom!' }
# task.execute
#
# task.execution_interval #=> 60 (default)
#
# # wait 60 seconds...
# #=> 'Boom!'
#
# task.shutdown #=> true
# @example Configuring `:execution_interval`
# task = Concurrent::TimerTask.new(execution_interval: 5) do
# puts 'Boom!'
# end
#
# task.execution_interval #=> 5
# @example Immediate execution with `:run_now`
# task = Concurrent::TimerTask.new(run_now: true){ puts 'Boom!' }
# task.execute
#
# #=> 'Boom!'
# @example Configuring `:interval_type` with either :fixed_delay or :fixed_rate, default is :fixed_delay
# task = Concurrent::TimerTask.new(execution_interval: 5, interval_type: :fixed_rate) do
# puts 'Boom!'
# end
# task.interval_type #=> :fixed_rate
# @example Last `#value` and `Dereferenceable` mixin
# task = Concurrent::TimerTask.new(
# dup_on_deref: true,
# execution_interval: 5
# ){ Time.now }
#
# task.execute
# Time.now #=> 2013-11-07 18:06:50 -0500
# sleep(10)
# task.value #=> 2013-11-07 18:06:55 -0500
# @example Controlling execution from within the block
# timer_task = Concurrent::TimerTask.new(execution_interval: 1) do |task|
# task.execution_interval.to_i.times{ print 'Boom! ' }
# print "\n"
# task.execution_interval += 1
# if task.execution_interval > 5
# puts 'Stopping...'
# task.shutdown
# end
# end
#
# timer_task.execute
# #=> Boom!
# #=> Boom! Boom!
# #=> Boom! Boom! Boom!
# #=> Boom! Boom! Boom! Boom!
# #=> Boom! Boom! Boom! Boom! Boom!
# #=> Stopping...
# @example Observation
# class TaskObserver
# def update(time, result, ex)
# if result
# print "(#{time}) Execution successfully returned #{result}\n"
# else
# print "(#{time}) Execution failed with error #{ex}\n"
# end
# end
# end
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ 42 }
# task.add_observer(TaskObserver.new)
# task.execute
# sleep 4
#
# #=> (2013-10-13 19:08:58 -0400) Execution successfully returned 42
# #=> (2013-10-13 19:08:59 -0400) Execution successfully returned 42
# #=> (2013-10-13 19:09:00 -0400) Execution successfully returned 42
# task.shutdown
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ sleep }
# task.add_observer(TaskObserver.new)
# task.execute
#
# #=> (2013-10-13 19:07:25 -0400) Execution timed out
# #=> (2013-10-13 19:07:27 -0400) Execution timed out
# #=> (2013-10-13 19:07:29 -0400) Execution timed out
# task.shutdown
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ raise StandardError }
# task.add_observer(TaskObserver.new)
# task.execute
#
# #=> (2013-10-13 19:09:37 -0400) Execution failed with error StandardError
# #=> (2013-10-13 19:09:38 -0400) Execution failed with error StandardError
# #=> (2013-10-13 19:09:39 -0400) Execution failed with error StandardError
# task.shutdown
# @see http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html
# @see http://docs.oracle.com/javase/7/docs/api/java/util/TimerTask.html
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#165
class Concurrent::TimerTask < ::Concurrent::RubyExecutorService
include ::Concurrent::Concern::Dereferenceable
include ::Concurrent::Concern::Observable
# Create a new TimerTask with the given task and configuration.
#
# @option opts
# @option opts
# @option opts
# @param opts [Hash] the options defining task execution.
# @raise ArgumentError when no block is given.
# @return [TimerTask] the new `TimerTask`
# @yield to the block after :execution_interval seconds have passed since
# the last yield
# @yieldparam task a reference to the `TimerTask` instance so that the
# block can control its own lifecycle. Necessary since `self` will
# refer to the execution context of the block rather than the running
# `TimerTask`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#209
def initialize(opts = T.unsafe(nil), &task); end
# Execute a previously created `TimerTask`.
#
# @example Instance and execute in separate steps
# task = Concurrent::TimerTask.new(execution_interval: 10){ print "Hello World\n" }
# task.running? #=> false
# task.execute
# task.running? #=> true
# @example Instance and execute in one line
# task = Concurrent::TimerTask.new(execution_interval: 10){ print "Hello World\n" }.execute
# task.running? #=> true
# @return [TimerTask] a reference to `self`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#235
def execute; end
# @return [Fixnum] Number of seconds after the task completes before the
# task is performed again.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#259
def execution_interval; end
# @return [Fixnum] Number of seconds after the task completes before the
# task is performed again.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#266
def execution_interval=(value); end
# @return [Symbol] method to calculate the interval between executions
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#276
def interval_type; end
# Is the executor running?
#
# @return [Boolean] `true` when running, `false` when shutting down or shutdown
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#218
def running?; end
# @return [Fixnum] Number of seconds the task can run before it is
# considered to have failed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#281
def timeout_interval; end
# @return [Fixnum] Number of seconds the task can run before it is
# considered to have failed.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#288
def timeout_interval=(value); end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/executor_service.rb#166
def <<(task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#352
def calculate_next_interval(start_time); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#336
def execute_task(completion); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#296
def ns_initialize(opts, &task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#324
def ns_kill_execution; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#318
def ns_shutdown_execution; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb#17
def post(*args, &task); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#330
def schedule_next_task(interval = T.unsafe(nil)); end
class << self
# Create and execute a new `TimerTask`.
#
# @example
# task = Concurrent::TimerTask.execute(execution_interval: 10){ print "Hello World\n" }
# task.running? #=> true
# @option opts
# @option opts
# @option opts
# @param opts [Hash] the options defining task execution.
# @raise ArgumentError when no block is given.
# @return [TimerTask] the new `TimerTask`
# @yield to the block after :execution_interval seconds have passed since
# the last yield
# @yieldparam task a reference to the `TimerTask` instance so that the
# block can control its own lifecycle. Necessary since `self` will
# refer to the execution context of the block rather than the running
# `TimerTask`.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#252
def execute(opts = T.unsafe(nil), &task); end
end
end
# Default `:interval_type`
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#181
Concurrent::TimerTask::DEFAULT_INTERVAL_TYPE = T.let(T.unsafe(nil), Symbol)
# Default `:execution_interval` in seconds.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#170
Concurrent::TimerTask::EXECUTION_INTERVAL = T.let(T.unsafe(nil), Integer)
# Maintain the interval between the end of one execution and the start of the next execution.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#173
Concurrent::TimerTask::FIXED_DELAY = T.let(T.unsafe(nil), Symbol)
# Maintain the interval between the start of one execution and the start of the next.
# If execution time exceeds the interval, the next execution will start immediately
# after the previous execution finishes. Executions will not run concurrently.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/timer_task.rb#178
Concurrent::TimerTask::FIXED_RATE = T.let(T.unsafe(nil), Symbol)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#153
class Concurrent::Transaction
# @return [Transaction] a new instance of Transaction
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#162
def initialize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#192
def abort; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#196
def commit; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#177
def open(tvar); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#166
def read(tvar); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#206
def unlock; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#171
def write(tvar, value); end
class << self
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#212
def current; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#216
def current=(transaction); end
end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#155
Concurrent::Transaction::ABORTED = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#159
class Concurrent::Transaction::AbortError < ::StandardError; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#160
class Concurrent::Transaction::LeaveError < ::StandardError; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tvar.rb#157
class Concurrent::Transaction::OpenEntry < ::Struct
# Returns the value of attribute modified
#
# @return [Object] the current value of modified
def modified; end
# Sets the attribute modified
#
# @param value [Object] the value to set the attribute modified to.
# @return [Object] the newly set value
def modified=(_); end
# Returns the value of attribute value
#
# @return [Object] the current value of value
def value; end
# Sets the attribute value
#
# @param value [Object] the value to set the attribute value to.
# @return [Object] the newly set value
def value=(_); end
class << self
def [](*_arg0); end
def inspect; end
def keyword_init?; end
def members; end
def new(*_arg0); end
end
end
# A fixed size array with volatile (synchronized, thread safe) getters/setters.
# Mixes in Ruby's `Enumerable` module for enhanced search, sort, and traversal.
#
# @example
# tuple = Concurrent::Tuple.new(16)
#
# tuple.set(0, :foo) #=> :foo | volatile write
# tuple.get(0) #=> :foo | volatile read
# tuple.compare_and_set(0, :foo, :bar) #=> true | strong CAS
# tuple.cas(0, :foo, :baz) #=> false | strong CAS
# tuple.get(0) #=> :bar | volatile read
# @see https://en.wikipedia.org/wiki/Tuple Tuple entry at Wikipedia
# @see http://www.erlang.org/doc/reference_manual/data_types.html#id70396 Erlang Tuple
# @see http://ruby-doc.org/core-2.2.2/Enumerable.html Enumerable
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#20
class Concurrent::Tuple
include ::Enumerable
# Create a new tuple of the given size.
#
# @param size [Integer] the number of elements in the tuple
# @return [Tuple] a new instance of Tuple
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#29
def initialize(size); end
# Set the value at the given index to the new value if and only if the current
# value matches the given old value.
#
# @param i [Integer] the index for the element to set
# @param old_value [Object] the value to compare against the current value
# @param new_value [Object] the value to set at the given index
# @return [Boolean] true if the value at the given element was set else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#69
def cas(i, old_value, new_value); end
# Set the value at the given index to the new value if and only if the current
# value matches the given old value.
#
# @param i [Integer] the index for the element to set
# @param old_value [Object] the value to compare against the current value
# @param new_value [Object] the value to set at the given index
# @return [Boolean] true if the value at the given element was set else false
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#69
def compare_and_set(i, old_value, new_value); end
# Calls the given block once for each element in self, passing that element as a parameter.
#
# @yieldparam ref [Object] the `Concurrent::AtomicReference` object at the current index
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#78
def each; end
# Get the value of the element at the given index.
#
# @param i [Integer] the index from which to retrieve the value
# @return [Object] the value at the given index or nil if the index is out of bounds
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#43
def get(i); end
# Set the element at the given index to the given value.
#
# @param i [Integer] the index for the element to set
# @param value [Object] the value to set at the given index
# @return [Object] the new value of the element at the given index or nil if the index is out of bounds
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#55
def set(i, value); end
# The (fixed) size of the tuple.
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#24
def size; end
# Get the value of the element at the given index.
#
# @param i [Integer] the index from which to retrieve the value
# @return [Object] the value at the given index or nil if the index is out of bounds
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#43
def volatile_get(i); end
# Set the element at the given index to the given value.
#
# @param i [Integer] the index for the element to set
# @param value [Object] the value to set at the given index
# @return [Object] the new value of the element at the given index or nil if the index is out of bounds
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/tuple.rb#55
def volatile_set(i, value); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#3
module Concurrent::Utility; end
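# Ruby-engine and OS predicates. The top-level `Concurrent` module extends this
# module, so the checks are normally called as `Concurrent.on_cruby?` and friends.
#
# @example A hedged sketch (results shown assume MRI running on Linux)
#   Concurrent.on_cruby? #=> true
#   Concurrent.on_jruby? #=> false
#   Concurrent.on_linux? #=> true
#   Concurrent.ruby_version(:>=, 3, 0, 0) #=> true when RUBY_VERSION is 3.0.0 or newer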
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#6
module Concurrent::Utility::EngineDetector
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#7
def on_cruby?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#11
def on_jruby?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#27
def on_linux?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#23
def on_osx?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#15
def on_truffleruby?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#19
def on_windows?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/engine.rb#31
def ruby_version(version = T.unsafe(nil), comparison, major, minor, patch); end
end
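# Loads the gem's optional native (C or Java) extensions when they are
# available. The top-level `Concurrent` module extends this module, so the
# predicates are normally reached as `Concurrent.allow_c_extensions?` etc.
#
# @example A hedged sketch (results depend on the Ruby engine and on whether
#   the optional `concurrent-ruby-ext` gem is installed)
#   Concurrent.allow_c_extensions?  #=> true on CRuby, false on other engines
#   Concurrent.c_extensions_loaded? #=> false unless the C extensions have been loaded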
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#9
module Concurrent::Utility::NativeExtensionLoader
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#11
def allow_c_extensions?; end
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#15
def c_extensions_loaded?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#19
def load_native_extensions; end
private
# @return [Boolean]
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#50
def java_extensions_loaded?; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#38
def load_error_path(error); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#46
def set_c_extensions_loaded; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#54
def set_java_extensions_loaded; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_extension_loader.rb#58
def try_load_c_extension(path); end
end
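# Guards for values that must fit in a native machine-word integer (see
# MIN_VALUE and MAX_VALUE below). Each `ensure_*` helper is expected to return
# the value unchanged when it passes the check and to raise otherwise.
#
# @example A hedged sketch (the exact exception classes and messages are
#   implementation details of the gem)
#   Concurrent::Utility::NativeInteger.ensure_integer_and_bounds(42)   #=> 42
#   Concurrent::Utility::NativeInteger.ensure_positive_and_no_zero(0)  # raises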
# @private
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#5
module Concurrent::Utility::NativeInteger
extend ::Concurrent::Utility::NativeInteger
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#24
def ensure_integer(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#31
def ensure_integer_and_bounds(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#17
def ensure_lower_bound(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#37
def ensure_positive(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#44
def ensure_positive_and_no_zero(value); end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#10
def ensure_upper_bound(value); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#8
Concurrent::Utility::NativeInteger::MAX_VALUE = T.let(T.unsafe(nil), Integer)
# http://stackoverflow.com/questions/535721/ruby-max-integer
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/native_integer.rb#7
Concurrent::Utility::NativeInteger::MIN_VALUE = T.let(T.unsafe(nil), Integer)
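# Computes logical and physical processor counts plus container CPU limits
# (cgroup quota/shares). The results are normally read through the delegating
# module methods `Concurrent.processor_count`, `Concurrent.physical_processor_count`,
# `Concurrent.available_processor_count`, `Concurrent.cpu_quota` and
# `Concurrent.cpu_shares`.
#
# @example A hedged sketch (figures assume a 4-core/8-thread host with no
#   cgroup CPU quota applied)
#   Concurrent.processor_count           #=> 8
#   Concurrent.physical_processor_count  #=> 4
#   Concurrent.available_processor_count #=> 8.0
#   Concurrent.cpu_quota                 #=> nil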
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#10
class Concurrent::Utility::ProcessorCounter
# @return [ProcessorCounter] a new instance of ProcessorCounter
#
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#11
def initialize; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#26
def available_processor_count; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#41
def cpu_quota; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#45
def cpu_shares; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#22
def physical_processor_count; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#18
def processor_count; end
private
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#104
def compute_cpu_quota; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#124
def compute_cpu_shares; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#59
def compute_physical_processor_count; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#51
def compute_processor_count; end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/utility/processor_counter.rb#99
def run(command); end
end
# source://concurrent-ruby//lib/concurrent-ruby/concurrent/version.rb#2
Concurrent::VERSION = T.let(T.unsafe(nil), String)