Update RBI files for nokogiri.
This commit is contained in:
parent
4107380062
commit
35e0b1bf85
@ -9829,8 +9829,211 @@ class WeakRef
|
||||
# RBI stub for WeakRef#initialize (per the hunk context, this sits inside
# `class WeakRef`); +orig+ is the object to be weakly referenced. Body is
# intentionally empty — behavior lives in the stdlib, this file only types it.
def initialize(orig); end
|
||||
end
|
||||
|
||||
# RBI type stub for WebRobots (webrobots gem): evaluates robots.txt rules on
# behalf of a crawler identified by +user_agent+. All bodies are intentionally
# empty — real behavior lives in the gem; this file exists for Sorbet typing.
class WebRobots
  # Whether +url+ may be fetched under the site's robots.txt rules.
  def allowed?(url); end

  # Crawl-delay value applying to +url+'s site, if declared.
  def crawl_delay(url); end

  def create_cache; end

  # Negation of #allowed?.
  def disallowed?(url); end

  # Last recorded robots.txt error for +url+'s site, if any.
  def error(url); end

  # Like #error but raising instead of returning — NOTE(review): inferred
  # from the bang-method convention; confirm against the gem's docs.
  def error!(url); end

  def flush_cache; end

  def initialize(user_agent, options = T.unsafe(nil)); end

  # Value of the extension field +token+ applying to +url+.
  def option(url, token); end

  # All extension fields applying to +url+.
  def options(url); end

  # Drops cached robots.txt state for +url+'s site.
  def reset(url); end

  # Sitemap URLs advertised by +url+'s robots.txt.
  def sitemaps(url); end

  # The User-Agent string this instance matches records against.
  def user_agent; end
end
|
||||
|
||||
# RBI stub for WebRobots::Error. Empty here; the superclass and behavior are
# defined in the webrobots gem itself.
class WebRobots::Error
end
|
||||
|
||||
# Duplicate re-open of WebRobots::Error — the RBI generator emits one entry
# per definition site; intentionally empty.
class WebRobots::Error
end
|
||||
|
||||
# RBI stub for WebRobots::ParseError — raised when a site's robots.txt fails
# to parse. Bodies are intentionally empty (typing only).
class WebRobots::ParseError
  # +message+ is the parser diagnostic; +site+ identifies the origin whose
  # robots.txt was being parsed.
  def initialize(message, site); end

  # The site the failing robots.txt belongs to.
  def site; end
end
|
||||
|
||||
# Duplicate re-open of WebRobots::ParseError emitted by the RBI generator;
# intentionally empty.
class WebRobots::ParseError
end
|
||||
|
||||
# RBI stub for WebRobots::RobotsTxt: the parsed form of one site's
# robots.txt, queried per user agent. Bodies are intentionally empty.
class WebRobots::RobotsTxt
  # Whether +request_uri+ is fetchable for +user_agent+.
  def allow?(request_uri, user_agent = T.unsafe(nil)); end

  # Crawl-delay applying to +user_agent+, if declared.
  def crawl_delay(user_agent = T.unsafe(nil)); end

  # Error recorded while fetching/parsing, if any.
  def error; end

  # Bang variant of #error — NOTE(review): presumably raises; confirm in gem.
  def error!; end

  def error=(error); end

  def initialize(site, records, options = T.unsafe(nil)); end

  # Extension fields applying to +user_agent+.
  def options(user_agent = T.unsafe(nil)); end

  def site; end

  def sitemaps; end

  def timestamp; end

  # RBI placeholder; the real value is defined in the gem.
  DISALLOW_ALL = ::T.let(nil, ::T.untyped)
end
|
||||
|
||||
# RBI stub for an Allow/Disallow rule line; body intentionally empty.
class WebRobots::RobotsTxt::AccessControlLine
  # Whether this line's path pattern matches +request_uri+.
  def match?(request_uri); end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::AccessControlLine
end
|
||||
|
||||
# RBI stub for a User-agent line; body intentionally empty.
class WebRobots::RobotsTxt::AgentLine
  # The compiled user-agent match pattern for this line.
  def pattern; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::AgentLine
end
|
||||
|
||||
# RBI stub for an Allow: line; body intentionally empty.
class WebRobots::RobotsTxt::AllowLine
  def allow?; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::AllowLine
end
|
||||
|
||||
# RBI stub for a Crawl-delay: line; body intentionally empty.
class WebRobots::RobotsTxt::CrawlDelayLine
  # The parsed delay value carried by this line.
  def delay; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::CrawlDelayLine
end
|
||||
|
||||
# RBI stub for a Disallow: line; body intentionally empty.
class WebRobots::RobotsTxt::DisallowLine
  def allow?; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::DisallowLine
end
|
||||
|
||||
# RBI stub for a non-standard ("extension") robots.txt field. NOTE(review):
# "Extention" is the upstream gem's own spelling — do not rename; callers
# reference this constant as-is.
class WebRobots::RobotsTxt::ExtentionLine
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::ExtentionLine
end
|
||||
|
||||
# RBI stub for the base class of robots.txt lines: a (token, value) pair.
# Bodies are intentionally empty (typing only).
class WebRobots::RobotsTxt::Line
  def compile; end

  # +token+ is the field name, +value+ its raw text.
  def initialize(token, value); end

  def token; end

  def value; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::Line
end
|
||||
|
||||
# RBI stub for the Racc-generated robots.txt parser. The numbered `_reduce_*`
# methods are machine-generated grammar reduction callbacks — the gaps in the
# numbering mirror rules that share `_reduce_none`. Bodies intentionally empty.
class WebRobots::RobotsTxt::Parser
  def _reduce_1(val, _values, result); end

  def _reduce_17(val, _values, result); end

  def _reduce_18(val, _values, result); end

  def _reduce_19(val, _values, result); end

  def _reduce_2(val, _values, result); end

  def _reduce_20(val, _values, result); end

  def _reduce_21(val, _values, result); end

  def _reduce_24(val, _values, result); end

  def _reduce_25(val, _values, result); end

  def _reduce_26(val, _values, result); end

  def _reduce_28(val, _values, result); end

  def _reduce_31(val, _values, result); end

  def _reduce_32(val, _values, result); end

  def _reduce_38(val, _values, result); end

  def _reduce_39(val, _values, result); end

  def _reduce_40(val, _values, result); end

  def _reduce_41(val, _values, result); end

  # Shared no-op reduction for rules with no semantic action.
  def _reduce_none(val, _values, result); end

  def initialize(target, crawl_delay_handler = T.unsafe(nil)); end

  # Racc error callback invoked on an unexpected token.
  def on_error(token_id, value, stack); end

  # Parses +input+ (robots.txt text) fetched from +site+.
  def parse(input, site); end

  # Bang variant of #parse — NOTE(review): presumably raises on failure;
  # confirm against the gem.
  def parse!(input, site); end

  def parse_error(message); end

  # RBI placeholders; real values are the Racc parser tables in the gem.
  KNOWN_TOKENS = ::T.let(nil, ::T.untyped)

  RE_KNOWN_TOKENS = ::T.let(nil, ::T.untyped)

  Racc_arg = ::T.let(nil, ::T.untyped)

  Racc_debug_parser = ::T.let(nil, ::T.untyped)

  Racc_token_to_s_table = ::T.let(nil, ::T.untyped)
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::Parser
end
|
||||
|
||||
# RBI stub for one robots.txt record: a group of agent lines plus the rule
# lines that apply to the matched agents. Bodies intentionally empty.
class WebRobots::RobotsTxt::Record
  # Whether this record permits fetching +request_uri+.
  def allow?(request_uri); end

  # Whether this is the catch-all (`User-agent: *`) record.
  def default?; end

  def delay; end

  def initialize(agentlines, rulelines); end

  # Whether any of this record's agent lines matches +user_agent+.
  def match?(user_agent); end

  def options; end
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
class WebRobots::RobotsTxt::Record
end
|
||||
|
||||
# Re-open of WebRobots::RobotsTxt for its singleton method.
class WebRobots::RobotsTxt
  # Factory for a RobotsTxt representing a site whose robots.txt could not be
  # fetched, recording +reason+. Body intentionally empty (typing only).
  def self.unfetchable(site, reason, target = T.unsafe(nil)); end
end
|
||||
|
||||
# Duplicate re-open of WebRobots emitted by the RBI generator; intentionally
# empty.
class WebRobots
end
|
||||
|
||||
# RBI stub for the Webrobots namespace module (distinct from the WebRobots
# class above — note the casing).
module Webrobots
  # RBI placeholder; the real gem version string is defined in the gem.
  VERSION = ::T.let(nil, ::T.untyped)
end
|
||||
|
||||
# Duplicate re-open emitted by the RBI generator; intentionally empty.
module Webrobots
end
|
||||
|
||||
class Zlib::Deflate
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user