# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `webrobots` gem.
# Please instead update this file by running `tapioca generate --exclude json`.

# typed: true

class Nokogiri::HTML::Document < ::Nokogiri::XML::Document
  def fragment(tags = T.unsafe(nil)); end
  def meta_encoding; end
  def meta_encoding=(encoding); end
  def meta_robots(custom_name = T.unsafe(nil)); end
  def nofollow?(custom_name = T.unsafe(nil)); end
  def noindex?(custom_name = T.unsafe(nil)); end
  def serialize(options = T.unsafe(nil)); end
  def title; end
  def title=(text); end
  def type; end

  private

  def meta_content_type; end
  def parse_meta_robots(custom_name); end
  def set_metadata_element(element); end

  class << self
    def new(*_); end
    def parse(string_or_io, url = T.unsafe(nil), encoding = T.unsafe(nil), options = T.unsafe(nil)); end
    def read_io(_, _, _, _); end
    def read_memory(_, _, _, _); end
  end
end
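
# Usage sketch for the meta-robots helpers stubbed above (illustrative, not
# part of the generated stubs; the require path refers to the webrobots gem's
# optional Nokogiri integration, and the HTML input is an assumption):
#
#   require 'webrobots/nokogiri'
#
#   doc = Nokogiri::HTML('<html><head><meta name="robots" content="noindex, nofollow"></head></html>')
#   doc.meta_robots # => parsed robots meta directives
#   doc.noindex?    # => true
#   doc.nofollow?   # => true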

class WebRobots
  def initialize(user_agent, options = T.unsafe(nil)); end

  def allowed?(url); end
  def crawl_delay(url); end
  def create_cache; end
  def disallowed?(url); end
  def error(url); end
  def error!(url); end
  def flush_cache; end
  def option(url, token); end
  def options(url); end
  def reset(url); end
  def sitemaps(url); end
  def user_agent; end

  private

  def crawl_delay_handler(delay, last_checked_at); end
  def fetch_robots_txt(site); end
  def get_robots_txt(site); end
  def http_get(uri); end
  def robots_txt_for(url); end
  def split_uri(url); end
end
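
# Usage sketch for the WebRobots client above (illustrative; the user agent
# string and URLs are assumptions, not taken from the stubs):
#
#   robots = WebRobots.new('MyBot/1.0')
#
#   if robots.allowed?('https://example.com/some/page')
#     # fetch the page here
#   end
#
#   robots.crawl_delay('https://example.com/') # => per-site Crawl-delay, if any
#   robots.sitemaps('https://example.com/')    # => Sitemap URLs listed in robots.txt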

class WebRobots::Error < ::StandardError
end

class WebRobots::ParseError < ::WebRobots::Error
  def initialize(message, site); end

  def site; end
  def to_s; end
end
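
# Error-handling sketch (illustrative): WebRobots#error! re-raises any parse
# error recorded for a site's robots.txt, so callers can surface it.
#
#   begin
#     robots.error!('https://example.com/')
#   rescue WebRobots::ParseError => e
#     warn "robots.txt for #{e.site} failed to parse: #{e}"
#   end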

class WebRobots::RobotsTxt
  def initialize(site, records, options = T.unsafe(nil)); end

  def allow?(request_uri, user_agent = T.unsafe(nil)); end
  def crawl_delay(user_agent = T.unsafe(nil)); end
  def error; end
  def error!; end
  def error=(_); end
  def options(user_agent = T.unsafe(nil)); end
  def site; end
  def sitemaps; end
  def timestamp; end

  private

  def find_record(user_agent = T.unsafe(nil)); end
  def target(user_agent = T.unsafe(nil)); end

  class << self
    def unfetchable(site, reason, target = T.unsafe(nil)); end
  end
end
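
# RobotsTxt models one site's parsed robots.txt; instances are normally
# obtained through WebRobots rather than built directly. A plausible reading
# of the API above (an assumption, not confirmed by the stubs alone):
# `unfetchable` builds a stand-in record for sites whose robots.txt could not
# be retrieved, using the DISALLOW_ALL ruleset defined below.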

class WebRobots::RobotsTxt::AccessControlLine < ::WebRobots::RobotsTxt::Line
  def compile; end
  def match?(request_uri); end
end

class WebRobots::RobotsTxt::AgentLine < ::WebRobots::RobotsTxt::Line
  def compile; end
  def pattern; end
end

class WebRobots::RobotsTxt::AllowLine < ::WebRobots::RobotsTxt::AccessControlLine
  def allow?; end
end

class WebRobots::RobotsTxt::CrawlDelayLine < ::WebRobots::RobotsTxt::Line
  def compile; end
  def delay; end
end

WebRobots::RobotsTxt::DISALLOW_ALL = T.let(T.unsafe(nil), String)

class WebRobots::RobotsTxt::DisallowLine < ::WebRobots::RobotsTxt::AccessControlLine
  def allow?; end
end
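
# NOTE: "ExtentionLine" (sic) below mirrors the misspelled class name exported
# by the webrobots gem itself; the autogenerated stub keeps the original
# identifier and must not be corrected here.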

class WebRobots::RobotsTxt::ExtentionLine < ::WebRobots::RobotsTxt::Line
end

class WebRobots::RobotsTxt::Line
  def initialize(token, value); end

  def compile; end
  def token; end
  def value; end
end

class WebRobots::RobotsTxt::Parser < ::Racc::Parser
  def initialize(target, crawl_delay_handler = T.unsafe(nil)); end

  def _reduce_1(val, _values, result); end
  def _reduce_17(val, _values, result); end
  def _reduce_18(val, _values, result); end
  def _reduce_19(val, _values, result); end
  def _reduce_2(val, _values, result); end
  def _reduce_20(val, _values, result); end
  def _reduce_21(val, _values, result); end
  def _reduce_24(val, _values, result); end
  def _reduce_25(val, _values, result); end
  def _reduce_26(val, _values, result); end
  def _reduce_28(val, _values, result); end
  def _reduce_31(val, _values, result); end
  def _reduce_32(val, _values, result); end
  def _reduce_38(val, _values, result); end
  def _reduce_39(val, _values, result); end
  def _reduce_40(val, _values, result); end
  def _reduce_41(val, _values, result); end
  def _reduce_none(val, _values, result); end
  def next_token; end
  def on_error(token_id, value, stack); end
  def parse(input, site); end
  def parse!(input, site); end
  def parse_error(message); end
end
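
# The _reduce_* methods above are grammar reduction actions generated by Racc
# (the parser's superclass); they are internal to the robots.txt parser and
# not meant to be called directly.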

WebRobots::RobotsTxt::Parser::KNOWN_TOKENS = T.let(T.unsafe(nil), Array)

WebRobots::RobotsTxt::Parser::RE_KNOWN_TOKENS = T.let(T.unsafe(nil), Regexp)

WebRobots::RobotsTxt::Parser::Racc_arg = T.let(T.unsafe(nil), Array)

WebRobots::RobotsTxt::Parser::Racc_token_to_s_table = T.let(T.unsafe(nil), Array)

class WebRobots::RobotsTxt::Record
  def initialize(agentlines, rulelines); end

  def allow?(request_uri); end
  def default?; end
  def delay; end
  def match?(user_agent); end
  def options; end
end

module Webrobots
end

Webrobots::VERSION = T.let(T.unsafe(nil), String)