File: robots_disallowed_error.rb

Package: ruby-mechanize 2.7.6-1+deb10u1 (Debian buster, area: main)
# Exception that is raised when access to a resource is disallowed by
# robots.txt or by the HTML document itself.
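#
# Illustrative sketch (not part of the upstream file): with robots
# handling enabled, a caller would typically rescue this error around a
# fetch.  The host and path below are made up for the example.
#
#   agent = Mechanize.new
#   agent.robots = true
#   begin
#     agent.get("http://example.com/private/")
#   rescue Mechanize::RobotsDisallowedError => e
#     warn "Skipping #{e.url}: disallowed by robots.txt"
#   end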

class Mechanize::RobotsDisallowedError < Mechanize::Error
  # Accepts either a URI object or a URL string.  The string form is
  # always stored; when a URI object is given it is kept as well so
  # that #uri does not have to re-parse it.
  def initialize(url)
    @url = url.to_s
    @uri = url if url.is_a?(URI)
  end

  # Returns the URL (string) of the resource that caused this error.
  attr_reader :url

  # Returns the URL (URI object) of the resource that caused this
  # error.  URI::InvalidURIError may be raised if the URL happens to
  # be invalid or not understood by the URI library.
  def uri
    @uri ||= URI.parse(url)
  end
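
  # Illustrative sketch (not part of the upstream file): when the error
  # is built from a plain string, the URI object is only created on the
  # first call to #uri.  The URL below is made up for the example.
  #
  #   err = Mechanize::RobotsDisallowedError.new("http://example.com/secret")
  #   err.url  # => "http://example.com/secret"
  #   err.uri  # => #<URI::HTTP http://example.com/secret>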

  def to_s
    "Robots access is disallowed for URL: #{url}"
  end
  alias inspect to_s
end