Robotxt v0.1.3

Robots.txt parser.

Installation

Add the following to the deps section of your mix.exs file.

defp deps do
  [
    {:robotxt, "~> 0.1.3"},
  ]
end
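
Then run mix deps.get to fetch the dependency.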

Summary

Functions

get_by_user_agent/2
Returns the %Robotxt{} for the given user_agent if it exists. Otherwise, nil is returned.

parse/1
Returns a list of %Robotxt{}.

Types

Specs

t() :: %Robotxt{
  allow: list() | [binary()],
  disallow: list() | [binary()],
  sitemap: nil | binary(),
  user_agent: nil | binary()
}
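
Judging from the doctests below, every field has a default: empty lists for allow and disallow, nil for sitemap and user_agent. A minimal sketch of building the struct directly:

iex> %Robotxt{}
%Robotxt{user_agent: nil, allow: [], disallow: [], sitemap: nil}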

Functions

get_by_user_agent(list, user_agent)

Specs

get_by_user_agent(
  [
    %Robotxt{
      allow: term(),
      disallow: term(),
      sitemap: term(),
      user_agent: term()
    }
  ],
  binary()
) ::
  %Robotxt{allow: term(), disallow: term(), sitemap: term(), user_agent: term()}
  | nil

Returns the %Robotxt{} for the given user_agent if it exists. Otherwise, nil is returned.

Example

iex> Robotxt.get_by_user_agent([%Robotxt{user_agent: "Twitterbot"}], "Twitterbot")
%Robotxt{user_agent: "Twitterbot", disallow: [], allow: [], sitemap: nil}

iex> Robotxt.get_by_user_agent([%Robotxt{user_agent: "Twitterbot"}], "nope")
nil
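
The two functions compose naturally: parse a robots.txt body, then look up the group for a particular crawler. A minimal sketch, where the body string is illustrative (in practice it would come from an HTTP fetch, which is outside this library):

body = "User-agent: Twitterbot\nDisallow: /private\n"

rules =
  body
  |> Robotxt.parse()
  |> Robotxt.get_by_user_agent("Twitterbot")

# rules is the %Robotxt{} for "Twitterbot", or nil if no group matched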

parse/1

Specs

parse(binary()) :: [
  %Robotxt{allow: term(), disallow: term(), sitemap: term(), user_agent: term()}
]

Parses the given robots.txt content and returns a list of %Robotxt{}.

Example

iex> Robotxt.parse("User-agent: *\nDisallow:\n")
[%Robotxt{user_agent: "*", allow: [], disallow: [""], sitemap: nil}]

iex> Robotxt.parse("user-agent: *\ndisallow:\n")
[%Robotxt{user_agent: "*", allow: [], disallow: [""], sitemap: nil}]