mirror of
https://git.in.rschanz.org/ryan77627/guix.git
synced 2025-01-12 22:26:40 -05:00
gnu: Add perl-www-robotrules.
* gnu/packages/web.scm (perl-www-robotrules): New variable.
This commit is contained in:
parent
e7444ee273
commit
a3eb919fb4
1 changed file with 25 additions and 0 deletions
|
@ -295,3 +295,28 @@ (define-public perl-http-negotiate
|
|||
of the negotiable variants and the value of the various Accept* header
|
||||
fields in the request.")
|
||||
(home-page "http://search.cpan.org/~gaas/HTTP-Negotiate/")))
|
||||
|
||||
;; WWW::RobotRules parses /robots.txt files so that well-behaved robots
;; can check whether they are allowed to fetch a given URL.
(define-public perl-www-robotrules
  (package
    (name "perl-www-robotrules")
    (version "6.02")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "mirror://cpan/authors/id/G/GA/GAAS/WWW-RobotRules-"
                    version ".tar.gz"))
              (sha256
               (base32
                "07m50dp5n5jxv3m93i55qvnd67a6g7cvbvlik115kmc8lbkh5da6"))))
    (build-system perl-build-system)
    ;; URI is used by WWW::RobotRules at run time, so propagate it to make
    ;; it visible on PERL5LIB for packages depending on this one (plain
    ;; inputs of Perl packages are not seen by dependents).
    (propagated-inputs
     `(("perl-uri" ,perl-uri)))
    (synopsis "Perl database of robots.txt-derived permissions")
    (description
     "The WWW::RobotRules module parses /robots.txt files as specified in
\"A Standard for Robot Exclusion\", at
<http://www.robotstxt.org/wc/norobots.html>. Webmasters can use the
/robots.txt file to forbid conforming robots from accessing parts of
their web site.")
    (home-page "http://search.cpan.org/~gaas/WWW-RobotRules/")
    ;; Dual-licensed like Perl itself (GPL-1+ or Artistic).
    (license (package-license perl))))
|
||||
|
|
Loading…
Reference in a new issue