diff --git a/Makefile b/Makefile new file mode 100644 --- /dev/null +++ b/Makefile @@ -0,0 +1 @@ +include ../Makefile.common diff --git a/package.yml b/package.yml new file mode 100644 --- /dev/null +++ b/package.yml @@ -0,0 +1,18 @@ +name : perl-www-robotrules +version : 6.02 +release : 1 +source : + - https://cpan.metacpan.org/authors/id/G/GA/GAAS/WWW-RobotRules-6.02.tar.gz : 46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e +license : Artistic-1.0-Perl +component : programming.perl +summary : WWW::RobotRules - database of robots.txt-derived permissions +description: | + This module parses /robots.txt files as specified in "A Standard for Robot Exclusion", at <http://www.robotstxt.org/wc/norobots.html> Webmasters can use the /robots.txt file to forbid conforming robots from accessing parts of their web site. The parsed files are kept in a WWW::RobotRules object, and this object provides methods to check if access to a given URL is prohibited. The same WWW::RobotRules object can be used for one or more parsed /robots.txt files on any number of hosts. +rundeps : + - perl-uri +setup : | + %perl_setup +build : | + %perl_build +install : | + %perl_install diff --git a/pspec_x86_64.xml b/pspec_x86_64.xml new file mode 100644 --- /dev/null +++ b/pspec_x86_64.xml @@ -0,0 +1,37 @@ + + + perl-www-robotrules + + Joey Riches + josephriches@gmail.com + + Artistic-1.0-Perl + programming.perl + WWW::RobotRules - database of robots.txt-derived permissions + This module parses /robots.txt files as specified in "A Standard for Robot Exclusion", at <http://www.robotstxt.org/wc/norobots.html> Webmasters can use the /robots.txt file to forbid conforming robots from accessing parts of their web site. The parsed files are kept in a WWW::RobotRules object, and this object provides methods to check if access to a given URL is prohibited. The same WWW::RobotRules object can be used for one or more parsed /robots.txt files on any number of hosts. 
+ + https://solus-project.com/sources/README.Solus + + + perl-www-robotrules + WWW::RobotRules - database of robots.txt-derived permissions + This module parses /robots.txt files as specified in "A Standard for Robot Exclusion", at <http://www.robotstxt.org/wc/norobots.html> Webmasters can use the /robots.txt file to forbid conforming robots from accessing parts of their web site. The parsed files are kept in a WWW::RobotRules object, and this object provides methods to check if access to a given URL is prohibited. The same WWW::RobotRules object can be used for one or more parsed /robots.txt files on any number of hosts. + + programming.perl + + /usr/lib/perl5/vendor_perl/5.24.1/WWW/RobotRules.pm + /usr/lib/perl5/vendor_perl/5.24.1/WWW/RobotRules/AnyDBM_File.pm + /usr/lib/perl5/vendor_perl/5.24.1/x86_64-linux-thread-multi/auto/WWW/RobotRules/.packlist + /usr/share/man + + + + + 2017-05-29 + 6.02 + Packaging update + Joey Riches + josephriches@gmail.com + + + \ No newline at end of file