Hello community,

here is the log from the commit of package perl-WWW-RobotRules for openSUSE:Factory checked in at 2012-03-01 17:25:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-WWW-RobotRules (Old)
 and      /work/SRC/openSUSE:Factory/.perl-WWW-RobotRules.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "perl-WWW-RobotRules", Maintainer is ""

Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-WWW-RobotRules/perl-WWW-RobotRules.changes 2011-09-23 12:39:23.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.perl-WWW-RobotRules.new/perl-WWW-RobotRules.changes    2012-03-01 17:25:34.000000000 +0100
@@ -1,0 +2,6 @@
+Mon Feb 20 10:48:55 UTC 2012 - co...@suse.com
+
+- updated to 6.02
+ * Restore perl-5.8.1 compatibility.
+
+-------------------------------------------------------------------

Old:
----
  WWW-RobotRules-6.01.tar.gz

New:
----
  WWW-RobotRules-6.02.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ perl-WWW-RobotRules.spec ++++++
--- /var/tmp/diff_new_pack.JbuBZ9/_old  2012-03-01 17:25:38.000000000 +0100
+++ /var/tmp/diff_new_pack.JbuBZ9/_new  2012-03-01 17:25:38.000000000 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package perl-WWW-RobotRules
 #
-# Copyright (c) 2011 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2012 SUSE LINUX Products GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -16,25 +16,23 @@
 #
 
 
-
 Name:           perl-WWW-RobotRules
-Version:        6.01
-Release:        1
-License:        GPL-1.0+ or Artistic-1.0
+Version:        6.02
+Release:        0
 %define cpan_name WWW-RobotRules
 Summary:        database of robots.txt-derived permissions
-Url:            http://search.cpan.org/dist/WWW-RobotRules/
+License:        Artistic-1.0 or GPL-1.0+
 Group:          Development/Libraries/Perl
-#Source:         http://www.cpan.org/authors/id/G/GA/GAAS/WWW-RobotRules-%{version}.tar.gz
-Source:         %{cpan_name}-%{version}.tar.gz
-BuildRequires:  perl(Fcntl)
-BuildRequires:  perl(URI) >= 1.10
+Url:            http://search.cpan.org/dist/WWW-RobotRules/
+Source:         http://www.cpan.org/authors/id/G/GA/GAAS/%{cpan_name}-%{version}.tar.gz
+BuildArch:      noarch
+BuildRoot:      %{_tmppath}/%{name}-%{version}-build
 BuildRequires:  perl
 BuildRequires:  perl-macros
-Requires:       perl(Fcntl)
+BuildRequires:  perl(URI) >= 1.10
+#BuildRequires: perl(WWW::RobotRules)
+#BuildRequires: perl(WWW::RobotRules::AnyDBM_File)
 Requires:       perl(URI) >= 1.10
-BuildRoot:      %{_tmppath}/%{name}-%{version}-build
-BuildArch:      noarch
 %{perl_requires}
 
 %description
@@ -43,9 +41,35 @@
 can use the _/robots.txt_ file to forbid conforming robots from accessing
 parts of their web site.
 
+The parsed files are kept in a WWW::RobotRules object, and this object
+provides methods to check if access to a given URL is prohibited. The same
+WWW::RobotRules object can be used for one or more parsed _/robots.txt_
+files on any number of hosts.
+
+The following methods are provided:
+
+* $rules = WWW::RobotRules->new($robot_name)
+
+  This is the constructor for WWW::RobotRules objects. The first argument
+  given to new() is the name of the robot.
+
+* $rules->parse($robot_txt_url, $content, $fresh_until)
+
+  The parse() method takes as arguments the URL that was used to retrieve
+  the _/robots.txt_ file, and the contents of the file.
+
+* $rules->allowed($uri)
+
+  Returns TRUE if this robot is allowed to retrieve this URL.
+
+* $rules->agent([$name])
+
+  Get/set the agent name. NOTE: Changing the agent name will clear the
+  robots.txt rules and expire times out of the cache.
 
 %prep
 %setup -q -n %{cpan_name}-%{version}
+find . -type f -print0 | xargs -0 chmod 644
 
 %build
 %{__perl} Makefile.PL INSTALLDIRS=vendor
@@ -59,11 +83,8 @@
 %perl_process_packlist
 %perl_gen_filelist
 
-%clean
-%{__rm} -rf %{buildroot}
-
 %files -f %{name}.files
-%defattr(644,root,root,755)
+%defattr(-,root,root,755)
 %doc Changes README
 
 %changelog
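
As a usage illustration of the methods listed in the new %description above (new, parse, allowed and agent), here is a minimal sketch. It is not part of this package or the diff: LWP::Simple, the robot name ExampleBot/1.0 and the example.com URLs are assumptions chosen purely for illustration.

  #!/usr/bin/perl
  use strict;
  use warnings;
  use WWW::RobotRules;
  use LWP::Simple qw(get);

  # Create a rules object for a (hypothetical) robot name.
  my $rules = WWW::RobotRules->new('ExampleBot/1.0');

  # Fetch and parse the robots.txt of the host to be visited.
  my $robots_url = 'http://www.example.com/robots.txt';
  my $robots_txt = get($robots_url);
  $rules->parse($robots_url, $robots_txt) if defined $robots_txt;

  # Consult the parsed rules before retrieving a URL on that host.
  my $url = 'http://www.example.com/some/page.html';
  if ($rules->allowed($url)) {
      my $content = get($url);
      # ... process $content here ...
  }

  # agent() without an argument returns the current robot name;
  # setting a new name clears the cached rules, as noted above.
  print $rules->agent(), "\n";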

++++++ WWW-RobotRules-6.01.tar.gz -> WWW-RobotRules-6.02.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-RobotRules-6.01/Changes new/WWW-RobotRules-6.02/Changes
--- old/WWW-RobotRules-6.01/Changes     2011-03-13 16:08:40.000000000 +0100
+++ new/WWW-RobotRules-6.02/Changes     2012-02-18 14:07:15.000000000 +0100
@@ -1,4 +1,11 @@
 _______________________________________________________________________________
+2012-02-18 WWW-RobotRules 6.02
+
+Restore perl-5.8.1 compatibility.
+
+
+
+_______________________________________________________________________________
 2011-03-13 WWW-RobotRules 6.01
 
 Added legal notice and updated the meta repository link
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-RobotRules-6.01/META.yml new/WWW-RobotRules-6.02/META.yml
--- old/WWW-RobotRules-6.01/META.yml    2011-03-13 16:08:54.000000000 +0100
+++ new/WWW-RobotRules-6.02/META.yml    2012-02-18 14:07:29.000000000 +0100
@@ -1,6 +1,6 @@
 --- #YAML:1.0
 name:               WWW-RobotRules
-version:            6.01
+version:            6.02
 abstract:           database of robots.txt-derived permissions
 author:
     - Gisle Aas <gi...@activestate.com>
@@ -13,16 +13,16 @@
 requires:
     AnyDBM_File:  0
     Fcntl:        0
-    perl:         5.008008
+    perl:         5.008001
     URI:          1.10
 resources:
     MailingList:  mailto:lib...@perl.org
-    repository:   http://github.com/gisle/libwww-perl/tree/WWW-RobotRules/master
+    repository:   http://github.com/gisle/www-robotrules
 no_index:
     directory:
         - t
         - inc
-generated_by:       ExtUtils::MakeMaker version 6.56
+generated_by:       ExtUtils::MakeMaker version 6.57_05
 meta-spec:
     url:      http://module-build.sourceforge.net/META-spec-v1.4.html
     version:  1.4
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-RobotRules-6.01/Makefile.PL new/WWW-RobotRules-6.02/Makefile.PL
--- old/WWW-RobotRules-6.01/Makefile.PL 2011-03-13 16:08:36.000000000 +0100
+++ new/WWW-RobotRules-6.02/Makefile.PL 2012-02-18 13:57:41.000000000 +0100
@@ -1,6 +1,6 @@
 #!perl -w
 
-require 5.008008;
+require 5.008001;
 use strict;
 use ExtUtils::MakeMaker;
 
@@ -10,7 +10,7 @@
     ABSTRACT_FROM => 'lib/WWW/RobotRules.pm',
     AUTHOR => 'Gisle Aas <gi...@activestate.com>',
     LICENSE => "perl",
-    MIN_PERL_VERSION => 5.008008,
+    MIN_PERL_VERSION => 5.008001,
     PREREQ_PM => {
         'AnyDBM_File' => 0,
         'Fcntl' => 0,
@@ -18,7 +18,7 @@
     },
     META_MERGE => {
        resources => {
-            repository => 'http://github.com/gisle/libwww-perl/tree/WWW-RobotRules/master',
+            repository => 'http://github.com/gisle/www-robotrules',
            MailingList => 'mailto:lib...@perl.org',
         }
     },
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-RobotRules-6.01/lib/WWW/RobotRules.pm new/WWW-RobotRules-6.02/lib/WWW/RobotRules.pm
--- old/WWW-RobotRules-6.01/lib/WWW/RobotRules.pm       2011-03-13 16:08:40.000000000 +0100
+++ new/WWW-RobotRules-6.02/lib/WWW/RobotRules.pm       2012-02-18 14:05:13.000000000 +0100
@@ -1,6 +1,6 @@
 package WWW::RobotRules;
 
-$VERSION = "6.01";
+$VERSION = "6.02";
 sub Version { $VERSION; }
 
 use strict;

-- 
To unsubscribe, e-mail: opensuse-commit+unsubscr...@opensuse.org
For additional commands, e-mail: opensuse-commit+h...@opensuse.org
