Hello community,
here is the log from the commit of package perl-libwww-perl for openSUSE:Factory
checked in at Thu Jan 14 17:09:03 CET 2010.
--------
--- perl-libwww-perl/perl-libwww-perl.changes 2009-08-04 18:49:09.000000000 +0200
+++ /mounts/work_src_done/STABLE/perl-libwww-perl/perl-libwww-perl.changes 2010-01-13 17:29:20.000000000 +0100
@@ -1,0 +2,33 @@
+Wed Jan 13 16:40:12 CET 2010 - anicka@suse.cz
+
+- update to 5.834
+ * Check for sane default_headers argument [RT#50393]
+ * Add $ua->local_address attribute [RT#40912]
+ * Test that generation of boundary works [RT#49396]
+ * Page does not display the "standard" apache listing any more
+ * Remove unneeded executable permissions.
+ * Switch compression/decompression to use the IO::Compress/IO::Uncompress and
+ * Compress::Raw::Zlib family of modules.
+ * lwp-request should use stderr for auth [RT#21620]
+ * Deal with cookies that expire far into the future [RT#50147]
+ * Deal with cookies that expire at or before epoch [RT#49467]
+ * Pass separate type for https to LWP::ConnCache [RT#48899]
+ * Improved handling of the User-Agent header [RT#48461]
+ * HTTP::Cookies add_cookie_header previous Cookies [RT#46106]
+ * Improve diagnostics from LWP::UserAgent::mirror [RT#48869]
+ * mirror should die in case X-Died is set [RT#48236]
+ * Increase default Net::HTTP max line length to 8k.
+ * Fix net test suite.
+ * Comment spelling fixes.
+ * Fix links to old Netscape cookie specification.
+ * Documentation spelling fixes.
+ * Improve max line length exceeded/read error messages.
+ * Do not warn about seemingly wellformed but unrecognized robots.txt lines.
+ * $mess->content_charset would fail for empty content
+ * Further restrict what variables env_proxy() process
+ * Fix bzip2 content encoding/decoding.
+ * send_te() doc grammar fix.
+ * Document time2str() behavior with an undefined argument.
+ * HTML::Message's content_charset trigger warnings from HTML::Parser [RT#48621]
+
+-------------------------------------------------------------------
calling whatdependson for head-i586
Old:
----
libwww-perl-5.830.tar.bz2
New:
----
libwww-perl-5.834.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ perl-libwww-perl.spec ++++++
--- /var/tmp/diff_new_pack.MbDZSA/_old 2010-01-14 17:00:16.000000000 +0100
+++ /var/tmp/diff_new_pack.MbDZSA/_new 2010-01-14 17:00:16.000000000 +0100
@@ -1,7 +1,7 @@
#
-# spec file for package perl-libwww-perl (Version 5.830)
+# spec file for package perl-libwww-perl (Version 5.834)
#
-# Copyright (c) 2009 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2010 SUSE LINUX Products GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -21,7 +21,7 @@
Name: perl-libwww-perl
%define cpan_name %( echo %{name} | %{__sed} -e 's,perl-,,' )
Summary: The World-Wide Web library for Perl
-Version: 5.830
+Version: 5.834
Release: 1
AutoReqProv: on
License: Artistic License ..
++++++ libwww-perl-5.830.tar.bz2 -> libwww-perl-5.834.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/Changes new/libwww-perl-5.834/Changes
--- old/libwww-perl-5.830/Changes 2009-07-26 21:35:21.000000000 +0200
+++ new/libwww-perl-5.834/Changes 2009-11-21 14:03:02.000000000 +0100
@@ -1,4 +1,79 @@
_______________________________________________________________________________
+2009-11-21 Release 5.834
+
+Gisle Aas (4):
+ Check for sane default_headers argument [RT#50393]
+ Add $ua->local_address attribute [RT#40912]
+ Test that generation of boundary works [RT#49396]
+ Page does not display the "standard" apache listing any more
+
+Ville Skyttä (2):
+ Remove unneeded executable permissions.
+ Switch compression/decompression to use the IO::Compress/IO::Uncompress and
+ Compress::Raw::Zlib family of modules.
+
+Slaven Rezic (1):
+ lwp-request should use stderr for auth [RT#21620]
+
+
+
+_______________________________________________________________________________
+2009-10-06 Release 5.833
+
+
+Gisle Aas (5):
+ Deal with cookies that expire far into the future [RT#50147]
+ Deal with cookies that expire at or before epoch [RT#49467]
+ Pass separate type for https to LWP::ConnCache [RT#48899]
+ Improved handling of the User-Agent header [RT#48461]
+ HTTP::Cookies add_cookie_header previous Cookies [RT#46106]
+
+Andreas J. Koenig (1):
+ Improve diagnostics from LWP::UserAgent::mirror [RT#48869]
+
+Slaven Rezic (1):
+ mirror should die in case X-Died is set [RT#48236]
+
+Ville Skyttä (1):
+ Increase default Net::HTTP max line length to 8k.
+
+
+
+_______________________________________________________________________________
+2009-09-21 Release 5.832
+
+
+Ville Skyttä (6):
+ Fix net test suite.
+ Comment spelling fixes.
+ Fix links to old Netscape cookie specification.
+ Documentation spelling fixes.
+ Improve max line length exceeded/read error messages.
+ Do not warn about seemingly wellformed but unrecognized robots.txt lines.
+
+Gisle Aas (1):
+ $mess->content_charset would fail for empty content
+
+mschilli (1):
+ Further restrict what variables env_proxy() process
+
+
+
+_______________________________________________________________________________
+2009-08-13 Release 5.831
+
+
+Ville Skyttä (3):
+ Fix bzip2 content encoding/decoding.
+ send_te() doc grammar fix.
+ Document time2str() behavior with an undefined argument.
+
+Gisle Aas (1):
+ HTML::Message's content_charset trigger warnings from HTML::Parser [RT#48621]
+
+
+
+_______________________________________________________________________________
2009-07-26 Release 5.830
Gisle Aas (1):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/META.yml new/libwww-perl-5.834/META.yml
--- old/libwww-perl-5.830/META.yml 2009-07-26 21:36:39.000000000 +0200
+++ new/libwww-perl-5.834/META.yml 2009-11-21 14:04:36.000000000 +0100
@@ -1,6 +1,6 @@
--- #YAML:1.0
name: libwww-perl
-version: 5.830
+version: 5.834
abstract: The World-Wide Web library for Perl
author:
- Gisle Aas
@@ -8,17 +8,20 @@
distribution_type: module
configure_requires:
ExtUtils::MakeMaker: 0
-build_requires:
- ExtUtils::MakeMaker: 0
requires:
- Compress::Zlib: 1.10
- Digest::MD5: 0
- HTML::Parser: 3.33
- HTML::Tagset: 0
- MIME::Base64: 2.1
- Net::FTP: 2.58
- perl: 5.006
- URI: 1.10
+ Compress::Raw::Zlib: 0
+ Digest::MD5: 0
+ HTML::Parser: 3.33
+ HTML::Tagset: 0
+ IO::Compress::Deflate: 0
+ IO::Compress::Gzip: 0
+ IO::Uncompress::Gunzip: 0
+ IO::Uncompress::Inflate: 0
+ IO::Uncompress::RawInflate: 0
+ MIME::Base64: 2.1
+ Net::FTP: 2.58
+ perl: 5.006
+ URI: 1.10
resources:
MailingList: mailto:libwww@perl.org
repository: http://gitorious.org/projects/libwww-perl
@@ -26,7 +29,7 @@
directory:
- t
- inc
-generated_by: ExtUtils::MakeMaker version 6.55_02
+generated_by: ExtUtils::MakeMaker version 6.4801
meta-spec:
url: http://module-build.sourceforge.net/META-spec-v1.4.html
version: 1.4
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/Makefile.PL new/libwww-perl-5.834/Makefile.PL
--- old/libwww-perl-5.830/Makefile.PL 2009-06-15 20:11:24.000000000 +0200
+++ new/libwww-perl-5.834/Makefile.PL 2009-11-15 08:37:14.000000000 +0100
@@ -46,7 +46,12 @@
'HTML::Tagset' => 0,
'HTML::Parser' => "3.33",
'Digest::MD5' => 0,
- 'Compress::Zlib' => "1.10",
+ 'Compress::Raw::Zlib' => 0,
+ 'IO::Compress::Gzip' => 0,
+ 'IO::Compress::Deflate' => 0,
+ 'IO::Uncompress::Gunzip' => 0,
+ 'IO::Uncompress::Inflate' => 0,
+ 'IO::Uncompress::RawInflate' => 0,
},
META_MERGE => {
recommends => {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/bin/lwp-request new/libwww-perl-5.834/bin/lwp-request
--- old/libwww-perl-5.830/bin/lwp-request 2009-06-15 20:22:58.000000000 +0200
+++ new/libwww-perl-5.834/bin/lwp-request 2009-11-21 13:57:41.000000000 +0100
@@ -180,7 +180,7 @@
$progname =~ s,.*[\\/],,; # use basename only
$progname =~ s/\.\w*$//; # strip extension, if any
-$VERSION = "5.827";
+$VERSION = "5.834";
require LWP;
@@ -231,15 +231,15 @@
}
elsif (-t) {
my $netloc = $uri->host_port;
- print "Enter username for $realm at $netloc: ";
+ print STDERR "Enter username for $realm at $netloc: ";
my $user = <STDIN>;
chomp($user);
return (undef, undef) unless length $user;
- print "Password: ";
+ print STDERR "Password: ";
system("stty -echo");
my $password = <STDIN>;
system("stty echo");
- print "\n"; # because we disabled echo
+ print STDERR "\n"; # because we disabled echo
chomp($password);
return ($user, $password);
}
@@ -362,7 +362,7 @@
die "$progname: Illegal Content-type format\n"
unless $options{'c'} =~ m,^[\w\-]+/[\w\-.+]+(?:\s*;.*)?$,
}
- print "Please enter content ($options{'c'}) to be ${method}ed:\n"
+ print STDERR "Please enter content ($options{'c'}) to be ${method}ed:\n"
if -t;
binmode STDIN unless -t or $options{'a'};
$content = join("", <STDIN>);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/HTTP/Cookies/Netscape.pm new/libwww-perl-5.834/lib/HTTP/Cookies/Netscape.pm
--- old/libwww-perl-5.830/lib/HTTP/Cookies/Netscape.pm 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/lib/HTTP/Cookies/Netscape.pm 2009-10-03 17:14:09.000000000 +0200
@@ -3,7 +3,7 @@
use strict;
use vars qw(@ISA $VERSION);
-$VERSION = "5.822";
+$VERSION = "5.832";
require HTTP::Cookies;
@ISA=qw(HTTP::Cookies);
@@ -43,6 +43,8 @@
local(*FILE, $_);
open(FILE, ">$file") || return;
+ # Use old, now broken link to the old cookie spec just in case something
+ # else (not us!) requires the comment block exactly this way.
print FILE <http://www.netscape.com/newsref/std/cookie_spec.html
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/HTTP/Cookies.pm new/libwww-perl-5.834/lib/HTTP/Cookies.pm
--- old/libwww-perl-5.830/lib/HTTP/Cookies.pm 2009-06-15 20:24:21.000000000 +0200
+++ new/libwww-perl-5.834/lib/HTTP/Cookies.pm 2009-10-06 23:14:56.000000000 +0200
@@ -1,11 +1,11 @@
package HTTP::Cookies;
use strict;
-use HTTP::Date qw(str2time time2str);
+use HTTP::Date qw(str2time parse_date time2str);
use HTTP::Headers::Util qw(_split_header_words join_header_words);
use vars qw($VERSION $EPOCH_OFFSET);
-$VERSION = "5.827";
+$VERSION = "5.833";
# Legacy: because "use "HTTP::Cookies" used be the ONLY way
# to load the class HTTP::Cookies::Netscape.
@@ -89,7 +89,7 @@
if ($port) {
my $found;
if ($port =~ s/^_//) {
- # The correponding Set-Cookie attribute was empty
+ # The corresponding Set-Cookie attribute was empty
$found++ if $port eq $req_port;
$port = "";
}
@@ -160,7 +160,12 @@
}
}
- $request->header(Cookie => join("; ", @cval)) if @cval;
+ if (@cval) {
+ if (my $old = $request->header("Cookie")) {
+ unshift(@cval, $old);
+ }
+ $request->header(Cookie => join("; ", @cval));
+ }
$request;
}
@@ -186,7 +191,7 @@
if (@ns_set) {
# The old Netscape cookie format for Set-Cookie
- # http://wp.netscape.com/newsref/std/cookie_spec.html
+ # http://curl.haxx.se/rfc/cookie_spec.html
# can for instance contain an unquoted "," in the expires
# field, so we have to use this ad-hoc parser.
my $now = time();
@@ -219,10 +224,27 @@
}
if (!$first_param && lc($k) eq "expires") {
my $etime = str2time($v);
- if ($etime) {
- push(@cur, "Max-Age" => str2time($v) - $now);
+ if (defined $etime) {
+ push(@cur, "Max-Age" => $etime - $now);
$expires++;
}
+ else {
+ # parse_date can deal with years outside the range of time_t,
+ my($year, $mon, $day, $hour, $min, $sec, $tz) = parse_date($v);
+ if ($year) {
+ my $thisyear = (gmtime)[5] + 1900;
+ if ($year < $thisyear) {
+ push(@cur, "Max-Age" => -1); # any negative value will do
+ $expires++;
+ }
+ elsif ($year >= $thisyear + 10) {
+ # the date is at least 10 years into the future, just replace
+ # it with something approximate
+ push(@cur, "Max-Age" => 10 * 365 * 24 * 60 * 60);
+ $expires++;
+ }
+ }
+ }
}
elsif (!$first_param && lc($k) =~ /^(?:version|discard|ns-cookie)/) {
# ignore
@@ -261,7 +283,7 @@
if ($k eq "discard" || $k eq "secure") {
$v = 1 unless defined $v;
}
- next if exists $hash{$k}; # only first value is signigicant
+ next if exists $hash{$k}; # only first value is significant
$hash{$k} = $v;
};
@@ -611,7 +633,7 @@
Cookies are a general mechanism which server side connections can use
to both store and retrieve information on the client side of the
connection. For more information about cookies refer to
-URL:http://wp.netscape.com/newsref/std/cookie_spec.html and
+URL:http://curl.haxx.se/rfc/cookie_spec.html and
URL:http://www.cookiecentral.com/. This module also implements the
new style cookies described in I.
The two variants of cookies are supposed to be able to coexist happily.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/HTTP/Date.pm new/libwww-perl-5.834/lib/HTTP/Date.pm
--- old/libwww-perl-5.830/lib/HTTP/Date.pm 2008-04-11 20:59:19.000000000 +0200
+++ new/libwww-perl-5.834/lib/HTTP/Date.pm 2009-10-03 17:14:09.000000000 +0200
@@ -1,6 +1,6 @@
package HTTP::Date;
-$VERSION = "5.810";
+$VERSION = "5.831";
require 5.004;
require Exporter;
@@ -283,8 +283,8 @@
=item time2str( [$time] )
The time2str() function converts a machine time (seconds since epoch)
-to a string. If the function is called without an argument, it will
-use the current time.
+to a string. If the function is called without an argument or with an
+undefined argument, it will use the current time.
The string returned is in the format preferred for the HTTP protocol.
This is a fixed length subset of the format defined by RFC 1123,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/HTTP/Message.pm new/libwww-perl-5.834/lib/HTTP/Message.pm
--- old/libwww-perl-5.830/lib/HTTP/Message.pm 2009-06-25 21:39:22.000000000 +0200
+++ new/libwww-perl-5.834/lib/HTTP/Message.pm 2009-11-21 13:57:55.000000000 +0100
@@ -2,7 +2,7 @@
use strict;
use vars qw($VERSION $AUTOLOAD);
-$VERSION = "5.828";
+$VERSION = "5.834";
require HTTP::Headers;
require Carp;
@@ -248,6 +248,7 @@
if (my $c = $attr->{content}) {
require HTTP::Headers::Util;
my @v = HTTP::Headers::Util::split_header_words($c);
+ return unless @v;
my($ct, undef, %ct_param) = @{$v[0]};
$charset = $ct_param{charset};
}
@@ -260,6 +261,7 @@
$self->eof;
}, "tagname, attr, self"],
report_tags => [qw(meta)],
+ utf8_mode => 1,
);
$p->parse($$cref);
return $charset if $charset;
@@ -299,62 +301,41 @@
next unless $ce;
next if $ce eq "identity";
if ($ce eq "gzip" || $ce eq "x-gzip") {
- require Compress::Zlib;
- unless ($content_ref_iscopy) {
- # memGunzip is documented to destroy its buffer argument
- my $copy = $$content_ref;
- $content_ref = \$copy;
- $content_ref_iscopy++;
- }
- $content_ref = \Compress::Zlib::memGunzip($$content_ref);
- die "Can't gunzip content" unless defined $$content_ref;
+ require IO::Uncompress::Gunzip;
+ my $output;
+ IO::Uncompress::Gunzip::gunzip($content_ref, \$output, Transparent => 0)
+ or die "Can't gunzip content: $IO::Uncompress::Gunzip::GunzipError";
+ $content_ref = \$output;
+ $content_ref_iscopy++;
}
elsif ($ce eq "x-bzip2") {
- require Compress::Bzip2;
- unless ($content_ref_iscopy) {
- # memBunzip is documented to destroy its buffer argument
- my $copy = $$content_ref;
- $content_ref = \$copy;
- $content_ref_iscopy++;
- }
- $content_ref = \Compress::Bzip2::memBunzip($$content_ref);
- die "Can't bunzip content" unless defined $$content_ref;
+ require IO::Uncompress::Bunzip2;
+ my $output;
+ IO::Uncompress::Bunzip2::bunzip2($content_ref, \$output, Transparent => 0)
+ or die "Can't bunzip content: $IO::Uncompress::Bunzip2::Bunzip2Error";
+ $content_ref = \$output;
+ $content_ref_iscopy++;
}
elsif ($ce eq "deflate") {
- require Compress::Zlib;
- my $out = Compress::Zlib::uncompress($$content_ref);
- unless (defined $out) {
- # "Content-Encoding: deflate" is supposed to mean the "zlib"
- # format of RFC 1950, but Microsoft got that wrong, so some
- # servers sends the raw compressed "deflate" data. This
- # tries to inflate this format.
- unless ($content_ref_iscopy) {
- # the $i->inflate method is documented to destroy its
- # buffer argument
- my $copy = $$content_ref;
- $content_ref = \$copy;
- $content_ref_iscopy++;
- }
-
- my($i, $status) = Compress::Zlib::inflateInit(
- WindowBits => -Compress::Zlib::MAX_WBITS(),
- );
- my $OK = Compress::Zlib::Z_OK();
- die "Can't init inflate object" unless $i && $status == $OK;
- ($out, $status) = $i->inflate($content_ref);
- if ($status != Compress::Zlib::Z_STREAM_END()) {
- if ($status == $OK) {
- $self->push_header("Client-Warning" =>
- "Content might be truncated; incomplete deflate stream");
- }
- else {
- # something went bad, can't trust $out any more
- $out = undef;
- }
+ require IO::Uncompress::Inflate;
+ my $output;
+ my $status = IO::Uncompress::Inflate::inflate($content_ref, \$output, Transparent => 0);
+ my $error = $IO::Uncompress::Inflate::InflateError;
+ unless ($status) {
+ # "Content-Encoding: deflate" is supposed to mean the
+ # "zlib" format of RFC 1950, but Microsoft got that
+ # wrong, so some servers sends the raw compressed
+ # "deflate" data. This tries to inflate this format.
+ $output = undef;
+ require IO::Uncompress::RawInflate;
+ unless (IO::Uncompress::RawInflate::rawinflate($content_ref, \$output)) {
+ $self->push_header("Client-Warning" =>
+ "Could not raw inflate content: $IO::Uncompress::RawInflate::RawInflateError");
+ $output = undef;
}
}
- die "Can't inflate content" unless defined $out;
- $content_ref = \$out;
+ die "Can't inflate content: $error" unless defined $output;
+ $content_ref = \$output;
$content_ref_iscopy++;
}
elsif ($ce eq "compress" || $ce eq "x-compress") {
@@ -417,11 +398,16 @@
# XXX preferably we should determine if the modules are available without loading
# them here
eval {
- require Compress::Zlib;
- push(@enc, "gzip", "x-gzip", "deflate");
+ require IO::Uncompress::Gunzip;
+ push(@enc, "gzip", "x-gzip");
+ };
+ eval {
+ require IO::Uncompress::Inflate;
+ require IO::Uncompress::RawInflate;
+ push(@enc, "deflate");
};
eval {
- require Compress::Bzip2;
+ require IO::Uncompress::Bunzip2;
push(@enc, "x-bzip2");
};
# we don't care about announcing the 'identity', 'base64' and
@@ -462,16 +448,25 @@
$content = MIME::Base64::encode($content);
}
elsif ($encoding eq "gzip" || $encoding eq "x-gzip") {
- require Compress::Zlib;
- $content = Compress::Zlib::memGzip($content);
+ require IO::Compress::Gzip;
+ my $output;
+ IO::Compress::Gzip::gzip(\$content, \$output, Minimal => 1)
+ or die "Can't gzip content: $IO::Compress::Gzip::GzipError";
+ $content = $output;
}
elsif ($encoding eq "deflate") {
- require Compress::Zlib;
- $content = Compress::Zlib::compress($content);
+ require IO::Compress::Deflate;
+ my $output;
+ IO::Compress::Deflate::deflate(\$content, \$output)
+ or die "Can't deflate content: $IO::Compress::Deflate::DeflateError";
+ $content = $output;
}
elsif ($encoding eq "x-bzip2") {
- require Compress::Bzip2;
- $content = Compress::Bzip2::memGzip($content);
+ require IO::Compress::Bzip2;
+ my $output;
+ IO::Compress::Bzip2::bzip2(\$content, \$output)
+ or die "Can't bzip2 content: $IO::Compress::Bzip2::Bzip2Error";
+ $content = $output;
}
elsif ($encoding eq "rot13") { # for the fun of it
$content =~ tr/A-Za-z/N-ZA-Mn-za-m/;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP/Protocol/ftp.pm new/libwww-perl-5.834/lib/LWP/Protocol/ftp.pm
--- old/libwww-perl-5.830/lib/LWP/Protocol/ftp.pm 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP/Protocol/ftp.pm 2009-10-12 20:34:36.000000000 +0200
@@ -94,6 +94,7 @@
my $ftp = LWP::Protocol::MyFTP->new($host,
Port => $port,
Timeout => $timeout,
+ LocalAddr => $self->{ua}{local_address},
);
# XXX Should be some what to pass on 'Passive' (header??)
unless ($ftp) {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP/Protocol/gopher.pm new/libwww-perl-5.834/lib/LWP/Protocol/gopher.pm
--- old/libwww-perl-5.830/lib/LWP/Protocol/gopher.pm 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP/Protocol/gopher.pm 2009-10-12 20:35:50.000000000 +0200
@@ -120,6 +120,7 @@
# Ok, lets make the request
my $socket = IO::Socket::INET->new(PeerAddr => $host,
PeerPort => $port,
+ LocalAddr => $self->{ua}{local_address},
Proto => 'tcp',
Timeout => $timeout);
die "Can't connect to $host:$port" unless $socket;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP/Protocol/http.pm new/libwww-perl-5.834/lib/LWP/Protocol/http.pm
--- old/libwww-perl-5.830/lib/LWP/Protocol/http.pm 2009-07-17 21:03:05.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP/Protocol/http.pm 2009-10-12 20:33:43.000000000 +0200
@@ -18,7 +18,7 @@
my($self, $host, $port, $timeout) = @_;
my $conn_cache = $self->{ua}{conn_cache};
if ($conn_cache) {
- if (my $sock = $conn_cache->withdraw("http", "$host:$port")) {
+ if (my $sock = $conn_cache->withdraw($self->socket_type, "$host:$port")) {
return $sock if $sock && !$sock->can_read(0);
# if the socket is readable, then either the peer has closed the
# connection or there are some garbage bytes on it. In either
@@ -30,6 +30,7 @@
local($^W) = 0; # IO::Socket::INET can be noisy
my $sock = $self->socket_class->new(PeerAddr => $host,
PeerPort => $port,
+ LocalAddr => $self->{ua}{local_address},
Proto => 'tcp',
Timeout => $timeout,
KeepAlive => !!$conn_cache,
@@ -49,6 +50,11 @@
$sock;
}
+sub socket_type
+{
+ return "http";
+}
+
sub socket_class
{
my $self = shift;
@@ -402,7 +408,7 @@
if (($peer_http_version eq "1.1" && !$connection{close}) ||
$connection{"keep-alive"})
{
- $conn_cache->deposit("http", "$host:$port", $socket);
+ $conn_cache->deposit($self->socket_type, "$host:$port", $socket);
}
}
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP/Protocol/https.pm new/libwww-perl-5.834/lib/LWP/Protocol/https.pm
--- old/libwww-perl-5.830/lib/LWP/Protocol/https.pm 2008-04-11 20:59:19.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP/Protocol/https.pm 2009-10-03 20:16:58.000000000 +0200
@@ -6,6 +6,11 @@
require LWP::Protocol::http;
@ISA = qw(LWP::Protocol::http);
+sub socket_type
+{
+ return "https";
+}
+
sub _check_sock
{
my($self, $req, $sock) = @_;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP/UserAgent.pm new/libwww-perl-5.834/lib/LWP/UserAgent.pm
--- old/libwww-perl-5.830/lib/LWP/UserAgent.pm 2009-07-07 21:30:45.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP/UserAgent.pm 2009-11-21 13:58:07.000000000 +0100
@@ -5,7 +5,7 @@
require LWP::MemberMixin;
@ISA = qw(LWP::MemberMixin);
-$VERSION = "5.829";
+$VERSION = "5.834";
use HTTP::Request ();
use HTTP::Response ();
@@ -40,6 +40,7 @@
my $def_headers = delete $cnf{default_headers};
my $timeout = delete $cnf{timeout};
$timeout = 3*60 unless defined $timeout;
+ my $local_address = delete $cnf{local_address};
my $use_eval = delete $cnf{use_eval};
$use_eval = 1 unless defined $use_eval;
my $parse_head = delete $cnf{parse_head};
@@ -81,6 +82,7 @@
my $self = bless {
def_headers => $def_headers,
timeout => $timeout,
+ local_address => $local_address,
use_eval => $use_eval,
show_progress=> $show_progress,
max_size => $max_size,
@@ -92,7 +94,8 @@
requests_redirectable => $requests_redirectable,
}, $class;
- $self->agent($agent || $class->_agent);
+ $self->agent(defined($agent) ? $agent : $class->_agent)
+ if defined($agent) || !$def_headers || !$def_headers->header("User-Agent");
$self->from($from) if $from;
$self->cookie_jar($cookie_jar) if $cookie_jar;
$self->parse_head($parse_head);
@@ -574,6 +577,7 @@
sub timeout { shift->_elem('timeout', @_); }
+sub local_address{ shift->_elem('local_address',@_); }
sub max_size { shift->_elem('max_size', @_); }
sub max_redirect { shift->_elem('max_redirect', @_); }
sub show_progress{ shift->_elem('show_progress', @_); }
@@ -638,6 +642,8 @@
my $self = shift;
my $old = $self->{def_headers} ||= HTTP::Headers->new;
if (@_) {
+ Carp::croak("default_headers not set to HTTP::Headers compatible object")
+ unless @_ == 1 && $_[0]->can("header_field_names");
$self->{def_headers} = shift;
}
return $old;
@@ -834,12 +840,16 @@
my $tmpfile = "$file-$$";
my $response = $self->request($request, $tmpfile);
+ if ( $response->header('X-Died') ) {
+ die $response->header('X-Died');
+ }
# Only fetching a fresh copy of the would be considered success.
# If the file was not modified, "304" would returned, which
# is considered by HTTP::Status to be a "redirect", /not/ "success"
if ( $response->is_success ) {
- my $file_length = ( stat($tmpfile) )[7];
+ my @stat = stat($tmpfile) or die "Could not stat tmpfile '$tmpfile': $!";
+ my $file_length = $stat[7];
my ($content_length) = $response->header('Content-length');
if ( defined $content_length and $file_length < $content_length ) {
@@ -932,6 +942,8 @@
else {
# Ignore random _proxy variables, allow only valid schemes
next unless $k =~ /^$URI::scheme_re\z/;
+ # Ignore xxx_proxy variables if xxx isn't a supported protocol
+ next unless LWP::Protocol::implementor($k);
$self->proxy($k, $v);
}
}
@@ -1027,6 +1039,7 @@
conn_cache undef
cookie_jar undef
default_headers HTTP::Headers->new
+ local_address undef
max_size undef
max_redirect 7
parse_head 1
@@ -1055,7 +1068,7 @@
CLWP::UserAgent when it dispatches requests. Most of these can also
be initialized by options passed to the constructor method.
-The following attributes methods are provided. The attribute value is
+The following attribute methods are provided. The attribute value is
left unchanged if no argument is given. The return value from each
method is the old attribute value.
@@ -1164,6 +1177,14 @@
$ua->credentials("www.example.com:80", "Some Realm", "foo", "secret");
+=item $ua->local_address
+
+=item $ua->local_address( $address )
+
+Get/set the local interface to bind to for network connections. The interface
+can be specified as a hostname or an IP address. This value is passed as the
+C<LocalAddr> argument to LIO::Socket::INET.
+
=item $ua->max_size
=item $ua->max_size( $bytes )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/LWP.pm new/libwww-perl-5.834/lib/LWP.pm
--- old/libwww-perl-5.830/lib/LWP.pm 2009-07-26 21:34:02.000000000 +0200
+++ new/libwww-perl-5.834/lib/LWP.pm 2009-11-21 13:56:32.000000000 +0100
@@ -1,6 +1,6 @@
package LWP;
-$VERSION = "5.830";
+$VERSION = "5.834";
sub Version { $VERSION; }
require 5.005;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/Net/HTTP/Methods.pm new/libwww-perl-5.834/lib/Net/HTTP/Methods.pm
--- old/libwww-perl-5.830/lib/Net/HTTP/Methods.pm 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/lib/Net/HTTP/Methods.pm 2009-11-21 13:58:34.000000000 +0100
@@ -5,7 +5,7 @@
use strict;
use vars qw($VERSION);
-$VERSION = "5.824";
+$VERSION = "5.834";
my $CRLF = "\015\012"; # "\r\n" is not portable
@@ -69,7 +69,7 @@
$peer_http_version = "1.0" unless defined $peer_http_version;
my $send_te = delete $cnf->{SendTE};
my $max_line_length = delete $cnf->{MaxLineLength};
- $max_line_length = 4*1024 unless defined $max_line_length;
+ $max_line_length = 8*1024 unless defined $max_line_length;
my $max_header_lines = delete $cnf->{MaxHeaderLines};
$max_header_lines = 128 unless defined $max_header_lines;
@@ -162,8 +162,8 @@
if ($given{te}) {
push(@connection, "TE") unless grep lc($_) eq "te", @connection;
}
- elsif ($self->send_te && zlib_ok()) {
- # gzip is less wanted since the Compress::Zlib interface for
+ elsif ($self->send_te && gunzip_ok()) {
+ # gzip is less wanted since the IO::Uncompress::Gunzip interface for
# it does not really allow chunked decoding to take place easily.
push(@h2, "TE: deflate,gzip;q=0.3");
push(@connection, "TE");
@@ -241,6 +241,7 @@
sub my_readline {
my $self = shift;
+ my $what = shift;
for (${*$self}{'http_buf'}) {
my $max_line_length = ${*$self}{'http_max_line_length'};
my $pos;
@@ -248,7 +249,7 @@
# find line ending
$pos = index($_, "\012");
last if $pos >= 0;
- die "Line too long (limit is $max_line_length)"
+ die "$what line too long (limit is $max_line_length)"
if $max_line_length && length($_) > $max_line_length;
# need to read more data to find a line ending
@@ -265,7 +266,7 @@
}
# if we have already accumulated some data let's at least
# return that as a line
- die "read failed: $!" unless length;
+ die "$what read failed: $!" unless length;
}
unless ($n) {
return undef unless length;
@@ -273,7 +274,7 @@
}
}
}
- die "Line too long ($pos; limit is $max_line_length)"
+ die "$what line too long ($pos; limit is $max_line_length)"
if $max_line_length && $pos > $max_line_length;
my $line = substr($_, 0, $pos+1, "");
@@ -311,7 +312,7 @@
my @headers;
my $line_count = 0;
my $max_header_lines = ${*$self}{'http_max_header_lines'};
- while (my $line = my_readline($self)) {
+ while (my $line = my_readline($self, 'Header')) {
if ($line =~ /^(\S+?)\s*:\s*(.*)/s) {
push(@headers, $1, $2);
}
@@ -339,7 +340,7 @@
my($self, %opt) = @_;
my $laxed = $opt{laxed};
- my($status, $eol) = my_readline($self);
+ my($status, $eol) = my_readline($self, 'Status');
unless (defined $status) {
die "Server closed connection without sending any data back";
}
@@ -416,19 +417,23 @@
unless pop(@te) eq "chunked";
for (@te) {
- if ($_ eq "deflate" && zlib_ok()) {
- #require Compress::Zlib;
- my $i = Compress::Zlib::inflateInit();
- die "Can't make inflator" unless $i;
- $_ = sub { scalar($i->inflate($_[0])) }
+ if ($_ eq "deflate" && inflate_ok()) {
+ #require Compress::Raw::Zlib;
+ my ($i, $status) = Compress::Raw::Zlib::Inflate->new();
+ die "Can't make inflator: $status" unless $i;
+ $_ = sub { my $out; $i->inflate($_[0], \$out); $out }
}
- elsif ($_ eq "gzip" && zlib_ok()) {
- #require Compress::Zlib;
+ elsif ($_ eq "gzip" && gunzip_ok()) {
+ #require IO::Uncompress::Gunzip;
my @buf;
$_ = sub {
push(@buf, $_[0]);
- return Compress::Zlib::memGunzip(join("", @buf)) if $_[1];
- return "";
+ return "" unless $_[1];
+ my $input = join("", @buf);
+ my $output;
+ IO::Uncompress::Gunzip::gunzip(\$input, \$output, Transparent => 0)
+ or die "Can't gunzip content: $IO::Uncompress::Gunzip::GunzipError";
+ return \$output;
};
}
elsif ($_ eq "identity") {
@@ -471,11 +476,11 @@
# $chunked > 0: bytes left in current chunk to read
if ($chunked <= 0) {
- my $line = my_readline($self);
+ my $line = my_readline($self, 'Entity body');
if ($chunked == 0) {
die "Missing newline after chunk data: '$line'"
if !defined($line) || $line ne "";
- $line = my_readline($self);
+ $line = my_readline($self, 'Entity body');
}
die "EOF when chunk header expected" unless defined($line);
my $chunk_len = $line;
@@ -548,23 +553,39 @@
}
BEGIN {
-my $zlib_ok;
+my $gunzip_ok;
+my $inflate_ok;
-sub zlib_ok {
- return $zlib_ok if defined $zlib_ok;
+sub gunzip_ok {
+ return $gunzip_ok if defined $gunzip_ok;
- # Try to load Compress::Zlib.
+ # Try to load IO::Uncompress::Gunzip.
local $@;
local $SIG{__DIE__};
- $zlib_ok = 0;
+ $gunzip_ok = 0;
eval {
- require Compress::Zlib;
- Compress::Zlib->VERSION(1.10);
- $zlib_ok++;
+ require IO::Uncompress::Gunzip;
+ $gunzip_ok++;
};
- return $zlib_ok;
+ return $gunzip_ok;
+}
+
+sub inflate_ok {
+ return $inflate_ok if defined $inflate_ok;
+
+ # Try to load Compress::Raw::Zlib.
+ local $@;
+ local $SIG{__DIE__};
+ $inflate_ok = 0;
+
+ eval {
+ require Compress::Raw::Zlib;
+ $inflate_ok++;
+ };
+
+ return $inflate_ok;
}
} # BEGIN
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/Net/HTTP.pm new/libwww-perl-5.834/lib/Net/HTTP.pm
--- old/libwww-perl-5.830/lib/Net/HTTP.pm 2009-06-13 14:45:37.000000000 +0200
+++ new/libwww-perl-5.834/lib/Net/HTTP.pm 2009-11-21 13:58:54.000000000 +0100
@@ -3,7 +3,7 @@
use strict;
use vars qw($VERSION @ISA $SOCKET_CLASS);
-$VERSION = "5.819";
+$VERSION = "5.834";
unless ($SOCKET_CLASS) {
eval { require IO::Socket::INET } || require IO::Socket;
$SOCKET_CLASS = "IO::Socket::INET";
@@ -57,7 +57,7 @@
The C<Net::HTTP> class is a low-level HTTP client. An instance of the
C<Net::HTTP> class represents a connection to an HTTP server. The
HTTP protocol is described in RFC 2616. The C<Net::HTTP> class
-support C<HTTP/1.0> and C<HTTP/1.1>.
+supports C<HTTP/1.0> and C<HTTP/1.1>.
C<Net::HTTP> is a sub-class of C<IO::Socket::INET>. You can mix the
methods described below with reading and writing from the socket
@@ -110,9 +110,9 @@
Get/set the a value indicating if the request will be sent with a "TE"
header to indicate the transfer encodings that the server can choose to
-use. If the C<Compress::Zlib> module is installed then this will
-announce that this client accept both the I<deflate> and I<gzip>
-encodings.
+use. The list of encodings announced as accepted by this client depends
+on availability of the following modules: C<Compress::Raw::Zlib> for
+I<deflate>, and C<IO::Compress::Gunzip> for I<gzip>.
=item $s->http_version
@@ -128,11 +128,11 @@
=item $s->max_line_length
Get/set a limit on the length of response line and response header
-lines. The default is 4096. A value of 0 means no limit.
+lines. The default is 8192. A value of 0 means no limit.
=item $s->max_header_length
-Get/set a limit on the number of headers lines that a response can
+Get/set a limit on the number of header lines that a response can
have. The default is 128. A value of 0 means no limit.
=item $s->format_request($method, $uri, %headers, [$content])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/lib/WWW/RobotRules.pm new/libwww-perl-5.834/lib/WWW/RobotRules.pm
--- old/libwww-perl-5.830/lib/WWW/RobotRules.pm 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/lib/WWW/RobotRules.pm 2009-10-03 17:14:09.000000000 +0200
@@ -1,6 +1,6 @@
package WWW::RobotRules;
-$VERSION = "5.824";
+$VERSION = "5.832";
sub Version { $VERSION; }
use strict;
@@ -105,11 +105,11 @@
push(@anon_disallowed, $disallow);
}
}
- elsif (/^\s*Sitemap\s*:/i) {
+ elsif (/\S\s*:/) {
# ignore
}
else {
- warn "RobotRules <$robot_txt_uri>: Unexpected line: $_\n" if $^W;
+ warn "RobotRules <$robot_txt_uri>: Malformed record: <$_>\n" if $^W;
}
}
@@ -132,7 +132,7 @@
# See whether my short-name is a substring of the
# "User-Agent: ..." line that we were passed:
-
+
if(index(lc($me), lc($ua_line)) >= 0) {
return 1;
}
@@ -145,10 +145,10 @@
sub allowed {
my($self, $uri) = @_;
$uri = URI->new("$uri");
-
+
return 1 unless $uri->scheme eq 'http' or $uri->scheme eq 'https';
# Robots.txt applies to only those schemes.
-
+
my $netloc = $uri->host . ":" . $uri->port;
my $fresh_until = $self->fresh_until($netloc);
@@ -358,7 +358,7 @@
The format and semantics of the "/robots.txt" file are as follows
(this is an edited abstract of
-http://www.robotstxt.org/wc/norobots.html ):
+http://www.robotstxt.org/wc/norobots.html):
The file consists of one or more records separated by one or more
blank lines. Each record contains lines of the form
@@ -396,6 +396,8 @@
=back
+Unrecognized records are ignored.
+
=head1 ROBOTS.TXT EXAMPLES
The following example "/robots.txt" file specifies that no robots
@@ -433,7 +435,7 @@
Disallow: /west-wing/ # except the west wing!
# It's good to be the Prince...
User-agent: Beast
- Disallow:
+ Disallow:
This file is missing the required blank lines between records.
However, the intention is clear.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/base/cookies.t new/libwww-perl-5.834/t/base/cookies.t
--- old/libwww-perl-5.830/t/base/cookies.t 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/t/base/cookies.t 2009-10-06 23:03:01.000000000 +0200
@@ -1,7 +1,7 @@
#!perl -w
use Test;
-plan tests => 62;
+plan tests => 66;
use HTTP::Cookies;
use HTTP::Request;
@@ -9,7 +9,7 @@
#-------------------------------------------------------------------
# First we check that it works for the original example at
-# http://www.netscape.com/newsref/std/cookie_spec.html
+# http://curl.haxx.se/rfc/cookie_spec.html
# Client requests a document, and receives in the response:
#
@@ -644,6 +644,43 @@
#print $req->as_string;
ok($req->header("Cookie"), "foo=\"bar\"");
+# Test cookies that expire far into the future [RT#50147]
+$c = HTTP::Cookies->new;
+$res->header("Set-Cookie", "PREF=ID=cee18f7c4e977184:TM=1254583090:LM=1254583090:S=Pdb0-hy9PxrNj4LL; expires=Mon, 03-Oct-2211 15:18:10 GMT; path=/; domain=.example.com");
+$res->push_header("Set-Cookie", "expired1=1; expires=Mon, 03-Oct-2001 15:18:10 GMT; path=/; domain=.example.com");
+$res->push_header("Set-Cookie", "expired2=1; expires=Fri Jan 1 00:00:00 GMT 1970; path=/; domain=.example.com");
+$res->push_header("Set-Cookie", "expired3=1; expires=Fri Jan 1 00:00:01 GMT 1970; path=/; domain=.example.com");
+$res->push_header("Set-Cookie", "expired4=1; expires=Thu Dec 31 23:59:59 GMT 1969; path=/; domain=.example.com");
+$res->push_header("Set-Cookie", "expired5=1; expires=Fri Feb 2 00:00:00 GMT 1950; path=/; domain=.example.com");
+$c->extract_cookies($res);
+#print $res->as_string;
+#print "---\n";
+#print $c->as_string;
+$req = HTTP::Request->new(GET => "http://www.example.com/foo");
+$c->add_cookie_header($req);
+#print $req->as_string;
+ok($req->header("Cookie"), "PREF=ID=cee18f7c4e977184:TM=1254583090:LM=1254583090:S=Pdb0-hy9PxrNj4LL");
+
+$c->clear_temporary_cookies;
+$req = HTTP::Request->new(GET => "http://www.example.com/foo");
+$c->add_cookie_header($req);
+#print $req->as_string;
+ok($req->header("Cookie"), "PREF=ID=cee18f7c4e977184:TM=1254583090:LM=1254583090:S=Pdb0-hy9PxrNj4LL");
+
+# Test merging of cookies
+$c = HTTP::Cookies->new;
+$res->header("Set-Cookie", "foo=1; path=/");
+$c->extract_cookies($res);
+
+$req = HTTP::Request->new(GET => "http://www.example.com/foo");
+$req->header("Cookie", "x=bcd");
+$c->add_cookie_header($req);
+ok($req->header("Cookie"), "x=bcd; foo=1");
+$c->add_cookie_header($req);
+ok($req->header("Cookie"), "x=bcd; foo=1; foo=1");
+#print $req->as_string;
+
+
#-------------------------------------------------------------------
sub interact
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/base/message.t new/libwww-perl-5.834/t/base/message.t
--- old/libwww-perl-5.830/t/base/message.t 2009-07-26 21:33:32.000000000 +0200
+++ new/libwww-perl-5.834/t/base/message.t 2009-11-15 08:37:14.000000000 +0100
@@ -3,7 +3,7 @@
use strict;
use Test qw(plan ok skip);
-plan tests => 121;
+plan tests => 124;
require HTTP::Message;
use Config qw(%Config);
@@ -469,32 +469,35 @@
ok($m->decoded_content, "Hello World!");
ok(!$m->header("Client-Warning"));
-if (eval "require Compress::Bzip2") {
- $m = HTTP::Message->new([
- "Content-Type" => "text/plain",
- ],
- "Hello world!"
- );
- ok($m->encode("x-bzip2"));
- ok($m->header("Content-Encoding"), "x-bzip2");
- ok($m->content =~ /\0/);
- ok($m->decoded_content, "Hello world!");
- ok($m->decode);
- ok($m->content, "Hello world!");
- if (0) {
- # I prepared the following message by using bzip2 command (v1.0.4)
- # but for some reason it will not pass
+if (eval "require IO::Uncompress::Bunzip2") {
$m = HTTP::Message->new([
"Content-Type" => "text/plain",
"Content-Encoding" => "x-bzip2, base64",
],
"QlpoOTFBWSZTWcvLx0QAAAHVgAAQYAAAQAYEkIAgADEAMCBoYlnQeSEMvxdyRThQkMvLx0Q=\n"
);
- $m->decode;
- $m->dump;
+ ok($m->decoded_content, "Hello world!\n");
+ ok($m->decode);
+ ok($m->content, "Hello world!\n");
+
+ if (eval "require IO::Compress::Bzip2") {
+ $m = HTTP::Message->new([
+ "Content-Type" => "text/plain",
+ ],
+ "Hello world!"
+ );
+ ok($m->encode("x-bzip2"));
+ ok($m->header("Content-Encoding"), "x-bzip2");
+ ok($m->content =~ /^BZh.*\0/);
+ ok($m->decoded_content, "Hello world!");
+ ok($m->decode);
+ ok($m->content, "Hello world!");
+ }
+ else {
+ skip("Need IO::Compress::Bzip2", undef) for 1..6;
}
}
else {
- skip("Need Compress::Bzip2", undef) for 1..6;
+ skip("Need IO::Uncompress::Bunzip2", undef) for 1..9;
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/base/request.t new/libwww-perl-5.834/t/base/request.t
--- old/libwww-perl-5.830/t/base/request.t 2009-06-13 14:45:37.000000000 +0200
+++ new/libwww-perl-5.834/t/base/request.t 2009-11-15 08:37:14.000000000 +0100
@@ -13,7 +13,7 @@
ok($req->method, "GET");
ok($req->uri, "http://www.example.com");
-ok($req->header("Accept-Encoding") =~ /\bgzip\b/); # assuming Compress::Zlib is there
+ok($req->header("Accept-Encoding") =~ /\bgzip\b/); # assuming IO::Uncompress::Gunzip is there
$req->dump(prefix => "# ");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/live/apache-listing.t new/libwww-perl-5.834/t/live/apache-listing.t
--- old/libwww-perl-5.830/t/live/apache-listing.t 2009-06-25 20:58:35.000000000 +0200
+++ new/libwww-perl-5.834/t/live/apache-listing.t 2009-11-21 13:55:20.000000000 +0100
@@ -10,7 +10,6 @@
my @urls = (
"http://www.apache.org/dist/apr/?C=N&O=D",
"http://perl.apache.org/rpm/distrib/",
- "http://stein.cshl.org/WWW/software/",
"http://www.cpan.org/modules/by-module/",
);
plan tests => scalar(@urls);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/local/http.t new/libwww-perl-5.834/t/local/http.t
--- old/libwww-perl-5.830/t/local/http.t 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/t/local/http.t 2009-10-13 19:56:58.000000000 +0200
@@ -48,7 +48,7 @@
}
use Test;
-plan tests => 52;
+plan tests => 54;
my $greeting = <DAEMON>;
$greeting =~ /(<[^>]+>)/;
@@ -327,6 +327,15 @@
ok($_, qr/^Content-Type:\s*application\/x-www-form-urlencoded$/mi);
ok($_, qr/^foo=bar&bar=test$/m);
+$req = HTTP::Request->new(POST => url("/echo/foo", $base));
+$req->content_type("multipart/form-data");
+$req->add_part(HTTP::Message->new(["Content-Type" => "text/plain"], "Hi\n"));
+$req->add_part(HTTP::Message->new(["Content-Type" => "text/plain"], "there\n"));
+$res = $ua->request($req);
+#print $res->as_string;
+ok($res->is_success);
+ok($res->content =~ /^Content-Type: multipart\/form-data; boundary=/m);
+
#----------------------------------------------------------------
print "Check partial content response...\n";
sub httpd_get_partial
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/config.pl.dist new/libwww-perl-5.834/t/net/config.pl.dist
--- old/libwww-perl-5.830/t/net/config.pl.dist 2008-04-04 10:43:22.000000000 +0200
+++ new/libwww-perl-5.834/t/net/config.pl.dist 2009-10-03 17:14:09.000000000 +0200
@@ -1,6 +1,6 @@
package net;
-# Configure these for you local system
+# Configure these for your local system
$httpserver = "localhost:80";
$cgidir = "/cgi-bin/lwp";
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/http-get.t new/libwww-perl-5.834/t/net/http-get.t
--- old/libwww-perl-5.830/t/net/http-get.t 2008-04-04 10:43:22.000000000 +0200
+++ new/libwww-perl-5.834/t/net/http-get.t 2009-10-03 17:14:09.000000000 +0200
@@ -6,7 +6,7 @@
print "1..2\n";
require "net/config.pl";
-require LWP::Protocol::http;
+require HTTP::Request;
require LWP::UserAgent;
my $ua = new LWP::UserAgent; # create a useragent to test
@@ -14,7 +14,7 @@
$netloc = $net::httpserver;
$script = $net::cgidir . "/test";
-$url = new URI::URL("http://$netloc$script?query");
+$url = "http://$netloc$script?query";
my $request = new HTTP::Request('GET', $url);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/http-post.t new/libwww-perl-5.834/t/net/http-post.t
--- old/libwww-perl-5.830/t/net/http-post.t 2008-04-04 10:43:22.000000000 +0200
+++ new/libwww-perl-5.834/t/net/http-post.t 2009-10-03 17:14:09.000000000 +0200
@@ -6,7 +6,7 @@
print "1..2\n";
require "net/config.pl";
-require LWP::Protocol::http;
+require HTTP::Request;
require LWP::UserAgent;
$netloc = $net::httpserver;
@@ -14,7 +14,7 @@
my $ua = new LWP::UserAgent; # create a useragent to test
-$url = new URI::URL("http://$netloc$script");
+$url = "http://$netloc$script";
my $form = 'searchtype=Substring';
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/http-timeout.t new/libwww-perl-5.834/t/net/http-timeout.t
--- old/libwww-perl-5.830/t/net/http-timeout.t 2008-04-04 10:43:22.000000000 +0200
+++ new/libwww-perl-5.834/t/net/http-timeout.t 2009-10-03 17:14:09.000000000 +0200
@@ -5,8 +5,7 @@
print "1..1\n";
require "net/config.pl";
-require HTTP::Status;
-require LWP::Protocol::http;
+require HTTP::Request;
require LWP::UserAgent;
my $ua = new LWP::UserAgent; # create a useragent to test
@@ -16,7 +15,7 @@
$netloc = $net::httpserver;
$script = $net::cgidir . "/timeout";
-$url = new URI::URL("http://$netloc$script");
+$url = "http://$netloc$script";
my $request = new HTTP::Request('GET', $url);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/mirror.t new/libwww-perl-5.834/t/net/mirror.t
--- old/libwww-perl-5.830/t/net/mirror.t 2008-04-04 10:43:22.000000000 +0200
+++ new/libwww-perl-5.834/t/net/mirror.t 2009-10-03 17:14:09.000000000 +0200
@@ -3,7 +3,6 @@
#
require "net/config.pl";
-require LWP::Protocol::http;
require LWP::UserAgent;
require HTTP::Status;
@@ -29,7 +28,7 @@
print "ok 2\n";
}
else {
- print "nok ok 2\n";
+ print "not ok 2\n";
}
unlink($copy);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libwww-perl-5.830/t/net/proxy.t new/libwww-perl-5.834/t/net/proxy.t
--- old/libwww-perl-5.830/t/net/proxy.t 2009-06-15 15:20:06.000000000 +0200
+++ new/libwww-perl-5.834/t/net/proxy.t 2009-10-03 17:14:09.000000000 +0200
@@ -12,13 +12,14 @@
exit 0;
}
+require HTTP::Request;
require LWP::UserAgent;
my $ua = new LWP::UserAgent; # create a useragent to test
$ua->proxy('ftp', $net::ftp_proxy);
-my $url = new URI::URL('ftp://ftp.uninett.no/');
+my $url = 'ftp://ftp.uninett.no/';
my $request = new HTTP::Request('GET', $url);
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Remember to have fun...
--
To unsubscribe, e-mail: opensuse-commit+unsubscribe@opensuse.org
For additional commands, e-mail: opensuse-commit+help@opensuse.org