diff -Nru apt-cacher-1.7.10/apt-cacher apt-cacher-1.7.11/apt-cacher
--- apt-cacher-1.7.10/apt-cacher	2014-08-29 13:20:45.000000000 +0000
+++ apt-cacher-1.7.11/apt-cacher	2015-06-09 08:26:02.000000000 +0000
@@ -21,7 +21,7 @@
 =head1 COPYRIGHT
 
  Copyright (C) 2005 Eduard Bloch
- Copyright (C) 2007-2014 Mark Hindley
+ Copyright (C) 2007-2015 Mark Hindley
 
 Distributed under the terms of the GNU Public Licence (GPL).
 =cut
@@ -455,6 +455,13 @@
     if(defined(my $num=sysread($_, my $buf,65536))) {
         local $SIG{PIPE} = sub {$cfg->{debug} && debug_message('Got SIGPIPE whilst proxying')}; # Catch disconnects/write failure
         my $writeto = (fileno($_)==fileno($ssl)?$con:$ssl);
+        unless ($num) { # EOF
+            my $h=$_->peerhost;
+            $cfg->{debug} && debug_message("Got EOF from $h");
+            $writeto->shutdown(1);
+            $s->remove($_);
+            last;
+        }
         last LOOP if !defined(syswrite($writeto,$buf,$num));
         $count += $num;
     }
@@ -1704,7 +1711,7 @@
     $cfg->{debug} && debug_message("Got another status line. Redirected?: $_") if $response;
     $response=HTTP::Response->parse($_);
     if ($response->code) {
-        $cfg->{debug} && debug_message('Parsed header: ' . $response->code);
+        $cfg->{debug} && debug_message('Parsed headers with status: ' . $response->code);
         chomp_message($response);
         # Handle chunked
         if ($response->header('Transfer-Encoding') && lc $response->header('Transfer-Encoding') eq 'chunked') {
diff -Nru apt-cacher-1.7.10/apt-cacher-cleanup.pl apt-cacher-1.7.11/apt-cacher-cleanup.pl
--- apt-cacher-1.7.10/apt-cacher-cleanup.pl	2014-08-29 13:20:45.000000000 +0000
+++ apt-cacher-1.7.11/apt-cacher-cleanup.pl	2015-06-09 08:26:14.000000000 +0000
@@ -85,6 +85,8 @@
     print "Simulation mode. Just printing what would be done.\n";
 }
 
+local $SIG{CHLD} = 'IGNORE'; # Auto reap children
+
 #############################################################################
 ### configuration ###########################################################
 # Include the library for the config file parser
@@ -105,6 +107,8 @@
     exit 0;
 }
 
+check_install(); # Before we give up rights
+
 # change uid and gid if root and another user/group configured
 if (($cfg->{user} && $cfg->{user} !~ 'root' && !$> ) ||
     ($cfg->{group} && $cfg->{group} !~ 'root' && !$) =~ /^0/)){
diff -Nru apt-cacher-1.7.10/apt-cacher-import.pl apt-cacher-1.7.11/apt-cacher-import.pl
--- apt-cacher-1.7.10/apt-cacher-import.pl	2014-08-29 13:20:35.000000000 +0000
+++ apt-cacher-1.7.11/apt-cacher-import.pl	2015-06-09 08:26:02.000000000 +0000
@@ -334,7 +334,7 @@
     write_header("$header_dir/$targetfile",
                  HTTP::Response->new(200, 'OK',
                                      ['Date' => $headerdate,
                                       'Last-Modified' => $headerdate,
-                                      'Content-Length' => -s $packagefile]));
+                                      'Content-Length' => -s "$package_dir/$targetfile"]));
 }
 # copy the ownership of the private directory
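The ssl_proxy() hunk above is the fix for #785681: sysread() returns 0 at end-of-file (undef only on a real error), so the old loop treated a closed peer as a successful zero-byte read and never terminated. The corrected pattern, reduced to a standalone sketch (the tunnel() wrapper and socket names are illustrative, not apt-cacher's API):

    #!/usr/bin/perl
    # Sketch: bidirectional tunnelling with correct EOF handling.
    # sysread() == 0 means the peer closed; undef means a read error.
    use strict;
    use warnings;
    use IO::Select;

    sub tunnel {
        my ($client, $remote) = @_;
        my $s = IO::Select->new($client, $remote);
      LOOP:
        while ($s->count) {
            foreach my $fh ($s->can_read) {
                my $num = sysread($fh, my $buf, 65536);
                last LOOP unless defined $num;        # real read error: give up
                my $writeto = fileno($fh) == fileno($client) ? $remote : $client;
                unless ($num) {                       # EOF from this side
                    $writeto->shutdown(1);            # half-close towards the other peer
                    $s->remove($fh);                  # stop selecting the dead handle
                    next;
                }
                last LOOP unless defined syswrite($writeto, $buf, $num);
            }
        }
    }

The shutdown(1) half-close matters: it flushes any data still owed to the other peer instead of tearing both directions down at once.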
-d "$RUNDIR"; then + mkdir -m 755 "$RUNDIR" + CONFIG_FILES="/etc/$NAME/$NAME.conf $(run-parts --list /etc/$NAME/conf.d)" + RUN_AS_USER=$(sed -n 's/^\s*user\s*=//p' $CONFIG_FILES | tail -1 | tr -d '[:blank:]') + RUN_AS_GROUP=$(sed -n 's/^\s*group\s*=//p' $CONFIG_FILES | tail -1 | tr -d '[:blank:]') + [ "$RUN_AS_USER" ] && chown $RUN_AS_USER "$RUNDIR" + [ "$RUN_AS_GROUP" ] && chgrp $RUN_AS_GROUP "$RUNDIR" + fi + fi } diff -Nru apt-cacher-1.7.10/debian/changelog apt-cacher-1.7.11/debian/changelog --- apt-cacher-1.7.10/debian/changelog 2014-08-29 13:26:51.000000000 +0000 +++ apt-cacher-1.7.11/debian/changelog 2015-06-09 08:28:52.000000000 +0000 @@ -1,3 +1,18 @@ +apt-cacher (1.7.11) unstable; urgency=low + + * Add Ubuntu codenames 15.04 (vivid) and 15.10 (wily). + * Fix apt-cacher-import.pl in copy mode so that a valid Content-Length + header is generated. Patch from Pip Cet (closes: #782126). + * Correctly detect and handle EOF in ssl_proxy() (closes: #785681). + * Upgrade Standards Version to 3.9.6. No changes. + * Create /var/run/apt-cacher in init script for CGI/inetd mode (closes: + 786661). + * Verify existence (or create) /var/run/apt-cacher in + apt-cacher-cleanup.pl (closes: #760141). + * Automatically reap forked processes in apt-cacher-cleanup.pl. + + -- Mark Hindley Tue, 09 Jun 2015 09:28:35 +0100 + apt-cacher (1.7.10) unstable; urgency=low * Internally store http_proxy as URI object which can include diff -Nru apt-cacher-1.7.10/debian/control apt-cacher-1.7.11/debian/control --- apt-cacher-1.7.10/debian/control 2014-06-18 08:35:12.000000000 +0000 +++ apt-cacher-1.7.11/debian/control 2015-06-09 08:25:31.000000000 +0000 @@ -4,7 +4,7 @@ Maintainer: Mark Hindley Uploaders: Eduard Bloch Build-Depends: debhelper (>= 8.1.0~), po-debconf -Standards-Version: 3.9.5 +Standards-Version: 3.9.6 Package: apt-cacher Architecture: all diff -Nru apt-cacher-1.7.10/lib/apt-cacher.pl apt-cacher-1.7.11/lib/apt-cacher.pl --- apt-cacher-1.7.10/lib/apt-cacher.pl 2014-08-29 13:20:35.000000000 +0000 +++ apt-cacher-1.7.11/lib/apt-cacher.pl 2015-06-09 08:26:02.000000000 +0000 @@ -109,6 +109,8 @@ saucy trusty utopic + vivid + wily )), user => $>, diff -Nru apt-cacher-1.7.10/scripts/generate-conffile.pl apt-cacher-1.7.11/scripts/generate-conffile.pl --- apt-cacher-1.7.10/scripts/generate-conffile.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/scripts/generate-conffile.pl 2014-08-26 10:08:27.000000000 +0000 @@ -0,0 +1,292 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +use lib '/usr/src/apt-cacher/src/lib'; + +require('apt-cacher.pl'); + +my $cfg = read_config('/dev/null'); # Get defaults with no config file + + +print <<"EOF" +################################################################################# +# This is the config file for apt-cacher. On most Debian systems you can safely # +# leave the defaults alone. # +# # +# Commented defaults or examples are given. They can be changed here, or # +# overridden using a fragment placed in ./conf.d/ # +################################################################################# + +### GENERAL ### + +# The location of the local cache/working directory. This can become quite +# large, so make sure it is somewhere with plenty of space. +# +#cache_dir = $cfg->{cache_dir} + +# The directory to use for apt-cacher access and error logs. 
diff -Nru apt-cacher-1.7.10/scripts/generate-conffile.pl apt-cacher-1.7.11/scripts/generate-conffile.pl
--- apt-cacher-1.7.10/scripts/generate-conffile.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/scripts/generate-conffile.pl	2014-08-26 10:08:27.000000000 +0000
@@ -0,0 +1,292 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use lib '/usr/src/apt-cacher/src/lib';
+
+require('apt-cacher.pl');
+
+my $cfg = read_config('/dev/null'); # Get defaults with no config file
+
+
+print <<"EOF"
+#################################################################################
+# This is the config file for apt-cacher. On most Debian systems you can safely #
+# leave the defaults alone.                                                     #
+#                                                                               #
+# Commented defaults or examples are given. They can be changed here, or        #
+# overridden using a fragment placed in ./conf.d/                               #
+#################################################################################
+
+### GENERAL ###
+
+# The location of the local cache/working directory. This can become quite
+# large, so make sure it is somewhere with plenty of space.
+#
+#cache_dir = $cfg->{cache_dir}
+
+# The directory to use for apt-cacher access and error logs.
+# The access log records every request in the format:
+#
+# date-time|PID|client IP address|HIT/HEAD/MISS/EXPIRED/NOTMOD|object size|object name
+#
+# The error log is slightly more free-form, and is also used for debug messages
+# if debug mode is turned on.
+#
+#log_dir = $cfg->{log_dir}
+
+# The email address of the administrator is displayed in the info page and
+# traffic reports.
+#
+#admin_email = $cfg->{admin_email}
+
+# Daemon port setting, only useful in stand-alone mode. You need to run the
+# daemon as root to use privileged ports (<1024).
+#
+# For standalone daemon auto startup settings please edit the file
+# /etc/default/apt-cacher.
+#
+#daemon_port = $cfg->{daemon_port}
+
+# Optional settings, user and group to run the daemon as. Make sure they have
+# sufficient permissions within the cache and log directories. Comment the
+# settings to run apt-cacher as the invoking user.
+#
+group = www-data
+user = www-data
+
+# optional setting, binds the listening daemon to specified IP(s).
+#
+#daemon_addr = localhost
+
+# Apt-cacher can be used in offline mode which just uses files already cached,
+# but doesn't make any new outgoing connections by setting this to 1.
+#
+#offline_mode = 1
+
+# To enable data checksumming, install libberkeleydb-perl and set this option to
+# 1. Then wait until the Packages/Sources files have been refreshed once (and so
+# the database has been built up). You can also delete them from the cache to
+# trigger the database update.
+#
+#checksum = 1
+
+# Importing checksums from new index files into the checksum database can cause
+# high CPU usage on slower systems. This option sets a limit to the number of
+# index files that are imported simultaneously, thereby limiting CPU load
+# average, but, possibly, taking longer. Set to 0 for no limit.
+#
+#concurrent_import_limit = 1
+
+# CGI mode is deprecated.
+#
+# Send a 410 (Gone) HTTP message with the specified text when accessed via
+# CGI. Useful to tell users to adapt their sources.list files when the
+# apt-cacher server is being relocated (via apt-get's error messages while
+# running "update")
+#
+#cgi_advise_to_use = Please use http://cacheserver:3142/ as apt-cacher access URL
+#cgi_advise_to_use = Server relocated. To change sources.list, run \
+#  perl -pe "s,/apt-cacher\??,:3142," -i /etc/apt/sources.list
+#
+# To further facilitate migration from CGI to daemon mode this setting will
+# automatically redirect incoming CGI requests to the specified daemon URL.
+#
+#cgi_redirect = http://localhost:3142/
+
+### UPSTREAM PROXY ###
+
+# Apt-cacher can pass all its requests to an external HTTP proxy like Squid,
+# which could be very useful if you are using an ISP that blocks port 80 and
+# requires all web traffic to go through its proxy. The format is
+# 'http://[user[:password]@]hostname:port', eg: 'http://proxy.example.com:8080'.
+#
+#http_proxy = proxy.example.com:8080
+
+# This sets the interface to use for the upstream connection.
+# Specify an interface name, an IP address or a host name.
+# If unset, the default route is used.
+#
+#interface = eth0
+
+# Rate limiting sets the maximum bandwidth in bytes per second to use for
+# fetching packages. Use 0 value for no rate limiting.
+#
+#limit = $cfg->{limit}
+
+### ACCESS and SECURITY ###
+
+# Server mapping - this allows mapping virtual paths that appear in the access
+# URL to real server names. The syntax is the part of the beginning of the URL
+# to replace (the key), followed by a list of mirror URLs, all space
+# separated. Multiple mappings are separated by semicolons or commas, as
+# usual. Note that you need to specify all keys (or use the '%PATH_MAP%'
+# shorthand) in the allowed_locations option, if you make use of it. Also note
+# that the paths should not overlap each other.
+#
+# The keys are also used to separate the caching of multiple distributions
+# within a single apt-cacher instance if distinct_namespaces is also set.
+#
+#path_map = debian ftp.uni-kl.de/pub/linux/debian ftp2.de.debian.org/debian ; \
+#           ubuntu archive.ubuntu.com/ubuntu ; \
+#           security security.debian.org/debian-security ftp2.de.debian.org/debian-security
+#
+# There are 2 default internal path_map settings for the Debian and Ubuntu
+# changelog servers which will be merged with this option.
+#
+#  debian-changelogs packages.debian.org metadata.ftp-master.debian.org
+#  ubuntu-changelogs changelogs.ubuntu.com
+#
+# These can be overridden by specifying an alternative mirror for that key, or
+# deleted by just specifying the key with no mirror.
+#
+#path_map = debian-changelogs
+
+# From version 1.7.0 there is support for caching multiple distributions (eg
+# Debian and Ubuntu) within the same apt-cacher instance. Enable this by setting
+# distinct_namespaces to 1. Distribution package files are cached in separate
+# directories whose names are derived from the relevant path_map key. So
+# generally there will be a path_map key => server(s) setting for each
+# distribution that is cached. Having enabled distinct_namespaces, existing
+# packages can be imported into the correct directory by running (as root)
+#
+#  /usr/share/apt-cacher/apt-cacher-import.pl -u {cache_dir}/packages
+#
+#distinct_namespaces = $cfg->{distinct_namespaces}
+
+# If the apt-cacher machine is directly exposed to the Internet and you are
+# worried about unauthorised machines fetching packages through it, you can
+# specify a list of IP addresses which are allowed to use it and another list of
+# IP addresses which are prohibited.
+#
+# Localhost (127.0.0.1/8, ::ffff:127.0.0.1/8 and ::1) are always allowed. Other
+# addresses must be matched by allowed_hosts and not by denied_hosts to be
+# permitted to use the cache. Setting allowed_hosts to "*" means "allow all"
+# (which was the default before version 1.7.0). The default is now ''.
+#
+# The format is a comma-separated list containing addresses, optionally with
+# masks (like 10.0.0.0/24 or 10.0.0.0/255.255.255.0), or ranges of addresses
+# (two addresses separated by a hyphen with no masks, specifying a valid subnet,
+# like '192.168.0.0-63' or '192.168.0.0 - 192.168.0.63') or a DNS resolvable
+# hostname. The corresponding IPv6 options allowed_hosts_6 and denied_hosts_6
+# are deprecated (but will still be honoured, if set). IPv6 addresses can now be
+# added directly to allowed_hosts and denied_hosts along with IPv4 addresses.
+#
+#allowed_hosts = *
+#denied_hosts = $cfg->{denied_hosts}
+
+# Only allow HTTPS/SSL proxy CONNECT to hosts or IPs which match an item in this
+# list.
+#
+#allowed_ssl_locations =
+
+# Only allow HTTPS/SSL proxy CONNECT to ports which match an item in this list.
+# Adding further items to this option can pose a significant security risk. DO
+# NOT do it unless you understand the full implications.
+#
+#allowed_ssl_ports = $cfg->{allowed_ssl_ports}
+
+# Optional setting to limit access to upstream mirrors based on server names in
+# the URLs. This is matched before any path_map settings are expanded. If
+# '%PATH_MAP%' is included in this option, it will be expanded to the keys of
+# the path_map setting. Note these items are strings, not regexps.
+#
+#allowed_locations = ftp.uni-kl.de, ftp.nerim.net, debian.tu-bs.de/debian
+#allowed_locations = ftp.debian.org, %PATH_MAP%
+#allowed_locations = %PATH_MAP%
+
+# List of architectures that are used to expand %VALID_ARCHS% in *_files_regexps
+# (see below).
+#
+#supported_archs = i386, amd64
+#supported_archs = $cfg->{supported_archs}
+
+# List of Ubuntu release names used to expand %VALID_UBUNTU_RELEASE_NAMES% in
+# *_files_regexp (see below). This is required to allow the Ubuntu installer to
+# fetch upgrade information. As the naming scheme is unpredictable, new release
+# names need to be added to this list.
+#
+#ubuntu_release_names = $cfg->{ubuntu_release_names}
+
+### HOUSEKEEPING ###
+
+# Apt-cacher can generate usage reports every 24 hours if you set this directive
+# to 1. You can view the reports in a web browser by pointing to your cache
+# machine with 'report' on the end, like this:
+#
+#   http://yourcache.example.com:3142/report
+#
+# Generating reports is very fast even with many thousands of logfile lines, so
+# you can safely turn this on without creating much additional system load.
+#
+#generate_reports = $cfg->{generate_reports}
+
+# Apt-cacher can clean up its cache directory every 24 hours if you set this
+# directive to 1. Cleaning the cache can take some time to run (generally in the
+# order of a few minutes) and removes all package files that are not mentioned
+# in any existing 'Packages' lists. This has the effect of deleting packages
+# that have been superseded by an updated 'Packages' list.
+#
+#clean_cache = $cfg->{clean_cache}
+
+### INTERNALS ###
+
+# Debug mode makes apt-cacher write a lot of extra debug information to the
+# error log (whose location is defined with the 'log_dir' directive). Leave
+# this off unless you need it, or your error log will get very big. Acceptable
+# values are 0 or an integer up to 7. See man apt-cacher (1) for further
+# details.
+#
+#debug = $cfg->{debug}
+
+# You shouldn't need to change anything below here. If you do, ensure you
+# understand the full implications of doing so.
+
+# As a convenience the following strings are expanded within the *_files_regexp
+# settings at runtime:
+#  %VALID_UBUNTU_RELEASE_NAMES% --> A regexp derived from ubuntu_release_names
+#  %VALID_ARCHS%                --> A regexp derived from supported_archs
+#  %VALID_PACKAGE_NAME%         --> A regexp matching valid package names
+#  %VALID_VERSION%              --> A regexp matching valid package versions
+
+# Permitted package files -- this is a perl regular expression which matches all
+# package-type files (files that are uniquely identified by their filename).
+#
+#package_files_regexp = $cfg->{package_files_regexp}
+
+# Permitted APT pdiff files -- this is a perl regular expression which matches
+# APT pdiff files which are ed(1) scripts used to patch index files rather than
+# redownloading the whole file afresh.
+#
+#pdiff_files_regexp = $cfg->{pdiff_files_regexp}
+
+# Permitted Index files -- this is the perl regular expression which matches all
+# index-type files (files that are uniquely identified by their full path and
+# need to be checked for freshness).
+#
+#index_files_regexp = $cfg->{index_files_regexp}
+
+# Permitted installer files -- this is the perl regular expression which matches
+# all installer-type files (files that are uniquely identified by their full
+# path but don't need to be checked for freshness). These are typically files
+# used by Debian/Ubuntu Installer, Debian Live and apt.
+#
+#installer_files_regexp = $cfg->{installer_files_regexp}
+
+# Perl regular expression which matches Index files from which to read checksums
+# if checksum is enabled.
+#
+#checksum_files_regexp = $cfg->{checksum_files_regexp}
+
+# Perl regular expression which matches files for which checksum validation is
+# not performed. NB files matched by installer_files_regexp are skipped
+# automatically and do not need to be added here as well.
+#
+#skip_checksum_files_regexp = $cfg->{skip_checksum_files_regexp}
+
+# Perl regular expression which matches URLs to be permitted for Debian bugs
+# SOAP requests as made by apt-listbugs(1).
+#
+#soap_url_regexp = $cfg->{soap_url_regexp}
+EOF
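generate-conffile.pl builds the whole template as one interpolated here-document: read_config('/dev/null') yields the library's pure defaults (nothing can be read from /dev/null), and each $cfg->{...} reference above is expanded when the heredoc is printed. The technique in miniature (the two keys shown are stand-ins for the full default set):

    #!/usr/bin/perl
    # Sketch: interpolate runtime defaults into a commented config template,
    # the same heredoc trick generate-conffile.pl uses.
    use strict;
    use warnings;

    # Stand-in for read_config('/dev/null'); the real hash holds every default.
    my $cfg = { daemon_port => 3142, cache_dir => '/var/cache/apt-cacher' };

    print <<"EOF"
    # Daemon port setting, only useful in stand-alone mode.
    #
    #daemon_port = $cfg->{daemon_port}

    # The location of the local cache/working directory.
    #
    #cache_dir = $cfg->{cache_dir}
    EOF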
diff -Nru apt-cacher-1.7.10/scripts/test.pl apt-cacher-1.7.11/scripts/test.pl
--- apt-cacher-1.7.10/scripts/test.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/scripts/test.pl	2014-08-26 10:08:27.000000000 +0000
@@ -0,0 +1,228 @@
+#! /usr/bin/perl
+
+# This is a test script for apt-cacher
+
+use strict;
+use warnings;
+
+use FindBin;
+use Test::More qw(no_plan);
+
+use Test::WWW::Mechanize;
+use Net::SSL;
+use Crypt::SSLeay; # not required
+
+my $mech = Test::WWW::Mechanize->new;
+
+# goto SKIP;
+
+# Compilation and perlcritic
+foreach (glob("$FindBin::Bin/../{apt-cacher,apt-cacher-*.pl,lib/*.pl}")) {
+    next if /-cgi/;
+    ok(system('perl', '-wc', $_) == 0, "Syntax $_") || exit;
+    ok(system('perlcritic', '--verbose', '10', $_) == 0, "Perlcritic $_") || exit;
+}
+
+# Mirror mode
+foreach my $server
+  ('http://localhost:3143/',
+   'http://localhost:3142/',
+   'http://localhost:3124/',
+   glob('{http://localhost/cgi-bin/,http://localhost:81/}' # Lighttpd and Apache
+        . '{apt-cacher-src/,apt-cacher-src\?,apt-cacher-src\?/}')
+  )
+  {
+      #Expected Success
+      foreach my $test
+        (
+         ['' => 'usage', 'Apt-cacher version'],
+         ['report' => 'report', 'Apt-cacher traffic report'],
+         ['ftp.uk.debian.org/debian/dists/stable/Release.gpg' => 'basic', '^$'],
+         ['debian/dists/stable/Release.gpg' => 'basic with host expansion', '^$'],
+         # ['http://ftp.uk.debian.org/debian/dists/stable/Release.gpg' => 'embedded http://'],
+         # ['ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg' => 'embedded ftp://']
+        )
+        {
+            $mech->head_ok( $server.$test->[0], "HEAD $server ($test->[1])") || exit;
+            $mech->get_ok( $server.$test->[0], "GET $server ($test->[1])") || exit;
+            $mech->title_like(qr($test->[2])) || print 'Got: ' . $mech->response->content , ", Expected: 200\n" && exit;
+        }
+
+      # Expected Failures
+
+      # Expect 404
+      foreach my $test
+        (
+         ['debian/dists/stabl/Release.gpg' => 'N/A path'])
+        {
+            $mech->get($server.$test->[0]);
+            ok($mech->status == 404, "$server ($test->[1])") || do {
+                print 'Got :', $mech->status, ", Expected: 404\n";
+                $mech->dump_headers;
+                exit;
+            }
+        }
+
+      # Expect 403
+      foreach my $test
+        (
+         ['debian/dists/../stable/Release.gpg' => 'Bad path'],
+         ['../debian/dists/stable/Release.gpg' => 'Bad host'],
+         ['debian/dists/stable/' => 'No filename'],
+         ['debian/dists/stable/Release.not.permitted' => 'Bad filename']
+        )
+        {
+            my $expected = 403;
+            $expected = 404 if ($test->[0] =~ /\.\./ && $server =~ m#/apt-cacher-src/# ); # Because CGI collapses the /../
+            $mech->get($server.$test->[0]);
+            ok($mech->status == $expected, "$server ($test->[1])") || do {
+                print 'Got :', $mech->status, ", Expected: $expected\n";
+                $mech->dump_headers;
+                exit;
+            }
+        }
+
+      # Expect 502/504
+      foreach my $test
+        (
+         ['sillydnswithnochance/debian/dists/stable/Release.gpg' => 'N/A DNS']
+        )
+        {
+            my $expected = 504;
+            $expected = 502 if ( $server =~ m\http://localhost:31(?:43|24)/\ ); # No upstream proxy
+            $mech->get($server.$test->[0]);
+            ok($mech->status == $expected, "$server ($test->[1])") || do {
+                print 'Got :', $mech->status, ", Expected: $expected\n";
+                $mech->dump_headers;
+                exit;
+            }
+        }
+  }
+
+SKIP:
+
+# Proxy mode
+
+$mech->proxy(['http', 'ftp', 'https'], 'http://localhost:3143/');
+foreach my $test
+  (
+   ['http://ftp.uk.debian.org/debian/dists/stable/Release.gpg' => 'HTTP Proxy'],
+   ['http://debian/dists/stable/Release.gpg' => 'HTTP Proxy with host expansion'],
+   ['https://www.hindley.org.uk/~mark/debian/Packages.gz' => 'HTTPS Proxy GET'],
+   ['ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg' => 'FTP Proxy']
+  )
+  {
+      $mech->get_ok($test->[0], $test->[1]) || exit;
+  }
+
+foreach my $test
+  (
+   ['http://sillydnswithnochance/debian/dists/stable/Release.gpg' => 'HTTP Proxy Bad DNS', 504],
+   ['http://ftp.uk.debian.org.uk/debian/../dists/stable/Release.gpg' => 'HTTP Proxy Bad URI', 403],
+  )
+  {
+      $mech->get($test->[0]);
+      ok($mech->status == $test->[2], $test->[1]) || do {
+          print 'Got :', $mech->status, ", Expected: $test->[2]\n";
+          $mech->dump_headers;
+          exit;
+      }
+  }
+$mech->proxy(['http', 'ftp', 'https'], undef);
+
+foreach my $test
+  (
+   ['https://www.hindley.org.uk/~mark/debian/Packages.gz' => 'HTTPS Proxy CONNECT', 200],
+#   ['https://www.hindley.org.uk:22/~mark/debian/Packages.gz' => 'HTTPS Proxy CONNECT: Bad port', 403],
+#   ['https://titan.hindleynet/~mark/debian/Packages.gz' => 'HTTPS Proxy CONNECT: Bad host', 403]
+  )
+  {
+      local $ENV{HTTPS_PROXY} = 'http://localhost:3143';
+      $mech->ssl_opts('verify_hostname' => 0);
+      $mech->get($test->[0]);
+      ok($mech->status == $test->[2], $test->[1]) || do {
+          print 'Got :', $mech->status, ", Expected: $test->[2]\n";
+          $mech->dump_headers;
+          exit;
+      }
+  }
+
+
+# Individual tests
+
+# SOAP post
+$mech->proxy(['http'], 'http://localhost:3143/');
+$mech->post('http://bugs.debian.org:80/cgi-bin/soap.cgi',
+            Content_Type => 'text/xml; charset=utf-8',
+            Content => '
+
+
+
+
+severity
+
+critical
+grave
+serious
+
+package
+
+apt-cacher
+
+
+
+');
+ok($mech->response->is_success, 'SOAP POST');
+
+# Circular Proxy request
+$mech->proxy(['http'], 'http://localhost:3142/');
+$mech->get_ok('http://mercury.hindleynet:3142/debian/dists/stable/Release.gpg', 'HTTP Proxy circular request');
+$mech->proxy(['http'], undef);
+
+# Cache-Control: no-cache
+$mech->get('http://localhost:3143/debian/dists/stable/Release', 'Cache-Control' => 'no-cache');
+ok($mech->response->current_age < 60, 'Cache-Control: no-cache') || do {
+    print 'Got age:', $mech->response->current_age, "\n";
+    $mech->dump_headers;
+};
+
+# Cache-Control: max-age
+$mech->get('http://localhost:3143/debian/dists/stable/Release', 'Cache-Control' => 'max-age=0');
+ok($mech->response->current_age < 60, 'Cache-Control: max-age') || do {
+    print 'Got age:', $mech->response->current_age, "\n";
+    $mech->dump_headers;
+};
+
+# If-Modified-Since
+$mech->head('http://localhost:3142/debian/dists/stable/Release', 'If-Modified-Since' => HTTP::Date::time2str);
+ok($mech->status == 304, 'If-Modified-Since') || do {
+    print 'Got :', $mech->status, "\n";
+    $mech->dump_headers;
+};
+
+# Range
+$mech->head('http://localhost:3142/debian/dists/stable/Release', 'Range' => 'bytes=0-23' );
+ok($mech->status == 206 && # Partial content
+   $mech->response->content_length == 24, # Length is 0-23 inclusive
+   'Range') || $mech->dump_headers;
+
+# If-Range
+$mech->head('http://localhost:3142/debian/dists/stable/Release', 'If-Range' => HTTP::Date::time2str, 'Range' => 'bytes=0-23' ); # Not Mod => Range returned
+ok($mech->status == 206 && # Partial content
+   $mech->response->content_length == 24, # Length is 0-23 inclusive
+   'If-Range Not Modified') || $mech->dump_headers;
+
+$mech->head('http://localhost:3142/debian/dists/stable/Release', 'If-Range' => HTTP::Date::time2str ); # Missing Range => Whole returned
+ok($mech->status == 200, # Complete content
+   'If-Range without Range') || $mech->dump_headers;
+
+$mech->head('http://localhost:3142/debian/dists/stable/Release', 'If-Range' => HTTP::Date::time2str(0) , 'Range' => 'bytes=0-23' ); # Mod => Whole returned
+ok($mech->status == 200, # Complete content
+   'If-Range Modified') || $mech->dump_headers;
+
+# Package
+$mech->get('http://localhost:3142/titan/~mark/debian/timeoutd_1.5-10moh_i386.deb', 'Cache-Control' => 'no-cache');
+ok($mech->status == 200, 'Package fetch without checksum') || do {
+    print 'Got :', $mech->status, "\n";
+    $mech->dump_headers;
+};
+$mech->get('http://localhost:3143/titan/~mark/debian/timeoutd_1.5-10moh_i386.deb', 'Cache-Control' => 'no-cache');
+ok($mech->status == 200, 'Package fetch with checksum') || do {
+    print 'Got :', $mech->status, "\n";
+    $mech->dump_headers;
+}
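Most of test.pl needs a fully configured environment (local CGI setups, the author's titan/mercury hosts), but individual assertions lift out cleanly. A standalone reduction of the Range check (the URL assumes a running apt-cacher on localhost:3142 with a cached Release file):

    #!/usr/bin/perl
    # Sketch: the byte-range assertion from scripts/test.pl on its own.
    use strict;
    use warnings;
    use Test::More tests => 2;
    use Test::WWW::Mechanize;

    my $mech = Test::WWW::Mechanize->new;
    my $url = 'http://localhost:3142/debian/dists/stable/Release'; # assumed instance

    $mech->head($url, 'Range' => 'bytes=0-23');
    is($mech->status, 206, 'partial content, not 200');
    is($mech->response->content_length, 24, 'bytes 0-23 inclusive is 24 bytes');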
diff -Nru apt-cacher-1.7.10/scripts/update-manpage-regexps.pl apt-cacher-1.7.11/scripts/update-manpage-regexps.pl
--- apt-cacher-1.7.10/scripts/update-manpage-regexps.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/scripts/update-manpage-regexps.pl	2012-02-16 00:19:51.000000000 +0000
@@ -0,0 +1,29 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use lib '/usr/src/apt-cacher/src/lib';
+
+require('apt-cacher.pl');
+
+my $cfg = read_config('/dev/null'); # Get defaults with no config file
+my $re = join ' ', $cfg->{ubuntu_release_names};
+
+open(my $input, '<', '/usr/src/apt-cacher/src/debian/apt-cacher.8') || die $!;
+
+while (<$input>) {
+    if (/$re/) {
+        print "$_\n";
+        next;
+    }
+    foreach my $key (keys %$cfg) {
+        next unless $key =~ 'regexp';
+#       s/^.BI? "?$key\s+\[.*$/.B $key [$cfg->{$key}]/
+        s/^.BI? "?$key\s+\[.*$/.BI "$key [" "see default \/etc\/apt\-cacher\/apt\-cacher.conf" "]"/
+          && s/\\/\\(rs/g # groff printable backslashes
+          && s/\|/|\\:/g  # Insert groff line break positions
+          && last;
+    }
+    print;
+}
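The chained substitutions in update-manpage-regexps.pl make regexp defaults printable under groff: backslashes become the \(rs glyph and each alternation bar gains a \: break point so long patterns can wrap in the rendered manpage. Applied to a sample value in isolation (the input regexp is illustrative, not a real apt-cacher default):

    #!/usr/bin/perl
    # Sketch: the two groff-escaping substitutions applied to one string.
    use strict;
    use warnings;

    my $groff = '\.(?:deb|udeb)$';   # illustrative regexp default
    $groff =~ s/\\/\\(rs/g;          # groff printable backslashes
    $groff =~ s/\|/|\\:/g;           # insert groff line break positions
    print "$groff\n";                # prints: \(rs.(?:deb|\:udeb)$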
"?$key\s+\[.*$/.BI "$key [" "see default \/etc\/apt\-cacher\/apt\-cacher.conf" "]"/ + && s/\\/\\(rs/g # groff printable backslashes + && s/\|/|\\:/g # Insert groff line break positions + && last; + } + print; +} diff -Nru apt-cacher-1.7.10/test/benchmark.pl apt-cacher-1.7.11/test/benchmark.pl --- apt-cacher-1.7.10/test/benchmark.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/test/benchmark.pl 2014-05-25 14:59:44.000000000 +0000 @@ -0,0 +1,22 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +use Benchmark qw(:all); + + +cmpthese(-1, { + 'alternate' => + sub { + map {(m!^[^:/?#]+://! ? '' :"http://") . $_ } my $s='test' + }, + 'alternate with group' => + sub { + map {(m!^(?:[^:/?#])+://! ? '' :"http://") . $_ } my $s='test' + }, + 'negative look ahead' => + sub { + map { s%^(?![^:/?#]+://)%http://%; $_ } my $s='test' + }, + }); diff -Nru apt-cacher-1.7.10/test/bug.pl apt-cacher-1.7.11/test/bug.pl --- apt-cacher-1.7.10/test/bug.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/test/bug.pl 2011-11-07 10:49:11.000000000 +0000 @@ -0,0 +1,42 @@ +#!/usr/bin/perl +use strict; +use warnings; + +use WWW::Curl::Easy; +use WWW::Curl::Share; + +my $count = 5; +while ($count--) { + if (my $pid = fork) { + &doloop; + } + else { + &doloop; + } +} +exit; + +INIT { + my $curlsh = new WWW::Curl::Share; + $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS); + print "Share is $curlsh\n"; + + sub doloop + { + my $curl; + + unless ($curl) { + $curl = new WWW::Curl::Easy; + $curl->setopt(CURLOPT_SHARE, $curlsh); + + $curl->setopt(CURLOPT_VERBOSE, 1); + $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1); + $curl->setopt(CURLOPT_CONNECT_ONLY, 1); + $curl->setopt(CURLOPT_FORBID_REUSE, 1); + $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org'); + } + $curl->perform; + } +} + + diff -Nru apt-cacher-1.7.10/test/curlftp.pl apt-cacher-1.7.11/test/curlftp.pl --- apt-cacher-1.7.10/test/curlftp.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/test/curlftp.pl 2011-11-07 10:49:11.000000000 +0000 @@ -0,0 +1,17 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +use WWW::Curl::Easy; + +my $curl = WWW::Curl::Easy->new; +$curl->setopt(CURLOPT_VERBOSE, 1); +$curl->setopt(CURLOPT_FILETIME, 1); +$curl->setopt(CURLOPT_FTP_FILEMETHOD, 2); # CURLFTPMETHOD_NOCWD +$curl->setopt(CURLOPT_FTPPORT, '-'); +$curl->setopt(CURLOPT_FTP_USE_EPSV, 1); +$curl->setopt(CURLOPT_URL, 'ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg'); + +$curl->perform; + diff -Nru apt-cacher-1.7.10/test/curlmulti.pl apt-cacher-1.7.11/test/curlmulti.pl --- apt-cacher-1.7.10/test/curlmulti.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/test/curlmulti.pl 2011-11-07 10:49:11.000000000 +0000 @@ -0,0 +1,73 @@ +#!/usr/bin/perl +use strict; +use warnings; + +use WWW::Curl::Easy; +use WWW::Curl::Multi; + + +pipe(my $libcurl, my $daemon)|| die $!; + +unless (my $libcurlpid = fork) { + # Child -- libcurl thread + close $daemon; + + my %easy; + my $curlm = WWW::Curl::Multi->new; + my $curl_id = $$; # This should be a handle unique id. 
diff -Nru apt-cacher-1.7.10/test/curlshare.pl apt-cacher-1.7.11/test/curlshare.pl
--- apt-cacher-1.7.10/test/curlshare.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/curlshare.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,73 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+use WWW::Curl::Easy;
+use WWW::Curl::Multi;
+
+
+pipe(my $libcurl, my $daemon)|| die $!;
+
+unless (my $libcurlpid = fork) {
+    # Child -- libcurl thread
+    close $daemon;
+
+    my %easy;
+    my $curlm = WWW::Curl::Multi->new;
+    my $curl_id = $$; # This should be a handle unique id.
+    my $active_handles = 0;
+
+    # Loop requests
+    while (<$libcurl>) {
+        print "Got request $_\n";
+        my $curl = new WWW::Curl::Easy;
+        $easy{$curl_id} = $curl; # Register handle
+        $curl->setopt(CURLOPT_PRIVATE,$curl_id); # Assign Multi ID
+        # do the usual configuration on the handle
+
+        $curl->setopt(CURLOPT_VERBOSE, 1);
+        $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
+        $curl->setopt(CURLOPT_NOBODY, 1);
+
+        $curl->setopt(CURLOPT_URL, $_);
+
+        # Add easy handles to multi
+        $curlm->add_handle($curl);
+        $active_handles++;
+
+        while (my $active_transfers = $curlm->perform) {
+            if ($active_transfers != $active_handles) {
+                while (my ($id,$return_value) = $curlm->info_read) {
+                    if ($id) {
+                        $active_handles--;
+                        my $actual_easy_handle = $easy{$id};
+                        # do the usual result/error checking routine here
+                        # ...
+                        # letting the curl handle get garbage collected, or we leak memory.
+                        delete $easy{$id};
+                    }
+                }
+            }
+        }
+    }
+    exit;
+}
+
+close $libcurl;
+if (my $pid = fork) {
+    &doloop;
+}
+else {
+    &doloop;
+}
+
+sub doloop
+{
+    my $n=4;
+    while ($n--) {
+        print "$$: Sending request\n";
+        print $daemon "http://ftp.us.debian.org\n";
+    }
+}
+
+
diff -Nru apt-cacher-1.7.10/test/db-cache.pl apt-cacher-1.7.11/test/db-cache.pl
--- apt-cacher-1.7.10/test/db-cache.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/db-cache.pl	2012-05-03 10:47:36.000000000 +0000
@@ -0,0 +1,78 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+
+use BerkeleyDB;
+
+our $cfg;
+my $count=2;
+
+sub sig_handler {
+    warn "Got SIG@_. Exiting gracefully!\n" if $cfg->{debug};
+    exit 1;
+}
+
+sub db {
+    for ('INT', 'TERM', 'PIPE', 'QUIT', 'HUP', 'SEGV') {
+        $SIG{$_} = \&sig_handler unless $SIG{$_};
+    }
+
+    my $env = new BerkeleyDB::Env
+      -Home => '/tmp',
+      -Flags => DB_CREATE | DB_INIT_MPOOL | DB_INIT_CDB,
+      -ErrFile => *STDERR,
+      -ThreadCount => 64,
+      -ErrPrefix => "[$$]:"
+        or die $BerkeleyDB::Error;
+
+    $env->set_isalive();
+    if ($env->failchk == DB_RUNRECOVERY) {
+        warn "Failed thread detected.\n";
+    }
+
+    my $dbh = new BerkeleyDB::Btree
+      -Filename => '/tmp/test.db',
+      -Flags => DB_CREATE,
+      -Env => $env
+        or die $BerkeleyDB::Error;
+
+    return $dbh;
+}
+
+sub fetch_store {
+    my $cpid;
+    my $dbh=db();
+    warn "[$$]: Init DB in fetch_store\n";
+
+
+
+    return 1;
+}
+
+
+sub return_file {
+    my $dbh=$_[0];
+    warn "[$$]: Init DB in return_file\n";
+    $dbh->db_put('child', $$) && die $!;
+    return 1;
+}
+
+warn "[$$]: Parent\n";
+
+while ($count--) {
+    my $pid = fork;
+    die $! unless defined $pid;
+    if ($pid == 0) {
+        my $write = "$$: $count";
+        warn "[$$]: Write $write\n";
+        db()->db_put('child', $write) && die $!;
+        exit;
+    }
+    else {
+        waitpid $pid, 0;
+        db()->db_get('child', my $read) && die $!;
+        warn "[$$]: Read $read\n";
+    }
+}
Exiting gracefully!\n" if $cfg->{debug}; + exit 1; +} + +sub db { + for ('INT', 'TERM', 'PIPE', 'QUIT', 'HUP', 'SEGV') { + $SIG{$_} = \&sig_handler unless $SIG{$_}; + } + + my $env = new BerkeleyDB::Env + -Home => '/tmp', + -Flags => DB_CREATE | DB_INIT_MPOOL | DB_INIT_CDB, + -ErrFile => *STDERR, + -ThreadCount => 64, + -ErrPrefix => "[$$]:" + or die $BerkeleyDB::Error; + + $env->set_isalive(); + if ($env->failchk == DB_RUNRECOVERY) { + warn "Failed thread detected.\n"; + } + + my $dbh = new BerkeleyDB::Btree + -Filename => '/tmp/test.db', + -Flags => DB_CREATE, + -Env => $env + or die $BerkeleyDB::Error; + + return $dbh; +} + +sub fetch_store { + my $cpid; + my $dbh=db(); + warn "[$$]: Init DB in fetch_store\n"; + + + + return 1; +} + + +sub return_file { + my $dbh=$_[0]; + warn "[$$]: Init DB in return_file\n"; + $dbh->db_put('child', $$) && die $!; + return 1; +} + +warn "[$$]: Parent\n"; + +while ($count--) { + my $pid = fork; + die $! unless defined $pid; + if ($pid == 0) { + my $write = "$$: $count"; + warn "[$$]: Write $write\n"; + db()->db_put('child', $write) && die $!; + exit; + } + else { + waitpid $pid, 0; + db()->db_get('child', my $read) && die $!; + warn "[$$]: Read $read\n"; + } +} diff -Nru apt-cacher-1.7.10/test/db.pl apt-cacher-1.7.11/test/db.pl --- apt-cacher-1.7.10/test/db.pl 1970-01-01 00:00:00.000000000 +0000 +++ apt-cacher-1.7.11/test/db.pl 2011-11-07 10:49:11.000000000 +0000 @@ -0,0 +1,80 @@ +#!/usr/bin/perl + +use strict; +use warnings; + + +use BerkeleyDB; + +our $cfg; +my $count=2; + +sub sig_handler { + warn "Got SIG@_. Exiting gracefully!\n" if $cfg->{debug}; + exit 1; +} + +sub db { + for ('INT', 'TERM', 'PIPE', 'QUIT', 'HUP', 'SEGV') { + $SIG{$_} = \&sig_handler unless $SIG{$_}; + } + + my $env = new BerkeleyDB::Env + -Home => '/tmp', + -Flags => DB_CREATE | DB_INIT_MPOOL | DB_INIT_CDB, + -ErrFile => *STDERR, + -ThreadCount => 64, + -ErrPrefix => "[$$]:" + or die $BerkeleyDB::Error; + + $env->set_isalive(); + if ($env->failchk == DB_RUNRECOVERY) { + warn "Failed thread detected.\n"; + } + + $SIG{ALRM} = sub { + $env->failchk; + alarm 1; + }; + alarm 1; + + my $dbh = new BerkeleyDB::Btree + -Filename => '/tmp/test.db', + -Flags => DB_CREATE, + -Env => $env + or die $BerkeleyDB::Error; + undef $env; + return $dbh; +} + +sub fetch_store { + my $cpid; + my $dbh=db(); + warn "[$$]: Init DB in fetch_store\n"; + + $dbh->db_put('test', 0) && die $!; + $dbh->db_get('child', $cpid) && die $!; + return 1; +} + + +sub return_file { + my $dbh=$_[0]; + warn "[$$]: Init DB in return_file\n"; + $dbh->db_put('child', $$) && die $!; + return 1; +} + +while ($count--) { + my $pid = fork; + die $! 
diff -Nru apt-cacher-1.7.10/test/dns.pl apt-cacher-1.7.11/test/dns.pl
--- apt-cacher-1.7.10/test/dns.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/dns.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,18 @@
+#!/usr/local/bin/perl -- # -*-Perl-*-
+
+use Net::DNS;
+
+while (1) {
+    my $res = Net::DNS::Resolver->new;
+    my $query = $res->search("ftp.us.debian.org");
+
+    if ($query) {
+        foreach my $rr ($query->answer) {
+            next unless $rr->type eq "A";
+            print $rr->address, "\n";
+            last;
+        }
+    } else {
+        warn "query failed: ", $res->errorstring, "\n";
+    }
+}
diff -Nru apt-cacher-1.7.10/test/extract_sums.pl apt-cacher-1.7.11/test/extract_sums.pl
--- apt-cacher-1.7.10/test/extract_sums.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/extract_sums.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use lib '/usr/src/apt-cacher/src';
+
+require 'lib/apt-cacher.pl';
+
+our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
+
+my %h;
+
+extract_sums('/var/cache/apt-cacher/packages/debian_dists_experimental_main_binary-i386_Packages.diff_Index', \%h);
+extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_unstable_contrib_binary-i386_Packages.bz2', \%h);
+extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_stable_contrib_source_Sources.bz2', \%h);
+extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_stable_Release', \%h);
+extract_sums('/var/cache/apt-cacher/packages/titan_~mark_debian_Packages.gz', \%h);
+
+foreach my $file (keys %h) {
+    print "$file:\n";
+    my $href = hashify(\$h{$file});
+    foreach (keys %$href) {
+        print "  $_: $href->{$_}\n" if $href->{$_};
+    }
+}
diff -Nru apt-cacher-1.7.10/test/fork-flock.pl apt-cacher-1.7.11/test/fork-flock.pl
--- apt-cacher-1.7.10/test/fork-flock.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/fork-flock.pl	2012-01-20 13:18:43.000000000 +0000
@@ -0,0 +1,30 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Fcntl qw':flock';
+use IO::Handle;
+
+open(my $tmpfile, "+>", undef) or die $!;
+
+flock $tmpfile, LOCK_EX or die $!;
+
+defined (my $cpid = fork) or die "Fork failed: $!";
+
+if ($cpid){
+    #open my $newfile, '<', '/dev/fd/' . $tmpfile->fileno or die $!; # Works, but not completely portable
+    open my $newfile, '<', '/dev/fd/' . $tmpfile->fileno or die $!;
+    # open my $newfile, '+>&' . $tmpfile->fileno or die $!;
+    undef $tmpfile;
+    print "$$: parent reopen\n";
+
+    flock $newfile, LOCK_SH or die $!;
+    print "$$: parent lock\n";
+}
+else {
+    sleep 2;
+    print "$$: child unlock\n";
+    flock $tmpfile, LOCK_UN or die $!;
+    print "$$: child exiting\n";
+}
diff -Nru apt-cacher-1.7.10/test/ftp.pl apt-cacher-1.7.11/test/ftp.pl
--- apt-cacher-1.7.10/test/ftp.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/ftp.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,31 @@
+#! /usr/bin/perl
+
+use strict;
+use warnings;
+use WWW::Curl::Easy;
+use WWW::Curl::Multi;
+
+
+my $curl=WWW::Curl::Easy->new;
+my $multi=WWW::Curl::Multi->new;
+$curl->setopt(CURLOPT_VERBOSE, 1);
+$curl->setopt(CURLOPT_URL, 'ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg');
+$curl->setopt(CURLOPT_FTPPORT, '-');
+open my $fh, ">/dev/tty"||die $!;
+$curl->setopt(CURLOPT_WRITEHEADER, $fh);
+$curl->setopt(CURLOPT_HEADERFUNCTION, \&callback);
+sub callback {
+    my ($chunk,$fh)=@_;
+    print $fh $chunk;
+    return length($chunk);
+}
+
+$multi->add_handle($curl);
+
+while ($multi->perform){};
+
+close $fh;
+
+while (1) {
+}
+
diff -Nru apt-cacher-1.7.10/test/load.pl apt-cacher-1.7.11/test/load.pl
--- apt-cacher-1.7.10/test/load.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/load.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use POSIX ":sys_wait_h";
+use File::Temp;
+
+for (my $count = 5; $count; $count--) {
+    defined(my $pid = fork) || die "Fork failed: $!";
+    if ($pid) {
+        sleep(rand 60);
+        next;
+    }
+    else {
+        my $dir = File::Temp::tempdir(CLEANUP => 1);
+        print "$$: Running new debootstrap\n";
+        exec("fakechroot /usr/sbin/debootstrap --download-only --variant=fakechroot stable $dir http://localhost:3142/titan:9999/debian > /dev/null");
+        die "exec() failed: $!";
+    }
+}
+
+
+sleep(10) while waitpid(-1, WNOHANG) > 0;
+
+print "Done!\n"
diff -Nru apt-cacher-1.7.10/test/mmap.pl apt-cacher-1.7.11/test/mmap.pl
--- apt-cacher-1.7.10/test/mmap.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/mmap.pl	2011-12-28 15:53:34.000000000 +0000
@@ -0,0 +1,57 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Benchmark qw(:all);
+
+
+my $file = '/var/log/syslog';
+
+cmpthese(-1, {
+    'PerlIO' =>
+    sub {
+        open(my $fh, '<', $file) || die $!;
+        while (<$fh>) {
+            /^t/;
+        }
+    },
+
+    'Pragma mmap' =>
+    sub {
+        use open IO => ':mmap';
+        open(my $fh, '<', $file) || die $!;
+        while (<$fh>) {
+            /^t/;
+        }
+    },
+
+    'Pragma mmap repeat' =>
+    sub {
+        use open IO => 'mmap';
+        open(my $fh, '<', $file) || die $!;
+#       print "$_\n" foreach PerlIO::get_layers($fh);
+        while (<$fh>) {
+            /^t/;
+        }
+    },
+
+    'mmap' =>
+    sub {
+        open(my $fh, '<:mmap', $file) || die $!;
+#       print "$_\n" foreach PerlIO::get_layers($fh);
+        while (<$fh>) {
+            /^t/;
+        }
+    },
+
+    'PerlIO repeat' =>
+    sub {
+        open(my $fh, '<', $file) || die $!;
+#       print "$_\n" foreach PerlIO::get_layers($fh);
+        while (<$fh>) {
+            /^t/;
+        }
+    },
+
+});
diff -Nru apt-cacher-1.7.10/test/multi.pl apt-cacher-1.7.11/test/multi.pl
--- apt-cacher-1.7.10/test/multi.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/multi.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,41 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+use WWW::Curl::Easy;
+use WWW::Curl::Multi;
+
+my $count = 5;
+while ($count--) {
+    if (my $pid = fork) {
+        &doloop;
+    }
+    else {
+        &doloop;
+    }
+}
+exit;
+
+INIT {
+    my $curlm = new WWW::Curl::Multi;
+
+
+    sub doloop
+    {
+        my $curl;
+
+        unless ($curl) {
+            $curl = new WWW::Curl::Easy;
+            $curlm->add_handle($curl);
+
+            $curl->setopt(CURLOPT_VERBOSE, 1);
+            $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
+            $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
+            $curl->setopt(CURLOPT_FORBID_REUSE, 1);
+            $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
+        }
+        $curl->perform;
+    }
+}
+
+
diff -Nru apt-cacher-1.7.10/test/object.pl apt-cacher-1.7.11/test/object.pl
--- apt-cacher-1.7.10/test/object.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/object.pl	2012-02-28 15:01:27.000000000 +0000
@@ -0,0 +1,17 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use File::Spec;
+use lib (File::Spec->splitpath($0))[1] .'../lib';
+use CachedRequest;
+use HTTP::Request;
+
+require('/usr/src/apt-cacher/src/lib/apt-cacher.pl');
+
+our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
+
+my $r = CachedRequest->new(HTTP::Request->new('GET' => 'http://debian/dists/stable/Release'));
+
+sleep 1;
diff -Nru apt-cacher-1.7.10/test/sendfile.pl apt-cacher-1.7.11/test/sendfile.pl
--- apt-cacher-1.7.10/test/sendfile.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/sendfile.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,10 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Sys::Syscall ':sendfile';
+
+open(my $fh, '<', \"test\n") || die $!;
+my $sent = sendfile(*STDOUT, $fh, 100)|| die $!;
+print "sent $sent\n";
diff -Nru apt-cacher-1.7.10/test/shm.pl apt-cacher-1.7.11/test/shm.pl
--- apt-cacher-1.7.10/test/shm.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/shm.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,55 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+use WWW::Curl::Easy;
+use WWW::Curl::Share;
+use Storable qw(freeze thaw);
+use Data::Dumper;
+use IPC::ShareLite;
+
+
+#my $count = 5;
+#while ($count--) {
+#    if (my $pid = fork) {
+#        &doloop;
+#    }
+#    else {
+        &doloop;
+#    }
+#}
+exit;
+
+INIT {
+    my $curlsh = new WWW::Curl::Share;
+    $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
+    print "Share is $curlsh\n";
+
+    # put it in IPC
+
+    my $shm = new IPC::ShareLite( -key => 1971,
+                                  -create => 'yes',
+                                  -destroy => 'yes' ) or die $!;
+
+    $shm->store( Data::Dumper->Dump([$curlsh],['curlsh']));
+
+    sub doloop
+    {
+        my $curl;
+#       unless ($curl) {
+            $curl = new WWW::Curl::Easy;
+            print "Thawed share is ".($shm->fetch)."\n";
+            eval($shm->fetch);
+#           $curl->setopt(CURLOPT_SHARE, (thaw($shm->fetch))[0]);
+
+            $curl->setopt(CURLOPT_VERBOSE, 1);
+            $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
+            $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
+            $curl->setopt(CURLOPT_FORBID_REUSE, 1);
+            $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
+#       }
+        $curl->perform;
+    }
+}
+
+
diff -Nru apt-cacher-1.7.10/test/signal.pl apt-cacher-1.7.11/test/signal.pl
--- apt-cacher-1.7.10/test/signal.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/signal.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,11 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+
+
+$SIG{SEGV} = sub {print "Got signal\n";};
+
+while (1){
+    sleep 100;
+}
diff -Nru apt-cacher-1.7.10/test/socket.pl apt-cacher-1.7.11/test/socket.pl
--- apt-cacher-1.7.10/test/socket.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/socket.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,33 @@
+use strict;
+use warnings;
+use Socket;
+use IO::Socket;
+use FreezeThaw qw(freeze thaw);
+use HTTP::Response;
+
+socketpair(CHILD, PARENT, AF_UNIX, SOCK_STREAM, PF_UNSPEC) or die "socketpair: $!";
+
+
+$SIG{CHLD} = 'IGNORE';
+
+CHILD->autoflush(1);
+PARENT->autoflush(1);
+
+if (my $pid = fork) {
+    close PARENT;
+#    print CHILD "Parent Pid $$ is sending\n";
+    print CHILD freeze(HTTP::Response->new(200, 'Message for code'));
+    close CHILD;
+    waitpid($pid,0);
+} else {
+    die "cannot fork: $!" unless defined $pid;
+    close CHILD;
+    while (my $line = <PARENT>) {
+        chomp $line;
+        print "Child Pid $$ just read this: `$line'\n";
+        my $obj = (thaw($line))[0];
+        print $obj->message."\n"
+    }
+    close PARENT;
+    exit;
+}
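test/socket.pl ships a serialized HTTP::Response across a socketpair with FreezeThaw, relying on newline-delimited reads, which only works while the frozen record happens to contain no newlines. A length-prefixed Storable variant avoids that fragility (a sketch, not part of the diff; single read() calls suffice for a payload this small):

    #!/usr/bin/perl
    # Sketch: length-prefixed Storable framing over a socketpair, avoiding
    # socket.pl's reliance on the frozen record containing no newlines.
    use strict;
    use warnings;
    use Socket;
    use Storable qw(nfreeze thaw);
    use HTTP::Response;

    socketpair(my $child, my $parent, AF_UNIX, SOCK_STREAM, PF_UNSPEC)
        or die "socketpair: $!";

    if (my $pid = fork) {
        close $parent;
        my $frozen = nfreeze(HTTP::Response->new(200, 'Message for code'));
        print {$child} pack('N', length $frozen), $frozen;  # 4-byte length prefix
        close $child;                                       # flushes the buffer
        waitpid($pid, 0);
    } else {
        die "cannot fork: $!" unless defined $pid;
        close $child;
        read($parent, my $len, 4) == 4 or die "short read";
        read($parent, my $frozen, unpack 'N', $len) or die "short read";
        print thaw($frozen)->message, "\n";                 # "Message for code"
        exit 0;
    }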
diff -Nru apt-cacher-1.7.10/test/thread.pl apt-cacher-1.7.11/test/thread.pl
--- apt-cacher-1.7.10/test/thread.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/thread.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,47 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+use WWW::Curl::Easy;
+use WWW::Curl::Share;
+use threads;
+use threads::shared;
+
+
+my $count = 5;
+while ($count--) {
+    threads->new(\&doloop); next;
+    if (my $pid = fork) {
+        &doloop;
+    }
+    else {
+        &doloop;
+    }
+}
+
+
+exit;
+
+INIT {
+    my $curlsh = &share(new WWW::Curl::Share);
+    $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
+
+    sub doloop
+    {
+        my $curl;
+
+        unless ($curl) {
+            $curl = new WWW::Curl::Easy;
+            $curl->setopt(CURLOPT_SHARE, $curlsh);
+
+            $curl->setopt(CURLOPT_VERBOSE, 1);
+            $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
+            $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
+            $curl->setopt(CURLOPT_FORBID_REUSE, 1);
+            $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
+        }
+        print "Share is $curlsh\n";
+        $curl->perform;
+    }
+}
+
+
diff -Nru apt-cacher-1.7.10/test/verify.pl apt-cacher-1.7.11/test/verify.pl
--- apt-cacher-1.7.10/test/verify.pl	1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.11/test/verify.pl	2011-11-07 10:49:11.000000000 +0000
@@ -0,0 +1,15 @@
+#! /usr/bin/perl
+
+use BerkeleyDB;
+
+while (1) {
+
+    alarm(1);
+    eval {
+        local $SIG{ALRM} = sub { die "Timeout\n";};
+        my $status = BerkeleyDB::db_verify
+          -Filename => '/var/cache/apt-cacher/sums.db';
+        print "Returned ".$status." \n";
+    };
+    print "$@";
+}