diff -Nru pgbackrest-2.15.1/build/lib/pgBackRestBuild/Config/Data.pm pgbackrest-2.16/build/lib/pgBackRestBuild/Config/Data.pm --- pgbackrest-2.15.1/build/lib/pgBackRestBuild/Config/Data.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/build/lib/pgBackRestBuild/Config/Data.pm 2019-08-05 16:03:04.000000000 +0000 @@ -287,6 +287,8 @@ push @EXPORT, qw(CFGOPT_REPO_S3_ENDPOINT); use constant CFGOPT_REPO_S3_HOST => CFGDEF_REPO_S3 . '-host'; push @EXPORT, qw(CFGOPT_REPO_S3_HOST); +use constant CFGOPT_REPO_S3_PORT => CFGDEF_REPO_S3 . '-port'; + push @EXPORT, qw(CFGOPT_REPO_S3_PORT); use constant CFGOPT_REPO_S3_REGION => CFGDEF_REPO_S3 . '-region'; push @EXPORT, qw(CFGOPT_REPO_S3_REGION); use constant CFGOPT_REPO_S3_TOKEN => CFGDEF_REPO_S3 . '-token'; @@ -1862,6 +1864,18 @@ &CFGDEF_COMMAND => CFGOPT_REPO_TYPE, }, + &CFGOPT_REPO_S3_PORT => + { + &CFGDEF_SECTION => CFGDEF_SECTION_GLOBAL, + &CFGDEF_TYPE => CFGDEF_TYPE_INTEGER, + &CFGDEF_PREFIX => CFGDEF_PREFIX_REPO, + &CFGDEF_INDEX_TOTAL => CFGDEF_INDEX_REPO, + &CFGDEF_DEFAULT => 443, + &CFGDEF_ALLOW_RANGE => [1, 65535], + &CFGDEF_DEPEND => CFGOPT_REPO_S3_BUCKET, + &CFGDEF_COMMAND => CFGOPT_REPO_TYPE, + }, + &CFGOPT_REPO_S3_REGION, { &CFGDEF_INHERIT => CFGOPT_REPO_S3_BUCKET, diff -Nru pgbackrest-2.15.1/debian/changelog pgbackrest-2.16/debian/changelog --- pgbackrest-2.15.1/debian/changelog 2019-06-27 13:58:27.000000000 +0000 +++ pgbackrest-2.16/debian/changelog 2019-08-08 18:33:32.000000000 +0000 @@ -1,3 +1,20 @@ +pgbackrest (2.16-1) unstable; urgency=medium + + * New Upstream Release: + - Bug Fixes: + * Retry S3 RequestTimeTooSkewed errors instead of immediately + terminating. + * Fix incorrect handling of transfer-encoding response to HEAD request. + * Fix scoping violations exposed by optimizations in gcc 9. + - Features: + * Add repo-s3-port option for setting a non-standard S3 service port. + - Improvements: + * The local command for backup is implemented entirely in C. 
+ * The check command is implemented partly in C. + * Adjust dependencies. + + -- Adrian Vondendriesch Thu, 08 Aug 2019 20:33:32 +0200 + pgbackrest (2.15.1-1) unstable; urgency=medium * New Upstream Release: diff -Nru pgbackrest-2.15.1/debian/control pgbackrest-2.16/debian/control --- pgbackrest-2.15.1/debian/control 2019-02-11 18:13:00.000000000 +0000 +++ pgbackrest-2.16/debian/control 2019-08-08 18:33:32.000000000 +0000 @@ -4,14 +4,13 @@ Maintainer: Debian PostgreSQL Maintainers Uploaders: Adrian Vondendriesch Build-Depends: debhelper (>= 9), - libio-socket-ssl-perl, libperl-dev, + libpq-dev, libssl-dev, libxml-checker-perl, - libxml-libxml-perl, - libxml2-dev, + libxml2-dev, txt2man, - zlib1g-dev + zlib1g-dev Standards-Version: 4.3.0 Homepage: http://www.pgbackrest.org/ Vcs-Git: https://salsa.debian.org/postgresql/pgbackrest.git @@ -19,12 +18,12 @@ Package: pgbackrest Architecture: any -Depends: libdbd-pg-perl, +Depends: perl, postgresql-common, ${misc:Depends}, ${perl:Depends}, ${shlibs:Depends} -Suggests: libio-socket-ssl-perl, libxml-libxml-perl, pgbackrest-doc +Suggests: pgbackrest-doc Description: Reliable PostgreSQL Backup & Restore pgBackRest is a simple, reliable backup and restore system for PostgreSQL that can seamlessly scale up to the largest databases and workloads. 
diff -Nru pgbackrest-2.15.1/doc/lib/BackRestDoc/Common/DocManifest.pm pgbackrest-2.16/doc/lib/BackRestDoc/Common/DocManifest.pm --- pgbackrest-2.15.1/doc/lib/BackRestDoc/Common/DocManifest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/lib/BackRestDoc/Common/DocManifest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -272,7 +272,7 @@ my $strIf = $self->variableReplace($oNode->paramGet('if')); # In this case we really do want to evaluate the contents and not treat it as a literal - $bIf = eval($strIf); ## no critic (BuiltinFunctions::ProhibitStringyEval) + $bIf = eval($strIf); # Error if the eval failed if ($@) @@ -319,7 +319,7 @@ if ($oVariable->paramTest('eval', 'y')) { # In this case we really do want to evaluate the contents of strValue and not treat it as a literal. - $strValue = eval($strValue); ## no critic (BuiltinFunctions::ProhibitStringyEval) + $strValue = eval($strValue); if ($@) { diff -Nru pgbackrest-2.15.1/doc/lib/BackRestDoc/Html/DocHtmlPage.pm pgbackrest-2.16/doc/lib/BackRestDoc/Html/DocHtmlPage.pm --- pgbackrest-2.15.1/doc/lib/BackRestDoc/Html/DocHtmlPage.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/lib/BackRestDoc/Html/DocHtmlPage.pm 2019-08-05 16:03:04.000000000 +0000 @@ -91,7 +91,7 @@ my $oHtmlBuilder = new BackRestDoc::Html::DocHtmlBuilder( $self->{oManifest}->variableReplace('{[project]}' . (defined($self->{oManifest}->variableGet('project-tagline')) ? - $self->{oManifest}->variableGet('project-tagline') : '')), + ' - ' . $self->{oManifest}->variableGet('project-tagline') : '')), $self->{oManifest}->variableReplace($strTitle . (defined($strSubTitle) ? 
" - ${strSubTitle}" : '')), $self->{oManifest}->variableGet('project-favicon'), $self->{oManifest}->variableGet('project-logo'), diff -Nru pgbackrest-2.15.1/doc/RELEASE.md pgbackrest-2.16/doc/RELEASE.md --- pgbackrest-2.15.1/doc/RELEASE.md 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/RELEASE.md 2019-08-05 16:03:04.000000000 +0000 @@ -99,7 +99,7 @@ - Add user guides for CentOS/RHEL 6/7. ``` -The first line will be the release title and the rest will be the body. The tag field should be updated with the current version so a tag is created from master. +The first line will be the release title and the rest will be the body. The tag field should be updated with the current version so a tag is created from master. **Be sure to select the release commit explicitly rather than auto-tagging the last commit in master!** ## Push web documentation to master and deploy ``` @@ -128,10 +128,16 @@ to: ``` use constant PROJECT_VERSION => '2.15dev'; +``` + +Run deploy to generate git history (ctrl-c as soon as the file is generated): +``` +doc/release.pl --deploy +``` -Build to generate files: +Build to generate files and test documentation: ``` -test/test.pl --no-lint --vm=u18 --no-package --build-only +test/test.pl --vm=u18 --build-only ``` Commit and push to integration: diff -Nru pgbackrest-2.15.1/doc/release.pl pgbackrest-2.16/doc/release.pl --- pgbackrest-2.15.1/doc/release.pl 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/release.pl 2019-08-05 16:03:04.000000000 +0000 @@ -211,7 +211,7 @@ # Generate coverage summmary &log(INFO, "Generate Coverage Summary"); executeTest( - "${strTestExe} --no-lint --no-package --no-valgrind --no-optimize --vm-max=3 --coverage-summary", + "${strTestExe} --no-package --no-valgrind --no-optimize --vm-max=3 --coverage-summary", {bShowOutputAsync => true}); } diff -Nru pgbackrest-2.15.1/doc/resource/exe.cache pgbackrest-2.16/doc/resource/exe.cache --- pgbackrest-2.15.1/doc/resource/exe.cache 2019-06-25 12:29:06.000000000 
+0000 +++ pgbackrest-2.16/doc/resource/exe.cache 2019-08-05 16:03:04.000000000 +0000 @@ -35,7 +35,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo mkdir /root/pgbackrest-release-2.15" + "sudo mkdir /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -48,7 +48,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -61,7 +61,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -87,7 +87,8 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo apt-get install build-essential libssl-dev libxml2-dev libperl-dev zlib1g-dev" + "sudo apt-get install build-essential libssl-dev libxml2-dev libperl-dev zlib1g-dev \\", + " libpq-dev" ], "cmd-extra" : "-y 2>&1", "host" : "build", @@ -119,7 +120,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "(cd /root/pgbackrest-release-2.15/src && ./configure)" + "(cd /root/pgbackrest-release-2.16/src && ./configure)" ], "host" : "build", "load-env" : true, @@ -132,7 +133,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo make -s -C /root/pgbackrest-release-2.15/src" + "sudo make -s -C /root/pgbackrest-release-2.16/src" ], "host" : "build", "load-env" : true, @@ -159,7 +160,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-primary", @@ -186,7 +187,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo apt-get install libdbd-pg-perl" + "sudo apt-get install perl" ], "cmd-extra" : "-y 2>&1", "host" : "pg-primary", @@ -301,7 +302,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - General help", + "pgBackRest 2.16 - General 
help", "", "Usage:", " pgbackrest [options] [command]", @@ -446,7 +447,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - 'backup' command - 'log-path' option help", + "pgBackRest 2.16 - 'backup' command - 'log-path' option help", "", "Path where log files are stored.", "", @@ -683,7 +684,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -698,7 +699,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - " successfully stored in the archive at " + " successfully archived to " ] }, "host" : "pg-primary", @@ -709,8 +710,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", - "P00 INFO: WAL segment 000000010000000000000001 successfully stored in the archive at '/var/lib/pgbackrest/archive/demo/10-1/0000000100000000/000000010000000000000001-b32bd89933278fbcbdc4b6204ee15a31c1197ca6.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: WAL segment 000000010000000000000001 successfully archived to 
'/var/lib/pgbackrest/archive/demo/10-1/0000000100000000/000000010000000000000001-e32d76d02b8c71f4b0dd15e5a714874dba64f14b.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -737,9 +738,9 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 02:01:57\": backup begins after the next regular checkpoint completes", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:26:04\": backup begins after the next regular checkpoint completes", "P00 INFO: backup start archive = 000000010000000000000002, lsn = 0/2000028", " [filtered 941 lines of output]", "P01 INFO: backup file /var/lib/postgresql/10/demo/base/1/12820 (0B, 100%)", @@ -765,7 +766,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020157F" + "20190805-142604F" ] } }, @@ -792,11 +793,11 @@ "value" : { "output" : [ " [filtered 4 lines of output]", - "P01 INFO: backup file /var/lib/postgresql/10/demo/global/pg_control (8KB, 99%) checksum 52cd17e0aeeb68a0b978d3e068fe91d31d75b574", + "P01 INFO: backup file /var/lib/postgresql/10/demo/global/pg_control (8KB, 99%) checksum dc1892b6cd4b304e22eb94d86fa905d84dcacfac", "P01 INFO: backup file /var/lib/postgresql/10/demo/pg_logical/replorigin_checkpoint (8B, 100%) checksum 
347fc8f2df71bd4436e38bd1516ccd7ea0d46532", "P00 INFO: diff backup size = 8KB", "P00 INFO: execute non-exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: backup stop archive = 000000010000000000000003, lsn = 0/30000F8", + "P00 INFO: backup stop archive = 000000010000000000000003, lsn = 0/3000130", " [filtered 4 lines of output]" ] } @@ -829,18 +830,18 @@ " db (current)", " wal archive min/max (10-1): 000000010000000000000002/000000010000000000000003", "", - " full backup: 20190625-020157F", - " timestamp start/stop: 2019-06-25 02:01:57 / 2019-06-25 02:02:10", + " full backup: 20190805-142604F", + " timestamp start/stop: 2019-08-05 14:26:04 / 2019-08-05 14:26:18", " wal start/stop: 000000010000000000000002 / 000000010000000000000002", " database size: 22.4MB, backup size: 22.4MB", " repository size: 2.7MB, repository backup size: 2.7MB", "", - " diff backup: 20190625-020157F_20190625-020211D", - " timestamp start/stop: 2019-06-25 02:02:11 / 2019-06-25 02:02:13", + " diff backup: 20190805-142604F_20190805-142620D", + " timestamp start/stop: 2019-08-05 14:26:20 / 2019-08-05 14:26:22", " wal start/stop: 000000010000000000000003 / 000000010000000000000003", " database size: 22.4MB, backup size: 8.2KB", " repository size: 2.7MB, repository backup size: 517B", - " backup reference list: 20190625-020157F" + " backup reference list: 20190805-142604F" ] } }, @@ -974,9 +975,9 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", - "P00 INFO: last backup label = 20190625-020157F_20190625-020211D, version = 2.15", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 02:02:31\": backup begins after the next regular checkpoint 
completes", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", + "P00 INFO: last backup label = 20190805-142604F_20190805-142620D, version = 2.16", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:26:39\": backup begins after the next regular checkpoint completes", "P00 INFO: backup start archive = 000000020000000000000005, lsn = 0/5000028", "P00 WARN: a timeline switch has occurred since the last backup, enabling delta checksum", " [filtered 8 lines of output]" @@ -1035,11 +1036,11 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", - "P00 INFO: last backup label = 20190625-020157F_20190625-020231I, version = 2.15", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 02:02:36\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", + "P00 INFO: last backup label = 20190805-142604F_20190805-142639I, version = 2.16", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:26:45\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 
000000020000000000000006, lsn = 0/6000028", - "P01 INFO: backup file /var/lib/postgresql/10/demo/global/pg_control (8KB, 99%) checksum d849b75b7c41237a1e91c7e2ae2a4ae7c5d1fe60", + "P01 INFO: backup file /var/lib/postgresql/10/demo/global/pg_control (8KB, 99%) checksum 02f64252aa89e8bfd46d880147daa1bf0bdba23e", " [filtered 8 lines of output]" ] } @@ -1078,25 +1079,25 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 23521617,", + " \"delta\" : 23497041,", " \"repository\" : {", - " \"delta\" : 2790501,", - " \"size\" : 2790501", + " \"delta\" : 2790373,", + " \"size\" : 2790373", " },", - " \"size\" : 23521617", + " \"size\" : 23497041", " },", - " \"label\" : \"20190625-020157F\",", + " \"label\" : \"20190805-142604F\",", " \"prior\" : null,", " \"reference\" : null,", " \"timestamp\" : {", - " \"start\" : 1561428117,", - " \"stop\" : 1561428130", + " \"start\" : 1565015164,", + " \"stop\" : 1565015178", " },", " \"type\" : \"full\"", " },", @@ -1107,7 +1108,7 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", @@ -1116,18 +1117,18 @@ " \"delta\" : 8429,", " \"repository\" : {", " \"delta\" : 517,", - " \"size\" : 2790501", + " \"size\" : 2790373", " },", - " \"size\" : 23521617", + " \"size\" : 23497041", " },", - " \"label\" : \"20190625-020157F_20190625-020211D\",", - " \"prior\" : \"20190625-020157F\",", + " \"label\" : \"20190805-142604F_20190805-142620D\",", + " \"prior\" : \"20190805-142604F\",", " \"reference\" : [", - " \"20190625-020157F\"", + " \"20190805-142604F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561428131,", - " \"stop\" : 1561428133", + " \"start\" : 1565015180,", + " \"stop\" : 1565015182", " },", " \"type\" : \"diff\"", " },", @@ -1138,7 +1139,7 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - 
" \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", @@ -1147,19 +1148,19 @@ " \"delta\" : 8421,", " \"repository\" : {", " \"delta\" : 469,", - " \"size\" : 2790501", + " \"size\" : 2790373", " },", - " \"size\" : 23521617", + " \"size\" : 23497041", " },", - " \"label\" : \"20190625-020157F_20190625-020231I\",", - " \"prior\" : \"20190625-020157F_20190625-020211D\",", + " \"label\" : \"20190805-142604F_20190805-142639I\",", + " \"prior\" : \"20190805-142604F_20190805-142620D\",", " \"reference\" : [", - " \"20190625-020157F\",", - " \"20190625-020157F_20190625-020211D\"", + " \"20190805-142604F\",", + " \"20190805-142604F_20190805-142620D\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561428151,", - " \"stop\" : 1561428154", + " \"start\" : 1565015199,", + " \"stop\" : 1565015203", " },", " \"type\" : \"incr\"", " },", @@ -1170,7 +1171,7 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", @@ -1179,18 +1180,18 @@ " \"delta\" : 8429,", " \"repository\" : {", " \"delta\" : 517,", - " \"size\" : 2790501", + " \"size\" : 2790373", " },", - " \"size\" : 23521617", + " \"size\" : 23497041", " },", - " \"label\" : \"20190625-020157F_20190625-020236I\",", - " \"prior\" : \"20190625-020157F_20190625-020231I\",", + " \"label\" : \"20190805-142604F_20190805-142645I\",", + " \"prior\" : \"20190805-142604F_20190805-142639I\",", " \"reference\" : [", - " \"20190625-020157F\"", + " \"20190805-142604F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561428156,", - " \"stop\" : 1561428158", + " \"start\" : 1565015205,", + " \"stop\" : 1565015207", " },", " \"type\" : \"incr\"", " }", @@ -1199,7 +1200,7 @@ " \"db\" : [", " {", " \"id\" : 1,", - " \"system-id\" : 6706282598478229752,", + " \"system-id\" : 6721688847430979816,", " \"version\" : \"10\"", " }", " ],", @@ -1354,7 +1355,7 @@ "output" : [ " name | last_successful_backup | 
last_archived_wal ", "--------+------------------------+--------------------------", - " \"demo\" | 2019-06-25 02:02:38+00 | 000000020000000000000006", + " \"demo\" | 2019-08-05 14:26:47+00 | 000000020000000000000006", "(1 row)" ] } @@ -1388,7 +1389,7 @@ "type" : "exe", "value" : { "output" : [ - "1561428158" + "1565015207" ] } }, @@ -1452,7 +1453,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-020157F|remove archive" + "archive retention on backup 20190805-142604F|remove archive" ] }, "host" : "pg-primary", @@ -1466,7 +1467,7 @@ " [filtered 951 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 DETAIL: archive retention on backup 20190625-020157F, archiveId = 10-1, start = 000000010000000000000002", + "P00 DETAIL: archive retention on backup 20190805-142604F, archiveId = 10-1, start = 000000010000000000000002", "P00 DETAIL: no archive to remove, archiveId = 10-1", "P00 INFO: expire command end: completed successfully" ] @@ -1486,7 +1487,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020246F" + "20190805-142656F" ] } }, @@ -1501,7 +1502,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire full backup set\\: 20190625-020157F|archive retention on backup 20190625-020246F|remove archive" + "expire full backup set\\: 20190805-142604F|archive retention on backup 20190805-142656F|remove archive" ] }, "host" : "pg-primary", @@ -1515,9 +1516,9 @@ " [filtered 950 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire full backup set: 20190625-020157F, 20190625-020157F_20190625-020211D, 20190625-020157F_20190625-020231I, 20190625-020157F_20190625-020236I", - "P00 INFO: remove expired backup 20190625-020157F_20190625-020236I", - "P00 INFO: remove expired backup 20190625-020157F_20190625-020231I", + "P00 INFO: expire full backup set: 20190805-142604F, 
20190805-142604F_20190805-142620D, 20190805-142604F_20190805-142639I, 20190805-142604F_20190805-142645I", + "P00 INFO: remove expired backup 20190805-142604F_20190805-142645I", + "P00 INFO: remove expired backup 20190805-142604F_20190805-142639I", " [filtered 2 lines of output]" ] } @@ -1580,7 +1581,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020259F_20190625-020314D" + "20190805-142711F_20190805-142727D" ] } }, @@ -1608,7 +1609,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire diff backup set: 20190625-020259F_20190625-020314D" + "expire diff backup set: 20190805-142711F_20190805-142727D" ] }, "host" : "pg-primary", @@ -1619,12 +1620,12 @@ "type" : "exe", "value" : { "output" : [ - " [filtered 10 lines of output]", + " [filtered 11 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire diff backup set: 20190625-020259F_20190625-020314D, 20190625-020259F_20190625-020317I", - "P00 INFO: remove expired backup 20190625-020259F_20190625-020317I", - "P00 INFO: remove expired backup 20190625-020259F_20190625-020314D" + "P00 INFO: expire diff backup set: 20190805-142711F_20190805-142727D, 20190805-142711F_20190805-142731I", + "P00 INFO: remove expired backup 20190805-142711F_20190805-142731I", + "P00 INFO: remove expired backup 20190805-142711F_20190805-142727D" ] } }, @@ -1673,7 +1674,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020259F_20190625-020321D" + "20190805-142711F_20190805-142735D" ] } }, @@ -1717,7 +1718,7 @@ " [filtered 8 lines of output]", "P00 INFO: execute non-exclusive pg_stop_backup() and wait for all WAL segments to archive", "P00 INFO: backup stop archive = 000000020000000000000010, lsn = 0/100000F8", - "P00 INFO: new backup label = 20190625-020259F_20190625-020326D", + "P00 INFO: new backup label = 20190805-142711F_20190805-142740D", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin" ] @@ -1737,7 
+1738,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020259F_20190625-020326D" + "20190805-142711F_20190805-142740D" ] } }, @@ -1752,7 +1753,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-020259F_20190625-020321D|remove archive" + "archive retention on backup 20190805-142711F_20190805-142735D|remove archive" ] }, "host" : "pg-primary", @@ -1763,13 +1764,13 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: expire command begin 2.15: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 --repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 --stanza=demo", - "P00 DETAIL: archive retention on backup 20190625-020246F, archiveId = 10-1, start = 000000020000000000000008, stop = 000000020000000000000008", - "P00 DETAIL: archive retention on backup 20190625-020259F, archiveId = 10-1, start = 000000020000000000000009, stop = 000000020000000000000009", - "P00 DETAIL: archive retention on backup 20190625-020259F_20190625-020321D, archiveId = 10-1, start = 00000002000000000000000C, stop = 00000002000000000000000C", - "P00 DETAIL: archive retention on backup 20190625-020259F_20190625-020326D, archiveId = 10-1, start = 000000020000000000000010", - "P00 DETAIL: remove archive: archiveId = 10-1, start = 00000002000000000000000A, stop = 00000002000000000000000B", - "P00 DETAIL: remove archive: archiveId = 10-1, start = 00000002000000000000000D, stop = 00000002000000000000000F", + "P00 INFO: expire command begin 2.16: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 --repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 --stanza=demo", + "P00 DETAIL: archive retention on backup 20190805-142656F, 
archiveId = 10-1, start = 000000020000000000000008, stop = 000000020000000000000008", + "P00 DETAIL: archive retention on backup 20190805-142711F, archiveId = 10-1, start = 000000020000000000000009, stop = 000000020000000000000009", + "P00 DETAIL: archive retention on backup 20190805-142711F_20190805-142735D, archiveId = 10-1, start = 00000002000000000000000D, stop = 00000002000000000000000D", + "P00 DETAIL: archive retention on backup 20190805-142711F_20190805-142740D, archiveId = 10-1, start = 000000020000000000000010", + "P00 DETAIL: remove archive: archiveId = 10-1, start = 00000002000000000000000A, stop = 00000002000000000000000C", + "P00 DETAIL: remove archive: archiveId = 10-1, start = 00000002000000000000000E, stop = 00000002000000000000000F", "P00 INFO: expire command end: completed successfully" ] } @@ -1809,7 +1810,7 @@ "type" : "exe", "value" : { "output" : [ - " [filtered 761 lines of output]", + " [filtered 763 lines of output]", "P01 DETAIL: restore file /var/lib/postgresql/10/demo/base/12977/PG_VERSION - exists and matches backup (3B, 99%) checksum 4143d3a341877154d6e95211464e1df1015b74bd", "P01 DETAIL: restore file /var/lib/postgresql/10/demo/base/1/PG_VERSION - exists and matches backup (3B, 99%) checksum 4143d3a341877154d6e95211464e1df1015b74bd", "P01 DETAIL: restore file /var/lib/postgresql/10/demo/PG_VERSION - exists and matches backup (3B, 100%) checksum 4143d3a341877154d6e95211464e1df1015b74bd", @@ -2198,7 +2199,7 @@ "type" : "exe", "value" : { "output" : [ - "2019-06-25 02:04:10.358542+00" + "2019-08-05 14:28:26.411465+00" ] } }, @@ -2251,7 +2252,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 02:04:10.358542+00\" \\", + " --type=time \"--target=2019-08-05 14:28:26.411465+00\" \\", " --target-action=promote restore" ], "host" : "pg-primary", @@ -2296,7 +2297,7 @@ "value" : { "output" : [ "restore_command = 'pgbackrest --stanza=demo archive-get %f \"%p\"'", - 
"recovery_target_time = '2019-06-25 02:04:10.358542+00'", + "recovery_target_time = '2019-08-05 14:28:26.411465+00'", "recovery_target_action = 'promote'" ] } @@ -2378,16 +2379,16 @@ "output" : [ " [filtered 2 lines of output]", "LOG: listening on Unix socket \"/var/run/postgresql/.s.PGSQL.5432\"", - "LOG: database system was interrupted; last known up at 2019-06-25 02:04:03 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 02:04:10.358542+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:28:19 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:28:26.411465+00", "LOG: restored log file \"00000004.history\" from archive", "LOG: restored log file \"000000040000000000000015\" from archive", " [filtered 2 lines of output]", "LOG: database system is ready to accept read only connections", "LOG: restored log file \"000000040000000000000016\" from archive", - "LOG: recovery stopping before commit of transaction 564, time 2019-06-25 02:04:10.872712+00", - "LOG: redo done at 0/16021378", - "LOG: last completed transaction was at log time 2019-06-25 02:04:09.907842+00", + "LOG: recovery stopping before commit of transaction 564, time 2019-08-05 14:28:26.933264+00", + "LOG: redo done at 0/16021358", + "LOG: last completed transaction was at log time 2019-08-05 14:28:25.894026+00", "LOG: selected new timeline ID: 5", "LOG: archive recovery complete", " [filtered 3 lines of output]" @@ -2439,7 +2440,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-020259F_20190625-020403D" + "20190805-142711F_20190805-142818D" ] } }, @@ -2474,7 +2475,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 02:04:10.358542+00\" --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:28:26.411465+00\" --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -2573,8 +2574,8 @@ "output" : [ " [filtered 2 lines of output]", "LOG: 
listening on Unix socket \"/var/run/postgresql/.s.PGSQL.5432\"", - "LOG: database system was interrupted; last known up at 2019-06-25 02:04:25 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 02:04:10.358542+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:28:39 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:28:26.411465+00", "LOG: restored log file \"00000005.history\" from archive", "LOG: restored log file \"000000050000000000000017\" from archive", "LOG: redo starts at 0/17000028", @@ -2595,7 +2596,7 @@ "filter" : false, "filter-context" : 2, "list" : [ - "20190625-020259F_20190625-020403D" + "20190805-142711F_20190805-142818D" ] }, "host" : "pg-primary", @@ -2613,45 +2614,45 @@ " db (current)", " wal archive min/max (10-1): 000000020000000000000008/000000050000000000000017", "", - " full backup: 20190625-020246F", - " timestamp start/stop: 2019-06-25 02:02:46 / 2019-06-25 02:02:58", + " full backup: 20190805-142656F", + " timestamp start/stop: 2019-08-05 14:26:56 / 2019-08-05 14:27:10", " wal start/stop: 000000020000000000000008 / 000000020000000000000008", " database size: 22.4MB, backup size: 22.4MB", " repository size: 2.7MB, repository backup size: 2.7MB", "", - " full backup: 20190625-020259F", - " timestamp start/stop: 2019-06-25 02:02:59 / 2019-06-25 02:03:12", + " full backup: 20190805-142711F", + " timestamp start/stop: 2019-08-05 14:27:11 / 2019-08-05 14:27:25", " wal start/stop: 000000020000000000000009 / 000000020000000000000009", " database size: 22.4MB, backup size: 22.4MB", " repository size: 2.7MB, repository backup size: 2.7MB", "", - " diff backup: 20190625-020259F_20190625-020326D", - " timestamp start/stop: 2019-06-25 02:03:26 / 2019-06-25 02:03:28", + " diff backup: 20190805-142711F_20190805-142740D", + " timestamp start/stop: 2019-08-05 14:27:40 / 2019-08-05 14:27:43", " wal start/stop: 000000020000000000000010 / 000000020000000000000010", " database size: 22.4MB, backup size: 96.2KB", 
" repository size: 2.7MB, repository backup size: 11.9KB", - " backup reference list: 20190625-020259F", + " backup reference list: 20190805-142711F", "", - " incr backup: 20190625-020259F_20190625-020339I", - " timestamp start/stop: 2019-06-25 02:03:39 / 2019-06-25 02:03:48", + " incr backup: 20190805-142711F_20190805-142754I", + " timestamp start/stop: 2019-08-05 14:27:54 / 2019-08-05 14:28:04", " wal start/stop: 000000030000000000000012 / 000000030000000000000012", - " database size: 37MB, backup size: 15MB", + " database size: 37.0MB, backup size: 15.0MB", " repository size: 4.4MB, repository backup size: 1.8MB", - " backup reference list: 20190625-020259F, 20190625-020259F_20190625-020326D", + " backup reference list: 20190805-142711F, 20190805-142711F_20190805-142740D", "", - " diff backup: 20190625-020259F_20190625-020403D", - " timestamp start/stop: 2019-06-25 02:04:03 / 2019-06-25 02:04:09", + " diff backup: 20190805-142711F_20190805-142818D", + " timestamp start/stop: 2019-08-05 14:28:18 / 2019-08-05 14:28:25", " wal start/stop: 000000040000000000000015 / 000000040000000000000015", " database size: 29.7MB, backup size: 7.8MB", " repository size: 3.5MB, repository backup size: 948.7KB", - " backup reference list: 20190625-020259F", + " backup reference list: 20190805-142711F", "", - " incr backup: 20190625-020259F_20190625-020424I", - " timestamp start/stop: 2019-06-25 02:04:24 / 2019-06-25 02:04:27", + " incr backup: 20190805-142711F_20190805-142838I", + " timestamp start/stop: 2019-08-05 14:28:38 / 2019-08-05 14:28:41", " wal start/stop: 000000050000000000000017 / 000000050000000000000017", " database size: 29.7MB, backup size: 2MB", " repository size: 3.5MB, repository backup size: 218.4KB", - " backup reference list: 20190625-020259F, 20190625-020259F_20190625-020403D" + " backup reference list: 20190805-142711F, 20190805-142711F_20190805-142818D" ] } }, @@ -2673,8 +2674,8 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo 
--delta \\", - " --type=time \"--target=2019-06-25 02:04:10.358542+00\" \\", - " --set=20190625-020259F_20190625-020403D --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:28:26.411465+00\" \\", + " --set=20190805-142711F_20190805-142818D --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -2773,16 +2774,16 @@ "output" : [ " [filtered 2 lines of output]", "LOG: listening on Unix socket \"/var/run/postgresql/.s.PGSQL.5432\"", - "LOG: database system was interrupted; last known up at 2019-06-25 02:04:03 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 02:04:10.358542+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:28:19 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:28:26.411465+00", "LOG: restored log file \"00000004.history\" from archive", "LOG: restored log file \"000000040000000000000015\" from archive", " [filtered 2 lines of output]", "LOG: database system is ready to accept read only connections", "LOG: restored log file \"000000040000000000000016\" from archive", - "LOG: recovery stopping before commit of transaction 564, time 2019-06-25 02:04:10.872712+00", - "LOG: redo done at 0/16021378", - "LOG: last completed transaction was at log time 2019-06-25 02:04:09.907842+00", + "LOG: recovery stopping before commit of transaction 564, time 2019-08-05 14:28:26.933264+00", + "LOG: redo done at 0/16021358", + "LOG: last completed transaction was at log time 2019-08-05 14:28:25.894026+00", "LOG: restored log file \"00000005.history\" from archive", "LOG: restored log file \"00000006.history\" from archive", " [filtered 5 lines of output]" @@ -2793,20 +2794,6 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo apt-get install libio-socket-ssl-perl libxml-libxml-perl" - ], - "cmd-extra" : "-y 2>&1", - "host" : "pg-primary", - "load-env" : true, - "output" : false, - "run-as-user" : null - }, - "type" : "exe" - }, - { - "key" : { - "bash-wrap" : true, - "cmd" : [ 
"echo \"172.17.0.2 demo-bucket.s3.us-east-1.amazonaws.com s3.us-east-1.amazonaws.com\" | tee -a /etc/hosts" ], "host" : "pg-primary", @@ -2922,7 +2909,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 16, retries 0, closes 0", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -2949,17 +2937,17 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 
--repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 02:05:04\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:29:05\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 000000070000000000000017, lsn = 0/17000028", " [filtered 1238 lines of output]", - "P03 INFO: backup file /var/lib/postgresql/10/demo/base/1/12820 (0B, 100%)", - "P02 INFO: backup file /var/lib/postgresql/10/demo/base/1/12815 (0B, 100%)", + "P01 INFO: backup file /var/lib/postgresql/10/demo/base/1/12815 (0B, 100%)", + "P03 INFO: backup file /var/lib/postgresql/10/demo/base/1/12830 (0B, 100%)", "P00 INFO: full backup size = 29.7MB", "P00 INFO: execute non-exclusive pg_stop_backup() and wait for all WAL segments to archive", "P00 INFO: backup stop archive = 000000070000000000000017, lsn = 0/17000130", - " [filtered 5 lines of output]" + " [filtered 6 lines of output]" ] } }, @@ -2997,7 +2985,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stop command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stop command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket 
--repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", "P00 INFO: stop command end: completed successfully" ] } @@ -3023,7 +3011,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-delete command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-delete command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 13, retries 0, closes 0", "P00 INFO: stanza-delete command end: completed successfully" ] } @@ -3072,7 +3061,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "repository", @@ -3099,7 +3088,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo apt-get install libdbd-pg-perl" + "sudo apt-get install perl" ], "cmd-extra" : "-y 2>&1", "host" : "repository", @@ -3658,16 +3647,16 @@ " db (current)", " wal archive min/max (10-1): 00000008000000000000001D/00000008000000000000001F", "", - " full backup: 20190625-020629F", - " timestamp start/stop: 2019-06-25 02:06:29 / 2019-06-25 02:06:46", + " full backup: 20190805-143025F", + " timestamp start/stop: 2019-08-05 14:30:25 / 2019-08-05 
14:30:40", " wal start/stop: 00000008000000000000001D / 00000008000000000000001D", - " database size: 29.8MB, backup size: 29.8MB", + " database size: 29.7MB, backup size: 29.7MB", " repository size: 3.5MB, repository backup size: 3.5MB", "", - " full backup: 20190625-020648F", - " timestamp start/stop: 2019-06-25 02:06:48 / 2019-06-25 02:06:56", + " full backup: 20190805-143042F", + " timestamp start/stop: 2019-08-05 14:30:42 / 2019-08-05 14:30:51", " wal start/stop: 00000008000000000000001F / 00000008000000000000001F", - " database size: 29.8MB, backup size: 29.8MB", + " database size: 29.7MB, backup size: 29.7MB", " repository size: 3.5MB, repository backup size: 3.5MB" ] } @@ -3812,7 +3801,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-standby", @@ -3839,7 +3828,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo apt-get install libdbd-pg-perl" + "sudo apt-get install perl" ], "cmd-extra" : "-y 2>&1", "host" : "pg-standby", @@ -4217,7 +4206,7 @@ "output" : [ " [filtered 3 lines of output]", "LOG: listening on Unix socket \"/var/run/postgresql/.s.PGSQL.5432\"", - "LOG: database system was interrupted; last known up at 2019-06-25 02:06:49 UTC", + "LOG: database system was interrupted; last known up at 2019-08-05 14:30:43 UTC", "LOG: entering standby mode", "LOG: restored log file \"00000008.history\" from archive", "LOG: restored log file \"00000008000000000000001F\" from archive", @@ -4305,7 +4294,7 @@ "output" : [ " pg_switch_wal | current_timestamp ", "---------------+-------------------------------", - " 0/2002B4F0 | 2019-06-25 02:07:43.160346+00", + " 0/2002B4D8 | 2019-08-05 14:31:38.868459+00", "(1 row)" ] } @@ -4332,9 +4321,9 @@ "type" : "exe", "value" : { "output" : [ - " message | current_timestamp ", - "----------------+-------------------------------", - " Important Data | 
2019-06-25 02:07:47.687919+00", + " message | current_timestamp ", + "----------------+-----------------------------", + " Important Data | 2019-08-05 14:31:43.6212+00", "(1 row)" ] } @@ -4349,7 +4338,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "all other checks passed" + "because no primary was found" ] }, "host" : "pg-standby", @@ -4360,8 +4349,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-host=repository --stanza=demo", - "P00 INFO: switch wal cannot be performed on the standby, all other checks passed successfully", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-host=repository --stanza=demo", + "P00 INFO: switch wal not performed because no primary was found", "P00 INFO: check command end: completed successfully" ] } @@ -4608,7 +4597,7 @@ "output" : [ " message | current_timestamp ", "----------------+-------------------------------", - " Important Data | 2019-06-25 02:08:03.728204+00", + " Important Data | 2019-08-05 14:32:00.774117+00", "(1 row)" ] } @@ -4637,7 +4626,7 @@ "output" : [ " message | current_timestamp ", "----------------+-------------------------------", - " Important Data | 2019-06-25 02:08:04.471849+00", + " Important Data | 2019-08-05 14:32:01.616566+00", "(1 row)" ] } @@ -4869,8 +4858,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-host=repository --stanza=demo", - "P00 INFO: WAL segment 000000080000000000000026 successfully stored in the archive at 
'/var/lib/pgbackrest/archive/demo/10-1/0000000800000000/000000080000000000000026-5bfb8590e2f586e88e6021db37f8fd7d17c2b686.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --repo1-host=repository --stanza=demo", + "P00 INFO: WAL segment 000000080000000000000026 successfully archived to 'archive/demo/10-1/0000000800000000/000000080000000000000026-7eb6a9de7b9b280721b5d49e50e901ae2dd349af.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -4897,22 +4886,22 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000021", "P01 DETAIL: pushed WAL file '000000080000000000000021' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off 
--log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", "P00 INFO: push 4 WAL file(s) to archive: 000000080000000000000022...000000080000000000000025", - "P02 DETAIL: pushed WAL file '000000080000000000000023' to the archive", "P01 DETAIL: pushed WAL file '000000080000000000000022' to the archive", - "P02 DETAIL: pushed WAL file '000000080000000000000024' to the archive", - "P01 DETAIL: pushed WAL file '000000080000000000000025' to the archive", + "P02 DETAIL: pushed WAL file '000000080000000000000023' to the archive", + "P01 DETAIL: pushed WAL file '000000080000000000000024' to the archive", + "P02 DETAIL: pushed WAL file '000000080000000000000025' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/postgresql/10/demo/pg_wal] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000026", "P01 DETAIL: pushed WAL file '000000080000000000000026' to the archive", "P00 INFO: archive-push-async command end: completed successfully" @@ -4954,23 +4943,23 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-get-async command begin 2.15: [00000008000000000000001F, 000000080000000000000020, 
000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: archive-get-async command begin 2.16: [00000008000000000000001F, 000000080000000000000020, 000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", "P00 INFO: get 8 WAL file(s) from archive: 00000008000000000000001F...000000080000000000000026", - "P02 DETAIL: found 000000080000000000000020 in the archive", "P01 DETAIL: found 00000008000000000000001F in the archive", - "P01 DETAIL: unable to find 000000080000000000000022 in the archive", - "P02 DETAIL: unable to find 000000080000000000000021 in the archive", + "P02 DETAIL: found 000000080000000000000020 in the archive", + "P01 DETAIL: unable to find 000000080000000000000021 in the archive", + "P02 DETAIL: unable to find 000000080000000000000022 in the archive", " [filtered 20 lines of output]", - "P00 INFO: archive-get-async command begin 2.15: [000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: archive-get-async command begin 2.16: 
[000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/postgresql/10/demo --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000021...000000080000000000000028", "P01 DETAIL: found 000000080000000000000021 in the archive", "P02 DETAIL: found 000000080000000000000022 in the archive", - "P01 DETAIL: found 000000080000000000000023 in the archive", "P02 DETAIL: found 000000080000000000000024 in the archive", - "P02 DETAIL: unable to find 000000080000000000000026 in the archive", - "P02 DETAIL: unable to find 000000080000000000000027 in the archive", - "P02 DETAIL: unable to find 000000080000000000000028 in the archive", - "P01 DETAIL: found 000000080000000000000025 in the archive", + "P01 DETAIL: found 000000080000000000000023 in the archive", + "P01 DETAIL: unable to find 000000080000000000000026 in the archive", + "P01 DETAIL: unable to find 000000080000000000000027 in the archive", + "P01 DETAIL: unable to find 000000080000000000000028 in the archive", + "P02 DETAIL: found 000000080000000000000025 in the archive", "P00 INFO: archive-get-async command end: completed successfully", "", " [filtered 8 lines of output]", @@ -5059,15 +5048,20 @@ "value" : { "output" : [ " [filtered 2 lines of output]", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 02:08:25\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:32:24\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 
000000080000000000000028, lsn = 0/28000028", "P00 INFO: wait for replay on the standby to reach 0/28000028", - "P00 INFO: replay on the standby reached 0/28000108, checkpoint 0/28000060", - "P01 INFO: backup file pg-primary:/var/lib/postgresql/10/demo/global/pg_control (8KB, 0%) checksum fcf65db89e0d0bc985776c8c60a62af01c4121ec", - "P01 INFO: backup file pg-primary:/var/lib/postgresql/10/demo/pg_logical/replorigin_checkpoint (8B, 0%) checksum 347fc8f2df71bd4436e38bd1516ccd7ea0d46532", - "P02 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/2608 (440KB, 19%) checksum 6b75dba8ba9cb12bea08c18fc2c833d3d857a4d9", - "P03 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/1249 (392KB, 36%) checksum 0180bb0af20ddaf75c00578eb23902603150ead1", - " [filtered 39 lines of output]" + "P00 INFO: replay on the standby reached 0/280000D0, checkpoint 0/28000060", + "P02 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/2608 (440KB, 19%) checksum d529490728bfd6044e453ade84bae8563d866ab1", + "P02 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/2673 (312KB, 32%) checksum ac0cccb77c5641dbc6d1e9dc341a2fea1ea35f22", + "P04 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/2674 (368KB, 48%) checksum 694ec887d5afd354ca8a6f0bc25bf72bbf7490fd", + "P02 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/2658 (112KB, 53%) checksum d6dc412ca41421084a657a4f80f5c4d0f4ed31e8", + "P01 INFO: backup file pg-primary:/var/lib/postgresql/10/demo/global/pg_control (8KB, 54%) checksum 301d240be6d5a6ff31dd72e6e5322f890d5aeaa1", + "P04 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/1259 (88KB, 58%) checksum 06e67d09d0b1505b6895f425c3001fc80210f0e4", + "P01 INFO: backup file pg-primary:/var/lib/postgresql/10/demo/pg_logical/replorigin_checkpoint (8B, 58%) checksum 347fc8f2df71bd4436e38bd1516ccd7ea0d46532", + "P02 INFO: backup file 
pg-standby:/var/lib/postgresql/10/demo/base/12978/2659 (80KB, 61%) checksum bdeb9c730703557389a8166beb15339f6431b1e3", + "P03 INFO: backup file pg-standby:/var/lib/postgresql/10/demo/base/12978/1249 (392KB, 78%) checksum 81e85815104dd697a2d4999b162f21285bc1a252", + " [filtered 34 lines of output]" ] } }, @@ -5343,7 +5337,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-upgrade command begin 2.15: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online --pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/postgresql/11/demo --pg2-path=/var/lib/postgresql/11/demo --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-upgrade command begin 2.16: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online --pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/postgresql/11/demo --pg2-path=/var/lib/postgresql/11/demo --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-upgrade command end: completed successfully" ] } @@ -5444,7 +5438,14 @@ "output" : true, "run-as-user" : null }, - "type" : "exe" + "type" : "exe", + "value" : { + "output" : [ + "P00 WARN: unable to check pg-2: [DbConnectError] raised from remote-0 protocol on 'pg-standby': unable to connect to 'dbname='postgres' port=5432': could not connect to server: No such file or directory", + " \tIs the server running locally and accepting", + " \tconnections on Unix domain socket \"/var/run/postgresql/.s.PGSQL.5432\"?" 
+ ] + } }, { "key" : { @@ -5597,7 +5598,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo mkdir /root/pgbackrest-release-2.15" + "sudo mkdir /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -5610,7 +5611,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -5623,7 +5624,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -5637,7 +5638,7 @@ "bash-wrap" : true, "cmd" : [ "sudo yum install build-essential gcc openssl-devel libxml2-devel \\", - " perl-ExtUtils-Embed" + " postgresql-devel perl-ExtUtils-Embed" ], "cmd-extra" : "-y 2>&1", "host" : "build", @@ -5669,7 +5670,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "(cd /root/pgbackrest-release-2.15/src && ./configure)" + "(cd /root/pgbackrest-release-2.16/src && ./configure)" ], "host" : "build", "load-env" : true, @@ -5682,7 +5683,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo make -s -C /root/pgbackrest-release-2.15/src" + "sudo make -s -C /root/pgbackrest-release-2.16/src" ], "host" : "build", "load-env" : true, @@ -5709,7 +5710,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-primary", @@ -5737,7 +5738,7 @@ "bash-wrap" : true, "cmd" : [ "sudo yum install perl perl-Time-HiRes perl-parent perl-JSON \\", - " perl-Digest-SHA perl-DBD-Pg" + " perl-Digest-SHA" ], "cmd-extra" : "-y 2>&1", "host" : "pg-primary", @@ -5852,7 +5853,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - General help", + "pgBackRest 2.16 - General help", "", "Usage:", " pgbackrest [options] [command]", @@ 
-5996,7 +5997,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - 'backup' command - 'log-path' option help", + "pgBackRest 2.16 - 'backup' command - 'log-path' option help", "", "Path where log files are stored.", "", @@ -6234,7 +6235,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -6249,7 +6250,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - " successfully stored in the archive at " + " successfully archived to " ] }, "host" : "pg-primary", @@ -6260,8 +6261,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", - "P00 INFO: WAL segment 000000010000000000000001 successfully stored in the archive at '/var/lib/pgbackrest/archive/demo/9.5-1/0000000100000000/000000010000000000000001-41b5fea8d3574238c5a991ef095bd5cfb14028c4.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: WAL segment 000000010000000000000001 successfully archived to 
'/var/lib/pgbackrest/archive/demo/9.5-1/0000000100000000/000000010000000000000001-d60c5d3556c2683be329dbf2be3d2766e928cedc.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -6288,9 +6289,9 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:52:09\": backup begins after the next regular checkpoint completes", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:16:39\": backup begins after the next regular checkpoint completes", "P00 INFO: backup start archive = 000000010000000000000002, lsn = 0/2000028", " [filtered 851 lines of output]", "P01 INFO: backup file /var/lib/pgsql/9.5/data/base/1/12216 (0B, 100%)", @@ -6316,7 +6317,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015209F" + "20190805-141639F" ] } }, @@ -6343,9 +6344,9 @@ "value" : { "output" : [ " [filtered 5 lines of output]", - "P01 INFO: backup file /var/lib/pgsql/9.5/data/pg_log/postgresql.log (994B, 97%) checksum f029f8e154e8074e0c38216727ae6f0a5b7590a4", - "P01 INFO: backup file /var/lib/pgsql/9.5/data/backup_label (236B, 100%) checksum 91c6d3666702133d4fc0d8a771bc956bbd4061ec", - "P00 INFO: diff backup size = 9KB", + "P01 INFO: backup file /var/lib/pgsql/9.5/data/pg_log/postgresql.log (1010B, 97%) checksum 
291705d5ea5a6c9f71c4e0705ede2413ec653402", + "P01 INFO: backup file /var/lib/pgsql/9.5/data/backup_label (236B, 100%) checksum 0185d9b0cda3247888e4686a82b43c482b6c2f1e", + "P00 INFO: diff backup size = 9.2KB", "P00 INFO: execute exclusive pg_stop_backup() and wait for all WAL segments to archive", "P00 INFO: backup stop archive = 000000010000000000000003, lsn = 0/30000F8", " [filtered 4 lines of output]" @@ -6380,18 +6381,18 @@ " db (current)", " wal archive min/max (9.5-1): 000000010000000000000002/000000010000000000000003", "", - " full backup: 20190625-015209F", - " timestamp start/stop: 2019-06-25 01:52:09 / 2019-06-25 01:52:22", + " full backup: 20190805-141639F", + " timestamp start/stop: 2019-08-05 14:16:39 / 2019-08-05 14:16:52", " wal start/stop: 000000010000000000000002 / 000000010000000000000002", - " database size: 20.6MB, backup size: 20.6MB", + " database size: 20.5MB, backup size: 20.5MB", " repository size: 2.4MB, repository backup size: 2.4MB", "", - " diff backup: 20190625-015209F_20190625-015223D", - " timestamp start/stop: 2019-06-25 01:52:23 / 2019-06-25 01:52:27", + " diff backup: 20190805-141639F_20190805-141653D", + " timestamp start/stop: 2019-08-05 14:16:53 / 2019-08-05 14:16:56", " wal start/stop: 000000010000000000000003 / 000000010000000000000003", - " database size: 20.6MB, backup size: 9.2KB", - " repository size: 2.4MB, repository backup size: 752B", - " backup reference list: 20190625-015209F" + " database size: 20.5MB, backup size: 9.2KB", + " repository size: 2.4MB, repository backup size: 768B", + " backup reference list: 20190805-141639F" ] } }, @@ -6549,9 +6550,9 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", - "P00 INFO: last backup label = 
20190625-015209F_20190625-015223D, version = 2.15", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:52:47\": backup begins after the next regular checkpoint completes", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", + "P00 INFO: last backup label = 20190805-141639F_20190805-141653D, version = 2.16", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:17:16\": backup begins after the next regular checkpoint completes", "P00 INFO: backup start archive = 000000020000000000000006, lsn = 0/6000028", "P00 WARN: a timeline switch has occurred since the last backup, enabling delta checksum", " [filtered 10 lines of output]" @@ -6610,11 +6611,11 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", - "P00 INFO: last backup label = 20190625-015209F_20190625-015247I, version = 2.15", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:52:55\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", + "P00 INFO: last backup label = 20190805-141639F_20190805-141716I, version = 2.16", + "P00 INFO: execute 
exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:17:22\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 000000020000000000000007, lsn = 0/7000028", - "P01 INFO: backup file /var/lib/pgsql/9.5/data/global/pg_control (8KB, 78%) checksum 2b4ab9f47cf459daf998f4882a6ab93f35c0c24d", + "P01 INFO: backup file /var/lib/pgsql/9.5/data/global/pg_control (8KB, 78%) checksum e07b82087775d774035eecdf11b04252e2cd778b", " [filtered 9 lines of output]" ] } @@ -6656,9 +6657,9 @@ "value" : { "output" : [ " [filtered 2 lines of output]", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:52:59\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:17:27\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 000000020000000000000009, lsn = 0/9000028", - "P00 ERROR: [047]: unable to create path '/var/lib/pgbackrest/backup/demo/20190625-015209F_20190625-015259I': Permission denied", + "P00 ERROR: [047]: unable to create path '/var/lib/pgbackrest/backup/demo/20190805-141639F_20190805-141727I': [13] Permission denied", "P00 INFO: backup command end: aborted with exception [047]" ] } @@ -6699,12 +6700,12 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", - "P00 INFO: last backup label = 20190625-015209F_20190625-015255I, version = 2.15", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:53:03\": backup begins after the requested immediate 
checkpoint completes", - "P00 ERROR: [057]: ERROR: a backup is already in progress", - " HINT: Run pg_stop_backup() and try again.:", - " select to_char(current_timestamp, 'YYYY-MM-DD HH24:MI:SS.US TZ'), pg_xlogfile_name(lsn), lsn::text, (select setting::int8 from pg_settings where name = 'wal_segment_size') * (select setting::int8 from pg_settings where name = 'wal_block_size') from pg_start_backup('pgBackRest backup started at 2019-06-25 01:53:03', true) as lsn" + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", + "P00 INFO: last backup label = 20190805-141639F_20190805-141722I, version = 2.16", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:17:31\": backup begins after the requested immediate checkpoint completes", + "P00 ERROR: [057]: unable to execute query 'select to_char(current_timestamp, 'YYYY-MM-DD HH24:MI:SS.US TZ'), pg_xlogfile_name(lsn), lsn::text, (select setting::int8 from pg_settings where name = 'wal_segment_size') * (select setting::int8 from pg_settings where name = 'wal_block_size') from pg_start_backup('pgBackRest backup started at 2019-08-05 14:17:31', true) as lsn': ERROR: a backup is already in progress", + " HINT: Run pg_stop_backup() and try again.", + "P00 INFO: backup command end: aborted with exception [057]" ] } }, @@ -6761,13 +6762,13 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --stop-auto --type=incr", - "P00 INFO: last backup label = 
20190625-015209F_20190625-015255I, version = 2.15", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --stop-auto --type=incr", + "P00 INFO: last backup label = 20190805-141639F_20190805-141722I, version = 2.16", "P00 WARN: the cluster is already in backup mode but no pgBackRest backup process is running. pg_stop_backup() will be called so a new backup can be started.", "P00 INFO: execute exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:53:06\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:17:34\": backup begins after the requested immediate checkpoint completes", "P00 INFO: backup start archive = 00000002000000000000000A, lsn = 0/A000028", - "P01 INFO: backup file /var/lib/pgsql/9.5/data/global/pg_control (8KB, 74%) checksum 15659858720228725f21d947dfa2ddc1ee8c284b", + "P01 INFO: backup file /var/lib/pgsql/9.5/data/global/pg_control (8KB, 74%) checksum 94b40d08af77d0fce99cce8a7d312c025f9c1d81", " [filtered 9 lines of output]" ] } @@ -6806,25 +6807,25 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 21551047,", + " \"delta\" : 21502018,", " \"repository\" : {", - " \"delta\" : 2531120,", - " \"size\" : 2531120", + " \"delta\" : 2530304,", + " \"size\" : 2530304", " },", - " \"size\" : 21551047", + " \"size\" : 21502018", " },", - " \"label\" : \"20190625-015209F\",", + " \"label\" : \"20190805-141639F\",", " \"prior\" : null,", " \"reference\" : 
null,", " \"timestamp\" : {", - " \"start\" : 1561427529,", - " \"stop\" : 1561427542", + " \"start\" : 1565014599,", + " \"stop\" : 1565014612", " },", " \"type\" : \"full\"", " },", @@ -6835,27 +6836,27 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 9422,", + " \"delta\" : 9438,", " \"repository\" : {", - " \"delta\" : 752,", - " \"size\" : 2531120", + " \"delta\" : 768,", + " \"size\" : 2530304", " },", - " \"size\" : 21551261", + " \"size\" : 21502125", " },", - " \"label\" : \"20190625-015209F_20190625-015223D\",", - " \"prior\" : \"20190625-015209F\",", + " \"label\" : \"20190805-141639F_20190805-141653D\",", + " \"prior\" : \"20190805-141639F\",", " \"reference\" : [", - " \"20190625-015209F\"", + " \"20190805-141639F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427543,", - " \"stop\" : 1561427547", + " \"start\" : 1565014613,", + " \"stop\" : 1565014616", " },", " \"type\" : \"diff\"", " },", @@ -6866,27 +6867,27 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 10268,", + " \"delta\" : 10284,", " \"repository\" : {", " \"delta\" : 928,", - " \"size\" : 2531296", + " \"size\" : 2530464", " },", - " \"size\" : 21552107", + " \"size\" : 21502971", " },", - " \"label\" : \"20190625-015209F_20190625-015247I\",", - " \"prior\" : \"20190625-015209F_20190625-015223D\",", + " \"label\" : \"20190805-141639F_20190805-141716I\",", + " \"prior\" : \"20190805-141639F_20190805-141653D\",", " \"reference\" : [", - " \"20190625-015209F\"", + " \"20190805-141639F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427567,", - " \"stop\" : 1561427572", + " \"start\" : 1565014636,", + " \"stop\" : 1565014640", " },", " \"type\" : \"incr\"", " },", @@ -6897,27 +6898,27 @@ " },", " 
\"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 10375,", + " \"delta\" : 10391,", " \"repository\" : {", " \"delta\" : 928,", - " \"size\" : 2531296", + " \"size\" : 2530464", " },", - " \"size\" : 21552214", + " \"size\" : 21503078", " },", - " \"label\" : \"20190625-015209F_20190625-015255I\",", - " \"prior\" : \"20190625-015209F_20190625-015247I\",", + " \"label\" : \"20190805-141639F_20190805-141722I\",", + " \"prior\" : \"20190805-141639F_20190805-141716I\",", " \"reference\" : [", - " \"20190625-015209F\"", + " \"20190805-141639F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427575,", - " \"stop\" : 1561427578", + " \"start\" : 1565014642,", + " \"stop\" : 1565014646", " },", " \"type\" : \"incr\"", " },", @@ -6928,27 +6929,27 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 11014,", + " \"delta\" : 11137,", " \"repository\" : {", " \"delta\" : 1136,", - " \"size\" : 2531504", + " \"size\" : 2530672", " },", - " \"size\" : 21552853", + " \"size\" : 21503824", " },", - " \"label\" : \"20190625-015209F_20190625-015306I\",", - " \"prior\" : \"20190625-015209F_20190625-015255I\",", + " \"label\" : \"20190805-141639F_20190805-141734I\",", + " \"prior\" : \"20190805-141639F_20190805-141722I\",", " \"reference\" : [", - " \"20190625-015209F\"", + " \"20190805-141639F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427586,", - " \"stop\" : 1561427590", + " \"start\" : 1565014654,", + " \"stop\" : 1565014659", " },", " \"type\" : \"incr\"", " }", @@ -6957,7 +6958,7 @@ " \"db\" : [", " {", " \"id\" : 1,", - " \"system-id\" : 6706280076246565125,", + " \"system-id\" : 6721686411974537471,", " \"version\" : \"9.5\"", " }", " ],", @@ -7112,7 +7113,7 @@ "output" : [ " name | 
last_successful_backup | last_archived_wal ", "--------+------------------------+--------------------------", - " \"demo\" | 2019-06-25 01:53:10+00 | 00000002000000000000000A", + " \"demo\" | 2019-08-05 14:17:39+00 | 00000002000000000000000A", "(1 row)" ] } @@ -7159,7 +7160,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-015209F|remove archive" + "archive retention on backup 20190805-141639F|remove archive" ] }, "host" : "pg-primary", @@ -7173,7 +7174,7 @@ " [filtered 860 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 DETAIL: archive retention on backup 20190625-015209F, archiveId = 9.5-1, start = 000000010000000000000002", + "P00 DETAIL: archive retention on backup 20190805-141639F, archiveId = 9.5-1, start = 000000010000000000000002", "P00 DETAIL: no archive to remove, archiveId = 9.5-1", "P00 INFO: expire command end: completed successfully" ] @@ -7193,7 +7194,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015315F" + "20190805-141746F" ] } }, @@ -7208,7 +7209,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire full backup set\\: 20190625-015209F|archive retention on backup 20190625-015315F|remove archive" + "expire full backup set\\: 20190805-141639F|archive retention on backup 20190805-141746F|remove archive" ] }, "host" : "pg-primary", @@ -7222,9 +7223,9 @@ " [filtered 860 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire full backup set: 20190625-015209F, 20190625-015209F_20190625-015223D, 20190625-015209F_20190625-015247I, 20190625-015209F_20190625-015255I, 20190625-015209F_20190625-015306I", - "P00 INFO: remove expired backup 20190625-015209F_20190625-015306I", - "P00 INFO: remove expired backup 20190625-015209F_20190625-015255I", + "P00 INFO: expire full backup set: 20190805-141639F, 20190805-141639F_20190805-141653D, 
20190805-141639F_20190805-141716I, 20190805-141639F_20190805-141722I, 20190805-141639F_20190805-141734I", + "P00 INFO: remove expired backup 20190805-141639F_20190805-141734I", + "P00 INFO: remove expired backup 20190805-141639F_20190805-141722I", " [filtered 3 lines of output]" ] } @@ -7288,7 +7289,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015328F_20190625-015343D" + "20190805-141800F_20190805-141817D" ] } }, @@ -7316,7 +7317,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire diff backup set: 20190625-015328F_20190625-015343D" + "expire diff backup set: 20190805-141800F_20190805-141817D" ] }, "host" : "pg-primary", @@ -7330,9 +7331,9 @@ " [filtered 12 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire diff backup set: 20190625-015328F_20190625-015343D, 20190625-015328F_20190625-015348I", - "P00 INFO: remove expired backup 20190625-015328F_20190625-015348I", - "P00 INFO: remove expired backup 20190625-015328F_20190625-015343D" + "P00 INFO: expire diff backup set: 20190805-141800F_20190805-141817D, 20190805-141800F_20190805-141821I", + "P00 INFO: remove expired backup 20190805-141800F_20190805-141821I", + "P00 INFO: remove expired backup 20190805-141800F_20190805-141817D" ] } }, @@ -7382,7 +7383,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015328F_20190625-015353D" + "20190805-141800F_20190805-141826D" ] } }, @@ -7425,8 +7426,8 @@ "output" : [ " [filtered 9 lines of output]", "P00 INFO: execute exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: backup stop archive = 000000020000000000000015, lsn = 0/150000F8", - "P00 INFO: new backup label = 20190625-015328F_20190625-015401D", + "P00 INFO: backup stop archive = 000000020000000000000014, lsn = 0/140000F8", + "P00 INFO: new backup label = 20190805-141800F_20190805-141833D", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin" ] @@ 
-7446,7 +7447,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015328F_20190625-015401D" + "20190805-141800F_20190805-141833D" ] } }, @@ -7461,7 +7462,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-015328F_20190625-015353D|remove archive" + "archive retention on backup 20190805-141800F_20190805-141826D|remove archive" ] }, "host" : "pg-primary", @@ -7472,14 +7473,13 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: expire command begin 2.15: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 --repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 --stanza=demo", - "P00 DETAIL: archive retention on backup 20190625-015315F, archiveId = 9.5-1, start = 00000002000000000000000C, stop = 00000002000000000000000C", - "P00 DETAIL: archive retention on backup 20190625-015328F, archiveId = 9.5-1, start = 00000002000000000000000E, stop = 00000002000000000000000E", - "P00 DETAIL: archive retention on backup 20190625-015328F_20190625-015353D, archiveId = 9.5-1, start = 000000020000000000000012, stop = 000000020000000000000012", - "P00 DETAIL: archive retention on backup 20190625-015328F_20190625-015401D, archiveId = 9.5-1, start = 000000020000000000000015", - "P00 DETAIL: remove archive: archiveId = 9.5-1, start = 00000002000000000000000D, stop = 00000002000000000000000D", - "P00 DETAIL: remove archive: archiveId = 9.5-1, start = 00000002000000000000000F, stop = 000000020000000000000011", - "P00 DETAIL: remove archive: archiveId = 9.5-1, start = 000000020000000000000013, stop = 000000020000000000000014", + "P00 INFO: expire command begin 2.16: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 
--repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 --stanza=demo", + "P00 DETAIL: archive retention on backup 20190805-141746F, archiveId = 9.5-1, start = 00000002000000000000000C, stop = 00000002000000000000000C", + "P00 DETAIL: archive retention on backup 20190805-141800F, archiveId = 9.5-1, start = 00000002000000000000000D, stop = 00000002000000000000000D", + "P00 DETAIL: archive retention on backup 20190805-141800F_20190805-141826D, archiveId = 9.5-1, start = 000000020000000000000011, stop = 000000020000000000000011", + "P00 DETAIL: archive retention on backup 20190805-141800F_20190805-141833D, archiveId = 9.5-1, start = 000000020000000000000014", + "P00 DETAIL: remove archive: archiveId = 9.5-1, start = 00000002000000000000000E, stop = 000000020000000000000010", + "P00 DETAIL: remove archive: archiveId = 9.5-1, start = 000000020000000000000012, stop = 000000020000000000000013", "P00 INFO: expire command end: completed successfully" ] } @@ -7519,7 +7519,7 @@ "type" : "exe", "value" : { "output" : [ - " [filtered 877 lines of output]", + " [filtered 878 lines of output]", "P01 DETAIL: restore file /var/lib/pgsql/9.5/data/base/1/12211 - exists and is zero size (0B, 100%)", "P00 INFO: write /var/lib/pgsql/9.5/data/recovery.conf", "P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started)", @@ -7902,7 +7902,7 @@ "type" : "exe", "value" : { "output" : [ - "2019-06-25 01:54:48.389605+00" + "2019-08-05 14:19:22.0495+00" ] } }, @@ -7955,7 +7955,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:54:48.389605+00\" \\", + " --type=time \"--target=2019-08-05 14:19:22.0495+00\" \\", " --target-action=promote restore" ], "host" : "pg-primary", @@ -8000,7 +8000,7 @@ "value" : { "output" : [ "restore_command = 'pgbackrest --stanza=demo archive-get %f \"%p\"'", - "recovery_target_time = '2019-06-25 01:54:48.389605+00'", + 
"recovery_target_time = '2019-08-05 14:19:22.0495+00'", "recovery_target_action = 'promote'" ] } @@ -8080,19 +8080,19 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:54:40 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:54:48.389605+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:19:15 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:19:22.0495+00", "LOG: restored log file \"00000004.history\" from archive", + "LOG: restored log file \"000000040000000000000019\" from archive", + "LOG: redo starts at 0/19000028", + "LOG: consistent recovery state reached at 0/190000F8", "LOG: restored log file \"00000004000000000000001A\" from archive", - "LOG: redo starts at 0/1A000028", - "LOG: consistent recovery state reached at 0/1A0000F8", - "LOG: restored log file \"00000004000000000000001B\" from archive", - "LOG: recovery stopping before commit of transaction 630, time 2019-06-25 01:54:48.81305+00", - "LOG: redo done at 0/1B01B9C0", - "LOG: last completed transaction was at log time 2019-06-25 01:54:47.95498+00", + "LOG: recovery stopping before commit of transaction 630, time 2019-08-05 14:19:22.514378+00", + "LOG: redo done at 0/1A01C358", + "LOG: last completed transaction was at log time 2019-08-05 14:19:21.590632+00", "LOG: selected new timeline ID: 5", "LOG: archive recovery complete", - " [filtered 4 lines of output]" + " [filtered 5 lines of output]" ] } }, @@ -8141,7 +8141,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-015328F_20190625-015439D" + "20190805-141800F_20190805-141913D" ] } }, @@ -8176,7 +8176,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:54:48.389605+00\" --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:19:22.0495+00\" --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -8273,14 
+8273,14 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:55:01 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:54:48.389605+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:19:35 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:19:22.0495+00", "LOG: restored log file \"00000005.history\" from archive", - "LOG: restored log file \"00000005000000000000001C\" from archive", - "LOG: redo starts at 0/1C000028", - "LOG: consistent recovery state reached at 0/1C0000F8", - "LOG: redo done at 0/1C0000F8", - "LOG: restored log file \"00000005000000000000001C\" from archive", + "LOG: restored log file \"00000005000000000000001B\" from archive", + "LOG: redo starts at 0/1B000028", + "LOG: consistent recovery state reached at 0/1B0000F8", + "LOG: redo done at 0/1B0000F8", + "LOG: restored log file \"00000005000000000000001B\" from archive", " [filtered 8 lines of output]" ] } @@ -8295,7 +8295,7 @@ "filter" : false, "filter-context" : 2, "list" : [ - "20190625-015328F_20190625-015439D" + "20190805-141800F_20190805-141913D" ] }, "host" : "pg-primary", @@ -8311,47 +8311,47 @@ " cipher: aes-256-cbc", "", " db (current)", - " wal archive min/max (9.5-1): 00000002000000000000000C/00000005000000000000001C", + " wal archive min/max (9.5-1): 00000002000000000000000C/00000005000000000000001B", "", - " full backup: 20190625-015315F", - " timestamp start/stop: 2019-06-25 01:53:15 / 2019-06-25 01:53:27", + " full backup: 20190805-141746F", + " timestamp start/stop: 2019-08-05 14:17:46 / 2019-08-05 14:17:59", " wal start/stop: 00000002000000000000000C / 00000002000000000000000C", - " database size: 20.6MB, backup size: 20.6MB", + " database size: 20.5MB, backup size: 20.5MB", " repository size: 2.4MB, repository backup size: 2.4MB", "", - " full backup: 20190625-015328F", - " timestamp start/stop: 2019-06-25 01:53:28 / 2019-06-25 01:53:41", - " wal start/stop: 
00000002000000000000000E / 00000002000000000000000E", - " database size: 20.6MB, backup size: 20.6MB", + " full backup: 20190805-141800F", + " timestamp start/stop: 2019-08-05 14:18:00 / 2019-08-05 14:18:15", + " wal start/stop: 00000002000000000000000D / 00000002000000000000000D", + " database size: 20.5MB, backup size: 20.5MB", " repository size: 2.4MB, repository backup size: 2.4MB", "", - " diff backup: 20190625-015328F_20190625-015401D", - " timestamp start/stop: 2019-06-25 01:54:01 / 2019-06-25 01:54:04", - " wal start/stop: 000000020000000000000015 / 000000020000000000000015", - " database size: 20.6MB, backup size: 116.3KB", + " diff backup: 20190805-141800F_20190805-141833D", + " timestamp start/stop: 2019-08-05 14:18:33 / 2019-08-05 14:18:37", + " wal start/stop: 000000020000000000000014 / 000000020000000000000014", + " database size: 20.5MB, backup size: 116.5KB", " repository size: 2.4MB, repository backup size: 12.7KB", - " backup reference list: 20190625-015328F", + " backup reference list: 20190805-141800F", "", - " incr backup: 20190625-015328F_20190625-015415I", - " timestamp start/stop: 2019-06-25 01:54:15 / 2019-06-25 01:54:26", - " wal start/stop: 000000030000000000000017 / 000000030000000000000017", + " incr backup: 20190805-141800F_20190805-141848I", + " timestamp start/stop: 2019-08-05 14:18:48 / 2019-08-05 14:18:59", + " wal start/stop: 000000030000000000000016 / 000000030000000000000016", " database size: 33.9MB, backup size: 13.4MB", " repository size: 4MB, repository backup size: 1.6MB", - " backup reference list: 20190625-015328F, 20190625-015328F_20190625-015401D", + " backup reference list: 20190805-141800F, 20190805-141800F_20190805-141833D", "", - " diff backup: 20190625-015328F_20190625-015439D", - " timestamp start/stop: 2019-06-25 01:54:39 / 2019-06-25 01:54:47", - " wal start/stop: 00000004000000000000001A / 00000004000000000000001A", - " database size: 27.3MB, backup size: 6.9MB", - " repository size: 3.2MB, repository backup 
size: 831.8KB", - " backup reference list: 20190625-015328F", - "", - " incr backup: 20190625-015328F_20190625-015500I", - " timestamp start/stop: 2019-06-25 01:55:00 / 2019-06-25 01:55:05", - " wal start/stop: 00000005000000000000001C / 00000005000000000000001C", - " database size: 27.3MB, backup size: 2.0MB", - " repository size: 3.2MB, repository backup size: 209.9KB", - " backup reference list: 20190625-015328F, 20190625-015328F_20190625-015439D" + " diff backup: 20190805-141800F_20190805-141913D", + " timestamp start/stop: 2019-08-05 14:19:13 / 2019-08-05 14:19:20", + " wal start/stop: 000000040000000000000019 / 000000040000000000000019", + " database size: 27.2MB, backup size: 6.8MB", + " repository size: 3.2MB, repository backup size: 831.5KB", + " backup reference list: 20190805-141800F", + "", + " incr backup: 20190805-141800F_20190805-141934I", + " timestamp start/stop: 2019-08-05 14:19:34 / 2019-08-05 14:19:39", + " wal start/stop: 00000005000000000000001B / 00000005000000000000001B", + " database size: 27.2MB, backup size: 2.0MB", + " repository size: 3.2MB, repository backup size: 209.7KB", + " backup reference list: 20190805-141800F, 20190805-141800F_20190805-141913D" ] } }, @@ -8373,8 +8373,8 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:54:48.389605+00\" \\", - " --set=20190625-015328F_20190625-015439D --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:19:22.0495+00\" \\", + " --set=20190805-141800F_20190805-141913D --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -8471,16 +8471,16 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:54:40 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:54:48.389605+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:19:15 UTC", + "LOG: starting point-in-time recovery to 
2019-08-05 14:19:22.0495+00", "LOG: restored log file \"00000004.history\" from archive", + "LOG: restored log file \"000000040000000000000019\" from archive", + "LOG: redo starts at 0/19000028", + "LOG: consistent recovery state reached at 0/190000F8", "LOG: restored log file \"00000004000000000000001A\" from archive", - "LOG: redo starts at 0/1A000028", - "LOG: consistent recovery state reached at 0/1A0000F8", - "LOG: restored log file \"00000004000000000000001B\" from archive", - "LOG: recovery stopping before commit of transaction 630, time 2019-06-25 01:54:48.81305+00", - "LOG: redo done at 0/1B01B9C0", - "LOG: last completed transaction was at log time 2019-06-25 01:54:47.95498+00", + "LOG: recovery stopping before commit of transaction 630, time 2019-08-05 14:19:22.514378+00", + "LOG: redo done at 0/1A01C358", + "LOG: last completed transaction was at log time 2019-08-05 14:19:21.590632+00", "LOG: restored log file \"00000005.history\" from archive", "LOG: restored log file \"00000006.history\" from archive", " [filtered 6 lines of output]" @@ -8491,20 +8491,6 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo yum install perl-XML-LibXML perl-IO-Socket-SSL" - ], - "cmd-extra" : "-y 2>&1", - "host" : "pg-primary", - "load-env" : true, - "output" : false, - "run-as-user" : null - }, - "type" : "exe" - }, - { - "key" : { - "bash-wrap" : true, - "cmd" : [ "echo \"172.17.0.2 demo-bucket.s3.us-east-1.amazonaws.com s3.us-east-1.amazonaws.com\" | tee -a /etc/hosts" ], "host" : "pg-primary", @@ -8555,9 +8541,6 @@ "repo1-s3-bucket" : { "value" : "demo-bucket" }, - "repo1-s3-ca-file" : { - "value" : "/etc/pki/tls/certs/ca-bundle.crt" - }, "repo1-s3-endpoint" : { "value" : "s3.us-east-1.amazonaws.com" }, @@ -8590,7 +8573,6 @@ "repo1-retention-diff=2", "repo1-retention-full=2", "repo1-s3-bucket=demo-bucket", - "repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt", "repo1-s3-endpoint=s3.us-east-1.amazonaws.com", "repo1-s3-key=accessKey1", 
"repo1-s3-key-secret=verySecretKey1", @@ -8625,7 +8607,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 16, retries 0, closes 0", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -8652,17 +8635,17 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast --stop-auto", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 
--repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast --stop-auto", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:55:32\": backup begins after the requested immediate checkpoint completes", - "P00 INFO: backup start archive = 00000007000000000000001C, lsn = 0/1C000028", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:20:03\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup start archive = 00000007000000000000001B, lsn = 0/1B000028", " [filtered 1119 lines of output]", - "P03 INFO: backup file /var/lib/pgsql/9.5/data/base/1/12216 (0B, 100%)", "P04 INFO: backup file /var/lib/pgsql/9.5/data/base/1/12211 (0B, 100%)", + "P01 INFO: backup file /var/lib/pgsql/9.5/data/base/1/12226 (0B, 100%)", "P00 INFO: full backup size = 27.2MB", "P00 INFO: execute exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: backup stop archive = 00000007000000000000001C, lsn = 0/1C000130", - " [filtered 5 lines of output]" + "P00 INFO: backup stop archive = 00000007000000000000001B, lsn = 0/1B000130", + " [filtered 6 lines of output]" ] } }, @@ -8700,7 +8683,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stop command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stop command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp 
--repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", "P00 INFO: stop command end: completed successfully" ] } @@ -8726,7 +8709,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-delete command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-delete command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 13, retries 0, closes 0", "P00 INFO: stanza-delete command end: completed successfully" ] } @@ -8788,7 +8772,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "repository", @@ -8816,7 +8800,7 @@ "bash-wrap" : true, "cmd" : [ "sudo yum install perl perl-Time-HiRes perl-parent perl-JSON \\", - " perl-Digest-SHA perl-DBD-Pg" + " perl-Digest-SHA" ], "cmd-extra" : "-y 2>&1", "host" : "repository", @@ -9373,18 +9357,18 @@ " cipher: none", "", " db (current)", - " wal archive min/max (9.5-1): 
000000080000000000000023/000000080000000000000025", + " wal archive min/max (9.5-1): 000000080000000000000021/000000080000000000000022", "", - " full backup: 20190625-015707F", - " timestamp start/stop: 2019-06-25 01:57:07 / 2019-06-25 01:57:25", - " wal start/stop: 000000080000000000000023 / 000000080000000000000023", - " database size: 27.3MB, backup size: 27.3MB", + " full backup: 20190805-142120F", + " timestamp start/stop: 2019-08-05 14:21:20 / 2019-08-05 14:21:37", + " wal start/stop: 000000080000000000000021 / 000000080000000000000021", + " database size: 27.2MB, backup size: 27.2MB", " repository size: 3.2MB, repository backup size: 3.2MB", "", - " full backup: 20190625-015728F", - " timestamp start/stop: 2019-06-25 01:57:28 / 2019-06-25 01:57:36", - " wal start/stop: 000000080000000000000025 / 000000080000000000000025", - " database size: 27.3MB, backup size: 27.3MB", + " full backup: 20190805-142139F", + " timestamp start/stop: 2019-08-05 14:21:39 / 2019-08-05 14:21:48", + " wal start/stop: 000000080000000000000022 / 000000080000000000000022", + " database size: 27.2MB, backup size: 27.2MB", " repository size: 3.2MB, repository backup size: 3.2MB" ] } @@ -9529,7 +9513,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-standby", @@ -9557,7 +9541,7 @@ "bash-wrap" : true, "cmd" : [ "sudo yum install perl perl-Time-HiRes perl-parent perl-JSON \\", - " perl-Digest-SHA perl-DBD-Pg" + " perl-Digest-SHA" ], "cmd-extra" : "-y 2>&1", "host" : "pg-standby", @@ -9933,12 +9917,12 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:57:29 UTC", + "LOG: database system was interrupted; last known up at 2019-08-05 14:21:40 UTC", "LOG: entering standby mode", "LOG: restored log file \"00000008.history\" from archive", - "LOG: restored 
log file \"000000080000000000000025\" from archive", - "LOG: redo starts at 0/25000028", - "LOG: consistent recovery state reached at 0/250000F8", + "LOG: restored log file \"000000080000000000000022\" from archive", + "LOG: redo starts at 0/22000028", + "LOG: consistent recovery state reached at 0/22000130", "LOG: database system is ready to accept read only connections" ] } @@ -10020,7 +10004,7 @@ "output" : [ " pg_switch_xlog | now ", "----------------+-------------------------------", - " 0/2601F408 | 2019-06-25 01:58:27.705598+00", + " 0/2301FD90 | 2019-08-05 14:22:32.040158+00", "(1 row)" ] } @@ -10049,7 +10033,7 @@ "output" : [ " message | now ", "----------------+-------------------------------", - " Important Data | 2019-06-25 01:58:33.389552+00", + " Important Data | 2019-08-05 14:22:37.124113+00", "(1 row)" ] } @@ -10064,7 +10048,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "all other checks passed" + "because no primary was found" ] }, "host" : "pg-standby", @@ -10075,8 +10059,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-host=repository --stanza=demo", - "P00 INFO: switch xlog cannot be performed on the standby, all other checks passed successfully", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-host=repository --stanza=demo", + "P00 INFO: switch wal not performed because no primary was found", "P00 INFO: check command end: completed successfully" ] } @@ -10313,8 +10297,8 @@ "output" : [ " [filtered 6 lines of output]", "LOG: database system is ready to accept read only connections", - "LOG: restored log file \"000000080000000000000026\" from archive", - "LOG: started streaming WAL from primary at 0/27000000 on timeline 8" + "LOG: restored log 
file \"000000080000000000000023\" from archive", + "LOG: started streaming WAL from primary at 0/24000000 on timeline 8" ] } }, @@ -10344,9 +10328,9 @@ "type" : "exe", "value" : { "output" : [ - " message | now ", - "----------------+-------------------------------", - " Important Data | 2019-06-25 01:58:50.842981+00", + " message | now ", + "----------------+------------------------------", + " Important Data | 2019-08-05 14:22:55.71943+00", "(1 row)" ] } @@ -10375,7 +10359,7 @@ "output" : [ " message | now ", "----------------+-------------------------------", - " Important Data | 2019-06-25 01:58:51.555622+00", + " Important Data | 2019-08-05 14:22:56.491023+00", "(1 row)" ] } @@ -10607,8 +10591,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-host=repository --stanza=demo", - "P00 INFO: WAL segment 00000008000000000000002C successfully stored in the archive at '/var/lib/pgbackrest/archive/demo/9.5-1/0000000800000000/00000008000000000000002C-690245e836623931c67b800e4553236149bc1d26.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --repo1-host=repository --stanza=demo", + "P00 INFO: WAL segment 000000080000000000000029 successfully archived to 'archive/demo/9.5-1/0000000800000000/000000080000000000000029-fa9261a1a7deedb0e601f5aa9aee3be2b3c562af.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -10635,24 +10619,24 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository 
--spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000027", - "P01 DETAIL: pushed WAL file '000000080000000000000027' to the archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000024", + "P01 DETAIL: pushed WAL file '000000080000000000000024' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 4 WAL file(s) to archive: 000000080000000000000028...00000008000000000000002B", - "P02 DETAIL: pushed WAL file '000000080000000000000029' to the archive", - "P01 DETAIL: pushed WAL file '000000080000000000000028' to the archive", - "P02 DETAIL: pushed WAL file '00000008000000000000002A' to the archive", - "P01 DETAIL: pushed WAL file '00000008000000000000002B' to the archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 4 WAL file(s) to archive: 000000080000000000000025...000000080000000000000028", + "P01 DETAIL: pushed WAL file '000000080000000000000025' to the archive", + "P02 DETAIL: pushed WAL file 
'000000080000000000000026' to the archive", + "P01 DETAIL: pushed WAL file '000000080000000000000027' to the archive", + "P02 DETAIL: pushed WAL file '000000080000000000000028' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 1 WAL file(s) to archive: 00000008000000000000002C", - "P01 DETAIL: pushed WAL file '00000008000000000000002C' to the archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.5/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000029", + "P01 DETAIL: pushed WAL file '000000080000000000000029' to the archive", "P00 INFO: archive-push-async command end: completed successfully" ] } @@ -10692,24 +10676,30 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-get-async command begin 2.15: [000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028, 000000080000000000000029, 00000008000000000000002A, 00000008000000000000002B, 00000008000000000000002C] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000025...00000008000000000000002C", - "P01 DETAIL: found 
000000080000000000000025 in the archive", - "P02 DETAIL: found 000000080000000000000026 in the archive", - "P01 DETAIL: unable to find 000000080000000000000027 in the archive", - "P02 DETAIL: unable to find 000000080000000000000028 in the archive", + "P00 INFO: archive-get-async command begin 2.16: [000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028, 000000080000000000000029] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000022...000000080000000000000029", + "P01 DETAIL: found 000000080000000000000022 in the archive", + "P02 DETAIL: found 000000080000000000000023 in the archive", + "P01 DETAIL: unable to find 000000080000000000000024 in the archive", + "P02 DETAIL: unable to find 000000080000000000000025 in the archive", " [filtered 20 lines of output]", - "P00 INFO: archive-get-async command begin 2.15: [000000080000000000000027, 000000080000000000000028, 000000080000000000000029, 00000008000000000000002A, 00000008000000000000002B, 00000008000000000000002C, 00000008000000000000002D, 00000008000000000000002E] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000027...00000008000000000000002E", + "P00 INFO: archive-get-async command begin 2.16: [000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028, 000000080000000000000029, 00000008000000000000002A, 00000008000000000000002B] --log-level-console=off 
--log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.5/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000024...00000008000000000000002B", + "P02 DETAIL: found 000000080000000000000025 in the archive", + "P01 DETAIL: found 000000080000000000000024 in the archive", + "P02 DETAIL: found 000000080000000000000026 in the archive", "P01 DETAIL: found 000000080000000000000027 in the archive", - "P02 DETAIL: found 000000080000000000000028 in the archive", + "P02 DETAIL: unable to find 000000080000000000000029 in the archive", + "P02 DETAIL: unable to find 00000008000000000000002A in the archive", + "P02 DETAIL: unable to find 00000008000000000000002B in the archive", + "P01 DETAIL: found 000000080000000000000028 in the archive", + "P00 INFO: archive-get-async command end: completed successfully", + "", + " [filtered 8 lines of output]", + "P02 DETAIL: unable to find 00000008000000000000002F in the archive", + "P02 DETAIL: unable to find 000000080000000000000030 in the archive", "P01 DETAIL: found 000000080000000000000029 in the archive", - "P02 DETAIL: found 00000008000000000000002A in the archive", - "P01 DETAIL: found 00000008000000000000002B in the archive", - "P02 DETAIL: found 00000008000000000000002C in the archive", - "P01 DETAIL: unable to find 00000008000000000000002D in the archive", - "P02 DETAIL: unable to find 00000008000000000000002E in the archive", - " [filtered 13 lines of output]" + "P00 INFO: archive-get-async command end: completed successfully" ] } }, @@ -10791,22 +10781,22 @@ "value" : { "output" : [ " [filtered 2 lines of output]", - "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:59:12\": backup begins after the requested immediate checkpoint completes", - "P00 INFO: backup start archive = 00000008000000000000002E, lsn = 0/2E000028", - "P00 INFO: 
wait for replay on the standby to reach 0/2E000028", - "P00 INFO: replay on the standby reached 0/2E0000D0", - "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/pg_log/postgresql.log (8.2KB, 0%) checksum 29c668523d17144b554b47c9a3af96bc59557345", - "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2674 (360KB, 17%) checksum 9f98c59fd7fd8d79f2b470d0eb0ddf8bce495b33", - "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/global/pg_control (8KB, 17%) checksum 0d9cf6846b76c96bf3e0ba792808d56213e68a15", - "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2608 (432KB, 37%) checksum 944bc47b45ba0f9602a74bfd9d30855f23076414", - "P04 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1249 (360KB, 54%) checksum e5426f89282b1c6841bd9ff4815a8d2bc862ffa6", + "P00 INFO: execute exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:23:19\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup start archive = 00000008000000000000002B, lsn = 0/2B000028", + "P00 INFO: wait for replay on the standby to reach 0/2B000028", + "P00 INFO: replay on the standby reached 0/2B0000D0", + "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/global/pg_control (8KB, 0%) checksum 92ee819b438e2517cb5c3a334e01850510e28e1f", + "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/pg_log/postgresql.log (7.6KB, 0%) checksum e62db239d109c880b62630e0a597a99533f4310e", + "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2608 (432KB, 20%) checksum c3aedd77e3b75d3d6e08d0b57e2833d5c71ac5b6", + "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1249 (360KB, 37%) checksum fb5f6c68c53aaacb60668c5b5e6c1f1010c2afa4", + "P04 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2674 (352KB, 54%) checksum 21e7a7a42b102a58611b29b71dbf06b6507c566d", "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/pg_hba.conf (4.1KB, 54%) 
checksum 1c0bca2f7f44d354aaaefb2c21fd1290b7379690", - "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2658 (112KB, 59%) checksum 4d7b0996d87f4df06c7a7b06c37dd0fa39f810c4", - "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2673 (312KB, 74%) checksum b1875b8baa04f94c7b88a92968ad10ceb438e474", - "P04 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1259 (104KB, 78%) checksum b6f3100edc560fed6024f08b50a1d6b4d37999dd", - "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/backup_label (238B, 78%) checksum 1b94dc6ccfaaf6913ede73eb5f9dd58c3ce18c5f", - "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1247 (72KB, 82%) checksum b58edbf0e7bc8f10c490f3c8744626aa2670e401", - "P04 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2704 (40KB, 84%) checksum 231ecdeec7698b51062d2804dcd0265bb4dc3db0", + "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2673 (304KB, 68%) checksum 2c020bc20e46892a32dccf6b68f2bdefc329d751", + "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2658 (112KB, 73%) checksum 62b594b5ac435cc25118b67b6ffe202643cc847f", + "P04 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1259 (104KB, 78%) checksum 489b52839e41d10b6b5fa06dd00292311d1a1393", + "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.5/data/backup_label (238B, 78%) checksum 212c6df0075db596e4d28c0ed3343c6fc69a9dc3", + "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/2659 (72KB, 82%) checksum 10f363d03df1929a29247a85547d9525fe60ee14", + "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.5/data/base/12378/1247 (72KB, 85%) checksum c817c4e116f6de188f13674d36b730138acde746", " [filtered 30 lines of output]" ] } @@ -11070,7 +11060,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-upgrade command begin 2.15: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online 
--pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/pgsql/9.6/data --pg2-path=/var/lib/pgsql/9.6/data --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-upgrade command begin 2.16: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online --pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/pgsql/9.6/data --pg2-path=/var/lib/pgsql/9.6/data --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-upgrade command end: completed successfully" ] } @@ -11107,7 +11097,7 @@ "type" : "exe", "value" : { "output" : [ - "postgresql-9.6 (pid 4887) is running..." + "postgresql-9.6 (pid 4854) is running..." ] } }, @@ -11174,7 +11164,14 @@ "output" : true, "run-as-user" : null }, - "type" : "exe" + "type" : "exe", + "value" : { + "output" : [ + "P00 WARN: unable to check pg-2: [DbConnectError] raised from remote-0 protocol on 'pg-standby': unable to connect to 'dbname='postgres' port=5432': could not connect to server: No such file or directory", + " \tIs the server running locally and accepting", + " \tconnections on Unix domain socket \"/var/run/postgresql/.s.PGSQL.5432\"?" 
+ ] + } }, { "key" : { @@ -11327,7 +11324,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo mkdir /root/pgbackrest-release-2.15" + "sudo mkdir /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -11340,7 +11337,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/libc /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -11353,7 +11350,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.15" + "sudo cp -r /pgbackrest/src /root/pgbackrest-release-2.16" ], "host" : "build", "load-env" : true, @@ -11367,7 +11364,7 @@ "bash-wrap" : true, "cmd" : [ "sudo yum install build-essential gcc make openssl-devel libxml2-devel \\", - " perl-ExtUtils-Embed" + " postgresql-devel perl-ExtUtils-Embed" ], "cmd-extra" : "-y 2>&1", "host" : "build", @@ -11399,7 +11396,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "(cd /root/pgbackrest-release-2.15/src && ./configure)" + "(cd /root/pgbackrest-release-2.16/src && ./configure)" ], "host" : "build", "load-env" : true, @@ -11412,7 +11409,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo make -s -C /root/pgbackrest-release-2.15/src" + "sudo make -s -C /root/pgbackrest-release-2.16/src" ], "host" : "build", "load-env" : true, @@ -11439,7 +11436,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-primary", @@ -11466,7 +11463,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-DBD-Pg perl-JSON-PP" + "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-JSON-PP" ], "cmd-extra" : "-y 2>&1", "host" : "pg-primary", @@ -11581,7 +11578,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - General help", + "pgBackRest 2.16 - 
General help", "", "Usage:", " pgbackrest [options] [command]", @@ -11725,7 +11722,7 @@ "type" : "exe", "value" : { "output" : [ - "pgBackRest 2.15 - 'backup' command - 'log-path' option help", + "pgBackRest 2.16 - 'backup' command - 'log-path' option help", "", "Path where log files are stored.", "", @@ -11963,7 +11960,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -11978,7 +11975,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - " successfully stored in the archive at " + " successfully archived to " ] }, "host" : "pg-primary", @@ -11989,8 +11986,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", - "P00 INFO: WAL segment 000000010000000000000001 successfully stored in the archive at '/var/lib/pgbackrest/archive/demo/9.6-1/0000000100000000/000000010000000000000001-950df57b860fa827a4b4d20dbd0717976f07504e.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: WAL segment 000000010000000000000001 successfully archived to 
'/var/lib/pgbackrest/archive/demo/9.6-1/0000000100000000/000000010000000000000001-c19a1d8218c5e157c43d9cbf5c50f8eecd96fd7f.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -12017,9 +12014,9 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:43:23\": backup begins after the next regular checkpoint completes", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:07:26\": backup begins after the next regular checkpoint completes", "P00 INFO: backup start archive = 000000010000000000000002, lsn = 0/2000028", " [filtered 864 lines of output]", "P01 INFO: backup file /var/lib/pgsql/9.6/data/base/1/12312 (0B, 100%)", @@ -12045,7 +12042,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-014323F" + "20190805-140726F" ] } }, @@ -12073,7 +12070,7 @@ "output" : [ " [filtered 3 lines of output]", "P00 INFO: backup start archive = 000000010000000000000003, lsn = 0/3000028", - "P01 INFO: backup file /var/lib/pgsql/9.6/data/global/pg_control (8KB, 100%) checksum db2865c02e0b7e6443868fd03edadaa3954a1d51", + "P01 INFO: backup file /var/lib/pgsql/9.6/data/global/pg_control (8KB, 100%) checksum 12bb7a7b60f396ea462f396cb4296647e7281ced", "P00 INFO: diff backup size = 8KB", "P00 INFO: execute non-exclusive 
pg_stop_backup() and wait for all WAL segments to archive", "P00 INFO: backup stop archive = 000000010000000000000003, lsn = 0/30000F8", @@ -12109,18 +12106,18 @@ " db (current)", " wal archive min/max (9.6-1): 000000010000000000000002/000000010000000000000003", "", - " full backup: 20190625-014323F", - " timestamp start/stop: 2019-06-25 01:43:23 / 2019-06-25 01:43:35", + " full backup: 20190805-140726F", + " timestamp start/stop: 2019-08-05 14:07:26 / 2019-08-05 14:07:40", " wal start/stop: 000000010000000000000002 / 000000010000000000000002", - " database size: 20.9MB, backup size: 20.9MB", + " database size: 21.0MB, backup size: 21.0MB", " repository size: 2.5MB, repository backup size: 2.5MB", "", - " diff backup: 20190625-014323F_20190625-014336D", - " timestamp start/stop: 2019-06-25 01:43:36 / 2019-06-25 01:43:39", + " diff backup: 20190805-140726F_20190805-140741D", + " timestamp start/stop: 2019-08-05 14:07:41 / 2019-08-05 14:07:45", " wal start/stop: 000000010000000000000003 / 000000010000000000000003", - " database size: 20.9MB, backup size: 8.2KB", + " database size: 21.0MB, backup size: 8.2KB", " repository size: 2.5MB, repository backup size: 421B", - " backup reference list: 20190625-014323F" + " backup reference list: 20190805-140726F" ] } }, @@ -12186,12 +12183,12 @@ "type" : "exe", "value" : { "output" : [ - " [filtered 8 lines of output]", - "Jun 25 01:43:41 pg-primary systemd[1]: Starting PostgreSQL 9.6 database server...", - "Jun 25 01:43:41 pg-primary systemd[1]: postgresql-9.6.service: main process exited, code=exited, status=2/INVALIDARGUMENT", - "Jun 25 01:43:41 pg-primary systemd[1]: Failed to start PostgreSQL 9.6 database server.", - "Jun 25 01:43:41 pg-primary systemd[1]: Unit postgresql-9.6.service entered failed state.", - "Jun 25 01:43:41 pg-primary systemd[1]: postgresql-9.6.service failed." 
+ " [filtered 11 lines of output]", + "Aug 05 14:07:47 pg-primary postmaster[878]: but could not open file \"/var/lib/pgsql/9.6/data/global/pg_control\": No such file or directory", + "Aug 05 14:07:47 pg-primary systemd[1]: postgresql-9.6.service: main process exited, code=exited, status=2/INVALIDARGUMENT", + "Aug 05 14:07:47 pg-primary systemd[1]: Failed to start PostgreSQL 9.6 database server.", + "Aug 05 14:07:47 pg-primary systemd[1]: Unit postgresql-9.6.service entered failed state.", + "Aug 05 14:07:47 pg-primary systemd[1]: postgresql-9.6.service failed." ] } }, @@ -12269,10 +12266,10 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", - "P00 INFO: last backup label = 20190625-014323F_20190625-014336D, version = 2.15", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:43:55\": backup begins after the next regular checkpoint completes", - "P00 INFO: backup start archive = 000000020000000000000005, lsn = 0/5000028", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --type=incr", + "P00 INFO: last backup label = 20190805-140726F_20190805-140741D, version = 2.16", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:08:01\": backup begins after the next regular checkpoint completes", + "P00 INFO: backup start archive = 000000020000000000000006, lsn = 0/6000028", "P00 WARN: a timeline switch has occurred since the last backup, enabling delta checksum", " [filtered 9 
lines of output]" ] @@ -12330,11 +12327,11 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", - "P00 INFO: last backup label = 20190625-014323F_20190625-014355I, version = 2.15", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:44:01\": backup begins after the requested immediate checkpoint completes", - "P00 INFO: backup start archive = 000000020000000000000006, lsn = 0/6000028", - "P01 INFO: backup file /var/lib/pgsql/9.6/data/global/pg_control (8KB, 100%) checksum faa4883811ea5e218d29407b0aed84a17a3b44e3", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-full=2 --stanza=demo --start-fast --type=incr", + "P00 INFO: last backup label = 20190805-140726F_20190805-140801I, version = 2.16", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:08:06\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup start archive = 000000020000000000000007, lsn = 0/7000028", + "P01 INFO: backup file /var/lib/pgsql/9.6/data/global/pg_control (8KB, 100%) checksum 6b865105367facf3722e5c30eb5d9901be668061", " [filtered 7 lines of output]" ] } @@ -12361,7 +12358,7 @@ " \"id\" : 1", " },", " \"id\" : \"9.6-1\",", - " \"max\" : \"000000020000000000000006\",", + " \"max\" : \"000000020000000000000007\",", " \"min\" : \"000000010000000000000002\"", " }", " ],", @@ -12373,25 +12370,25 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " 
\"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 21952964,", + " \"delta\" : 21977556,", " \"repository\" : {", - " \"delta\" : 2581349,", - " \"size\" : 2581349", + " \"delta\" : 2583189,", + " \"size\" : 2583189", " },", - " \"size\" : 21952964", + " \"size\" : 21977556", " },", - " \"label\" : \"20190625-014323F\",", + " \"label\" : \"20190805-140726F\",", " \"prior\" : null,", " \"reference\" : null,", " \"timestamp\" : {", - " \"start\" : 1561427003,", - " \"stop\" : 1561427015", + " \"start\" : 1565014046,", + " \"stop\" : 1565014060", " },", " \"type\" : \"full\"", " },", @@ -12402,7 +12399,7 @@ " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", @@ -12411,60 +12408,60 @@ " \"delta\" : 8421,", " \"repository\" : {", " \"delta\" : 421,", - " \"size\" : 2581333", + " \"size\" : 2583189", " },", - " \"size\" : 21952964", + " \"size\" : 21977556", " },", - " \"label\" : \"20190625-014323F_20190625-014336D\",", - " \"prior\" : \"20190625-014323F\",", + " \"label\" : \"20190805-140726F_20190805-140741D\",", + " \"prior\" : \"20190805-140726F\",", " \"reference\" : [", - " \"20190625-014323F\"", + " \"20190805-140726F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427016,", - " \"stop\" : 1561427019", + " \"start\" : 1565014061,", + " \"stop\" : 1565014065", " },", " \"type\" : \"diff\"", " },", " {", " \"archive\" : {", - " \"start\" : \"000000020000000000000005\",", - " \"stop\" : \"000000020000000000000005\"", + " \"start\" : \"000000020000000000000006\",", + " \"stop\" : \"000000020000000000000006\"", " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", " },", " \"info\" : {", - " \"delta\" : 9341,", + " \"delta\" : 9421,", " \"repository\" : {", - " \"delta\" : 837,", - " \"size\" : 2581477", + " \"delta\" 
: 869,", + " \"size\" : 2583349", " },", - " \"size\" : 21953532", + " \"size\" : 21978188", " },", - " \"label\" : \"20190625-014323F_20190625-014355I\",", - " \"prior\" : \"20190625-014323F_20190625-014336D\",", + " \"label\" : \"20190805-140726F_20190805-140801I\",", + " \"prior\" : \"20190805-140726F_20190805-140741D\",", " \"reference\" : [", - " \"20190625-014323F\"", + " \"20190805-140726F\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427035,", - " \"stop\" : 1561427039", + " \"start\" : 1565014081,", + " \"stop\" : 1565014084", " },", " \"type\" : \"incr\"", " },", " {", " \"archive\" : {", - " \"start\" : \"000000020000000000000006\",", - " \"stop\" : \"000000020000000000000006\"", + " \"start\" : \"000000020000000000000007\",", + " \"stop\" : \"000000020000000000000007\"", " },", " \"backrest\" : {", " \"format\" : 5,", - " \"version\" : \"2.15\"", + " \"version\" : \"2.16\"", " },", " \"database\" : {", " \"id\" : 1", @@ -12473,19 +12470,19 @@ " \"delta\" : 8421,", " \"repository\" : {", " \"delta\" : 421,", - " \"size\" : 2581477", + " \"size\" : 2583349", " },", - " \"size\" : 21953532", + " \"size\" : 21978188", " },", - " \"label\" : \"20190625-014323F_20190625-014401I\",", - " \"prior\" : \"20190625-014323F_20190625-014355I\",", + " \"label\" : \"20190805-140726F_20190805-140806I\",", + " \"prior\" : \"20190805-140726F_20190805-140801I\",", " \"reference\" : [", - " \"20190625-014323F\",", - " \"20190625-014323F_20190625-014355I\"", + " \"20190805-140726F\",", + " \"20190805-140726F_20190805-140801I\"", " ],", " \"timestamp\" : {", - " \"start\" : 1561427041,", - " \"stop\" : 1561427044", + " \"start\" : 1565014086,", + " \"stop\" : 1565014090", " },", " \"type\" : \"incr\"", " }", @@ -12494,7 +12491,7 @@ " \"db\" : [", " {", " \"id\" : 1,", - " \"system-id\" : 6706277820808323406,", + " \"system-id\" : 6721684048843493692,", " \"version\" : \"9.6\"", " }", " ],", @@ -12649,7 +12646,7 @@ "output" : [ " name | last_successful_backup | 
last_archived_wal ", "--------+------------------------+--------------------------", - " \"demo\" | 2019-06-25 01:44:04+00 | 000000020000000000000006", + " \"demo\" | 2019-08-05 14:08:10+00 | 000000020000000000000007", "(1 row)" ] } @@ -12695,7 +12692,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-014323F|remove archive" + "archive retention on backup 20190805-140726F|remove archive" ] }, "host" : "pg-primary", @@ -12709,7 +12706,7 @@ " [filtered 874 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 DETAIL: archive retention on backup 20190625-014323F, archiveId = 9.6-1, start = 000000010000000000000002", + "P00 DETAIL: archive retention on backup 20190805-140726F, archiveId = 9.6-1, start = 000000010000000000000002", "P00 DETAIL: no archive to remove, archiveId = 9.6-1", "P00 INFO: expire command end: completed successfully" ] @@ -12729,7 +12726,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-014409F" + "20190805-140815F" ] } }, @@ -12744,7 +12741,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire full backup set\\: 20190625-014323F|archive retention on backup 20190625-014409F|remove archive" + "expire full backup set\\: 20190805-140726F|archive retention on backup 20190805-140815F|remove archive" ] }, "host" : "pg-primary", @@ -12758,9 +12755,9 @@ " [filtered 873 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire full backup set: 20190625-014323F, 20190625-014323F_20190625-014336D, 20190625-014323F_20190625-014355I, 20190625-014323F_20190625-014401I", - "P00 INFO: remove expired backup 20190625-014323F_20190625-014401I", - "P00 INFO: remove expired backup 20190625-014323F_20190625-014355I", + "P00 INFO: expire full backup set: 20190805-140726F, 20190805-140726F_20190805-140741D, 20190805-140726F_20190805-140801I, 20190805-140726F_20190805-140806I", + 
"P00 INFO: remove expired backup 20190805-140726F_20190805-140806I", + "P00 INFO: remove expired backup 20190805-140726F_20190805-140801I", " [filtered 2 lines of output]" ] } @@ -12823,7 +12820,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-014423F_20190625-014437D" + "20190805-140830F_20190805-140844D" ] } }, @@ -12851,7 +12848,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "expire diff backup set: 20190625-014423F_20190625-014437D" + "expire diff backup set: 20190805-140830F_20190805-140844D" ] }, "host" : "pg-primary", @@ -12865,9 +12862,9 @@ " [filtered 9 lines of output]", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin", - "P00 INFO: expire diff backup set: 20190625-014423F_20190625-014437D, 20190625-014423F_20190625-014442I", - "P00 INFO: remove expired backup 20190625-014423F_20190625-014442I", - "P00 INFO: remove expired backup 20190625-014423F_20190625-014437D" + "P00 INFO: expire diff backup set: 20190805-140830F_20190805-140844D, 20190805-140830F_20190805-140849I", + "P00 INFO: remove expired backup 20190805-140830F_20190805-140849I", + "P00 INFO: remove expired backup 20190805-140830F_20190805-140844D" ] } }, @@ -12916,7 +12913,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-014423F_20190625-014447D" + "20190805-140830F_20190805-140854D" ] } }, @@ -12959,8 +12956,8 @@ "output" : [ " [filtered 8 lines of output]", "P00 INFO: execute non-exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: backup stop archive = 000000020000000000000011, lsn = 0/110000F8", - "P00 INFO: new backup label = 20190625-014423F_20190625-014453D", + "P00 INFO: backup stop archive = 000000020000000000000012, lsn = 0/120000F8", + "P00 INFO: new backup label = 20190805-140830F_20190805-140900D", "P00 INFO: backup command end: completed successfully", "P00 INFO: expire command begin" ] @@ -12980,7 +12977,7 @@ "type" : "exe", "value" : { "output" : [ - 
"20190625-014423F_20190625-014453D" + "20190805-140830F_20190805-140900D" ] } }, @@ -12995,7 +12992,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "archive retention on backup 20190625-014423F_20190625-014447D|remove archive" + "archive retention on backup 20190805-140830F_20190805-140854D|remove archive" ] }, "host" : "pg-primary", @@ -13006,14 +13003,14 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: expire command begin 2.15: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 --repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 --stanza=demo", - "P00 DETAIL: archive retention on backup 20190625-014409F, archiveId = 9.6-1, start = 000000020000000000000008, stop = 000000020000000000000008", - "P00 DETAIL: archive retention on backup 20190625-014423F, archiveId = 9.6-1, start = 00000002000000000000000A, stop = 00000002000000000000000A", - "P00 DETAIL: archive retention on backup 20190625-014423F_20190625-014447D, archiveId = 9.6-1, start = 00000002000000000000000E, stop = 00000002000000000000000E", - "P00 DETAIL: archive retention on backup 20190625-014423F_20190625-014453D, archiveId = 9.6-1, start = 000000020000000000000011", - "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 000000020000000000000009, stop = 000000020000000000000009", - "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 00000002000000000000000B, stop = 00000002000000000000000D", - "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 00000002000000000000000F, stop = 000000020000000000000010", + "P00 INFO: expire command begin 2.16: --log-level-console=detail --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/var/lib/pgbackrest --repo1-retention-archive=1 --repo1-retention-archive-type=diff --repo1-retention-diff=2 --repo1-retention-full=2 
--stanza=demo", + "P00 DETAIL: archive retention on backup 20190805-140815F, archiveId = 9.6-1, start = 000000020000000000000009, stop = 000000020000000000000009", + "P00 DETAIL: archive retention on backup 20190805-140830F, archiveId = 9.6-1, start = 00000002000000000000000B, stop = 00000002000000000000000B", + "P00 DETAIL: archive retention on backup 20190805-140830F_20190805-140854D, archiveId = 9.6-1, start = 00000002000000000000000F, stop = 00000002000000000000000F", + "P00 DETAIL: archive retention on backup 20190805-140830F_20190805-140900D, archiveId = 9.6-1, start = 000000020000000000000012", + "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 00000002000000000000000A, stop = 00000002000000000000000A", + "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 00000002000000000000000C, stop = 00000002000000000000000E", + "P00 DETAIL: remove archive: archiveId = 9.6-1, start = 000000020000000000000010, stop = 000000020000000000000011", "P00 INFO: expire command end: completed successfully" ] } @@ -13436,7 +13433,7 @@ "type" : "exe", "value" : { "output" : [ - "2019-06-25 01:45:33.362422+00" + "2019-08-05 14:09:42.006892+00" ] } }, @@ -13489,7 +13486,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:45:33.362422+00\" \\", + " --type=time \"--target=2019-08-05 14:09:42.006892+00\" \\", " --target-action=promote restore" ], "host" : "pg-primary", @@ -13534,7 +13531,7 @@ "value" : { "output" : [ "restore_command = 'pgbackrest --stanza=demo archive-get %f \"%p\"'", - "recovery_target_time = '2019-06-25 01:45:33.362422+00'", + "recovery_target_time = '2019-08-05 14:09:42.006892+00'", "recovery_target_action = 'promote'" ] } @@ -13614,16 +13611,16 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:45:26 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:45:33.362422+00", + "LOG: database system was 
interrupted; last known up at 2019-08-05 14:09:34 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:09:42.006892+00", "LOG: restored log file \"00000004.history\" from archive", - "LOG: restored log file \"000000040000000000000016\" from archive", - "LOG: redo starts at 0/16000028", - "LOG: consistent recovery state reached at 0/160000F8", "LOG: restored log file \"000000040000000000000017\" from archive", - "LOG: recovery stopping before commit of transaction 639, time 2019-06-25 01:45:33.807831+00", - "LOG: redo done at 0/17020CA8", - "LOG: last completed transaction was at log time 2019-06-25 01:45:32.937199+00", + "LOG: redo starts at 0/17000028", + "LOG: consistent recovery state reached at 0/170000F8", + "LOG: restored log file \"000000040000000000000018\" from archive", + "LOG: recovery stopping before commit of transaction 639, time 2019-08-05 14:09:42.458028+00", + "LOG: redo done at 0/18020D30", + "LOG: last completed transaction was at log time 2019-08-05 14:09:41.53546+00", "LOG: selected new timeline ID: 5", "LOG: archive recovery complete", " [filtered 3 lines of output]" @@ -13675,7 +13672,7 @@ "type" : "exe", "value" : { "output" : [ - "20190625-014423F_20190625-014525D" + "20190805-140830F_20190805-140933D" ] } }, @@ -13710,7 +13707,7 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:45:33.362422+00\" --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:09:42.006892+00\" --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -13807,14 +13804,14 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:45:43 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:45:33.362422+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:09:52 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:09:42.006892+00", "LOG: 
restored log file \"00000005.history\" from archive", - "LOG: restored log file \"000000050000000000000018\" from archive", - "LOG: redo starts at 0/18000028", - "LOG: consistent recovery state reached at 0/180000F8", - "LOG: redo done at 0/180000F8", - "LOG: restored log file \"000000050000000000000018\" from archive", + "LOG: restored log file \"000000050000000000000019\" from archive", + "LOG: redo starts at 0/19000028", + "LOG: consistent recovery state reached at 0/190000F8", + "LOG: redo done at 0/190000F8", + "LOG: restored log file \"000000050000000000000019\" from archive", " [filtered 7 lines of output]" ] } @@ -13829,7 +13826,7 @@ "filter" : false, "filter-context" : 2, "list" : [ - "20190625-014423F_20190625-014525D" + "20190805-140830F_20190805-140933D" ] }, "host" : "pg-primary", @@ -13845,47 +13842,47 @@ " cipher: aes-256-cbc", "", " db (current)", - " wal archive min/max (9.6-1): 000000020000000000000008/000000050000000000000018", + " wal archive min/max (9.6-1): 000000020000000000000009/000000050000000000000019", "", - " full backup: 20190625-014409F", - " timestamp start/stop: 2019-06-25 01:44:09 / 2019-06-25 01:44:22", - " wal start/stop: 000000020000000000000008 / 000000020000000000000008", - " database size: 20.9MB, backup size: 20.9MB", + " full backup: 20190805-140815F", + " timestamp start/stop: 2019-08-05 14:08:15 / 2019-08-05 14:08:29", + " wal start/stop: 000000020000000000000009 / 000000020000000000000009", + " database size: 21.0MB, backup size: 21.0MB", " repository size: 2.5MB, repository backup size: 2.5MB", "", - " full backup: 20190625-014423F", - " timestamp start/stop: 2019-06-25 01:44:23 / 2019-06-25 01:44:35", - " wal start/stop: 00000002000000000000000A / 00000002000000000000000A", - " database size: 20.9MB, backup size: 20.9MB", + " full backup: 20190805-140830F", + " timestamp start/stop: 2019-08-05 14:08:30 / 2019-08-05 14:08:42", + " wal start/stop: 00000002000000000000000B / 00000002000000000000000B", + " database size: 
21.0MB, backup size: 21.0MB", " repository size: 2.5MB, repository backup size: 2.5MB", "", - " diff backup: 20190625-014423F_20190625-014453D", - " timestamp start/stop: 2019-06-25 01:44:53 / 2019-06-25 01:44:56", - " wal start/stop: 000000020000000000000011 / 000000020000000000000011", - " database size: 20.9MB, backup size: 97.6KB", + " diff backup: 20190805-140830F_20190805-140900D", + " timestamp start/stop: 2019-08-05 14:09:00 / 2019-08-05 14:09:04", + " wal start/stop: 000000020000000000000012 / 000000020000000000000012", + " database size: 21.0MB, backup size: 97.6KB", " repository size: 2.5MB, repository backup size: 11.8KB", - " backup reference list: 20190625-014423F", + " backup reference list: 20190805-140830F", "", - " incr backup: 20190625-014423F_20190625-014504I", - " timestamp start/stop: 2019-06-25 01:45:04 / 2019-06-25 01:45:14", - " wal start/stop: 000000030000000000000013 / 000000030000000000000013", + " incr backup: 20190805-140830F_20190805-140913I", + " timestamp start/stop: 2019-08-05 14:09:13 / 2019-08-05 14:09:22", + " wal start/stop: 000000030000000000000014 / 000000030000000000000014", " database size: 34.6MB, backup size: 13.7MB", " repository size: 4MB, repository backup size: 1.6MB", - " backup reference list: 20190625-014423F, 20190625-014423F_20190625-014453D", + " backup reference list: 20190805-140830F, 20190805-140830F_20190805-140900D", "", - " diff backup: 20190625-014423F_20190625-014525D", - " timestamp start/stop: 2019-06-25 01:45:25 / 2019-06-25 01:45:32", - " wal start/stop: 000000040000000000000016 / 000000040000000000000016", + " diff backup: 20190805-140830F_20190805-140933D", + " timestamp start/stop: 2019-08-05 14:09:33 / 2019-08-05 14:09:40", + " wal start/stop: 000000040000000000000017 / 000000040000000000000017", " database size: 27.8MB, backup size: 7.0MB", - " repository size: 3.3MB, repository backup size: 846.8KB", - " backup reference list: 20190625-014423F", + " repository size: 3.3MB, repository backup 
size: 847.4KB", + " backup reference list: 20190805-140830F", "", - " incr backup: 20190625-014423F_20190625-014542I", - " timestamp start/stop: 2019-06-25 01:45:42 / 2019-06-25 01:45:47", - " wal start/stop: 000000050000000000000018 / 000000050000000000000018", - " database size: 27.7MB, backup size: 2MB", - " repository size: 3.3MB, repository backup size: 211KB", - " backup reference list: 20190625-014423F, 20190625-014423F_20190625-014525D" + " incr backup: 20190805-140830F_20190805-140951I", + " timestamp start/stop: 2019-08-05 14:09:51 / 2019-08-05 14:09:56", + " wal start/stop: 000000050000000000000019 / 000000050000000000000019", + " database size: 27.8MB, backup size: 2MB", + " repository size: 3.3MB, repository backup size: 211.5KB", + " backup reference list: 20190805-140830F, 20190805-140830F_20190805-140933D" ] } }, @@ -13907,8 +13904,8 @@ "bash-wrap" : true, "cmd" : [ "sudo -u postgres pgbackrest --stanza=demo --delta \\", - " --type=time \"--target=2019-06-25 01:45:33.362422+00\" \\", - " --set=20190625-014423F_20190625-014525D --target-action=promote restore" + " --type=time \"--target=2019-08-05 14:09:42.006892+00\" \\", + " --set=20190805-140830F_20190805-140933D --target-action=promote restore" ], "host" : "pg-primary", "load-env" : true, @@ -14005,16 +14002,16 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:45:26 UTC", - "LOG: starting point-in-time recovery to 2019-06-25 01:45:33.362422+00", + "LOG: database system was interrupted; last known up at 2019-08-05 14:09:34 UTC", + "LOG: starting point-in-time recovery to 2019-08-05 14:09:42.006892+00", "LOG: restored log file \"00000004.history\" from archive", - "LOG: restored log file \"000000040000000000000016\" from archive", - "LOG: redo starts at 0/16000028", - "LOG: consistent recovery state reached at 0/160000F8", "LOG: restored log file \"000000040000000000000017\" from archive", - "LOG: recovery stopping before commit of 
transaction 639, time 2019-06-25 01:45:33.807831+00", - "LOG: redo done at 0/17020CA8", - "LOG: last completed transaction was at log time 2019-06-25 01:45:32.937199+00", + "LOG: redo starts at 0/17000028", + "LOG: consistent recovery state reached at 0/170000F8", + "LOG: restored log file \"000000040000000000000018\" from archive", + "LOG: recovery stopping before commit of transaction 639, time 2019-08-05 14:09:42.458028+00", + "LOG: redo done at 0/18020D30", + "LOG: last completed transaction was at log time 2019-08-05 14:09:41.53546+00", "LOG: restored log file \"00000005.history\" from archive", "LOG: restored log file \"00000006.history\" from archive", " [filtered 5 lines of output]" @@ -14025,20 +14022,6 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo yum install perl-XML-LibXML perl-IO-Socket-SSL" - ], - "cmd-extra" : "-y 2>&1", - "host" : "pg-primary", - "load-env" : true, - "output" : false, - "run-as-user" : null - }, - "type" : "exe" - }, - { - "key" : { - "bash-wrap" : true, - "cmd" : [ "echo \"172.17.0.2 demo-bucket.s3.us-east-1.amazonaws.com s3.us-east-1.amazonaws.com\" | tee -a /etc/hosts" ], "host" : "pg-primary", @@ -14089,9 +14072,6 @@ "repo1-s3-bucket" : { "value" : "demo-bucket" }, - "repo1-s3-ca-file" : { - "value" : "/etc/pki/tls/certs/ca-bundle.crt" - }, "repo1-s3-endpoint" : { "value" : "s3.us-east-1.amazonaws.com" }, @@ -14124,7 +14104,6 @@ "repo1-retention-diff=2", "repo1-retention-full=2", "repo1-s3-bucket=demo-bucket", - "repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt", "repo1-s3-endpoint=s3.us-east-1.amazonaws.com", "repo1-s3-key=accessKey1", "repo1-s3-key-secret=verySecretKey1", @@ -14158,7 +14137,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-create command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket 
--repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-create command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 16, retries 0, closes 0", "P00 INFO: stanza-create command end: completed successfully" ] } @@ -14185,17 +14165,17 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: backup command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast", + "P00 INFO: backup command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=4 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-retention-diff=2 --repo1-retention-full=2 --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo --start-fast", "P00 WARN: no prior backup exists, incr backup has been changed to full", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 
2019-06-25 01:46:14\": backup begins after the requested immediate checkpoint completes", - "P00 INFO: backup start archive = 000000070000000000000018, lsn = 0/18000028", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:10:15\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup start archive = 000000070000000000000019, lsn = 0/19000028", " [filtered 1136 lines of output]", - "P04 INFO: backup file /var/lib/pgsql/9.6/data/base/1/12312 (0B, 100%)", - "P01 INFO: backup file /var/lib/pgsql/9.6/data/base/1/12307 (0B, 100%)", + "P01 INFO: backup file /var/lib/pgsql/9.6/data/base/1/12322 (0B, 100%)", + "P02 INFO: backup file /var/lib/pgsql/9.6/data/base/1/12307 (0B, 100%)", "P00 INFO: full backup size = 27.7MB", "P00 INFO: execute non-exclusive pg_stop_backup() and wait for all WAL segments to archive", - "P00 INFO: backup stop archive = 000000070000000000000018, lsn = 0/18000130", - " [filtered 5 lines of output]" + "P00 INFO: backup stop archive = 000000070000000000000019, lsn = 0/19000130", + " [filtered 6 lines of output]" ] } }, @@ -14233,7 +14213,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stop command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stop command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", "P00 INFO: stop command 
end: completed successfully" ] } @@ -14259,7 +14239,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-delete command begin 2.15: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-ca-file=/etc/pki/tls/certs/ca-bundle.crt --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: stanza-delete command begin 2.16: --log-level-console=info --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/demo-repo --repo1-s3-bucket=demo-bucket --repo1-s3-endpoint=s3.us-east-1.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --stanza=demo", + "P00 INFO: http statistics: objects 1, sessions 1, requests 13, retries 0, closes 0", "P00 INFO: stanza-delete command end: completed successfully" ] } @@ -14321,7 +14302,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "repository", @@ -14348,7 +14329,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-DBD-Pg perl-JSON-PP" + "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-JSON-PP" ], "cmd-extra" : "-y 2>&1", "host" : "repository", @@ -14905,19 +14886,19 @@ " cipher: none", "", " db (current)", - " wal archive min/max (9.6-1): 00000008000000000000001E/00000008000000000000001F", + " wal archive min/max (9.6-1): 00000008000000000000001F/000000080000000000000020", "", - " full backup: 20190625-014744F", - " timestamp start/stop: 2019-06-25 01:47:44 / 2019-06-25 01:47:59", - " wal start/stop: 
00000008000000000000001E / 00000008000000000000001E", + " full backup: 20190805-141129F", + " timestamp start/stop: 2019-08-05 14:11:29 / 2019-08-05 14:11:46", + " wal start/stop: 00000008000000000000001F / 00000008000000000000001F", " database size: 27.8MB, backup size: 27.8MB", - " repository size: 3.2MB, repository backup size: 3.2MB", + " repository size: 3.3MB, repository backup size: 3.3MB", "", - " full backup: 20190625-014801F", - " timestamp start/stop: 2019-06-25 01:48:01 / 2019-06-25 01:48:10", - " wal start/stop: 00000008000000000000001F / 00000008000000000000001F", + " full backup: 20190805-141149F", + " timestamp start/stop: 2019-08-05 14:11:49 / 2019-08-05 14:11:57", + " wal start/stop: 000000080000000000000020 / 000000080000000000000020", " database size: 27.8MB, backup size: 27.8MB", - " repository size: 3.2MB, repository backup size: 3.2MB" + " repository size: 3.3MB, repository backup size: 3.3MB" ] } }, @@ -15061,7 +15042,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo scp build:/root/pgbackrest-release-2.15/src/pgbackrest /usr/bin" + "sudo scp build:/root/pgbackrest-release-2.16/src/pgbackrest /usr/bin" ], "cmd-extra" : "2>&1", "host" : "pg-standby", @@ -15088,7 +15069,7 @@ "key" : { "bash-wrap" : true, "cmd" : [ - "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-DBD-Pg perl-JSON-PP" + "sudo yum install perl perl-Time-HiRes perl-Digest-SHA perl-JSON-PP" ], "cmd-extra" : "-y 2>&1", "host" : "pg-standby", @@ -15464,12 +15445,12 @@ "type" : "exe", "value" : { "output" : [ - "LOG: database system was interrupted; last known up at 2019-06-25 01:48:02 UTC", + "LOG: database system was interrupted; last known up at 2019-08-05 14:11:50 UTC", "LOG: entering standby mode", "LOG: restored log file \"00000008.history\" from archive", - "LOG: restored log file \"00000008000000000000001F\" from archive", - "LOG: redo starts at 0/1F000028", - "LOG: consistent recovery state reached at 0/1F000130", + "LOG: restored log file 
\"000000080000000000000020\" from archive", + "LOG: redo starts at 0/20000028", + "LOG: consistent recovery state reached at 0/20000130", "LOG: database system is ready to accept read only connections" ] } @@ -15551,7 +15532,7 @@ "output" : [ " pg_switch_xlog | now ", "----------------+-------------------------------", - " 0/20022268 | 2019-06-25 01:48:58.731098+00", + " 0/210222F0 | 2019-08-05 14:12:45.029361+00", "(1 row)" ] } @@ -15578,9 +15559,9 @@ "type" : "exe", "value" : { "output" : [ - " message | now ", - "----------------+-------------------------------", - " Important Data | 2019-06-25 01:49:00.677035+00", + " message | now ", + "----------------+------------------------------", + " Important Data | 2019-08-05 14:12:46.49054+00", "(1 row)" ] } @@ -15595,7 +15576,7 @@ "filter" : true, "filter-context" : 2, "list" : [ - "all other checks passed" + "because no primary was found" ] }, "host" : "pg-standby", @@ -15606,8 +15587,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-host=repository --stanza=demo", - "P00 INFO: switch xlog cannot be performed on the standby, all other checks passed successfully", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-host=repository --stanza=demo", + "P00 INFO: switch wal not performed because no primary was found", "P00 INFO: check command end: completed successfully" ] } @@ -15844,8 +15825,8 @@ "output" : [ " [filtered 6 lines of output]", "LOG: database system is ready to accept read only connections", - "LOG: restored log file \"000000080000000000000020\" from archive", - "LOG: started streaming WAL from primary at 0/21000000 on timeline 8" + "LOG: restored log file \"000000080000000000000021\" from archive", + "LOG: started 
streaming WAL from primary at 0/22000000 on timeline 8" ] } }, @@ -15877,7 +15858,7 @@ "output" : [ " message | now ", "----------------+-------------------------------", - " Important Data | 2019-06-25 01:49:15.642895+00", + " Important Data | 2019-08-05 14:13:02.899808+00", "(1 row)" ] } @@ -15906,7 +15887,7 @@ "output" : [ " message | now ", "----------------+-------------------------------", - " Important Data | 2019-06-25 01:49:16.383919+00", + " Important Data | 2019-08-05 14:13:03.690529+00", "(1 row)" ] } @@ -16138,8 +16119,8 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: check command begin 2.15: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-host=repository --stanza=demo", - "P00 INFO: WAL segment 000000080000000000000026 successfully stored in the archive at '/var/lib/pgbackrest/archive/demo/9.6-1/0000000800000000/000000080000000000000026-693b8d4fb09c1d2fd269f7da1b523d976bc61442.gz'", + "P00 INFO: check command begin 2.16: --log-level-console=info --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --repo1-host=repository --stanza=demo", + "P00 INFO: WAL segment 000000080000000000000027 successfully archived to 'archive/demo/9.6-1/0000000800000000/000000080000000000000027-c578600c9060850d600c71b06bb20636491fbfcc.gz'", "P00 INFO: check command end: completed successfully" ] } @@ -16166,24 +16147,24 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000021", - "P01 DETAIL: pushed WAL file '000000080000000000000021' to the 
archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000022", + "P01 DETAIL: pushed WAL file '000000080000000000000022' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", "-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 4 WAL file(s) to archive: 000000080000000000000022...000000080000000000000025", - "P01 DETAIL: pushed WAL file '000000080000000000000022' to the archive", - "P02 DETAIL: pushed WAL file '000000080000000000000023' to the archive", - "P01 DETAIL: pushed WAL file '000000080000000000000024' to the archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 4 WAL file(s) to archive: 000000080000000000000023...000000080000000000000026", + "P02 DETAIL: pushed WAL file '000000080000000000000024' to the archive", + "P01 DETAIL: pushed WAL file '000000080000000000000023' to the archive", "P02 DETAIL: pushed WAL file '000000080000000000000025' to the archive", + "P01 DETAIL: pushed WAL file '000000080000000000000026' to the archive", "P00 INFO: archive-push-async command end: completed successfully", "", 
"-------------------PROCESS START-------------------", - "P00 INFO: archive-push-async command begin 2.15: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000026", - "P01 DETAIL: pushed WAL file '000000080000000000000026' to the archive", + "P00 INFO: archive-push-async command begin 2.16: [/var/lib/pgsql/9.6/data/pg_xlog] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: push 1 WAL file(s) to archive: 000000080000000000000027", + "P01 DETAIL: pushed WAL file '000000080000000000000027' to the archive", "P00 INFO: archive-push-async command end: completed successfully" ] } @@ -16223,23 +16204,23 @@ "value" : { "output" : [ "-------------------PROCESS START-------------------", - "P00 INFO: archive-get-async command begin 2.15: [00000008000000000000001F, 000000080000000000000020, 000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: get 8 WAL file(s) from archive: 00000008000000000000001F...000000080000000000000026", - "P01 DETAIL: found 00000008000000000000001F in the archive", - "P02 DETAIL: found 000000080000000000000020 in the archive", - "P01 DETAIL: unable to find 000000080000000000000021 in the archive", - "P02 DETAIL: unable to find 000000080000000000000022 in the archive", - " [filtered 20 lines of 
output]", - "P00 INFO: archive-get-async command begin 2.15: [000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", - "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000021...000000080000000000000028", - "P02 DETAIL: found 000000080000000000000022 in the archive", - "P01 DETAIL: found 000000080000000000000021 in the archive", - "P01 DETAIL: found 000000080000000000000024 in the archive", + "P00 INFO: archive-get-async command begin 2.16: [000000080000000000000020, 000000080000000000000021, 000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027] --log-level-console=off --log-level-file=detail --log-level-stderr=off --no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000020...000000080000000000000027", + "P01 DETAIL: found 000000080000000000000020 in the archive", + "P02 DETAIL: found 000000080000000000000021 in the archive", + "P01 DETAIL: unable to find 000000080000000000000022 in the archive", + "P02 DETAIL: unable to find 000000080000000000000023 in the archive", + " [filtered 33 lines of output]", + "P00 INFO: archive-get-async command begin 2.16: [000000080000000000000022, 000000080000000000000023, 000000080000000000000024, 000000080000000000000025, 000000080000000000000026, 000000080000000000000027, 000000080000000000000028, 000000080000000000000029] --log-level-console=off --log-level-file=detail --log-level-stderr=off 
--no-log-timestamp --pg1-path=/var/lib/pgsql/9.6/data --process-max=2 --repo1-host=repository --spool-path=/var/spool/pgbackrest --stanza=demo", + "P00 INFO: get 8 WAL file(s) from archive: 000000080000000000000022...000000080000000000000029", "P02 DETAIL: found 000000080000000000000023 in the archive", + "P01 DETAIL: found 000000080000000000000022 in the archive", "P01 DETAIL: found 000000080000000000000025 in the archive", - "P02 DETAIL: found 000000080000000000000026 in the archive", - "P01 DETAIL: unable to find 000000080000000000000027 in the archive", - "P02 DETAIL: unable to find 000000080000000000000028 in the archive", + "P02 DETAIL: found 000000080000000000000024 in the archive", + "P01 DETAIL: found 000000080000000000000026 in the archive", + "P02 DETAIL: found 000000080000000000000027 in the archive", + "P01 DETAIL: unable to find 000000080000000000000028 in the archive", + "P02 DETAIL: unable to find 000000080000000000000029 in the archive", " [filtered 13 lines of output]" ] } @@ -16322,15 +16303,15 @@ "value" : { "output" : [ " [filtered 2 lines of output]", - "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-06-25 01:49:35\": backup begins after the requested immediate checkpoint completes", - "P00 INFO: backup start archive = 000000080000000000000028, lsn = 0/28000028", - "P00 INFO: wait for replay on the standby to reach 0/28000028", - "P00 INFO: replay on the standby reached 0/280003C0, checkpoint 0/28000060", - "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.6/data/global/pg_control (8KB, 0%) checksum 60d5a83dacc30005f648638b382a9cac1c98a83c", - "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.6/data/pg_log/postgresql.log (5.5KB, 0%) checksum 1b8b1c4e74a42018e132de85a9d0f72c4917b2b0", + "P00 INFO: execute non-exclusive pg_start_backup() with label \"pgBackRest backup started at 2019-08-05 14:13:25\": backup begins after the requested immediate checkpoint completes", + "P00 INFO: backup start 
archive = 000000080000000000000029, lsn = 0/29000028", + "P00 INFO: wait for replay on the standby to reach 0/29000028", + "P00 INFO: replay on the standby reached 0/290000D0, checkpoint 0/29000060", + "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.6/data/global/pg_control (8KB, 0%) checksum 3ee45700b5531a67b7dc6c12a57cc4ebc94d5be2", + "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.6/data/pg_log/postgresql.log (5.6KB, 0%) checksum 085fa4c2f71708e4012814c373fd77614b58c760", "P01 INFO: backup file pg-primary:/var/lib/pgsql/9.6/data/pg_hba.conf (4.1KB, 0%) checksum 1c0bca2f7f44d354aaaefb2c21fd1290b7379690", - "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.6/data/base/12470/2608 (440KB, 21%) checksum 596500d5404a89dd47a1c8fc7474b90bb3c32121", - "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.6/data/base/12470/1249 (360KB, 37%) checksum faf11ef681c050046d598703e8776eca3812ea7a", + "P02 INFO: backup file pg-standby:/var/lib/pgsql/9.6/data/base/12470/2608 (440KB, 21%) checksum 86006fafd6442c9dbdb4aa3eb13fceb08ea26747", + "P03 INFO: backup file pg-standby:/var/lib/pgsql/9.6/data/base/12470/1249 (360KB, 37%) checksum d4e1d662fafc3d76047af4c4af55d5ca0a2303c6", " [filtered 38 lines of output]" ] } @@ -16594,7 +16575,7 @@ "type" : "exe", "value" : { "output" : [ - "P00 INFO: stanza-upgrade command begin 2.15: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online --pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/pgsql/10/data --pg2-path=/var/lib/pgsql/10/data --repo1-path=/var/lib/pgbackrest --stanza=demo", + "P00 INFO: stanza-upgrade command begin 2.16: --no-backup-standby --log-level-console=info --log-level-stderr=off --no-log-timestamp --no-online --pg1-host=pg-primary --pg2-host=pg-standby --pg1-path=/var/lib/pgsql/10/data --pg2-path=/var/lib/pgsql/10/data --repo1-path=/var/lib/pgbackrest --stanza=demo", "P00 INFO: stanza-upgrade command end: completed successfully" ] } @@ -16628,20 +16609,20 @@ 
"output" : [ "● postgresql-10.service - PostgreSQL 10 database server", " Loaded: loaded (/usr/lib/systemd/system/postgresql-10.service; disabled; vendor preset: disabled)", - " Active: active (running) since Tue 2019-06-25 01:49:57 UTC; 421ms ago", + " Active: active (running) since Mon 2019-08-05 14:13:46 UTC; 475ms ago", " Docs: https://www.postgresql.org/docs/10/static/", - " Process: 3986 ExecStartPre=/usr/pgsql-10/bin/postgresql-10-check-db-dir ${PGDATA} (code=exited, status=0/SUCCESS)", - " Main PID: 3991 (postmaster)", - " CGroup: /docker/22adc956e711d039bce132beac84beb0782d62eebef745e5325d50ee2fffb506/system.slice/postgresql-10.service", - " ├─3991 /usr/pgsql-10/bin/postmaster -D /var/lib/pgsql/10/data/", - " ├─3992 postgres: logger process ", - " ├─3994 postgres: checkpointer process ", - " ├─3995 postgres: writer process ", - " ├─3996 postgres: wal writer process ", - " ├─3997 postgres: autovacuum launcher process ", - " ├─3998 postgres: archiver process ", - " ├─3999 postgres: stats collector process ", - " └─4000 postgres: bgworker: logical replication launcher " + " Process: 3961 ExecStartPre=/usr/pgsql-10/bin/postgresql-10-check-db-dir ${PGDATA} (code=exited, status=0/SUCCESS)", + " Main PID: 3966 (postmaster)", + " CGroup: /docker/b5e36150644171a56bf49ea64c1a200dd4fd60bdf713d737daa004709d2438a5/system.slice/postgresql-10.service", + " ├─3966 /usr/pgsql-10/bin/postmaster -D /var/lib/pgsql/10/data/", + " ├─3967 postgres: logger process ", + " ├─3969 postgres: checkpointer process ", + " ├─3970 postgres: writer process ", + " ├─3971 postgres: wal writer process ", + " ├─3972 postgres: autovacuum launcher process ", + " ├─3973 postgres: archiver process ", + " ├─3974 postgres: stats collector process ", + " └─3975 postgres: bgworker: logical replication launcher " ] } }, @@ -16708,7 +16689,14 @@ "output" : true, "run-as-user" : null }, - "type" : "exe" + "type" : "exe", + "value" : { + "output" : [ + "P00 WARN: unable to check pg-2: [DbConnectError] 
raised from remote-0 protocol on 'pg-standby': unable to connect to 'dbname='postgres' port=5432': could not connect to server: No such file or directory", + " \tIs the server running locally and accepting", + " \tconnections on Unix domain socket \"/var/run/postgresql/.s.PGSQL.5432\"?" + ] + } }, { "key" : { diff -Nru pgbackrest-2.15.1/doc/resource/git-history.cache pgbackrest-2.16/doc/resource/git-history.cache --- pgbackrest-2.15.1/doc/resource/git-history.cache 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/resource/git-history.cache 2019-08-05 16:03:04.000000000 +0000 @@ -1,5 +1,310 @@ [ { + "commit": "3d3003e9ca6dd449ea16c6812d1cabc392c9d294", + "date": "2019-08-01 20:35:01 -0400", + "subject": "The check command is implemented partly in C.", + "body": "Implement switch WAL and archive check in C but leave the rest in Perl for now.\n\nThe main idea was to have some real integration tests for the new database code so the rest of the migration can wait." + }, + { + "commit": "e4901d50d5e99168f7962b93f7a327e45f7b7fed", + "date": "2019-08-01 15:38:27 -0400", + "subject": "Add Db object to encapsulate PostgreSQL queries and commands.", + "body": "Migrate functionality from the Perl Db module to C. For now this is just enough to implement the WAL switch check.\n\nAdd the dbGet() helper function to get Db objects easily.\n\nCreate macros in harnessPq to make writing pq scripts easier by grouping commonly used functions together." 
+ }, + { + "commit": "f9e1f3a79823ee1bd6656e4f7a8fb23e735b8ccf", + "date": "2019-08-01 14:28:30 -0400", + "subject": "Retry S3 RequestTimeTooSkewed errors instead of immediately terminating.", + "body": "The cause of this error seems to be that a failed request takes so long that a subsequent retry at the http level uses outdated headers.\n\nWe're not sure if pgBackRest it to blame here (in one case a kernel downgrade fixed it, in another case an incorrect network driver was the problem) so add retries to hopefully deal with the issue if it is not too persistent. If SSL_write() has long delays before reporting an error then this will obviously affect backup performance." + }, + { + "commit": "2eb3c9f95f6e57072b65da81cb07a1268ebbf38e", + "date": "2019-08-01 09:58:24 -0400", + "subject": "Improve error handling for SSL_write().", + "body": "Error codes were not being caught for SSL_write() so it was hard to see exactly what was happening in error cases. Report errors to aid in debugging.\n\nAlso add a retry for SSL_ERROR_WANT_READ. Even though we have not been able to reproduce this case it is required by SSL_write() so go ahead and implement it." + }, + { + "commit": "89c67287bcb8bc7884b5dd37703b0d7786c68b95", + "date": "2019-07-31 20:44:49 -0400", + "subject": "Improve multi-host handling in protocol helper.", + "body": "Multiple PostgreSQL hosts were supported via the host-id option but there are cases where it is useful to be able to directly specify the host id required, e.g. to iterate through pg* hosts when looking for candidate primaries and standbys during backup." + }, + { + "commit": "893ae24284e506c551fbc28bec8719881a856e2a", + "date": "2019-07-31 19:58:57 -0400", + "subject": "Add timeout to walSegmentFind().", + "body": "Keep trying to locate the WAL segment until timeout. This is useful for the check and backup commands which must wait for segments to arrive in the archive." 
+ }, + { + "commit": "03b28da1cac8f39c7f57d2c15a83ad667a51b997", + "date": "2019-07-31 11:35:58 -0400", + "subject": "Rename control/control module to control/common.", + "body": "This is more consistent with how other common modules are named." + }, + { + "commit": "a04baa1bdc07f85041bfaf910f122b7ae278d6aa", + "date": "2019-07-30 11:42:56 -0400", + "subject": "Fix incorrect comment (obviously pasted from S3 code)." + }, + { + "commit": "3d892cfb75c3303b0b592d4481d415d25c3f0df9", + "date": "2019-07-30 11:39:01 -0400", + "subject": "Remove extra linefeed." + }, + { + "commit": "88c1929ec5a41915753e3d3078bdaae62d14bf0f", + "date": "2019-07-26 08:37:58 -0400", + "subject": "Don't pass local config* options to the remote.", + "body": "The remotes have their own config options (repo-host-config, etc.) so don't pass the local config* options.\n\nThis was a regression from the behavior of the Perl code and while there have been no field reports it caused breakage on test systems with multiple configurations." + }, + { + "commit": "f8b0676fd6edbe915bc23d2f082b4862004ca151", + "date": "2019-07-25 20:15:06 -0400", + "subject": "Allow modules to be included for testing without requiring coverage.", + "body": "Sometimes it is useful to get at the internals of a module that is not being tested for coverage in order to provide coverage for another module that is being tested. The include directive allows this.\n\nUpdate modules that had previously been added to coverage that only need to be included." + }, + { + "commit": "554d98746a96d5a93e3449d62c81c9b7a7c84e9d", + "date": "2019-07-25 17:36:51 -0400", + "subject": "Add repo-s3-port option for setting a non-standard S3 service port.", + "body": "If this option is set then ports appended to repo-s3-endpoint or repo-s3-host will be ignored.\n\nSetting this option explicitly may be the only way to use a bare ipv6 address with S3 (since multiple colons confuse the parser) but we plan to improve this in the future." 
+ }, + { + "commit": "d8ca0e5c5bc2af6b85b20f48b40b83762d415b7a", + "date": "2019-07-25 17:05:39 -0400", + "subject": "Add Perl interface to C PgQuery object.", + "body": "This validates that all current queries work with the new interface and removes the dependency on DBD::Pg." + }, + { + "commit": "415542b4a3589ff7b25dbd97d1041a9b1ff87815", + "date": "2019-07-25 14:50:02 -0400", + "subject": "Add PostgreSQL query client.", + "body": "This direct interface to libpq allows simple queries to be run against PostgreSQL and supports timeouts.\n\nTesting is performed using a shim that can use scripted responses to test all aspects of the client code. The shim will be very useful for testing backup scenarios on complex topologies." + }, + { + "commit": "59f135340d4f76c522c8b24ecb23d56b57b2b0f8", + "date": "2019-07-25 14:34:16 -0400", + "subject": "The local command for backup is implemented entirely in C.", + "body": "The local process is now entirely migrated to C. Since all major I/O operations are performed in the local process, the vast majority of I/O is now performed in C." + }, + { + "commit": "54ec8f151e4164c3f363f417bece7c4b1533dfd6", + "date": "2019-07-24 19:45:35 -0400", + "subject": "Add int rendering to jsonFromVar()." + }, + { + "commit": "615735e7eed32c429a13559584a7268e4dbff3fc", + "date": "2019-07-24 06:52:49 -0400", + "subject": "Add new types to JSON render.", + "body": "Add bool, array, and int64 as valid array subtypes.\n\nPretty print for the array subtype is not correct but is currently not in use (this can be seen at line 328 in typeJsonTest.c)." + }, + { + "commit": "62f0c7fb37e160d4a8a2dbe410fe8a569bc4ba38", + "date": "2019-07-22 13:09:51 -0400", + "subject": "Add lock bot configuration.", + "body": "Lock closed issues after ninety days to prevent posting on old issues." 
+ }, + { + "commit": "38ba458616f450232dc1090306e651561c9b3076", + "date": "2019-07-18 08:42:42 -0400", + "subject": "Add IoSink filter.", + "body": "Discard all data passed to the filter. Useful for calculating size/checksum on a remote system when no data needs to be returned.\n\nUpdate ioReadDrain() to automatically use the IoSink filter." + }, + { + "commit": "d1dd6add4853a77d2ebbf27c326bbb4670b78955", + "date": "2019-07-17 16:55:21 -0400", + "subject": "Remove never-used infoBackupCheckPg() function.", + "body": "Contributed by Cynthia Shang." + }, + { + "commit": "3bdba4933d45451a3fa6f5dafcb79fd976bffef6", + "date": "2019-07-17 16:49:42 -0400", + "subject": "Fix incorrect handling of transfer-encoding response to HEAD request.", + "body": "The HTTP server can use either content-length or transfer-encoding to indicate that there is content in the response. HEAD requests do not include content but return all the same headers as GET. In the HEAD case we were ignoring content-length but not transfer-encoding which led to unexpected eof errors on AWS S3. Our test server, minio, uses content-length so this was not caught in integration testing.\n\nIgnore all content for HEAD requests (no matter how it is reported) and add a unit test for transfer-encoding to prevent a regression." + }, + { + "commit": "6f981c53bb63d3a644a8573cb9edb162c443d03b", + "date": "2019-07-17 15:44:55 -0400", + "subject": "Remove obsolete ignoreMissing parameter.", + "body": "Contributed by Cynthia Shang." + }, + { + "commit": "7662d32e6020148d822fd5080f2a5ebda7ebe103", + "date": "2019-07-17 15:42:37 -0400", + "subject": "Fix comment typos and clarify HEAD response behavior." + }, + { + "commit": "eee67db4d611bb18950daf7bddb84217b010d767", + "date": "2019-07-17 14:09:50 -0400", + "subject": "Allow pg storage to be remote.", + "body": "None of the currently migrated commands needed remote pg storage but now backup, check, stanza-* will need it." 
+ }, + { + "commit": "30f55a3c2a139ffd59547fd8e422c3fae3feecd3", + "date": "2019-07-15 17:36:24 -0400", + "subject": "Add compressed storage feature.", + "body": "This feature denotes storage that can compress files so that they take up less space than what was written. Currently this includes the Posix and CIFS drivers. The stored size of the file will be rechecked after write to determine if the reported size is different. This check would be wasted on object stores such as S3, and they might not report the file as existing immediately after write.\n\nAlso add tests to each storage driver to check features." + }, + { + "commit": "3e1062825dde7cab506225a3b0658552b44de7ce", + "date": "2019-07-15 16:49:46 -0400", + "subject": "Allow multiple filters to be pushed to the remote and return results.", + "body": "Previously only a single filter could be pushed to the remote since order was not being maintained. Now the filters are strictly ordered.\n\nResults are returned from the remote and set in the local IoFilterGroup so they can be retrieved.\n\nExpand remote filter support to include all filters." + }, + { + "commit": "d5654375a5152764f76335eb5265eb994d1df6bd", + "date": "2019-07-15 08:44:41 -0400", + "subject": "Add ioReadDrain().", + "body": "Read all data from an IoRead object and discard it. This is handy for calculating size, hash, etc. when the output is not needed.\n\nUpdate code where a loop was used before." + }, + { + "commit": "cdb75ac8b38810e2e7c6f5d0161443650f94bb31", + "date": "2019-07-15 07:13:36 -0400", + "subject": "Add constants for path and archive.info/backup.info combinations.", + "body": "Contributed by Cynthia Shang." + }, + { + "commit": "ede7df9fb1d937f228e982d987d306345007dfe0", + "date": "2019-07-14 15:53:31 -0400", + "subject": "Allow NULL in JSON list." + }, + { + "commit": "c836c483dc7198a685991753300bae49d3caa73c", + "date": "2019-07-14 15:42:55 -0400", + "subject": "Add lstClear() to List object." 
+ }, + { + "commit": "e10577d0b0c8054a08513b069e2f2647ddd51fb7", + "date": "2019-07-11 09:13:56 -0400", + "subject": "Fix incorrect offline upper bound for ignoring page checksum errors.", + "body": "For offline backups the upper bound was being set to 0x0000FFFF0000FFFF rather than UINT64_MAX. This meant that page checksum errors might be ignored for databases with a lot of past WAL in offline mode.\n\nOnline mode is not affected since the upper bound is retrieved from pg_start_backup()." + }, + { + "commit": "2fd0ebb78aabe882d4b080d4014d7ecabbded3da", + "date": "2019-07-10 15:08:35 -0400", + "subject": "Fix links broken by non-standard version.", + "body": "Using version 2.15.1 fixed the duplicate tarball problem but broke the auto-generated links. Fix them manually since this should not be a common problem." + }, + { + "commit": "6a89c1526e1ecc2940ed2bf42258ceb9ef85f0e0", + "date": "2019-07-10 12:04:25 -0400", + "subject": "Revert a2dcdc07.", + "body": "It is simpler to implement the required logic in stanza-delete rather than add complexity to this function." + }, + { + "commit": "04646599a776348914b3e9bb9b707d2845540aac", + "date": "2019-07-10 06:17:33 -0400", + "subject": "Remove extraneous test macro." + }, + { + "commit": "4e7db608dc6488feb99e5f2707f06e3ca39d78e8", + "date": "2019-07-10 06:11:21 -0400", + "subject": "Clarify that return statements are not removed in production builds." + }, + { + "commit": "a22a6dc08c25a2af9ac9a6f2331144f2bae8dcbe", + "date": "2019-07-10 06:06:07 -0400", + "subject": "Update contributor name." + }, + { + "commit": "a2dcdc0711a8dfb7d562b2ee0a8586de9676669f", + "date": "2019-07-09 16:41:58 -0400", + "subject": "Update lockStopTest() to optionally return a result rather than error.", + "body": "Some commands (e.g. stanza-delete) would prefer to throw a customized error." 
+ }, + { + "commit": "27b3246e852e98902f21589f5cdfece9ebb27b39", + "date": "2019-07-08 08:29:25 -0400", + "subject": "Exclude more build files from rsync between tests.", + "body": "Files (especially build.auto.h) were being removed and forcing a full build between separate invocations of test.pl.\n\nThis affected ad-hoc testing at the command-line, not a full test run in CI." + }, + { + "commit": "5e1ed2e8a52c2c4d2151ef17496f0847cd9ae939", + "date": "2019-07-05 18:34:15 -0400", + "subject": "Remove clang static analysis.", + "body": "This analysis never produced anything but false positives (var might be NULL) but took over a minute per test run and added 600MB to the test container." + }, + { + "commit": "488fb672948c07abfd7e38604eacbd0f82610b1a", + "date": "2019-07-05 17:25:01 -0400", + "subject": "Force PostgreSQL versions to string for newer versions of JSON:PP.", + "body": "Since 2.91 JSON::PP has a bias for saving variables that look like numbers as numbers even if they were declared as strings.\n\nForce versions to strings where needed by appending ''.\n\nUpdate the json-pp-perl package on Ubuntu 18.04 to 2.97 to provide test coverage." + }, + { + "commit": "9836578520e3fb5c038230ebfb123001a7eb32fa", + "date": "2019-07-05 16:55:17 -0400", + "subject": "Remove perl critic and coverage.", + "body": "No new Perl code is being developed, so these tools are just taking up time and making migrations to newer platforms harder. There are only a few Perl tests remaining with full coverage so the coverage tool does not warn of loss of coverage in most cases.\n\nRemove both tools and associated libraries." + }, + { + "commit": "fc2101352206a87f8471fd37455d9cd990fce95d", + "date": "2019-07-05 16:25:28 -0400", + "subject": "Fix scoping violations exposed by optimizations in gcc 9.", + "body": "gcc < 9 makes all compound literals function scope, even though the C spec requires them to be invalid outside the current scope. 
Since the compiler and valgrind were not enforcing this we had a few violations which caused problems in gcc >= 9.\n\nEven though we are not quite ready to support gcc 9 officially, fix the scoping violations that currently exist in the codebase." + }, + { + "commit": "1708f1d1514b3f2afb69664317df705a1fccfaf7", + "date": "2019-07-02 22:20:35 -0400", + "subject": "Use minio for integration testing.", + "body": "ScalityS3 has not received any maintenance in years and is slow to start which is bad for testing. Replace it with minio which starts quickly and ships as a single executable or a tiny container.\n\nMinio has stricter limits on allowable characters but should still provide enough coverage to show that our encoding is working correctly.\n\nThis commit also includes the upgrade to openssl 1.1.1 in the Ubuntu 18.04 container." + }, + { + "commit": "b9b21315ead6610bb41ee19794763555a7265261", + "date": "2019-07-02 22:09:12 -0400", + "subject": "Updates for openssl 1.1.1.", + "body": "Some HTTP error tests were failing after the upgrade to openssl 1.1.1, though the rest of the unit and integration tests worked fine. This seemed to be related to the very small messages used in the error testing, but it pointed to an issue with the code not being fully compliant, made worse by auto-retry being enabled by default.\n\nDisable auto-retry and implement better error handling to bring the code in line with openssl recommendations.\n\nThere's no evidence this is a problem in the field, but having all the tests pass seems like a good idea and the new code is certainly more robust.\n\nCoverage will be complete in the next commit when openssl 1.1.1 is introduced." + }, + { + "commit": "c55009d0f9ebfea746ab270be167dc2d78976ce9", + "date": "2019-06-27 14:39:11 -0400", + "subject": "Community yum package can be installed with --var=package=yum.", + "body": "Like apt, the community yum package can now be installed instead of a user-specified package." 
+ }, + { + "commit": "b0728c33dbd7c99a7b785c152f20d5cdfa902bff", + "date": "2019-06-27 14:30:20 -0400", + "subject": "Remove Debian package patch merged to upstream." + }, + { + "commit": "020101b30bfe9f2d34b878fcd51fad932781d9f8", + "date": "2019-06-27 09:38:40 -0400", + "subject": "Update release notes to explicitly select release commit when tagging." + }, + { + "commit": "33e3d316d63a41cd64d217abe30786cfb48bf09e", + "date": "2019-06-26 19:52:04 -0400", + "subject": "Reverse loop in infoPgSave() to be consistent with infoPgNewLoad().", + "body": "Contributed by Cynthia Shang." + }, + { + "commit": "4bffa0c5bb7d551c60b8d8bcff9a3bbe6e02eddd", + "date": "2019-06-26 15:02:30 -0400", + "subject": "Add test function to create the S3 bucket instead of using aws cli.", + "body": "Eventually the idea is to remove the dependency on aws cli since Python is a big install." + }, + { + "commit": "4815752ccc46ff742f67b369bc75ad3efcf11204", + "date": "2019-06-26 08:24:58 -0400", + "subject": "Add Perl interface to C storage layer.", + "body": "Maintaining the storage layer/drivers in two languages is burdensome. Since the integration tests require the Perl storage layer/drivers we'll need them even after the core code is migrated to C. Create an interface layer so the Perl code can be removed and new storage drivers/features introduced without adding Perl equivalents.\n\nThe goal is to move the integration tests to C so this interface will eventually be removed. That being the case, the interface was designed for maximum compatibility to ease the transition. The result looks a bit hacky but we'll improve it as needed until it can be retired." + }, + { + "commit": "bd6c0941e9e3aa2dab84f7404da1f9dc60cd693b", + "date": "2019-06-25 17:27:19 -0400", + "subject": "Fix missing dash in site name.", + "body": "Without this the project name and tagline just run together." 
+ }, + { + "commit": "466602387bc279aa44a811dc2948b893e4127b2c", + "date": "2019-06-25 08:42:20 -0400", + "subject": "Begin v2.16 development." + }, + { + "commit": "6650d8144cba46f93ad75c11ad98d87c57d2ea7f", + "date": "2019-06-25 08:29:06 -0400", + "subject": "v2.15: C Implementation of Expire" + }, + { "commit": "51fcaee43edf022aea0f94b76d254f2de8b6e1d1", "date": "2019-06-25 07:58:38 -0400", "subject": "Add host-repo-path variable internal replacement.", @@ -21,13 +326,13 @@ "commit": "c22e10e4a938b444ac7912efc3b751829401360f", "date": "2019-06-24 15:42:33 -0400", "subject": "Honor configure --prefix option.", - "body": "The --prefix option was entirely ignored and DESTDIR was a combination of DESTDIR and bindir.\n\nBring both in line with recommendations for autoconf and make as specified in https://www.gnu.org/software/make/manual/html_node/Directory-Variables.html and https://www.gnu.org/prep/standards/html_node/DESTDIR.html.\n\nSuggested by Daniel Westermann." + "body": "The --prefix option was entirely ignored and DESTDIR was a combination of DESTDIR and bindir.\n\nBring both in line with recommendations for autoconf and make as specified in https://www.gnu.org/software/make/manual/html_node/Directory-Variables.html and https://www.gnu.org/prep/standards/html_node/DESTDIR.html." }, { "commit": "b498188f01f8d2ccd4d0ce2cce3af2e5069d9ac3", "date": "2019-06-24 11:59:44 -0400", "subject": "Error on db history mismatch when expiring.", - "body": "Amend commit 434cd832 to error when the db history in archive.info and backup.info do not match.\n\nThe Perl code would attempt to reconcile the history by matching on system id and version but we are not planning to migrate that code to C. It's possible that there are users with mismatches but if so they should have been getting errors from info for the last six months. It's easy enough to manually fix these files if there are any mismatches in the field.\n\nContributed by Cynthia Shang." 
+ "body": "Amend commit 434cd832 to error when the db history in archive.info and backup.info do not match.\n\nThe Perl code would attempt to reconcile the history by matching on system id and version but we are not planning to migrate that code to C. It's possible that there are users with mismatches but if so they should have been getting errors from info for the last six months. It's easy enough to manually fix these files if there are any mismatches in the field." }, { "commit": "039e515a319216035187c89efccf97143d4cac03", @@ -56,7 +361,7 @@ "commit": "434cd832855e4e189403962753fd8771e29880a4", "date": "2019-06-18 15:19:20 -0400", "subject": "The expire command is implemented entirely in C.", - "body": "This implementation duplicates the functionality of the Perl code but does so with different logic and includes full unit tests.\n\nAlong the way at least one bug was fixed, see issue #748.\n\nContributed by Cynthia Shang." + "body": "This implementation duplicates the functionality of the Perl code but does so with different logic and includes full unit tests.\n\nAlong the way at least one bug was fixed, see issue #748." }, { "commit": "f88bee7b3321a2c79f0317913ba1e83a56d19c7d", @@ -68,7 +373,7 @@ "commit": "0efdf2576f02b1768fb8805b27571e403ac3cb52", "date": "2019-06-18 07:35:34 -0400", "subject": "Remove hard-coded PostgreSQL user so $PGUSER works.", - "body": "The PostgreSQL user was hard-coded to the OS user which libpq will automatically use if $PGUSER is not set, so this code was redundant and prevented $PGUSER from working when set.\n\nSuggested by Julian Zhang, Janis Puris." + "body": "The PostgreSQL user was hard-coded to the OS user which libpq will automatically use if $PGUSER is not set, so this code was redundant and prevented $PGUSER from working when set." 
}, { "commit": "593446718a8997bfd674cf5e54230290cb5bcdea", @@ -98,13 +403,13 @@ "commit": "c64c9c05905d435cd650195ed9470355a4d2be2f", "date": "2019-06-17 06:59:06 -0400", "subject": "Add backup management functions to InfoBackup.", - "body": "Allow current backups to be listed and deleted.\n\nAlso expose some constants required by expire and stanza-* commands.\n\nContributed by Cynthia Shang." + "body": "Allow current backups to be listed and deleted.\n\nAlso expose some constants required by expire and stanza-* commands." }, { "commit": "44bafc127d9d4e38478aa44d0adc0f34695c6640", "date": "2019-06-17 06:47:15 -0400", "subject": "Rename info*New() functions to info*NewLoad().", - "body": "These names more accurately reflect what the functions do and follow the convention started in Info and InfoPg.\n\nAlso remove the ignoreMissing parameter since it was never used.\n\nContributed by Cynthia Shang." + "body": "These names more accurately reflect what the functions do and follow the convention started in Info and InfoPg.\n\nAlso remove the ignoreMissing parameter since it was never used." }, { "commit": "f05fbc54a8f00a56937439fb88eafe8e239781e8", @@ -192,7 +497,7 @@ "commit": "d7bd0c58cdd9a434aa5893db16e6bcf8425e26b9", "date": "2019-06-05 07:27:24 -0400", "subject": "Use wal_level=replica in the documentation for PostgreSQL >= 9.6.", - "body": "The documentation was using wal_level=hot_standby which is a deprecated setting.\n\nAlso remove the reference to wal_level=archive since it is no longer supported and is not recommended for older versions.\n\nSuggested by Patrick McLaughlin." + "body": "The documentation was using wal_level=hot_standby which is a deprecated setting.\n\nAlso remove the reference to wal_level=archive since it is no longer supported and is not recommended for older versions." 
}, { "commit": "aca11b2fa19337f934fc095e33f014d5e6df71f3", @@ -428,7 +733,7 @@ "commit": "a839830333490555800377ea095d756765f476e4", "date": "2019-05-20 16:19:14 -0400", "subject": "Add most unimplemented functions to the remote storage driver.", - "body": "Add pathCreate(), pathRemove(), pathSync(), and remove() to the driver.\n\nContributed by Cynthia Shang." + "body": "Add pathCreate(), pathRemove(), pathSync(), and remove() to the driver." }, { "commit": "bbf2e0d5b0876d3663cc69d10925c6e0d08cd729", diff -Nru pgbackrest-2.15.1/doc/xml/auto/metric-coverage-report.auto.xml pgbackrest-2.16/doc/xml/auto/metric-coverage-report.auto.xml --- pgbackrest-2.15.1/doc/xml/auto/metric-coverage-report.auto.xml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/xml/auto/metric-coverage-report.auto.xml 2019-08-05 16:03:04.000000000 +0000 @@ -8,29 +8,36 @@ command/archive 10/10 (100.0%) - 56/56 (100.0%) - 176/176 (100.0%) + 64/64 (100.0%) + 179/179 (100.0%) command/archive/get 6/6 (100.0%) 86/86 (100.0%) - 214/214 (100.0%) + 213/213 (100.0%) command/archive/push 9/9 (100.0%) - 86/86 (100.0%) - 265/265 (100.0%) + 84/84 (100.0%) + 259/259 (100.0%) command/backup - 5/5 (100.0%) - 44/44 (100.0%) - 117/117 (100.0%) + 9/9 (100.0%) + 108/108 (100.0%) + 259/259 (100.0%) + + + + command/check + 1/1 (100.0%) + 4/4 (100.0%) + 20/20 (100.0%) @@ -44,7 +51,7 @@ command/expire 10/10 (100.0%) 130/130 (100.0%) - 266/266 (100.0%) + 264/264 (100.0%) @@ -65,21 +72,21 @@ command/local 1/1 (100.0%) --- - 16/16 (100.0%) + 17/17 (100.0%) command/remote 1/1 (100.0%) 6/6 (100.0%) - 27/27 (100.0%) + 28/28 (100.0%) command/restore 2/2 (100.0%) - 48/48 (100.0%) - 85/85 (100.0%) + 46/46 (100.0%) + 80/80 (100.0%) @@ -105,9 +112,9 @@ common/crypto - 23/23 (100.0%) - 68/68 (100.0%) - 310/310 (100.0%) + 25/25 (100.0%) + 72/72 (100.0%) + 323/323 (100.0%) @@ -119,16 +126,16 @@ common/io - 43/43 (100.0%) - 104/104 (100.0%) - 471/471 (100.0%) + 44/44 (100.0%) + 106/106 (100.0%) + 485/485 (100.0%) 
common/io/filter - 36/36 (100.0%) - 84/84 (100.0%) - 367/367 (100.0%) + 41/41 (100.0%) + 88/88 (100.0%) + 419/419 (100.0%) @@ -140,30 +147,37 @@ common/io/tls - 14/14 (100.0%) - 76/76 (100.0%) - 239/239 (100.0%) + 17/17 (100.0%) + 88/88 (100.0%) + 292/292 (100.0%) common/type - 227/227 (100.0%) - 504/504 (100.0%) - 2708/2708 (100.0%) + 228/228 (100.0%) + 516/516 (100.0%) + 2730/2730 (100.0%) config 98/98 (100.0%) 538/538 (100.0%) - 1385/1385 (100.0%) + 1384/1384 (100.0%) + + + + db + 15/15 (100.0%) + 48/48 (100.0%) + 214/214 (100.0%) info - 35/35 (100.0%) - 106/106 (100.0%) - 511/511 (100.0%) + 34/34 (100.0%) + 98/98 (100.0%) + 496/496 (100.0%) @@ -175,23 +189,23 @@ postgres - 15/15 (100.0%) - 66/66 (100.0%) - 189/189 (100.0%) + 26/26 (100.0%) + 110/110 (100.0%) + 343/343 (100.0%) protocol - 59/59 (100.0%) - 136/136 (100.0%) - 712/712 (100.0%) + 60/60 (100.0%) + 164/164 (100.0%) + 738/738 (100.0%) storage - 61/61 (100.0%) - 154/154 (100.0%) - 693/693 (100.0%) + 63/63 (100.0%) + 162/162 (100.0%) + 715/715 (100.0%) @@ -211,20 +225,20 @@ storage/remote 23/23 (100.0%) - 78/78 (100.0%) - 478/478 (100.0%) + 90/90 (100.0%) + 493/493 (100.0%) storage/s3 28/28 (100.0%) - 110/110 (100.0%) - 582/582 (100.0%) + 120/120 (100.0%) + 595/595 (100.0%) TOTAL - 949/949 (100.0%) - 3609/3610 (99.97%) - 13085/13085 (100.0%) + 994/994 (100.0%) + 3857/3858 (99.97%) + 13820/13820 (100.0%) \ No newline at end of file diff -Nru pgbackrest-2.15.1/doc/xml/reference.xml pgbackrest-2.16/doc/xml/reference.xml --- pgbackrest-2.15.1/doc/xml/reference.xml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/xml/reference.xml 2019-08-05 16:03:04.000000000 +0000 @@ -437,6 +437,15 @@ 127.0.0.1 + + + S3 repository port. + + Port to use when connecting to the endpoint (or host if specified). + + 9000 + + S3 repository region. 
diff -Nru pgbackrest-2.15.1/doc/xml/release.xml pgbackrest-2.16/doc/xml/release.xml --- pgbackrest-2.15.1/doc/xml/release.xml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/xml/release.xml 2019-08-05 16:03:04.000000000 +0000 @@ -12,6 +12,91 @@ + + + + + + + + + + + +

Retry S3 RequestTimeTooSkewed errors instead of immediately terminating.

+
+ + + + + + +

Fix incorrect handling of transfer-encoding response to HEAD request.

+
+ + + + + + + +

Fix scoping violations exposed by optimizations in gcc 9.

+
+
+ + + +

Add repo-s3-port option for setting a non-standard S3 service port.

+
+
+ + + + + + + + +

The local command for backup is implemented entirely in C.

+
+ + + + + + +

The check command is implemented partly in C.

+
+
+ + + + + + + +

Add Perl interface to C storage layer.

+
+ + + + + + +

Add Db object to encapsulate queries and commands.

+
+ + + + + + +

Add PostgreSQL query client.

+
+
+
+
+ @@ -7074,6 +7159,11 @@ arogozin + + Ale&scaron; Zelen&yacute; + aleszeleny + + Andres Freund anarazel @@ -7133,6 +7223,11 @@ the1forte + + Christian Lange + chrlange + + Christoph Berg ChristophBerg @@ -7386,6 +7481,11 @@ golpa + + Ned T. Crigler + crigler + + Nick Floersch seinick @@ -7406,6 +7506,11 @@ hitech73 + + Pavel Suderevsky + psuderevsky + + Pritam Barhate pritammobisoft @@ -7441,6 +7546,11 @@ sfrazer + + sean0101n + sean0101n + + Lardi&egrave;re S&eacute;bastien slardiere @@ -7461,6 +7571,11 @@ sfrost + + Tim Garton + ralfthewise + + Todd Vernick gintoddic diff -Nru pgbackrest-2.15.1/doc/xml/user-guide.xml pgbackrest-2.16/doc/xml/user-guide.xml --- pgbackrest-2.15.1/doc/xml/user-guide.xml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/doc/xml/user-guide.xml 2019-08-05 16:03:04.000000000 +0000 @@ -458,24 +458,24 @@ -

contains embedded Perl which requires some additional modules.

+

contains embedded Perl which requires some additional packages.

Install required Perl packages - apt-get install libdbd-pg-perl + apt-get install perl -y 2>&1 yum install perl perl-Time-HiRes perl-parent perl-JSON - perl-Digest-SHA perl-DBD-Pg + perl-Digest-SHA -y 2>&1 - yum install perl perl-Time-HiRes perl-Digest-SHA perl-DBD-Pg perl-JSON-PP + yum install perl perl-Time-HiRes perl-Digest-SHA perl-JSON-PP -y 2>&1 @@ -536,14 +536,19 @@ -y 2>&1 - + yum -y install {[pgbackrest-repo-path]}/{[package]} -y 2>&1 - + yum install pgbackrest + + + yum install pgbackrest + -y 2>&1 + @@ -584,20 +589,6 @@ - - Install packages required for S3-compatible object store support - - - apt-get install libio-socket-ssl-perl libxml-libxml-perl - -y 2>&1 - - - - yum install perl-XML-LibXML perl-IO-Socket-SSL - -y 2>&1 - - -

supports locating repositories in S3-compatible object stores. The bucket used to store the repository must be created in advance &mdash; will not do it automatically. The repository can be located in the bucket root (/) but it's usually best to place it in a subpath so object store logs or other data can also be stored in the bucket without conflicts.

@@ -627,7 +618,6 @@ {[s3-bucket]} {[s3-endpoint]} {[s3-region]} - /etc/pki/tls/certs/ca-bundle.crt 4 @@ -768,14 +758,16 @@
- apt-get install build-essential libssl-dev libxml2-dev libperl-dev zlib1g-dev + + apt-get install build-essential libssl-dev libxml2-dev libperl-dev zlib1g-dev + libpq-dev -y 2>&1 yum install build-essential gcc openssl-devel libxml2-devel - perl-ExtUtils-Embed + postgresql-devel perl-ExtUtils-Embed -y 2>&1 @@ -783,7 +775,7 @@ yum install build-essential gcc make openssl-devel libxml2-devel - perl-ExtUtils-Embed + postgresql-devel perl-ExtUtils-Embed -y 2>&1 @@ -1097,7 +1089,7 @@ {[project-exe]} {[dash]}-stanza={[postgres-cluster-demo]} {[dash]}-log-level-console=info check - successfully stored in the archive at + successfully archived to @@ -2764,7 +2756,7 @@ {[project-exe]} {[dash]}-stanza={[postgres-cluster-demo]} {[dash]}-log-level-console=info check - all other checks passed + because no primary was found diff -Nru pgbackrest-2.15.1/.github/lock.yml pgbackrest-2.16/.github/lock.yml --- pgbackrest-2.15.1/.github/lock.yml 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/.github/lock.yml 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,36 @@ +# Configuration for Lock Threads - https://github.com/dessant/lock-threads + +# Number of days of inactivity before a closed issue or pull request is locked +daysUntilLock: 90 + +# Skip issues and pull requests created before a given timestamp. Timestamp must +# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable +skipCreatedBefore: false + +# Issues and pull requests with these labels will be ignored. Set to `[]` to disable +exemptLabels: [] + +# Label to add before locking, such as `outdated`. Set to `false` to disable +lockLabel: false + +# Comment to post before locking. Set to `false` to disable +lockComment: > + This thread has been automatically locked. Please open a new issue for related bugs. + +# Assign `resolved` as the reason for locking. 
Set to `false` to disable +setLockReason: true + +# Limit to only `issues` or `pulls` +# only: issues + +# Optionally, specify configuration settings just for `issues` or `pulls` +# issues: +# exemptLabels: +# - help-wanted +# lockLabel: outdated + +# pulls: +# daysUntilLock: 30 + +# Repository to extend settings from +# _extends: repo diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Archive/Get/File.pm pgbackrest-2.16/lib/pgBackRest/Archive/Get/File.pm --- pgbackrest-2.15.1/lib/pgBackRest/Archive/Get/File.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Archive/Get/File.pm 2019-08-05 16:03:04.000000000 +0000 @@ -21,9 +21,6 @@ use pgBackRest::Config::Config; use pgBackRest::Protocol::Helper; use pgBackRest::Protocol::Storage::Helper; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; -use pgBackRest::Storage::Filter::Sha; use pgBackRest::Storage::Helper; #################################################################################################################################### diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Archive/Info.pm pgbackrest-2.16/lib/pgBackRest/Archive/Info.pm --- pgbackrest-2.15.1/lib/pgBackRest/Archive/Info.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Archive/Info.pm 2019-08-05 16:03:04.000000000 +0000 @@ -27,7 +27,6 @@ use pgBackRest::Manifest; use pgBackRest::Protocol::Storage::Helper; use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; use pgBackRest::Storage::Helper; #################################################################################################################################### @@ -409,9 +408,6 @@ # Get the db-system-id from the WAL file depending on the version of postgres my $iSysIdOffset = $strDbVersion >= PG_VERSION_93 ? 
PG_WAL_SYSTEM_ID_OFFSET_GTE_93 : PG_WAL_SYSTEM_ID_OFFSET_LT_93; - # Read first 8k of WAL segment - my $tBlock; - # Error if the file encryption setting is not valid for the repo if (!storageRepo()->encryptionValid(storageRepo()->encrypted($strArchiveFilePath))) { @@ -423,10 +419,12 @@ my $oFileIo = storageRepo()->openRead( $strArchiveFilePath, {rhyFilter => $strArchiveFile =~ ('\.' . COMPRESS_EXT . '$') ? - [{strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS}]}] : undef, + [{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_DECOMPRESS, false]}] : undef, strCipherPass => $self->cipherPassSub()}); + $oFileIo->open(); - $oFileIo->read(\$tBlock, 512, true); + my $tBlock; + $oFileIo->read(\$tBlock, 512); $oFileIo->close(); # Get the required data from the file that was pulled into scalar $tBlock @@ -579,12 +577,14 @@ # Fill db section $self->numericSet(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_SYSTEM_ID, undef, $ullDbSysId); - $self->set(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_VERSION, undef, $strDbVersion); + # Force the version to a string since newer versions of JSON::PP lose track of the fact that it is one + $self->set(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_VERSION, undef, $strDbVersion . ''); $self->numericSet(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_ID, undef, $iDbHistoryId); # Fill db history $self->numericSet(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_ID, $ullDbSysId); - $self->set(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_VERSION, $strDbVersion); + # Force the version to a string since newer versions of JSON::PP lose track of the fact that it is one + $self->set(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_VERSION, $strDbVersion . 
''); # Return from function and log return values if any return logDebugReturn($strOperation); diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Backup/Backup.pm pgbackrest-2.16/lib/pgBackRest/Backup/Backup.pm --- pgbackrest-2.15.1/lib/pgBackRest/Backup/Backup.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Backup/Backup.pm 2019-08-05 16:03:04.000000000 +0000 @@ -30,8 +30,6 @@ use pgBackRest::Protocol::Storage::Helper; use pgBackRest::Common::Io::Handle; use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; -use pgBackRest::Storage::Filter::Sha; use pgBackRest::Storage::Helper; use pgBackRest::Version; @@ -229,7 +227,7 @@ if ($cType eq 'd') { logDebugMisc($strOperation, "remove path ${strName}"); - $oStorageRepo->remove(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strName}", {bRecurse => true}); + $oStorageRepo->pathRemove(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strName}", {bRecurse => true}); } # Else add the file/link to be deleted later else @@ -323,7 +321,7 @@ storageRepo()->pathCreate(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strPath}", {bIgnoreExists => true}); } - if (storageRepo()->driver()->capability(STORAGE_CAPABILITY_LINK)) + if (storageRepo()->capability(STORAGE_CAPABILITY_LINK)) { for my $strTarget ($oBackupManifest->keys(MANIFEST_SECTION_BACKUP_TARGET)) { @@ -338,13 +336,6 @@ } } - # Build the lsn start parameter to pass to the extra function - my $hStartLsnParam = - { - iWalId => defined($strLsnStart) ? hex((split('/', $strLsnStart))[0]) : 0xFFFF, - iWalOffset => defined($strLsnStart) ? 
hex((split('/', $strLsnStart))[1]) : 0xFFFF, - }; - # Iterate all files in the manifest foreach my $strRepoFile ( sort {sprintf("%016d-%s", $oBackupManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $b, MANIFEST_SUBKEY_SIZE), $b) cmp @@ -403,13 +394,13 @@ # Queue for parallel backup $oBackupProcess->queueJob( $iHostConfigIdx, $strQueueKey, $strRepoFile, OP_BACKUP_FILE, - [$strDbFile, $strRepoFile, $lSize, + [$strDbFile, $bIgnoreMissing, $lSize, $oBackupManifest->get(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM, false), - cfgOption(CFGOPT_CHECKSUM_PAGE) ? isChecksumPage($strRepoFile) : false, $strBackupLabel, $bCompress, - cfgOption(CFGOPT_COMPRESS_LEVEL), $oBackupManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, - MANIFEST_SUBKEY_TIMESTAMP, false), $bIgnoreMissing, - cfgOption(CFGOPT_CHECKSUM_PAGE) && isChecksumPage($strRepoFile) ? $hStartLsnParam : undef, - cfgOption(CFGOPT_DELTA), defined($strReference) ? true : false], + cfgOption(CFGOPT_CHECKSUM_PAGE) ? isChecksumPage($strRepoFile) : false, + defined($strLsnStart) ? hex((split('/', $strLsnStart))[0]) : 0xFFFFFFFF, + defined($strLsnStart) ? hex((split('/', $strLsnStart))[1]) : 0xFFFFFFFF, + $strRepoFile, defined($strReference) ? true : false, $bCompress, cfgOption(CFGOPT_COMPRESS_LEVEL), + $strBackupLabel, cfgOption(CFGOPT_DELTA)], {rParamSecure => $oBackupManifest->cipherPassSub() ? 
[$oBackupManifest->cipherPassSub()] : undef}); # Size and checksum will be removed and then verified later as a sanity check @@ -451,7 +442,8 @@ { ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_HOST, $hJob->{iHostConfigIdx}), false), - $hJob->{iProcessId}, @{$hJob->{rParam}}[0..4], @{$hJob->{rResult}}, $lSizeTotal, $lSizeCurrent, $lManifestSaveSize, + $hJob->{iProcessId}, @{$hJob->{rParam}}[0], @{$hJob->{rParam}}[7], @{$hJob->{rParam}}[2], @{$hJob->{rParam}}[3], + @{$hJob->{rParam}}[4], @{$hJob->{rResult}}, $lSizeTotal, $lSizeCurrent, $lManifestSaveSize, $lManifestSaveCurrent); } @@ -745,7 +737,7 @@ &log(WARN, "aborted backup ${strAbortedBackup} cannot be resumed: ${strReason}"); &log(TEST, TEST_BACKUP_NORESUME); - $oStorageRepo->remove(STORAGE_REPO_BACKUP . "/${strAbortedBackup}", {bRecurse => true}); + $oStorageRepo->pathRemove(STORAGE_REPO_BACKUP . "/${strAbortedBackup}", {bRecurse => true}); undef($oAbortedManifest); } @@ -979,7 +971,9 @@ # Add compression filter if ($bCompress) { - push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP}); + push( + @{$rhyFilter}, + {strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_COMPRESS, false, cfgOption(CFGOPT_COMPRESS_LEVEL)]}); } # If the backups are encrypted, then the passphrase for the backup set from the manifest file is required to access @@ -1064,7 +1058,7 @@ $oBackupManifest->set(MANIFEST_SECTION_BACKUP, MANIFEST_KEY_LABEL, undef, $strBackupLabel); # Sync backup path if supported - if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC)) + if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC)) { # Sync all paths in the backup $oStorageRepo->pathSync(STORAGE_REPO_BACKUP . "/${strBackupLabel}"); @@ -1096,12 +1090,12 @@ {'strCipherPass' => $strCipherPassManifest}), $oStorageRepo->openWrite( "${strHistoryPath}/${strBackupLabel}.manifest." . 
COMPRESS_EXT, - {rhyFilter => [{strClass => STORAGE_FILTER_GZIP}], + {rhyFilter => [{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_COMPRESS, false, 9]}], bPathCreate => true, bAtomic => true, strCipherPass => defined($strCipherPassManifest) ? $strCipherPassManifest : undef})); # Sync history path if supported - if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC)) + if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC)) { $oStorageRepo->pathSync(STORAGE_REPO_BACKUP . qw{/} . PATH_BACKUP_HISTORY); $oStorageRepo->pathSync($strHistoryPath); @@ -1110,7 +1104,7 @@ # Create a link to the most recent backup $oStorageRepo->remove(STORAGE_REPO_BACKUP . qw(/) . LINK_LATEST); - if (storageRepo()->driver()->capability(STORAGE_CAPABILITY_LINK)) + if (storageRepo()->capability(STORAGE_CAPABILITY_LINK)) { $oStorageRepo->linkCreate( STORAGE_REPO_BACKUP . "/${strBackupLabel}", STORAGE_REPO_BACKUP . qw{/} . LINK_LATEST, {bRelative => true}); @@ -1120,7 +1114,7 @@ $oBackupInfo->add($oBackupManifest); # Sync backup root path if supported - if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC)) + if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC)) { $oStorageRepo->pathSync(STORAGE_REPO_BACKUP); } diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Backup/File.pm pgbackrest-2.16/lib/pgBackRest/Backup/File.pm --- pgbackrest-2.15.1/lib/pgBackRest/Backup/File.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Backup/File.pm 2019-08-05 16:03:04.000000000 +0000 @@ -12,17 +12,15 @@ use File::Basename qw(dirname); use Storable qw(dclone); -use pgBackRest::Backup::Filter::PageChecksum; use pgBackRest::Common::Exception; use pgBackRest::Common::Io::Handle; use pgBackRest::Common::Log; use pgBackRest::Common::String; +use pgBackRest::Config::Config; use pgBackRest::DbVersion; use pgBackRest::Manifest; use pgBackRest::Protocol::Storage::Helper; use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; -use 
pgBackRest::Storage::Filter::Sha; use pgBackRest::Storage::Helper; #################################################################################################################################### @@ -40,210 +38,6 @@ push @EXPORT, qw(BACKUP_FILE_NOOP); #################################################################################################################################### -# backupFile -#################################################################################################################################### -sub backupFile -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strDbFile, # Database file to backup - $strRepoFile, # Location in the repository to copy to - $lSizeFile, # File size - $strChecksum, # File checksum to be checked - $bChecksumPage, # Should page checksums be calculated? - $strBackupLabel, # Label of current backup - $bCompress, # Compress destination file - $iCompressLevel, # Compress level - $lModificationTime, # File modification time - $bIgnoreMissing, # Is it OK if the file is missing? - $hExtraParam, # Parameter to pass to the extra function - $bDelta, # Is the delta option on? - $bHasReference, # Does the file exist in the repo in a prior backup in the set? - $strCipherPass, # Passphrase to access the repo file (undefined if repo not encrypted). This - # parameter must always be last in the parameter list to this function. - ) = - logDebugParam - ( - __PACKAGE__ . 
'::backupFile', \@_, - {name => 'strDbFile', trace => true}, - {name => 'strRepoFile', trace => true}, - {name => 'lSizeFile', trace => true}, - {name => 'strChecksum', required => false, trace => true}, - {name => 'bChecksumPage', trace => true}, - {name => 'strBackupLabel', trace => true}, - {name => 'bCompress', trace => true}, - {name => 'iCompressLevel', trace => true}, - {name => 'lModificationTime', trace => true}, - {name => 'bIgnoreMissing', default => true, trace => true}, - {name => 'hExtraParam', required => false, trace => true}, - {name => 'bDelta', trace => true}, - {name => 'bHasReference', trace => true}, - {name => 'strCipherPass', required => false, trace => true}, - ); - - my $oStorageRepo = storageRepo(); # Repo storage - my $iCopyResult = BACKUP_FILE_COPY; # Copy result - my $strCopyChecksum; # Copy checksum - my $rExtra; # Page checksum result - my $lCopySize; # Copy Size - my $lRepoSize; # Repo size - - # Add compression suffix if needed - my $strFileOp = $strRepoFile . ($bCompress ? '.' . COMPRESS_EXT : ''); - - my $bCopy = true; - - # If checksum is defined then the file needs to be checked. If delta option then check the DB and possibly the repo, else just - # check the repo. - if (defined($strChecksum)) - { - # If delta, then check the DB checksum and possibly the repo. If the checksum does not match in either case then recopy. - if ($bDelta) - { - ($strCopyChecksum, $lCopySize) = storageDb()->hashSize($strDbFile, {bIgnoreMissing => $bIgnoreMissing}); - - # If the DB file exists, then check the checksum - if (defined($strCopyChecksum)) - { - $bCopy = !($strCopyChecksum eq $strChecksum && $lCopySize == $lSizeFile); - - # If the database file checksum and size are same and the file is in a prior backup, then no need to copy. If the - # checksum/size do not match, that is OK, just leave the copy result as COPY so the file will be copied to this - # backup. 
- if (!$bCopy && $bHasReference) - { - $iCopyResult = BACKUP_FILE_NOOP; - } - } - # Else the source file is missing from the database so skip this file - else - { - $iCopyResult = BACKUP_FILE_SKIP; - $bCopy = false; - } - } - - # If this is not a delta backup or it is and the file exists and the checksum from the DB matches, then also test the - # checksum of the file in the repo (unless it is in a prior backup) and if the checksum doesn't match, then there may be - # corruption in the repo, so recopy - if (!$bDelta || !$bHasReference) - { - # If this is a delta backup and the file is missing from the DB, then remove it from the repo (backupManifestUpdate will - # remove it from the manifest) - if ($iCopyResult == BACKUP_FILE_SKIP) - { - $oStorageRepo->remove(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strFileOp}"); - } - elsif (!$bDelta || !$bCopy) - { - # Add decompression - my $rhyFilter; - - if ($bCompress) - { - push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS}]}); - } - - # Get the checksum - ($strCopyChecksum, $lCopySize) = $oStorageRepo->hashSize( - $oStorageRepo->openRead(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strFileOp}", - {rhyFilter => $rhyFilter, strCipherPass => $strCipherPass})); - - # Determine if the file needs to be recopied - $bCopy = !($strCopyChecksum eq $strChecksum && $lCopySize == $lSizeFile); - - # Set copy result - $iCopyResult = $bCopy ? BACKUP_FILE_RECOPY : BACKUP_FILE_CHECKSUM; - } - } - } - - # Copy the file - if ($bCopy) - { - # Add sha filter - my $rhyFilter = [{strClass => STORAGE_FILTER_SHA}]; - - # Add page checksum filter - if ($bChecksumPage) - { - # Determine which segment no this is by checking for a numeric extension. No extension means segment 0. - my $iSegmentNo = ($strDbFile =~ /\.[0-9]+$/) ? 
substr(($strDbFile =~ m/\.[0-9]+$/g)[0], 1) + 0 : 0; - - push( - @{$rhyFilter}, - {strClass => BACKUP_FILTER_PAGECHECKSUM, - rxyParam => [$iSegmentNo, $hExtraParam->{iWalId}, $hExtraParam->{iWalOffset}]}); - }; - - # Add compression - if ($bCompress) - { - push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP, rxyParam => [{iLevel => $iCompressLevel}]}); - } - - # Open the file - my $oSourceFileIo = storageDb()->openRead($strDbFile, {rhyFilter => $rhyFilter, bIgnoreMissing => $bIgnoreMissing}); - - # If source file exists - if (defined($oSourceFileIo)) - { - my $oDestinationFileIo = $oStorageRepo->openWrite( - STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strFileOp}", - {bPathCreate => true, bProtocolCompress => !$bCompress, strCipherPass => $strCipherPass}); - - # Copy the file - $oStorageRepo->copy($oSourceFileIo, $oDestinationFileIo); - - # Get sha checksum and size - $strCopyChecksum = $oSourceFileIo->result(STORAGE_FILTER_SHA); - $lCopySize = $oSourceFileIo->result(COMMON_IO_HANDLE); - $lRepoSize = $oDestinationFileIo->result(COMMON_IO_HANDLE); - - if (!defined($lRepoSize)) - { - confess &log(ERROR, "REPO_SIZE IS NOT SET"); - } - - # Get results of page checksum validation - $rExtra = $bChecksumPage ? $oSourceFileIo->result(BACKUP_FILTER_PAGECHECKSUM) : undef; - } - # Else if source file is missing the database removed it - else - { - $iCopyResult = BACKUP_FILE_SKIP; - } - } - - # If the file was copied get the repo size only if the storage can store the files with a different size than what was written. - # This has to be checked after the file is at rest because filesystem compression may affect the actual repo size and this - # cannot be calculated in stream. - # - # If the file was checksummed then get the size in all cases since we don't already have it. 
- if ((($iCopyResult == BACKUP_FILE_COPY || $iCopyResult == BACKUP_FILE_RECOPY) && - $oStorageRepo->driver()->capability(STORAGE_CAPABILITY_SIZE_DIFF)) || - $iCopyResult == BACKUP_FILE_CHECKSUM) - { - $lRepoSize = ($oStorageRepo->info(STORAGE_REPO_BACKUP . "/${strBackupLabel}/${strFileOp}"))->size(); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'iCopyResult', value => $iCopyResult, trace => true}, - {name => 'lCopySize', value => $lCopySize, trace => true}, - {name => 'lRepoSize', value => $lRepoSize, trace => true}, - {name => 'strCopyChecksum', value => $strCopyChecksum, trace => true}, - {name => 'rExtra', value => $rExtra, trace => true}, - ); -} - -push @EXPORT, qw(backupFile); - -#################################################################################################################################### # backupManifestUpdate #################################################################################################################################### sub backupManifestUpdate @@ -361,21 +155,22 @@ if ($bChecksumPage) { # The valid flag should be set - if (defined($rExtra->{bValid})) + if (defined($rExtra->{valid})) { # Store the valid flag - $oManifest->boolSet(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE, $rExtra->{bValid}); + $oManifest->boolSet( + MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE, $rExtra->{valid}); # If the page was not valid - if (!$rExtra->{bValid}) + if (!$rExtra->{valid}) { # Check for a page misalignment if ($lSizeCopy % PG_PAGE_SIZE != 0) { # Make sure the align flag was set, otherwise there is a bug - if (!defined($rExtra->{bAlign}) || $rExtra->{bAlign}) + if (!defined($rExtra->{align}) || $rExtra->{align}) { - confess &log(ASSERT, 'bAlign flag should have been set for misaligned page'); + confess &log(ASSERT, 'align flag should have been set for misaligned page'); } # Emit a warning so the user 
knows something is amiss @@ -388,13 +183,13 @@ { $oManifest->set( MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE_ERROR, - dclone($rExtra->{iyPageError})); + dclone($rExtra->{error})); # Build a pretty list of the page errors my $strPageError; my $iPageErrorTotal = 0; - foreach my $iyPage (@{$rExtra->{iyPageError}}) + foreach my $iyPage (@{$rExtra->{error}}) { $strPageError .= (defined($strPageError) ? ', ' : ''); diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Backup/Filter/PageChecksum.pm pgbackrest-2.16/lib/pgBackRest/Backup/Filter/PageChecksum.pm --- pgbackrest-2.15.1/lib/pgBackRest/Backup/Filter/PageChecksum.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Backup/Filter/PageChecksum.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,168 +0,0 @@ -#################################################################################################################################### -# Backup Page Checksum Filter -#################################################################################################################################### -package pgBackRest::Backup::Filter::PageChecksum; -use parent 'pgBackRest::Common::Io::Filter'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::DbVersion qw(PG_PAGE_SIZE); -use pgBackRest::LibC qw(:checksum); - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant BACKUP_FILTER_PAGECHECKSUM => __PACKAGE__; - push @EXPORT, qw(BACKUP_FILTER_PAGECHECKSUM); - 
-#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oParent, - $iSegmentNo, - $iWalId, - $iWalOffset, - ) = - logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oParent', trace => true}, - {name => 'iSegmentNo', trace => true}, - {name => 'iWalId', trace => true}, - {name => 'iWalOffset', trace => true}, - ); - - # Bless with new class - my $self = $class->SUPER::new($oParent); - bless $self, $class; - - # Set variables - $self->{iSegmentNo} = $iSegmentNo; - $self->{iWalId} = $iWalId; - $self->{iWalOffset} = $iWalOffset; - - # Create the result object - $self->{hResult}{bValid} = true; - $self->{hResult}{bAlign} = true; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# read - validate page checksums -#################################################################################################################################### -sub read -{ - my $self = shift; - my $rtBuffer = shift; - my $iSize = shift; - - # Call the io method - my $iActualSize = $self->parent()->read($rtBuffer, $iSize); - - # Validate page checksums for the read block - if ($iActualSize > 0) - { - # If the buffer is not divisible by 0 then it's not valid - if (!$self->{hResult}{bAlign} || ($iActualSize % PG_PAGE_SIZE != 0)) - { - if (!$self->{hResult}{bAlign}) - { - confess &log(ASSERT, "should not be possible to see two misaligned blocks in a row"); - } - - $self->{hResult}{bValid} = false; - 
$self->{hResult}{bAlign} = false; - delete($self->{hResult}{iyPageError}); - } - else - { - # Calculate offset to the first block in the buffer - my $iBlockOffset = int(($self->size() - $iActualSize) / PG_PAGE_SIZE) + ($self->{iSegmentNo} * 131072); - - if (!pageChecksumBufferTest( - $$rtBuffer, $iActualSize, $iBlockOffset, PG_PAGE_SIZE, $self->{iWalId}, - $self->{iWalOffset})) - { - $self->{hResult}{bValid} = false; - - # Now figure out exactly where the errors occurred. It would be more efficient if the checksum function returned an - # array, but we're hoping there won't be that many errors to scan so this should work fine. - for (my $iBlockNo = 0; $iBlockNo < int($iActualSize / PG_PAGE_SIZE); $iBlockNo++) - { - my $iBlockNoStart = $iBlockOffset + $iBlockNo; - - if (!pageChecksumTest( - substr($$rtBuffer, $iBlockNo * PG_PAGE_SIZE, PG_PAGE_SIZE), $iBlockNoStart, PG_PAGE_SIZE, - $self->{iWalId}, $self->{iWalOffset})) - { - my $iLastIdx = defined($self->{hResult}{iyPageError}) ? @{$self->{hResult}{iyPageError}} - 1 : 0; - my $iyLast = defined($self->{hResult}{iyPageError}) ? 
$self->{hResult}{iyPageError}[$iLastIdx] : undef; - - if (!defined($iyLast) || (!ref($iyLast) && $iyLast != $iBlockNoStart - 1) || - (ref($iyLast) && $iyLast->[1] != $iBlockNoStart - 1)) - { - push(@{$self->{hResult}{iyPageError}}, $iBlockNoStart); - } - elsif (!ref($iyLast)) - { - $self->{hResult}{iyPageError}[$iLastIdx] = undef; - push(@{$self->{hResult}{iyPageError}[$iLastIdx]}, $iyLast); - push(@{$self->{hResult}{iyPageError}[$iLastIdx]}, $iBlockNoStart); - } - else - { - $self->{hResult}{iyPageError}[$iLastIdx][1] = $iBlockNoStart; - } - } - } - } - } - } - - # Return the actual size read - return $iActualSize; -} - -#################################################################################################################################### -# close - close and set the result -#################################################################################################################################### -sub close -{ - my $self = shift; - - if (defined($self->{hResult})) - { - # Set result - $self->resultSet(BACKUP_FILTER_PAGECHECKSUM, $self->{hResult}); - - # Delete the sha object - undef($self->{hResult}); - - # Close io - return $self->parent()->close(); - } -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Backup/Info.pm pgbackrest-2.16/lib/pgBackRest/Backup/Info.pm --- pgbackrest-2.15.1/lib/pgBackRest/Backup/Info.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Backup/Info.pm 2019-08-05 16:03:04.000000000 +0000 @@ -967,14 +967,15 @@ $self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_CATALOG, undef, $iCatalogVersion); $self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_CONTROL, undef, $iControlVersion); $self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_SYSTEM_ID, undef, $ullDbSysId); - $self->set(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_DB_VERSION, undef, $strDbVersion); + # Force the version to a string since newer versions of JSON::PP lose track of the fact that it is one + 
$self->set(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_DB_VERSION, undef, $strDbVersion . ''); $self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_HISTORY_ID, undef, $iDbHistoryId); # Fill db history $self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_CATALOG, $iCatalogVersion); $self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_CONTROL, $iControlVersion); $self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_SYSTEM_ID, $ullDbSysId); - $self->set(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_DB_VERSION, $strDbVersion); + $self->set(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_DB_VERSION, $strDbVersion . ''); # Return from function and log return values if any return logDebugReturn($strOperation); diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Check/Check.pm pgbackrest-2.16/lib/pgBackRest/Check/Check.pm --- pgbackrest-2.15.1/lib/pgBackRest/Check/Check.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Check/Check.pm 2019-08-05 16:03:04.000000000 +0000 @@ -159,57 +159,14 @@ }; } - # If able to get the archive id then force archiving and check the arrival of the archived WAL file with the time specified - if ($iResult == 0 && !$oDb->isStandby()) - { - $strWalSegment = $oDb->walSwitch(); - - eval - { - $strArchiveFile = walSegmentFind(storageRepo(), $strArchiveId, $strWalSegment, $iArchiveTimeout); - return true; - } - # If this is a backrest error then capture the code and message else confess - or do - { - # Capture error information - $iResult = exceptionCode($EVAL_ERROR); - $strResultMessage = exceptionMessage($EVAL_ERROR); - }; - } - # Reset the console logging logLevelSet(undef, cfgOption(CFGOPT_LOG_LEVEL_CONSOLE)); } - # If the archiving was successful and backup.info check did not error in an unexpected way, then indicate success - # Else, log the error. 
- if ($iResult == 0) - { - if (!$oDb->isStandby()) - { - &log(INFO, - "WAL segment ${strWalSegment} successfully stored in the archive at '" . - storageRepo()->pathGet(STORAGE_REPO_ARCHIVE . "/$strArchiveId/${strArchiveFile}") . "'"); - } - else - { - &log(INFO, 'switch ' . $oDb->walId() . ' cannot be performed on the standby, all other checks passed successfully'); - } - } - else + # Log the captured error + if ($iResult != 0) { - # Log the captured error &log(ERROR, $strResultMessage, $iResult); - - # If a WAL switch was attempted, then alert the user that the WAL that did not reach the archive - if (defined($strWalSegment) && !defined($strArchiveFile)) - { - &log(WARN, - "WAL segment ${strWalSegment} did not reach the archive:" . (defined($strArchiveId) ? $strArchiveId : '') . "\n" . - "HINT: Check the archive_command to ensure that all options are correct (especially --stanza).\n" . - "HINT: Check the PostgreSQL server log for errors."); - } } # Return from function and log return values if any diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Common/Http/Client.pm pgbackrest-2.16/lib/pgBackRest/Common/Http/Client.pm --- pgbackrest-2.15.1/lib/pgBackRest/Common/Http/Client.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Common/Http/Client.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,364 +0,0 @@ -#################################################################################################################################### -# HTTP Client -#################################################################################################################################### -package pgBackRest::Common::Http::Client; -use parent 'pgBackRest::Common::Io::Buffered'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); -use IO::Socket::SSL; -use Socket qw(SOL_SOCKET SO_KEEPALIVE); - -use pgBackRest::Common::Exception; -use 
pgBackRest::Common::Io::Buffered; -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::Common::Xml; -use pgBackRest::Common::Http::Common; - -#################################################################################################################################### -# Constants -#################################################################################################################################### -use constant HTTP_VERB_GET => 'GET'; - push @EXPORT, qw(HTTP_VERB_GET); -use constant HTTP_VERB_POST => 'POST'; - push @EXPORT, qw(HTTP_VERB_POST); -use constant HTTP_VERB_PUT => 'PUT'; - push @EXPORT, qw(HTTP_VERB_PUT); - -use constant HTTP_HEADER_CONTENT_LENGTH => 'content-length'; - push @EXPORT, qw(HTTP_HEADER_CONTENT_LENGTH); -use constant HTTP_HEADER_TRANSFER_ENCODING => 'transfer-encoding'; - push @EXPORT, qw(HTTP_HEADER_TRANSFER_ENCODING); - -#################################################################################################################################### -# new -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strHost, - $strVerb, - $iPort, - $strUri, - $hQuery, - $hRequestHeader, - $rstrRequestBody, - $bResponseBodyPrefetch, - $iProtocolTimeout, - $iTryTotal, - $lBufferMax, - $bVerifySsl, - $strCaPath, - $strCaFile, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'strHost', trace => true}, - {name => 'strVerb', trace => true}, - {name => 'iPort', optional => true, default => 443, trace => true}, - {name => 'strUri', optional => true, default => qw(/), trace => true}, - {name => 'hQuery', optional => true, trace => true}, - {name => 'hRequestHeader', optional => true, trace => true}, - {name => 'rstrRequestBody', optional => true, trace => true}, - {name => 'bResponseBodyPrefetch', optional => true, default => false, trace => true}, - {name => 'iProtocolTimeout', optional => true, default => 300, trace => true}, - {name => 'iTryTotal', optional => true, default => 3, trace => true}, - {name => 'lBufferMax', optional => true, default => 32768, trace => true}, - {name => 'bVerifySsl', optional => true, default => true, trace => true}, - {name => 'strCaPath', optional => true, trace => true}, - {name => 'strCaFile', optional => true, trace => true}, - ); - - # Retry as many times as requested - my $self; - my $iTry = 1; - my $bRetry; - - do - { - # Disable logging if a failure will be retried - logDisable() if $iTry < $iTryTotal; - $bRetry = false; - - eval - { - # Connect to the server - my $oSocket; - - if (eval{require IO::Socket::IP}) - { - $oSocket = IO::Socket::IP->new(PeerHost => $strHost, PeerPort => $iPort) - or confess &log(ERROR, "unable to create socket: $@", ERROR_HOST_CONNECT); - } - else - { - require IO::Socket::INET; - - $oSocket = IO::Socket::INET->new(PeerHost => $strHost, PeerPort => $iPort) - or confess &log(ERROR, "unable to create socket: $@", ERROR_HOST_CONNECT); - } - - setsockopt($oSocket, SOL_SOCKET,SO_KEEPALIVE, 1) - or confess &log(ERROR, "unable to set socket keepalive: $@", ERROR_HOST_CONNECT); - - eval - { - IO::Socket::SSL->start_SSL( - $oSocket, SSL_verify_mode => $bVerifySsl ? SSL_VERIFY_PEER : SSL_VERIFY_NONE, SSL_ca_path => $strCaPath, - SSL_ca_file => $strCaFile); - } - or do - { - logErrorResult( - ERROR_HOST_CONNECT, coalesce(length($!) == 0 ? 
undef : $!, $SSL_ERROR), length($!) > 0 ? $SSL_ERROR : undef); - }; - - # Bless with new class - $self = $class->SUPER::new( - new pgBackRest::Common::Io::Handle('httpClient', $oSocket, $oSocket), $iProtocolTimeout, $lBufferMax); - bless $self, $class; - - # Store socket - $self->{oSocket} = $oSocket; - - # Generate the query string - my $strQuery = httpQuery($hQuery); - - # Construct the request headers - $self->{strRequestHeader} = "${strVerb} " . httpUriEncode($strUri, true) . "?${strQuery} HTTP/1.1" . "\r\n"; - - foreach my $strHeader (sort(keys(%{$hRequestHeader}))) - { - $self->{strRequestHeader} .= "${strHeader}: $hRequestHeader->{$strHeader}\r\n"; - } - - $self->{strRequestHeader} .= "\r\n"; - - # Write request headers - $self->write(\$self->{strRequestHeader}); - - # Write content - if (defined($rstrRequestBody)) - { - my $iTotalSize = length($$rstrRequestBody); - my $iTotalSent = 0; - - # Write the request body in buffer-sized chunks - do - { - my $strBufferWrite = substr($$rstrRequestBody, $iTotalSent, $lBufferMax); - $iTotalSent += $self->write(\$strBufferWrite); - } while ($iTotalSent < $iTotalSize); - } - - # Read response code - ($self->{strResponseProtocol}, $self->{iResponseCode}, $self->{strResponseMessage}) = - split(' ', trim($self->readLine())); - - # Read the response headers - $self->{iContentLength} = 0; - $self->{strResponseHeader} = ''; - my $strHeader = trim($self->readLine()); - - while ($strHeader ne '') - { - # Validate header - $self->{strResponseHeader} .= "${strHeader}\n"; - - my $iColonPos = index($strHeader, ':'); - - if ($iColonPos == -1) - { - confess &log(ERROR, "http header '${strHeader}' requires colon separator", ERROR_PROTOCOL); - } - - # Parse header - my $strHeaderKey = lc(substr($strHeader, 0, $iColonPos)); - my $strHeaderValue = trim(substr($strHeader, $iColonPos + 1)); - - # Store the header - $self->{hResponseHeader}{$strHeaderKey} = $strHeaderValue; - - # Process content length - if ($strHeaderKey eq 
HTTP_HEADER_CONTENT_LENGTH) - { - $self->{iContentLength} = $strHeaderValue + 0; - $self->{iContentRemaining} = $self->{iContentLength}; - } - # Process transfer encoding (only chunked is supported) - elsif ($strHeaderKey eq HTTP_HEADER_TRANSFER_ENCODING) - { - if ($strHeaderValue eq 'chunked') - { - $self->{iContentLength} = -1; - } - else - { - confess &log(ERROR, "invalid value '${strHeaderValue} for http header '${strHeaderKey}'", ERROR_PROTOCOL); - } - } - - # Read next header - $strHeader = trim($self->readLine()); - } - - # Prefetch response - mostly useful when the response is known to be short - if ($bResponseBodyPrefetch) - { - $self->{strResponseBody} = $self->responseBody(); - } - - # Enable logging if a failure will be retried - logEnable() if $iTry < $iTryTotal; - return 1; - } - or do - { - # Enable logging if a failure will be retried - logEnable() if $iTry < $iTryTotal; - - # If tries reaches total allowed then error - if ($iTry == $iTryTotal) - { - confess $EVAL_ERROR; - } - - # Try again - $iTry++; - $bRetry = true; - }; - } - while ($bRetry); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# read - read content from http stream -#################################################################################################################################### -sub read -{ - my $self = shift; - my $rtBuffer = shift; - my $iRequestSize = shift; - - # Make sure request size is not larger than what remains to be read - $iRequestSize = $iRequestSize < $self->{iContentRemaining} ? 
$iRequestSize : $self->{iContentRemaining}; - $self->{iContentRemaining} -= $iRequestSize; - - my $iActualSize = $self->SUPER::read($rtBuffer, $iRequestSize, true); - - # Set eof if there is nothing left to read - if ($self->{iContentRemaining} == 0) - { - $self->SUPER::eofSet(true); - } - - return $iActualSize; -} - -#################################################################################################################################### -# close/DESTROY - close the HTTP connection -#################################################################################################################################### -sub close -{ - my $self = shift; - - # Only close if the socket is open - if (defined($self->{oSocket})) - { - $self->{oSocket}->close(); - undef($self->{oSocket}); - } -} - -sub DESTROY {shift->close()} - -#################################################################################################################################### -# responseBody - return the entire body of the response in a buffer -#################################################################################################################################### -sub responseBody -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->responseBody' - ); - - # Return prefetched response body if it exists - return $self->{strResponseBody} if exists($self->{strResponseBody}); - - # Fetch response body if content length is not 0 - my $strResponseBody = undef; - - if ($self->{iContentLength} != 0) - { - # Transfer encoding is chunked - if ($self->{iContentLength} == -1) - { - while (1) - { - # Read chunk length - my $strChunkLength = trim($self->readLine()); - my $iChunkLength = hex($strChunkLength); - - # Exit if chunk length is 0 - last if ($iChunkLength == 0); - - # Read the chunk and consume the terminating LF - $self->SUPER::read(\$strResponseBody, $iChunkLength, true); - $self->readLine(); - }; - } - # Else content length is known - else - { - $self->SUPER::read(\$strResponseBody, $self->{iContentLength}, true); - } - - $self->close(); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'rstrResponseBody', value => \$strResponseBody, trace => true} - ); -} - -#################################################################################################################################### -# Properties. 
-#################################################################################################################################### -sub contentLength {shift->{iContentLength}} # Content length if available (-1 means not known yet) -sub requestHeaderText {trim(shift->{strRequestHeader})} -sub responseCode {shift->{iResponseCode}} -sub responseHeader {shift->{hResponseHeader}} -sub responseHeaderText {trim(shift->{strResponseHeader})} -sub responseMessage {shift->{strResponseMessage}} -sub responseProtocol {shift->{strResponseProtocol}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Common/Http/Common.pm pgbackrest-2.16/lib/pgBackRest/Common/Http/Common.pm --- pgbackrest-2.15.1/lib/pgBackRest/Common/Http/Common.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Common/Http/Common.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -#################################################################################################################################### -# HTTP Common -#################################################################################################################################### -package pgBackRest::Common::Http::Common; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; - -#################################################################################################################################### -# httpQuery - encode an HTTP query from a hash -#################################################################################################################################### -sub httpQuery -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $hQuery, - ) = - logDebugParam - ( - __PACKAGE__ . 
'::httpQuery', \@_, - {name => 'hQuery', required => false, trace => true}, - ); - - # Generate the query string - my $strQuery = ''; - - # If a hash (the normal case) - if (ref($hQuery)) - { - foreach my $strParam (sort(keys(%{$hQuery}))) - { - # Parameters may not be defined - this is OK - if (defined($hQuery->{$strParam})) - { - $strQuery .= ($strQuery eq '' ? '' : '&') . $strParam . '=' . httpUriEncode($hQuery->{$strParam}); - } - } - } - # Else query string was passed directly as a scalar - elsif (defined($hQuery)) - { - $strQuery = $hQuery; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strQuery', value => $strQuery, trace => true} - ); -} - -push @EXPORT, qw(httpQuery); - -#################################################################################################################################### -# httpUriEncode - encode query values to conform with URI specs -#################################################################################################################################### -sub httpUriEncode -{ - my $strString = shift; - my $bPath = shift; - - # Only encode if source string is defined - my $strEncodedString; - - if (defined($strString)) - { - # Iterate all characters in the string - for (my $iIndex = 0; $iIndex < length($strString); $iIndex++) - { - my $cChar = substr($strString, $iIndex, 1); - - # These characters are reproduced verbatim - if (($cChar ge 'A' && $cChar le 'Z') || ($cChar ge 'a' && $cChar le 'z') || ($cChar ge '0' && $cChar le '9') || - $cChar eq '_' || $cChar eq '-' || $cChar eq '~' || $cChar eq '.' 
|| ($bPath && $cChar eq '/')) - { - $strEncodedString .= $cChar; - } - # Forward slash is encoded - elsif ($cChar eq '/') - { - $strEncodedString .= '%2F'; - } - # All other characters are hex-encoded - else - { - $strEncodedString .= sprintf('%%%02X', ord($cChar)); - } - } - } - - return $strEncodedString; -} - -push @EXPORT, qw(httpUriEncode); - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Common/Xml.pm pgbackrest-2.16/lib/pgBackRest/Common/Xml.pm --- pgbackrest-2.15.1/lib/pgBackRest/Common/Xml.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Common/Xml.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,163 +0,0 @@ -#################################################################################################################################### -# XML Helper Functions -#################################################################################################################################### -package pgBackRest::Common::Xml; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); -use XML::LibXML; - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; - -#################################################################################################################################### -# xmlParse - parse a string into an xml document and return the root node -#################################################################################################################################### -use constant XML_HEADER => ''; - push @EXPORT, qw(XML_HEADER); - -#################################################################################################################################### -# Convert a string to xml so that it is suitable to be appended into xml -#################################################################################################################################### -sub xmlFromText -{ - my $strText = 
shift; - - return XML::LibXML::Text->new($strText)->toString(); -} - -push @EXPORT, qw(xmlFromText); - -#################################################################################################################################### -# xmlParse - parse a string into an xml document and return the root node -#################################################################################################################################### -sub xmlParse -{ - my $rstrXml = shift; - - my $oXml = XML::LibXML->load_xml(string => $rstrXml)->documentElement(); - - return $oXml; -} - -push @EXPORT, qw(xmlParse); - -#################################################################################################################################### -# xmlTagChildren - get all children that match the tag -#################################################################################################################################### -sub xmlTagChildren -{ - my $oXml = shift; - my $strTag = shift; - - return $oXml->getChildrenByTagName($strTag); -} - -push @EXPORT, qw(xmlTagChildren); - -#################################################################################################################################### -# xmlTagText - get the text content for a tag, error if the tag is required and does not exist -#################################################################################################################################### -sub xmlTagText -{ - my $oXml = shift; - my $strTag = shift; - my $bRequired = shift; - # my $strDefault = shift; - - # Get the tag or tags - my @oyTag = $oXml->getElementsByTagName($strTag); - - # Error if the tag does not exist and is required - if (@oyTag > 1) - { - confess &log(ERROR, @oyTag . 
" '${strTag}' tag(s) exist, but only one was expected", ERROR_FORMAT); - } - elsif (@oyTag == 0) - { - if (!defined($bRequired) || $bRequired) - { - confess &log(ERROR, "tag '${strTag}' does not exist", ERROR_FORMAT); - } - } - else - { - return $oyTag[0]->textContent(); - } - - return; -} - -push @EXPORT, qw(xmlTagText); - -#################################################################################################################################### -# xmlTagBool - get the boolean content for a tag, error if the tag is required and does not exist or is not boolean -#################################################################################################################################### -sub xmlTagBool -{ - my $oXml = shift; - my $strTag = shift; - my $bRequired = shift; - # my $strDefault = shift; - - # Test content for boolean value - my $strContent = xmlTagText($oXml, $strTag, $bRequired); - - if (defined($strContent)) - { - if ($strContent eq 'true') - { - return true; - } - elsif ($strContent eq 'false') - { - return false; - } - else - { - confess &log(ERROR, "invalid boolean value '${strContent}' for tag '${strTag}'", ERROR_FORMAT); - } - } - - return; -} - -push @EXPORT, qw(xmlTagBool); - -#################################################################################################################################### -# xmlTagInt - get the integer content for a tag, error if the tag is required and does not exist or is not an integer -#################################################################################################################################### -sub xmlTagInt -{ - my $oXml = shift; - my $strTag = shift; - my $bRequired = shift; - # my $strDefault = shift; - - # Test content for boolean value - my $iContent = xmlTagText($oXml, $strTag, $bRequired); - - if (defined($iContent)) - { - eval - { - $iContent = $iContent + 0; - return 1; - } - or do - { - confess &log(ERROR, "invalid integer value '${iContent}' for tag 
'${strTag}'", ERROR_FORMAT); - } - } - - return $iContent; -} - -push @EXPORT, qw(xmlTagInt); - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Db.pm pgbackrest-2.16/lib/pgBackRest/Db.pm --- pgbackrest-2.15.1/lib/pgBackRest/Db.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Db.pm 2019-08-05 16:03:04.000000000 +0000 @@ -6,13 +6,13 @@ use strict; use warnings FATAL => qw(all); use Carp qw(confess); +use English '-no_match_vars'; -use DBD::Pg ':async'; -use DBI; use Exporter qw(import); our @EXPORT = qw(); use Fcntl qw(O_RDONLY); use File::Basename qw(dirname); +use JSON::PP; use pgBackRest::DbVersion; use pgBackRest::Common::Exception; @@ -126,10 +126,10 @@ # Assign function parameters, defaults, and log debug info my ($strOperation) = logDebugParam(__PACKAGE__ . '->DESTROY'); - if (defined($self->{hDb})) + if (defined($self->{oDb})) { - $self->{hDb}->disconnect(); - undef($self->{hDb}); + $self->{oDb}->close(); + undef($self->{oDb}); } # Return from function and log return values if any @@ -167,63 +167,46 @@ # Else run locally else { - if (!defined($self->{hDb})) + if (!defined($self->{oDb})) { - # Connect to the db - my $strDbName = 'postgres'; - my $strDbSocketPath = cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_SOCKET_PATH, $self->{iRemoteIdx}), false); + $self->{oDb} = new pgBackRest::LibC::PgClient( + cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_SOCKET_PATH, $self->{iRemoteIdx}), false), + cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PORT, $self->{iRemoteIdx})), 'postgres', + cfgOption(CFGOPT_DB_TIMEOUT) * 1000); - # Make sure the socket path is absolute - if (defined($strDbSocketPath) && $strDbSocketPath !~ /^\//) + if ($bWarnOnError) { - confess &log(ERROR, "'${strDbSocketPath}' is not valid for '" . cfgOptionName(CFGOPT_PG_SOCKET_PATH) . "' option:" . - " path must be absolute", ERROR_OPTION_INVALID_VALUE); - } - - # Construct the URI - my $strDbUri = - "dbi:Pg:dbname=${strDbName};port=" . 
cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PORT, $self->{iRemoteIdx})) . - (defined($strDbSocketPath) ? ";host=${strDbSocketPath}" : ''); - - logDebugMisc - ( - $strOperation, undef, - {name => 'strDbUri', value => $strDbUri}, - ); - - $self->{hDb} = DBI->connect($strDbUri, undef, undef, - {AutoCommit => 1, RaiseError => 0, PrintError => 0, Warn => 0}); - - # If db handle is not valid then check error - if (!$self->{hDb}) - { - # Throw an error unless a warning was requested - if (!$bWarnOnError) + eval { - confess &log(ERROR, $DBI::errstr, ERROR_DB_CONNECT); + $self->{oDb}->open(); + return true; } + or do + { + &log(WARN, exceptionMessage($EVAL_ERROR)); + $bResult = false; - # Log a warning - &log(WARN, $DBI::errstr); - - $bResult = false; - undef($self->{hDb}); + undef($self->{oDb}); + } } else { + $self->{oDb}->open(); + } + + if (defined($self->{oDb})) + { my ($fDbVersion) = $self->versionGet(); if ($fDbVersion >= PG_VERSION_APPLICATION_NAME) { # Set application name for monitoring and debugging - $self->{hDb}->do( + $self->{oDb}->query( "set application_name = '" . PROJECT_NAME . ' [' . - (cfgOptionValid(CFGOPT_COMMAND) ? cfgOption(CFGOPT_COMMAND) : cfgCommandName(cfgCommandGet())) . "]'") - or confess &log(ERROR, $self->{hDb}->errstr, ERROR_DB_QUERY); + (cfgOptionValid(CFGOPT_COMMAND) ? cfgOption(CFGOPT_COMMAND) : cfgCommandName(cfgCommandGet())) . "]'"); # Clear search path to prevent possible function overrides - $self->{hDb}->do("set search_path = 'pg_catalog'") - or confess &log(ERROR, $self->{hDb}->errstr, ERROR_DB_QUERY); + $self->{oDb}->query("set search_path = 'pg_catalog'"); } } } @@ -273,73 +256,11 @@ else { $self->connect(); + my $strResult = $self->{oDb}->query($strSql); - # Prepare the query - my $hStatement = $self->{hDb}->prepare($strSql, {pg_async => PG_ASYNC}) - or confess &log(ERROR, $DBI::errstr . ":\n${strSql}", ERROR_DB_QUERY); - - # Execute the query - $hStatement->execute() - or confess &log(ERROR, $DBI::errstr. 
":\n${strSql}", ERROR_DB_QUERY); - - # Wait for the query to return - my $oWait = waitInit(cfgOption(CFGOPT_DB_TIMEOUT)); - my $bTimeout = true; - - do - { - # Is the statement done? - if ($hStatement->pg_ready()) - { - # return now if there is no result expected - if (!$bResult) - { - return \@stryResult; - } - - if (!$hStatement->pg_result()) - { - # Return if the error should be ignored - if ($bIgnoreError) - { - return \@stryResult; - } - - # Else report it - confess &log(ERROR, $DBI::errstr . ":\n${strSql}", ERROR_DB_QUERY); - } - - # Get rows and return them - my @stryRow; - - do - { - # Get next row - @stryRow = $hStatement->fetchrow_array; - - # If the row has data then add it to the result - if (@stryRow) - { - push(@{$stryResult[@stryResult]}, @stryRow); - } - # Else check for error - elsif ($hStatement->err) - { - confess &log(ERROR, $DBI::errstr . ":\n${strSql}", ERROR_DB_QUERY); - } - } - while (@stryRow); - - $bTimeout = false; - } - } while ($bTimeout && waitMore($oWait)); - - # If timeout then cancel the query and confess - if ($bTimeout) + if (defined($strResult)) { - $hStatement->pg_cancel(); - confess &log(ERROR, 'statement timed out after ' . waitInterval($oWait) . - " second(s):\n${strSql}", ERROR_DB_TIMEOUT); + @stryResult = @{JSON::PP->new()->allow_nonref()->decode($strResult)}; } } @@ -849,38 +770,6 @@ } #################################################################################################################################### -# walSwitch -# -# Forces a switch to the next transaction log in order to archive the current log. -#################################################################################################################################### -sub walSwitch -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my $strOperation = logDebugParam(__PACKAGE__ . '->walSwitch'); - - # Create a restore point to ensure current WAL will be archived. 
For versions <= 9.0 activity will need to be generated by - # the user if there have been no writes since the last WAL switch. - if ($self->{strDbVersion} >= PG_VERSION_91) - { - $self->executeSql("select pg_create_restore_point('" . PROJECT_NAME . " Archive Check');"); - } - - my $strWalFileName = $self->executeSqlOne( - 'select pg_' . $self->walId() . 'file_name from pg_' . $self->walId() . 'file_name(pg_switch_' . $self->walId() . '());'); - - &log(INFO, "switch WAL ${strWalFileName}"); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strWalFileName', value => $strWalFileName} - ); -} - -#################################################################################################################################### # isStandby # # Determines if a database is a standby by testing if it is in recovery mode. @@ -1005,7 +894,7 @@ if ($self->{strDbVersion} >= PG_VERSION_96) { - $strCheckpointLSN = $self->executeSqlOne('select checkpoint_' . $self->lsnId() .' from pg_control_checkpoint()'); + $strCheckpointLSN = $self->executeSqlOne('select checkpoint_' . 
$self->lsnId() .'::text from pg_control_checkpoint()'); if (lsnNormalize($strCheckpointLSN) le lsnNormalize($strTargetLSN)) { diff -Nru pgbackrest-2.15.1/lib/pgBackRest/LibCAuto.pm pgbackrest-2.16/lib/pgBackRest/LibCAuto.pm --- pgbackrest-2.15.1/lib/pgBackRest/LibCAuto.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/LibCAuto.pm 2019-08-05 16:03:04.000000000 +0000 @@ -74,8 +74,6 @@ checksum => [ 'pageChecksum', - 'pageChecksumBufferTest', - 'pageChecksumTest', ], config => @@ -273,6 +271,7 @@ 'CFGOPT_REPO_S3_HOST', 'CFGOPT_REPO_S3_KEY', 'CFGOPT_REPO_S3_KEY_SECRET', + 'CFGOPT_REPO_S3_PORT', 'CFGOPT_REPO_S3_REGION', 'CFGOPT_REPO_S3_TOKEN', 'CFGOPT_REPO_S3_VERIFY_TLS', @@ -352,7 +351,7 @@ storage => [ - 'storagePosixPathRemove', + 'storageRepoFree', ], test => diff -Nru pgbackrest-2.15.1/lib/pgBackRest/LibC.pm pgbackrest-2.16/lib/pgBackRest/LibC.pm --- pgbackrest-2.15.1/lib/pgBackRest/LibC.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/LibC.pm 2019-08-05 16:03:04.000000000 +0000 @@ -16,8 +16,7 @@ foreach my $strConstant (keys(%{$rhConstant})) { - eval ## no critic (BuiltinFunctions::ProhibitStringyEval, ErrorHandling::RequireCheckingReturnValueOfEval) - "use constant ${strConstant} => '" . $rhConstant->{$strConstant} . "'"; + eval "use constant ${strConstant} => '" . $rhConstant->{$strConstant} . 
"'"; } # Export functions and constants @@ -48,8 +47,7 @@ if ($strPrefix eq 'CFGCMD' || $strPrefix eq 'CFGOPT') { - eval ## no critic (BuiltinFunctions::ProhibitStringyEval, ErrorHandling::RequireCheckingReturnValueOfEval) - "use constant ${strConstant} => ${iConstantIdx}"; + eval "use constant ${strConstant} => ${iConstantIdx}"; } $strPrefixLast = $strPrefix; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Main.pm pgbackrest-2.16/lib/pgBackRest/Main.pm --- pgbackrest-2.15.1/lib/pgBackRest/Main.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Main.pm 2019-08-05 16:03:04.000000000 +0000 @@ -107,30 +107,6 @@ cfgOption(CFGOPT_LOCK_PATH), cfgOption(CFGOPT_COMMAND), cfgOption(CFGOPT_STANZA, false), cfgOption(CFGOPT_PROCESS)); } - # Process local command - # -------------------------------------------------------------------------------------------------------------------------- - elsif (cfgCommandTest(CFGCMD_LOCAL)) - { - # Set log levels - cfgOptionSet(CFGOPT_LOG_LEVEL_STDERR, PROTOCOL, true); - logLevelSet(cfgOption(CFGOPT_LOG_LEVEL_FILE), OFF, cfgOption(CFGOPT_LOG_LEVEL_STDERR)); - - logFileSet( - storageLocal(), - cfgOption(CFGOPT_LOG_PATH) . '/' . cfgOption(CFGOPT_STANZA) . '-' . lc(cfgOption(CFGOPT_COMMAND)) . '-' . - lc(cfgCommandName(cfgCommandGet())) . '-' . 
sprintf("%03d", cfgOption(CFGOPT_PROCESS))); - - # Load module dynamically - require pgBackRest::Protocol::Local::Minion; - pgBackRest::Protocol::Local::Minion->import(); - - # Create the local object - my $oLocal = new pgBackRest::Protocol::Local::Minion(); - - # Process local requests - $oLocal->process(); - } - # Process check command # -------------------------------------------------------------------------------------------------------------------------- elsif (cfgCommandTest(CFGCMD_CHECK)) diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Manifest.pm pgbackrest-2.16/lib/pgBackRest/Manifest.pm --- pgbackrest-2.15.1/lib/pgBackRest/Manifest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Manifest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -322,7 +322,8 @@ confess &log(ASSERT, 'strDbVersion and iDbCatalogVersion must be provided with bLoad = false'); } - $self->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, $strDbVersion); + # Force the version to a string since newer versions of JSON::PP lose track of the fact that it is one + $self->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, $strDbVersion . 
''); $self->numericSet(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_CATALOG, undef, $iDbCatalogVersion); } diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Protocol/Local/Minion.pm pgbackrest-2.16/lib/pgBackRest/Protocol/Local/Minion.pm --- pgbackrest-2.15.1/lib/pgBackRest/Protocol/Local/Minion.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Protocol/Local/Minion.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -#################################################################################################################################### -# PROTOCOL LOCAL MINION MODULE -#################################################################################################################################### -package pgBackRest::Protocol::Local::Minion; -use parent 'pgBackRest::Protocol::Command::Minion'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); - -use pgBackRest::Backup::File; -use pgBackRest::Common::Log; -use pgBackRest::Config::Config; -use pgBackRest::Storage::Local; -use pgBackRest::Protocol::Base::Master; -use pgBackRest::Protocol::Base::Minion; -use pgBackRest::Protocol::Command::Minion; -use pgBackRest::Protocol::Helper; -use pgBackRest::RestoreFile; - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; # Class name - - # Assign function parameters, defaults, and log debug info - my ($strOperation) = logDebugParam(__PACKAGE__ . 
'->new'); - - # Init object and store variables - my $self = $class->SUPER::new(cfgCommandName(CFGCMD_LOCAL), cfgOption(CFGOPT_BUFFER_SIZE), cfgOption(CFGOPT_PROTOCOL_TIMEOUT)); - bless $self, $class; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# init -#################################################################################################################################### -sub init -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my ($strOperation) = logDebugParam(__PACKAGE__ . '->init'); - - # Create anonymous subs for each command - my $hCommandMap = - { - &OP_BACKUP_FILE => sub {backupFile(@{shift()})}, - - # To be run after each command to keep the remote alive - &OP_POST => sub {protocolKeepAlive()}, - }; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hCommandMap', value => $hCommandMap} - ); -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Protocol/Remote/Minion.pm pgbackrest-2.16/lib/pgBackRest/Protocol/Remote/Minion.pm --- pgbackrest-2.15.1/lib/pgBackRest/Protocol/Remote/Minion.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Protocol/Remote/Minion.pm 2019-08-05 16:03:04.000000000 +0000 @@ -91,11 +91,12 @@ my $oSourceFileIo = $oStorage->openRead(@{shift()}); # If the source file exists - if (defined($oSourceFileIo)) + if (defined($oSourceFileIo) && (!defined($oSourceFileIo->{oStorageCRead}) || $oSourceFileIo->open())) { $self->outputWrite(true); - $oStorage->copy($oSourceFileIo, new pgBackRest::Protocol::Storage::File($self, $oSourceFileIo)); + $oStorage->copy( + $oSourceFileIo, new pgBackRest::Protocol::Storage::File($self, $oSourceFileIo), {bSourceOpen => true}); return true; } 
diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Protocol/Storage/Helper.pm pgbackrest-2.16/lib/pgBackRest/Protocol/Storage/Helper.pm --- pgbackrest-2.15.1/lib/pgBackRest/Protocol/Storage/Helper.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Protocol/Storage/Helper.pm 2019-08-05 16:03:04.000000000 +0000 @@ -13,11 +13,10 @@ use pgBackRest::Common::Log; use pgBackRest::Config::Config; +use pgBackRest::LibC qw(:storage); use pgBackRest::Protocol::Helper; use pgBackRest::Protocol::Storage::Remote; -use pgBackRest::Storage::Base; use pgBackRest::Storage::Helper; -use pgBackRest::Storage::Local; #################################################################################################################################### # Storage constants @@ -60,9 +59,8 @@ { if (isDbLocal({iRemoteIdx => $iRemoteIdx})) { - $hStorage->{&STORAGE_DB}{$iRemoteIdx} = new pgBackRest::Storage::Local( - cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PATH, $iRemoteIdx)), new pgBackRest::Storage::Posix::Driver(), - {strTempExtension => STORAGE_TEMP_EXT, lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)}); + $hStorage->{&STORAGE_DB}{$iRemoteIdx} = new pgBackRest::Storage::Storage( + STORAGE_DB, {lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)}); } else { @@ -82,54 +80,6 @@ push @EXPORT, qw(storageDb); #################################################################################################################################### -# storageRepoRule - rules for paths in the repository -#################################################################################################################################### -sub storageRepoRule -{ - my $strRule = shift; - my $strFile = shift; - my $strStanza = shift; - - # Result path and file - my $strResultFile; - - # Return archive path - if ($strRule eq STORAGE_REPO_ARCHIVE) - { - $strResultFile = "archive" . (defined($strStanza) ? 
"/${strStanza}" : ''); - - # If file is not defined nothing further to do - if (defined($strFile)) - { - my ($strArchiveId, $strWalFile) = split('/', $strFile); - - # If a WAL file (including .backup) - if (defined($strWalFile) && $strWalFile =~ /^[0-F]{24}/) - { - $strResultFile .= "/${strArchiveId}/" . substr($strWalFile, 0, 16) . "/${strWalFile}"; - } - # Else other files - else - { - $strResultFile .= "/${strFile}"; - } - } - } - # Return backup path - elsif ($strRule eq STORAGE_REPO_BACKUP) - { - $strResultFile = "backup" . (defined($strStanza) ? "/${strStanza}" : '') . (defined($strFile) ? "/${strFile}" : ''); - } - # Else error - else - { - confess &log(ASSERT, "invalid " . STORAGE_REPO . " storage rule ${strRule}"); - } - - return $strResultFile; -} - -#################################################################################################################################### # storageRepo - get repository storage #################################################################################################################################### sub storageRepo @@ -146,85 +96,18 @@ {name => 'strStanza', optional => true, trace => true}, ); - if (!defined($strStanza)) - { - if (cfgOptionValid(CFGOPT_STANZA) && cfgOptionTest(CFGOPT_STANZA)) - { - $strStanza = cfgOption(CFGOPT_STANZA); - } - else - { - $strStanza = STORAGE_REPO; - } - } - # Create storage if not defined - if (!defined($hStorage->{&STORAGE_REPO}{$strStanza})) + if (!defined($hStorage->{&STORAGE_REPO})) { if (isRepoLocal()) { - # Path rules - my $hRule = - { - &STORAGE_REPO_ARCHIVE => - { - fnRule => \&storageRepoRule, - xData => $strStanza eq STORAGE_REPO ? undef : $strStanza, - }, - &STORAGE_REPO_BACKUP => - { - fnRule => \&storageRepoRule, - xData => $strStanza eq STORAGE_REPO ? 
undef : $strStanza, - }, - }; - - # Create the driver - my $oDriver; - - if (cfgOptionTest(CFGOPT_REPO_TYPE, CFGOPTVAL_REPO_TYPE_S3)) - { - require pgBackRest::Storage::S3::Driver; - - $oDriver = new pgBackRest::Storage::S3::Driver( - cfgOption(CFGOPT_REPO_S3_BUCKET), cfgOption(CFGOPT_REPO_S3_ENDPOINT), cfgOption(CFGOPT_REPO_S3_REGION), - cfgOption(CFGOPT_REPO_S3_KEY), cfgOption(CFGOPT_REPO_S3_KEY_SECRET), - {strHost => cfgOption(CFGOPT_REPO_S3_HOST, false), bVerifySsl => cfgOption(CFGOPT_REPO_S3_VERIFY_TLS, false), - strCaPath => cfgOption(CFGOPT_REPO_S3_CA_PATH, false), - strCaFile => cfgOption(CFGOPT_REPO_S3_CA_FILE, false), lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE), - strSecurityToken => cfgOption(CFGOPT_REPO_S3_TOKEN, false)}); - } - elsif (cfgOptionTest(CFGOPT_REPO_TYPE, CFGOPTVAL_REPO_TYPE_CIFS)) - { - require pgBackRest::Storage::Cifs::Driver; - - $oDriver = new pgBackRest::Storage::Cifs::Driver(); - } - else - { - $oDriver = new pgBackRest::Storage::Posix::Driver(); - } - - # Set the encryption for the repo - my $strCipherType; - my $strCipherPass; - - # If the encryption is not the default (none) then set the user-defined passphrase and magic based on the type - if (cfgOption(CFGOPT_REPO_CIPHER_TYPE) ne CFGOPTVAL_REPO_CIPHER_TYPE_NONE) - { - $strCipherType = cfgOption(CFGOPT_REPO_CIPHER_TYPE); - $strCipherPass = cfgOption(CFGOPT_REPO_CIPHER_PASS); - } - - # Create local storage - $hStorage->{&STORAGE_REPO}{$strStanza} = new pgBackRest::Storage::Local( - cfgOption(CFGOPT_REPO_PATH), $oDriver, - {strTempExtension => STORAGE_TEMP_EXT, hRule => $hRule, lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE), - strCipherType => $strCipherType, strCipherPassUser => $strCipherPass}); + $hStorage->{&STORAGE_REPO} = new pgBackRest::Storage::Storage( + STORAGE_REPO, {lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)}); } else { # Create remote storage - $hStorage->{&STORAGE_REPO}{$strStanza} = new pgBackRest::Protocol::Storage::Remote( + $hStorage->{&STORAGE_REPO} = new 
pgBackRest::Protocol::Storage::Remote( protocolGet(CFGOPTVAL_REMOTE_TYPE_BACKUP)); } } @@ -233,7 +116,7 @@ return logDebugReturn ( $strOperation, - {name => 'oStorageRepo', value => $hStorage->{&STORAGE_REPO}{$strStanza}, trace => true}, + {name => 'oStorageRepo', value => $hStorage->{&STORAGE_REPO}, trace => true}, ); } @@ -249,6 +132,8 @@ delete($hStorage->{&STORAGE_REPO}); + storageRepoFree(); + # Return from function and log return values if any return logDebugReturn($strOperation); } diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Protocol/Storage/Remote.pm pgbackrest-2.16/lib/pgBackRest/Protocol/Storage/Remote.pm --- pgbackrest-2.15.1/lib/pgBackRest/Protocol/Storage/Remote.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Protocol/Storage/Remote.pm 2019-08-05 16:03:04.000000000 +0000 @@ -15,7 +15,6 @@ use pgBackRest::Protocol::Helper; use pgBackRest::Protocol::Storage::File; use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; #################################################################################################################################### # new @@ -195,29 +194,10 @@ {name => 'rhParam', required => false}, ); - # Determine whether protocol compress will be used - my $bProtocolCompress = protocolCompress($rhParam); - - # Compress on the remote side - if ($bProtocolCompress) - { - push( - @{$rhParam->{rhyFilter}}, - {strClass => STORAGE_FILTER_GZIP, - rxyParam => [{iLevel => cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK), bWantGzip => false}]}); - } - my $oSourceFileIo = $self->{oProtocol}->cmdExecute(OP_STORAGE_OPEN_READ, [$strFileExp, $rhParam]) ? 
new pgBackRest::Protocol::Storage::File($self->{oProtocol}) : undef; - # Decompress on the local side - if ($bProtocolCompress) - { - $oSourceFileIo = new pgBackRest::Storage::Filter::Gzip( - $oSourceFileIo, {strCompressType => STORAGE_DECOMPRESS, bWantGzip => false}); - } - # Return from function and log return values if any return logDebugReturn ( @@ -247,28 +227,10 @@ {name => 'rhParam', required => false}, ); - # Determine whether protocol compress will be used - my $bProtocolCompress = protocolCompress($rhParam); - - # Decompress on the remote side - if ($bProtocolCompress) - { - push( - @{$rhParam->{rhyFilter}}, - {strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS, bWantGzip => false}]}); - } - # Open the remote file $self->{oProtocol}->cmdWrite(OP_STORAGE_OPEN_WRITE, [$strFileExp, $rhParam]); my $oDestinationFileIo = new pgBackRest::Protocol::Storage::File($self->{oProtocol}); - # Compress on local side - if ($bProtocolCompress) - { - $oDestinationFileIo = new pgBackRest::Storage::Filter::Gzip( - $oDestinationFileIo, {iLevel => cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK), bWantGzip => false}); - } - # Return from function and log return values if any return logDebugReturn ( @@ -368,24 +330,6 @@ } #################################################################################################################################### -# Used internally to determine if protocol compression should be enabled -#################################################################################################################################### -sub protocolCompress -{ - my $rhParam = shift; - - my $bProtocolCompress = false; - - if (defined($rhParam->{bProtocolCompress})) - { - $bProtocolCompress = $rhParam->{bProtocolCompress} && cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK) > 0 ? 
true : false; - delete($rhParam->{bProtocolCompress}); - } - - return $bProtocolCompress; -} - -#################################################################################################################################### # getters #################################################################################################################################### sub protocol {shift->{oProtocol}}; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/RestoreFile.pm pgbackrest-2.16/lib/pgBackRest/RestoreFile.pm --- pgbackrest-2.15.1/lib/pgBackRest/RestoreFile.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/RestoreFile.pm 2019-08-05 16:03:04.000000000 +0000 @@ -20,9 +20,6 @@ use pgBackRest::Config::Config; use pgBackRest::Manifest; use pgBackRest::Protocol::Storage::Helper; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; -use pgBackRest::Storage::Filter::Sha; use pgBackRest::Storage::Helper; #################################################################################################################################### diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Restore.pm pgbackrest-2.16/lib/pgBackRest/Restore.pm --- pgbackrest-2.15.1/lib/pgBackRest/Restore.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Restore.pm 2019-08-05 16:03:04.000000000 +0000 @@ -227,8 +227,7 @@ # Copy the backup manifest to the db cluster path storageDb()->copy( - storageRepo()->openRead(STORAGE_REPO_BACKUP . "/$self->{strBackupSet}/" . FILE_MANIFEST, {bProtocolCompress => true, - strCipherPass => $strCipherPass}), + storageRepo()->openRead(STORAGE_REPO_BACKUP . "/$self->{strBackupSet}/" . FILE_MANIFEST, {strCipherPass => $strCipherPass}), $self->{strDbClusterPath} . '/' . FILE_MANIFEST); # Load the manifest into a hash @@ -1089,7 +1088,7 @@ # Copy backup info, load it, then delete $oStorageDb->copy( - storageRepo()->openRead(STORAGE_REPO_BACKUP . qw(/) . 
FILE_BACKUP_INFO, {bProtocolCompress => true}), + storageRepo()->openRead(STORAGE_REPO_BACKUP . qw(/) . FILE_BACKUP_INFO), $self->{strDbClusterPath} . '/' . FILE_BACKUP_INFO); my $oBackupInfo = new pgBackRest::Backup::Info($self->{strDbClusterPath}, false, undef, {oStorage => storageDb()}); diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Stanza.pm pgbackrest-2.16/lib/pgBackRest/Stanza.pm --- pgbackrest-2.15.1/lib/pgBackRest/Stanza.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Stanza.pm 2019-08-05 16:03:04.000000000 +0000 @@ -294,7 +294,7 @@ my ($strOperation) = logDebugParam(__PACKAGE__ . '->stanzaDelete'); my $strStanza = cfgOption(CFGOPT_STANZA); - my $oStorageRepo = storageRepo({strStanza => $strStanza}); + my $oStorageRepo = storageRepo(); # If at least an archive or backup directory exists for the stanza, then continue, else nothing to do if ($oStorageRepo->pathExists(STORAGE_REPO_ARCHIVE) || $oStorageRepo->pathExists(STORAGE_REPO_BACKUP)) @@ -342,8 +342,8 @@ } # Recursively remove the stanza archive and backup directories - $oStorageRepo->remove(STORAGE_REPO_ARCHIVE, {bRecurse => true, bIgnoreMissing => true}); - $oStorageRepo->remove(STORAGE_REPO_BACKUP, {bRecurse => true, bIgnoreMissing => true}); + $oStorageRepo->pathRemove(STORAGE_REPO_ARCHIVE, {bRecurse => true, bIgnoreMissing => true}); + $oStorageRepo->pathRemove(STORAGE_REPO_BACKUP, {bRecurse => true, bIgnoreMissing => true}); # Remove the stop file so processes can run. 
lockStart(); diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Base.pm pgbackrest-2.16/lib/pgBackRest/Storage/Base.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Base.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Base.pm 2019-08-05 16:03:04.000000000 +0000 @@ -17,6 +17,17 @@ use pgBackRest::Common::Log; #################################################################################################################################### +# Storage constants +#################################################################################################################################### +use constant STORAGE_LOCAL => ''; + push @EXPORT, qw(STORAGE_LOCAL); + +use constant STORAGE_S3 => 's3'; + push @EXPORT, qw(STORAGE_S3); +use constant STORAGE_POSIX => 'posix'; + push @EXPORT, qw(STORAGE_POSIX); + +#################################################################################################################################### # Compress constants #################################################################################################################################### use constant STORAGE_COMPRESS => 'compress'; @@ -35,6 +46,16 @@ push @EXPORT, qw(CIPHER_MAGIC); #################################################################################################################################### +# Filter constants +#################################################################################################################################### +use constant STORAGE_FILTER_CIPHER_BLOCK => 'pgBackRest::Storage::Filter::CipherBlock'; + push @EXPORT, qw(STORAGE_FILTER_CIPHER_BLOCK); +use constant STORAGE_FILTER_GZIP => 'pgBackRest::Storage::Filter::Gzip'; + push @EXPORT, qw(STORAGE_FILTER_GZIP); +use constant STORAGE_FILTER_SHA => 'pgBackRest::Storage::Filter::Sha'; + push @EXPORT, qw(STORAGE_FILTER_SHA); + 
+#################################################################################################################################### # Capability constants #################################################################################################################################### # Can the size in the storage be different than what was written? For example, a ZFS filesystem could be doing compression of a @@ -78,9 +99,10 @@ ); } + #################################################################################################################################### -# copy - copy a file. If special encryption settings are required, then the file objects from openRead/openWrite must be passed -# instead of file names. +# Copy a file. If special encryption settings are required, then the file objects from openRead/openWrite must be passed instead of +# file names. #################################################################################################################################### sub copy { @@ -92,47 +114,62 @@ $strOperation, $xSourceFile, $xDestinationFile, + $bSourceOpen, ) = logDebugParam ( __PACKAGE__ . '->copy', \@_, {name => 'xSourceFile', required => false}, - {name => 'xDestinationFile', required => false}, + {name => 'xDestinationFile'}, + {name => 'bSourceOpen', optional => true, default => false}, ); - # Was the file copied? - my $bResult = false; + # Is source/destination an IO object or a file expression? + my $oSourceFileIo = defined($xSourceFile) ? (ref($xSourceFile) ? $xSourceFile : $self->openRead($xSourceFile)) : undef; - # Is source an IO object or a file expression? - my $oSourceFileIo = - defined($xSourceFile) ? - (ref($xSourceFile) ? $xSourceFile : $self->openRead($self->pathGet($xSourceFile))) : undef; + # Does the source file exist? + my $bResult = false; - # Proceed if source file exists + # Copy if the source file exists if (defined($oSourceFileIo)) { - # Is destination an IO object or a file expression? 
- my $oDestinationFileIo = ref($xDestinationFile) ? $xDestinationFile : $self->openWrite($self->pathGet($xDestinationFile)); + my $oDestinationFileIo = ref($xDestinationFile) ? $xDestinationFile : $self->openWrite($xDestinationFile); - # Copy the data - my $lSizeRead; - - do + # Use C copy if source and destination are C objects + if (defined($oSourceFileIo->{oStorageCRead}) && defined($oDestinationFileIo->{oStorageCWrite})) { - # Read data - my $tBuffer = ''; - - $lSizeRead = $oSourceFileIo->read(\$tBuffer, $self->{lBufferMax}); - $oDestinationFileIo->write(\$tBuffer); + $bResult = $self->{oStorageC}->copy( + $oSourceFileIo->{oStorageCRead}, $oDestinationFileIo->{oStorageCWrite}) ? true : false; } - while ($lSizeRead != 0); - - # Close files - $oSourceFileIo->close(); - $oDestinationFileIo->close(); + else + { + # Open the source file if it is a C object + $bResult = defined($oSourceFileIo->{oStorageCRead}) ? ($bSourceOpen || $oSourceFileIo->open()) : true; - # File was copied - $bResult = true; + if ($bResult) + { + # Open the destination file if it is a C object + if (defined($oDestinationFileIo->{oStorageCWrite})) + { + $oDestinationFileIo->open(); + } + + # Copy the data + do + { + # Read data + my $tBuffer = ''; + + $oSourceFileIo->read(\$tBuffer, $self->{lBufferMax}); + $oDestinationFileIo->write(\$tBuffer); + } + while (!$oSourceFileIo->eof()); + + # Close files + $oSourceFileIo->close(); + $oDestinationFileIo->close(); + } + } } return logDebugReturn diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Cifs/Driver.pm pgbackrest-2.16/lib/pgBackRest/Storage/Cifs/Driver.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Cifs/Driver.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Cifs/Driver.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -#################################################################################################################################### -# CIFS Storage -# -# Implements storage functions for 
Posix-compliant file systems. -#################################################################################################################################### -package pgBackRest::Storage::Cifs::Driver; -use parent 'pgBackRest::Storage::Posix::Driver'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Log; -use pgBackRest::Storage::Base; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_CIFS_DRIVER => __PACKAGE__; - push @EXPORT, qw(STORAGE_CIFS_DRIVER); - -#################################################################################################################################### -# pathSync - CIFS does not support path sync so this is a noop -#################################################################################################################################### -sub pathSync -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathSync', \@_, - {name => 'strPath', trace => true}, - ); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# Getters/Setters -#################################################################################################################################### -sub capability {shift eq STORAGE_CAPABILITY_SIZE_DIFF ? 
true : false} -sub className {STORAGE_CIFS_DRIVER} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/CipherBlock.pm pgbackrest-2.16/lib/pgBackRest/Storage/Filter/CipherBlock.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/CipherBlock.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Filter/CipherBlock.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,177 +0,0 @@ -#################################################################################################################################### -# Block Cipher Filter -#################################################################################################################################### -package pgBackRest::Storage::Filter::CipherBlock; -use parent 'pgBackRest::Common::Io::Filter'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Io::Base; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:crypto); -use pgBackRest::Storage::Base; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_FILTER_CIPHER_BLOCK => __PACKAGE__; - push @EXPORT, qw(STORAGE_FILTER_CIPHER_BLOCK); - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oParent, - $strCipherType, - $tCipherPass, - $strMode, - ) = - 
logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oParent', trace => true}, - {name => 'strCipherType', trace => true}, - {name => 'tCipherPass', trace => true}, - {name => 'strMode', optional => true, default => STORAGE_ENCRYPT, trace => true}, - ); - - # Bless with new class - my $self = $class->SUPER::new($oParent); - bless $self, $class; - - # Check mode is valid - $self->{strMode} = $strMode; - - if (!($self->{strMode} eq STORAGE_ENCRYPT || $self->{strMode} eq STORAGE_DECRYPT)) - { - confess &log(ASSERT, "unknown cipher mode: $self->{strMode}"); - } - - # Set read/write - $self->{bWrite} = false; - - # Create cipher object - $self->{oCipher} = new pgBackRest::LibC::Cipher::Block( - $self->{strMode} eq STORAGE_ENCRYPT ? CIPHER_MODE_ENCRYPT : CIPHER_MODE_DECRYPT, $strCipherType, $tCipherPass, - length($tCipherPass)); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# read - encrypt/decrypt data -#################################################################################################################################### -sub read -{ - my $self = shift; - my $rtBuffer = shift; - my $iSize = shift; - - # Return 0 if all data has been read - return 0 if $self->eof(); - - # Loop until required bytes have been read - my $tBufferRead = ''; - my $iBufferReadSize = 0; - - do - { - # Read data - my $tCipherBuffer; - my $iActualSize = $self->SUPER::read(\$tCipherBuffer, $iSize); - - # If something was read, then process it - if ($iActualSize > 0) - { - $tBufferRead .= $self->{oCipher}->process($tCipherBuffer); - } - - # If eof then flush the remaining data - if ($self->eof()) - { - $tBufferRead .= $self->{oCipher}->flush(); - } - - # Get the current size of the read buffer - $iBufferReadSize = length($tBufferRead); - } - while 
($iBufferReadSize < $iSize && !$self->eof()); - - # Append to the read buffer - $$rtBuffer .= $tBufferRead; - - # Return the actual size read - return $iBufferReadSize; -} - -#################################################################################################################################### -# write - encrypt/decrypt data -#################################################################################################################################### -sub write -{ - my $self = shift; - my $rtBuffer = shift; - - # Set write flag so close will flush buffer - $self->{bWrite} = true; - - # Write the buffer if defined - my $tCipherBuffer; - - if (defined($$rtBuffer)) - { - $tCipherBuffer = $self->{oCipher}->process($$rtBuffer); - } - - # Call the io method. If $rtBuffer is undefined, then this is expected to error. - $self->SUPER::write(\$tCipherBuffer); - - return length($$rtBuffer); -} - -#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - # Only close the object if not already closed - if ($self->{oCipher}) - { - # Flush the write buffer - if ($self->{bWrite}) - { - my $tCipherBuffer = $self->{oCipher}->flush(); - $self->SUPER::write(\$tCipherBuffer); - } - - undef($self->{oCipher}); - - # Close io - return $self->SUPER::close(); - } - - return false; -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/Gzip.pm pgbackrest-2.16/lib/pgBackRest/Storage/Filter/Gzip.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/Gzip.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Filter/Gzip.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,264 +0,0 @@ 
-#################################################################################################################################### -# GZIP Filter -#################################################################################################################################### -package pgBackRest::Storage::Filter::Gzip; -use parent 'pgBackRest::Common::Io::Filter'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Compress::Raw::Zlib qw(WANT_GZIP MAX_WBITS Z_OK Z_BUF_ERROR Z_DATA_ERROR Z_STREAM_END); -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Io::Base; -use pgBackRest::Common::Log; -use pgBackRest::Storage::Base; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_FILTER_GZIP => __PACKAGE__; - push @EXPORT, qw(STORAGE_FILTER_GZIP); - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oParent, - $bWantGzip, - $strCompressType, - $iLevel, - $lCompressBufferMax, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'oParent', trace => true}, - {name => 'bWantGzip', optional => true, default => true, trace => true}, - {name => 'strCompressType', optional => true, default => STORAGE_COMPRESS, trace => true}, - {name => 'iLevel', optional => true, default => 6, trace => true}, - {name => 'lCompressBufferMax', optional => true, default => COMMON_IO_BUFFER_MAX, trace => true}, - ); - - # Bless with new class - my $self = $class->SUPER::new($oParent); - bless $self, $class; - - # Set variables - $self->{bWantGzip} = $bWantGzip; - $self->{iLevel} = $iLevel; - $self->{lCompressBufferMax} = $lCompressBufferMax; - $self->{strCompressType} = $strCompressType; - - # Set read/write - $self->{bWrite} = false; - - # Create the zlib object - my $iZLibStatus; - - if ($self->{strCompressType} eq STORAGE_COMPRESS) - { - ($self->{oZLib}, $iZLibStatus) = new Compress::Raw::Zlib::Deflate( - WindowBits => $self->{bWantGzip} ? WANT_GZIP : MAX_WBITS, Level => $self->{iLevel}, - Bufsize => $self->{lCompressBufferMax}, AppendOutput => 1); - - $self->{tCompressedBuffer} = undef; - } - else - { - ($self->{oZLib}, $iZLibStatus) = new Compress::Raw::Zlib::Inflate( - WindowBits => $self->{bWantGzip} ? 
WANT_GZIP : MAX_WBITS, Bufsize => $self->{lCompressBufferMax}, - LimitOutput => 1, AppendOutput => 1); - - $self->{tUncompressedBuffer} = undef; - $self->{lUncompressedBufferSize} = 0; - } - - $self->errorCheck($iZLibStatus); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# errorCheck - check status code for errors -#################################################################################################################################### -sub errorCheck -{ - my $self = shift; - my $iZLibStatus = shift; - - if (!($iZLibStatus == Z_OK || $iZLibStatus == Z_BUF_ERROR)) - { - logErrorResult( - $self->{bWrite} ? ERROR_FILE_WRITE : ERROR_FILE_READ, - 'unable to ' . ($self->{strCompressType} eq STORAGE_COMPRESS ? 'deflate' : 'inflate') . " '" . - $self->parent()->name() . "'", - $self->{oZLib}->msg()); - } - - return Z_OK; -} - -#################################################################################################################################### -# read - compress/decompress data -#################################################################################################################################### -sub read -{ - my $self = shift; - my $rtBuffer = shift; - my $iSize = shift; - - if ($self->{strCompressType} eq STORAGE_COMPRESS) - { - return 0 if $self->eof(); - - my $lSizeBegin = defined($$rtBuffer) ? 
length($$rtBuffer) : 0; - my $lUncompressedSize; - my $lCompressedSize; - - do - { - my $tUncompressedBuffer; - $lUncompressedSize = $self->parent()->read(\$tUncompressedBuffer, $iSize); - - if ($lUncompressedSize > 0) - { - $self->errorCheck($self->{oZLib}->deflate($tUncompressedBuffer, $$rtBuffer)); - } - else - { - $self->errorCheck($self->{oZLib}->flush($$rtBuffer)); - } - - $lCompressedSize = length($$rtBuffer) - $lSizeBegin; - } - while ($lUncompressedSize > 0 && $lCompressedSize < $iSize); - - # Return the actual size read - return $lCompressedSize; - } - else - { - # If the local buffer size is not large enough to satisfy the request and there is still data to decompress - while ($self->{lUncompressedBufferSize} < $iSize && !$self->parent()->eof()) - { - if (!defined($self->{tCompressedBuffer}) || length($self->{tCompressedBuffer}) == 0) - { - $self->parent()->read(\$self->{tCompressedBuffer}, $self->{lCompressBufferMax}); - } - - my $iZLibStatus = $self->{oZLib}->inflate($self->{tCompressedBuffer}, $self->{tUncompressedBuffer}); - $self->{lUncompressedBufferSize} = length($self->{tUncompressedBuffer}); - - last if $iZLibStatus == Z_STREAM_END; - - $self->errorCheck($iZLibStatus); - } - - # Actual size is the lesser of the local buffer size or requested size - if the local buffer is smaller than the requested - # size it means that there was nothing more to be read - my $iActualSize = $self->{lUncompressedBufferSize} < $iSize ? 
$self->{lUncompressedBufferSize} : $iSize; - - # Append to the request buffer - $$rtBuffer .= substr($self->{tUncompressedBuffer}, 0, $iActualSize); - - # Truncate local buffer - $self->{tUncompressedBuffer} = substr($self->{tUncompressedBuffer}, $iActualSize); - $self->{lUncompressedBufferSize} -= $iActualSize; - - # Return the actual size read - return $iActualSize; - } -} - -#################################################################################################################################### -# write - compress/decompress data -#################################################################################################################################### -sub write -{ - my $self = shift; - my $rtBuffer = shift; - - $self->{bWrite} = true; - - if ($self->{strCompressType} eq STORAGE_COMPRESS) - { - # Compress the data - $self->errorCheck($self->{oZLib}->deflate($$rtBuffer, $self->{tCompressedBuffer})); - - # Only write when buffer is full - if (length($self->{tCompressedBuffer}) > $self->{lCompressBufferMax}) - { - $self->parent()->write(\$self->{tCompressedBuffer}); - $self->{tCompressedBuffer} = undef; - } - } - else - { - my $tCompressedBuffer = $$rtBuffer; - - while (length($tCompressedBuffer) > 0) - { - my $tUncompressedBuffer; - - my $iZLibStatus = $self->{oZLib}->inflate($tCompressedBuffer, $tUncompressedBuffer); - $self->parent()->write(\$tUncompressedBuffer); - - last if $iZLibStatus == Z_STREAM_END; - - $self->errorCheck($iZLibStatus); - } - } - - # Return bytes written - return length($$rtBuffer); -} - -#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - if (defined($self->{oZLib})) - { - # Flush the write buffer - if ($self->{bWrite}) - { - if 
($self->{strCompressType} eq STORAGE_COMPRESS) - { - # Flush out last compressed bytes - $self->errorCheck($self->{oZLib}->flush($self->{tCompressedBuffer})); - - # Write last compressed bytes - $self->parent()->write(\$self->{tCompressedBuffer}); - } - } - - undef($self->{oZLib}); - - # Close io - return $self->parent()->close(); - } - - return false; -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/Sha.pm pgbackrest-2.16/lib/pgBackRest/Storage/Filter/Sha.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Filter/Sha.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Filter/Sha.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -#################################################################################################################################### -# SHA Filter -#################################################################################################################################### -package pgBackRest::Storage::Filter::Sha; -use parent 'pgBackRest::Common::Io::Filter'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_FILTER_SHA => __PACKAGE__; - push @EXPORT, qw(STORAGE_FILTER_SHA); - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign 
function parameters, defaults, and log debug info - my - ( - $strOperation, - $oParent, - $strAlgorithm, - ) = - logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oParent', trace => true}, - {name => 'strAlgorithm', optional => true, default => 'sha1', trace => true}, - ); - - # Bless with new class - my $self = $class->SUPER::new($oParent); - bless $self, $class; - - # Set variables - $self->{strAlgorithm} = $strAlgorithm; - - # Create SHA object - $self->{oSha} = new pgBackRest::LibC::Crypto::Hash($self->{strAlgorithm}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# read - calculate sha digest -#################################################################################################################################### -sub read -{ - my $self = shift; - my $rtBuffer = shift; - my $iSize = shift; - - # Call the io method - my $tShaBuffer; - my $iActualSize = $self->parent()->read(\$tShaBuffer, $iSize); - - # Calculate sha for the returned buffer - if ($iActualSize > 0) - { - $self->{oSha}->process($tShaBuffer); - $$rtBuffer .= $tShaBuffer; - } - - # Return the actual size read - return $iActualSize; -} - -#################################################################################################################################### -# write - calculate sha digest -#################################################################################################################################### -sub write -{ - my $self = shift; - my $rtBuffer = shift; - - # Calculate sha for the buffer - $self->{oSha}->process($$rtBuffer); - - # Call the io method - return $self->parent()->write($rtBuffer); -} - 
-#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - if (defined($self->{oSha})) - { - # Set result - $self->resultSet(STORAGE_FILTER_SHA, $self->{oSha}->result()); - - # Delete the sha object - delete($self->{oSha}); - - # Close io - return $self->parent->close(); - } - - return false; -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Helper.pm pgbackrest-2.16/lib/pgBackRest/Storage/Helper.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Helper.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Helper.pm 2019-08-05 16:03:04.000000000 +0000 @@ -13,17 +13,11 @@ use pgBackRest::Common::Log; use pgBackRest::Config::Config; -use pgBackRest::Storage::Posix::Driver; -use pgBackRest::Storage::Local; +use pgBackRest::Storage::Base; +use pgBackRest::Storage::Storage; use pgBackRest::Version; #################################################################################################################################### -# Storage constants -#################################################################################################################################### -use constant STORAGE_LOCAL => ''; - push @EXPORT, qw(STORAGE_LOCAL); - -#################################################################################################################################### # Compression extension #################################################################################################################################### use constant COMPRESS_EXT => 'gz'; @@ -36,11 +30,6 @@ push @EXPORT, qw(STORAGE_TEMP_EXT); #################################################################################################################################### -# Cache 
storage so it can be retrieved quickly -#################################################################################################################################### -my $hStorage; - -#################################################################################################################################### # storageLocal - get local storage # # Local storage is generally read-only (except for locking) and can never reference a remote path. Used for adhoc activities like @@ -49,32 +38,13 @@ sub storageLocal { # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . '::storageLocal', \@_, - {name => 'strPath', default => '/', trace => true}, - ); - - # Create storage if not defined - if (!defined($hStorage->{&STORAGE_LOCAL}{$strPath})) - { - # Create local storage - $hStorage->{&STORAGE_LOCAL}{$strPath} = new pgBackRest::Storage::Local( - $strPath, new pgBackRest::Storage::Posix::Driver(), - {strTempExtension => STORAGE_TEMP_EXT, - lBufferMax => cfgOptionValid(CFGOPT_BUFFER_SIZE, false) ? cfgOption(CFGOPT_BUFFER_SIZE, false) : undef}); - } + my ($strOperation) = logDebugParam(__PACKAGE__ . 
'::storageLocal'); # Return from function and log return values if any return logDebugReturn ( $strOperation, - {name => 'oStorageLocal', value => $hStorage->{&STORAGE_LOCAL}{$strPath}, trace => true}, + {name => 'oStorageLocal', value => new pgBackRest::Storage::Storage(STORAGE_LOCAL), trace => true}, ); } diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Local.pm pgbackrest-2.16/lib/pgBackRest/Storage/Local.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Local.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Local.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,924 +0,0 @@ -#################################################################################################################################### -# Local Storage -# -# Implements storage functionality using drivers. -#################################################################################################################################### -package pgBackRest::Storage::Local; -use parent 'pgBackRest::Storage::Base'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use File::Basename qw(dirname); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Sha; - -#################################################################################################################################### -# new -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathBase, - $oDriver, - $hRule, - $bAllowTemp, - $strTempExtension, - $strDefaultPathMode, - $strDefaultFileMode, - $lBufferMax, - $strCipherType, - $strCipherPassUser, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'strPathBase'}, - {name => 'oDriver'}, - {name => 'hRule', optional => true}, - {name => 'bAllowTemp', optional => true, default => true}, - {name => 'strTempExtension', optional => true, default => 'tmp'}, - {name => 'strDefaultPathMode', optional => true, default => '0750'}, - {name => 'strDefaultFileMode', optional => true, default => '0640'}, - {name => 'lBufferMax', optional => true}, - {name => 'strCipherType', optional => true}, - {name => 'strCipherPassUser', optional => true, redact => true}, - ); - - # Create class - my $self = $class->SUPER::new({lBufferMax => $lBufferMax}); - bless $self, $class; - - $self->{strPathBase} = $strPathBase; - $self->{oDriver} = $oDriver; - $self->{hRule} = $hRule; - $self->{bAllowTemp} = $bAllowTemp; - $self->{strTempExtension} = $strTempExtension; - $self->{strDefaultPathMode} = $strDefaultPathMode; - $self->{strDefaultFileMode} = $strDefaultFileMode; - $self->{strCipherType} = $strCipherType; - $self->{strCipherPassUser} = $strCipherPassUser; - - if (defined($self->{strCipherType})) - { - require pgBackRest::Storage::Filter::CipherBlock; - pgBackRest::Storage::Filter::CipherBlock->import(); - } - - # Set temp extension in driver - $self->driver()->tempExtensionSet($self->{strTempExtension}) if $self->driver()->can('tempExtensionSet'); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# exists - check if file exists -#################################################################################################################################### -sub exists -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFileExp, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->exists', \@_, - {name => 'strFileExp'}, - ); - - # Check exists - my $bExists = $self->driver()->exists($self->pathGet($strFileExp)); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists} - ); -} - -#################################################################################################################################### -# hashSize - calculate sha1 hash and size of file. If special encryption settings are required, then the file objects from -# openRead/openWrite must be passed instead of file names. -#################################################################################################################################### -sub hashSize -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $xFileExp, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->hashSize', \@_, - {name => 'xFileExp'}, - {name => 'bIgnoreMissing', optional => true, default => false}, - ); - - # Set operation variables - my $strHash; - my $lSize; - - # Is this an IO object or a file expression? - my $oFileIo = - defined($xFileExp) ? (ref($xFileExp) ? 
$xFileExp : - $self->openRead($self->pathGet($xFileExp), {bIgnoreMissing => $bIgnoreMissing})) : undef; - - if (defined($oFileIo)) - { - $lSize = 0; - my $oShaIo = new pgBackRest::Storage::Filter::Sha($oFileIo); - my $lSizeRead; - - do - { - my $tContent; - $lSizeRead = $oShaIo->read(\$tContent, $self->{lBufferMax}); - $lSize += $lSizeRead; - } - while ($lSizeRead != 0); - - # Close the file - $oShaIo->close(); - - # Get the hash - $strHash = $oShaIo->result(STORAGE_FILTER_SHA); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strHash', value => $strHash}, - {name => 'lSize', value => $lSize} - ); -} - -#################################################################################################################################### -# info - get information for path/file -#################################################################################################################################### -sub info -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathFileExp, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . 
'::fileStat', \@_, - {name => 'strPathFileExp'}, - {name => 'bIgnoreMissing', optional => true, default => false}, - ); - - # Stat the path/file - my $oInfo = $self->driver()->info($self->pathGet($strPathFileExp), {bIgnoreMissing => $bIgnoreMissing}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oInfo', value => $oInfo, trace => true} - ); -} - -#################################################################################################################################### -# linkCreate - create a link -#################################################################################################################################### -sub linkCreate -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strSourcePathFileExp, - $strDestinationLinkExp, - $bHard, - $bRelative, - $bPathCreate, - $bIgnoreExists, - ) = - logDebugParam - ( - __PACKAGE__ . '->linkCreate', \@_, - {name => 'strSourcePathFileExp'}, - {name => 'strDestinationLinkExp'}, - {name => 'bHard', optional=> true, default => false}, - {name => 'bRelative', optional=> true, default => false}, - {name => 'bPathCreate', optional=> true, default => true}, - {name => 'bIgnoreExists', optional => true, default => false}, - ); - - # Get source and destination paths - my $strSourcePathFile = $self->pathGet($strSourcePathFileExp); - my $strDestinationLink = $self->pathGet($strDestinationLinkExp); - - # Generate relative path if requested - if ($bRelative) - { - # Determine how much of the paths are common - my @strySource = split('/', $strSourcePathFile); - my @stryDestination = split('/', $strDestinationLink); - - while (defined($strySource[0]) && defined($stryDestination[0]) && $strySource[0] eq $stryDestination[0]) - { - shift(@strySource); - shift(@stryDestination); - } - - # Add relative path sections - $strSourcePathFile = ''; - - for (my $iIndex = 0; $iIndex < 
@stryDestination - 1; $iIndex++) - { - $strSourcePathFile .= '../'; - } - - # Add path to source - $strSourcePathFile .= join('/', @strySource); - - logDebugMisc - ( - $strOperation, 'apply relative path', - {name => 'strSourcePathFile', value => $strSourcePathFile, trace => true} - ); - } - - # Create the link - $self->driver()->linkCreate( - $strSourcePathFile, $strDestinationLink, {bHard => $bHard, bPathCreate => $bPathCreate, bIgnoreExists => $bIgnoreExists}); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# list - list all files/paths in path -#################################################################################################################################### -sub list -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, - $strExpression, - $strSortOrder, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->list', \@_, - {name => 'strPathExp', required => false}, - {name => 'strExpression', optional => true}, - {name => 'strSortOrder', optional => true, default => 'forward'}, - {name => 'bIgnoreMissing', optional => true, default => false}, - ); - - # Get file list - my $rstryFileList = $self->driver()->list( - $self->pathGet($strPathExp), {strExpression => $strExpression, bIgnoreMissing => $bIgnoreMissing}); - - # Apply expression if defined - if (defined($strExpression)) - { - @{$rstryFileList} = grep(/$strExpression/i, @{$rstryFileList}); - } - - # Reverse sort - if ($strSortOrder eq 'reverse') - { - @{$rstryFileList} = sort {$b cmp $a} @{$rstryFileList}; - } - # Normal sort - else - { - @{$rstryFileList} = sort @{$rstryFileList}; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'stryFileList', value => $rstryFileList} - ); -} - -#################################################################################################################################### -# manifest - build path/file/link manifest starting with base path and including all subpaths -#################################################################################################################################### -sub manifest -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, - $strFilter, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->manifest', \@_, - {name => 'strPathExp'}, - {name => 'strFilter', optional => true, trace => true}, - ); - - my $hManifest = $self->driver()->manifest($self->pathGet($strPathExp), {strFilter => $strFilter}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hManifest', value => $hManifest, trace => true} - ); -} - -#################################################################################################################################### -# move - move path/file -#################################################################################################################################### -sub move -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strSourcePathFileExp, - $strDestinationPathFileExp, - $bPathCreate, - ) = - logDebugParam - ( - __PACKAGE__ . '->move', \@_, - {name => 'strSourcePathExp'}, - {name => 'strDestinationPathExp'}, - {name => 'bPathCreate', optional => true, default => false, trace => true}, - ); - - # Set operation variables - $self->driver()->move( - $self->pathGet($strSourcePathFileExp), $self->pathGet($strDestinationPathFileExp), {bPathCreate => $bPathCreate}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation - ); -} - -#################################################################################################################################### -# openRead - open file for reading -#################################################################################################################################### -sub openRead -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $xFileExp, - $bIgnoreMissing, - $rhyFilter, - $strCipherPass, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->openRead', \@_, - {name => 'xFileExp'}, - {name => 'bIgnoreMissing', optional => true, default => false}, - {name => 'rhyFilter', optional => true}, - {name => 'strCipherPass', optional => true, redact => true}, - ); - - # Open the file - my $oFileIo = $self->driver()->openRead($self->pathGet($xFileExp), {bIgnoreMissing => $bIgnoreMissing}); - - # Apply filters if file is defined - if (defined($oFileIo)) - { - # If cipher is set then add the filter so that decryption is the first filter applied to the data read before any of the - # other filters - if (defined($self->cipherType())) - { - $oFileIo = &STORAGE_FILTER_CIPHER_BLOCK->new( - $oFileIo, $self->cipherType(), defined($strCipherPass) ? $strCipherPass : $self->cipherPassUser(), - {strMode => STORAGE_DECRYPT}); - } - - # Apply any other filters - if (defined($rhyFilter)) - { - foreach my $rhFilter (@{$rhyFilter}) - { - $oFileIo = $rhFilter->{strClass}->new($oFileIo, @{$rhFilter->{rxyParam}}); - } - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIo', value => $oFileIo, trace => true}, - ); -} - -#################################################################################################################################### -# openWrite - open file for writing -#################################################################################################################################### -sub openWrite -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $xFileExp, - $strMode, - $strUser, - $strGroup, - $lTimestamp, - $bAtomic, - $bPathCreate, - $rhyFilter, - $strCipherPass, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->openWrite', \@_, - {name => 'xFileExp'}, - {name => 'strMode', optional => true, default => $self->{strDefaultFileMode}}, - {name => 'strUser', optional => true}, - {name => 'strGroup', optional => true}, - {name => 'lTimestamp', optional => true}, - {name => 'bAtomic', optional => true, default => false}, - {name => 'bPathCreate', optional => true, default => false}, - {name => 'rhyFilter', optional => true}, - {name => 'strCipherPass', optional => true, redact => true}, - ); - - # Open the file - my $oFileIo = $self->driver()->openWrite($self->pathGet($xFileExp), - {strMode => $strMode, strUser => $strUser, strGroup => $strGroup, lTimestamp => $lTimestamp, bPathCreate => $bPathCreate, - bAtomic => $bAtomic}); - - # If cipher is set then add filter so that encryption is performed just before the data is actually written - if (defined($self->cipherType())) - { - $oFileIo = &STORAGE_FILTER_CIPHER_BLOCK->new( - $oFileIo, $self->cipherType(), defined($strCipherPass) ? $strCipherPass : $self->cipherPassUser()); - } - - # Apply any other filters - if (defined($rhyFilter)) - { - foreach my $rhFilter (reverse(@{$rhyFilter})) - { - $oFileIo = $rhFilter->{strClass}->new($oFileIo, @{$rhFilter->{rxyParam}}); - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIo', value => $oFileIo, trace => true}, - ); -} - -#################################################################################################################################### -# owner - change ownership of path/file -#################################################################################################################################### -sub owner -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathFileExp, - $strUser, - $strGroup - ) = - logDebugParam - ( - __PACKAGE__ . 
'->owner', \@_, - {name => 'strPathFileExp'}, - {name => 'strUser', required => false}, - {name => 'strGroup', required => false} - ); - - # Set ownership - $self->driver()->owner($self->pathGet($strPathFileExp), {strUser => $strUser, strGroup => $strGroup}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation - ); -} - -#################################################################################################################################### -# pathCreate - create path -#################################################################################################################################### -sub pathCreate -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, - $strMode, - $bIgnoreExists, - $bCreateParent, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathCreate', \@_, - {name => 'strPathExp'}, - {name => 'strMode', optional => true, default => $self->{strDefaultPathMode}}, - {name => 'bIgnoreExists', optional => true, default => false}, - {name => 'bCreateParent', optional => true, default => false}, - ); - - # Create path - $self->driver()->pathCreate( - $self->pathGet($strPathExp), {strMode => $strMode, bIgnoreExists => $bIgnoreExists, bCreateParent => $bCreateParent}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation - ); -} - -#################################################################################################################################### -# pathExists - check if path exists -#################################################################################################################################### -sub pathExists -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->pathExists', \@_, - {name => 'strPathExp'}, - ); - - # Check exists - my $bExists = $self->driver()->pathExists($self->pathGet($strPathExp)); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists} - ); -} - -#################################################################################################################################### -# pathGet - resolve a path expression into an absolute path -#################################################################################################################################### -sub pathGet -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, # File that that needs to be translated to a path - $bTemp, # Return the temp file name - ) = - logDebugParam - ( - __PACKAGE__ . '->pathGet', \@_, - {name => 'strPathExp', required => false, trace => true}, - {name => 'bTemp', optional => true, default => false, trace => true}, - ); - - # Path and file to be returned - my $strPath; - my $strFile; - - # Is this an absolute path type? 
- my $bAbsolute = false; - - if (defined($strPathExp) && index($strPathExp, qw(/)) == 0) - { - $bAbsolute = true; - $strPath = $strPathExp; - } - else - { - # Is it a rule type - if (defined($strPathExp) && index($strPathExp, qw(<)) == 0) - { - # Extract the rule type - my $iPos = index($strPathExp, qw(>)); - - if ($iPos == -1) - { - confess &log(ASSERT, "found < but not > in '${strPathExp}'"); - } - - my $strType = substr($strPathExp, 0, $iPos + 1); - - # Extract the filename - if ($iPos < length($strPathExp) - 1) - { - $strFile = substr($strPathExp, $iPos + 2); - } - - # Lookup the rule - if (!defined($self->{hRule}->{$strType})) - { - confess &log(ASSERT, "storage rule '${strType}' does not exist"); - } - - # If rule is a ref then call the function - if (ref($self->{hRule}->{$strType})) - { - $strPath = $self->pathBase(); - $strFile = $self->{hRule}{$strType}{fnRule}->($strType, $strFile, $self->{hRule}{$strType}{xData}); - } - # Else get the path - else - { - $strPath = $self->pathBase() . ($self->pathBase() =~ /\/$/ ? '' : qw{/}) . $self->{hRule}->{$strType}; - } - } - # Else it must be relative - else - { - $strPath = $self->pathBase(); - $strFile = $strPathExp; - } - } - - # Make sure a temp file is valid for this type and file - if ($bTemp) - { - # Error when temp files are not allowed - if (!$self->{bAllowTemp}) - { - confess &log(ASSERT, "temp file not supported for storage '" . $self->pathBase() . "'"); - } - - # The file must be defined - if (!$bAbsolute) - { - if (!defined($strFile)) - { - confess &log(ASSERT, 'file part must be defined when temp file specified'); - } - } - } - - # Combine path and file - $strPath .= defined($strFile) ? ($strPath =~ /\/$/ ? '' : qw{/}) . "${strFile}" : ''; - - # Add temp extension - $strPath .= $bTemp ? 
".$self->{strTempExtension}" : ''; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strPath', value => $strPath, trace => true} - ); -} - -#################################################################################################################################### -# Sync path so newly added file entries are not lost -#################################################################################################################################### -sub pathSync -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathExp, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathSync', \@_, - {name => 'strPathExp'}, - ); - - $self->driver()->pathSync($self->pathGet($strPathExp)); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# remove - remove path/file -#################################################################################################################################### -sub remove -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $xstryPathFileExp, - $bIgnoreMissing, - $bRecurse, - ) = - logDebugParam - ( - __PACKAGE__ . '->remove', \@_, - {name => 'xstryPathFileExp'}, - {name => 'bIgnoreMissing', optional => true, default => true}, - {name => 'bRecurse', optional => true, default => false, trace => true}, - ); - - # Evaluate expressions for all files - my @stryPathFileExp; - - if (ref($xstryPathFileExp)) - { - foreach my $strPathFileExp (@{$xstryPathFileExp}) - { - push(@stryPathFileExp, $self->pathGet($strPathFileExp)); - } - } - - # Remove path(s)/file(s) - my $bRemoved = $self->driver()->remove( - ref($xstryPathFileExp) ? 
\@stryPathFileExp : $self->pathGet($xstryPathFileExp), - {bIgnoreMissing => $bIgnoreMissing, bRecurse => $bRecurse}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bRemoved', value => $bRemoved} - ); -} - -#################################################################################################################################### -# encrypted - determine if the file is encrypted or not -#################################################################################################################################### -sub encrypted -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFileName, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->encrypted', \@_, - {name => 'strFileName'}, - {name => 'bIgnoreMissing', optional => true, default => false}, - ); - - my $tMagicSignature; - my $bEncrypted = false; - - # Open the file via the driver - my $oFile = $self->driver()->openRead($self->pathGet($strFileName), {bIgnoreMissing => $bIgnoreMissing}); - - # If the file does not exist because we're ignoring missing (else it would error before this is executed) then determine if it - # should be encrypted based on the repo - if (!defined($oFile)) - { - if (defined($self->{strCipherType})) - { - $bEncrypted = true; - } - } - else - { - # If the file does exist, then read the magic signature - my $lSizeRead = $oFile->read(\$tMagicSignature, length(CIPHER_MAGIC)); - - # Close the file handle - $oFile->close(); - - # If the file is able to be read, then if it is encrypted it must at least have the magic signature, even if it were - # originally a 0 byte file - if (($lSizeRead > 0) && substr($tMagicSignature, 0, length(CIPHER_MAGIC)) eq CIPHER_MAGIC) - { - $bEncrypted = true; - } - - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bEncrypted', value => 
$bEncrypted} - ); -} - -#################################################################################################################################### -# encryptionValid - determine if encyption set properly based on the value passed -#################################################################################################################################### -sub encryptionValid -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $bEncrypted, - ) = - logDebugParam - ( - __PACKAGE__ . '->encryptionValid', \@_, - {name => 'bEncrypted'}, - ); - - my $bValid = true; - - # If encryption is set on the file then make sure the repo is encrypted and visa-versa - if ($bEncrypted) - { - if (!defined($self->{strCipherType})) - { - $bValid = false; - } - } - else - { - if (defined($self->{strCipherType})) - { - $bValid = false; - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bValid', value => $bValid} - ); -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub pathBase {shift->{strPathBase}} -sub driver {shift->{oDriver}} -sub cipherType {shift->{strCipherType}} -sub cipherPassUser {shift->{strCipherPassUser}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/Driver.pm pgbackrest-2.16/lib/pgBackRest/Storage/Posix/Driver.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/Driver.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Posix/Driver.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,963 +0,0 @@ -#################################################################################################################################### -# Posix Storage -# -# 
Implements storage functions for Posix-compliant file systems. -#################################################################################################################################### -package pgBackRest::Storage::Posix::Driver; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); -use File::Basename qw(basename dirname); -use Fcntl qw(:mode); -use File::stat qw{lstat}; - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Posix::FileRead; -use pgBackRest::Storage::Posix::FileWrite; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_POSIX_DRIVER => __PACKAGE__; - push @EXPORT, qw(STORAGE_POSIX_DRIVER); - -#################################################################################################################################### -# new -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Create the class hash - my $self = {}; - bless $self, $class; - - # Assign function parameters, defaults, and log debug info - ( - my $strOperation, - $self->{bFileSync}, - $self->{bPathSync}, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'bFileSync', optional => true, default => true}, - {name => 'bPathSync', optional => true, default => true}, - ); - - # Set default temp extension - $self->{strTempExtension} = 'tmp'; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# exists - check if a path or file exists -#################################################################################################################################### -sub exists -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - ) = - logDebugParam - ( - __PACKAGE__ . '->exists', \@_, - {name => 'strFile', trace => true}, - ); - - # Does the path/file exist? - my $bExists = true; - my $oStat = lstat($strFile); - - # Use stat to test if file exists - if (defined($oStat)) - { - # Check that it is actually a file - $bExists = !S_ISDIR($oStat->mode) ? 
true : false; - } - else - { - # If the error is not entry missing, then throw error - if (!$OS_ERROR{ENOENT}) - { - logErrorResult(ERROR_FILE_EXISTS, "unable to test if file '${strFile}' exists", $OS_ERROR); - } - - $bExists = false; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists, trace => true} - ); -} - -#################################################################################################################################### -# info - get information for path/file -#################################################################################################################################### -sub info -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathFile, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->info', \@_, - {name => 'strFile', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - # Stat the path/file - my $oInfo = lstat($strPathFile); - - # Check for errors - if (!defined($oInfo)) - { - if (!($OS_ERROR{ENOENT} && $bIgnoreMissing)) - { - logErrorResult($OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, "unable to stat '${strPathFile}'", $OS_ERROR); - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oInfo', value => $oInfo, trace => true} - ); -} - -#################################################################################################################################### -# linkCreate -#################################################################################################################################### -sub linkCreate -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strSourcePathFile, - $strDestinationLink, - $bHard, - $bPathCreate, - $bIgnoreExists, - ) = - logDebugParam - ( - __PACKAGE__ . '->linkCreate', \@_, - {name => 'strSourcePathFile', trace => true}, - {name => 'strDestinationLink', trace => true}, - {name => 'bHard', optional=> true, default => false, trace => true}, - {name => 'bPathCreate', optional=> true, default => true, trace => true}, - {name => 'bIgnoreExists', optional => true, default => false, trace => true}, - ); - - if (!($bHard ? 
link($strSourcePathFile, $strDestinationLink) : symlink($strSourcePathFile, $strDestinationLink))) - { - my $strMessage = "unable to create link '${strDestinationLink}'"; - - # If parent path or source is missing - if ($OS_ERROR{ENOENT}) - { - # Check if source is missing - if (!$self->exists($strSourcePathFile)) - { - confess &log(ERROR, "${strMessage} because source '${strSourcePathFile}' does not exist", ERROR_FILE_MISSING); - } - - if (!$bPathCreate) - { - confess &log(ERROR, "${strMessage} because parent does not exist", ERROR_PATH_MISSING); - } - - # Create parent path - $self->pathCreate(dirname($strDestinationLink), {bIgnoreExists => true, bCreateParent => true}); - - # Create link - $self->linkCreate($strSourcePathFile, $strDestinationLink, {bHard => $bHard}); - } - # Else if link already exists - elsif ($OS_ERROR{EEXIST}) - { - if (!$bIgnoreExists) - { - confess &log(ERROR, "${strMessage} because it already exists", ERROR_PATH_EXISTS); - } - } - else - { - logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR); - } - } - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# linkDestination - get destination of symlink -#################################################################################################################################### -sub linkDestination -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strLink, - ) = - logDebugParam - ( - __PACKAGE__ . '->linkDestination', \@_, - {name => 'strLink', trace => true}, - ); - - # Get link destination - my $strLinkDestination = readlink($strLink); - - # Check for errors - if (!defined($strLinkDestination)) - { - logErrorResult( - $OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, "unable to get destination for link ${strLink}", $OS_ERROR); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strLinkDestination', value => $strLinkDestination, trace => true} - ); -} - -#################################################################################################################################### -# list - list all files/paths in path -#################################################################################################################################### -sub list -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->list', \@_, - {name => 'strPath', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - # Working variables - my @stryFileList; - my $hPath; - - # Attempt to open the path - if (opendir($hPath, $strPath)) - { - @stryFileList = grep(!/^(\.)|(\.\.)$/i, readdir($hPath)); - close($hPath); - } - # Else process errors - else - { - # Ignore the error if the file is missing and missing files should be ignored - if (!($OS_ERROR{ENOENT} && $bIgnoreMissing)) - { - logErrorResult($OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, "unable to read path '${strPath}'", $OS_ERROR); - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'stryFileList', value => \@stryFileList, ref => true, trace => true} - ); -} - -#################################################################################################################################### -# manifest - build path/file/link manifest starting with base path and including all subpaths -#################################################################################################################################### -sub manifest -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $bIgnoreMissing, - $strFilter, - ) = - logDebugParam - ( - __PACKAGE__ . '->manifest', \@_, - {name => 'strPath', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - {name => 'strFilter', optional => true, trace => true}, - ); - - # Generate the manifest - my $hManifest = {}; - $self->manifestRecurse($strPath, undef, 0, $hManifest, $bIgnoreMissing, $strFilter); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hManifest', value => $hManifest, trace => true} - ); -} - -sub manifestRecurse -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $strSubPath, - $iDepth, - $hManifest, - $bIgnoreMissing, - $strFilter, - ) = - logDebugParam - ( - __PACKAGE__ . 
'::manifestRecurse', \@_, - {name => 'strPath', trace => true}, - {name => 'strSubPath', required => false, trace => true}, - {name => 'iDepth', default => 0, trace => true}, - {name => 'hManifest', required => false, trace => true}, - {name => 'bIgnoreMissing', required => false, default => false, trace => true}, - {name => 'strFilter', required => false, trace => true}, - ); - - # Set operation and debug strings - my $strPathRead = $strPath . (defined($strSubPath) ? "/${strSubPath}" : ''); - my $hPath; - - # If this is the top level stat the path to discover if it is actually a file - my $oPathInfo = $self->info($strPathRead, {bIgnoreMissing => $bIgnoreMissing}); - - if (defined($oPathInfo)) - { - # If the initial path passed is a file then generate the manifest for just that file - if ($iDepth == 0 && !S_ISDIR($oPathInfo->mode())) - { - $hManifest->{basename($strPathRead)} = $self->manifestStat($strPathRead); - } - # Else read as a normal directory - else - { - # Get a list of all files in the path (including .) - my @stryFileList = @{$self->list($strPathRead, {bIgnoreMissing => $iDepth != 0})}; - unshift(@stryFileList, '.'); - my $hFileStat = $self->manifestList($strPathRead, \@stryFileList, $strFilter); - - # Loop through all subpaths/files in the path - foreach my $strFile (keys(%{$hFileStat})) - { - my $strManifestFile = $iDepth == 0 ? $strFile : ($strSubPath . ($strFile eq qw(.) ? 
'' : "/${strFile}")); - $hManifest->{$strManifestFile} = $hFileStat->{$strFile}; - - # Recurse into directories - if ($hManifest->{$strManifestFile}{type} eq 'd' && $strFile ne qw(.)) - { - $self->manifestRecurse($strPath, $strManifestFile, $iDepth + 1, $hManifest); - } - } - } - } - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -sub manifestList -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $stryFile, - $strFilter, - ) = - logDebugParam - ( - __PACKAGE__ . '->manifestList', \@_, - {name => 'strPath', trace => true}, - {name => 'stryFile', trace => true}, - {name => 'strFilter', required => false, trace => true}, - ); - - my $hFileStat = {}; - - foreach my $strFile (@{$stryFile}) - { - if ($strFile ne '.' && defined($strFilter) && $strFilter ne $strFile) - { - next; - } - - $hFileStat->{$strFile} = $self->manifestStat("${strPath}" . ($strFile eq qw(.) ? '' : "/${strFile}")); - - if (!defined($hFileStat->{$strFile})) - { - delete($hFileStat->{$strFile}); - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hFileStat', value => $hFileStat, trace => true} - ); -} - -sub manifestStat -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - ) = - logDebugParam - ( - __PACKAGE__ . '->manifestStat', \@_, - {name => 'strFile', trace => true}, - ); - - # Stat the path/file, ignoring any that are missing - my $oStat = $self->info($strFile, {bIgnoreMissing => true}); - - # Generate file data if stat succeeded (i.e. 
file exists) - my $hFile; - - if (defined($oStat)) - { - # Check for regular file - if (S_ISREG($oStat->mode)) - { - $hFile->{type} = 'f'; - - # Get size - $hFile->{size} = $oStat->size; - - # Get modification time - $hFile->{modification_time} = $oStat->mtime; - } - # Check for directory - elsif (S_ISDIR($oStat->mode)) - { - $hFile->{type} = 'd'; - } - # Check for link - elsif (S_ISLNK($oStat->mode)) - { - $hFile->{type} = 'l'; - $hFile->{link_destination} = $self->linkDestination($strFile); - } - # Not a recognized type - else - { - confess &log(ERROR, "${strFile} is not of type directory, file, or link", ERROR_FILE_INVALID); - } - - # Get user name - $hFile->{user} = getpwuid($oStat->uid); - - # Get group name - $hFile->{group} = getgrgid($oStat->gid); - - # Get mode - if ($hFile->{type} ne 'l') - { - $hFile->{mode} = sprintf('%04o', S_IMODE($oStat->mode)); - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hFile', value => $hFile, trace => true} - ); -} - -#################################################################################################################################### -# move - move path/file -#################################################################################################################################### -sub move -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strSourceFile, - $strDestinationFile, - $bPathCreate, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->move', \@_, - {name => 'strSourceFile', trace => true}, - {name => 'strDestinationFile', trace => true}, - {name => 'bPathCreate', default => false, trace => true}, - ); - - # Get source and destination paths - my $strSourcePathFile = dirname($strSourceFile); - my $strDestinationPathFile = dirname($strDestinationFile); - - # Move the file - if (!rename($strSourceFile, $strDestinationFile)) - { - my $strMessage = "unable to move '${strSourceFile}'"; - - # If something is missing determine if it is the source or destination - if ($OS_ERROR{ENOENT}) - { - if (!$self->exists($strSourceFile)) - { - logErrorResult(ERROR_FILE_MISSING, "${strMessage} because it is missing"); - } - - if ($bPathCreate) - { - # Attempt to create the path - ignore exists here in case another process creates it first - $self->pathCreate($strDestinationPathFile, {bCreateParent => true, bIgnoreExists => true}); - - # Try move again - $self->move($strSourceFile, $strDestinationFile); - } - else - { - logErrorResult(ERROR_PATH_MISSING, "${strMessage} to missing path '${strDestinationPathFile}'"); - } - } - # Else raise the error - else - { - logErrorResult(ERROR_FILE_MOVE, "${strMessage} to '${strDestinationFile}'", $OS_ERROR); - } - } - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# openRead - open file for reading -#################################################################################################################################### -sub openRead -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->openRead', \@_, - {name => 'strFile', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - my $oFileIO = new pgBackRest::Storage::Posix::FileRead($self, $strFile, {bIgnoreMissing => $bIgnoreMissing}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIO', value => $oFileIO, trace => true}, - ); -} - -#################################################################################################################################### -# openWrite - open file for writing -#################################################################################################################################### -sub openWrite -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - $strMode, - $strUser, - $strGroup, - $lTimestamp, - $bPathCreate, - $bAtomic, - ) = - logDebugParam - ( - __PACKAGE__ . '->openWrite', \@_, - {name => 'strFile', trace => true}, - {name => 'strMode', optional => true, trace => true}, - {name => 'strUser', optional => true, trace => true}, - {name => 'strGroup', optional => true, trace => true}, - {name => 'lTimestamp', optional => true, trace => true}, - {name => 'bPathCreate', optional => true, trace => true}, - {name => 'bAtomic', optional => true, trace => true}, - ); - - my $oFileIO = new pgBackRest::Storage::Posix::FileWrite( - $self, $strFile, - {strMode => $strMode, strUser => $strUser, strGroup => $strGroup, lTimestamp => $lTimestamp, bPathCreate => $bPathCreate, - bAtomic => $bAtomic, bSync => $self->{bFileSync}}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIO', value => $oFileIO, trace => true}, - ); -} - -#################################################################################################################################### -# owner - change ownership of path/file 
-#################################################################################################################################### -sub owner -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFilePath, - $strUser, - $strGroup, - ) = - logDebugParam - ( - __PACKAGE__ . '->owner', \@_, - {name => 'strFilePath', trace => true}, - {name => 'strUser', optional => true, trace => true}, - {name => 'strGroup', optional => true, trace => true}, - ); - - # Only proceed if user or group was specified - if (defined($strUser) || defined($strGroup)) - { - my $strMessage = "unable to set ownership for '${strFilePath}'"; - my $iUserId; - my $iGroupId; - - # If the user or group is not defined then get it by stat'ing the file. This is because the chown function requires that - # both user and group be set. - my $oStat = $self->info($strFilePath); - - if (!defined($strUser)) - { - $iUserId = $oStat->uid; - } - - if (!defined($strGroup)) - { - $iGroupId = $oStat->gid; - } - - # Lookup user if specified - if (defined($strUser)) - { - $iUserId = getpwnam($strUser); - - if (!defined($iUserId)) - { - logErrorResult(ERROR_FILE_OWNER, "${strMessage} because user '${strUser}' does not exist"); - } - } - - # Lookup group if specified - if (defined($strGroup)) - { - $iGroupId = getgrnam($strGroup); - - if (!defined($iGroupId)) - { - logErrorResult(ERROR_FILE_OWNER, "${strMessage} because group '${strGroup}' does not exist"); - } - } - - # Set ownership on the file if the user or group would be changed - if ($iUserId != $oStat->uid || $iGroupId != $oStat->gid) - { - if (!chown($iUserId, $iGroupId, $strFilePath)) - { - logErrorResult(ERROR_FILE_OWNER, "${strMessage}", $OS_ERROR); - } - } - } - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# 
pathCreate - create path -#################################################################################################################################### -sub pathCreate -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $strMode, - $bIgnoreExists, - $bCreateParent, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathCreate', \@_, - {name => 'strPath', trace => true}, - {name => 'strMode', optional => true, default => '0750', trace => true}, - {name => 'bIgnoreExists', optional => true, default => false, trace => true}, - {name => 'bCreateParent', optional => true, default => false, trace => true}, - ); - - # Attempt to create the directory - if (!mkdir($strPath, oct($strMode))) - { - my $strMessage = "unable to create path '${strPath}'"; - - # If parent path is missing - if ($OS_ERROR{ENOENT}) - { - if (!$bCreateParent) - { - confess &log(ERROR, "${strMessage} because parent does not exist", ERROR_PATH_MISSING); - } - - # Create parent path - $self->pathCreate(dirname($strPath), {strMode => $strMode, bIgnoreExists => true, bCreateParent => $bCreateParent}); - - # Create path - $self->pathCreate($strPath, {strMode => $strMode, bIgnoreExists => true}); - } - # Else if path already exists - elsif ($OS_ERROR{EEXIST}) - { - if (!$bIgnoreExists) - { - confess &log(ERROR, "${strMessage} because it already exists", ERROR_PATH_EXISTS); - } - } - else - { - logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR); - } - } - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# pathExists - check if path exists -#################################################################################################################################### -sub pathExists -{ - my $self = shift; - - # Assign function parameters, 
defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathExists', \@_, - {name => 'strPath', trace => true}, - ); - - # Does the path/file exist? - my $bExists = true; - my $oStat = lstat($strPath); - - # Use stat to test if path exists - if (defined($oStat)) - { - # Check that it is actually a path - $bExists = S_ISDIR($oStat->mode) ? true : false; - } - else - { - # If the error is not entry missing, then throw error - if (!$OS_ERROR{ENOENT}) - { - logErrorResult(ERROR_FILE_EXISTS, "unable to test if path '${strPath}' exists", $OS_ERROR); - } - - $bExists = false; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists, trace => true} - ); -} - -#################################################################################################################################### -# pathSync - perform fsync on path -#################################################################################################################################### -sub pathSync -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->pathSync', \@_, - {name => 'strPath', trace => true}, - ); - - open(my $hPath, "<", $strPath) - or confess &log(ERROR, "unable to open '${strPath}' for sync", ERROR_PATH_OPEN); - open(my $hPathDup, ">&", $hPath) - or confess &log(ERROR, "unable to duplicate '${strPath}' handle for sync", ERROR_PATH_OPEN); - - $hPathDup->sync() - or confess &log(ERROR, "unable to sync path '${strPath}'", ERROR_PATH_SYNC); - - close($hPathDup); - close($hPath); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# remove - remove path/file -#################################################################################################################################### -sub remove -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPathFile, - $bIgnoreMissing, - $bRecurse, - ) = - logDebugParam - ( - __PACKAGE__ . '->remove', \@_, - {name => 'strPathFile', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - {name => 'bRecurse', optional => true, default => false, trace => true}, - ); - - # Working variables - my $bRemoved = true; - - # Remove a tree - if ($bRecurse) - { - # Dynamically load the driver - require pgBackRest::LibC; - pgBackRest::LibC->import(qw(:storage)); - - storagePosixPathRemove($strPathFile, !$bIgnoreMissing, $bRecurse) - } - # Only remove the specified file - else - { - foreach my $strFile (ref($strPathFile) ? @{$strPathFile} : ($strPathFile)) - { - if (unlink($strFile) != 1) - { - $bRemoved = false; - - # Throw error if this is not an ignored missing file - if (!($OS_ERROR{ENOENT} && $bIgnoreMissing)) - { - logErrorResult( - $OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, "unable to remove file '${strFile}'", $OS_ERROR); - } - } - } - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bRemoved', value => $bRemoved, trace => true} - ); -} - -#################################################################################################################################### -# Getters/Setters -#################################################################################################################################### -sub capability {true} -sub className {STORAGE_POSIX_DRIVER} -sub tempExtension {shift->{strTempExtension}} -sub tempExtensionSet {my $self = shift; $self->{strTempExtension} = shift} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/FileRead.pm pgbackrest-2.16/lib/pgBackRest/Storage/Posix/FileRead.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/FileRead.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Posix/FileRead.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,105 +0,0 @@ -#################################################################################################################################### -# Posix File Read -#################################################################################################################################### -package pgBackRest::Storage::Posix::FileRead; -use parent 'pgBackRest::Common::Io::Handle'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Fcntl qw(O_RDONLY); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign 
function parameters, defaults, and log debug info - my - ( - $strOperation, - $oDriver, - $strName, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oDriver', trace => true}, - {name => 'strName', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - # Open the file - my $fhFile; - - if (!sysopen($fhFile, $strName, O_RDONLY)) - { - if (!($OS_ERROR{ENOENT} && $bIgnoreMissing)) - { - logErrorResult($OS_ERROR{ENOENT} ? ERROR_FILE_MISSING : ERROR_FILE_OPEN, "unable to open '${strName}'", $OS_ERROR); - } - - undef($fhFile); - } - - # Create IO object if open succeeded - my $self; - - if (defined($fhFile)) - { - # Set file mode to binary - binmode($fhFile); - - # Create the class hash - $self = $class->SUPER::new("'${strName}'", $fhFile); - bless $self, $class; - - # Set variables - $self->{oDriver} = $oDriver; - $self->{strName} = $strName; - $self->{fhFile} = $fhFile; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - if (defined($self->handle())) - { - # Close the file - close($self->handle()); - undef($self->{fhFile}); - - # Close parent - $self->SUPER::close(); - } - - return true; -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub handle {shift->{fhFile}} -sub name {shift->{strName}} - -1; diff -Nru 
pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/FileWrite.pm pgbackrest-2.16/lib/pgBackRest/Storage/Posix/FileWrite.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Posix/FileWrite.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Posix/FileWrite.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,209 +0,0 @@ -#################################################################################################################################### -# Posix File Write -#################################################################################################################################### -package pgBackRest::Storage::Posix::FileWrite; -use parent 'pgBackRest::Common::Io::Handle'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC); -use File::Basename qw(dirname); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; - -use pgBackRest::Common::Io::Handle; -use pgBackRest::Storage::Base; - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oDriver, - $strName, - $strMode, - $strUser, - $strGroup, - $lTimestamp, - $bPathCreate, - $bAtomic, - $bSync, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'oDriver', trace => true}, - {name => 'strName', trace => true}, - {name => 'strMode', optional => true, trace => true}, - {name => 'strUser', optional => true, trace => true}, - {name => 'strGroup', optional => true, trace => true}, - {name => 'lTimestamp', optional => true, trace => true}, - {name => 'bPathCreate', optional => true, default => false, trace => true}, - {name => 'bAtomic', optional => true, default => false, trace => true}, - {name => 'bSync', optional => true, default => true, trace => true}, - ); - - # Create the class hash - my $self = $class->SUPER::new("'${strName}'"); - bless $self, $class; - - # Set variables - $self->{oDriver} = $oDriver; - $self->{strName} = $strName; - $self->{strMode} = $strMode; - $self->{strUser} = $strUser; - $self->{strGroup} = $strGroup; - $self->{lTimestamp} = $lTimestamp; - $self->{bPathCreate} = $bPathCreate; - $self->{bAtomic} = $bAtomic; - $self->{bSync} = $bSync; - - # If atomic create temp filename - if ($self->{bAtomic}) - { - # Create temp file name - $self->{strNameTmp} = "$self->{strName}." . $self->{oDriver}->tempExtension(); - } - - # Open file on first write to avoid creating extraneous files on error - $self->{bOpened} = false; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# open - open the file -#################################################################################################################################### -sub open -{ - my $self = shift; - - # Get the file name - my $strFile = $self->{bAtomic} ? $self->{strNameTmp} : $self->{strName}; - - # Open the file - if (!sysopen( - $self->{fhFile}, $strFile, O_WRONLY | O_CREAT | O_TRUNC, oct(defined($self->{strMode}) ? 
$self->{strMode} : '0666'))) - { - # If the path does not exist create it if requested - if ($OS_ERROR{ENOENT} && $self->{bPathCreate}) - { - $self->{oDriver}->pathCreate(dirname($strFile), {bIgnoreExists => true, bCreateParent => true}); - $self->{bPathCreate} = false; - return $self->open(); - } - - logErrorResult($OS_ERROR{ENOENT} ? ERROR_PATH_MISSING : ERROR_FILE_OPEN, "unable to open '${strFile}'", $OS_ERROR); - } - - # Set file mode to binary - binmode($self->{fhFile}); - - # Set the owner - $self->{oDriver}->owner($strFile, {strUser => $self->{strUser}, strGroup => $self->{strGroup}}); - - # Set handle - $self->handleWriteSet($self->{fhFile}); - - # Mark file as opened - $self->{bOpened} = true; - - return true; -} - -#################################################################################################################################### -# write - write data to a file -#################################################################################################################################### -sub write -{ - my $self = shift; - my $rtBuffer = shift; - - # Open file if it is not open already - $self->open() if !$self->opened(); - - return $self->SUPER::write($rtBuffer); -} - -#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - if (defined($self->handle())) - { - # Sync the file - if ($self->{bSync}) - { - $self->handle()->sync(); - } - - # Close the file - close($self->handle()); - undef($self->{fhFile}); - - # Get current filename - my $strCurrentName = $self->{bAtomic} ? 
$self->{strNameTmp} : $self->{strName}; - - # Set the modification time - if (defined($self->{lTimestamp})) - { - utime(time(), $self->{lTimestamp}, $strCurrentName) - or logErrorResult(ERROR_FILE_WRITE, "unable to set time for '${strCurrentName}'", $OS_ERROR); - } - - # Move the file from temp to final if atomic - if ($self->{bAtomic}) - { - $self->{oDriver}->move($strCurrentName, $self->{strName}); - } - - # Set result - $self->resultSet(COMMON_IO_HANDLE, $self->{lSize}); - - # Close parent - $self->SUPER::close(); - } - - return true; -} - -#################################################################################################################################### -# Close the handle if it is open (in case close() was never called) -#################################################################################################################################### -sub DESTROY -{ - my $self = shift; - - if (defined($self->handle())) - { - CORE::close($self->handle()); - undef($self->{fhFile}); - } -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub handle {shift->{fhFile}} -sub opened {shift->{bOpened}} -sub name {shift->{strName}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Auth.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/Auth.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Auth.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/Auth.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,283 +0,0 @@ -#################################################################################################################################### -# S3 Authentication -# -# Contains the functions required to do S3 authentication. 
It's a complicated topic and too much to cover here, but there is -# excellent documentation at http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html. -#################################################################################################################################### -package pgBackRest::Storage::S3::Auth; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Digest::SHA qw(hmac_sha256 hmac_sha256_hex); -use Exporter qw(import); - our @EXPORT = qw(); -use POSIX qw(strftime); - -use pgBackRest::Common::Http::Common; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:crypto); - -#################################################################################################################################### -# Constants -#################################################################################################################################### -use constant S3 => 's3'; -use constant AWS4 => 'AWS4'; -use constant AWS4_REQUEST => 'aws4_request'; -use constant AWS4_HMAC_SHA256 => 'AWS4-HMAC-SHA256'; - -use constant S3_HEADER_AUTHORIZATION => 'authorization'; - push @EXPORT, qw(S3_HEADER_AUTHORIZATION); -use constant S3_HEADER_DATE => 'x-amz-date'; - push @EXPORT, qw(S3_HEADER_DATE); -use constant S3_HEADER_CONTENT_SHA256 => 'x-amz-content-sha256'; - push @EXPORT, qw(S3_HEADER_CONTENT_SHA256); -use constant S3_HEADER_HOST => 'host'; - push @EXPORT, qw(S3_HEADER_HOST); -use constant S3_HEADER_TOKEN => 'x-amz-security-token'; - push @EXPORT, qw(S3_HEADER_TOKEN); - -use constant PAYLOAD_DEFAULT_HASH => cryptoHashOne('sha256', ''); - push @EXPORT, qw(PAYLOAD_DEFAULT_HASH); - -#################################################################################################################################### -# s3DateTime - format date/time for authentication 
-#################################################################################################################################### -sub s3DateTime -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $lTime, - ) = - logDebugParam - ( - __PACKAGE__ . '::s3DateTime', \@_, - {name => 'lTime', default => time(), trace => true}, - ); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strDateTime', value => strftime("%Y%m%dT%H%M%SZ", gmtime($lTime)), trace => true} - ); -} - -push @EXPORT, qw(s3DateTime); - -#################################################################################################################################### -# s3CanonicalRequest - strictly formatted version of the HTTP request used for signing -#################################################################################################################################### -sub s3CanonicalRequest -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strVerb, - $strUri, - $strQuery, - $hHeader, - $strPayloadHash, - ) = - logDebugParam - ( - __PACKAGE__ . '::s3CanonicalRequest', \@_, - {name => 'strVerb', trace => true}, - {name => 'strUri', trace => true}, - {name => 'strQuery', trace => true}, - {name => 'hHeader', trace => true}, - {name => 'strPayloadHash', trace => true}, - ); - - # Create the canonical request - my $strCanonicalRequest = - "${strVerb}\n${strUri}\n${strQuery}\n"; - my $strSignedHeaders; - - foreach my $strHeader (sort(keys(%{$hHeader}))) - { - if (lc($strHeader) ne $strHeader) - { - confess &log(ASSERT, "header '${strHeader}' must be lower case"); - } - - $strCanonicalRequest .= $strHeader . ":$hHeader->{$strHeader}\n"; - $strSignedHeaders .= (defined($strSignedHeaders) ? qw(;) : '') . 
lc($strHeader); - } - - $strCanonicalRequest .= "\n${strSignedHeaders}\n${strPayloadHash}"; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strCanonicalRequest', value => $strCanonicalRequest, trace => true}, - {name => 'strSignedHeaders', value => $strSignedHeaders, trace => true}, - ); -} - -push @EXPORT, qw(s3CanonicalRequest); - -#################################################################################################################################### -# s3SigningKey - signing keys last for seven days, but we'll regenerate every day because it doesn't seem too burdensome -#################################################################################################################################### -my $hSigningKeyCache; # Cache signing keys rather than regenerating them every time - -sub s3SigningKey -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strDate, - $strRegion, - $strSecretAccessKey, - ) = - logDebugParam - ( - __PACKAGE__ . '::s3SigningKey', \@_, - {name => 'strDate', trace => true}, - {name => 'strRegion', trace => true}, - {name => 'strSecretAccessKey', redact => true, trace => true}, - ); - - # Check for signing key in cache - my $strSigningKey = $hSigningKeyCache->{$strDate}{$strRegion}{$strSecretAccessKey}; - - # If not found then generate it - if (!defined($strSigningKey)) - { - my $strDateKey = hmac_sha256($strDate, AWS4 . 
$strSecretAccessKey); - my $strRegionKey = hmac_sha256($strRegion, $strDateKey); - my $strServiceKey = hmac_sha256(S3, $strRegionKey); - $strSigningKey = hmac_sha256(AWS4_REQUEST, $strServiceKey); - - # Cache the signing key - $hSigningKeyCache->{$strDate}{$strRegion}{$strSecretAccessKey} = $strSigningKey; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strSigningKey', value => $strSigningKey, redact => true, trace => true} - ); -} - -push @EXPORT, qw(s3SigningKey); - -#################################################################################################################################### -# s3StringToSign - string that will be signed by the signing key for authentication -#################################################################################################################################### -sub s3StringToSign -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strDateTime, - $strRegion, - $strCanonicalRequestHash, - ) = - logDebugParam - ( - __PACKAGE__ . '::s3StringToSign', \@_, - {name => 'strDateTime', trace => true}, - {name => 'strRegion', trace => true}, - {name => 'strCanonicalRequestHash', trace => true}, - ); - - my $strStringToSign = - AWS4_HMAC_SHA256 . "\n${strDateTime}\n" . substr($strDateTime, 0, 8) . "/${strRegion}/" . S3 . '/' . AWS4_REQUEST . "\n" . 
- $strCanonicalRequestHash; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'strStringToSign', value => $strStringToSign, trace => true} - ); -} - -push @EXPORT, qw(s3StringToSign); - -#################################################################################################################################### -# s3AuthorizationHeader - authorization string that will be used in the HTTP "authorization" header -#################################################################################################################################### -sub s3AuthorizationHeader -{ - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strRegion, - $strHost, - $strVerb, - $strUri, - $strQuery, - $strDateTime, - $hHeader, - $strAccessKeyId, - $strSecretAccessKey, - $strSecurityToken, - $strPayloadHash, - ) = - logDebugParam - ( - __PACKAGE__ . '::s3AuthorizationHeader', \@_, - {name => 'strRegion', trace => true}, - {name => 'strHost', trace => true}, - {name => 'strVerb', trace => true}, - {name => 'strUri', trace => true}, - {name => 'strQuery', trace => true}, - {name => 'strDateTime', trace => true}, - {name => 'hHeader', required => false, trace => true}, - {name => 'strAccessKeyId', redact => true, trace => true}, - {name => 'strSecretAccessKey', redact => true, trace => true}, - {name => 'strSecurityToken', required => false, redact => true, trace => true}, - {name => 'strPayloadHash', trace => true}, - ); - - # Delete the authorization header if it already exists. This could happen on a retry. 
- delete($hHeader->{&S3_HEADER_AUTHORIZATION}); - - # Add s3 required headers - $hHeader->{&S3_HEADER_HOST} = $strHost; - $hHeader->{&S3_HEADER_CONTENT_SHA256} = $strPayloadHash; - $hHeader->{&S3_HEADER_DATE} = $strDateTime; - - # Add security token if defined - if (defined($strSecurityToken)) - { - $hHeader->{&S3_HEADER_TOKEN} = $strSecurityToken; - } - - # Create authorization string - my ($strCanonicalRequest, $strSignedHeaders) = s3CanonicalRequest( - $strVerb, httpUriEncode($strUri, true), $strQuery, $hHeader, $strPayloadHash); - my $strStringToSign = s3StringToSign($strDateTime, $strRegion, cryptoHashOne('sha256', $strCanonicalRequest)); - - $hHeader->{&S3_HEADER_AUTHORIZATION} = - AWS4_HMAC_SHA256 . " Credential=${strAccessKeyId}/" . substr($strDateTime, 0, 8) . "/${strRegion}/" . S3 . qw(/) . - AWS4_REQUEST . ",SignedHeaders=${strSignedHeaders},Signature=" . hmac_sha256_hex($strStringToSign, - s3SigningKey(substr($strDateTime, 0, 8), $strRegion, $strSecretAccessKey)); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hHeader', value => $hHeader, trace => true}, - {name => 'strCanonicalRequest', value => $strCanonicalRequest, trace => true}, - {name => 'strSignedHeaders', value => $strSignedHeaders, trace => true}, - {name => 'strStringToSign', value => $strStringToSign, trace => true}, - ); -} - -push @EXPORT, qw(s3AuthorizationHeader); - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Driver.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/Driver.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Driver.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/Driver.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,507 +0,0 @@ -#################################################################################################################################### -# S3 Storage 
-#################################################################################################################################### -package pgBackRest::Storage::S3::Driver; -use parent 'pgBackRest::Storage::S3::Request'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); -use Digest::MD5 qw(md5_base64); -use File::Basename qw(basename dirname); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::Common::Xml; -use pgBackRest::Storage::S3::FileRead; -use pgBackRest::Storage::S3::FileWrite; -use pgBackRest::Storage::S3::Request; -use pgBackRest::Storage::S3::Info; - -#################################################################################################################################### -# Package name constant -#################################################################################################################################### -use constant STORAGE_S3_DRIVER => __PACKAGE__; - push @EXPORT, qw(STORAGE_S3_DRIVER); - -#################################################################################################################################### -# Query constants -#################################################################################################################################### -use constant S3_QUERY_CONTINUATION_TOKEN => 'continuation-token'; -use constant S3_QUERY_DELIMITER => 'delimiter'; -use constant S3_QUERY_LIST_TYPE => 'list-type'; -use constant S3_QUERY_PREFIX => 'prefix'; - -#################################################################################################################################### -# Batch maximum size -#################################################################################################################################### -use constant S3_BATCH_MAX => 1000; - 
-#################################################################################################################################### -# openWrite - open a file for write -#################################################################################################################################### -sub openWrite -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - ) = - logDebugParam - ( - __PACKAGE__ . '->openWrite', \@_, - {name => 'strFile', trace => true}, - ); - - my $oFileIO = new pgBackRest::Storage::S3::FileWrite($self, $strFile); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIO', value => $oFileIO, trace => true}, - ); -} - -#################################################################################################################################### -# openRead -#################################################################################################################################### -sub openRead -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->openRead', \@_, - {name => 'strFile', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - my $oFileIO = new pgBackRest::Storage::S3::FileRead($self, $strFile, {bIgnoreMissing => $bIgnoreMissing}); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oFileIO', value => $oFileIO, trace => true}, - ); -} - -#################################################################################################################################### -# manifest -#################################################################################################################################### -sub manifest -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $bRecurse, - $bPath, - ) = - logDebugParam - ( - __PACKAGE__ . '->manifest', \@_, - {name => 'strPath', trace => true}, - # Optional parameters not part of the driver spec - {name => 'bRecurse', optional => true, default => true, trace => true}, - {name => 'bPath', optional => true, default => true, trace => true}, - ); - - # Determine the prefix (this is the search path within the bucket) - my $strPrefix = $strPath eq qw{/} ? undef : substr($strPath, 1) . ($bPath ? qw{/} : ''); - - # A delimiter must be used if recursion is not desired - my $strDelimiter = $bRecurse ? 
undef : '/'; - - # Hash to hold the manifest - my $hManifest = {}; - - # Continuation token - returned from requests where there is more data to be fetched - my $strContinuationToken; - - do - { - # Get the file list - my $oResponse = $self->request( - HTTP_VERB_GET, {hQuery => - {&S3_QUERY_LIST_TYPE => 2, &S3_QUERY_PREFIX => $strPrefix, &S3_QUERY_DELIMITER => $strDelimiter, - &S3_QUERY_CONTINUATION_TOKEN => $strContinuationToken}, strResponseType => S3_RESPONSE_TYPE_XML}); - - # Modify the prefix for file searches so the filename is not stripped off - if (defined($strPrefix) && !$bPath) - { - # If there are no paths in the prefix then undef it - if (index($strPrefix, qw{/}) == -1) - { - undef($strPrefix); - } - else - { - $strPrefix = dirname($strPrefix) . qw{/}; - } - } - - # Store files - foreach my $oFile (xmlTagChildren($oResponse, "Contents")) - { - my $strName = xmlTagText($oFile, "Key"); - - # Strip off prefix - if (defined($strPrefix)) - { - $strName = substr($strName, length($strPrefix)); - } - - $hManifest->{$strName}->{type} = 'f'; - $hManifest->{$strName}->{size} = xmlTagText($oFile, 'Size') + 0; - - # Generate paths from the name if recursing - if ($bRecurse) - { - my @stryName = split(qw{/}, $strName); - - if (@stryName > 1) - { - $strName = undef; - - for (my $iIndex = 0; $iIndex < @stryName - 1; $iIndex++) - { - $strName .= (defined($strName) ? qw{/} : '') . 
$stryName[$iIndex]; - $hManifest->{$strName}->{type} = 'd'; - } - } - } - } - - # Store directories - if ($bPath && !$bRecurse) - { - foreach my $oPath (xmlTagChildren($oResponse, "CommonPrefixes")) - { - my $strName = xmlTagText($oPath, "Prefix"); - - # Strip off prefix - if (defined($strPrefix)) - { - $strName = substr($strName, length($strPrefix)); - } - - # Strip off final / - $strName = substr($strName, 0, length($strName) - 1); - - $hManifest->{$strName}->{type} = 'd'; - } - } - - $strContinuationToken = xmlTagText($oResponse, "NextContinuationToken", false); - } - while (defined($strContinuationToken)); - - # Add . for the initial path (this is just for compatibility with filesystems that have directories) - if ($bPath) - { - $hManifest->{qw{.}}->{type} = 'd'; - } - - # use Data::Dumper; &log(WARN, 'MANIFEST' . Dumper($hManifest)); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'hManifest', value => $hManifest, trace => true} - ); -} - -#################################################################################################################################### -# list - list a directory -#################################################################################################################################### -sub list -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - $strExpression - ) = - logDebugParam - ( - __PACKAGE__ . '->list', \@_, - {name => 'strPath', trace => true}, - {name => 'strExpression', optional => true, trace => true}, - ); - - # Use the regexp to build a prefix to shorten searches - my $strPrefix = regexPrefix($strExpression); - - # Get list using manifest function - my @stryFileList = grep( - !/^\.$/i, keys(%{$self->manifest( - $strPath . (defined($strPrefix) ? 
"/${strPrefix}" : ''), {bRecurse => false, bPath => !defined($strPrefix)})})); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'stryFileList', value => \@stryFileList, ref => true, trace => true} - ); -} - -#################################################################################################################################### -# pathCreate - directories do no exist in s3 so this is a noop -#################################################################################################################################### -sub pathCreate -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathCreate', \@_, - {name => 'strPath', trace => true}, - ); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# pathSync - directories do not exist in s3 so this is a noop -#################################################################################################################################### -sub pathSync -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->pathSync', \@_, - {name => 'strPath', trace => true}, - ); - - # Return from function and log return values if any - return logDebugReturn($strOperation); -} - -#################################################################################################################################### -# exists - check if a file exists -#################################################################################################################################### -sub exists -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - ) = - logDebugParam - ( - __PACKAGE__ . '->exists', \@_, - {name => 'strFile', trace => true}, - ); - - # Does the path/file exist? - my $bExists = defined($self->manifest($strFile, {bRecurse => false, bPath => false})->{basename($strFile)}) ? true : false; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists, trace => true} - ); -} - -#################################################################################################################################### -# pathExists -#################################################################################################################################### -sub pathExists -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strPath, - ) = - logDebugParam - ( - __PACKAGE__ . '->pathExists', \@_, - {name => 'strPath', trace => true}, - ); - - my $bExists = true; - - # Only check if path <> / - if ($strPath ne qw{/}) - { - # Does the path exist? - my $rhInfo = $self->manifest(dirname($strPath), {bRecurse => false, bPath => true})->{basename($strPath)}; - $bExists = defined($rhInfo) && $rhInfo->{type} eq 'd' ? 
true : false; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bExists', value => $bExists, trace => true} - ); -} - -#################################################################################################################################### -# info - get information about a file -#################################################################################################################################### -sub info -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strFile, - ) = - logDebugParam - ( - __PACKAGE__ . '->info', \@_, - {name => 'strFile', trace => true}, - ); - - # Get the file - my $rhFile = $self->manifest($strFile, {bRecurse => false, bPath => false})->{basename($strFile)}; - - if (!defined($rhFile)) - { - confess &log(ERROR, "unable to get info for missing file ${strFile}", ERROR_FILE_MISSING); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oInfo', value => new pgBackRest::Storage::S3::Info($rhFile->{size}), trace => true} - ); -} - -#################################################################################################################################### -# remove -#################################################################################################################################### -sub remove -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $rstryFile, - $bRecurse, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->remove', \@_, - {name => 'rstryFile', trace => true}, - {name => 'bRecurse', optional => true, default => false, trace => true}, - ); - - # Remove a tree - if ($bRecurse) - { - my $rhManifest = $self->manifest($rstryFile); - my @stryRemoveFile; - - # Iterate all files in the manifest - foreach my $strFile (sort({$b cmp $a} keys(%{$rhManifest}))) - { - next if $rhManifest->{$strFile}->{type} eq 'd'; - push(@stryRemoveFile, "${rstryFile}/${strFile}"); - } - - # Remove files - if (@stryRemoveFile > 0) - { - $self->remove(\@stryRemoveFile); - } - } - # Only remove the specified file - else - { - # If stryFile is a scalar, convert to an array - my $rstryFileAll = ref($rstryFile) ? $rstryFile : [$rstryFile]; - - do - { - my $strFile = shift(@{$rstryFileAll}); - my $iTotal = 0; - my $strXml = XML_HEADER . '<Delete><Quiet>true</Quiet>'; - - while (defined($strFile)) - { - $iTotal++; - $strXml .= '<Object><Key>' . xmlFromText(substr($strFile, 1)) . '</Key></Object>'; - - $strFile = $iTotal < S3_BATCH_MAX ? shift(@{$rstryFileAll}) : undef; - } - - $strXml .= '</Delete>'; - - my $hHeader = {'content-md5' => md5_base64($strXml) . 
'=='}; - - # Delete a file - my $oResponse = $self->request( - HTTP_VERB_POST, - {hQuery => 'delete=', rstrBody => \$strXml, hHeader => $hHeader, strResponseType => S3_RESPONSE_TYPE_XML}); - } - while (@{$rstryFileAll} > 0); - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'bResult', value => true, trace => true} - ); -} - - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub capability {false} -sub className {STORAGE_S3_DRIVER} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/FileRead.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/FileRead.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/FileRead.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/FileRead.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -#################################################################################################################################### -# S3 File Read -#################################################################################################################################### -package pgBackRest::Storage::S3::FileRead; -use parent 'pgBackRest::Common::Http::Client'; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Digest::MD5 qw(md5_base64); -use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC); -use File::Basename qw(dirname); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Common::Xml; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::S3::Request; - -#################################################################################################################################### -# CONSTRUCTOR 
-#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oDriver, - $strName, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oDriver', trace => true}, - {name => 'strName', trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - # Open file - my $self = $oDriver->request( - HTTP_VERB_GET, {strUri => $strName, strResponseType => S3_RESPONSE_TYPE_IO, bIgnoreMissing => $bIgnoreMissing}); - - # Bless with new class if file exists - if (defined($self)) - { - bless $self, $class; - } - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub name {shift->{strName}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/FileWrite.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/FileWrite.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/FileWrite.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/FileWrite.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,205 +0,0 @@ -#################################################################################################################################### -# S3 File Write -#################################################################################################################################### -package pgBackRest::Storage::S3::FileWrite; -use parent 'pgBackRest::Common::Io::Base'; - -use strict; -use warnings FATAL => 
qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Digest::MD5 qw(md5_base64); -use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC); -use File::Basename qw(dirname); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Io::Handle; -use pgBackRest::Common::Log; -use pgBackRest::Common::Xml; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::S3::Request; - -#################################################################################################################################### -# Constants -#################################################################################################################################### -use constant S3_BUFFER_MAX => 16777216; - -#################################################################################################################################### -# CONSTRUCTOR -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $oDriver, - $strName, - ) = - logDebugParam - ( - __PACKAGE__ . '->new', \@_, - {name => 'oDriver', trace => true}, - {name => 'strName', trace => true}, - ); - - # Create the class hash - my $self = $class->SUPER::new("'${strName}'"); - bless $self, $class; - - # Set variables - $self->{oDriver} = $oDriver; - $self->{strName} = $strName; - - # Start with an empty buffer - $self->{rtBuffer} = ''; - - # Has anything been written? - $self->{bWritten} = false; - - # How much has been written? 
- $self->{lSize} = 0; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# open - open the file -#################################################################################################################################### -sub open -{ - my $self = shift; - - # Request an upload id - my $oResponse = $self->{oDriver}->request( - HTTP_VERB_POST, {strUri => $self->{strName}, hQuery => 'uploads=', strResponseType => S3_RESPONSE_TYPE_XML}); - - $self->{strUploadId} = xmlTagText($oResponse, 'UploadId'); - - # Intialize the multi-part array - $self->{rstryMultiPart} = []; -} - -#################################################################################################################################### -# write - write data to a file -#################################################################################################################################### -sub write -{ - my $self = shift; - my $rtBuffer = shift; - - # Note that write has been called - $self->{bWritten} = true; - - if (defined($rtBuffer)) - { - $self->{rtBuffer} .= $$rtBuffer; - $self->{lSize} += length($$rtBuffer); - - # Wait until buffer is at least max before writing to avoid writing smaller files multi-part - if (length($self->{rtBuffer}) >= S3_BUFFER_MAX) - { - $self->flush(); - } - - return length($$rtBuffer); - } - - return 0; -} - -#################################################################################################################################### -# flush - flush whatever is in the buffer out -#################################################################################################################################### -sub flush -{ - my $self = shift; - - # Open file if it is not open already - $self->open() if 
!$self->opened(); - - # Put a file - $self->{oDriver}->request( - HTTP_VERB_PUT, - {strUri => $self->{strName}, - hQuery => {'partNumber' => @{$self->{rstryMultiPart}} + 1, 'uploadId' => $self->{strUploadId}}, - rstrBody => \$self->{rtBuffer}, hHeader => {'content-md5' => md5_base64($self->{rtBuffer}) . '=='}}); - - # Store the returned etag - push(@{$self->{rstryMultiPart}}, $self->{oDriver}->{hResponseHeader}{&S3_HEADER_ETAG}); - - # Clear the buffer - $self->{rtBuffer} = ''; -} - -#################################################################################################################################### -# close - close the file -#################################################################################################################################### -sub close -{ - my $self = shift; - - # Only close if something was written - if ($self->{bWritten}) - { - # Make sure close does not run again - $self->{bWritten} = false; - - # If the file is open then multipart transfer has already started and must be completed - if ($self->opened()) - { - # flush out whatever is in the buffer - $self->flush(); - - my $strXml = XML_HEADER . '<CompleteMultipartUpload>'; - my $iPartNo = 0; - - foreach my $strETag (@{$self->{rstryMultiPart}}) - { - $iPartNo++; - - $strXml .= "<Part><PartNumber>${iPartNo}</PartNumber><ETag>${strETag}</ETag></Part>"; - } - - $strXml .= '</CompleteMultipartUpload>'; - - # Finalize file - my $oResponse = $self->{oDriver}->request( - HTTP_VERB_POST, - {strUri => $self->{strName}, hQuery => {'uploadId' => $self->{strUploadId}}, - rstrBody => \$strXml, hHeader => {'content-md5' => md5_base64($strXml) . '=='}, - strResponseType => S3_RESPONSE_TYPE_XML}); - } - # Else the file can be transmitted in one block - else - { - $self->{oDriver}->request( - HTTP_VERB_PUT, - {strUri => $self->{strName}, rstrBody => \$self->{rtBuffer}, - hHeader => {'content-md5' => md5_base64($self->{rtBuffer}) . '=='}}); - } - } - - # This is how we report the write size back to the caller. 
It's a bit hokey -- results are based on the object name and in all - # other cases I/O passes through Io::Handle. It doesn't seem worth fixing since it could break things and this code is being - # migrated to C anyway. - $self->resultSet(COMMON_IO_HANDLE, $self->{lSize}); - - return true; -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub opened {defined(shift->{strUploadId})} -sub name {shift->{strName}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Info.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/Info.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Info.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/Info.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -#################################################################################################################################### -# S3 File Info -#################################################################################################################################### -package pgBackRest::Storage::S3::Info; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use pgBackRest::Common::Log; - -#################################################################################################################################### -# new -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Create the class hash - my $self = {}; - bless $self, $class; - - # Assign function parameters, defaults, and log debug info - ( - my $strOperation, - $self->{lSize}, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'lSize'}, - ); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self} - ); -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub size {shift->{lSize}} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Request.pm pgbackrest-2.16/lib/pgBackRest/Storage/S3/Request.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/S3/Request.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/S3/Request.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,264 +0,0 @@ -#################################################################################################################################### -# S3 Request -#################################################################################################################################### -package pgBackRest::Storage::S3::Request; - -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Exporter qw(import); - our @EXPORT = qw(); -use IO::Socket::SSL; - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Http::Client; -use pgBackRest::Common::Http::Common; -use pgBackRest::Common::Io::Base; -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::Common::Xml; -use pgBackRest::LibC qw(:crypto); -use pgBackRest::Storage::S3::Auth; - -#################################################################################################################################### -# Constants -#################################################################################################################################### -use constant HTTP_VERB_GET => 'GET'; - push @EXPORT, qw(HTTP_VERB_GET); -use 
constant HTTP_VERB_POST => 'POST'; - push @EXPORT, qw(HTTP_VERB_POST); -use constant HTTP_VERB_PUT => 'PUT'; - push @EXPORT, qw(HTTP_VERB_PUT); - -use constant S3_HEADER_CONTENT_LENGTH => 'content-length'; - push @EXPORT, qw(S3_HEADER_CONTENT_LENGTH); -use constant S3_HEADER_TRANSFER_ENCODING => 'transfer-encoding'; - push @EXPORT, qw(S3_HEADER_TRANSFER_ENCODING); -use constant S3_HEADER_ETAG => 'etag'; - push @EXPORT, qw(S3_HEADER_ETAG); - -use constant S3_RESPONSE_TYPE_IO => 'io'; - push @EXPORT, qw(S3_RESPONSE_TYPE_IO); -use constant S3_RESPONSE_TYPE_NONE => 'none'; - push @EXPORT, qw(S3_RESPONSE_TYPE_NONE); -use constant S3_RESPONSE_TYPE_XML => 'xml'; - push @EXPORT, qw(S3_RESPONSE_TYPE_XML); - -use constant S3_RESPONSE_CODE_SUCCESS => 200; -use constant S3_RESPONSE_CODE_ERROR_AUTH => 403; -use constant S3_RESPONSE_CODE_ERROR_NOT_FOUND => 404; -use constant S3_RESPONSE_CODE_ERROR_RETRY_CLASS => 5; - -use constant S3_RETRY_MAX => 4; - -#################################################################################################################################### -# new -#################################################################################################################################### -sub new -{ - my $class = shift; - - # Create the class hash - my $self = {}; - bless $self, $class; - - # Assign function parameters, defaults, and log debug info - ( - my $strOperation, - $self->{strBucket}, - $self->{strEndPoint}, - $self->{strRegion}, - $self->{strAccessKeyId}, - $self->{strSecretAccessKey}, - $self->{strSecurityToken}, - $self->{strHost}, - $self->{iPort}, - $self->{bVerifySsl}, - $self->{strCaPath}, - $self->{strCaFile}, - $self->{lBufferMax}, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->new', \@_, - {name => 'strBucket'}, - {name => 'strEndPoint'}, - {name => 'strRegion'}, - {name => 'strAccessKeyId', redact => true}, - {name => 'strSecretAccessKey', redact => true}, - {name => 'strSecurityToken', optional => true, redact => true}, - {name => 'strHost', optional => true}, - {name => 'iPort', optional => true}, - {name => 'bVerifySsl', optional => true, default => true}, - {name => 'strCaPath', optional => true}, - {name => 'strCaFile', optional => true}, - {name => 'lBufferMax', optional => true, default => COMMON_IO_BUFFER_MAX}, - ); - - # If host is not set then it will be bucket + endpoint - $self->{strHost} = defined($self->{strHost}) ? $self->{strHost} : "$self->{strBucket}.$self->{strEndPoint}"; - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'self', value => $self, trace => true} - ); -} - -#################################################################################################################################### -# request - send a request to S3 -#################################################################################################################################### -sub request -{ - my $self = shift; - - # Assign function parameters, defaults, and log debug info - my - ( - $strOperation, - $strVerb, - $strUri, - $hQuery, - $hHeader, - $rstrBody, - $strResponseType, - $bIgnoreMissing, - ) = - logDebugParam - ( - __PACKAGE__ . 
'->request', \@_, - {name => 'strVerb', trace => true}, - {name => 'strUri', optional => true, default => '/', trace => true}, - {name => 'hQuery', optional => true, trace => true}, - {name => 'hHeader', optional => true, trace => true}, - {name => 'rstrBody', optional => true, trace => true}, - {name => 'strResponseType', optional => true, default => S3_RESPONSE_TYPE_NONE, trace => true}, - {name => 'bIgnoreMissing', optional => true, default => false, trace => true}, - ); - - # Server response - my $oResponse; - - # Allow retries on S3 internal failures - my $bRetry; - my $iRetryTotal = 0; - - do - { - # Assume that a retry will not be attempted which is true in most cases - $bRetry = false; - - # Set content length and hash - $hHeader->{&S3_HEADER_CONTENT_SHA256} = defined($rstrBody) ? cryptoHashOne('sha256', $$rstrBody) : PAYLOAD_DEFAULT_HASH; - $hHeader->{&S3_HEADER_CONTENT_LENGTH} = defined($rstrBody) ? length($$rstrBody) : 0; - - # Generate authorization header - ($hHeader, my $strCanonicalRequest, my $strSignedHeaders, my $strStringToSign) = s3AuthorizationHeader( - $self->{strRegion}, "$self->{strBucket}.$self->{strEndPoint}", $strVerb, $strUri, httpQuery($hQuery), s3DateTime(), - $hHeader, $self->{strAccessKeyId}, $self->{strSecretAccessKey}, $self->{strSecurityToken}, - $hHeader->{&S3_HEADER_CONTENT_SHA256}); - - # Send the request - my $oHttpClient = new pgBackRest::Common::Http::Client( - $self->{strHost}, $strVerb, - {iPort => $self->{iPort}, strUri => $strUri, hQuery => $hQuery, hRequestHeader => $hHeader, - rstrRequestBody => $rstrBody, bVerifySsl => $self->{bVerifySsl}, strCaPath => $self->{strCaPath}, - strCaFile => $self->{strCaFile}, bResponseBodyPrefetch => $strResponseType eq S3_RESPONSE_TYPE_XML, - lBufferMax => $self->{lBufferMax}}); - - # Check response code - my $iResponseCode = $oHttpClient->responseCode(); - - if ($iResponseCode == S3_RESPONSE_CODE_SUCCESS) - { - # Save the response headers locally - $self->{hResponseHeader} = 
$oHttpClient->responseHeader(); - - # XML response is expected - if ($strResponseType eq S3_RESPONSE_TYPE_XML) - { - my $rtResponseBody = $oHttpClient->responseBody(); - - if ($oHttpClient->contentLength() == 0 || !defined($$rtResponseBody)) - { - confess &log(ERROR, - "response type '${strResponseType}' was requested but content length is zero or content is missing", - ERROR_PROTOCOL); - } - - $oResponse = xmlParse($$rtResponseBody); - } - # An IO object is expected for file responses - elsif ($strResponseType eq S3_RESPONSE_TYPE_IO) - { - $oResponse = $oHttpClient; - } - } - else - { - # If file was not found - if ($iResponseCode == S3_RESPONSE_CODE_ERROR_NOT_FOUND) - { - # If missing files should not be ignored then error - if (!$bIgnoreMissing) - { - confess &log(ERROR, "unable to open '${strUri}': No such file or directory", ERROR_FILE_MISSING); - } - - $bRetry = false; - } - # Else a more serious error - else - { - # Retry for S3 internal or rate-limiting errors (any 5xx error should be retried) - if (int($iResponseCode / 100) == S3_RESPONSE_CODE_ERROR_RETRY_CLASS) - { - # Increment retry total and check if retry should be attempted - $iRetryTotal++; - $bRetry = $iRetryTotal <= S3_RETRY_MAX; - - # Sleep after first retry just in case data needs to stabilize - if ($iRetryTotal > 1) - { - sleep(5); - } - } - - # If no retry then throw the error - if (!$bRetry) - { - my $rstrResponseBody = $oHttpClient->responseBody(); - - # Redact authorization header because it contains the access key - my $strRequestHeader = $oHttpClient->requestHeaderText(); - $strRequestHeader =~ s/^${\S3_HEADER_AUTHORIZATION}:.*$/${\S3_HEADER_AUTHORIZATION}: <redacted>/mg; - - confess &log(ERROR, - 'S3 request error' . ($iRetryTotal > 0 ? " after " . (S3_RETRY_MAX + 1) . " tries" : '') . - " [$iResponseCode] " . $oHttpClient->responseMessage() . - "\n*** request header ***\n${strRequestHeader}" . - ($iResponseCode == S3_RESPONSE_CODE_ERROR_AUTH ? - "\n*** canonical request ***\n" . 
$strCanonicalRequest . - "\n*** signed headers ***\n" . $strSignedHeaders . - "\n*** string to sign ***\n" . $strStringToSign : '') . - "\n*** response header ***\n" . $oHttpClient->responseHeaderText() . - (defined($$rstrResponseBody) ? "\n*** response body ***\n${$rstrResponseBody}" : ''), - ERROR_PROTOCOL); - } - } - } - } - while ($bRetry); - - # Return from function and log return values if any - return logDebugReturn - ( - $strOperation, - {name => 'oResponse', value => $oResponse, trace => true, ref => true} - ); -} - -1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/Storage.pm pgbackrest-2.16/lib/pgBackRest/Storage/Storage.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/Storage.pm 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/Storage.pm 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,1030 @@ +#################################################################################################################################### +# C Storage Interface +#################################################################################################################################### +package pgBackRest::Storage::Storage; +use parent 'pgBackRest::Storage::Base'; + +use strict; +use warnings FATAL => qw(all); +use Carp qw(confess); +use English '-no_match_vars'; + +use File::Basename qw(dirname); +use Fcntl qw(:mode); +use File::stat qw{lstat}; +use JSON::PP; + +use pgBackRest::Common::Exception; +use pgBackRest::Common::Io::Handle; +use pgBackRest::Common::Log; +use pgBackRest::Storage::Base; +use pgBackRest::Storage::StorageRead; +use pgBackRest::Storage::StorageWrite; + +#################################################################################################################################### +# new +#################################################################################################################################### +sub new +{ + my $class = shift; + + # Create the class hash + my $self = {}; + bless 
$self, $class; + + # Assign function parameters, defaults, and log debug info + ( + my $strOperation, + $self->{strType}, + $self->{strPath}, + $self->{lBufferMax}, + $self->{strDefaultPathMode}, + $self->{strDefaultFileMode}, + ) = + logDebugParam + ( + __PACKAGE__ . '->new', \@_, + {name => 'strType'}, + {name => 'strPath', optional => true}, + {name => 'lBufferMax', optional => true, default => 65536}, + {name => 'strDefaultPathMode', optional => true, default => '0750'}, + {name => 'strDefaultFileMode', optional => true, default => '0640'}, + ); + + # Create C storage object + $self->{oStorageC} = pgBackRest::LibC::Storage->new($self->{strType}, $self->{strPath}); + + # Get encryption settings + if ($self->{strType} eq '') + { + $self->{strCipherType} = $self->{oStorageC}->cipherType(); + $self->{strCipherPass} = $self->{oStorageC}->cipherPass(); + } + + # Create JSON object + $self->{oJSON} = JSON::PP->new()->allow_nonref(); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'self', value => $self} + ); +} + +#################################################################################################################################### +# Check if file exists (not a path) +#################################################################################################################################### +sub exists +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strFileExp, + ) = + logDebugParam + ( + __PACKAGE__ . '->exists', \@_, + {name => 'strFileExp'}, + ); + + # Check exists + my $bExists = $self->{oStorageC}->exists($strFileExp); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'bExists', value => $bExists ? 
true : false} + ); +} + +#################################################################################################################################### +# Read a buffer from storage all at once +#################################################################################################################################### +sub get +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFile, + $strCipherPass, + ) = + logDebugParam + ( + __PACKAGE__ . '->get', \@_, + {name => 'xFile', required => false, trace => true}, + {name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true}, + ); + + # Is this an IO object or a file expression? If file expression, then open the file and pass passphrase if one is defined or + # if the repo has a user passphrase defined - else pass undef + my $oFileIo = defined($xFile) ? (ref($xFile) ? $xFile : $self->openRead($xFile, {strCipherPass => $strCipherPass})) : undef; + + # Get the file contents + my $bEmpty = false; + my $tContent = $self->{oStorageC}->get($oFileIo->{oStorageCRead}); + + if (defined($tContent) && length($tContent) == 0) + { + $tContent = undef; + $bEmpty = true; + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'rtContent', value => defined($tContent) || $bEmpty ? \$tContent : undef, trace => true}, + ); +} + +#################################################################################################################################### +# Calculate sha1 hash and size of file. If special encryption settings are required, then the file objects from openRead/openWrite +# must be passed instead of file names. 
+#################################################################################################################################### +sub hashSize +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFileExp, + $bIgnoreMissing, + ) = + logDebugParam + ( + __PACKAGE__ . '->hashSize', \@_, + {name => 'xFileExp'}, + {name => 'bIgnoreMissing', optional => true, default => false}, + ); + + # Set operation variables + my $strHash; + my $lSize; + + # Is this an IO object or a file expression? + my $oFileIo = ref($xFileExp) ? $xFileExp : $self->openRead($xFileExp, {bIgnoreMissing => $bIgnoreMissing}); + + # Add size and sha filters + $oFileIo->{oStorageCRead}->filterAdd(COMMON_IO_HANDLE, undef); + $oFileIo->{oStorageCRead}->filterAdd(STORAGE_FILTER_SHA, undef); + + # Read the file and set results if it exists + if ($self->{oStorageC}->readDrain($oFileIo->{oStorageCRead})) + { + $strHash = $oFileIo->result(STORAGE_FILTER_SHA); + $lSize = $oFileIo->result(COMMON_IO_HANDLE); + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'strHash', value => $strHash}, + {name => 'lSize', value => $lSize} + ); +} + +#################################################################################################################################### +# Get information for path/file +#################################################################################################################################### +sub info +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathFileExp, + $bIgnoreMissing, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->info', \@_, + {name => 'strPathFileExp'}, + {name => 'bIgnoreMissing', optional => true, default => false}, + ); + + my $rhInfo; + my $strJson = $self->{oStorageC}->info($strPathFileExp, $bIgnoreMissing); + + if (defined($strJson)) + { + $rhInfo = $self->{oJSON}->decode($strJson); + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'rhInfo', value => $rhInfo, trace => true} + ); +} + +#################################################################################################################################### +# linkCreate - create a link +#################################################################################################################################### +sub linkCreate +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strSourcePathFileExp, + $strDestinationLinkExp, + $bHard, + $bRelative, + $bPathCreate, + $bIgnoreExists, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->linkCreate', \@_, + {name => 'strSourcePathFileExp'}, + {name => 'strDestinationLinkExp'}, + {name => 'bHard', optional=> true, default => false}, + {name => 'bRelative', optional=> true, default => false}, + {name => 'bPathCreate', optional=> true, default => true}, + {name => 'bIgnoreExists', optional => true, default => false}, + ); + + # Get source and destination paths + my $strSourcePathFile = $self->pathGet($strSourcePathFileExp); + my $strDestinationLink = $self->pathGet($strDestinationLinkExp); + + # Generate relative path if requested + if ($bRelative) + { + # Determine how much of the paths are common + my @strySource = split('/', $strSourcePathFile); + my @stryDestination = split('/', $strDestinationLink); + + while (defined($strySource[0]) && defined($stryDestination[0]) && $strySource[0] eq $stryDestination[0]) + { + shift(@strySource); + shift(@stryDestination); + } + + # Add relative path sections + $strSourcePathFile = ''; + + for (my $iIndex = 0; $iIndex < @stryDestination - 1; $iIndex++) + { + $strSourcePathFile .= '../'; + } + + # Add path to source + $strSourcePathFile .= join('/', @strySource); + + logDebugMisc + ( + $strOperation, 'apply relative path', + {name => 'strSourcePathFile', value => $strSourcePathFile, trace => true} + ); + } + + if (!($bHard ? 
link($strSourcePathFile, $strDestinationLink) : symlink($strSourcePathFile, $strDestinationLink))) + { + my $strMessage = "unable to create link '${strDestinationLink}'"; + + # If parent path or source is missing + if ($OS_ERROR{ENOENT}) + { + # Check if source is missing + if (!$self->exists($strSourcePathFile)) + { + confess &log(ERROR, "${strMessage} because source '${strSourcePathFile}' does not exist", ERROR_FILE_MISSING); + } + + if (!$bPathCreate) + { + confess &log(ERROR, "${strMessage} because parent does not exist", ERROR_PATH_MISSING); + } + + # Create parent path + $self->pathCreate(dirname($strDestinationLink), {bIgnoreExists => true, bCreateParent => true}); + + # Create link + $self->linkCreate($strSourcePathFile, $strDestinationLink, {bHard => $bHard}); + } + # Else if link already exists + elsif ($OS_ERROR{EEXIST}) + { + if (!$bIgnoreExists) + { + confess &log(ERROR, "${strMessage} because it already exists", ERROR_PATH_EXISTS); + } + } + else + { + logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR); + } + } + + # Return from function and log return values if any + return logDebugReturn($strOperation); +} + +#################################################################################################################################### +# List all files/paths in path +#################################################################################################################################### +sub list +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + $strExpression, + $strSortOrder, + $bIgnoreMissing, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->list', \@_, + {name => 'strPathExp', required => false}, + {name => 'strExpression', optional => true}, + {name => 'strSortOrder', optional => true, default => 'forward'}, + {name => 'bIgnoreMissing', optional => true, default => false}, + ); + + # Get file list + my $rstryFileList = []; + my $strFileList = $self->{oStorageC}->list($strPathExp, $bIgnoreMissing, $strSortOrder eq 'forward', $strExpression); + + if (defined($strFileList) && $strFileList ne '[]') + { + $rstryFileList = $self->{oJSON}->decode($strFileList); + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'stryFileList', value => $rstryFileList} + ); +} + +#################################################################################################################################### +# Build path/file/link manifest starting with base path and including all subpaths +#################################################################################################################################### +sub manifest +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + $strFilter, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->manifest', \@_, + {name => 'strPathExp'}, + {name => 'strFilter', optional => true, trace => true}, + ); + + my $hManifest = $self->{oJSON}->decode($self->{oStorageC}->manifest($strPathExp, $strFilter)); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'hManifest', value => $hManifest, trace => true} + ); +} + +#################################################################################################################################### +# move - move path/file +#################################################################################################################################### +sub move +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strSourceFileExp, + $strDestinationFileExp, + $bPathCreate, + ) = + logDebugParam + ( + __PACKAGE__ . '->move', \@_, + {name => 'strSourceFileExp'}, + {name => 'strDestinationFileExp'}, + ); + + # Get source and destination paths + my $strSourceFile = $self->pathGet($strSourceFileExp); + my $strDestinationFile = $self->pathGet($strDestinationFileExp); + + # Move the file + if (!rename($strSourceFile, $strDestinationFile)) + { + logErrorResult(ERROR_FILE_MOVE, "unable to move '${strSourceFile}' to '${strDestinationFile}'", $OS_ERROR); + } + + # Return from function and log return values if any + return logDebugReturn($strOperation); +} + +#################################################################################################################################### +# Open file for reading +#################################################################################################################################### +sub openRead +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFileExp, + $bIgnoreMissing, + $rhyFilter, + $strCipherPass, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->openRead', \@_, + {name => 'xFileExp'}, + {name => 'bIgnoreMissing', optional => true, default => false}, + {name => 'rhyFilter', optional => true}, + {name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true}, + ); + + # Open the file + my $oFileIo = pgBackRest::LibC::StorageRead->new($self->{oStorageC}, $xFileExp, $bIgnoreMissing); + + # If cipher is set then decryption is the first filter applied to the read + if (defined($self->cipherType())) + { + $oFileIo->filterAdd(STORAGE_FILTER_CIPHER_BLOCK, $self->{oJSON}->encode([false, $self->cipherType(), $strCipherPass])); + } + + # Apply any other filters + if (defined($rhyFilter)) + { + foreach my $rhFilter (@{$rhyFilter}) + { + $oFileIo->filterAdd( + $rhFilter->{strClass}, defined($rhFilter->{rxyParam}) ? $self->{oJSON}->encode($rhFilter->{rxyParam}) : undef); + } + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'oFileIo', value => new pgBackRest::Storage::StorageRead($self, $oFileIo), trace => true}, + ); +} + +#################################################################################################################################### +# Open file for writing +#################################################################################################################################### +sub openWrite +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFileExp, + $strMode, + $strUser, + $strGroup, + $lTimestamp, + $bAtomic, + $bPathCreate, + $rhyFilter, + $strCipherPass, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->openWrite', \@_, + {name => 'xFileExp'}, + {name => 'strMode', optional => true, default => $self->{strDefaultFileMode}}, + {name => 'strUser', optional => true}, + {name => 'strGroup', optional => true}, + {name => 'lTimestamp', optional => true, default => '0'}, + {name => 'bAtomic', optional => true, default => false}, + {name => 'bPathCreate', optional => true, default => false}, + {name => 'rhyFilter', optional => true}, + {name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true}, + ); + + # Open the file + my $oFileIo = pgBackRest::LibC::StorageWrite->new( + $self->{oStorageC}, $xFileExp, oct($strMode), $strUser, $strGroup, $lTimestamp, $bAtomic, $bPathCreate); + + # Apply any other filters + if (defined($rhyFilter)) + { + foreach my $rhFilter (@{$rhyFilter}) + { + $oFileIo->filterAdd( + $rhFilter->{strClass}, defined($rhFilter->{rxyParam}) ? $self->{oJSON}->encode($rhFilter->{rxyParam}) : undef); + } + } + + # If cipher is set then encryption is the last filter applied to the write + if (defined($self->cipherType())) + { + $oFileIo->filterAdd(STORAGE_FILTER_CIPHER_BLOCK, $self->{oJSON}->encode([true, $self->cipherType(), $strCipherPass])); + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'oFileIo', value => new pgBackRest::Storage::StorageWrite($self, $oFileIo), trace => true}, + ); +} + +#################################################################################################################################### +# Change ownership of path/file +#################################################################################################################################### +sub owner +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathFileExp, + $strUser, + $strGroup + ) = + logDebugParam + ( + __PACKAGE__ . 
'->owner', \@_, + {name => 'strPathFileExp'}, + {name => 'strUser', required => false}, + {name => 'strGroup', required => false} + ); + + # Only proceed if user or group was specified + if (defined($strUser) || defined($strGroup)) + { + my $strPathFile = $self->pathGet($strPathFileExp); + my $strMessage = "unable to set ownership for '${strPathFile}'"; + my $iUserId; + my $iGroupId; + + # If the user or group is not defined then get it by stat'ing the file. This is because the chown function requires that + # both user and group be set. + my $oStat = lstat($strPathFile); + + if (!defined($oStat)) + { + confess &log(ERROR, "unable to stat '${strPathFile}': No such file or directory", ERROR_FILE_MISSING); + } + + if (!defined($strUser)) + { + $iUserId = $oStat->uid; + } + + if (!defined($strGroup)) + { + $iGroupId = $oStat->gid; + } + + # Lookup user if specified + if (defined($strUser)) + { + $iUserId = getpwnam($strUser); + + if (!defined($iUserId)) + { + logErrorResult(ERROR_FILE_OWNER, "${strMessage} because user '${strUser}' does not exist"); + } + } + + # Lookup group if specified + if (defined($strGroup)) + { + $iGroupId = getgrnam($strGroup); + + if (!defined($iGroupId)) + { + logErrorResult(ERROR_FILE_OWNER, "${strMessage} because group '${strGroup}' does not exist"); + } + } + + # Set ownership on the file if the user or group would be changed + if ($iUserId != $oStat->uid || $iGroupId != $oStat->gid) + { + if (!chown($iUserId, $iGroupId, $strPathFile)) + { + logErrorResult(ERROR_FILE_OWNER, "${strMessage}", $OS_ERROR); + } + } + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation + ); +} + +#################################################################################################################################### +# Generate an absolute path from an absolute base path and a relative path 
+#################################################################################################################################### +sub pathAbsolute +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strBasePath, + $strPath + ) = + logDebugParam + ( + __PACKAGE__ . '->pathAbsolute', \@_, + {name => 'strBasePath', trace => true}, + {name => 'strPath', trace => true} + ); + + # Working variables + my $strAbsolutePath; + + # If the path is already absolute + if (index($strPath, '/') == 0) + { + $strAbsolutePath = $strPath; + } + # Else make it absolute using the base path + else + { + # Make sure the absolute path is really absolute + if (index($strBasePath, '/') != 0 || index($strBasePath, '/..') != -1) + { + confess &log(ERROR, "${strBasePath} is not an absolute path", ERROR_PATH_TYPE); + } + + while (index($strPath, '..') == 0) + { + $strBasePath = dirname($strBasePath); + $strPath = substr($strPath, 2); + + if (index($strPath, '/') == 0) + { + $strPath = substr($strPath, 1); + } + } + + $strAbsolutePath = "${strBasePath}/${strPath}"; + } + + # Make sure the result is really an absolute path + if (index($strAbsolutePath, '/') != 0 || index($strAbsolutePath, '/..') != -1) + { + confess &log(ERROR, "result ${strAbsolutePath} was not an absolute path", ERROR_PATH_TYPE); + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'strAbsolutePath', value => $strAbsolutePath, trace => true} + ); +} + +#################################################################################################################################### +# Create a path +#################################################################################################################################### +sub pathCreate +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + $strMode, + $bIgnoreExists, 
+ $bCreateParent, + ) = + logDebugParam + ( + __PACKAGE__ . '->pathCreate', \@_, + {name => 'strPathExp'}, + {name => 'strMode', optional => true}, + {name => 'bIgnoreExists', optional => true, default => false}, + {name => 'bCreateParent', optional => true, default => false}, + ); + + # Create path + $self->{oStorageC}->pathCreate($strPathExp, $strMode, $bIgnoreExists, $bCreateParent); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation + ); +} + +#################################################################################################################################### +# Check if path exists +#################################################################################################################################### +sub pathExists +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + ) = + logDebugParam + ( + __PACKAGE__ . '->pathExists', \@_, + {name => 'strPathExp'}, + ); + + # Check exists + my $bExists = $self->{oStorageC}->pathExists($strPathExp); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'bExists', value => $bExists ? true : false} + ); +} + +#################################################################################################################################### +# Resolve a path expression into an absolute path +#################################################################################################################################### +sub pathGet +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->pathGet', \@_, + {name => 'strPathExp'}, + ); + + # Check exists + my $strPath = $self->{oStorageC}->pathGet($strPathExp); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'strPath', value => $strPath, trace => true} + ); +} + +#################################################################################################################################### +# Remove path and all files below it +#################################################################################################################################### +sub pathRemove +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + $bIgnoreMissing, + $bRecurse, + ) = + logDebugParam + ( + __PACKAGE__ . '->pathRemove', \@_, + {name => 'strPathExp'}, + {name => 'bIgnoreMissing', optional => true, default => true}, + {name => 'bRecurse', optional => true, default => false}, + ); + + $self->{oStorageC}->pathRemove($strPathExp, $bIgnoreMissing, $bRecurse); + + # Return from function and log return values if any + return logDebugReturn($strOperation); +} + +#################################################################################################################################### +# Sync path so newly added file entries are not lost +#################################################################################################################################### +sub pathSync +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strPathExp, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->pathSync', \@_, + {name => 'strPathExp'}, + ); + + $self->{oStorageC}->pathSync($strPathExp); + + # Return from function and log return values if any + return logDebugReturn($strOperation); +} + +#################################################################################################################################### +# put - writes a buffer out to storage all at once +#################################################################################################################################### +sub put +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFile, + $xContent, + $strCipherPass, + ) = + logDebugParam + ( + __PACKAGE__ . '->put', \@_, + {name => 'xFile', trace => true}, + {name => 'xContent', required => false, trace => true}, + {name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), trace => true, redact => true}, + ); + + # Is this an IO object or a file expression? If file expression, then open the file and pass passphrase if one is defined or if + # the repo has a user passphrase defined - else pass undef + my $oFileIo = ref($xFile) ? $xFile : $self->openWrite($xFile, {strCipherPass => $strCipherPass}); + + # Write the content + my $lSize = $self->{oStorageC}->put($oFileIo->{oStorageCWrite}, ref($xContent) ? 
$$xContent : $xContent); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'lSize', value => $lSize, trace => true}, + ); +} + +#################################################################################################################################### +# Remove file +#################################################################################################################################### +sub remove +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $xFileExp, + $bIgnoreMissing, + ) = + logDebugParam + ( + __PACKAGE__ . '->remove', \@_, + {name => 'xFileExp'}, + {name => 'bIgnoreMissing', optional => true, default => true}, + ); + + foreach my $strFileExp (ref($xFileExp) ? @{$xFileExp} : ($xFileExp)) + { + $self->{oStorageC}->remove($strFileExp, $bIgnoreMissing); + } + + # Return from function and log return values if any + return logDebugReturn($strOperation); +} + +#################################################################################################################################### +# encrypted - determine if the file is encrypted or not +#################################################################################################################################### +sub encrypted +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strFileExp, + $bIgnoreMissing, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->encrypted', \@_, + {name => 'strFileExp'}, + {name => 'bIgnoreMissing', optional => true, default => false}, + ); + + my $bEncrypted = false; + + # Open the file via the driver + my $oFileIo = new pgBackRest::Storage::StorageRead( + $self, pgBackRest::LibC::StorageRead->new($self->{oStorageC}, $strFileExp, $bIgnoreMissing)); + + # If the file does not exist because we're ignoring missing (else it would error before this is executed) then determine if it + # should be encrypted based on the repo + if (!$oFileIo->open()) + { + if (defined($self->cipherType())) + { + $bEncrypted = true; + } + } + else + { + # If the file does exist, then read the magic signature + my $tMagicSignature = ''; + my $lSizeRead = $oFileIo->read(\$tMagicSignature, length(CIPHER_MAGIC)); + $oFileIo->close(); + + if (substr($tMagicSignature, 0, length(CIPHER_MAGIC)) eq CIPHER_MAGIC) + { + $bEncrypted = true; + } + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'bEncrypted', value => $bEncrypted} + ); +} + +#################################################################################################################################### +# encryptionValid - determine if encyption set properly based on the value passed +#################################################################################################################################### +sub encryptionValid +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $bEncrypted, + ) = + logDebugParam + ( + __PACKAGE__ . '->encryptionValid', \@_, + {name => 'bEncrypted'}, + ); + + my $bValid = ($bEncrypted && defined($self->cipherType())) || (!$bEncrypted && !defined($self->cipherType())); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'bValid', value => $bValid ? 
true : false} + ); +} + +#################################################################################################################################### +# Getters +#################################################################################################################################### +sub capability {shift->type() eq STORAGE_POSIX} +sub type {shift->{oStorageC}->type()} +sub cipherType {shift->{strCipherType}} +sub cipherPassUser {shift->{strCipherPass}} + +1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/StorageRead.pm pgbackrest-2.16/lib/pgBackRest/Storage/StorageRead.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/StorageRead.pm 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/StorageRead.pm 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,190 @@ +#################################################################################################################################### +# C Storage Read Interface +#################################################################################################################################### +package pgBackRest::Storage::StorageRead; + +use strict; +use warnings FATAL => qw(all); +use Carp qw(confess); +use English '-no_match_vars'; + +use File::Basename qw(dirname); +use Fcntl qw(:mode); +use File::stat qw{lstat}; +use JSON::PP; + +use pgBackRest::Common::Exception; +use pgBackRest::Common::Log; +use pgBackRest::Storage::Base; + +#################################################################################################################################### +# new +#################################################################################################################################### +sub new +{ + my $class = shift; + + # Create the class hash + my $self = {}; + bless $self, $class; + + # Assign function parameters, defaults, and log debug info + ( + my $strOperation, + $self->{oStorage}, + $self->{oStorageCRead}, + ) = + logDebugParam + ( + 
__PACKAGE__ . '->new', \@_, + {name => 'oStorage'}, + {name => 'oStorageCRead'}, + ); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'self', value => $self} + ); +} + +#################################################################################################################################### +# Open the file +#################################################################################################################################### +sub open +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->open'); + + return logDebugReturn + ( + $strOperation, + {name => 'bResult', value => $self->{oStorageCRead}->open() ? true : false, trace => true}, + ); +} + +#################################################################################################################################### +# Read data +#################################################################################################################################### +sub read +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ( + $strOperation, + $rtBuffer, + $iSize, + ) = + logDebugParam + ( + __PACKAGE__ . '->read', \@_, + {name => 'rtBuffer'}, + {name => 'iSize'}, + ); + + # Read if not eof + my $iActualSize = 0; + + if (!$self->eof()) + { + my $tBuffer = $self->{oStorageCRead}->read($iSize); + $iActualSize = length($tBuffer); + $$rtBuffer .= $tBuffer; + } + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'iActualSize', value => $iActualSize} + ); +} + +#################################################################################################################################### +# Is the file at eof? 
+#################################################################################################################################### +sub eof +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->eof'); + + return logDebugReturn + ( + $strOperation, + {name => 'bResult', value => $self->{oStorageCRead}->eof() ? true : false, trace => true}, + ); +} + +#################################################################################################################################### +# Close the file +#################################################################################################################################### +sub close +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->close'); + + $self->{oStorageCRead}->close(); + + return logDebugReturn + ( + $strOperation, + {name => 'bResult', value => true, trace => true}, + ); +} + +#################################################################################################################################### +# Get a filter result +#################################################################################################################################### +sub result +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strClass, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->result', \@_, + {name => 'strClass'}, + ); + + my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCRead}->result($strClass)); + + return logDebugReturn + ( + $strOperation, + {name => 'xResult', value => $xResult, trace => true}, + ); +} + +#################################################################################################################################### +# Get all filter results +#################################################################################################################################### +sub resultAll +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->resultAll'); + + my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCRead}->resultAll()); + + return logDebugReturn + ( + $strOperation, + {name => 'xResultAll', value => $xResult, trace => true}, + ); +} + +1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Storage/StorageWrite.pm pgbackrest-2.16/lib/pgBackRest/Storage/StorageWrite.pm --- pgbackrest-2.15.1/lib/pgBackRest/Storage/StorageWrite.pm 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Storage/StorageWrite.pm 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,163 @@ +#################################################################################################################################### +# C Storage Write Interface +#################################################################################################################################### +package pgBackRest::Storage::StorageWrite; + +use strict; +use warnings FATAL => qw(all); +use Carp qw(confess); +use English '-no_match_vars'; + +use File::Basename qw(dirname); +use Fcntl qw(:mode); +use File::stat qw{lstat}; +use JSON::PP; + +use pgBackRest::Common::Exception; +use pgBackRest::Common::Log; +use pgBackRest::Storage::Base; + 
+#################################################################################################################################### +# new +#################################################################################################################################### +sub new +{ + my $class = shift; + + # Create the class hash + my $self = {}; + bless $self, $class; + + # Assign function parameters, defaults, and log debug info + ( + my $strOperation, + $self->{oStorage}, + $self->{oStorageCWrite}, + ) = + logDebugParam + ( + __PACKAGE__ . '->new', \@_, + {name => 'oStorage'}, + {name => 'oStorageCWrite'}, + ); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'self', value => $self} + ); +} + +#################################################################################################################################### +# Open the file +#################################################################################################################################### +sub open +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->open'); + + $self->{oStorageCWrite}->open(); + + return logDebugReturn + ( + $strOperation, + {name => 'bResult', value => true, trace => true}, + ); +} + +#################################################################################################################################### +# Write data +#################################################################################################################################### +sub write +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ( + $strOperation, + $rtBuffer, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->write', \@_, + {name => 'rtBuffer'}, + ); + + # Return from function and log return values if any + return logDebugReturn + ( + $strOperation, + {name => 'iActualSize', value => $self->{oStorageCWrite}->write($$rtBuffer)} + ); +} + +#################################################################################################################################### +# Close the file +#################################################################################################################################### +sub close +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->close'); + + $self->{oStorageCWrite}->close(); + + return logDebugReturn + ( + $strOperation, + {name => 'bResult', value => true, trace => true}, + ); +} + +#################################################################################################################################### +# Get a filter result +#################################################################################################################################### +sub result +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my + ( + $strOperation, + $strClass, + ) = + logDebugParam + ( + __PACKAGE__ . 
'->result', \@_, + {name => 'strClass'}, + ); + + my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCWrite}->result($strClass)); + + return logDebugReturn + ( + $strOperation, + {name => 'xResult', value => $xResult, trace => true}, + ); +} + +#################################################################################################################################### +# Get all filter results +#################################################################################################################################### +sub resultAll +{ + my $self = shift; + + # Assign function parameters, defaults, and log debug info + my ($strOperation) = logDebugParam(__PACKAGE__ . '->resultAll'); + + my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCWrite}->resultAll()); + + return logDebugReturn + ( + $strOperation, + {name => 'xResultAll', value => $xResult, trace => true}, + ); +} + +1; diff -Nru pgbackrest-2.15.1/lib/pgBackRest/Version.pm pgbackrest-2.16/lib/pgBackRest/Version.pm --- pgbackrest-2.15.1/lib/pgBackRest/Version.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/lib/pgBackRest/Version.pm 2019-08-05 16:03:04.000000000 +0000 @@ -39,7 +39,7 @@ # Defines the current version of the BackRest executable. The version number is used to track features but does not affect what # repositories or manifests can be read - that's the job of the format number. 
#----------------------------------------------------------------------------------------------------------------------------------- -use constant PROJECT_VERSION => '2.15'; +use constant PROJECT_VERSION => '2.16'; push @EXPORT, qw(PROJECT_VERSION); # Repository Format Number diff -Nru pgbackrest-2.15.1/libc/build/lib/pgBackRestLibC/Build.pm pgbackrest-2.16/libc/build/lib/pgBackRestLibC/Build.pm --- pgbackrest-2.15.1/libc/build/lib/pgBackRestLibC/Build.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/build/lib/pgBackRestLibC/Build.pm 2019-08-05 16:03:04.000000000 +0000 @@ -19,8 +19,6 @@ use pgBackRest::Common::Log; use pgBackRest::Common::String; -use pgBackRest::Storage::Local; -use pgBackRest::Storage::Posix::Driver; use pgBackRest::Version; use pgBackRestBuild::Build; @@ -28,6 +26,9 @@ use pgBackRestBuild::Config::Data; use pgBackRestBuild::Error::Data; +use pgBackRestTest::Common::Storage; +use pgBackRestTest::Common::StoragePosix; + #################################################################################################################################### # Perl function and constant exports #################################################################################################################################### @@ -74,8 +75,6 @@ { &BLD_EXPORTTYPE_SUB => [qw( pageChecksum - pageChecksumBufferTest - pageChecksumTest )], }, @@ -149,7 +148,7 @@ 'storage' => { &BLD_EXPORTTYPE_SUB => [qw( - storagePosixPathRemove + storageRepoFree )], }, @@ -172,8 +171,8 @@ my @stryBuilt; # Storage - my $oStorage = new pgBackRest::Storage::Local( - $strBuildPath, new pgBackRest::Storage::Posix::Driver({bFileSync => false, bPathSync => false})); + my $oStorage = new pgBackRestTest::Common::Storage( + $strBuildPath, new pgBackRestTest::Common::StoragePosix({bFileSync => false, bPathSync => false})); # Build interface file my $strContent = diff -Nru pgbackrest-2.15.1/libc/LibC.h pgbackrest-2.16/libc/LibC.h --- pgbackrest-2.15.1/libc/LibC.h 2019-06-25 
12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/LibC.h 2019-08-05 16:03:04.000000000 +0000 @@ -181,3 +181,15 @@ ***********************************************************************************************************************************/ #define MEM_CONTEXT_XS_DESTROY(memContext) \ memContextFree(memContext) + +/*********************************************************************************************************************************** +Create new string from an SV +***********************************************************************************************************************************/ +#define STR_NEW_SV(param) \ + (SvOK(param) ? strNewN(SvPV_nolen(param), SvCUR(param)) : NULL) + +/*********************************************************************************************************************************** +Create const buffer from an SV +***********************************************************************************************************************************/ +#define BUF_CONST_SV(param) \ + (SvOK(param) ? BUF(SvPV_nolen(param), SvCUR(param)) : NULL) diff -Nru pgbackrest-2.15.1/libc/LibC.xs pgbackrest-2.16/libc/LibC.xs --- pgbackrest-2.15.1/libc/LibC.xs 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/LibC.xs 2019-08-05 16:03:04.000000000 +0000 @@ -50,6 +50,7 @@ ***********************************************************************************************************************************/ #include "common/crypto/common.h" #include "common/error.h" +#include "common/io/io.h" #include "common/lock.h" #include "config/config.h" #include "config/define.h" @@ -69,9 +70,12 @@ These includes define data structures that are required for the C to Perl interface but are not part of the regular C source. 
***********************************************************************************************************************************/ -#include "xs/crypto/cipherBlock.xsh" #include "xs/crypto/hash.xsh" #include "xs/common/encode.xsh" +#include "xs/postgres/client.xsh" +#include "xs/storage/storage.xsh" +#include "xs/storage/storageRead.xsh" +#include "xs/storage/storageWrite.xsh" /*********************************************************************************************************************************** Module definition @@ -97,8 +101,10 @@ INCLUDE: xs/config/config.xs INCLUDE: xs/config/configTest.xs INCLUDE: xs/config/define.xs -INCLUDE: xs/crypto/cipherBlock.xs INCLUDE: xs/crypto/hash.xs INCLUDE: xs/crypto/random.xs +INCLUDE: xs/postgres/client.xs INCLUDE: xs/postgres/pageChecksum.xs INCLUDE: xs/storage/storage.xs +INCLUDE: xs/storage/storageRead.xs +INCLUDE: xs/storage/storageWrite.xs diff -Nru pgbackrest-2.15.1/libc/Makefile.PL pgbackrest-2.16/libc/Makefile.PL --- pgbackrest-2.15.1/libc/Makefile.PL 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/Makefile.PL 2019-08-05 16:03:04.000000000 +0000 @@ -59,10 +59,17 @@ 'common/io/filter/buffer.c', 'common/io/filter/filter.c', 'common/io/filter/group.c', + 'common/io/filter/sink.c', 'common/io/filter/size.c', 'common/io/handleWrite.c', + 'common/io/http/cache.c', + 'common/io/http/client.c', + 'common/io/http/common.c', + 'common/io/http/header.c', + 'common/io/http/query.c', 'common/io/io.c', 'common/io/read.c', + 'common/io/tls/client.c', 'common/io/write.c', 'common/lock.c', 'common/log.c', @@ -86,10 +93,20 @@ 'config/load.c', 'config/parse.c', 'perl/config.c', + 'protocol/client.c', + 'protocol/command.c', + 'protocol/helper.c', + 'protocol/parallel.c', + 'protocol/parallelJob.c', + 'protocol/server.c', + 'postgres/client.c', 'postgres/pageChecksum.c', 'storage/posix/read.c', 'storage/posix/storage.c', 'storage/posix/write.c', + 'storage/s3/read.c', + 'storage/s3/storage.c', + 
'storage/s3/write.c', 'storage/helper.c', 'storage/read.c', 'storage/storage.c', @@ -116,6 +133,7 @@ -D_POSIX_C_SOURCE=200112L -D_FILE_OFFSET_BITS=64 `xml2-config --cflags` + -I`pg_config --includedir` )), INC => join(' ', qw( @@ -125,7 +143,7 @@ C => \@stryCFile, - LIBS => '-lcrypto -lssl -lxml2', + LIBS => '-lcrypto -lpq -lssl -lxml2', OBJECT => '$(O_FILES)', ); diff -Nru pgbackrest-2.15.1/libc/typemap pgbackrest-2.16/libc/typemap --- pgbackrest-2.15.1/libc/typemap 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/typemap 2019-08-05 16:03:04.000000000 +0000 @@ -1,2 +1,4 @@ -pgBackRest::LibC::Cipher::Block T_PTROBJ -pgBackRest::LibC::Crypto::Hash T_PTROBJ +pgBackRest::LibC::PgClient T_PTROBJ +pgBackRest::LibC::Storage T_PTROBJ +pgBackRest::LibC::StorageRead T_PTROBJ +pgBackRest::LibC::StorageWrite T_PTROBJ diff -Nru pgbackrest-2.15.1/libc/xs/crypto/cipherBlock.xs pgbackrest-2.16/libc/xs/crypto/cipherBlock.xs --- pgbackrest-2.15.1/libc/xs/crypto/cipherBlock.xs 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/crypto/cipherBlock.xs 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -#################################################################################################################################### -# Block Cipher Perl Exports -# -# XS wrapper for functions in cipher/block.c. 
-#################################################################################################################################### - -MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::Cipher::Block - -#################################################################################################################################### -pgBackRest::LibC::Cipher::Block -new(class, mode, type, key, keySize, digest = NULL) - const char *class - U32 mode - const char *type - unsigned char *key - I32 keySize - const char *digest -CODE: - RETVAL = NULL; - - CHECK(type != NULL); - CHECK(key != NULL); - CHECK(keySize != 0); - - // Not much point to this but it keeps the var from being unused - if (strcmp(class, PACKAGE_NAME_LIBC "::Cipher::Block") != 0) - croak("unexpected class name '%s'", class); - - MEM_CONTEXT_XS_NEW_BEGIN("cipherBlockXs") - { - RETVAL = memNew(sizeof(CipherBlockXs)); - RETVAL->memContext = MEM_CONTEXT_XS(); - - RETVAL->pxPayload = cipherBlockNew(mode, cipherType(STR(type)), BUF(key, keySize), digest == NULL ? 
NULL : STR(digest)); - } - MEM_CONTEXT_XS_NEW_END(); -OUTPUT: - RETVAL - -#################################################################################################################################### -SV * -process(self, source) - pgBackRest::LibC::Cipher::Block self - SV *source -CODE: - RETVAL = NULL; - - MEM_CONTEXT_XS_BEGIN(self->memContext) - { - STRLEN tSize; - const unsigned char *sourcePtr = (const unsigned char *)SvPV(source, tSize); - - RETVAL = NEWSV(0, ioBufferSize()); - SvPOK_only(RETVAL); - - if (tSize > 0) - { - size_t outBufferUsed = 0; - - do - { - SvGROW(RETVAL, outBufferUsed + ioBufferSize()); - Buffer *outBuffer = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL) + outBufferUsed, ioBufferSize()); - - ioFilterProcessInOut(self->pxPayload, BUF(sourcePtr, tSize), outBuffer); - outBufferUsed += bufUsed(outBuffer); - } - while (ioFilterInputSame(self->pxPayload)); - - SvCUR_set(RETVAL, outBufferUsed); - } - else - SvCUR_set(RETVAL, 0); - } - MEM_CONTEXT_XS_END(); -OUTPUT: - RETVAL - -#################################################################################################################################### -SV * -flush(self) - pgBackRest::LibC::Cipher::Block self -CODE: - RETVAL = NULL; - - MEM_CONTEXT_XS_BEGIN(self->memContext) - { - RETVAL = NEWSV(0, ioBufferSize()); - SvPOK_only(RETVAL); - - size_t outBufferUsed = 0; - - do - { - SvGROW(RETVAL, outBufferUsed + ioBufferSize()); - Buffer *outBuffer = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL) + outBufferUsed, ioBufferSize()); - - ioFilterProcessInOut(self->pxPayload, NULL, outBuffer); - outBufferUsed += bufUsed(outBuffer); - } - while (!ioFilterDone(self->pxPayload)); - - SvCUR_set(RETVAL, outBufferUsed); - } - MEM_CONTEXT_XS_END(); -OUTPUT: - RETVAL - -#################################################################################################################################### -void -DESTROY(self) - pgBackRest::LibC::Cipher::Block self -CODE: - 
MEM_CONTEXT_XS_DESTROY(self->memContext); diff -Nru pgbackrest-2.15.1/libc/xs/crypto/cipherBlock.xsh pgbackrest-2.16/libc/xs/crypto/cipherBlock.xsh --- pgbackrest-2.15.1/libc/xs/crypto/cipherBlock.xsh 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/crypto/cipherBlock.xsh 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -/*********************************************************************************************************************************** -Block Cipher XS Header -***********************************************************************************************************************************/ -#include "common/assert.h" -#include "common/crypto/cipherBlock.h" -#include "common/io/io.h" -#include "common/memContext.h" - -// Encrypt/decrypt modes -#define CIPHER_MODE_ENCRYPT ((int)cipherModeEncrypt) -#define CIPHER_MODE_DECRYPT ((int)cipherModeDecrypt) - -typedef struct CipherBlockXs -{ - MemContext *memContext; - IoFilter *pxPayload; -} CipherBlockXs, *pgBackRest__LibC__Cipher__Block; diff -Nru pgbackrest-2.15.1/libc/xs/crypto/hash.xs pgbackrest-2.16/libc/xs/crypto/hash.xs --- pgbackrest-2.15.1/libc/xs/crypto/hash.xs 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/crypto/hash.xs 2019-08-05 16:03:04.000000000 +0000 @@ -4,72 +4,6 @@ # XS wrapper for functions in cipher/hash.c. 
#################################################################################################################################### -MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::Crypto::Hash - -#################################################################################################################################### -pgBackRest::LibC::Crypto::Hash -new(class, type) - const char *class - const char *type -CODE: - RETVAL = NULL; - - // Don't warn when class param is used - (void)class; - - MEM_CONTEXT_XS_NEW_BEGIN("cryptoHashXs") - { - RETVAL = memNew(sizeof(CryptoHashXs)); - RETVAL->memContext = MEM_CONTEXT_XS(); - RETVAL->pxPayload = cryptoHashNew(strNew(type)); - } - MEM_CONTEXT_XS_NEW_END(); -OUTPUT: - RETVAL - -#################################################################################################################################### -void -process(self, message) - pgBackRest::LibC::Crypto::Hash self - SV *message -CODE: - MEM_CONTEXT_XS_TEMP_BEGIN() - { - STRLEN messageSize; - const void *messagePtr = SvPV(message, messageSize); - - if (messageSize > 0) - ioFilterProcessIn(self->pxPayload, BUF(messagePtr, messageSize)); - } - MEM_CONTEXT_XS_TEMP_END(); - -#################################################################################################################################### -SV * -result(self) - pgBackRest::LibC::Crypto::Hash self -CODE: - RETVAL = NULL; - - MEM_CONTEXT_XS_TEMP_BEGIN() - { - const String *hash = varStr(ioFilterResult(self->pxPayload)); - - RETVAL = newSV(strSize(hash)); - SvPOK_only(RETVAL); - strcpy((char *)SvPV_nolen(RETVAL), strPtr(hash)); - SvCUR_set(RETVAL, strSize(hash)); - } - MEM_CONTEXT_XS_TEMP_END(); -OUTPUT: - RETVAL - -#################################################################################################################################### -void -DESTROY(self) - pgBackRest::LibC::Crypto::Hash self -CODE: - MEM_CONTEXT_XS_DESTROY(self->memContext); - MODULE = pgBackRest::LibC PACKAGE = 
pgBackRest::LibC #################################################################################################################################### diff -Nru pgbackrest-2.15.1/libc/xs/crypto/hash.xsh pgbackrest-2.16/libc/xs/crypto/hash.xsh --- pgbackrest-2.15.1/libc/xs/crypto/hash.xsh 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/crypto/hash.xsh 2019-08-05 16:03:04.000000000 +0000 @@ -2,11 +2,3 @@ Cryptographic Hashes XS Header ***********************************************************************************************************************************/ #include "common/crypto/hash.h" -#include "common/io/filter/filter.intern.h" -#include "common/memContext.h" - -typedef struct CryptoHashXs -{ - MemContext *memContext; - IoFilter *pxPayload; -} CryptoHashXs, *pgBackRest__LibC__Crypto__Hash; diff -Nru pgbackrest-2.15.1/libc/xs/postgres/client.xs pgbackrest-2.16/libc/xs/postgres/client.xs --- pgbackrest-2.15.1/libc/xs/postgres/client.xs 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/postgres/client.xs 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,89 @@ +# ---------------------------------------------------------------------------------------------------------------------------------- +# PostgreSQL Query Client +# ---------------------------------------------------------------------------------------------------------------------------------- + +MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::PgClient + +#################################################################################################################################### +pgBackRest::LibC::PgClient +new(class, host, port, database, queryTimeout) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + const String *class = STR_NEW_SV($arg); + const String *host = STR_NEW_SV($arg); + U32 port + const String *database = STR_NEW_SV($arg); + UV queryTimeout +CODE: + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::PgClient")); + + memContextSwitch(MEM_CONTEXT_XS_OLD()); 
+ RETVAL = pgClientNew(host, port, database, NULL, queryTimeout); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +open(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::PgClient self +CODE: + pgClientOpen(self); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +query(self, query) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::PgClient self + const String *query = STR_NEW_SV($arg); +CODE: + VariantList *result = pgClientQuery(self, query); + RETVAL = result ? strPtr(jsonFromVar(varNewVarLst(result), 0)) : NULL; +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +close(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::PgClient self +CODE: + pgClientClose(self); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +DESTROY(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::PgClient self +CODE: + pgClientFree(self); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); diff -Nru pgbackrest-2.15.1/libc/xs/postgres/client.xsh pgbackrest-2.16/libc/xs/postgres/client.xsh --- pgbackrest-2.15.1/libc/xs/postgres/client.xsh 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/postgres/client.xsh 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,6 @@ 
+/*********************************************************************************************************************************** +PostgreSQL Query Client Header +***********************************************************************************************************************************/ +#include "postgres/client.h" + +typedef PgClient *pgBackRest__LibC__PgClient; diff -Nru pgbackrest-2.15.1/libc/xs/postgres/pageChecksum.xs pgbackrest-2.16/libc/xs/postgres/pageChecksum.xs --- pgbackrest-2.15.1/libc/xs/postgres/pageChecksum.xs 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/postgres/pageChecksum.xs 2019-08-05 16:03:04.000000000 +0000 @@ -20,42 +20,3 @@ ERROR_XS_END(); OUTPUT: RETVAL - -bool -pageChecksumTest(page, blockNo, pageSize, ignoreWalId, ignoreWalOffset) - const char *page - U32 blockNo - U32 pageSize - U32 ignoreWalId - U32 ignoreWalOffset -CODE: - RETVAL = false; - - ERROR_XS_BEGIN() - { - RETVAL = pageChecksumTest( - (const unsigned char *)page, blockNo, pageSize, ignoreWalId, ignoreWalOffset); - } - ERROR_XS_END(); -OUTPUT: - RETVAL - -bool -pageChecksumBufferTest(pageBuffer, pageBufferSize, blockNoBegin, pageSize, ignoreWalId, ignoreWalOffset) - const char *pageBuffer - U32 pageBufferSize - U32 blockNoBegin - U32 pageSize - U32 ignoreWalId - U32 ignoreWalOffset -CODE: - RETVAL = false; - - ERROR_XS_BEGIN() - { - RETVAL = pageChecksumBufferTest( - (const unsigned char *)pageBuffer, pageBufferSize, blockNoBegin, pageSize, ignoreWalId, ignoreWalOffset); - } - ERROR_XS_END(); -OUTPUT: - RETVAL diff -Nru pgbackrest-2.15.1/libc/xs/storage/storageRead.xs pgbackrest-2.16/libc/xs/storage/storageRead.xs --- pgbackrest-2.15.1/libc/xs/storage/storageRead.xs 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storageRead.xs 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,159 @@ +# ---------------------------------------------------------------------------------------------------------------------------------- +# Storage 
Read Exports +# ---------------------------------------------------------------------------------------------------------------------------------- + +MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::StorageRead + +#################################################################################################################################### +pgBackRest::LibC::StorageRead +new(class, storage, file, ignoreMissing) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + const String *class = STR_NEW_SV($arg); + pgBackRest::LibC::Storage storage + const String *file = STR_NEW_SV($arg); + bool ignoreMissing +CODE: + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::StorageRead")); + + RETVAL = storageReadMove(storageNewReadP(storage, file, .ignoreMissing = ignoreMissing), MEM_CONTEXT_XS_OLD()); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +filterAdd(self, filter, param) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self + const String *filter = STR_NEW_SV($arg); + const String *param = STR_NEW_SV($arg); +CODE: + IoFilterGroup *filterGroup = ioReadFilterGroup(storageReadIo(self)); + storageFilterXsAdd(filterGroup, filter, param); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +open(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self +CODE: + RETVAL = ioReadOpen(storageReadIo(self)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +read(self, bufferSize) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self + U32 
bufferSize +CODE: + RETVAL = NEWSV(0, bufferSize); + SvPOK_only(RETVAL); + + Buffer *bufferRead = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL), bufferSize); + ioRead(storageReadIo(self), bufferRead); + + SvCUR_set(RETVAL, bufUsed(bufferRead)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +eof(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self +CODE: + RETVAL = ioReadEof(storageReadIo(self)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +close(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self +CODE: + ioReadClose(storageReadIo(self)); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +result(self, filter) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self + const String *filter = STR_NEW_SV($arg); +CODE: + RETVAL = strPtr(storageFilterXsResult(ioReadFilterGroup(storageReadIo(self)), filter)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +resultAll(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self +CODE: + RETVAL = strPtr(storageFilterXsResultAll(ioReadFilterGroup(storageReadIo(self)))); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### 
+void +DESTROY(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead self +CODE: + storageReadFree(self); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); diff -Nru pgbackrest-2.15.1/libc/xs/storage/storageRead.xsh pgbackrest-2.16/libc/xs/storage/storageRead.xsh --- pgbackrest-2.15.1/libc/xs/storage/storageRead.xsh 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storageRead.xsh 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,4 @@ +/*********************************************************************************************************************************** +Storage Read XS Header +***********************************************************************************************************************************/ +typedef StorageRead *pgBackRest__LibC__StorageRead; diff -Nru pgbackrest-2.15.1/libc/xs/storage/storageWrite.xs pgbackrest-2.16/libc/xs/storage/storageWrite.xs --- pgbackrest-2.15.1/libc/xs/storage/storageWrite.xs 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storageWrite.xs 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,146 @@ +# ---------------------------------------------------------------------------------------------------------------------------------- +# Storage Write Exports +# ---------------------------------------------------------------------------------------------------------------------------------- + +MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::StorageWrite + +#################################################################################################################################### +pgBackRest::LibC::StorageWrite +new(class, storage, file, mode, user, group, timeModified, atomic, pathCreate) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + const String *class = STR_NEW_SV($arg); + pgBackRest::LibC::Storage storage + const String *file = STR_NEW_SV($arg); + U32 mode + const String *user = STR_NEW_SV($arg); + const String *group = 
STR_NEW_SV($arg); + IV timeModified + bool atomic + bool pathCreate +CODE: + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::StorageWrite")); + + RETVAL = storageWriteMove( + storageNewWriteP( + storage, file, .modeFile = mode, .user = user, .group = group, .timeModified = (time_t)timeModified, + .noCreatePath = storageFeature(storage, storageFeaturePath) ? !pathCreate : false, .noSyncPath = !atomic, + .noAtomic = !atomic), + MEM_CONTEXT_XS_OLD()); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +filterAdd(self, filter, param) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self + const String *filter = STR_NEW_SV($arg); + const String *param = STR_NEW_SV($arg); +CODE: + IoFilterGroup *filterGroup = ioWriteFilterGroup(storageWriteIo(self)); + storageFilterXsAdd(filterGroup, filter, param); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +open(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self +CODE: + ioWriteOpen(storageWriteIo(self)); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +UV +write(self, buffer) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self + const Buffer *buffer = BUF_CONST_SV($arg); +CODE: + ioWrite(storageWriteIo(self), buffer); + RETVAL = bufUsed(buffer); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +close(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: 
+ pgBackRest::LibC::StorageWrite self +CODE: + ioWriteClose(storageWriteIo(self)); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +result(self, filter) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self + const String *filter = STR_NEW_SV($arg); +CODE: + RETVAL = strPtr(storageFilterXsResult(ioWriteFilterGroup(storageWriteIo(self)), filter)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +resultAll(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self +CODE: + RETVAL = strPtr(storageFilterXsResultAll(ioWriteFilterGroup(storageWriteIo(self)))); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +DESTROY(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite self +CODE: + storageWriteFree(self); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); diff -Nru pgbackrest-2.15.1/libc/xs/storage/storageWrite.xsh pgbackrest-2.16/libc/xs/storage/storageWrite.xsh --- pgbackrest-2.15.1/libc/xs/storage/storageWrite.xsh 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storageWrite.xsh 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,4 @@ +/*********************************************************************************************************************************** +Storage Write XS Header +***********************************************************************************************************************************/ +typedef StorageWrite *pgBackRest__LibC__StorageWrite; diff -Nru 
pgbackrest-2.15.1/libc/xs/storage/storage.xs pgbackrest-2.16/libc/xs/storage/storage.xs --- pgbackrest-2.15.1/libc/xs/storage/storage.xs 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storage.xs 2019-08-05 16:03:04.000000000 +0000 @@ -2,19 +2,426 @@ # Storage Exports # ---------------------------------------------------------------------------------------------------------------------------------- -MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC +MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC::Storage + +#################################################################################################################################### +pgBackRest::LibC::Storage +new(class, type, path) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + const String *class = STR_NEW_SV($arg); + const String *type = STR_NEW_SV($arg); + const String *path = STR_NEW_SV($arg); +CODE: + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::Storage")); + + if (strEqZ(type, "")) + { + memContextSwitch(MEM_CONTEXT_XS_OLD()); + RETVAL = storagePosixNew( + path == NULL ? 
STRDEF("/") : path, STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); + storagePathEnforceSet((Storage *)RETVAL, false); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); + } + else if (strEqZ(type, "")) + { + CHECK(path == NULL); + RETVAL = (Storage *)storageRepoWrite(); + } + else if (strEqZ(type, "")) + { + CHECK(path == NULL); + + memContextSwitch(MEM_CONTEXT_XS_OLD()); + RETVAL = storagePosixNew(cfgOptionStr(cfgOptPgPath), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); + storagePathEnforceSet((Storage *)RETVAL, false); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); + } + else + THROW_FMT(AssertError, "unexpected storage type '%s'", strPtr(type)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +bucketCreate(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self +CODE: + if (strEq(storageType(self), STORAGE_S3_TYPE_STR)) + storageS3Request((StorageS3 *)storageDriver(self), HTTP_VERB_PUT_STR, FSLASH_STR, NULL, NULL, true, false); + else + THROW_FMT(AssertError, "unable to create bucket on '%s' storage", strPtr(storageType(self))); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +copy(self, source, destination) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead source + pgBackRest::LibC::StorageWrite destination +CODE: + RETVAL = storageCopyNP(source, destination); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +exists(self, fileExp) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const 
String *fileExp = STR_NEW_SV($arg); +CODE: + RETVAL = storageExistsNP(self, fileExp); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +get(self, read) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead read +CODE: + RETVAL = NULL; + Buffer *buffer = storageGetNP(read); + + if (buffer != NULL) + { + if (bufUsed(buffer) == 0) + RETVAL = newSVpv("", 0); + else + RETVAL = newSVpv((char *)bufPtr(buffer), bufUsed(buffer)); + } +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +info(self, pathExp, ignoreMissing) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); + bool ignoreMissing +CODE: + RETVAL = NULL; + + StorageInfo info = storageInfoP(self, pathExp, .ignoreMissing = ignoreMissing); + + if (info.exists) + { + String *json = storageManifestXsInfo(NULL, &info); + RETVAL = newSVpv((char *)strPtr(json), strSize(json)); + } +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +list(self, pathExp, ignoreMissing, sortAsc, expression) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); + bool ignoreMissing + bool sortAsc + const String *expression = STR_NEW_SV($arg); +CODE: + StringList *fileList = strLstSort( + storageListP(self, pathExp, .errorOnMissing = storageFeature(self, storageFeaturePath) ? !ignoreMissing : false, + .expression = expression), sortAsc ? 
sortOrderAsc : sortOrderDesc); + + const String *fileListJson = jsonFromVar(varNewVarLst(varLstNewStrLst(fileList)), 0); + + RETVAL = newSVpv(strPtr(fileListJson), strSize(fileListJson)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +manifest(self, pathExp, filter=NULL) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); + const String *filter = STR_NEW_SV($arg); +CODE: + StorageManifestXsCallbackData data = {.storage = self, .json = strNew("{"), .pathRoot = pathExp, .filter = filter}; + + // If a path is specified + StorageInfo info = storageInfoP(self, pathExp, .ignoreMissing = true); + + if (!info.exists || info.type == storageTypePath) + { + storageInfoListP( + self, data.pathRoot, storageManifestXsCallback, &data, + .errorOnMissing = storageFeature(self, storageFeaturePath) ? true : false); + } + // Else a file is specified + else + { + info.name = strBase(storagePath(self, pathExp)); + strCat(data.json, strPtr(storageManifestXsInfo(NULL, &info))); + } + + strCat(data.json, "}"); + + RETVAL = newSVpv((char *)strPtr(data.json), strSize(data.json)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +pathCreate(self, pathExp, mode, ignoreExists, createParent) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); + const String *mode = STR_NEW_SV($arg); + bool ignoreExists + bool createParent +CODE: + if (storageFeature(self, storageFeaturePath)) + storagePathCreateP( + self, pathExp, .mode = mode ? 
cvtZToIntBase(strPtr(mode), 8) : 0, .errorOnExists = !ignoreExists, + .noParentCreate = !createParent); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +pathExists(self, pathExp) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); +CODE: + RETVAL = true; + + if (storageFeature(self, storageFeaturePath)) + RETVAL = storagePathExistsNP(self, pathExp); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +SV * +pathGet(self, pathExp) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); +CODE: + String *path = storagePathNP(self, pathExp); + RETVAL = newSVpv((char *)strPtr(path), strSize(path)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); #################################################################################################################################### void -storagePosixPathRemove(path, errorOnMissing, recurse) - const char *path - bool errorOnMissing +pathRemove(self, pathExp, ignoreMissing, recurse) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); + bool ignoreMissing bool recurse CODE: + storagePathRemoveP( + self, pathExp, .errorOnMissing = storageFeature(self, storageFeaturePath) ? 
!ignoreMissing : false, .recurse = recurse); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +pathSync(self, pathExp) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *pathExp = STR_NEW_SV($arg); +CODE: + storagePathSyncNP(self, pathExp); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +UV +put(self, write, buffer) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageWrite write + const Buffer *buffer = BUF_CONST_SV($arg); +CODE: + storagePutNP(write, buffer); + RETVAL = buffer ? bufUsed(buffer) : 0; +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +bool +readDrain(self, read) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::StorageRead read +CODE: + RETVAL = false; + + // Read and discard all IO (this is useful for processing filters) + if (ioReadOpen(storageReadIo(read))) + { + Buffer *buffer = bufNew(ioBufferSize()); + + do + { + ioRead(storageReadIo(read), buffer); + bufUsedZero(buffer); + } + while (!ioReadEof(storageReadIo(read))); + + ioReadClose(storageReadIo(read)); + RETVAL = true; + } +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +void +remove(self, fileExp, ignoreMissing) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self + const String *fileExp = STR_NEW_SV($arg); + bool ignoreMissing +CODE: + storageRemoveP(self, fileExp, .errorOnMissing = storageFeature(self, 
storageFeaturePath) ? !ignoreMissing : false); +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +cipherType(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +CODE: + if (cfgOptionStr(cfgOptRepoCipherType) == NULL || cipherType(cfgOptionStr(cfgOptRepoCipherType)) == cipherTypeNone) + RETVAL = NULL; + else + RETVAL = strPtr(cfgOptionStr(cfgOptRepoCipherType)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +cipherPass(self) +PREINIT: MEM_CONTEXT_XS_TEMP_BEGIN() { - storagePathRemoveP( - storagePosixNew(strNew("/"), 0640, 750, true, NULL), strNew(path), .errorOnMissing = errorOnMissing, - .recurse = recurse); +CODE: + RETVAL = strPtr(cfgOptionStr(cfgOptRepoCipherPass)); +OUTPUT: + RETVAL +CLEANUP: } MEM_CONTEXT_XS_TEMP_END(); + +#################################################################################################################################### +const char * +type(self) +PREINIT: + MEM_CONTEXT_XS_TEMP_BEGIN() + { +INPUT: + pgBackRest::LibC::Storage self +CODE: + RETVAL = strPtr(storageType(self)); +OUTPUT: + RETVAL +CLEANUP: + } + MEM_CONTEXT_XS_TEMP_END(); + +MODULE = pgBackRest::LibC PACKAGE = pgBackRest::LibC + +#################################################################################################################################### +void +storageRepoFree() +CODE: + storageHelperFree(); diff -Nru pgbackrest-2.15.1/libc/xs/storage/storage.xsh pgbackrest-2.16/libc/xs/storage/storage.xsh --- pgbackrest-2.15.1/libc/xs/storage/storage.xsh 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/libc/xs/storage/storage.xsh 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,206 @@ 
+/*********************************************************************************************************************************** +Storage XS Header +***********************************************************************************************************************************/ +#include "common/assert.h" +#include "common/compress/gzip/compress.h" +#include "common/compress/gzip/decompress.h" +#include "common/crypto/cipherBlock.h" +#include "common/io/filter/size.h" +#include "common/memContext.h" +#include "common/type/convert.h" +#include "common/type/json.h" +#include "postgres/interface.h" +#include "storage/helper.h" +#include "storage/s3/storage.intern.h" +#include "storage/storage.intern.h" + +typedef Storage *pgBackRest__LibC__Storage; + +/*********************************************************************************************************************************** +Manifest callback +***********************************************************************************************************************************/ +typedef struct StorageManifestXsCallbackData +{ + const Storage *storage; + const String *pathRoot; + const String *path; + String *json; + const String *filter; +} StorageManifestXsCallbackData; + +String * +storageManifestXsInfo(const String *path, const StorageInfo *info) +{ + String *json = strNew(""); + + if (info->name != NULL) + { + strCatFmt( + json, "%s:", strPtr(jsonFromStr(path == NULL ? 
info->name : strNewFmt("%s/%s", strPtr(path), strPtr(info->name))))); + } + + strCatFmt(json, "{\"group\":%s,\"user\":%s,\"type\":\"", strPtr(jsonFromStr(info->group)), strPtr(jsonFromStr(info->user))); + + switch (info->type) + { + case storageTypeFile: + { + strCatFmt( + json, "f\",\"mode\":\"%04o\",\"modification_time\":%" PRId64 ",\"size\":%" PRIu64 "}", info->mode, + (int64_t)info->timeModified, info->size); + break; + } + + case storageTypeLink: + { + strCatFmt(json, "l\",\"link_destination\":%s}", strPtr(jsonFromStr(info->linkDestination))); + break; + } + + case storageTypePath: + { + strCatFmt(json, "d\",\"mode\":\"%04o\"}", info->mode); + break; + } + } + + return json; +} + +void +storageManifestXsCallback(void *callbackData, const StorageInfo *info) +{ + StorageManifestXsCallbackData *data = (StorageManifestXsCallbackData *)callbackData; + + if (data->path == NULL || !strEqZ(info->name, ".")) + { + if (!strEqZ(info->name, ".") && data->filter && !strEq(data->filter, info->name)) + return; + + if (strSize(data->json) != 1) + strCat(data->json, ","); + + strCat(data->json, strPtr(storageManifestXsInfo(data->path, info))); + + if (info->type == storageTypePath) + { + if (!strEqZ(info->name, ".")) + { + StorageManifestXsCallbackData dataSub = + { + .storage = data->storage, + .json = data->json, + .pathRoot = data->pathRoot, + .path = data->path == NULL ? 
info->name : strNewFmt("%s/%s", strPtr(data->path), strPtr(info->name)), + }; + + storageInfoListNP( + dataSub.storage, strNewFmt("%s/%s", strPtr(dataSub.pathRoot), strPtr(dataSub.path)), storageManifestXsCallback, + &dataSub); + } + } + } +} + +/*********************************************************************************************************************************** +Add IO filter +***********************************************************************************************************************************/ +void +storageFilterXsAdd(IoFilterGroup *filterGroup, const String *filter, const String *paramJson) +{ + VariantList *paramList = paramJson ? jsonToVarLst(paramJson) : NULL; + + if (strEqZ(filter, "pgBackRest::Storage::Filter::CipherBlock")) + { + ioFilterGroupAdd( + filterGroup, + cipherBlockNew( + varUInt64Force(varLstGet(paramList, 0)) ? cipherModeEncrypt : cipherModeDecrypt, + cipherType(varStr(varLstGet(paramList, 1))), BUFSTR(varStr(varLstGet(paramList, 2))), NULL)); + } + else if (strEqZ(filter, "pgBackRest::Storage::Filter::Sha")) + { + ioFilterGroupAdd(filterGroup, cryptoHashNew(HASH_TYPE_SHA1_STR)); + } + else if (strEqZ(filter, "pgBackRest::Common::Io::Handle")) + { + ioFilterGroupAdd(filterGroup, ioSizeNew()); + } + else if (strEqZ(filter, "pgBackRest::Storage::Filter::Gzip")) + { + if (strEqZ(varStr(varLstGet(paramList, 0)), "compress")) + { + ioFilterGroupAdd( + filterGroup, gzipCompressNew(varUIntForce(varLstGet(paramList, 2)), varBoolForce(varLstGet(paramList, 1)))); + } + else + { + ioFilterGroupAdd(filterGroup, gzipDecompressNew(varBoolForce(varLstGet(paramList, 1)))); + } + } + else + THROW_FMT(AssertError, "unable to add invalid filter '%s'", strPtr(filter)); +} + +/*********************************************************************************************************************************** +Get result from IO filter 
+***********************************************************************************************************************************/ +String * +storageFilterXsResult(const IoFilterGroup *filterGroup, const String *filter) +{ + const Variant *result; + + if (strEqZ(filter, "pgBackRest::Storage::Filter::Sha")) + { + result = ioFilterGroupResult(filterGroup, CRYPTO_HASH_FILTER_TYPE_STR); + } + else if (strEqZ(filter, "pgBackRest::Common::Io::Handle")) + { + result = ioFilterGroupResult(filterGroup, SIZE_FILTER_TYPE_STR); + } + else + THROW_FMT(AssertError, "unable to get result for invalid filter '%s'", strPtr(filter)); + + if (result == NULL) + THROW_FMT(AssertError, "unable to find result for filter '%s'", strPtr(filter)); + + return jsonFromVar(result, 0); +} + +/*********************************************************************************************************************************** +Get results from all IO filters +***********************************************************************************************************************************/ +String * +storageFilterXsResultAll(const IoFilterGroup *filterGroup) +{ + const VariantList *filterList = kvKeyList(varKv(ioFilterGroupResultAll(filterGroup))); + String *result = strNew("{"); + + for (unsigned int filterIdx = 0; filterIdx < varLstSize(filterList); filterIdx++) + { + const String *filter = varStr(varLstGet(filterList, filterIdx)); + const String *filterPerl = NULL; + + if (strEq(filter, CRYPTO_HASH_FILTER_TYPE_STR)) + { + filterPerl = strNew("pgBackRest::Storage::Filter::Sha"); + } + else if (strEq(filter, SIZE_FILTER_TYPE_STR)) + { + filterPerl = strNew("pgBackRest::Common::Io::Handle"); + } + + if (filterPerl != NULL) + { + if (strSize(result) > 1) + strCat(result, ","); + + strCatFmt( + result, "%s:%s", strPtr(jsonFromStr(filterPerl)), strPtr(storageFilterXsResult(filterGroup, filterPerl))); + } + } + + strCat(result, "}"); + + return result; +} diff -Nru pgbackrest-2.15.1/README.md 
pgbackrest-2.16/README.md --- pgbackrest-2.15.1/README.md 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/README.md 2019-08-05 16:03:04.000000000 +0000 @@ -4,7 +4,7 @@ pgBackRest aims to be a simple, reliable backup and restore solution that can seamlessly scale up to the largest databases and workloads by utilizing algorithms that are optimized for database-specific requirements. -pgBackRest [v2.15](https://github.com/pgbackrest/pgbackrest/releases/tag/release/2.15) is the current stable release. Release notes are on the [Releases](http://www.pgbackrest.org/release.html) page. +pgBackRest [v2.16](https://github.com/pgbackrest/pgbackrest/releases/tag/release/2.16) is the current stable release. Release notes are on the [Releases](http://www.pgbackrest.org/release.html) page. Documentation for v1 can be found [here](http://www.pgbackrest.org/1). No further releases are planned for v1 because v2 is backward-compatible with v1 options and repositories. diff -Nru pgbackrest-2.15.1/src/command/archive/common.c pgbackrest-2.16/src/command/archive/common.c --- pgbackrest-2.15.1/src/command/archive/common.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/archive/common.c 2019-08-05 16:03:04.000000000 +0000 @@ -32,6 +32,8 @@ #define STATUS_FILE_GLOBAL "global" STRING_STATIC(STATUS_FILE_GLOBAL_STR, STATUS_FILE_GLOBAL); +STRING_STATIC(STATUS_FILE_GLOBAL_ERROR_STR, STATUS_FILE_GLOBAL STATUS_EXT_ERROR); + /*********************************************************************************************************************************** Get the correct spool queue based on the archive mode ***********************************************************************************************************************************/ @@ -79,7 +81,7 @@ // If that doesn't exist then check for a global error if (!errorFileExists) { - errorFile = STRDEF(STATUS_FILE_GLOBAL STATUS_EXT_ERROR); + errorFile = STATUS_FILE_GLOBAL_ERROR_STR; errorFileExists = 
storageExistsNP(storageSpool(), strNewFmt("%s/%s", strPtr(spoolQueue), strPtr(errorFile))); } } @@ -296,12 +298,13 @@ have multiple files that match the segment, though more than one match is not a good thing. ***********************************************************************************************************************************/ String * -walSegmentFind(const Storage *storage, const String *archiveId, const String *walSegment) +walSegmentFind(const Storage *storage, const String *archiveId, const String *walSegment, TimeMSec timeout) { FUNCTION_LOG_BEGIN(logLevelDebug); FUNCTION_LOG_PARAM(STORAGE, storage); FUNCTION_LOG_PARAM(STRING, archiveId); FUNCTION_LOG_PARAM(STRING, walSegment); + FUNCTION_LOG_PARAM(TIME_MSEC, timeout); FUNCTION_LOG_END(); ASSERT(storage != NULL); @@ -313,30 +316,36 @@ MEM_CONTEXT_TEMP_BEGIN() { - // Get a list of all WAL segments that match - StringList *list = storageListP( - storage, strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strPtr(archiveId), strPtr(strSubN(walSegment, 0, 16))), - .expression = strNewFmt("^%s%s-[0-f]{40}(\\.gz){0,1}$", strPtr(strSubN(walSegment, 0, 24)), - walIsPartial(walSegment) ? WAL_SEGMENT_PARTIAL_EXT : ""), .nullOnMissing = true); + Wait *wait = timeout > 0 ? waitNew(timeout) : NULL; - // If there are results - if (list != NULL && strLstSize(list) > 0) + do { - // Error if there is more than one match - if (strLstSize(list) > 1) + // Get a list of all WAL segments that match + StringList *list = storageListP( + storage, strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strPtr(archiveId), strPtr(strSubN(walSegment, 0, 16))), + .expression = strNewFmt("^%s%s-[0-f]{40}(\\.gz){0,1}$", strPtr(strSubN(walSegment, 0, 24)), + walIsPartial(walSegment) ? 
WAL_SEGMENT_PARTIAL_EXT : ""), .nullOnMissing = true); + + // If there are results + if (list != NULL && strLstSize(list) > 0) { - THROW_FMT( - ArchiveDuplicateError, - "duplicates found in archive for WAL segment %s: %s\n" - "HINT: are multiple primaries archiving to this stanza?", - strPtr(walSegment), strPtr(strLstJoin(strLstSort(list, sortOrderAsc), ", "))); - } + // Error if there is more than one match + if (strLstSize(list) > 1) + { + THROW_FMT( + ArchiveDuplicateError, + "duplicates found in archive for WAL segment %s: %s\n" + "HINT: are multiple primaries archiving to this stanza?", + strPtr(walSegment), strPtr(strLstJoin(strLstSort(list, sortOrderAsc), ", "))); + } - // Copy file name of WAL segment found into the calling context - memContextSwitch(MEM_CONTEXT_OLD()); - result = strDup(strLstGet(list, 0)); - memContextSwitch(MEM_CONTEXT_TEMP()); + // Copy file name of WAL segment found into the calling context + memContextSwitch(MEM_CONTEXT_OLD()); + result = strDup(strLstGet(list, 0)); + memContextSwitch(MEM_CONTEXT_TEMP()); + } } + while (result == NULL && wait != NULL && waitMore(wait)); } MEM_CONTEXT_TEMP_END(); diff -Nru pgbackrest-2.15.1/src/command/archive/common.h pgbackrest-2.16/src/command/archive/common.h --- pgbackrest-2.15.1/src/command/archive/common.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/archive/common.h 2019-08-05 16:03:04.000000000 +0000 @@ -65,7 +65,7 @@ bool walIsPartial(const String *walSegment); bool walIsSegment(const String *walSegment); String *walPath(const String *walFile, const String *pgPath, const String *command); -String *walSegmentFind(const Storage *storage, const String *archiveId, const String *walSegment); +String *walSegmentFind(const Storage *storage, const String *archiveId, const String *walSegment, TimeMSec timeout); String *walSegmentNext(const String *walSegment, size_t walSegmentSize, unsigned int pgVersion); StringList *walSegmentRange(const String *walSegmentBegin, size_t 
walSegmentSize, unsigned int pgVersion, unsigned int range); diff -Nru pgbackrest-2.15.1/src/command/archive/get/file.c pgbackrest-2.16/src/command/archive/get/file.c --- pgbackrest-2.15.1/src/command/archive/get/file.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/archive/get/file.c 2019-08-05 16:03:04.000000000 +0000 @@ -5,7 +5,7 @@ #include "command/archive/get/file.h" #include "command/archive/common.h" -#include "command/control/control.h" +#include "command/control/common.h" #include "common/compress/gzip/common.h" #include "common/compress/gzip/decompress.h" #include "common/crypto/cipherBlock.h" @@ -50,8 +50,7 @@ PgControl controlInfo = pgControlFromFile(cfgOptionStr(cfgOptPgPath)); // Attempt to load the archive info file - InfoArchive *info = infoArchiveNewLoad( - storageRepo(), STRDEF(STORAGE_REPO_ARCHIVE "/" INFO_ARCHIVE_FILE), cipherType, cipherPass); + InfoArchive *info = infoArchiveNewLoad(storageRepo(), INFO_ARCHIVE_PATH_FILE_STR, cipherType, cipherPass); // Loop through the pg history in case the WAL we need is not in the most recent archive id String *archiveId = NULL; @@ -69,7 +68,7 @@ // If a WAL segment search among the possible file names if (walIsSegment(archiveFile)) { - String *walSegmentFile = walSegmentFind(storageRepo(), archiveId, archiveFile); + String *walSegmentFile = walSegmentFind(storageRepo(), archiveId, archiveFile, 0); if (walSegmentFile != NULL) { diff -Nru pgbackrest-2.15.1/src/command/archive/push/file.c pgbackrest-2.16/src/command/archive/push/file.c --- pgbackrest-2.15.1/src/command/archive/push/file.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/archive/push/file.c 2019-08-05 16:03:04.000000000 +0000 @@ -5,7 +5,7 @@ #include "command/archive/push/file.h" #include "command/archive/common.h" -#include "command/control/control.h" +#include "command/control/common.h" #include "common/compress/gzip/common.h" #include "common/compress/gzip/compress.h" #include 
"common/crypto/cipherBlock.h" @@ -72,25 +72,15 @@ if (isSegment) { - // Generate a sha1 checksum for the wal segment. ??? Probably need a function in storage for this. + // Generate a sha1 checksum for the wal segment IoRead *read = storageReadIo(storageNewReadNP(storageLocal(), walSource)); ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioReadDrain(read); - Buffer *buffer = bufNew(ioBufferSize()); - ioReadOpen(read); - - do - { - ioRead(read, buffer); - bufUsedZero(buffer); - } - while (!ioReadEof(read)); - - ioReadClose(read); const String *walSegmentChecksum = varStr(ioFilterGroupResult(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE_STR)); // If the wal segment already exists in the repo then compare checksums - walSegmentFile = walSegmentFind(storageRepo(), archiveId, archiveFile); + walSegmentFile = walSegmentFind(storageRepo(), archiveId, archiveFile, 0); if (walSegmentFile != NULL) { diff -Nru pgbackrest-2.15.1/src/command/archive/push/push.c pgbackrest-2.16/src/command/archive/push/push.c --- pgbackrest-2.15.1/src/command/archive/push/push.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/archive/push/push.c 2019-08-05 16:03:04.000000000 +0000 @@ -10,7 +10,7 @@ #include "command/archive/push/file.h" #include "command/archive/push/protocol.h" #include "command/command.h" -#include "command/control/control.h" +#include "command/control/common.h" #include "common/debug.h" #include "common/fork.h" #include "common/log.h" @@ -216,8 +216,7 @@ PgControl controlInfo = pgControlFromFile(cfgOptionStr(cfgOptPgPath)); // Attempt to load the archive info file - InfoArchive *info = infoArchiveNewLoad( - storageRepo(), STRDEF(STORAGE_REPO_ARCHIVE "/" INFO_ARCHIVE_FILE), cipherType, cipherPass); + InfoArchive *info = infoArchiveNewLoad(storageRepo(), INFO_ARCHIVE_PATH_FILE_STR, cipherType, cipherPass); // Get archive id for the most recent version -- archive-push will only operate against the most recent version String 
*archiveId = infoPgArchiveId(infoArchivePg(info), infoPgDataCurrentId(infoArchivePg(info))); diff -Nru pgbackrest-2.15.1/src/command/backup/file.c pgbackrest-2.16/src/command/backup/file.c --- pgbackrest-2.15.1/src/command/backup/file.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/file.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,252 @@ +/*********************************************************************************************************************************** +Backup File +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include + +#include "command/backup/file.h" +#include "command/backup/pageChecksum.h" +#include "common/compress/gzip/common.h" +#include "common/compress/gzip/compress.h" +#include "common/compress/gzip/decompress.h" +#include "common/crypto/cipherBlock.h" +#include "common/crypto/hash.h" +#include "common/debug.h" +#include "common/io/filter/group.h" +#include "common/io/filter/size.h" +#include "common/io/io.h" +#include "common/log.h" +#include "common/regExp.h" +#include "common/type/convert.h" +#include "postgres/interface.h" +#include "storage/helper.h" + +/*********************************************************************************************************************************** +Helper functions +***********************************************************************************************************************************/ +static unsigned int +segmentNumber(const String *pgFile) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(STRING, pgFile); + FUNCTION_TEST_END(); + + // Determine which segment number this is by checking for a numeric extension. No extension means segment 0. + FUNCTION_TEST_RETURN(regExpMatchOne(STRDEF("\\.[0-9]+$"), pgFile) ? 
cvtZToUInt(strrchr(strPtr(pgFile), '.') + 1) : 0); +} + +/*********************************************************************************************************************************** +Copy a file from the PostgreSQL data directory to the repository +***********************************************************************************************************************************/ +BackupFileResult +backupFile( + const String *pgFile, bool pgFileIgnoreMissing, uint64_t pgFileSize, const String *pgFileChecksum, bool pgFileChecksumPage, + uint64_t pgFileChecksumPageLsnLimit, const String *repoFile, bool repoFileHasReference, bool repoFileCompress, + unsigned int repoFileCompressLevel, const String *backupLabel, bool delta, CipherType cipherType, const String *cipherPass) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(STRING, pgFile); // Database file to copy to the repo + FUNCTION_LOG_PARAM(BOOL, pgFileIgnoreMissing); // Is it OK if the database file is missing? + FUNCTION_LOG_PARAM(UINT64, pgFileSize); // Size of the database file + FUNCTION_LOG_PARAM(STRING, pgFileChecksum); // Checksum to verify the database file + FUNCTION_LOG_PARAM(BOOL, pgFileChecksumPage); // Should page checksums be validated + FUNCTION_LOG_PARAM(UINT64, pgFileChecksumPageLsnLimit); // Upper LSN limit to which page checksums must be valid + FUNCTION_LOG_PARAM(STRING, repoFile); // Destination in the repo to copy the pg file + FUNCTION_LOG_PARAM(BOOL, repoFileHasReference); // Does the repo file exists in a prior backup in the set? + FUNCTION_LOG_PARAM(BOOL, repoFileCompress); // Compress destination file + FUNCTION_LOG_PARAM(UINT, repoFileCompressLevel); // Compression level for destination file + FUNCTION_LOG_PARAM(STRING, backupLabel); // Label of current backup + FUNCTION_LOG_PARAM(BOOL, delta); // Is the delta option on? 
+ FUNCTION_LOG_PARAM(ENUM, cipherType); // Encryption type + FUNCTION_TEST_PARAM(STRING, cipherPass); // Password to access the repo file if encrypted + FUNCTION_LOG_END(); + + ASSERT(pgFile != NULL); + ASSERT(repoFile != NULL); + ASSERT(backupLabel != NULL); + ASSERT((cipherType == cipherTypeNone && cipherPass == NULL) || (cipherType != cipherTypeNone && cipherPass != NULL)); + + // Backup file results + BackupFileResult result = {.backupCopyResult = backupCopyResultCopy}; + + MEM_CONTEXT_TEMP_BEGIN() + { + // Generate complete repo path and add compression extension if needed + const String *repoPathFile = strNewFmt( + STORAGE_REPO_BACKUP "/%s/%s%s", strPtr(backupLabel), strPtr(repoFile), repoFileCompress ? "." GZIP_EXT : ""); + + // If checksum is defined then the file needs to be checked. If delta option then check the DB and possibly the repo, else + // just check the repo. + if (pgFileChecksum != NULL) + { + // Does the file in pg match the checksum and size passed? + bool pgFileMatch = false; + + // If delta, then check the DB checksum and possibly the repo. If the checksum does not match in either case then + // recopy. + if (delta) + { + // Generate checksum/size for the pg file + IoRead *read = storageReadIo(storageNewReadP(storagePg(), pgFile, .ignoreMissing = pgFileIgnoreMissing)); + ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioFilterGroupAdd(ioReadFilterGroup(read), ioSizeNew()); + + // If the pg file exists check the checksum/size + if (ioReadDrain(read)) + { + const String *pgTestChecksum = varStr( + ioFilterGroupResult(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE_STR)); + uint64_t pgTestSize = varUInt64Force(ioFilterGroupResult(ioReadFilterGroup(read), SIZE_FILTER_TYPE_STR)); + + // Does the pg file match? 
+ if (pgFileSize == pgTestSize && strEq(pgFileChecksum, pgTestChecksum)) + { + pgFileMatch = true; + + // If it matches and is a reference to a previous backup then no need to copy the file + if (repoFileHasReference) + { + memContextSwitch(MEM_CONTEXT_OLD()); + result.backupCopyResult = backupCopyResultNoOp; + result.copySize = pgTestSize; + result.copyChecksum = strDup(pgTestChecksum); + memContextSwitch(MEM_CONTEXT_TEMP()); + } + } + } + // Else the source file is missing from the database so skip this file + else + result.backupCopyResult = backupCopyResultSkip; + } + + // If this is not a delta backup or it is and the file exists and the checksum from the DB matches, then also test the + // checksum of the file in the repo (unless it is in a prior backup) and if the checksum doesn't match, then there may + // be corruption in the repo, so recopy + if (!delta || !repoFileHasReference) + { + // If this is a delta backup and the file is missing from the DB, then remove it from the repo (backupManifestUpdate + // will remove it from the manifest) + if (result.backupCopyResult == backupCopyResultSkip) + { + storageRemoveNP(storageRepoWrite(), repoPathFile); + } + else if (!delta || pgFileMatch) + { + // Generate checksum/size for the repo file + IoRead *read = storageReadIo(storageNewReadNP(storageRepo(), repoPathFile)); + + if (cipherType != cipherTypeNone) + { + ioFilterGroupAdd( + ioReadFilterGroup(read), cipherBlockNew(cipherModeDecrypt, cipherType, BUFSTR(cipherPass), NULL)); + } + + if (repoFileCompress) + ioFilterGroupAdd(ioReadFilterGroup(read), gzipDecompressNew(false)); + + ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioFilterGroupAdd(ioReadFilterGroup(read), ioSizeNew()); + + ioReadDrain(read); + + // Test checksum/size + const String *pgTestChecksum = varStr( + ioFilterGroupResult(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE_STR)); + uint64_t pgTestSize = varUInt64Force(ioFilterGroupResult(ioReadFilterGroup(read), 
SIZE_FILTER_TYPE_STR)); + + // No need to recopy if checksum/size match + if (pgFileSize == pgTestSize && strEq(pgFileChecksum, pgTestChecksum)) + { + memContextSwitch(MEM_CONTEXT_OLD()); + result.backupCopyResult = backupCopyResultChecksum; + result.copySize = pgTestSize; + result.copyChecksum = strDup(pgTestChecksum); + memContextSwitch(MEM_CONTEXT_TEMP()); + } + // Else recopy when repo file is not as expected + else + result.backupCopyResult = backupCopyResultReCopy; + } + } + } + + // Copy the file + if (result.backupCopyResult == backupCopyResultCopy || result.backupCopyResult == backupCopyResultReCopy) + { + // Is the file compressible during the copy? + bool compressible = !repoFileCompress && cipherType == cipherTypeNone; + + // Setup pg file for read + StorageRead *read = storageNewReadP( + storagePg(), pgFile, .ignoreMissing = pgFileIgnoreMissing, .compressible = compressible); + ioFilterGroupAdd(ioReadFilterGroup(storageReadIo(read)), cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioFilterGroupAdd(ioReadFilterGroup(storageReadIo(read)), ioSizeNew()); + + // Add page checksum filter + if (pgFileChecksumPage) + { + ioFilterGroupAdd( + ioReadFilterGroup(storageReadIo(read)), pageChecksumNew(segmentNumber(pgFile), PG_SEGMENT_PAGE_DEFAULT, + PG_PAGE_SIZE_DEFAULT, pgFileChecksumPageLsnLimit)); + } + + // Add compression + if (repoFileCompress) + ioFilterGroupAdd(ioReadFilterGroup(storageReadIo(read)), gzipCompressNew((int)repoFileCompressLevel, false)); + + // If there is a cipher then add the encrypt filter + if (cipherType != cipherTypeNone) + { + ioFilterGroupAdd( + ioReadFilterGroup( + storageReadIo(read)), cipherBlockNew(cipherModeEncrypt, cipherType, BUFSTR(cipherPass), NULL)); + } + + // Setup the repo file for write + StorageWrite *write = storageNewWriteP(storageRepoWrite(), repoPathFile, .compressible = compressible); + ioFilterGroupAdd(ioWriteFilterGroup(storageWriteIo(write)), ioSizeNew()); + + // Open the source and destination and copy the file + if 
(storageCopy(read, write)) + { + memContextSwitch(MEM_CONTEXT_OLD()); + + // Get sizes and checksum + result.copySize = varUInt64Force( + ioFilterGroupResult(ioReadFilterGroup(storageReadIo(read)), SIZE_FILTER_TYPE_STR)); + result.copyChecksum = strDup( + varStr(ioFilterGroupResult(ioReadFilterGroup(storageReadIo(read)), CRYPTO_HASH_FILTER_TYPE_STR))); + result.repoSize = + varUInt64Force(ioFilterGroupResult(ioWriteFilterGroup(storageWriteIo(write)), SIZE_FILTER_TYPE_STR)); + + // Get results of page checksum validation + if (pgFileChecksumPage) + { + result.pageChecksumResult = kvDup( + varKv(ioFilterGroupResult(ioReadFilterGroup(storageReadIo(read)), PAGE_CHECKSUM_FILTER_TYPE_STR))); + } + + memContextSwitch(MEM_CONTEXT_TEMP()); + } + // Else if source file is missing and the read setup indicated ignore a missing file, the database removed it so skip it + else + result.backupCopyResult = backupCopyResultSkip; + } + + // If the file was copied get the repo size only if the storage can store the files with a different size than what was + // written. This has to be checked after the file is at rest because filesystem compression may affect the actual repo size + // and this cannot be calculated in stream. + // + // If the file was checksummed then get the size in all cases since we don't already have it. 
+ if (((result.backupCopyResult == backupCopyResultCopy || result.backupCopyResult == backupCopyResultReCopy) && + storageFeature(storageRepo(), storageFeatureCompress)) || + result.backupCopyResult == backupCopyResultChecksum) + { + result.repoSize = storageInfoNP(storageRepo(), repoPathFile).size; + } + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(BACKUP_FILE_RESULT, result); +} diff -Nru pgbackrest-2.15.1/src/command/backup/file.h pgbackrest-2.16/src/command/backup/file.h --- pgbackrest-2.15.1/src/command/backup/file.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/file.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,47 @@ +/*********************************************************************************************************************************** +Backup File +***********************************************************************************************************************************/ +#ifndef COMMAND_BACKUP_FILE_H +#define COMMAND_BACKUP_FILE_H + +#include "common/crypto/common.h" +#include "common/type/keyValue.h" + +/*********************************************************************************************************************************** +Backup file types +***********************************************************************************************************************************/ +typedef enum +{ + backupCopyResultChecksum, + backupCopyResultCopy, + backupCopyResultReCopy, + backupCopyResultSkip, + backupCopyResultNoOp, +} BackupCopyResult; + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +typedef struct BackupFileResult +{ + BackupCopyResult backupCopyResult; + uint64_t copySize; + String *copyChecksum; + uint64_t repoSize; + KeyValue *pageChecksumResult; +} 
BackupFileResult; + +BackupFileResult backupFile( + const String *pgFile, bool pgFileIgnoreMissing, uint64_t pgFileSize, const String *pgFileChecksum, bool pgFileChecksumPage, + uint64_t pgFileChecksumPageLsnLimit, const String *repoFile, bool repoFileHasReference, bool repoFileCompress, + unsigned int repoFileCompressLevel, const String *backupLabel, bool delta, CipherType cipherType, const String *cipherPass); + +/*********************************************************************************************************************************** +Macros for function logging +***********************************************************************************************************************************/ +#define FUNCTION_LOG_BACKUP_FILE_RESULT_TYPE \ + BackupFileResult +#define FUNCTION_LOG_BACKUP_FILE_RESULT_FORMAT(value, buffer, bufferSize) \ + objToLog(&value, "BackupFileResult", buffer, bufferSize) + +#endif diff -Nru pgbackrest-2.15.1/src/command/backup/pageChecksum.c pgbackrest-2.16/src/command/backup/pageChecksum.c --- pgbackrest-2.15.1/src/command/backup/pageChecksum.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/pageChecksum.c 2019-08-05 16:03:04.000000000 +0000 @@ -229,9 +229,25 @@ driver->valid = true; driver->align = true; - this = ioFilterNewP(PAGE_CHECKSUM_FILTER_TYPE_STR, driver, NULL, .in = pageChecksumProcess, .result = pageChecksumResult); + // Create param list + VariantList *paramList = varLstNew(); + varLstAdd(paramList, varNewUInt(segmentNo)); + varLstAdd(paramList, varNewUInt(segmentPageTotal)); + varLstAdd(paramList, varNewUInt64(pageSize)); + varLstAdd(paramList, varNewUInt64(lsnLimit)); + + this = ioFilterNewP( + PAGE_CHECKSUM_FILTER_TYPE_STR, driver, paramList, .in = pageChecksumProcess, .result = pageChecksumResult); } MEM_CONTEXT_NEW_END(); FUNCTION_LOG_RETURN(IO_FILTER, this); } + +IoFilter * +pageChecksumNewVar(const VariantList *paramList) +{ + return pageChecksumNew( + 
varUIntForce(varLstGet(paramList, 0)), varUIntForce(varLstGet(paramList, 1)), varUIntForce(varLstGet(paramList, 2)), + varUInt64(varLstGet(paramList, 3))); +} diff -Nru pgbackrest-2.15.1/src/command/backup/pageChecksum.h pgbackrest-2.16/src/command/backup/pageChecksum.h --- pgbackrest-2.15.1/src/command/backup/pageChecksum.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/pageChecksum.h 2019-08-05 16:03:04.000000000 +0000 @@ -18,5 +18,6 @@ Constructor ***********************************************************************************************************************************/ IoFilter *pageChecksumNew(unsigned int segmentNo, unsigned int segmentPageTotal, size_t pageSize, uint64_t lsnLimit); +IoFilter *pageChecksumNewVar(const VariantList *paramList); #endif diff -Nru pgbackrest-2.15.1/src/command/backup/protocol.c pgbackrest-2.16/src/command/backup/protocol.c --- pgbackrest-2.15.1/src/command/backup/protocol.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/protocol.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,67 @@ +/*********************************************************************************************************************************** +Backup Protocol Handler +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "command/backup/file.h" +#include "command/backup/protocol.h" +#include "common/debug.h" +#include "common/io/io.h" +#include "common/log.h" +#include "common/memContext.h" +#include "config/config.h" +#include "storage/helper.h" + +/*********************************************************************************************************************************** +Constants +***********************************************************************************************************************************/ +STRING_EXTERN(PROTOCOL_COMMAND_BACKUP_FILE_STR, 
PROTOCOL_COMMAND_BACKUP_FILE); + +/*********************************************************************************************************************************** +Process protocol requests +***********************************************************************************************************************************/ +bool +backupProtocol(const String *command, const VariantList *paramList, ProtocolServer *server) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(STRING, command); + FUNCTION_LOG_PARAM(VARIANT_LIST, paramList); + FUNCTION_LOG_PARAM(PROTOCOL_SERVER, server); + FUNCTION_LOG_END(); + + ASSERT(command != NULL); + + // Attempt to satisfy the request -- we may get requests that are meant for other handlers + bool found = true; + + MEM_CONTEXT_TEMP_BEGIN() + { + if (strEq(command, PROTOCOL_COMMAND_BACKUP_FILE_STR)) + { + // Backup the file + BackupFileResult result = backupFile( + varStr(varLstGet(paramList, 0)), varBoolForce(varLstGet(paramList, 1)), varUInt64(varLstGet(paramList, 2)), + varStr(varLstGet(paramList, 3)), varBoolForce(varLstGet(paramList, 4)), + varUInt64(varLstGet(paramList, 5)) << 32 | varUInt64(varLstGet(paramList, 6)), varStr(varLstGet(paramList, 7)), + varBoolForce(varLstGet(paramList, 8)), varBoolForce(varLstGet(paramList, 9)), + varUIntForce(varLstGet(paramList, 10)), varStr(varLstGet(paramList, 11)), varBoolForce(varLstGet(paramList, 12)), + varLstSize(paramList) == 14 ? cipherTypeAes256Cbc : cipherTypeNone, + varLstSize(paramList) == 14 ? varStr(varLstGet(paramList, 13)) : NULL); + + // Return backup result + VariantList *resultList = varLstNew(); + varLstAdd(resultList, varNewUInt(result.backupCopyResult)); + varLstAdd(resultList, varNewUInt64(result.copySize)); + varLstAdd(resultList, varNewUInt64(result.repoSize)); + varLstAdd(resultList, varNewStr(result.copyChecksum)); + varLstAdd(resultList, result.pageChecksumResult != NULL ? 
varNewKv(result.pageChecksumResult) : NULL); + + protocolServerResponse(server, varNewVarLst(resultList)); + } + else + found = false; + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(BOOL, found); +} diff -Nru pgbackrest-2.15.1/src/command/backup/protocol.h pgbackrest-2.16/src/command/backup/protocol.h --- pgbackrest-2.15.1/src/command/backup/protocol.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/backup/protocol.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,22 @@ +/*********************************************************************************************************************************** +Backup Protocol Handler +***********************************************************************************************************************************/ +#ifndef COMMAND_BACKUP_PROTOCOL_H +#define COMMAND_BACKUP_PROTOCOL_H + +#include "common/type/string.h" +#include "common/type/variantList.h" +#include "protocol/server.h" + +/*********************************************************************************************************************************** +Constants +***********************************************************************************************************************************/ +#define PROTOCOL_COMMAND_BACKUP_FILE "backupFile" + STRING_DECLARE(PROTOCOL_COMMAND_BACKUP_FILE_STR); + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +bool backupProtocol(const String *command, const VariantList *paramList, ProtocolServer *server); + +#endif diff -Nru pgbackrest-2.15.1/src/command/check/check.c pgbackrest-2.16/src/command/check/check.c --- pgbackrest-2.15.1/src/command/check/check.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/check/check.c 2019-08-05 16:03:04.000000000 
+0000 @@ -0,0 +1,77 @@ +/*********************************************************************************************************************************** +Check Command +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "command/archive/common.h" +#include "command/check/check.h" +#include "common/debug.h" +#include "common/log.h" +#include "common/memContext.h" +#include "config/config.h" +#include "db/helper.h" +#include "info/infoArchive.h" +#include "storage/helper.h" + +/*********************************************************************************************************************************** +Perform standard checks +***********************************************************************************************************************************/ +void +cmdCheck(void) +{ + FUNCTION_LOG_VOID(logLevelDebug); + + MEM_CONTEXT_TEMP_BEGIN() + { + // Get the repo storage in case it is remote and encryption settings need to be pulled down + storageRepo(); + + // Attempt to load the archive info file + InfoArchive *archiveInfo = infoArchiveNewLoad( + storageRepo(), INFO_ARCHIVE_PATH_FILE_STR, cipherType(cfgOptionStr(cfgOptRepoCipherType)), + cfgOptionStr(cfgOptRepoCipherPass)); + const String *archiveId = infoArchiveId(archiveInfo); + + // Get the primary/standby connections (standby is only required if backup from standby is enabled) + DbGetResult dbGroup = dbGet(false, false); + + // Free the standby connection immediately since we don't need it for anything + dbFree(dbGroup.standby); + + // Perform a WAL switch and make sure the WAL is archived if a primary was found + if (dbGroup.primary != NULL) + { + // Perform WAL switch + const String *walSegment = dbWalSwitch(dbGroup.primary); + dbFree(dbGroup.primary); + + // Wait for the WAL to appear in the repo + TimeMSec archiveTimeout = (TimeMSec)(cfgOptionDbl(cfgOptArchiveTimeout) * 
MSEC_PER_SEC); + const String *walSegmentFile = walSegmentFind(storageRepo(), archiveId, walSegment, archiveTimeout); + + if (walSegmentFile != NULL) + { + LOG_INFO( + "WAL segment %s successfully archived to '%s'", strPtr(walSegment), + strPtr( + storagePath( + storageRepo(), strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strPtr(archiveId), strPtr(walSegmentFile))))); + } + else + { + THROW_FMT( + ArchiveTimeoutError, + "WAL segment %s was not archived before the %" PRIu64 "ms timeout\n" + "HINT: Check the archive_command to ensure that all options are correct (especially --stanza).\n" + "HINT: Check the PostgreSQL server log for errors.", + strPtr(walSegment), archiveTimeout); + } + } + else + LOG_INFO("switch wal not performed because no primary was found"); + + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN_VOID(); +} diff -Nru pgbackrest-2.15.1/src/command/check/check.h pgbackrest-2.16/src/command/check/check.h --- pgbackrest-2.15.1/src/command/check/check.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/check/check.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,12 @@ +/*********************************************************************************************************************************** +Check Command +***********************************************************************************************************************************/ +#ifndef COMMAND_CHECK_CHECK_H +#define COMMAND_CHECK_CHECK_H + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +void cmdCheck(void); + +#endif diff -Nru pgbackrest-2.15.1/src/command/control/common.c pgbackrest-2.16/src/command/control/common.c --- pgbackrest-2.15.1/src/command/control/common.c 1970-01-01 00:00:00.000000000 +0000 +++ 
pgbackrest-2.16/src/command/control/common.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,50 @@ +/*********************************************************************************************************************************** +Common Handler for Control Commands +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "command/control/common.h" +#include "common/debug.h" +#include "config/config.h" +#include "storage/helper.h" + +/*********************************************************************************************************************************** +Create the stop filename +***********************************************************************************************************************************/ +String * +lockStopFileName(const String *stanza) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(STRING, stanza); + FUNCTION_TEST_END(); + + String *result = strNewFmt("%s/%s.stop", strPtr(cfgOptionStr(cfgOptLockPath)), stanza != NULL ? 
strPtr(stanza) : "all"); + + FUNCTION_TEST_RETURN(result); +} + +/*********************************************************************************************************************************** +Test for the existence of a stop file +***********************************************************************************************************************************/ +void +lockStopTest(void) +{ + FUNCTION_LOG_VOID(logLevelDebug); + + MEM_CONTEXT_TEMP_BEGIN() + { + // Check the current stanza (if any) + if (cfgOptionTest(cfgOptStanza)) + { + if (storageExistsNP(storageLocal(), lockStopFileName(cfgOptionStr(cfgOptStanza)))) + THROW_FMT(StopError, "stop file exists for stanza %s", strPtr(cfgOptionStr(cfgOptStanza))); + } + + // Check all stanzas + if (storageExistsNP(storageLocal(), lockStopFileName(NULL))) + THROW(StopError, "stop file exists for all stanzas"); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN_VOID(); +} diff -Nru pgbackrest-2.15.1/src/command/control/common.h pgbackrest-2.16/src/command/control/common.h --- pgbackrest-2.15.1/src/command/control/common.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/command/control/common.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,15 @@ +/*********************************************************************************************************************************** +Common Handler for Control Commands +***********************************************************************************************************************************/ +#ifndef COMMAND_CONTROL_COMMON_H +#define COMMAND_CONTROL_COMMON_H + +#include "common/type/string.h" + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +String *lockStopFileName(const String *stanza); +void lockStopTest(void); + 
+#endif diff -Nru pgbackrest-2.15.1/src/command/control/control.c pgbackrest-2.16/src/command/control/control.c --- pgbackrest-2.15.1/src/command/control/control.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/control/control.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -/*********************************************************************************************************************************** -Command Control -***********************************************************************************************************************************/ -#include "build.auto.h" - -#include "command/control/control.h" -#include "common/debug.h" -#include "config/config.h" -#include "storage/helper.h" - -/*********************************************************************************************************************************** -Create the stop filename -***********************************************************************************************************************************/ -String * -lockStopFileName(const String *stanza) -{ - FUNCTION_TEST_BEGIN(); - FUNCTION_TEST_PARAM(STRING, stanza); - FUNCTION_TEST_END(); - - String *result = strNewFmt("%s/%s.stop", strPtr(cfgOptionStr(cfgOptLockPath)), stanza != NULL ? 
strPtr(stanza) : "all"); - - FUNCTION_TEST_RETURN(result); -} - -/*********************************************************************************************************************************** -Test for the existence of a stop file -***********************************************************************************************************************************/ -void -lockStopTest(void) -{ - FUNCTION_LOG_VOID(logLevelDebug); - - MEM_CONTEXT_TEMP_BEGIN() - { - // Check the current stanza (if any) - if (cfgOptionTest(cfgOptStanza)) - { - if (storageExistsNP(storageLocal(), lockStopFileName(cfgOptionStr(cfgOptStanza)))) - THROW_FMT(StopError, "stop file exists for stanza %s", strPtr(cfgOptionStr(cfgOptStanza))); - } - - // Check all stanzas - if (storageExistsNP(storageLocal(), lockStopFileName(NULL))) - THROW(StopError, "stop file exists for all stanzas"); - } - MEM_CONTEXT_TEMP_END(); - - FUNCTION_LOG_RETURN_VOID(); -} diff -Nru pgbackrest-2.15.1/src/command/control/control.h pgbackrest-2.16/src/command/control/control.h --- pgbackrest-2.15.1/src/command/control/control.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/control/control.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -/*********************************************************************************************************************************** -Command Control -***********************************************************************************************************************************/ -#ifndef COMMAND_CONTROL_CONTROL_H -#define COMMAND_CONTROL_CONTROL_H - -#include "common/type/string.h" - -/*********************************************************************************************************************************** -Functions -***********************************************************************************************************************************/ -String *lockStopFileName(const String *stanza); -void lockStopTest(void); - -#endif diff -Nru 
pgbackrest-2.15.1/src/command/expire/expire.c pgbackrest-2.16/src/command/expire/expire.c --- pgbackrest-2.15.1/src/command/expire/expire.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/expire/expire.c 2019-08-05 16:03:04.000000000 +0000 @@ -308,8 +308,8 @@ { // Attempt to load the archive info file InfoArchive *infoArchive = infoArchiveNewLoad( - storageRepo(), STRDEF(STORAGE_REPO_ARCHIVE "/" INFO_ARCHIVE_FILE), - cipherType(cfgOptionStr(cfgOptRepoCipherType)), cfgOptionStr(cfgOptRepoCipherPass)); + storageRepo(), INFO_ARCHIVE_PATH_FILE_STR, cipherType(cfgOptionStr(cfgOptRepoCipherType)), + cfgOptionStr(cfgOptRepoCipherPass)); InfoPg *infoArchivePgData = infoArchivePg(infoArchive); @@ -672,14 +672,15 @@ // Load the backup.info InfoBackup *infoBackup = infoBackupNewLoad( - storageRepo(), STRDEF(STORAGE_REPO_BACKUP "/" INFO_BACKUP_FILE), cipherType(cfgOptionStr(cfgOptRepoCipherType)), + storageRepo(), INFO_BACKUP_PATH_FILE_STR, cipherType(cfgOptionStr(cfgOptRepoCipherType)), cfgOptionStr(cfgOptRepoCipherPass)); expireFullBackup(infoBackup); expireDiffBackup(infoBackup); - infoBackupSave(infoBackup, storageRepoWrite(), STRDEF(STORAGE_REPO_BACKUP "/" INFO_BACKUP_FILE), - cipherType(cfgOptionStr(cfgOptRepoCipherType)), cfgOptionStr(cfgOptRepoCipherPass)); + infoBackupSave( + infoBackup, storageRepoWrite(), INFO_BACKUP_PATH_FILE_STR, cipherType(cfgOptionStr(cfgOptRepoCipherType)), + cfgOptionStr(cfgOptRepoCipherPass)); removeExpiredBackup(infoBackup); removeExpiredArchive(infoBackup); diff -Nru pgbackrest-2.15.1/src/command/help/help.c pgbackrest-2.16/src/command/help/help.c --- pgbackrest-2.15.1/src/command/help/help.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/help/help.c 2019-08-05 16:03:04.000000000 +0000 @@ -188,7 +188,7 @@ } // Construct message for more help - more = STRDEF("[command]"); + more = strNew("[command]"); } else { @@ -224,13 +224,13 @@ const String *section = NULL; if (cfgDefOptionHelpSection(optionDefId) 
!= NULL) - section = STR(cfgDefOptionHelpSection(optionDefId)); + section = strNew(cfgDefOptionHelpSection(optionDefId)); if (section == NULL || (!strEqZ(section, "general") && !strEqZ(section, "log") && !strEqZ(section, "repository") && !strEqZ(section, "stanza"))) { - section = STRDEF("command"); + section = strNew("command"); } kvAdd(optionKv, VARSTR(section), VARINT((int)optionDefId)); diff -Nru pgbackrest-2.15.1/src/command/local/local.c pgbackrest-2.16/src/command/local/local.c --- pgbackrest-2.15.1/src/command/local/local.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/local/local.c 2019-08-05 16:03:04.000000000 +0000 @@ -5,6 +5,7 @@ #include "command/archive/get/protocol.h" #include "command/archive/push/protocol.h" +#include "command/backup/protocol.h" #include "command/restore/protocol.h" #include "common/debug.h" #include "common/io/handleRead.h" @@ -34,6 +35,7 @@ ProtocolServer *server = protocolServerNew(name, PROTOCOL_SERVICE_LOCAL_STR, read, write); protocolServerHandlerAdd(server, archiveGetProtocol); protocolServerHandlerAdd(server, archivePushProtocol); + protocolServerHandlerAdd(server, backupProtocol); protocolServerHandlerAdd(server, restoreProtocol); protocolServerProcess(server); } diff -Nru pgbackrest-2.15.1/src/command/remote/remote.c pgbackrest-2.16/src/command/remote/remote.c --- pgbackrest-2.15.1/src/command/remote/remote.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/remote/remote.c 2019-08-05 16:03:04.000000000 +0000 @@ -11,6 +11,7 @@ #include "common/log.h" #include "config/config.h" #include "config/protocol.h" +#include "db/protocol.h" #include "protocol/helper.h" #include "protocol/server.h" #include "storage/remote/protocol.h" @@ -33,6 +34,7 @@ ProtocolServer *server = protocolServerNew(name, PROTOCOL_SERVICE_REMOTE_STR, read, write); protocolServerHandlerAdd(server, storageRemoteProtocol); + protocolServerHandlerAdd(server, dbProtocol); protocolServerHandlerAdd(server, configProtocol); 
// Acquire a lock if this command needs one. We'll use the noop that is always sent from the client right after the diff -Nru pgbackrest-2.15.1/src/command/restore/file.c pgbackrest-2.16/src/command/restore/file.c --- pgbackrest-2.15.1/src/command/restore/file.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/command/restore/file.c 2019-08-05 16:03:04.000000000 +0000 @@ -88,18 +88,7 @@ { read = storageReadIo(storageNewReadNP(storagePgWrite(), pgFile)); ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(HASH_TYPE_SHA1_STR)); - - Buffer *buffer = bufNew(ioBufferSize()); - ioReadOpen(read); - - do - { - ioRead(read, buffer); - bufUsedZero(buffer); - } - while (!ioReadEof(read)); - - ioReadClose(read); + ioReadDrain(read); } // If size and checksum are equal then no need to copy the file diff -Nru pgbackrest-2.15.1/src/common/crypto/cipherBlock.c pgbackrest-2.16/src/common/crypto/cipherBlock.c --- pgbackrest-2.15.1/src/common/crypto/cipherBlock.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/crypto/cipherBlock.c 2019-08-05 16:03:04.000000000 +0000 @@ -19,8 +19,7 @@ /*********************************************************************************************************************************** Filter type constant ***********************************************************************************************************************************/ -#define CIPHER_BLOCK_FILTER_TYPE "cipherBlock" - STRING_STATIC(CIPHER_BLOCK_FILTER_TYPE_STR, CIPHER_BLOCK_FILTER_TYPE); +STRING_EXTERN(CIPHER_BLOCK_FILTER_TYPE_STR, CIPHER_BLOCK_FILTER_TYPE); /*********************************************************************************************************************************** Header constants and sizes @@ -435,12 +434,29 @@ driver->pass = memNewRaw(driver->passSize); memcpy(driver->pass, bufPtr(pass), driver->passSize); + // Create param list + VariantList *paramList = varLstNew(); + varLstAdd(paramList, varNewUInt(mode)); + 
varLstAdd(paramList, varNewUInt(cipherType)); + // ??? Using a string here is not correct since the passphrase is being passed as a buffer so may contain null characters. + // However, since strings are used to hold the passphrase in the rest of the code this is currently valid. + varLstAdd(paramList, varNewStr(strNewBuf(pass))); + varLstAdd(paramList, digestName ? varNewStr(digestName) : NULL); + // Create filter interface this = ioFilterNewP( - CIPHER_BLOCK_FILTER_TYPE_STR, driver, NULL, .done = cipherBlockDone, .inOut = cipherBlockProcess, + CIPHER_BLOCK_FILTER_TYPE_STR, driver, paramList, .done = cipherBlockDone, .inOut = cipherBlockProcess, .inputSame = cipherBlockInputSame); } MEM_CONTEXT_NEW_END(); FUNCTION_LOG_RETURN(IO_FILTER, this); } + +IoFilter * +cipherBlockNewVar(const VariantList *paramList) +{ + return cipherBlockNew( + (CipherMode)varUIntForce(varLstGet(paramList, 0)), (CipherType)varUIntForce(varLstGet(paramList, 1)), + BUFSTR(varStr(varLstGet(paramList, 2))), varLstGet(paramList, 3) == NULL ? 
NULL : varStr(varLstGet(paramList, 3))); +} diff -Nru pgbackrest-2.15.1/src/common/crypto/cipherBlock.h pgbackrest-2.16/src/common/crypto/cipherBlock.h --- pgbackrest-2.15.1/src/common/crypto/cipherBlock.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/crypto/cipherBlock.h 2019-08-05 16:03:04.000000000 +0000 @@ -8,8 +8,15 @@ #include "common/crypto/common.h" /*********************************************************************************************************************************** +Filter type constant +***********************************************************************************************************************************/ +#define CIPHER_BLOCK_FILTER_TYPE "cipherBlock" + STRING_DECLARE(CIPHER_BLOCK_FILTER_TYPE_STR); + +/*********************************************************************************************************************************** Constructor ***********************************************************************************************************************************/ IoFilter *cipherBlockNew(CipherMode mode, CipherType cipherType, const Buffer *pass, const String *digestName); +IoFilter *cipherBlockNewVar(const VariantList *paramList); #endif diff -Nru pgbackrest-2.15.1/src/common/crypto/hash.c pgbackrest-2.16/src/common/crypto/hash.c --- pgbackrest-2.15.1/src/common/crypto/hash.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/crypto/hash.c 2019-08-05 16:03:04.000000000 +0000 @@ -163,14 +163,24 @@ // Initialize context cryptoError(!EVP_DigestInit_ex(driver->hashContext, driver->hashType, NULL), "unable to initialize hash context"); + // Create param list + VariantList *paramList = varLstNew(); + varLstAdd(paramList, varNewStr(type)); + // Create filter interface - this = ioFilterNewP(CRYPTO_HASH_FILTER_TYPE_STR, driver, NULL, .in = cryptoHashProcess, .result = cryptoHashResult); + this = ioFilterNewP(CRYPTO_HASH_FILTER_TYPE_STR, driver, paramList, .in = cryptoHashProcess, .result = 
cryptoHashResult); } MEM_CONTEXT_NEW_END(); FUNCTION_LOG_RETURN(IO_FILTER, this); } +IoFilter * +cryptoHashNewVar(const VariantList *paramList) +{ + return cryptoHashNew(varStr(varLstGet(paramList, 0))); +} + /*********************************************************************************************************************************** Get hash for one C buffer ***********************************************************************************************************************************/ diff -Nru pgbackrest-2.15.1/src/common/crypto/hash.h pgbackrest-2.16/src/common/crypto/hash.h --- pgbackrest-2.15.1/src/common/crypto/hash.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/crypto/hash.h 2019-08-05 16:03:04.000000000 +0000 @@ -41,6 +41,7 @@ Constructor ***********************************************************************************************************************************/ IoFilter *cryptoHashNew(const String *type); +IoFilter *cryptoHashNewVar(const VariantList *paramList); /*********************************************************************************************************************************** Helper functions diff -Nru pgbackrest-2.15.1/src/common/debug.h pgbackrest-2.16/src/common/debug.h --- pgbackrest-2.15.1/src/common/debug.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/debug.h 2019-08-05 16:03:04.000000000 +0000 @@ -269,7 +269,7 @@ Function Test Macros In debug builds these macros will update the stack trace with function names and parameters but not log. In production builds all -test macros are compiled out. +test macros are compiled out (except for return statements). 
***********************************************************************************************************************************/ #ifdef DEBUG_TEST_TRACE #define FUNCTION_TEST_BEGIN() \ diff -Nru pgbackrest-2.15.1/src/common/error.c pgbackrest-2.16/src/common/error.c --- pgbackrest-2.15.1/src/common/error.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/error.c 2019-08-05 16:03:04.000000000 +0000 @@ -31,7 +31,7 @@ /*********************************************************************************************************************************** Maximum allowed number of nested try blocks ***********************************************************************************************************************************/ -#define ERROR_TRY_MAX 32 +#define ERROR_TRY_MAX 64 /*********************************************************************************************************************************** States for each try diff -Nru pgbackrest-2.15.1/src/common/io/filter/filter.c pgbackrest-2.16/src/common/io/filter/filter.c --- pgbackrest-2.15.1/src/common/io/filter/filter.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/filter/filter.c 2019-08-05 16:03:04.000000000 +0000 @@ -48,8 +48,6 @@ ASSERT(!(interface.in != NULL && interface.inOut != NULL)); // If the filter does not produce output then it should produce a result ASSERT(interface.in == NULL || (interface.result != NULL && interface.done == NULL && interface.inputSame == NULL)); - // Filters that produce output will not always be able to dump all their output and will need to get the same input again - ASSERT(interface.inOut == NULL || interface.inputSame != NULL); IoFilter *this = memNew(sizeof(IoFilter)); this->memContext = memContextCurrent(); diff -Nru pgbackrest-2.15.1/src/common/io/filter/group.c pgbackrest-2.16/src/common/io/filter/group.c --- pgbackrest-2.15.1/src/common/io/filter/group.c 2019-06-25 12:29:06.000000000 +0000 +++ 
pgbackrest-2.16/src/common/io/filter/group.c 2019-08-05 16:03:04.000000000 +0000 @@ -103,6 +103,31 @@ } /*********************************************************************************************************************************** +Insert a filter before an index +***********************************************************************************************************************************/ +IoFilterGroup * +ioFilterGroupInsert(IoFilterGroup *this, unsigned int listIdx, IoFilter *filter) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(IO_FILTER_GROUP, this); + FUNCTION_LOG_PARAM(IO_FILTER, filter); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(!this->opened && !this->closed); + ASSERT(filter != NULL); + + // Move the filter to this object's mem context + ioFilterMove(filter, this->memContext); + + // Add the filter + IoFilterData filterData = {.filter = filter}; + lstInsert(this->filterList, listIdx, &filterData); + + FUNCTION_LOG_RETURN(IO_FILTER_GROUP, this); +} + +/*********************************************************************************************************************************** Get a filter ***********************************************************************************************************************************/ static IoFilterData * @@ -119,6 +144,27 @@ } /*********************************************************************************************************************************** +Clear filters +***********************************************************************************************************************************/ +IoFilterGroup * +ioFilterGroupClear(IoFilterGroup *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(IO_FILTER_GROUP, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(!this->opened); + + for (unsigned int filterIdx = 0; filterIdx < ioFilterGroupSize(this); filterIdx++) + ioFilterFree(ioFilterGroupGet(this, filterIdx)->filter); + + 
lstClear(this->filterList); + + FUNCTION_LOG_RETURN(IO_FILTER_GROUP, this); +} + +/*********************************************************************************************************************************** Open filter group Setup the filter group and allocate any required buffers. @@ -401,21 +447,20 @@ ASSERT(!this->opened); ASSERT(this->filterList != NULL); - KeyValue *result = kvNew(); + VariantList *result = varLstNew(); - MEM_CONTEXT_TEMP_BEGIN() + for (unsigned int filterIdx = 0; filterIdx < ioFilterGroupSize(this); filterIdx++) { - for (unsigned int filterIdx = 0; filterIdx < ioFilterGroupSize(this); filterIdx++) - { - IoFilter *filter = ioFilterGroupGet(this, filterIdx)->filter; - const VariantList *paramList = ioFilterParamList(filter); + IoFilter *filter = ioFilterGroupGet(this, filterIdx)->filter; + const VariantList *paramList = ioFilterParamList(filter); - kvAdd(result, VARSTR(ioFilterType(filter)), paramList ? varNewVarLst(paramList) : NULL); - } + KeyValue *filterParam = kvNew(); + kvAdd(filterParam, VARSTR(ioFilterType(filter)), paramList ? 
varNewVarLst(paramList) : NULL); + + varLstAdd(result, varNewKv(filterParam)); } - MEM_CONTEXT_TEMP_END(); - FUNCTION_LOG_RETURN(VARIANT, varNewKv(result)); + FUNCTION_LOG_RETURN(VARIANT, varNewVarLst(result)); } /*********************************************************************************************************************************** @@ -429,8 +474,7 @@ FUNCTION_LOG_PARAM(STRING, filterType); FUNCTION_LOG_END(); - ASSERT(this != NULL); - ASSERT(this->opened && this->closed); + ASSERT(this->opened); ASSERT(filterType != NULL); const Variant *result = NULL; @@ -461,6 +505,30 @@ } /*********************************************************************************************************************************** +Set all filter results +***********************************************************************************************************************************/ +void +ioFilterGroupResultAllSet(IoFilterGroup *this, const Variant *filterResult) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(IO_FILTER_GROUP, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + + if (filterResult != NULL) + { + MEM_CONTEXT_BEGIN(this->memContext) + { + this->filterResult = kvDup(varKv(filterResult)); + } + MEM_CONTEXT_END(); + } + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** Return total number of filters ***********************************************************************************************************************************/ unsigned int diff -Nru pgbackrest-2.15.1/src/common/io/filter/group.h pgbackrest-2.16/src/common/io/filter/group.h --- pgbackrest-2.15.1/src/common/io/filter/group.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/filter/group.h 2019-08-05 16:03:04.000000000 +0000 @@ -30,6 +30,8 @@ Functions 
***********************************************************************************************************************************/ IoFilterGroup *ioFilterGroupAdd(IoFilterGroup *this, IoFilter *filter); +IoFilterGroup *ioFilterGroupInsert(IoFilterGroup *this, unsigned int listIdx, IoFilter *filter); +IoFilterGroup *ioFilterGroupClear(IoFilterGroup *this); void ioFilterGroupOpen(IoFilterGroup *this); void ioFilterGroupProcess(IoFilterGroup *this, const Buffer *input, Buffer *output); void ioFilterGroupClose(IoFilterGroup *this); @@ -42,6 +44,7 @@ Variant *ioFilterGroupParamAll(const IoFilterGroup *this); const Variant *ioFilterGroupResult(const IoFilterGroup *this, const String *filterType); const Variant *ioFilterGroupResultAll(const IoFilterGroup *this); +void ioFilterGroupResultAllSet(IoFilterGroup *this, const Variant *filterResult); unsigned int ioFilterGroupSize(const IoFilterGroup *this); /*********************************************************************************************************************************** diff -Nru pgbackrest-2.15.1/src/common/io/filter/sink.c pgbackrest-2.16/src/common/io/filter/sink.c --- pgbackrest-2.15.1/src/common/io/filter/sink.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/common/io/filter/sink.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,75 @@ +/*********************************************************************************************************************************** +IO Sink Filter +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "common/debug.h" +#include "common/io/filter/filter.intern.h" +#include "common/io/filter/sink.h" +#include "common/log.h" +#include "common/memContext.h" +#include "common/object.h" + +/*********************************************************************************************************************************** +Filter type constant 
+***********************************************************************************************************************************/ +STRING_EXTERN(SINK_FILTER_TYPE_STR, SINK_FILTER_TYPE); + +/*********************************************************************************************************************************** +Object type +***********************************************************************************************************************************/ +typedef struct IoSink +{ + MemContext *memContext; // Mem context of filter +} IoSink; + +/*********************************************************************************************************************************** +Macros for function logging +***********************************************************************************************************************************/ +#define FUNCTION_LOG_IO_SINK_TYPE \ + IoSink * +#define FUNCTION_LOG_IO_SINK_FORMAT(value, buffer, bufferSize) \ + objToLog(value, "IoSink", buffer, bufferSize) + +/*********************************************************************************************************************************** +Discard all input +***********************************************************************************************************************************/ +static void +ioSinkProcess(THIS_VOID, const Buffer *input, Buffer *output) +{ + THIS(IoSink); + + FUNCTION_LOG_BEGIN(logLevelTrace); + FUNCTION_LOG_PARAM(IO_SINK, this); + FUNCTION_LOG_PARAM(BUFFER, input); + FUNCTION_LOG_PARAM(BUFFER, output); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(input != NULL); + ASSERT(output != NULL); + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** +New object +***********************************************************************************************************************************/ +IoFilter * 
+ioSinkNew(void) +{ + FUNCTION_LOG_VOID(logLevelTrace); + + IoFilter *this = NULL; + + MEM_CONTEXT_NEW_BEGIN("IoSink") + { + IoSink *driver = memNew(sizeof(IoSink)); + driver->memContext = memContextCurrent(); + + this = ioFilterNewP(SINK_FILTER_TYPE_STR, driver, NULL, .inOut = ioSinkProcess); + } + MEM_CONTEXT_NEW_END(); + + FUNCTION_LOG_RETURN(IO_FILTER, this); +} diff -Nru pgbackrest-2.15.1/src/common/io/filter/sink.h pgbackrest-2.16/src/common/io/filter/sink.h --- pgbackrest-2.15.1/src/common/io/filter/sink.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/common/io/filter/sink.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,23 @@ +/*********************************************************************************************************************************** +IO Sink Filter + +Consume all bytes sent to the filter without passing any on. This filter is useful when running size/hash filters on a remote when +no data should be returned. +***********************************************************************************************************************************/ +#ifndef COMMON_IO_FILTER_SINK_H +#define COMMON_IO_FILTER_SINK_H + +#include "common/io/filter/filter.h" + +/*********************************************************************************************************************************** +Filter type constant +***********************************************************************************************************************************/ +#define SINK_FILTER_TYPE "sink" + STRING_DECLARE(SINK_FILTER_TYPE_STR); + +/*********************************************************************************************************************************** +Constructor +***********************************************************************************************************************************/ +IoFilter *ioSinkNew(void); + +#endif diff -Nru pgbackrest-2.15.1/src/common/io/http/client.c pgbackrest-2.16/src/common/io/http/client.c --- 
pgbackrest-2.15.1/src/common/io/http/client.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/http/client.c 2019-08-05 16:03:04.000000000 +0000 @@ -385,8 +385,8 @@ HTTP_HEADER_CONTENT_LENGTH); } - // Was content returned in the response? - bool contentExists = this->contentChunked || (this->contentSize > 0 && !strEq(verb, HTTP_VERB_HEAD_STR)); + // Was content returned in the response? HEAD will report content but not actually return any. + bool contentExists = (this->contentChunked || this->contentSize > 0) && !strEq(verb, HTTP_VERB_HEAD_STR); this->contentEof = !contentExists; // If all content should be returned from this function then read the buffer. Also read the reponse if there has diff -Nru pgbackrest-2.15.1/src/common/io/io.c pgbackrest-2.16/src/common/io/io.c --- pgbackrest-2.15.1/src/common/io/io.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/io.c 2019-08-05 16:03:04.000000000 +0000 @@ -4,6 +4,7 @@ #include "build.auto.h" #include "common/debug.h" +#include "common/io/filter/sink.h" #include "common/io/io.h" #include "common/log.h" @@ -74,3 +75,38 @@ FUNCTION_TEST_RETURN(result); } + +/*********************************************************************************************************************************** +Read all IO but don't store it. Useful for calculating checksums, size, etc. 
+***********************************************************************************************************************************/ +bool +ioReadDrain(IoRead *read) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(IO_READ, read); + FUNCTION_TEST_END(); + + ASSERT(read != NULL); + + // Add a sink filter so we only need one read + ioFilterGroupAdd(ioReadFilterGroup(read), ioSinkNew()); + + // Check if the IO can be opened + bool result = ioReadOpen(read); + + if (result) + { + MEM_CONTEXT_TEMP_BEGIN() + { + // A single read that returns zero bytes + ioRead(read, bufNew(1)); + ASSERT(ioReadEof(read)); + + // Close the IO + ioReadClose(read); + } + MEM_CONTEXT_TEMP_END(); + } + + FUNCTION_TEST_RETURN(result); +} diff -Nru pgbackrest-2.15.1/src/common/io/io.h pgbackrest-2.16/src/common/io/io.h --- pgbackrest-2.15.1/src/common/io/io.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/io.h 2019-08-05 16:03:04.000000000 +0000 @@ -14,6 +14,7 @@ Functions ***********************************************************************************************************************************/ Buffer *ioReadBuf(IoRead *read); +bool ioReadDrain(IoRead *read); /*********************************************************************************************************************************** Getters/Setters diff -Nru pgbackrest-2.15.1/src/common/io/tls/client.c pgbackrest-2.16/src/common/io/tls/client.c --- pgbackrest-2.15.1/src/common/io/tls/client.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/io/tls/client.c 2019-08-05 16:03:04.000000000 +0000 @@ -66,6 +66,57 @@ OBJECT_DEFINE_FREE_RESOURCE_END(LOG); /*********************************************************************************************************************************** +Report TLS errors. Returns true if the command should continue and false if it should exit. 
+***********************************************************************************************************************************/ +static bool +tlsError(TlsClient *this, int code) +{ + FUNCTION_LOG_BEGIN(logLevelTrace); + FUNCTION_LOG_PARAM(TLS_CLIENT, this); + FUNCTION_LOG_PARAM(INT, code); + FUNCTION_LOG_END(); + + bool result = false; + + switch (code) + { + // The connection was closed + case SSL_ERROR_ZERO_RETURN: + { + tlsClientClose(this); + break; + } + + // Try the read/write again + case SSL_ERROR_WANT_READ: + case SSL_ERROR_WANT_WRITE: + { + result = true; + break; + } + + // A syscall failed (this usually indicates eof) + case SSL_ERROR_SYSCALL: + { + // Get the error before closing so it is not cleared + int errNo = errno; + tlsClientClose(this); + + // Throw the sys error if there is one + THROW_ON_SYS_ERROR(errNo, KernelError, "tls failed syscall"); + + break; + } + + // Some other tls error that cannot be handled + default: + THROW_FMT(ServiceError, "tls error [%d]", code); + } + + FUNCTION_LOG_RETURN(BOOL, result); +} + +/*********************************************************************************************************************************** New object ***********************************************************************************************************************************/ TlsClient * @@ -116,6 +167,9 @@ // Exclude SSL versions to only allow TLS and also disable compression SSL_CTX_set_options(this->context, (long)(SSL_OP_ALL | SSL_OP_NO_SSLv2 | SSL_OP_NO_SSLv3 | SSL_OP_NO_COMPRESSION)); + // Disable auto-retry to prevent SSL_read() from hanging + SSL_CTX_clear_mode(this->context, SSL_MODE_AUTO_RETRY); + // Set location of CA certificates if the server certificate will be verified // ------------------------------------------------------------------------------------------------------------------------- if (this->verifyPeer) @@ -275,6 +329,46 @@ } 
/*********************************************************************************************************************************** +Wait for the socket to be readable +***********************************************************************************************************************************/ +static void +tlsClientReadWait(TlsClient *this) +{ + FUNCTION_LOG_BEGIN(logLevelTrace); + FUNCTION_LOG_PARAM(TLS_CLIENT, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(this->session != NULL); + + // Initialize the file descriptor set used for select + fd_set selectSet; + FD_ZERO(&selectSet); + + // We know the socket is not negative because it passed error handling, so it is safe to cast to unsigned + FD_SET((unsigned int)this->socket, &selectSet); + + // Initialize timeout struct used for select. Recreate this structure each time since Linux (at least) will modify it. + struct timeval timeoutSelect; + timeoutSelect.tv_sec = (time_t)(this->timeout / MSEC_PER_SEC); + timeoutSelect.tv_usec = (time_t)(this->timeout % MSEC_PER_SEC * 1000); + + // Determine if there is data to be read + int result = select(this->socket + 1, &selectSet, NULL, NULL, &timeoutSelect); + THROW_ON_SYS_ERROR_FMT(result == -1, AssertError, "unable to select from '%s:%u'", strPtr(this->host), this->port); + + // If no data read after time allotted then error + if (!result) + { + THROW_FMT( + FileReadError, "timeout after %" PRIu64 "ms waiting for read from '%s:%u'", this->timeout, strPtr(this->host), + this->port); + } + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** Read from the TLS session ***********************************************************************************************************************************/ size_t @@ -293,63 +387,80 @@ ASSERT(buffer != NULL); ASSERT(!bufFull(buffer)); - ssize_t actualBytes = 0; + ssize_t result = 0; // If 
blocking read keep reading until buffer is full do { // If no tls data pending then check the socket if (!SSL_pending(this->session)) - { - // Initialize the file descriptor set used for select - fd_set selectSet; - FD_ZERO(&selectSet); - - // We know the socket is not negative because it passed error handling, so it is safe to cast to unsigned - FD_SET((unsigned int)this->socket, &selectSet); - - // Initialize timeout struct used for select. Recreate this structure each time since Linux (at least) will modify it. - struct timeval timeoutSelect; - timeoutSelect.tv_sec = (time_t)(this->timeout / MSEC_PER_SEC); - timeoutSelect.tv_usec = (time_t)(this->timeout % MSEC_PER_SEC * 1000); - - // Determine if there is data to be read - int result = select(this->socket + 1, &selectSet, NULL, NULL, &timeoutSelect); - THROW_ON_SYS_ERROR_FMT(result == -1, AssertError, "unable to select from '%s:%u'", strPtr(this->host), this->port); - - // If no data read after time allotted then error - if (!result) - { - THROW_FMT( - FileReadError, "unable to read data from '%s:%u' after %" PRIu64 "ms", - strPtr(this->host), this->port, this->timeout); - } - } + tlsClientReadWait(this); // Read and handle errors size_t expectedBytes = bufRemains(buffer); - actualBytes = SSL_read(this->session, bufRemainsPtr(buffer), (int)expectedBytes); + result = SSL_read(this->session, bufRemainsPtr(buffer), (int)expectedBytes); - cryptoError(actualBytes < 0, "unable to read from TLS"); - - // Update amount of buffer used - bufUsedInc(buffer, (size_t)actualBytes); - - // If zero bytes were returned then the connection was closed - if (actualBytes == 0) + if (result <= 0) { - tlsClientClose(this); - break; + // Break if the error indicates that we should not continue trying + if (!tlsError(this, SSL_get_error(this->session, (int)result))) + break; } + // Update amount of buffer used + else + bufUsedInc(buffer, (size_t)result); } while (block && bufRemains(buffer) > 0); - FUNCTION_LOG_RETURN(SIZE, 
(size_t)actualBytes); + FUNCTION_LOG_RETURN(SIZE, (size_t)result); } /*********************************************************************************************************************************** Write to the tls session ***********************************************************************************************************************************/ +static bool +tlsWriteContinue(TlsClient *this, int writeResult, int writeError, size_t writeSize) +{ + FUNCTION_LOG_BEGIN(logLevelTrace); + FUNCTION_LOG_PARAM(TLS_CLIENT, this); + FUNCTION_LOG_PARAM(INT, writeResult); + FUNCTION_LOG_PARAM(INT, writeError); + FUNCTION_LOG_PARAM(SIZE, writeSize); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(writeSize > 0); + + bool result = true; + + // Handle errors + if (writeResult <= 0) + { + // If error = SSL_ERROR_NONE then this is the first write attempt so continue + if (writeError != SSL_ERROR_NONE) + { + // Error if the error indicates that we should not continue trying + if (!tlsError(this, writeError)) + THROW_FMT(FileWriteError, "unable to write to tls [%d]", writeError); + + // Wait for the socket to be readable for tls renegotiation + tlsClientReadWait(this); + } + } + else + { + if ((size_t)writeResult != writeSize) + { + THROW_FMT( + FileWriteError, "unable to write to tls, write size %d does not match expected size %zu", writeResult, writeSize); + } + + result = false; + } + + FUNCTION_LOG_RETURN(BOOL, result); +} + void tlsClientWrite(THIS_VOID, const Buffer *buffer) { @@ -364,7 +475,14 @@ ASSERT(this->session != NULL); ASSERT(buffer != NULL); - cryptoError(SSL_write(this->session, bufPtr(buffer), (int)bufUsed(buffer)) != (int)bufUsed(buffer), "unable to write"); + int result = 0; + int error = SSL_ERROR_NONE; + + while (tlsWriteContinue(this, result, error, bufUsed(buffer))) + { + result = SSL_write(this->session, bufPtr(buffer), (int)bufUsed(buffer)); + error = SSL_get_error(this->session, result); + } FUNCTION_LOG_RETURN_VOID(); } diff -Nru 
pgbackrest-2.15.1/src/common/lock.c pgbackrest-2.16/src/common/lock.c --- pgbackrest-2.15.1/src/common/lock.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/lock.c 2019-08-05 16:03:04.000000000 +0000 @@ -98,7 +98,7 @@ const String *errorHint = NULL; if (errNo == EWOULDBLOCK) - errorHint = STRDEF("\nHINT: is another " PROJECT_NAME " process running?"); + errorHint = strNew("\nHINT: is another " PROJECT_NAME " process running?"); else if (errNo == EACCES) { errorHint = strNewFmt( diff -Nru pgbackrest-2.15.1/src/common/type/json.c pgbackrest-2.16/src/common/type/json.c --- pgbackrest-2.15.1/src/common/type/json.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/type/json.c 2019-08-05 16:03:04.000000000 +0000 @@ -966,12 +966,32 @@ Variant *varSub = varLstGet(vl, vlIdx); - if (varType(varSub) == varTypeKeyValue) + if (varSub == NULL) + { + strCat(jsonStr, "null"); + } + else if (varType(varSub) == varTypeBool) + { + strCat(jsonStr, strPtr(jsonFromBool(varBool(varSub)))); + } + else if (varType(varSub) == varTypeKeyValue) { // Update the depth before processing the contents of the list element strCat(indentDepth, strPtr(indentSpace)); strCat(jsonStr, strPtr(jsonFromKvInternal(varKv(varSub), indentSpace, indentDepth))); } + else if (varType(varSub) == varTypeVariantList) + { + strCat(jsonStr, strPtr(jsonFromVar(varSub, indent))); + } + else if (varType(varSub) == varTypeInt) + { + strCat(jsonStr, strPtr(jsonFromInt(varInt(varSub)))); + } + else if (varType(varSub) == varTypeInt64) + { + strCat(jsonStr, strPtr(jsonFromInt64(varInt64(varSub)))); + } else if (varType(varSub) == varTypeUInt) { strCat(jsonStr, strPtr(jsonFromUInt(varUInt(varSub)))); diff -Nru pgbackrest-2.15.1/src/common/type/list.c pgbackrest-2.16/src/common/type/list.c --- pgbackrest-2.15.1/src/common/type/list.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/type/list.c 2019-08-05 16:03:04.000000000 +0000 @@ -68,6 +68,33 @@ } 
/*********************************************************************************************************************************** +Clear items from a list +***********************************************************************************************************************************/ +List * +lstClear(List *this) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(LIST, this); + FUNCTION_TEST_END(); + + ASSERT(this != NULL); + + if (this->list != NULL) + { + MEM_CONTEXT_BEGIN(this->memContext) + { + memFree(this->list); + } + MEM_CONTEXT_END(); + + this->listSize = 0; + this->listSizeMax = 0; + } + + FUNCTION_TEST_RETURN(this); +} + +/*********************************************************************************************************************************** Get an item from the list ***********************************************************************************************************************************/ void * diff -Nru pgbackrest-2.15.1/src/common/type/list.h pgbackrest-2.16/src/common/type/list.h --- pgbackrest-2.15.1/src/common/type/list.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/common/type/list.h 2019-08-05 16:03:04.000000000 +0000 @@ -25,6 +25,7 @@ ***********************************************************************************************************************************/ List *lstNew(size_t itemSize); List *lstAdd(List *this, const void *item); +List *lstClear(List *this); void *lstGet(const List *this, unsigned int listIdx); List *lstInsert(List *this, unsigned int listIdx, const void *item); List *lstRemove(List *this, unsigned int listIdx); diff -Nru pgbackrest-2.15.1/src/config/config.auto.c pgbackrest-2.16/src/config/config.auto.c --- pgbackrest-2.15.1/src/config/config.auto.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/config.auto.c 2019-08-05 16:03:04.000000000 +0000 @@ -447,6 +447,7 @@ STRING_EXTERN(CFGOPT_REPO1_S3_HOST_STR, CFGOPT_REPO1_S3_HOST); 
STRING_EXTERN(CFGOPT_REPO1_S3_KEY_STR, CFGOPT_REPO1_S3_KEY); STRING_EXTERN(CFGOPT_REPO1_S3_KEY_SECRET_STR, CFGOPT_REPO1_S3_KEY_SECRET); +STRING_EXTERN(CFGOPT_REPO1_S3_PORT_STR, CFGOPT_REPO1_S3_PORT); STRING_EXTERN(CFGOPT_REPO1_S3_REGION_STR, CFGOPT_REPO1_S3_REGION); STRING_EXTERN(CFGOPT_REPO1_S3_TOKEN_STR, CFGOPT_REPO1_S3_TOKEN); STRING_EXTERN(CFGOPT_REPO1_S3_VERIFY_TLS_STR, CFGOPT_REPO1_S3_VERIFY_TLS); @@ -1635,6 +1636,14 @@ ) //------------------------------------------------------------------------------------------------------------------------------ + CONFIG_OPTION + ( + CONFIG_OPTION_NAME(CFGOPT_REPO1_S3_PORT) + CONFIG_OPTION_INDEX(0) + CONFIG_OPTION_DEFINE_ID(cfgDefOptRepoS3Port) + ) + + //------------------------------------------------------------------------------------------------------------------------------ CONFIG_OPTION ( CONFIG_OPTION_NAME(CFGOPT_REPO1_S3_REGION) diff -Nru pgbackrest-2.15.1/src/config/config.auto.h pgbackrest-2.16/src/config/config.auto.h --- pgbackrest-2.15.1/src/config/config.auto.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/config.auto.h 2019-08-05 16:03:04.000000000 +0000 @@ -343,6 +343,8 @@ STRING_DECLARE(CFGOPT_REPO1_S3_KEY_STR); #define CFGOPT_REPO1_S3_KEY_SECRET "repo1-s3-key-secret" STRING_DECLARE(CFGOPT_REPO1_S3_KEY_SECRET_STR); +#define CFGOPT_REPO1_S3_PORT "repo1-s3-port" + STRING_DECLARE(CFGOPT_REPO1_S3_PORT_STR); #define CFGOPT_REPO1_S3_REGION "repo1-s3-region" STRING_DECLARE(CFGOPT_REPO1_S3_REGION_STR); #define CFGOPT_REPO1_S3_TOKEN "repo1-s3-token" @@ -386,7 +388,7 @@ #define CFGOPT_TYPE "type" STRING_DECLARE(CFGOPT_TYPE_STR); -#define CFG_OPTION_TOTAL 166 +#define CFG_OPTION_TOTAL 167 /*********************************************************************************************************************************** Command enum @@ -565,6 +567,7 @@ cfgOptRepoS3Host, cfgOptRepoS3Key, cfgOptRepoS3KeySecret, + cfgOptRepoS3Port, cfgOptRepoS3Region, cfgOptRepoS3Token, cfgOptRepoS3VerifyTls, diff 
-Nru pgbackrest-2.15.1/src/config/config.c pgbackrest-2.16/src/config/config.c --- pgbackrest-2.15.1/src/config/config.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/config.c 2019-08-05 16:03:04.000000000 +0000 @@ -393,8 +393,6 @@ LockType cfgLockRemoteType(ConfigCommand commandId) { - FUNCTION_TEST_VOID(); - FUNCTION_TEST_BEGIN(); FUNCTION_TEST_PARAM(ENUM, commandId); FUNCTION_TEST_END(); diff -Nru pgbackrest-2.15.1/src/config/define.auto.c pgbackrest-2.16/src/config/define.auto.c --- pgbackrest-2.15.1/src/config/define.auto.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/define.auto.c 2019-08-05 16:03:04.000000000 +0000 @@ -3723,6 +3723,60 @@ // ----------------------------------------------------------------------------------------------------------------------------- CFGDEFDATA_OPTION ( + CFGDEFDATA_OPTION_NAME("repo-s3-port") + CFGDEFDATA_OPTION_REQUIRED(true) + CFGDEFDATA_OPTION_SECTION(cfgDefSectionGlobal) + CFGDEFDATA_OPTION_TYPE(cfgDefOptTypeInteger) + CFGDEFDATA_OPTION_INTERNAL(false) + + CFGDEFDATA_OPTION_INDEX_TOTAL(1) + CFGDEFDATA_OPTION_SECURE(false) + + CFGDEFDATA_OPTION_HELP_SECTION("repository") + CFGDEFDATA_OPTION_HELP_SUMMARY("S3 repository port.") + CFGDEFDATA_OPTION_HELP_DESCRIPTION + ( + "Port to use when connecting to the endpoint (or host if specified)." 
+ ) + + CFGDEFDATA_OPTION_COMMAND_LIST + ( + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdArchiveGet) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdArchiveGetAsync) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdArchivePush) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdArchivePushAsync) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdBackup) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdCheck) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdExpire) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdInfo) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdLocal) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdLs) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdRemote) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdRestore) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdStanzaCreate) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdStanzaDelete) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdStanzaUpgrade) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdStart) + CFGDEFDATA_OPTION_COMMAND(cfgDefCmdStop) + ) + + CFGDEFDATA_OPTION_OPTIONAL_LIST + ( + CFGDEFDATA_OPTION_OPTIONAL_ALLOW_RANGE(1, 65535) + CFGDEFDATA_OPTION_OPTIONAL_DEPEND_LIST + ( + cfgDefOptRepoType, + "s3" + ) + + CFGDEFDATA_OPTION_OPTIONAL_DEFAULT("443") + CFGDEFDATA_OPTION_OPTIONAL_PREFIX("repo") + ) + ) + + // ----------------------------------------------------------------------------------------------------------------------------- + CFGDEFDATA_OPTION + ( CFGDEFDATA_OPTION_NAME("repo-s3-region") CFGDEFDATA_OPTION_REQUIRED(true) CFGDEFDATA_OPTION_SECTION(cfgDefSectionGlobal) diff -Nru pgbackrest-2.15.1/src/config/define.auto.h pgbackrest-2.16/src/config/define.auto.h --- pgbackrest-2.15.1/src/config/define.auto.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/define.auto.h 2019-08-05 16:03:04.000000000 +0000 @@ -127,6 +127,7 @@ cfgDefOptRepoS3Host, cfgDefOptRepoS3Key, cfgDefOptRepoS3KeySecret, + cfgDefOptRepoS3Port, cfgDefOptRepoS3Region, cfgDefOptRepoS3Token, cfgDefOptRepoS3VerifyTls, diff -Nru pgbackrest-2.15.1/src/config/parse.auto.c pgbackrest-2.16/src/config/parse.auto.c --- pgbackrest-2.15.1/src/config/parse.auto.c 2019-06-25 12:29:06.000000000 
+0000 +++ pgbackrest-2.16/src/config/parse.auto.c 2019-08-05 16:03:04.000000000 +0000 @@ -2008,6 +2008,18 @@ .val = PARSE_OPTION_FLAG | PARSE_DEPRECATE_FLAG | cfgOptRepoS3KeySecret, }, + // repo-s3-port option + // ----------------------------------------------------------------------------------------------------------------------------- + { + .name = CFGOPT_REPO1_S3_PORT, + .has_arg = required_argument, + .val = PARSE_OPTION_FLAG | cfgOptRepoS3Port, + }, + { + .name = "reset-" CFGOPT_REPO1_S3_PORT, + .val = PARSE_OPTION_FLAG | PARSE_RESET_FLAG | cfgOptRepoS3Port, + }, + // repo-s3-region option and deprecations // ----------------------------------------------------------------------------------------------------------------------------- { @@ -2421,6 +2433,7 @@ cfgOptRepoS3Host, cfgOptRepoS3Key, cfgOptRepoS3KeySecret, + cfgOptRepoS3Port, cfgOptRepoS3Region, cfgOptRepoS3Token, cfgOptRepoS3VerifyTls, diff -Nru pgbackrest-2.15.1/src/config/parse.c pgbackrest-2.16/src/config/parse.c --- pgbackrest-2.15.1/src/config/parse.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/config/parse.c 2019-08-05 16:03:04.000000000 +0000 @@ -40,6 +40,12 @@ #define PGBACKREST_CONFIG_INCLUDE_PATH "conf.d" /*********************************************************************************************************************************** +Option value constants +***********************************************************************************************************************************/ +VARIANT_STRDEF_STATIC(OPTION_VALUE_0, "0"); +VARIANT_STRDEF_STATIC(OPTION_VALUE_1, "1"); + +/*********************************************************************************************************************************** Parse option flags ***********************************************************************************************************************************/ // Offset the option values so they don't conflict with getopt_long return codes @@ -899,9 +905,9 @@ if 
(dependOptionDefType == cfgDefOptTypeBoolean) { if (cfgOptionBool(dependOptionId)) - dependValue = VARSTRDEF("1"); + dependValue = OPTION_VALUE_1; else - dependValue = VARSTRDEF("0"); + dependValue = OPTION_VALUE_0; } } diff -Nru pgbackrest-2.15.1/src/configure pgbackrest-2.16/src/configure --- pgbackrest-2.15.1/src/configure 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/configure 2019-08-05 16:03:04.000000000 +0000 @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for pgBackRest 2.15. +# Generated by GNU Autoconf 2.69 for pgBackRest 2.16. # # # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. @@ -576,8 +576,8 @@ # Identity of this package. PACKAGE_NAME='pgBackRest' PACKAGE_TARNAME='pgbackrest' -PACKAGE_VERSION='2.15' -PACKAGE_STRING='pgBackRest 2.15' +PACKAGE_VERSION='2.16' +PACKAGE_STRING='pgBackRest 2.16' PACKAGE_BUGREPORT='' PACKAGE_URL='' @@ -1199,7 +1199,7 @@ # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures pgBackRest 2.15 to adapt to many kinds of systems. +\`configure' configures pgBackRest 2.16 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1261,7 +1261,7 @@ if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of pgBackRest 2.15:";; + short | recursive ) echo "Configuration of pgBackRest 2.16:";; esac cat <<\_ACEOF @@ -1348,7 +1348,7 @@ test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -pgBackRest configure 2.15 +pgBackRest configure 2.16 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -1449,7 +1449,7 @@ This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. 
-It was created by pgBackRest $as_me 2.15, which was +It was created by pgBackRest $as_me 2.16, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2861,6 +2861,57 @@ CLIBRARY="`perl -MExtUtils::Embed -e ccopts`" +# Check required pq library +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for PQconnectdb in -lpq" >&5 +$as_echo_n "checking for PQconnectdb in -lpq... " >&6; } +if ${ac_cv_lib_pq_PQconnectdb+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lpq $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char PQconnectdb (); +int +main () +{ +return PQconnectdb (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_pq_PQconnectdb=yes +else + ac_cv_lib_pq_PQconnectdb=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_pq_PQconnectdb" >&5 +$as_echo "$ac_cv_lib_pq_PQconnectdb" >&6; } +if test "x$ac_cv_lib_pq_PQconnectdb" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_LIBPQ 1 +_ACEOF + + LIBS="-lpq $LIBS" + +else + as_fn_error $? "library 'pq' is required" "$LINENO" 5 +fi + +CINCLUDE="$CINCLUDE -I`pg_config --includedir`" + + # Check required openssl libraries { $as_echo "$as_me:${as_lineno-$LINENO}: checking for EVP_get_digestbyname in -lcrypto" >&5 $as_echo_n "checking for EVP_get_digestbyname in -lcrypto... " >&6; } @@ -3568,7 +3619,7 @@ # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. 
ac_log=" -This file was extended by pgBackRest $as_me 2.15, which was +This file was extended by pgBackRest $as_me 2.16, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -3630,7 +3681,7 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -pgBackRest config.status 2.15 +pgBackRest config.status 2.16 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff -Nru pgbackrest-2.15.1/src/configure.ac pgbackrest-2.16/src/configure.ac --- pgbackrest-2.15.1/src/configure.ac 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/configure.ac 2019-08-05 16:03:04.000000000 +0000 @@ -1,6 +1,6 @@ # Initialize configuration AC_PREREQ([2.69]) -AC_INIT([pgBackRest], [2.15]) +AC_INIT([pgBackRest], [2.16]) AC_CONFIG_SRCDIR([version.h]) # Check compiler @@ -39,6 +39,10 @@ LIBS="$LIBS_BEFORE_PERL `perl -MExtUtils::Embed -e ldopts`" AC_SUBST(CLIBRARY, "`perl -MExtUtils::Embed -e ccopts`") +# Check required pq library +AC_CHECK_LIB([pq], [PQconnectdb], [], [AC_MSG_ERROR([library 'pq' is required])]) +AC_SUBST(CINCLUDE, "$CINCLUDE -I`pg_config --includedir`") + # Check required openssl libraries AC_CHECK_LIB([crypto], [EVP_get_digestbyname], [], [AC_MSG_ERROR([library 'crypto' is required])]) AC_CHECK_LIB([ssl], [SSL_new], [], [AC_MSG_ERROR([library 'ssl' is required])]) diff -Nru pgbackrest-2.15.1/src/db/db.c pgbackrest-2.16/src/db/db.c --- pgbackrest-2.15.1/src/db/db.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/db.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,313 @@ +/*********************************************************************************************************************************** +Database Client +***********************************************************************************************************************************/ +#include "build.auto.h" + 
+#include "common/debug.h" +#include "common/log.h" +#include "common/memContext.h" +#include "common/object.h" +#include "db/db.h" +#include "db/protocol.h" +#include "postgres/interface.h" +#include "postgres/version.h" +#include "version.h" + +/*********************************************************************************************************************************** +Object type +***********************************************************************************************************************************/ +struct Db +{ + MemContext *memContext; + PgClient *client; // Local PostgreSQL client + ProtocolClient *remoteClient; // Protocol client for remote db queries + unsigned int remoteIdx; // Index provided by the remote on open for subsequent calls + const String *applicationName; // Used to identify this connection in PostgreSQL + + unsigned int pgVersion; // Version as reported by the database + const String *pgDataPath; // Data directory reported by the database +}; + +OBJECT_DEFINE_FREE(DB); + +/*********************************************************************************************************************************** +Close protocol connection. No need to close a locally created PgClient since it has its own destructor. 
+***********************************************************************************************************************************/ +OBJECT_DEFINE_FREE_RESOURCE_BEGIN(DB, LOG, logLevelTrace) +{ + ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_DB_CLOSE_STR); + protocolCommandParamAdd(command, VARUINT(this->remoteIdx)); + + protocolClientExecute(this->remoteClient, command, false); +} +OBJECT_DEFINE_FREE_RESOURCE_END(LOG); + +/*********************************************************************************************************************************** +Create object +***********************************************************************************************************************************/ +Db * +dbNew(PgClient *client, ProtocolClient *remoteClient, const String *applicationName) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(PG_CLIENT, client); + FUNCTION_LOG_PARAM(PROTOCOL_CLIENT, remoteClient); + FUNCTION_LOG_PARAM(STRING, applicationName); + FUNCTION_LOG_END(); + + ASSERT((client != NULL && remoteClient == NULL) || (client == NULL && remoteClient != NULL)); + ASSERT(applicationName != NULL); + + Db *this = NULL; + + MEM_CONTEXT_NEW_BEGIN("Db") + { + this = memNew(sizeof(Db)); + this->memContext = memContextCurrent(); + + this->client = pgClientMove(client, this->memContext); + this->remoteClient = remoteClient; + this->applicationName = strDup(applicationName); + } + MEM_CONTEXT_NEW_END(); + + FUNCTION_LOG_RETURN(DB, this); +} + +/*********************************************************************************************************************************** +Execute a query +***********************************************************************************************************************************/ +static VariantList * +dbQuery(Db *this, const String *query) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_PARAM(STRING, query); + FUNCTION_LOG_END(); + + ASSERT(this != 
NULL); + ASSERT(query != NULL); + + VariantList *result = NULL; + + // Query remotely + if (this->remoteClient != NULL) + { + ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_DB_QUERY_STR); + protocolCommandParamAdd(command, VARUINT(this->remoteIdx)); + protocolCommandParamAdd(command, VARSTR(query)); + + result = varVarLst(protocolClientExecute(this->remoteClient, command, true)); + } + // Else locally + else + result = pgClientQuery(this->client, query); + + FUNCTION_LOG_RETURN(VARIANT_LIST, result); +} + +/*********************************************************************************************************************************** +Execute a command that expects no output +***********************************************************************************************************************************/ +static void +dbExec(Db *this, const String *command) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_PARAM(STRING, command); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(command != NULL); + + CHECK(dbQuery(this, command) == NULL); + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** +Execute a query that returns a single row and column +***********************************************************************************************************************************/ +static Variant * +dbQueryColumn(Db *this, const String *query) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_PARAM(STRING, query); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(query != NULL); + + VariantList *result = dbQuery(this, query); + + CHECK(varLstSize(result) == 1); + CHECK(varLstSize(varVarLst(varLstGet(result, 0))) == 1); + + FUNCTION_LOG_RETURN(VARIANT, varLstGet(varVarLst(varLstGet(result, 0)), 0)); +} + 
+/*********************************************************************************************************************************** +Execute a query that returns a single row +***********************************************************************************************************************************/ +static VariantList * +dbQueryRow(Db *this, const String *query) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_PARAM(STRING, query); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + ASSERT(query != NULL); + + VariantList *result = dbQuery(this, query); + + CHECK(varLstSize(result) == 1); + + FUNCTION_LOG_RETURN(VARIANT_LIST, varVarLst(varLstGet(result, 0))); +} + +/*********************************************************************************************************************************** +Open the db connection +***********************************************************************************************************************************/ +void +dbOpen(Db *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + + MEM_CONTEXT_TEMP_BEGIN() + { + // Open the connection + if (this->remoteClient != NULL) + { + ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_DB_OPEN_STR); + this->remoteIdx = varUIntForce(protocolClientExecute(this->remoteClient, command, true)); + + // Set a callback to notify the remote when a connection is closed + memContextCallbackSet(this->memContext, dbFreeResource, this); + } + else + pgClientOpen(this->client); + + // Set search_path to prevent overrides of the functions we expect to call. All queries should also be schema-qualified, + // but this is an extra level protection. 
+ dbExec(this, STRDEF("set search_path = 'pg_catalog'")); + + // Query the version and data_directory + VariantList *row = dbQueryRow( + this, + STRDEF( + "select (select setting from pg_catalog.pg_settings where name = 'server_version_num')::int4," + " (select setting from pg_catalog.pg_settings where name = 'data_directory')::text")); + + // Strip the minor version off since we don't need it. In the future it might be a good idea to warn users when they are + // running an old minor version. + this->pgVersion = varUIntForce(varLstGet(row, 0)) / 100 * 100; + + // Store the data directory that PostgreSQL is running in. This can be compared to the configured pgBackRest directory when + // validating the configuration. + MEM_CONTEXT_BEGIN(this->memContext) + { + this->pgDataPath = strDup(varStr(varLstGet(row, 1))); + } + MEM_CONTEXT_END(); + + if (this->pgVersion >= PG_VERSION_APPLICATION_NAME) + dbExec(this, strNewFmt("set application_name = '%s'", strPtr(this->applicationName))); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** +Is this instance a standby? 
+***********************************************************************************************************************************/ +bool +dbIsStandby(Db *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + + bool result = false; + + if (this->pgVersion >= PG_VERSION_HOT_STANDBY) + { + result = varBool(dbQueryColumn(this, STRDEF("select pg_catalog.pg_is_in_recovery()"))); + } + + FUNCTION_LOG_RETURN(BOOL, result); +} + +/*********************************************************************************************************************************** +Switch the WAL segment and return the segment that should have been archived +***********************************************************************************************************************************/ +String * +dbWalSwitch(Db *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(DB, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + + String *result = NULL; + + MEM_CONTEXT_TEMP_BEGIN() + { + // Create a restore point to ensure current WAL will be archived. For versions < 9.1 activity will need to be generated by + // the user if there have been no writes since the last WAL switch. 
+ if (this->pgVersion >= PG_VERSION_RESTORE_POINT) + dbQueryColumn(this, STRDEF("select pg_catalog.pg_create_restore_point('" PROJECT_NAME " Archive Check')::text")); + + // Request a WAL segment switch + const char *walName = strPtr(pgWalName(this->pgVersion)); + const String *walFileName = varStr( + dbQueryColumn(this, strNewFmt("select pg_catalog.pg_%sfile_name(pg_catalog.pg_switch_%s())::text", walName, walName))); + + // Copy WAL segment name to the calling context + memContextSwitch(MEM_CONTEXT_OLD()); + result = strDup(walFileName); + memContextSwitch(MEM_CONTEXT_TEMP()); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(STRING, result); +} + +/*********************************************************************************************************************************** +Move the object to a new context +***********************************************************************************************************************************/ +Db * +dbMove(Db *this, MemContext *parentNew) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(DB, this); + FUNCTION_TEST_PARAM(MEM_CONTEXT, parentNew); + FUNCTION_TEST_END(); + + ASSERT(parentNew != NULL); + + if (this != NULL) + memContextMove(this->memContext, parentNew); + + FUNCTION_TEST_RETURN(this); +} + +/*********************************************************************************************************************************** +Render as string for logging +***********************************************************************************************************************************/ +String * +dbToLog(const Db *this) +{ + return strNewFmt( + "{client: %s, remoteClient: %s}", this->client == NULL ? "null" : strPtr(pgClientToLog(this->client)), + this->remoteClient == NULL ? 
"null" : strPtr(protocolClientToLog(this->remoteClient))); +} diff -Nru pgbackrest-2.15.1/src/db/db.h pgbackrest-2.16/src/db/db.h --- pgbackrest-2.15.1/src/db/db.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/db.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,51 @@ +/*********************************************************************************************************************************** +Database Client + +Implements the required PostgreSQL queries and commands. Notice that there is no general purpose query function -- all queries are +expected to be embedded in this object. +***********************************************************************************************************************************/ +#ifndef DB_DB_H +#define DB_DB_H + +#include "postgres/client.h" +#include "protocol/client.h" + +/*********************************************************************************************************************************** +Object type +***********************************************************************************************************************************/ +#define DB_TYPE Db +#define DB_PREFIX db + +typedef struct Db Db; + +/*********************************************************************************************************************************** +Constructor +***********************************************************************************************************************************/ +Db *dbNew(PgClient *client, ProtocolClient *remoteClient, const String *applicationName); + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +void dbOpen(Db *this); +bool dbIsStandby(Db *this); +String *dbWalSwitch(Db *this); +void dbClose(Db *this); + +Db *dbMove(Db *this, MemContext 
*parentNew); + +/*********************************************************************************************************************************** +Destructor +***********************************************************************************************************************************/ +void dbFree(Db *this); + +/*********************************************************************************************************************************** +Macros for function logging +***********************************************************************************************************************************/ +String *dbToLog(const Db *this); + +#define FUNCTION_LOG_DB_TYPE \ + Db * +#define FUNCTION_LOG_DB_FORMAT(value, buffer, bufferSize) \ + FUNCTION_LOG_STRING_OBJECT_FORMAT(value, dbToLog, buffer, bufferSize) + +#endif diff -Nru pgbackrest-2.15.1/src/db/helper.c pgbackrest-2.16/src/db/helper.c --- pgbackrest-2.15.1/src/db/helper.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/helper.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,127 @@ +/*********************************************************************************************************************************** +Database Helper +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "common/debug.h" +#include "config/config.h" +#include "db/helper.h" +#include "postgres/interface.h" +#include "protocol/helper.h" +#include "version.h" + +/*********************************************************************************************************************************** +Get specified cluster +***********************************************************************************************************************************/ +static Db * +dbGetId(unsigned int pgId) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(UINT, pgId); + FUNCTION_LOG_END(); + + ASSERT(pgId > 
0); + + Db *result = NULL; + + MEM_CONTEXT_TEMP_BEGIN() + { + const String *applicationName = strNewFmt(PROJECT_NAME " [%s]", cfgCommandName(cfgCommand())); + + if (pgIsLocal(pgId)) + { + result = dbNew( + pgClientNew( + cfgOptionStr(cfgOptPgSocketPath + pgId - 1), cfgOptionUInt(cfgOptPgPort + pgId - 1), PG_DB_POSTGRES_STR, NULL, + (TimeMSec)(cfgOptionDbl(cfgOptDbTimeout) * MSEC_PER_SEC)), + NULL, applicationName); + } + else + result = dbNew(NULL, protocolRemoteGet(protocolStorageTypePg, pgId), applicationName); + + dbMove(result, MEM_CONTEXT_OLD()); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(DB, result); +} + +/*********************************************************************************************************************************** +Get primary cluster or primary and standby cluster +***********************************************************************************************************************************/ +DbGetResult +dbGet(bool primaryOnly, bool primaryRequired) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(BOOL, primaryOnly); + FUNCTION_LOG_PARAM(BOOL, primaryRequired); + FUNCTION_LOG_END(); + + DbGetResult result = {0}; + + MEM_CONTEXT_TEMP_BEGIN() + { + // Loop through to look for primary and standby (if required) + for (unsigned int pgIdx = 0; pgIdx < cfgOptionIndexTotal(cfgOptPgPath); pgIdx++) + { + if (cfgOptionTest(cfgOptPgHost + pgIdx) || cfgOptionTest(cfgOptPgPath + pgIdx)) + { + Db *db = NULL; + bool standby = false; + + TRY_BEGIN() + { + db = dbGetId(pgIdx + 1); + dbOpen(db); + standby = dbIsStandby(db); + } + CATCH_ANY() + { + dbFree(db); + db = NULL; + + LOG_WARN("unable to check pg-%u: [%s] %s", pgIdx + 1, errorTypeName(errorType()), errorMessage()); + } + TRY_END(); + + // Was the connection successful + if (db != NULL) + { + // Is this cluster a standby + if (standby) + { + // If a standby has not already been found then assign it + if (result.standbyId == 0 && !primaryOnly) + { + result.standbyId = 
pgIdx + 1; + result.standby = db; + } + // Else close the connection since we don't need it + else + dbFree(db); + } + // Else is a primary + else + { + // Error if more than one primary was found + if (result.primaryId != 0) + THROW(DbConnectError, "more than one primary cluster found"); + + result.primaryId = pgIdx + 1; + result.primary = db; + } + } + } + } + + // Error if no primary was found + if (result.primaryId == 0 && primaryRequired) + THROW(DbConnectError, "unable to find primary cluster - cannot proceed"); + + dbMove(result.primary, MEM_CONTEXT_OLD()); + dbMove(result.standby, MEM_CONTEXT_OLD()); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(DB_GET_RESULT, result); +} diff -Nru pgbackrest-2.15.1/src/db/helper.h pgbackrest-2.16/src/db/helper.h --- pgbackrest-2.15.1/src/db/helper.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/helper.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,34 @@ +/*********************************************************************************************************************************** +Database Helper + +Helper functions for getting connections to PostgreSQL. 
+***********************************************************************************************************************************/ +#ifndef DB_HELPER_H +#define DB_HELPER_H + +#include + +#include "db/db.h" + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +typedef struct DbGetResult +{ + unsigned int primaryId; + Db *primary; + unsigned int standbyId; + Db *standby; +} DbGetResult; + +DbGetResult dbGet(bool primaryOnly, bool primaryRequired); + +/*********************************************************************************************************************************** +Macros for function logging +***********************************************************************************************************************************/ +#define FUNCTION_LOG_DB_GET_RESULT_TYPE \ + DbGetResult +#define FUNCTION_LOG_DB_GET_RESULT_FORMAT(value, buffer, bufferSize) \ + objToLog(&value, "DbGetResult", buffer, bufferSize) + +#endif diff -Nru pgbackrest-2.15.1/src/db/protocol.c pgbackrest-2.16/src/db/protocol.c --- pgbackrest-2.15.1/src/db/protocol.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/protocol.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,96 @@ +/*********************************************************************************************************************************** +Db Protocol Handler +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include "common/debug.h" +#include "common/io/io.h" +#include "common/log.h" +#include "common/memContext.h" +#include "common/type/list.h" +#include "config/config.h" +#include "db/protocol.h" +#include "postgres/client.h" +#include "postgres/interface.h" + 
+/*********************************************************************************************************************************** +Constants +***********************************************************************************************************************************/ +STRING_EXTERN(PROTOCOL_COMMAND_DB_OPEN_STR, PROTOCOL_COMMAND_DB_OPEN); +STRING_EXTERN(PROTOCOL_COMMAND_DB_QUERY_STR, PROTOCOL_COMMAND_DB_QUERY); +STRING_EXTERN(PROTOCOL_COMMAND_DB_CLOSE_STR, PROTOCOL_COMMAND_DB_CLOSE); + +/*********************************************************************************************************************************** +Local variables +***********************************************************************************************************************************/ +static struct +{ + List *pgClientList; // List of db objects +} dbProtocolLocal; + +/*********************************************************************************************************************************** +Process db protocol requests +***********************************************************************************************************************************/ +bool +dbProtocol(const String *command, const VariantList *paramList, ProtocolServer *server) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(STRING, command); + FUNCTION_LOG_PARAM(VARIANT_LIST, paramList); + FUNCTION_LOG_PARAM(PROTOCOL_SERVER, server); + FUNCTION_LOG_END(); + + ASSERT(command != NULL); + + // Attempt to satisfy the request -- we may get requests that are meant for other handlers + bool found = true; + + MEM_CONTEXT_TEMP_BEGIN() + { + if (strEq(command, PROTOCOL_COMMAND_DB_OPEN_STR)) + { + // If the db list does not exist then create it in the calling context (which should be persistent) + if (dbProtocolLocal.pgClientList == NULL) + { + memContextSwitch(MEM_CONTEXT_OLD()); + dbProtocolLocal.pgClientList = lstNew(sizeof(PgClient *)); + memContextSwitch(MEM_CONTEXT_TEMP()); + } + + // Add db 
to the list + unsigned int dbIdx = lstSize(dbProtocolLocal.pgClientList); + + MEM_CONTEXT_BEGIN(lstMemContext(dbProtocolLocal.pgClientList)) + { + // Only a single db is passed to the remote + PgClient *pgClient = pgClientNew( + cfgOptionStr(cfgOptPgSocketPath), cfgOptionUInt(cfgOptPgPort), PG_DB_POSTGRES_STR, NULL, + (TimeMSec)(cfgOptionDbl(cfgOptDbTimeout) * MSEC_PER_SEC)); + pgClientOpen(pgClient); + + lstAdd(dbProtocolLocal.pgClientList, &pgClient); + } + MEM_CONTEXT_END(); + + // Return db index which should be included in subsequent calls + protocolServerResponse(server, VARUINT(dbIdx)); + } + else if (strEq(command, PROTOCOL_COMMAND_DB_QUERY_STR) || strEq(command, PROTOCOL_COMMAND_DB_CLOSE_STR)) + { + PgClient *pgClient = *(PgClient **)lstGet(dbProtocolLocal.pgClientList, varUIntForce(varLstGet(paramList, 0))); + + if (strEq(command, PROTOCOL_COMMAND_DB_QUERY_STR)) + protocolServerResponse(server, varNewVarLst(pgClientQuery(pgClient, varStr(varLstGet(paramList, 1))))); + else + { + pgClientClose(pgClient); + protocolServerResponse(server, NULL); + } + } + else + found = false; + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(BOOL, found); +} diff -Nru pgbackrest-2.15.1/src/db/protocol.h pgbackrest-2.16/src/db/protocol.h --- pgbackrest-2.15.1/src/db/protocol.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/db/protocol.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,27 @@ +/*********************************************************************************************************************************** +Db Protocol Handler +***********************************************************************************************************************************/ +#ifndef DB_PROTOCOL_H +#define DB_PROTOCOL_H + +#include "common/type/string.h" +#include "common/type/variantList.h" +#include "protocol/client.h" +#include "protocol/server.h" + 
+/*********************************************************************************************************************************** +Constants +***********************************************************************************************************************************/ +#define PROTOCOL_COMMAND_DB_OPEN "dbOpen" + STRING_DECLARE(PROTOCOL_COMMAND_DB_OPEN_STR); +#define PROTOCOL_COMMAND_DB_QUERY "dbQuery" + STRING_DECLARE(PROTOCOL_COMMAND_DB_QUERY_STR); +#define PROTOCOL_COMMAND_DB_CLOSE "dbClose" + STRING_DECLARE(PROTOCOL_COMMAND_DB_CLOSE_STR); + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +bool dbProtocol(const String *command, const VariantList *paramList, ProtocolServer *server); + +#endif diff -Nru pgbackrest-2.15.1/src/info/infoArchive.c pgbackrest-2.16/src/info/infoArchive.c --- pgbackrest-2.15.1/src/info/infoArchive.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/info/infoArchive.c 2019-08-05 16:03:04.000000000 +0000 @@ -19,6 +19,12 @@ #include "storage/helper.h" /*********************************************************************************************************************************** +Constants +***********************************************************************************************************************************/ +STRING_EXTERN(INFO_ARCHIVE_PATH_FILE_STR, INFO_ARCHIVE_PATH_FILE); +STRING_EXTERN(INFO_ARCHIVE_PATH_FILE_COPY_STR, INFO_ARCHIVE_PATH_FILE_COPY); + +/*********************************************************************************************************************************** Object type ***********************************************************************************************************************************/ struct InfoArchive diff -Nru 
pgbackrest-2.15.1/src/info/infoArchive.h pgbackrest-2.16/src/info/infoArchive.h --- pgbackrest-2.15.1/src/info/infoArchive.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/info/infoArchive.h 2019-08-05 16:03:04.000000000 +0000 @@ -23,6 +23,11 @@ #define INFO_ARCHIVE_FILE "archive.info" #define REGEX_ARCHIVE_DIR_DB_VERSION "^[0-9]+(\\.[0-9]+)*-[0-9]+$" +#define INFO_ARCHIVE_PATH_FILE STORAGE_REPO_ARCHIVE "/" INFO_ARCHIVE_FILE + STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_STR); +#define INFO_ARCHIVE_PATH_FILE_COPY INFO_ARCHIVE_PATH_FILE INFO_COPY_EXT + STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_COPY_STR); + /*********************************************************************************************************************************** Constructor ***********************************************************************************************************************************/ diff -Nru pgbackrest-2.15.1/src/info/infoBackup.c pgbackrest-2.16/src/info/infoBackup.c --- pgbackrest-2.15.1/src/info/infoBackup.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/info/infoBackup.c 2019-08-05 16:03:04.000000000 +0000 @@ -12,7 +12,6 @@ #include "common/ini.h" #include "common/log.h" #include "common/memContext.h" -#include "common/ini.h" #include "common/object.h" #include "common/regExp.h" #include "common/type/json.h" @@ -25,7 +24,7 @@ #include "storage/helper.h" /*********************************************************************************************************************************** -Internal constants +Constants ??? 
INFO_BACKUP_SECTION should be in a separate include since it will also be used when reading the manifest ***********************************************************************************************************************************/ #define INFO_BACKUP_SECTION "backup" @@ -37,6 +36,9 @@ VARIANT_STRDEF_STATIC(INFO_BACKUP_KEY_BACKUP_INFO_SIZE_DELTA_VAR, "backup-info-size-delta"); VARIANT_STRDEF_STATIC(INFO_BACKUP_KEY_BACKUP_REFERENCE_VAR, "backup-reference"); +STRING_EXTERN(INFO_BACKUP_PATH_FILE_STR, INFO_BACKUP_PATH_FILE); +STRING_EXTERN(INFO_BACKUP_PATH_FILE_COPY_STR, INFO_BACKUP_PATH_FILE_COPY); + /*********************************************************************************************************************************** Object type ***********************************************************************************************************************************/ @@ -152,48 +154,6 @@ } /*********************************************************************************************************************************** -Checks the backup info file's DB section against the PG version, system id, catolog and constrol version passed in and returns -the history id of the current PG database. 
-***********************************************************************************************************************************/ -unsigned int -infoBackupCheckPg( - const InfoBackup *this, - unsigned int pgVersion, - uint64_t pgSystemId, - uint32_t pgCatalogVersion, - uint32_t pgControlVersion) -{ - FUNCTION_LOG_BEGIN(logLevelTrace); - FUNCTION_LOG_PARAM(INFO_BACKUP, this); - FUNCTION_LOG_PARAM(UINT, pgVersion); - FUNCTION_LOG_PARAM(UINT64, pgSystemId); - FUNCTION_LOG_PARAM(UINT32, pgCatalogVersion); - FUNCTION_LOG_PARAM(UINT32, pgControlVersion); - FUNCTION_LOG_END(); - - ASSERT(this != NULL); - - InfoPgData backupPg = infoPgDataCurrent(this->infoPg); - - if (backupPg.version != pgVersion || backupPg.systemId != pgSystemId) - THROW(BackupMismatchError, strPtr(strNewFmt( - "database version = %s, system-id %" PRIu64 " does not match backup version = %s, system-id = %" PRIu64 "\n" - "HINT: is this the correct stanza?", - strPtr(pgVersionToStr(pgVersion)), pgSystemId, strPtr(pgVersionToStr(backupPg.version)), backupPg.systemId))); - - if (backupPg.catalogVersion != pgCatalogVersion || backupPg.controlVersion != pgControlVersion) - { - THROW(BackupMismatchError, strPtr(strNewFmt( - "database control-version = %" PRIu32 ", catalog-version %" PRIu32 - " does not match backup control-version = %" PRIu32 ", catalog-version = %" PRIu32 "\n" - "HINT: this may be a symptom of database or repository corruption!", - pgControlVersion, pgCatalogVersion, backupPg.controlVersion, backupPg.catalogVersion))); - } - - FUNCTION_LOG_RETURN(UINT, backupPg.id); -} - -/*********************************************************************************************************************************** Save to file ***********************************************************************************************************************************/ void diff -Nru pgbackrest-2.15.1/src/info/infoBackup.h pgbackrest-2.16/src/info/infoBackup.h --- pgbackrest-2.15.1/src/info/infoBackup.h 
2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/info/infoBackup.h 2019-08-05 16:03:04.000000000 +0000 @@ -22,6 +22,11 @@ ***********************************************************************************************************************************/ #define INFO_BACKUP_FILE "backup.info" +#define INFO_BACKUP_PATH_FILE STORAGE_REPO_BACKUP "/" INFO_BACKUP_FILE + STRING_DECLARE(INFO_BACKUP_PATH_FILE_STR); +#define INFO_BACKUP_PATH_FILE_COPY INFO_BACKUP_PATH_FILE INFO_COPY_EXT + STRING_DECLARE(INFO_BACKUP_PATH_FILE_COPY_STR); + /*********************************************************************************************************************************** Information about an existing backup ***********************************************************************************************************************************/ @@ -60,8 +65,6 @@ /*********************************************************************************************************************************** Functions ***********************************************************************************************************************************/ -unsigned int infoBackupCheckPg( - const InfoBackup *this, unsigned int pgVersion, uint64_t pgSystemId, uint32_t pgCatalogVersion, uint32_t pgControlVersion); // Remove a backup from the current section void infoBackupDataDelete(const InfoBackup *this, const String *backupDeleteLabel); void infoBackupSave( diff -Nru pgbackrest-2.15.1/src/info/infoPg.c pgbackrest-2.16/src/info/infoPg.c --- pgbackrest-2.15.1/src/info/infoPg.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/info/infoPg.c 2019-08-05 16:03:04.000000000 +0000 @@ -192,8 +192,8 @@ iniSet(ini, INFO_SECTION_DB_STR, varStr(INFO_KEY_DB_SYSTEM_ID_VAR), jsonFromUInt64(pgData.systemId)); iniSet(ini, INFO_SECTION_DB_STR, varStr(INFO_KEY_DB_VERSION_VAR), jsonFromStr(pgVersionToStr(pgData.version))); - // Set the db history section - for (unsigned int pgDataIdx = 0; pgDataIdx < 
infoPgDataTotal(this); pgDataIdx++) + // Set the db history section in reverse so oldest history is first instead of last to be consistent with load + for (unsigned int pgDataIdx = infoPgDataTotal(this) - 1; (int)pgDataIdx >= 0; pgDataIdx--) { InfoPgData pgData = infoPgData(this, pgDataIdx); diff -Nru pgbackrest-2.15.1/src/main.c pgbackrest-2.16/src/main.c --- pgbackrest-2.15.1/src/main.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/main.c 2019-08-05 16:03:04.000000000 +0000 @@ -9,6 +9,7 @@ #include "command/archive/get/get.h" #include "command/archive/push/push.h" +#include "command/check/check.h" #include "command/command.h" #include "command/expire/expire.h" #include "command/help/help.h" @@ -125,7 +126,9 @@ // ----------------------------------------------------------------------------------------------------------------- case cfgCmdCheck: { + // Functionality is currently split between Perl and C perlExec(); + cmdCheck(); break; } @@ -158,15 +161,7 @@ // ----------------------------------------------------------------------------------------------------------------- case cfgCmdLocal: { - if (strEq(cfgOptionStr(cfgOptCommand), CFGCMD_ARCHIVE_GET_ASYNC_STR) || - strEq(cfgOptionStr(cfgOptCommand), CFGCMD_ARCHIVE_PUSH_ASYNC_STR) || - strEq(cfgOptionStr(cfgOptCommand), CFGCMD_RESTORE_STR)) - { - cmdLocal(STDIN_FILENO, STDOUT_FILENO); - } - else - perlExec(); - + cmdLocal(STDIN_FILENO, STDOUT_FILENO); break; } diff -Nru pgbackrest-2.15.1/src/Makefile.in pgbackrest-2.16/src/Makefile.in --- pgbackrest-2.15.1/src/Makefile.in 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/Makefile.in 2019-08-05 16:03:04.000000000 +0000 @@ -52,11 +52,15 @@ command/archive/push/protocol.c \ command/archive/push/push.c \ command/backup/common.c \ + command/backup/file.c \ + command/backup/pageChecksum.c \ + command/check/check.c \ + command/backup/protocol.c \ command/expire/expire.c \ command/help/help.c \ command/info/info.c \ command/command.c \ - 
command/control/control.c \ + command/control/common.c \ command/local/local.c \ command/restore/file.c \ command/restore/protocol.c \ @@ -80,6 +84,7 @@ common/io/filter/buffer.c \ common/io/filter/filter.c \ common/io/filter/group.c \ + common/io/filter/sink.c \ common/io/filter/size.c \ common/io/handleRead.c \ common/io/handleWrite.c \ @@ -116,6 +121,9 @@ config/load.c \ config/parse.c \ config/protocol.c \ + db/db.c \ + db/helper.c \ + db/protocol.c \ info/info.c \ info/infoArchive.c \ info/infoBackup.c \ @@ -123,6 +131,7 @@ info/infoPg.c \ perl/config.c \ perl/exec.c \ + postgres/client.c \ postgres/interface.c \ postgres/interface/v083.c \ postgres/interface/v084.c \ @@ -196,7 +205,7 @@ command/archive/common.o: command/archive/common.c build.auto.h command/archive/common.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h common/wait.h config/config.auto.h config/config.h config/define.auto.h config/define.h postgres/version.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/common.c -o command/archive/common.o -command/archive/get/file.o: command/archive/get/file.c build.auto.h command/archive/common.h command/archive/get/file.h command/control/control.h common/assert.h common/compress/gzip/common.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h 
common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h info/info.h info/infoArchive.h info/infoPg.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h +command/archive/get/file.o: command/archive/get/file.c build.auto.h command/archive/common.h command/archive/get/file.h command/control/common.h common/assert.h common/compress/gzip/common.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h info/info.h info/infoArchive.h info/infoPg.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/get/file.c -o command/archive/get/file.o command/archive/get/get.o: command/archive/get/get.c build.auto.h command/archive/common.h command/archive/get/file.h command/archive/get/protocol.h command/command.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/fork.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h 
common/type/stringList.h common/type/variant.h common/type/variantList.h common/wait.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/exec.h perl/exec.h postgres/interface.h protocol/client.h protocol/command.h protocol/helper.h protocol/parallel.h protocol/parallelJob.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h @@ -205,26 +214,35 @@ command/archive/get/protocol.o: command/archive/get/protocol.c build.auto.h command/archive/get/file.h command/archive/get/protocol.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/get/protocol.c -o command/archive/get/protocol.o -command/archive/push/file.o: command/archive/push/file.c build.auto.h command/archive/common.h command/archive/push/file.h command/control/control.h common/assert.h common/compress/gzip/common.h common/compress/gzip/compress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h 
config/config.auto.h config/config.h config/define.auto.h config/define.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h +command/archive/push/file.o: command/archive/push/file.c build.auto.h command/archive/common.h command/archive/push/file.h command/control/common.h common/assert.h common/compress/gzip/common.h common/compress/gzip/compress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/push/file.c -o command/archive/push/file.o command/archive/push/protocol.o: command/archive/push/protocol.c build.auto.h command/archive/push/file.h command/archive/push/protocol.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/push/protocol.c -o 
command/archive/push/protocol.o -command/archive/push/push.o: command/archive/push/push.c build.auto.h command/archive/common.h command/archive/push/file.h command/archive/push/protocol.h command/command.h command/control/control.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/fork.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h common/wait.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/exec.h info/info.h info/infoArchive.h info/infoPg.h postgres/interface.h protocol/client.h protocol/command.h protocol/helper.h protocol/parallel.h protocol/parallelJob.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h +command/archive/push/push.o: command/archive/push/push.c build.auto.h command/archive/common.h command/archive/push/file.h command/archive/push/protocol.h command/command.h command/control/common.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/fork.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h common/wait.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/exec.h info/info.h info/infoArchive.h info/infoPg.h postgres/interface.h protocol/client.h protocol/command.h protocol/helper.h protocol/parallel.h protocol/parallelJob.h 
protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/archive/push/push.c -o command/archive/push/push.o command/backup/common.o: command/backup/common.c build.auto.h command/backup/common.h common/assert.h common/debug.h common/error.auto.h common/error.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/string.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/backup/common.c -o command/backup/common.o +command/backup/file.o: command/backup/file.c build.auto.h command/backup/file.h command/backup/pageChecksum.h common/assert.h common/compress/gzip/common.h common/compress/gzip/compress.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/filter/size.h common/io/io.h common/io/read.h common/io/write.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/backup/file.c -o command/backup/file.o + command/backup/pageChecksum.o: command/backup/pageChecksum.c build.auto.h command/backup/pageChecksum.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/filter.intern.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h 
postgres/pageChecksum.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/backup/pageChecksum.c -o command/backup/pageChecksum.o +command/backup/protocol.o: command/backup/protocol.c build.auto.h command/backup/file.h command/backup/protocol.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/backup/protocol.c -o command/backup/protocol.o + +command/check/check.o: command/check/check.c build.auto.h command/archive/common.h command/check/check.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h db/db.h db/helper.h info/info.h info/infoArchive.h info/infoPg.h postgres/client.h protocol/client.h protocol/command.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/check/check.c -o command/check/check.o + command/command.o: command/command.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h 
common/io/filter/filter.h common/io/filter/group.h common/io/http/client.h common/io/http/header.h common/io/http/query.h common/io/read.h common/io/tls/client.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h version.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/command.c -o command/command.o -command/control/control.o: command/control/control.c build.auto.h command/control/control.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h - $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/control/control.c -o command/control/control.o +command/control/common.o: command/control/common.c build.auto.h command/control/common.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h storage/helper.h storage/info.h storage/read.h storage/storage.h 
storage/write.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/control/common.c -o command/control/common.o command/expire/expire.o: command/expire/expire.c build.auto.h command/archive/common.h command/backup/common.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/list.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h info/info.h info/infoArchive.h info/infoBackup.h info/infoManifest.h info/infoPg.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/expire/expire.c -o command/expire/expire.o @@ -235,10 +253,10 @@ command/info/info.o: command/info/info.c build.auto.h command/archive/common.h command/info/info.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/handleWrite.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/json.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h info/info.h info/infoArchive.h info/infoBackup.h info/infoPg.h perl/exec.h postgres/interface.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/info/info.c -o command/info/info.o 
-command/local/local.o: command/local/local.c build.auto.h command/archive/get/protocol.h command/archive/push/protocol.h command/restore/protocol.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/handleRead.h common/io/handleWrite.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/protocol.h protocol/client.h protocol/command.h protocol/helper.h protocol/server.h +command/local/local.o: command/local/local.c build.auto.h command/archive/get/protocol.h command/archive/push/protocol.h command/backup/protocol.h command/restore/protocol.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/handleRead.h common/io/handleWrite.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/protocol.h protocol/client.h protocol/command.h protocol/helper.h protocol/server.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/local/local.c -o command/local/local.o -command/remote/remote.o: command/remote/remote.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/handleRead.h common/io/handleWrite.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h 
common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/protocol.h protocol/client.h protocol/command.h protocol/helper.h protocol/server.h storage/remote/protocol.h +command/remote/remote.o: command/remote/remote.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/handleRead.h common/io/handleWrite.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/protocol.h db/protocol.h protocol/client.h protocol/command.h protocol/helper.h protocol/server.h storage/remote/protocol.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c command/remote/remote.c -o command/remote/remote.o command/restore/file.o: command/restore/file.c build.auto.h command/restore/file.h common/assert.h common/compress/gzip/common.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/filter/size.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h storage/helper.h storage/info.h storage/read.h 
storage/storage.h storage/write.h @@ -307,6 +325,9 @@ common/io/filter/group.o: common/io/filter/group.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/buffer.h common/io/filter/filter.h common/io/filter/filter.intern.h common/io/filter/group.h common/io/io.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/list.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c common/io/filter/group.c -o common/io/filter/group.o +common/io/filter/sink.o: common/io/filter/sink.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/filter.intern.h common/io/filter/sink.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c common/io/filter/sink.c -o common/io/filter/sink.o + common/io/filter/size.o: common/io/filter/size.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/filter.intern.h common/io/filter/size.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c common/io/filter/size.c -o common/io/filter/size.o @@ -331,7 +352,7 @@ common/io/http/query.o: common/io/http/query.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/http/common.h 
common/io/http/query.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c common/io/http/query.c -o common/io/http/query.o -common/io/io.o: common/io/io.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/io.h common/log.h common/logLevel.h common/stackTrace.h common/type/convert.h +common/io/io.o: common/io/io.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/sink.h common/io/io.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/variant.h common/type/variantList.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c common/io/io.c -o common/io/io.o common/io/read.o: common/io/read.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/read.intern.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/variant.h common/type/variantList.h @@ -412,6 +433,15 @@ config/protocol.o: config/protocol.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h 
config/config.h config/define.auto.h config/define.h config/protocol.h protocol/client.h protocol/command.h protocol/server.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c config/protocol.c -o config/protocol.o +db/db.o: db/db.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h db/db.h db/protocol.h postgres/client.h postgres/interface.h postgres/version.h protocol/client.h protocol/command.h protocol/server.h version.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c db/db.c -o db/db.o + +db/helper.o: db/helper.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h db/db.h db/helper.h postgres/client.h postgres/interface.h protocol/client.h protocol/command.h protocol/helper.h version.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c db/helper.c -o db/helper.o + +db/protocol.o: db/protocol.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/write.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/list.h 
common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h db/protocol.h postgres/client.h postgres/interface.h protocol/client.h protocol/command.h protocol/server.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c db/protocol.c -o db/protocol.o + info/info.o: info/info.c build.auto.h common/assert.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/filter.intern.h common/io/filter/group.h common/io/read.h common/io/write.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/json.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h info/info.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h version.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c info/info.c -o info/info.o @@ -427,15 +457,18 @@ info/infoPg.o: info/infoPg.c build.auto.h common/assert.h common/crypto/common.h common/debug.h common/error.auto.h common/error.h common/ini.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/json.h common/type/keyValue.h common/type/list.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h info/info.h info/infoPg.h postgres/interface.h postgres/version.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c info/infoPg.c -o info/infoPg.o -main.o: main.c build.auto.h command/archive/get/get.h command/archive/push/push.h 
command/command.h command/expire/expire.h command/help/help.h command/info/info.h command/local/local.h command/remote/remote.h command/storage/list.h common/assert.h common/debug.h common/error.auto.h common/error.h common/exit.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/load.h perl/exec.h postgres/interface.h version.h +main.o: main.c build.auto.h command/archive/get/get.h command/archive/push/push.h command/check/check.h command/command.h command/expire/expire.h command/help/help.h command/info/info.h command/local/local.h command/remote/remote.h command/storage/list.h common/assert.h common/debug.h common/error.auto.h common/error.h common/exit.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/load.h perl/exec.h postgres/interface.h version.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c main.c -o main.o perl/config.o: perl/config.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/json.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c perl/config.c -o perl/config.o -perl/exec.o: perl/exec.c ../libc/LibC.h build.auto.h 
common/assert.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/encode.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/filter.intern.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/read.intern.h common/io/write.h common/io/write.intern.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/load.h config/parse.h perl/config.h perl/embed.auto.c perl/exec.h perl/libc.auto.c postgres/pageChecksum.h storage/info.h storage/posix/storage.h storage/read.h storage/read.intern.h storage/storage.h storage/storage.intern.h storage/write.h storage/write.intern.h version.h ../libc/xs/common/encode.xsh ../libc/xs/crypto/cipherBlock.xsh ../libc/xs/crypto/hash.xsh +perl/exec.o: perl/exec.c ../libc/LibC.h build.auto.h common/assert.h common/compress/gzip/compress.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/encode.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/filter/size.h common/io/http/client.h common/io/http/header.h common/io/http/query.h common/io/io.h common/io/read.h common/io/read.intern.h common/io/write.h common/io/write.intern.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/json.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h config/define.auto.h config/define.h config/load.h config/parse.h perl/config.h perl/embed.auto.c perl/exec.h 
perl/libc.auto.c postgres/client.h postgres/interface.h postgres/pageChecksum.h storage/helper.h storage/info.h storage/posix/storage.h storage/read.h storage/read.intern.h storage/s3/storage.h storage/s3/storage.intern.h storage/storage.h storage/storage.intern.h storage/write.h storage/write.intern.h version.h ../libc/xs/common/encode.xsh ../libc/xs/crypto/hash.xsh ../libc/xs/postgres/client.xsh ../libc/xs/storage/storage.xsh ../libc/xs/storage/storageRead.xsh ../libc/xs/storage/storageWrite.xsh $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c perl/exec.c -o perl/exec.o +postgres/client.o: postgres/client.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/list.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h common/wait.h postgres/client.h + $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c postgres/client.c -o postgres/client.o + postgres/interface.o: postgres/interface.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/write.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h postgres/interface.h postgres/interface/version.h postgres/version.h storage/helper.h storage/info.h storage/read.h storage/storage.h storage/write.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c postgres/interface.c -o postgres/interface.o @@ -511,7 +544,7 @@ storage/read.o: storage/read.c build.auto.h common/assert.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h 
common/io/read.intern.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/variant.h common/type/variantList.h storage/read.h storage/read.intern.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c storage/read.c -o storage/read.o -storage/remote/protocol.o: storage/remote/protocol.c build.auto.h common/assert.h common/compress/gzip/compress.h common/compress/gzip/decompress.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/io.h common/io/read.h common/io/read.intern.h common/io/write.h common/io/write.intern.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/read.intern.h storage/remote/protocol.h storage/storage.h storage/storage.intern.h storage/write.h storage/write.intern.h version.h +storage/remote/protocol.o: storage/remote/protocol.c build.auto.h command/backup/pageChecksum.h common/assert.h common/compress/gzip/compress.h common/compress/gzip/decompress.h common/crypto/cipherBlock.h common/crypto/common.h common/crypto/hash.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/filter/sink.h common/io/filter/size.h common/io/io.h common/io/read.h common/io/read.intern.h common/io/write.h common/io/write.intern.h common/lock.h common/log.h common/logLevel.h common/memContext.h common/regExp.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h config/config.auto.h config/config.h 
config/define.auto.h config/define.h protocol/server.h storage/helper.h storage/info.h storage/read.h storage/read.intern.h storage/remote/protocol.h storage/storage.h storage/storage.intern.h storage/write.h storage/write.intern.h version.h $(CC) $(CPPFLAGS) $(CFLAGS) $(CMAKE) -c storage/remote/protocol.c -o storage/remote/protocol.o storage/remote/read.o: storage/remote/read.c build.auto.h common/assert.h common/compress/gzip/compress.h common/compress/gzip/decompress.h common/debug.h common/error.auto.h common/error.h common/io/filter/filter.h common/io/filter/group.h common/io/read.h common/io/read.intern.h common/io/write.h common/io/write.intern.h common/log.h common/logLevel.h common/macro.h common/memContext.h common/object.h common/stackTrace.h common/time.h common/type/buffer.h common/type/convert.h common/type/keyValue.h common/type/string.h common/type/stringList.h common/type/variant.h common/type/variantList.h protocol/client.h protocol/command.h protocol/server.h storage/info.h storage/read.h storage/read.intern.h storage/remote/protocol.h storage/remote/read.h storage/remote/storage.h storage/remote/storage.intern.h storage/storage.h storage/storage.intern.h storage/write.h storage/write.intern.h version.h diff -Nru pgbackrest-2.15.1/src/perl/config.c pgbackrest-2.16/src/perl/config.c --- pgbackrest-2.15.1/src/perl/config.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/perl/config.c 2019-08-05 16:03:04.000000000 +0000 @@ -42,19 +42,19 @@ { case cfgSourceParam: { - source = VARSTRDEF("param"); + source = varNewStrZ("param"); break; } case cfgSourceConfig: { - source = VARSTRDEF("config"); + source = varNewStrZ("config"); break; } case cfgSourceDefault: { - source = VARSTRDEF("default"); + source = varNewStrZ("default"); break; } } diff -Nru pgbackrest-2.15.1/src/perl/embed.auto.c pgbackrest-2.16/src/perl/embed.auto.c --- pgbackrest-2.15.1/src/perl/embed.auto.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/perl/embed.auto.c 
2019-08-05 16:03:04.000000000 +0000 @@ -269,9 +269,6 @@ "use pgBackRest::Config::Config;\n" "use pgBackRest::Protocol::Helper;\n" "use pgBackRest::Protocol::Storage::Helper;\n" - "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" - "use pgBackRest::Storage::Filter::Sha;\n" "use pgBackRest::Storage::Helper;\n" "\n\n\n\n\n" "sub archiveGetCheck\n" @@ -391,7 +388,6 @@ "use pgBackRest::Manifest;\n" "use pgBackRest::Protocol::Storage::Helper;\n" "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" "use pgBackRest::Storage::Helper;\n" "\n\n\n\n" "use constant ARCHIVE_INFO_FILE => 'archive.info';\n" @@ -708,8 +704,6 @@ "\n\n" "my $iSysIdOffset = $strDbVersion >= PG_VERSION_93 ? PG_WAL_SYSTEM_ID_OFFSET_GTE_93 : PG_WAL_SYSTEM_ID_OFFSET_LT_93;\n" "\n\n" - "my $tBlock;\n" - "\n\n" "if (!storageRepo()->encryptionValid(storageRepo()->encrypted($strArchiveFilePath)))\n" "{\n" "confess &log(ERROR, \"encryption incompatible for '$strArchiveFilePath'\" .\n" @@ -719,10 +713,12 @@ "my $oFileIo = storageRepo()->openRead(\n" "$strArchiveFilePath,\n" "{rhyFilter => $strArchiveFile =~ ('\\.' . COMPRESS_EXT . '$') ?\n" - "[{strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS}]}] : undef,\n" + "[{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_DECOMPRESS, false]}] : undef,\n" "strCipherPass => $self->cipherPassSub()});\n" + "$oFileIo->open();\n" "\n" - "$oFileIo->read(\\$tBlock, 512, true);\n" + "my $tBlock;\n" + "$oFileIo->read(\\$tBlock, 512);\n" "$oFileIo->close();\n" "\n\n" "my ($iMagic, $iFlag, $junk, $ullDbSysId) = unpack('SSa' . $iSysIdOffset . 'Q', $tBlock);\n" @@ -847,11 +843,13 @@ ");\n" "\n\n" "$self->numericSet(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_SYSTEM_ID, undef, $ullDbSysId);\n" - "$self->set(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_VERSION, undef, $strDbVersion);\n" + "\n" + "$self->set(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_VERSION, undef, $strDbVersion . 
'');\n" "$self->numericSet(INFO_ARCHIVE_SECTION_DB, INFO_ARCHIVE_KEY_DB_ID, undef, $iDbHistoryId);\n" "\n\n" "$self->numericSet(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_ID, $ullDbSysId);\n" - "$self->set(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_VERSION, $strDbVersion);\n" + "\n" + "$self->set(INFO_ARCHIVE_SECTION_DB_HISTORY, $iDbHistoryId, INFO_ARCHIVE_KEY_DB_VERSION, $strDbVersion . '');\n" "\n\n" "return logDebugReturn($strOperation);\n" "}\n" @@ -901,8 +899,6 @@ "use pgBackRest::Protocol::Storage::Helper;\n" "use pgBackRest::Common::Io::Handle;\n" "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" - "use pgBackRest::Storage::Filter::Sha;\n" "use pgBackRest::Storage::Helper;\n" "use pgBackRest::Version;\n" "\n\n\n\n" @@ -1074,7 +1070,7 @@ "if ($cType eq 'd')\n" "{\n" "logDebugMisc($strOperation, \"remove path ${strName}\");\n" - "$oStorageRepo->remove(STORAGE_REPO_BACKUP . \"/${strBackupLabel}/${strName}\", {bRecurse => true});\n" + "$oStorageRepo->pathRemove(STORAGE_REPO_BACKUP . \"/${strBackupLabel}/${strName}\", {bRecurse => true});\n" "}\n" "\n" "else\n" @@ -1154,7 +1150,7 @@ "storageRepo()->pathCreate(STORAGE_REPO_BACKUP . \"/${strBackupLabel}/${strPath}\", {bIgnoreExists => true});\n" "}\n" "\n" - "if (storageRepo()->driver()->capability(STORAGE_CAPABILITY_LINK))\n" + "if (storageRepo()->capability(STORAGE_CAPABILITY_LINK))\n" "{\n" "for my $strTarget ($oBackupManifest->keys(MANIFEST_SECTION_BACKUP_TARGET))\n" "{\n" @@ -1169,12 +1165,6 @@ "}\n" "}\n" "\n\n" - "my $hStartLsnParam =\n" - "{\n" - "iWalId => defined($strLsnStart) ? hex((split('/', $strLsnStart))[0]) : 0xFFFF,\n" - "iWalOffset => defined($strLsnStart) ? 
hex((split('/', $strLsnStart))[1]) : 0xFFFF,\n" - "};\n" - "\n\n" "foreach my $strRepoFile (\n" "sort {sprintf(\"%016d-%s\", $oBackupManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $b, MANIFEST_SUBKEY_SIZE), $b) cmp\n" "sprintf(\"%016d-%s\", $oBackupManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $a, MANIFEST_SUBKEY_SIZE), $a)}\n" @@ -1223,13 +1213,13 @@ "\n\n" "$oBackupProcess->queueJob(\n" "$iHostConfigIdx, $strQueueKey, $strRepoFile, OP_BACKUP_FILE,\n" - "[$strDbFile, $strRepoFile, $lSize,\n" + "[$strDbFile, $bIgnoreMissing, $lSize,\n" "$oBackupManifest->get(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM, false),\n" - "cfgOption(CFGOPT_CHECKSUM_PAGE) ? isChecksumPage($strRepoFile) : false, $strBackupLabel, $bCompress,\n" - "cfgOption(CFGOPT_COMPRESS_LEVEL), $oBackupManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $strRepoFile,\n" - "MANIFEST_SUBKEY_TIMESTAMP, false), $bIgnoreMissing,\n" - "cfgOption(CFGOPT_CHECKSUM_PAGE) && isChecksumPage($strRepoFile) ? $hStartLsnParam : undef,\n" - "cfgOption(CFGOPT_DELTA), defined($strReference) ? true : false],\n" + "cfgOption(CFGOPT_CHECKSUM_PAGE) ? isChecksumPage($strRepoFile) : false,\n" + "defined($strLsnStart) ? hex((split('/', $strLsnStart))[0]) : 0xFFFFFFFF,\n" + "defined($strLsnStart) ? hex((split('/', $strLsnStart))[1]) : 0xFFFFFFFF,\n" + "$strRepoFile, defined($strReference) ? true : false, $bCompress, cfgOption(CFGOPT_COMPRESS_LEVEL),\n" + "$strBackupLabel, cfgOption(CFGOPT_DELTA)],\n" "{rParamSecure => $oBackupManifest->cipherPassSub() ? 
[$oBackupManifest->cipherPassSub()] : undef});\n" "\n\n" "$oBackupManifest->remove(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_SIZE);\n" @@ -1264,7 +1254,8 @@ "{\n" "($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate(\n" "$oBackupManifest, cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_HOST, $hJob->{iHostConfigIdx}), false),\n" - "$hJob->{iProcessId}, @{$hJob->{rParam}}[0..4], @{$hJob->{rResult}}, $lSizeTotal, $lSizeCurrent, $lManifestSaveSize,\n" + "$hJob->{iProcessId}, @{$hJob->{rParam}}[0], @{$hJob->{rParam}}[7], @{$hJob->{rParam}}[2], @{$hJob->{rParam}}[3],\n" + "@{$hJob->{rParam}}[4], @{$hJob->{rResult}}, $lSizeTotal, $lSizeCurrent, $lManifestSaveSize,\n" "$lManifestSaveCurrent);\n" "}\n" "\n\n\n" @@ -1516,7 +1507,7 @@ "&log(WARN, \"aborted backup ${strAbortedBackup} cannot be resumed: ${strReason}\");\n" "&log(TEST, TEST_BACKUP_NORESUME);\n" "\n" - "$oStorageRepo->remove(STORAGE_REPO_BACKUP . \"/${strAbortedBackup}\", {bRecurse => true});\n" + "$oStorageRepo->pathRemove(STORAGE_REPO_BACKUP . \"/${strAbortedBackup}\", {bRecurse => true});\n" "undef($oAbortedManifest);\n" "}\n" "\n" @@ -1716,7 +1707,9 @@ "\n\n" "if ($bCompress)\n" "{\n" - "push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP});\n" + "push(\n" + "@{$rhyFilter},\n" + "{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_COMPRESS, false, cfgOption(CFGOPT_COMPRESS_LEVEL)]});\n" "}\n" "\n\n\n" "my $oDestinationFileIo = $oStorageRepo->openWrite(\n" @@ -1786,7 +1779,7 @@ "$oBackupManifest->set(MANIFEST_SECTION_BACKUP, MANIFEST_KEY_TIMESTAMP_STOP, undef, $lTimestampStop + 0);\n" "$oBackupManifest->set(MANIFEST_SECTION_BACKUP, MANIFEST_KEY_LABEL, undef, $strBackupLabel);\n" "\n\n" - "if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" + "if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" "{\n" "\n" "$oStorageRepo->pathSync(STORAGE_REPO_BACKUP . 
\"/${strBackupLabel}\");\n" @@ -1814,11 +1807,11 @@ "{'strCipherPass' => $strCipherPassManifest}),\n" "$oStorageRepo->openWrite(\n" "\"${strHistoryPath}/${strBackupLabel}.manifest.\" . COMPRESS_EXT,\n" - "{rhyFilter => [{strClass => STORAGE_FILTER_GZIP}],\n" + "{rhyFilter => [{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_COMPRESS, false, 9]}],\n" "bPathCreate => true, bAtomic => true,\n" "strCipherPass => defined($strCipherPassManifest) ? $strCipherPassManifest : undef}));\n" "\n\n" - "if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" + "if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" "{\n" "$oStorageRepo->pathSync(STORAGE_REPO_BACKUP . qw{/} . PATH_BACKUP_HISTORY);\n" "$oStorageRepo->pathSync($strHistoryPath);\n" @@ -1826,7 +1819,7 @@ "\n\n" "$oStorageRepo->remove(STORAGE_REPO_BACKUP . qw(/) . LINK_LATEST);\n" "\n" - "if (storageRepo()->driver()->capability(STORAGE_CAPABILITY_LINK))\n" + "if (storageRepo()->capability(STORAGE_CAPABILITY_LINK))\n" "{\n" "$oStorageRepo->linkCreate(\n" "STORAGE_REPO_BACKUP . \"/${strBackupLabel}\", STORAGE_REPO_BACKUP . qw{/} . 
LINK_LATEST, {bRelative => true});\n" @@ -1834,7 +1827,7 @@ "\n\n" "$oBackupInfo->add($oBackupManifest);\n" "\n\n" - "if ($oStorageRepo->driver()->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" + "if ($oStorageRepo->capability(STORAGE_CAPABILITY_PATH_SYNC))\n" "{\n" "$oStorageRepo->pathSync(STORAGE_REPO_BACKUP);\n" "}\n" @@ -2072,17 +2065,15 @@ "use File::Basename qw(dirname);\n" "use Storable qw(dclone);\n" "\n" - "use pgBackRest::Backup::Filter::PageChecksum;\n" "use pgBackRest::Common::Exception;\n" "use pgBackRest::Common::Io::Handle;\n" "use pgBackRest::Common::Log;\n" "use pgBackRest::Common::String;\n" + "use pgBackRest::Config::Config;\n" "use pgBackRest::DbVersion;\n" "use pgBackRest::Manifest;\n" "use pgBackRest::Protocol::Storage::Helper;\n" "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" - "use pgBackRest::Storage::Filter::Sha;\n" "use pgBackRest::Storage::Helper;\n" "\n\n\n\n" "use constant BACKUP_FILE_CHECKSUM => 0;\n" @@ -2096,179 +2087,6 @@ "use constant BACKUP_FILE_NOOP => 4;\n" "push @EXPORT, qw(BACKUP_FILE_NOOP);\n" "\n\n\n\n" - "sub backupFile\n" - "{\n" - "\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strDbFile,\n" - "$strRepoFile,\n" - "$lSizeFile,\n" - "$strChecksum,\n" - "$bChecksumPage,\n" - "$strBackupLabel,\n" - "$bCompress,\n" - "$iCompressLevel,\n" - "$lModificationTime,\n" - "$bIgnoreMissing,\n" - "$hExtraParam,\n" - "$bDelta,\n" - "$bHasReference,\n" - "$strCipherPass,\n" - "\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'::backupFile', \\@_,\n" - "{name => 'strDbFile', trace => true},\n" - "{name => 'strRepoFile', trace => true},\n" - "{name => 'lSizeFile', trace => true},\n" - "{name => 'strChecksum', required => false, trace => true},\n" - "{name => 'bChecksumPage', trace => true},\n" - "{name => 'strBackupLabel', trace => true},\n" - "{name => 'bCompress', trace => true},\n" - "{name => 'iCompressLevel', trace => true},\n" - "{name => 'lModificationTime', trace => true},\n" - "{name => 'bIgnoreMissing', default => true, trace => true},\n" - "{name => 'hExtraParam', required => false, trace => true},\n" - "{name => 'bDelta', trace => true},\n" - "{name => 'bHasReference', trace => true},\n" - "{name => 'strCipherPass', required => false, trace => true},\n" - ");\n" - "\n" - "my $oStorageRepo = storageRepo();\n" - "my $iCopyResult = BACKUP_FILE_COPY;\n" - "my $strCopyChecksum;\n" - "my $rExtra;\n" - "my $lCopySize;\n" - "my $lRepoSize;\n" - "\n\n" - "my $strFileOp = $strRepoFile . ($bCompress ? '.' . COMPRESS_EXT : '');\n" - "\n" - "my $bCopy = true;\n" - "\n\n\n" - "if (defined($strChecksum))\n" - "{\n" - "\n" - "if ($bDelta)\n" - "{\n" - "($strCopyChecksum, $lCopySize) = storageDb()->hashSize($strDbFile, {bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "if (defined($strCopyChecksum))\n" - "{\n" - "$bCopy = !($strCopyChecksum eq $strChecksum && $lCopySize == $lSizeFile);\n" - "\n\n\n\n" - "if (!$bCopy && $bHasReference)\n" - "{\n" - "$iCopyResult = BACKUP_FILE_NOOP;\n" - "}\n" - "}\n" - "\n" - "else\n" - "{\n" - "$iCopyResult = BACKUP_FILE_SKIP;\n" - "$bCopy = false;\n" - "}\n" - "}\n" - "\n\n\n\n" - "if (!$bDelta || !$bHasReference)\n" - "{\n" - "\n\n" - "if ($iCopyResult == BACKUP_FILE_SKIP)\n" - "{\n" - "$oStorageRepo->remove(STORAGE_REPO_BACKUP . 
\"/${strBackupLabel}/${strFileOp}\");\n" - "}\n" - "elsif (!$bDelta || !$bCopy)\n" - "{\n" - "\n" - "my $rhyFilter;\n" - "\n" - "if ($bCompress)\n" - "{\n" - "push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS}]});\n" - "}\n" - "\n\n" - "($strCopyChecksum, $lCopySize) = $oStorageRepo->hashSize(\n" - "$oStorageRepo->openRead(STORAGE_REPO_BACKUP . \"/${strBackupLabel}/${strFileOp}\",\n" - "{rhyFilter => $rhyFilter, strCipherPass => $strCipherPass}));\n" - "\n\n" - "$bCopy = !($strCopyChecksum eq $strChecksum && $lCopySize == $lSizeFile);\n" - "\n\n" - "$iCopyResult = $bCopy ? BACKUP_FILE_RECOPY : BACKUP_FILE_CHECKSUM;\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "if ($bCopy)\n" - "{\n" - "\n" - "my $rhyFilter = [{strClass => STORAGE_FILTER_SHA}];\n" - "\n\n" - "if ($bChecksumPage)\n" - "{\n" - "\n" - "my $iSegmentNo = ($strDbFile =~ /\\.[0-9]+$/) ? substr(($strDbFile =~ m/\\.[0-9]+$/g)[0], 1) + 0 : 0;\n" - "\n" - "push(\n" - "@{$rhyFilter},\n" - "{strClass => BACKUP_FILTER_PAGECHECKSUM,\n" - "rxyParam => [$iSegmentNo, $hExtraParam->{iWalId}, $hExtraParam->{iWalOffset}]});\n" - "};\n" - "\n\n" - "if ($bCompress)\n" - "{\n" - "push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP, rxyParam => [{iLevel => $iCompressLevel}]});\n" - "}\n" - "\n\n" - "my $oSourceFileIo = storageDb()->openRead($strDbFile, {rhyFilter => $rhyFilter, bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "if (defined($oSourceFileIo))\n" - "{\n" - "my $oDestinationFileIo = $oStorageRepo->openWrite(\n" - "STORAGE_REPO_BACKUP . 
\"/${strBackupLabel}/${strFileOp}\",\n" - "{bPathCreate => true, bProtocolCompress => !$bCompress, strCipherPass => $strCipherPass});\n" - "\n\n" - "$oStorageRepo->copy($oSourceFileIo, $oDestinationFileIo);\n" - "\n\n" - "$strCopyChecksum = $oSourceFileIo->result(STORAGE_FILTER_SHA);\n" - "$lCopySize = $oSourceFileIo->result(COMMON_IO_HANDLE);\n" - "$lRepoSize = $oDestinationFileIo->result(COMMON_IO_HANDLE);\n" - "\n" - "if (!defined($lRepoSize))\n" - "{\n" - "confess &log(ERROR, \"REPO_SIZE IS NOT SET\");\n" - "}\n" - "\n\n" - "$rExtra = $bChecksumPage ? $oSourceFileIo->result(BACKUP_FILTER_PAGECHECKSUM) : undef;\n" - "}\n" - "\n" - "else\n" - "{\n" - "$iCopyResult = BACKUP_FILE_SKIP;\n" - "}\n" - "}\n" - "\n\n\n\n\n\n" - "if ((($iCopyResult == BACKUP_FILE_COPY || $iCopyResult == BACKUP_FILE_RECOPY) &&\n" - "$oStorageRepo->driver()->capability(STORAGE_CAPABILITY_SIZE_DIFF)) ||\n" - "$iCopyResult == BACKUP_FILE_CHECKSUM)\n" - "{\n" - "$lRepoSize = ($oStorageRepo->info(STORAGE_REPO_BACKUP . 
\"/${strBackupLabel}/${strFileOp}\"))->size();\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'iCopyResult', value => $iCopyResult, trace => true},\n" - "{name => 'lCopySize', value => $lCopySize, trace => true},\n" - "{name => 'lRepoSize', value => $lRepoSize, trace => true},\n" - "{name => 'strCopyChecksum', value => $strCopyChecksum, trace => true},\n" - "{name => 'rExtra', value => $rExtra, trace => true},\n" - ");\n" - "}\n" - "\n" - "push @EXPORT, qw(backupFile);\n" - "\n\n\n\n" "sub backupManifestUpdate\n" "{\n" "\n" @@ -2376,20 +2194,21 @@ "if ($bChecksumPage)\n" "{\n" "\n" - "if (defined($rExtra->{bValid}))\n" + "if (defined($rExtra->{valid}))\n" "{\n" "\n" - "$oManifest->boolSet(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE, $rExtra->{bValid});\n" + "$oManifest->boolSet(\n" + "MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE, $rExtra->{valid});\n" "\n\n" - "if (!$rExtra->{bValid})\n" + "if (!$rExtra->{valid})\n" "{\n" "\n" "if ($lSizeCopy % PG_PAGE_SIZE != 0)\n" "{\n" "\n" - "if (!defined($rExtra->{bAlign}) || $rExtra->{bAlign})\n" + "if (!defined($rExtra->{align}) || $rExtra->{align})\n" "{\n" - "confess &log(ASSERT, 'bAlign flag should have been set for misaligned page');\n" + "confess &log(ASSERT, 'align flag should have been set for misaligned page');\n" "}\n" "\n\n" "&log(WARN,\n" @@ -2401,12 +2220,12 @@ "{\n" "$oManifest->set(\n" "MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE_ERROR,\n" - "dclone($rExtra->{iyPageError}));\n" + "dclone($rExtra->{error}));\n" "\n\n" "my $strPageError;\n" "my $iPageErrorTotal = 0;\n" "\n" - "foreach my $iyPage (@{$rExtra->{iyPageError}})\n" + "foreach my $iyPage (@{$rExtra->{error}})\n" "{\n" "$strPageError .= (defined($strPageError) ? 
', ' : '');\n" "\n\n" @@ -2479,152 +2298,6 @@ "1;\n" }, { - .name = "pgBackRest/Backup/Filter/PageChecksum.pm", - .data = - "\n\n\n" - "package pgBackRest::Backup::Filter::PageChecksum;\n" - "use parent 'pgBackRest::Common::Io::Filter';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::DbVersion qw(PG_PAGE_SIZE);\n" - "use pgBackRest::LibC qw(:checksum);\n" - "\n\n\n\n" - "use constant BACKUP_FILTER_PAGECHECKSUM => __PACKAGE__;\n" - "push @EXPORT, qw(BACKUP_FILTER_PAGECHECKSUM);\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$oParent,\n" - "$iSegmentNo,\n" - "$iWalId,\n" - "$iWalOffset,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'oParent', trace => true},\n" - "{name => 'iSegmentNo', trace => true},\n" - "{name => 'iWalId', trace => true},\n" - "{name => 'iWalOffset', trace => true},\n" - ");\n" - "\n\n" - "my $self = $class->SUPER::new($oParent);\n" - "bless $self, $class;\n" - "\n\n" - "$self->{iSegmentNo} = $iSegmentNo;\n" - "$self->{iWalId} = $iWalId;\n" - "$self->{iWalOffset} = $iWalOffset;\n" - "\n\n" - "$self->{hResult}{bValid} = true;\n" - "$self->{hResult}{bAlign} = true;\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub read\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "my $iSize = shift;\n" - "\n\n" - "my $iActualSize = $self->parent()->read($rtBuffer, $iSize);\n" - "\n\n" - "if ($iActualSize > 0)\n" - "{\n" - "\n" - "if (!$self->{hResult}{bAlign} || ($iActualSize % PG_PAGE_SIZE != 0))\n" - "{\n" - "if (!$self->{hResult}{bAlign})\n" - "{\n" - "confess &log(ASSERT, \"should not be possible to 
see two misaligned blocks in a row\");\n" - "}\n" - "\n" - "$self->{hResult}{bValid} = false;\n" - "$self->{hResult}{bAlign} = false;\n" - "delete($self->{hResult}{iyPageError});\n" - "}\n" - "else\n" - "{\n" - "\n" - "my $iBlockOffset = int(($self->size() - $iActualSize) / PG_PAGE_SIZE) + ($self->{iSegmentNo} * 131072);\n" - "\n" - "if (!pageChecksumBufferTest(\n" - "$$rtBuffer, $iActualSize, $iBlockOffset, PG_PAGE_SIZE, $self->{iWalId},\n" - "$self->{iWalOffset}))\n" - "{\n" - "$self->{hResult}{bValid} = false;\n" - "\n\n\n" - "for (my $iBlockNo = 0; $iBlockNo < int($iActualSize / PG_PAGE_SIZE); $iBlockNo++)\n" - "{\n" - "my $iBlockNoStart = $iBlockOffset + $iBlockNo;\n" - "\n" - "if (!pageChecksumTest(\n" - "substr($$rtBuffer, $iBlockNo * PG_PAGE_SIZE, PG_PAGE_SIZE), $iBlockNoStart, PG_PAGE_SIZE,\n" - "$self->{iWalId}, $self->{iWalOffset}))\n" - "{\n" - "my $iLastIdx = defined($self->{hResult}{iyPageError}) ? @{$self->{hResult}{iyPageError}} - 1 : 0;\n" - "my $iyLast = defined($self->{hResult}{iyPageError}) ? 
$self->{hResult}{iyPageError}[$iLastIdx] : undef;\n" - "\n" - "if (!defined($iyLast) || (!ref($iyLast) && $iyLast != $iBlockNoStart - 1) ||\n" - "(ref($iyLast) && $iyLast->[1] != $iBlockNoStart - 1))\n" - "{\n" - "push(@{$self->{hResult}{iyPageError}}, $iBlockNoStart);\n" - "}\n" - "elsif (!ref($iyLast))\n" - "{\n" - "$self->{hResult}{iyPageError}[$iLastIdx] = undef;\n" - "push(@{$self->{hResult}{iyPageError}[$iLastIdx]}, $iyLast);\n" - "push(@{$self->{hResult}{iyPageError}[$iLastIdx]}, $iBlockNoStart);\n" - "}\n" - "else\n" - "{\n" - "$self->{hResult}{iyPageError}[$iLastIdx][1] = $iBlockNoStart;\n" - "}\n" - "}\n" - "}\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "return $iActualSize;\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n" - "if (defined($self->{hResult}))\n" - "{\n" - "\n" - "$self->resultSet(BACKUP_FILTER_PAGECHECKSUM, $self->{hResult});\n" - "\n\n" - "undef($self->{hResult});\n" - "\n\n" - "return $self->parent()->close();\n" - "}\n" - "}\n" - "\n" - "1;\n" - }, - { .name = "pgBackRest/Backup/Info.pm", .data = "\n\n\n" @@ -3444,13 +3117,14 @@ "$self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_CATALOG, undef, $iCatalogVersion);\n" "$self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_CONTROL, undef, $iControlVersion);\n" "$self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_SYSTEM_ID, undef, $ullDbSysId);\n" - "$self->set(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_DB_VERSION, undef, $strDbVersion);\n" + "\n" + "$self->set(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_DB_VERSION, undef, $strDbVersion . 
'');\n" "$self->numericSet(INFO_BACKUP_SECTION_DB, INFO_BACKUP_KEY_HISTORY_ID, undef, $iDbHistoryId);\n" "\n\n" "$self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_CATALOG, $iCatalogVersion);\n" "$self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_CONTROL, $iControlVersion);\n" "$self->numericSet(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_SYSTEM_ID, $ullDbSysId);\n" - "$self->set(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_DB_VERSION, $strDbVersion);\n" + "$self->set(INFO_BACKUP_SECTION_DB_HISTORY, $iDbHistoryId, INFO_BACKUP_KEY_DB_VERSION, $strDbVersion . '');\n" "\n\n" "return logDebugReturn($strOperation);\n" "}\n" @@ -3652,52 +3326,12 @@ "};\n" "}\n" "\n\n" - "if ($iResult == 0 && !$oDb->isStandby())\n" - "{\n" - "$strWalSegment = $oDb->walSwitch();\n" - "\n" - "eval\n" - "{\n" - "$strArchiveFile = walSegmentFind(storageRepo(), $strArchiveId, $strWalSegment, $iArchiveTimeout);\n" - "return true;\n" - "}\n" - "\n" - "or do\n" - "{\n" - "\n" - "$iResult = exceptionCode($EVAL_ERROR);\n" - "$strResultMessage = exceptionMessage($EVAL_ERROR);\n" - "};\n" - "}\n" - "\n\n" "logLevelSet(undef, cfgOption(CFGOPT_LOG_LEVEL_CONSOLE));\n" "}\n" - "\n\n\n" - "if ($iResult == 0)\n" - "{\n" - "if (!$oDb->isStandby())\n" - "{\n" - "&log(INFO,\n" - "\"WAL segment ${strWalSegment} successfully stored in the archive at '\" .\n" - "storageRepo()->pathGet(STORAGE_REPO_ARCHIVE . \"/$strArchiveId/${strArchiveFile}\") . \"'\");\n" - "}\n" - "else\n" - "{\n" - "&log(INFO, 'switch ' . $oDb->walId() . ' cannot be performed on the standby, all other checks passed successfully');\n" - "}\n" - "}\n" - "else\n" - "{\n" - "\n" - "&log(ERROR, $strResultMessage, $iResult);\n" "\n\n" - "if (defined($strWalSegment) && !defined($strArchiveFile))\n" + "if ($iResult != 0)\n" "{\n" - "&log(WARN,\n" - "\"WAL segment ${strWalSegment} did not reach the archive:\" . (defined($strArchiveId) ? 
$strArchiveId : '') . \"\\n\" .\n" - "\"HINT: Check the archive_command to ensure that all options are correct (especially --stanza).\\n\" .\n" - "\"HINT: Check the PostgreSQL server log for errors.\");\n" - "}\n" + "&log(ERROR, $strResultMessage, $iResult);\n" "}\n" "\n\n" "return logDebugReturn\n" @@ -4126,11 +3760,10 @@ "1;\n" }, { - .name = "pgBackRest/Common/Http/Client.pm", + .name = "pgBackRest/Common/Ini.pm", .data = "\n\n\n" - "package pgBackRest::Common::Http::Client;\n" - "use parent 'pgBackRest::Common::Io::Buffered';\n" + "package pgBackRest::Common::Ini;\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" @@ -4139,425 +3772,9 @@ "\n" "use Exporter qw(import);\n" "our @EXPORT = qw();\n" - "use IO::Socket::SSL;\n" - "use Socket qw(SOL_SOCKET SO_KEEPALIVE);\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Io::Buffered;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::String;\n" - "use pgBackRest::Common::Xml;\n" - "use pgBackRest::Common::Http::Common;\n" - "\n\n\n\n" - "use constant HTTP_VERB_GET => 'GET';\n" - "push @EXPORT, qw(HTTP_VERB_GET);\n" - "use constant HTTP_VERB_POST => 'POST';\n" - "push @EXPORT, qw(HTTP_VERB_POST);\n" - "use constant HTTP_VERB_PUT => 'PUT';\n" - "push @EXPORT, qw(HTTP_VERB_PUT);\n" - "\n" - "use constant HTTP_HEADER_CONTENT_LENGTH => 'content-length';\n" - "push @EXPORT, qw(HTTP_HEADER_CONTENT_LENGTH);\n" - "use constant HTTP_HEADER_TRANSFER_ENCODING => 'transfer-encoding';\n" - "push @EXPORT, qw(HTTP_HEADER_TRANSFER_ENCODING);\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strHost,\n" - "$strVerb,\n" - "$iPort,\n" - "$strUri,\n" - "$hQuery,\n" - "$hRequestHeader,\n" - "$rstrRequestBody,\n" - "$bResponseBodyPrefetch,\n" - "$iProtocolTimeout,\n" - "$iTryTotal,\n" - "$lBufferMax,\n" - "$bVerifySsl,\n" - "$strCaPath,\n" - "$strCaFile,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'strHost', trace => true},\n" - "{name => 'strVerb', trace => true},\n" - "{name => 'iPort', optional => true, default => 443, trace => true},\n" - "{name => 'strUri', optional => true, default => qw(/), trace => true},\n" - "{name => 'hQuery', optional => true, trace => true},\n" - "{name => 'hRequestHeader', optional => true, trace => true},\n" - "{name => 'rstrRequestBody', optional => true, trace => true},\n" - "{name => 'bResponseBodyPrefetch', optional => true, default => false, trace => true},\n" - "{name => 'iProtocolTimeout', optional => true, default => 300, trace => true},\n" - "{name => 'iTryTotal', optional => true, default => 3, trace => true},\n" - "{name => 'lBufferMax', optional => true, default => 32768, trace => true},\n" - "{name => 'bVerifySsl', optional => true, default => true, trace => true},\n" - "{name => 'strCaPath', optional => true, trace => true},\n" - "{name => 'strCaFile', optional => true, trace => true},\n" - ");\n" - "\n\n" - "my $self;\n" - "my $iTry = 1;\n" - "my $bRetry;\n" - "\n" - "do\n" - "{\n" - "\n" - "logDisable() if $iTry < $iTryTotal;\n" - "$bRetry = false;\n" - "\n" - "eval\n" - "{\n" - "\n" - "my $oSocket;\n" - "\n" - "if (eval{require IO::Socket::IP})\n" - "{\n" - "$oSocket = IO::Socket::IP->new(PeerHost => $strHost, PeerPort => $iPort)\n" - "or confess &log(ERROR, \"unable to create socket: $@\", ERROR_HOST_CONNECT);\n" - "}\n" - "else\n" - "{\n" - "require IO::Socket::INET;\n" - "\n" - "$oSocket = IO::Socket::INET->new(PeerHost => $strHost, PeerPort => $iPort)\n" - "or confess &log(ERROR, \"unable to create socket: $@\", ERROR_HOST_CONNECT);\n" - "}\n" - "\n" - "setsockopt($oSocket, SOL_SOCKET,SO_KEEPALIVE, 1)\n" - "or confess &log(ERROR, \"unable to set socket keepalive: $@\", ERROR_HOST_CONNECT);\n" - "\n" - "eval\n" - "{\n" - "IO::Socket::SSL->start_SSL(\n" - "$oSocket, SSL_verify_mode => $bVerifySsl ? 
SSL_VERIFY_PEER : SSL_VERIFY_NONE, SSL_ca_path => $strCaPath,\n" - "SSL_ca_file => $strCaFile);\n" - "}\n" - "or do\n" - "{\n" - "logErrorResult(\n" - "ERROR_HOST_CONNECT, coalesce(length($!) == 0 ? undef : $!, $SSL_ERROR), length($!) > 0 ? $SSL_ERROR : undef);\n" - "};\n" - "\n\n" - "$self = $class->SUPER::new(\n" - "new pgBackRest::Common::Io::Handle('httpClient', $oSocket, $oSocket), $iProtocolTimeout, $lBufferMax);\n" - "bless $self, $class;\n" - "\n\n" - "$self->{oSocket} = $oSocket;\n" - "\n\n" - "my $strQuery = httpQuery($hQuery);\n" - "\n\n" - "$self->{strRequestHeader} = \"${strVerb} \" . httpUriEncode($strUri, true) . \"?${strQuery} HTTP/1.1\" . \"\\r\\n\";\n" - "\n" - "foreach my $strHeader (sort(keys(%{$hRequestHeader})))\n" - "{\n" - "$self->{strRequestHeader} .= \"${strHeader}: $hRequestHeader->{$strHeader}\\r\\n\";\n" - "}\n" - "\n" - "$self->{strRequestHeader} .= \"\\r\\n\";\n" - "\n\n" - "$self->write(\\$self->{strRequestHeader});\n" - "\n\n" - "if (defined($rstrRequestBody))\n" - "{\n" - "my $iTotalSize = length($$rstrRequestBody);\n" - "my $iTotalSent = 0;\n" - "\n\n" - "do\n" - "{\n" - "my $strBufferWrite = substr($$rstrRequestBody, $iTotalSent, $lBufferMax);\n" - "$iTotalSent += $self->write(\\$strBufferWrite);\n" - "} while ($iTotalSent < $iTotalSize);\n" - "}\n" - "\n\n" - "($self->{strResponseProtocol}, $self->{iResponseCode}, $self->{strResponseMessage}) =\n" - "split(' ', trim($self->readLine()));\n" - "\n\n" - "$self->{iContentLength} = 0;\n" - "$self->{strResponseHeader} = '';\n" - "my $strHeader = trim($self->readLine());\n" - "\n" - "while ($strHeader ne '')\n" - "{\n" - "\n" - "$self->{strResponseHeader} .= \"${strHeader}\\n\";\n" - "\n" - "my $iColonPos = index($strHeader, ':');\n" - "\n" - "if ($iColonPos == -1)\n" - "{\n" - "confess &log(ERROR, \"http header '${strHeader}' requires colon separator\", ERROR_PROTOCOL);\n" - "}\n" - "\n\n" - "my $strHeaderKey = lc(substr($strHeader, 0, $iColonPos));\n" - "my $strHeaderValue = 
trim(substr($strHeader, $iColonPos + 1));\n" - "\n\n" - "$self->{hResponseHeader}{$strHeaderKey} = $strHeaderValue;\n" - "\n\n" - "if ($strHeaderKey eq HTTP_HEADER_CONTENT_LENGTH)\n" - "{\n" - "$self->{iContentLength} = $strHeaderValue + 0;\n" - "$self->{iContentRemaining} = $self->{iContentLength};\n" - "}\n" - "\n" - "elsif ($strHeaderKey eq HTTP_HEADER_TRANSFER_ENCODING)\n" - "{\n" - "if ($strHeaderValue eq 'chunked')\n" - "{\n" - "$self->{iContentLength} = -1;\n" - "}\n" - "else\n" - "{\n" - "confess &log(ERROR, \"invalid value '${strHeaderValue} for http header '${strHeaderKey}'\", ERROR_PROTOCOL);\n" - "}\n" - "}\n" - "\n\n" - "$strHeader = trim($self->readLine());\n" - "}\n" - "\n\n" - "if ($bResponseBodyPrefetch)\n" - "{\n" - "$self->{strResponseBody} = $self->responseBody();\n" - "}\n" - "\n\n" - "logEnable() if $iTry < $iTryTotal;\n" - "return 1;\n" - "}\n" - "or do\n" - "{\n" - "\n" - "logEnable() if $iTry < $iTryTotal;\n" - "\n\n" - "if ($iTry == $iTryTotal)\n" - "{\n" - "confess $EVAL_ERROR;\n" - "}\n" - "\n\n" - "$iTry++;\n" - "$bRetry = true;\n" - "};\n" - "}\n" - "while ($bRetry);\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub read\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "my $iRequestSize = shift;\n" - "\n\n" - "$iRequestSize = $iRequestSize < $self->{iContentRemaining} ? 
$iRequestSize : $self->{iContentRemaining};\n" - "$self->{iContentRemaining} -= $iRequestSize;\n" - "\n" - "my $iActualSize = $self->SUPER::read($rtBuffer, $iRequestSize, true);\n" - "\n\n" - "if ($self->{iContentRemaining} == 0)\n" - "{\n" - "$self->SUPER::eofSet(true);\n" - "}\n" - "\n" - "return $iActualSize;\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "if (defined($self->{oSocket}))\n" - "{\n" - "$self->{oSocket}->close();\n" - "undef($self->{oSocket});\n" - "}\n" - "}\n" - "\n" - "sub DESTROY {shift->close()}\n" - "\n\n\n\n" - "sub responseBody\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->responseBody'\n" - ");\n" - "\n\n" - "return $self->{strResponseBody} if exists($self->{strResponseBody});\n" - "\n\n" - "my $strResponseBody = undef;\n" - "\n" - "if ($self->{iContentLength} != 0)\n" - "{\n" - "\n" - "if ($self->{iContentLength} == -1)\n" - "{\n" - "while (1)\n" - "{\n" - "\n" - "my $strChunkLength = trim($self->readLine());\n" - "my $iChunkLength = hex($strChunkLength);\n" - "\n\n" - "last if ($iChunkLength == 0);\n" - "\n\n" - "$self->SUPER::read(\\$strResponseBody, $iChunkLength, true);\n" - "$self->readLine();\n" - "};\n" - "}\n" - "\n" - "else\n" - "{\n" - "$self->SUPER::read(\\$strResponseBody, $self->{iContentLength}, true);\n" - "}\n" - "\n" - "$self->close();\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'rstrResponseBody', value => \\$strResponseBody, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub contentLength {shift->{iContentLength}}\n" - "sub requestHeaderText {trim(shift->{strRequestHeader})}\n" - "sub responseCode {shift->{iResponseCode}}\n" - "sub responseHeader {shift->{hResponseHeader}}\n" - "sub responseHeaderText {trim(shift->{strResponseHeader})}\n" - "sub responseMessage {shift->{strResponseMessage}}\n" - "sub responseProtocol 
{shift->{strResponseProtocol}}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Common/Http/Common.pm", - .data = - "\n\n\n" - "package pgBackRest::Common::Http::Common;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "\n\n\n\n" - "sub httpQuery\n" - "{\n" - "\n" - "my\n" - "(\n" - "$strOperation,\n" - "$hQuery,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '::httpQuery', \\@_,\n" - "{name => 'hQuery', required => false, trace => true},\n" - ");\n" - "\n\n" - "my $strQuery = '';\n" - "\n\n" - "if (ref($hQuery))\n" - "{\n" - "foreach my $strParam (sort(keys(%{$hQuery})))\n" - "{\n" - "\n" - "if (defined($hQuery->{$strParam}))\n" - "{\n" - "$strQuery .= ($strQuery eq '' ? '' : '&') . $strParam . '=' . httpUriEncode($hQuery->{$strParam});\n" - "}\n" - "}\n" - "}\n" - "\n" - "elsif (defined($hQuery))\n" - "{\n" - "$strQuery = $hQuery;\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'strQuery', value => $strQuery, trace => true}\n" - ");\n" - "}\n" - "\n" - "push @EXPORT, qw(httpQuery);\n" - "\n\n\n\n" - "sub httpUriEncode\n" - "{\n" - "my $strString = shift;\n" - "my $bPath = shift;\n" - "\n\n" - "my $strEncodedString;\n" - "\n" - "if (defined($strString))\n" - "{\n" - "\n" - "for (my $iIndex = 0; $iIndex < length($strString); $iIndex++)\n" - "{\n" - "my $cChar = substr($strString, $iIndex, 1);\n" - "\n\n" - "if (($cChar ge 'A' && $cChar le 'Z') || ($cChar ge 'a' && $cChar le 'z') || ($cChar ge '0' && $cChar le '9') ||\n" - "$cChar eq '_' || $cChar eq '-' || $cChar eq '~' || $cChar eq '.' 
|| ($bPath && $cChar eq '/'))\n" - "{\n" - "$strEncodedString .= $cChar;\n" - "}\n" - "\n" - "elsif ($cChar eq '/')\n" - "{\n" - "$strEncodedString .= '%2F';\n" - "}\n" - "\n" - "else\n" - "{\n" - "$strEncodedString .= sprintf('%%%02X', ord($cChar));\n" - "}\n" - "}\n" - "}\n" - "\n" - "return $strEncodedString;\n" - "}\n" - "\n" - "push @EXPORT, qw(httpUriEncode);\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Common/Ini.pm", - .data = - "\n\n\n" - "package pgBackRest::Common::Ini;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use File::Basename qw(dirname);\n" - "use JSON::PP;\n" - "use Storable qw(dclone);\n" + "use File::Basename qw(dirname);\n" + "use JSON::PP;\n" + "use Storable qw(dclone);\n" "\n" "use pgBackRest::Common::Exception;\n" "use pgBackRest::Common::Log;\n" @@ -7242,147 +6459,10 @@ "1;\n" }, { - .name = "pgBackRest/Common/Xml.pm", + .name = "pgBackRest/Config/Config.pm", .data = "\n\n\n" - "package pgBackRest::Common::Xml;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use XML::LibXML;\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "\n\n\n\n" - "use constant XML_HEADER => '';\n" - "push @EXPORT, qw(XML_HEADER);\n" - "\n\n\n\n" - "sub xmlFromText\n" - "{\n" - "my $strText = shift;\n" - "\n" - "return XML::LibXML::Text->new($strText)->toString();\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlFromText);\n" - "\n\n\n\n" - "sub xmlParse\n" - "{\n" - "my $rstrXml = shift;\n" - "\n" - "my $oXml = XML::LibXML->load_xml(string => $rstrXml)->documentElement();\n" - "\n" - "return $oXml;\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlParse);\n" - "\n\n\n\n" - "sub xmlTagChildren\n" - "{\n" - "my $oXml = shift;\n" - "my $strTag = 
shift;\n" - "\n" - "return $oXml->getChildrenByTagName($strTag);\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlTagChildren);\n" - "\n\n\n\n" - "sub xmlTagText\n" - "{\n" - "my $oXml = shift;\n" - "my $strTag = shift;\n" - "my $bRequired = shift;\n" - "\n\n\n" - "my @oyTag = $oXml->getElementsByTagName($strTag);\n" - "\n\n" - "if (@oyTag > 1)\n" - "{\n" - "confess &log(ERROR, @oyTag . \" '${strTag}' tag(s) exist, but only one was expected\", ERROR_FORMAT);\n" - "}\n" - "elsif (@oyTag == 0)\n" - "{\n" - "if (!defined($bRequired) || $bRequired)\n" - "{\n" - "confess &log(ERROR, \"tag '${strTag}' does not exist\", ERROR_FORMAT);\n" - "}\n" - "}\n" - "else\n" - "{\n" - "return $oyTag[0]->textContent();\n" - "}\n" - "\n" - "return;\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlTagText);\n" - "\n\n\n\n" - "sub xmlTagBool\n" - "{\n" - "my $oXml = shift;\n" - "my $strTag = shift;\n" - "my $bRequired = shift;\n" - "\n\n\n" - "my $strContent = xmlTagText($oXml, $strTag, $bRequired);\n" - "\n" - "if (defined($strContent))\n" - "{\n" - "if ($strContent eq 'true')\n" - "{\n" - "return true;\n" - "}\n" - "elsif ($strContent eq 'false')\n" - "{\n" - "return false;\n" - "}\n" - "else\n" - "{\n" - "confess &log(ERROR, \"invalid boolean value '${strContent}' for tag '${strTag}'\", ERROR_FORMAT);\n" - "}\n" - "}\n" - "\n" - "return;\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlTagBool);\n" - "\n\n\n\n" - "sub xmlTagInt\n" - "{\n" - "my $oXml = shift;\n" - "my $strTag = shift;\n" - "my $bRequired = shift;\n" - "\n\n\n" - "my $iContent = xmlTagText($oXml, $strTag, $bRequired);\n" - "\n" - "if (defined($iContent))\n" - "{\n" - "eval\n" - "{\n" - "$iContent = $iContent + 0;\n" - "return 1;\n" - "}\n" - "or do\n" - "{\n" - "confess &log(ERROR, \"invalid integer value '${iContent}' for tag '${strTag}'\", ERROR_FORMAT);\n" - "}\n" - "}\n" - "\n" - "return $iContent;\n" - "}\n" - "\n" - "push @EXPORT, qw(xmlTagInt);\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Config/Config.pm", - .data = - "\n\n\n" - 
"package pgBackRest::Config::Config;\n" + "package pgBackRest::Config::Config;\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" @@ -7769,13 +6849,13 @@ "use strict;\n" "use warnings FATAL => qw(all);\n" "use Carp qw(confess);\n" + "use English '-no_match_vars';\n" "\n" - "use DBD::Pg ':async';\n" - "use DBI;\n" "use Exporter qw(import);\n" "our @EXPORT = qw();\n" "use Fcntl qw(O_RDONLY);\n" "use File::Basename qw(dirname);\n" + "use JSON::PP;\n" "\n" "use pgBackRest::DbVersion;\n" "use pgBackRest::Common::Exception;\n" @@ -7864,10 +6944,10 @@ "\n\n" "my ($strOperation) = logDebugParam(__PACKAGE__ . '->DESTROY');\n" "\n" - "if (defined($self->{hDb}))\n" + "if (defined($self->{oDb}))\n" "{\n" - "$self->{hDb}->disconnect();\n" - "undef($self->{hDb});\n" + "$self->{oDb}->close();\n" + "undef($self->{oDb});\n" "}\n" "\n\n" "return logDebugReturn($strOperation);\n" @@ -7898,58 +6978,45 @@ "\n" "else\n" "{\n" - "if (!defined($self->{hDb}))\n" + "if (!defined($self->{oDb}))\n" "{\n" + "$self->{oDb} = new pgBackRest::LibC::PgClient(\n" + "cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_SOCKET_PATH, $self->{iRemoteIdx}), false),\n" + "cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PORT, $self->{iRemoteIdx})), 'postgres',\n" + "cfgOption(CFGOPT_DB_TIMEOUT) * 1000);\n" "\n" - "my $strDbName = 'postgres';\n" - "my $strDbSocketPath = cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_SOCKET_PATH, $self->{iRemoteIdx}), false);\n" - "\n\n" - "if (defined($strDbSocketPath) && $strDbSocketPath !~ /^\\//)\n" + "if ($bWarnOnError)\n" + "{\n" + "eval\n" "{\n" - "confess &log(ERROR, \"'${strDbSocketPath}' is not valid for '\" . cfgOptionName(CFGOPT_PG_SOCKET_PATH) . \"' option:\" .\n" - "\" path must be absolute\", ERROR_OPTION_INVALID_VALUE);\n" + "$self->{oDb}->open();\n" + "return true;\n" "}\n" - "\n\n" - "my $strDbUri =\n" - "\"dbi:Pg:dbname=${strDbName};port=\" . cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PORT, $self->{iRemoteIdx})) .\n" - "(defined($strDbSocketPath) ? 
\";host=${strDbSocketPath}\" : '');\n" - "\n" - "logDebugMisc\n" - "(\n" - "$strOperation, undef,\n" - "{name => 'strDbUri', value => $strDbUri},\n" - ");\n" - "\n" - "$self->{hDb} = DBI->connect($strDbUri, undef, undef,\n" - "{AutoCommit => 1, RaiseError => 0, PrintError => 0, Warn => 0});\n" - "\n\n" - "if (!$self->{hDb})\n" + "or do\n" "{\n" + "&log(WARN, exceptionMessage($EVAL_ERROR));\n" + "$bResult = false;\n" "\n" - "if (!$bWarnOnError)\n" - "{\n" - "confess &log(ERROR, $DBI::errstr, ERROR_DB_CONNECT);\n" + "undef($self->{oDb});\n" "}\n" - "\n\n" - "&log(WARN, $DBI::errstr);\n" - "\n" - "$bResult = false;\n" - "undef($self->{hDb});\n" "}\n" "else\n" "{\n" + "$self->{oDb}->open();\n" + "}\n" + "\n" + "if (defined($self->{oDb}))\n" + "{\n" "my ($fDbVersion) = $self->versionGet();\n" "\n" "if ($fDbVersion >= PG_VERSION_APPLICATION_NAME)\n" "{\n" "\n" - "$self->{hDb}->do(\n" + "$self->{oDb}->query(\n" "\"set application_name = '\" . PROJECT_NAME . ' [' .\n" - "(cfgOptionValid(CFGOPT_COMMAND) ? cfgOption(CFGOPT_COMMAND) : cfgCommandName(cfgCommandGet())) . \"]'\")\n" - "or confess &log(ERROR, $self->{hDb}->errstr, ERROR_DB_QUERY);\n" + "(cfgOptionValid(CFGOPT_COMMAND) ? cfgOption(CFGOPT_COMMAND) : cfgCommandName(cfgCommandGet())) . \"]'\");\n" "\n\n" - "$self->{hDb}->do(\"set search_path = 'pg_catalog'\")\n" - "or confess &log(ERROR, $self->{hDb}->errstr, ERROR_DB_QUERY);\n" + "$self->{oDb}->query(\"set search_path = 'pg_catalog'\");\n" "}\n" "}\n" "}\n" @@ -7992,66 +7059,11 @@ "else\n" "{\n" "$self->connect();\n" - "\n\n" - "my $hStatement = $self->{hDb}->prepare($strSql, {pg_async => PG_ASYNC})\n" - "or confess &log(ERROR, $DBI::errstr . \":\\n${strSql}\", ERROR_DB_QUERY);\n" - "\n\n" - "$hStatement->execute()\n" - "or confess &log(ERROR, $DBI::errstr. 
\":\\n${strSql}\", ERROR_DB_QUERY);\n" - "\n\n" - "my $oWait = waitInit(cfgOption(CFGOPT_DB_TIMEOUT));\n" - "my $bTimeout = true;\n" - "\n" - "do\n" - "{\n" - "\n" - "if ($hStatement->pg_ready())\n" - "{\n" - "\n" - "if (!$bResult)\n" - "{\n" - "return \\@stryResult;\n" - "}\n" + "my $strResult = $self->{oDb}->query($strSql);\n" "\n" - "if (!$hStatement->pg_result())\n" - "{\n" - "\n" - "if ($bIgnoreError)\n" - "{\n" - "return \\@stryResult;\n" - "}\n" - "\n\n" - "confess &log(ERROR, $DBI::errstr . \":\\n${strSql}\", ERROR_DB_QUERY);\n" - "}\n" - "\n\n" - "my @stryRow;\n" - "\n" - "do\n" - "{\n" - "\n" - "@stryRow = $hStatement->fetchrow_array;\n" - "\n\n" - "if (@stryRow)\n" - "{\n" - "push(@{$stryResult[@stryResult]}, @stryRow);\n" - "}\n" - "\n" - "elsif ($hStatement->err)\n" - "{\n" - "confess &log(ERROR, $DBI::errstr . \":\\n${strSql}\", ERROR_DB_QUERY);\n" - "}\n" - "}\n" - "while (@stryRow);\n" - "\n" - "$bTimeout = false;\n" - "}\n" - "} while ($bTimeout && waitMore($oWait));\n" - "\n\n" - "if ($bTimeout)\n" + "if (defined($strResult))\n" "{\n" - "$hStatement->pg_cancel();\n" - "confess &log(ERROR, 'statement timed out after ' . waitInterval($oWait) .\n" - "\" second(s):\\n${strSql}\", ERROR_DB_TIMEOUT);\n" + "@stryResult = @{JSON::PP->new()->allow_nonref()->decode($strResult)};\n" "}\n" "}\n" "\n\n" @@ -8465,29 +7477,6 @@ "return $self->{strDbVersion} >= PG_VERSION_10 ? 'lsn' : 'location';\n" "}\n" "\n\n\n\n\n\n" - "sub walSwitch\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my $strOperation = logDebugParam(__PACKAGE__ . '->walSwitch');\n" - "\n\n\n" - "if ($self->{strDbVersion} >= PG_VERSION_91)\n" - "{\n" - "$self->executeSql(\"select pg_create_restore_point('\" . PROJECT_NAME . \" Archive Check');\");\n" - "}\n" - "\n" - "my $strWalFileName = $self->executeSqlOne(\n" - "'select pg_' . $self->walId() . 'file_name from pg_' . $self->walId() . 'file_name(pg_switch_' . $self->walId() . 
'());');\n" - "\n" - "&log(INFO, \"switch WAL ${strWalFileName}\");\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'strWalFileName', value => $strWalFileName}\n" - ");\n" - "}\n" - "\n\n\n\n\n\n" "sub isStandby\n" "{\n" "my $self = shift;\n" @@ -8585,7 +7574,7 @@ "\n" "if ($self->{strDbVersion} >= PG_VERSION_96)\n" "{\n" - "$strCheckpointLSN = $self->executeSqlOne('select checkpoint_' . $self->lsnId() .' from pg_control_checkpoint()');\n" + "$strCheckpointLSN = $self->executeSqlOne('select checkpoint_' . $self->lsnId() .'::text from pg_control_checkpoint()');\n" "\n" "if (lsnNormalize($strCheckpointLSN) le lsnNormalize($strTargetLSN))\n" "{\n" @@ -8859,8 +7848,7 @@ "\n" "foreach my $strConstant (keys(%{$rhConstant}))\n" "{\n" - "eval\n" - "\"use constant ${strConstant} => '\" . $rhConstant->{$strConstant} . \"'\";\n" + "eval \"use constant ${strConstant} => '\" . $rhConstant->{$strConstant} . \"'\";\n" "}\n" "\n\n" "our %EXPORT_TAGS = %{pgBackRest::LibCAuto::libcAutoExportTag()};\n" @@ -8889,8 +7877,7 @@ "\n" "if ($strPrefix eq 'CFGCMD' || $strPrefix eq 'CFGOPT')\n" "{\n" - "eval\n" - "\"use constant ${strConstant} => ${iConstantIdx}\";\n" + "eval \"use constant ${strConstant} => ${iConstantIdx}\";\n" "}\n" "\n" "$strPrefixLast = $strPrefix;\n" @@ -8979,8 +7966,6 @@ "checksum =>\n" "[\n" "'pageChecksum',\n" - "'pageChecksumBufferTest',\n" - "'pageChecksumTest',\n" "],\n" "\n" "config =>\n" @@ -9178,6 +8163,7 @@ "'CFGOPT_REPO_S3_HOST',\n" "'CFGOPT_REPO_S3_KEY',\n" "'CFGOPT_REPO_S3_KEY_SECRET',\n" + "'CFGOPT_REPO_S3_PORT',\n" "'CFGOPT_REPO_S3_REGION',\n" "'CFGOPT_REPO_S3_TOKEN',\n" "'CFGOPT_REPO_S3_VERIFY_TLS',\n" @@ -9257,7 +8243,7 @@ "\n" "storage =>\n" "[\n" - "'storagePosixPathRemove',\n" + "'storageRepoFree',\n" "],\n" "\n" "test =>\n" @@ -9363,25 +8349,6 @@ "cfgOption(CFGOPT_LOCK_PATH), cfgOption(CFGOPT_COMMAND), cfgOption(CFGOPT_STANZA, false), cfgOption(CFGOPT_PROCESS));\n" "}\n" "\n\n\n" - "elsif 
(cfgCommandTest(CFGCMD_LOCAL))\n" - "{\n" - "\n" - "cfgOptionSet(CFGOPT_LOG_LEVEL_STDERR, PROTOCOL, true);\n" - "logLevelSet(cfgOption(CFGOPT_LOG_LEVEL_FILE), OFF, cfgOption(CFGOPT_LOG_LEVEL_STDERR));\n" - "\n" - "logFileSet(\n" - "storageLocal(),\n" - "cfgOption(CFGOPT_LOG_PATH) . '/' . cfgOption(CFGOPT_STANZA) . '-' . lc(cfgOption(CFGOPT_COMMAND)) . '-' .\n" - "lc(cfgCommandName(cfgCommandGet())) . '-' . sprintf(\"%03d\", cfgOption(CFGOPT_PROCESS)));\n" - "\n\n" - "require pgBackRest::Protocol::Local::Minion;\n" - "pgBackRest::Protocol::Local::Minion->import();\n" - "\n\n" - "my $oLocal = new pgBackRest::Protocol::Local::Minion();\n" - "\n\n" - "$oLocal->process();\n" - "}\n" - "\n\n\n" "elsif (cfgCommandTest(CFGCMD_CHECK))\n" "{\n" "\n" @@ -9847,8 +8814,8 @@ "{\n" "confess &log(ASSERT, 'strDbVersion and iDbCatalogVersion must be provided with bLoad = false');\n" "}\n" - "\n" - "$self->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, $strDbVersion);\n" + "\n\n" + "$self->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, $strDbVersion . 
'');\n" "$self->numericSet(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_CATALOG, undef, $iDbCatalogVersion);\n" "}\n" "\n\n" @@ -11943,65 +10910,6 @@ "1;\n" }, { - .name = "pgBackRest/Protocol/Local/Minion.pm", - .data = - "\n\n\n" - "package pgBackRest::Protocol::Local::Minion;\n" - "use parent 'pgBackRest::Protocol::Command::Minion';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "\n" - "use pgBackRest::Backup::File;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Config::Config;\n" - "use pgBackRest::Storage::Local;\n" - "use pgBackRest::Protocol::Base::Master;\n" - "use pgBackRest::Protocol::Base::Minion;\n" - "use pgBackRest::Protocol::Command::Minion;\n" - "use pgBackRest::Protocol::Helper;\n" - "use pgBackRest::RestoreFile;\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my ($strOperation) = logDebugParam(__PACKAGE__ . '->new');\n" - "\n\n" - "my $self = $class->SUPER::new(cfgCommandName(CFGCMD_LOCAL), cfgOption(CFGOPT_BUFFER_SIZE), cfgOption(CFGOPT_PROTOCOL_TIMEOUT));\n" - "bless $self, $class;\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub init\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my ($strOperation) = logDebugParam(__PACKAGE__ . 
'->init');\n" - "\n\n" - "my $hCommandMap =\n" - "{\n" - "&OP_BACKUP_FILE => sub {backupFile(@{shift()})},\n" - "\n\n" - "&OP_POST => sub {protocolKeepAlive()},\n" - "};\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hCommandMap', value => $hCommandMap}\n" - ");\n" - "}\n" - "\n" - "1;\n" - }, - { .name = "pgBackRest/Protocol/Local/Process.pm", .data = "\n\n\n\n\n" @@ -12705,11 +11613,12 @@ "{\n" "my $oSourceFileIo = $oStorage->openRead(@{shift()});\n" "\n\n" - "if (defined($oSourceFileIo))\n" + "if (defined($oSourceFileIo) && (!defined($oSourceFileIo->{oStorageCRead}) || $oSourceFileIo->open()))\n" "{\n" "$self->outputWrite(true);\n" "\n" - "$oStorage->copy($oSourceFileIo, new pgBackRest::Protocol::Storage::File($self, $oSourceFileIo));\n" + "$oStorage->copy(\n" + "$oSourceFileIo, new pgBackRest::Protocol::Storage::File($self, $oSourceFileIo), {bSourceOpen => true});\n" "\n" "return true;\n" "}\n" @@ -12903,11 +11812,10 @@ "\n" "use pgBackRest::Common::Log;\n" "use pgBackRest::Config::Config;\n" + "use pgBackRest::LibC qw(:storage);\n" "use pgBackRest::Protocol::Helper;\n" "use pgBackRest::Protocol::Storage::Remote;\n" - "use pgBackRest::Storage::Base;\n" "use pgBackRest::Storage::Helper;\n" - "use pgBackRest::Storage::Local;\n" "\n\n\n\n" "use constant STORAGE_DB => '';\n" "push @EXPORT, qw(STORAGE_DB);\n" @@ -12940,9 +11848,8 @@ "{\n" "if (isDbLocal({iRemoteIdx => $iRemoteIdx}))\n" "{\n" - "$hStorage->{&STORAGE_DB}{$iRemoteIdx} = new pgBackRest::Storage::Local(\n" - "cfgOption(cfgOptionIdFromIndex(CFGOPT_PG_PATH, $iRemoteIdx)), new pgBackRest::Storage::Posix::Driver(),\n" - "{strTempExtension => STORAGE_TEMP_EXT, lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)});\n" + "$hStorage->{&STORAGE_DB}{$iRemoteIdx} = new pgBackRest::Storage::Storage(\n" + "STORAGE_DB, {lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)});\n" "}\n" "else\n" "{\n" @@ -12960,135 +11867,31 @@ "\n" "push @EXPORT, qw(storageDb);\n" "\n\n\n\n" - "sub storageRepoRule\n" + 
"sub storageRepo\n" "{\n" - "my $strRule = shift;\n" - "my $strFile = shift;\n" - "my $strStanza = shift;\n" - "\n\n" - "my $strResultFile;\n" + "\n" + "my\n" + "(\n" + "$strOperation,\n" + "$strStanza,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . '::storageRepo', \\@_,\n" + "{name => 'strStanza', optional => true, trace => true},\n" + ");\n" "\n\n" - "if ($strRule eq STORAGE_REPO_ARCHIVE)\n" + "if (!defined($hStorage->{&STORAGE_REPO}))\n" "{\n" - "$strResultFile = \"archive\" . (defined($strStanza) ? \"/${strStanza}\" : '');\n" - "\n\n" - "if (defined($strFile))\n" + "if (isRepoLocal())\n" "{\n" - "my ($strArchiveId, $strWalFile) = split('/', $strFile);\n" - "\n\n" - "if (defined($strWalFile) && $strWalFile =~ /^[0-F]{24}/)\n" - "{\n" - "$strResultFile .= \"/${strArchiveId}/\" . substr($strWalFile, 0, 16) . \"/${strWalFile}\";\n" - "}\n" - "\n" - "else\n" - "{\n" - "$strResultFile .= \"/${strFile}\";\n" - "}\n" - "}\n" - "}\n" - "\n" - "elsif ($strRule eq STORAGE_REPO_BACKUP)\n" - "{\n" - "$strResultFile = \"backup\" . (defined($strStanza) ? \"/${strStanza}\" : '') . (defined($strFile) ? \"/${strFile}\" : '');\n" - "}\n" - "\n" - "else\n" - "{\n" - "confess &log(ASSERT, \"invalid \" . STORAGE_REPO . \" storage rule ${strRule}\");\n" - "}\n" - "\n" - "return $strResultFile;\n" - "}\n" - "\n\n\n\n" - "sub storageRepo\n" - "{\n" - "\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strStanza,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'::storageRepo', \\@_,\n" - "{name => 'strStanza', optional => true, trace => true},\n" - ");\n" - "\n" - "if (!defined($strStanza))\n" - "{\n" - "if (cfgOptionValid(CFGOPT_STANZA) && cfgOptionTest(CFGOPT_STANZA))\n" - "{\n" - "$strStanza = cfgOption(CFGOPT_STANZA);\n" - "}\n" - "else\n" - "{\n" - "$strStanza = STORAGE_REPO;\n" - "}\n" - "}\n" - "\n\n" - "if (!defined($hStorage->{&STORAGE_REPO}{$strStanza}))\n" - "{\n" - "if (isRepoLocal())\n" - "{\n" - "\n" - "my $hRule =\n" - "{\n" - "&STORAGE_REPO_ARCHIVE =>\n" - "{\n" - "fnRule => \\&storageRepoRule,\n" - "xData => $strStanza eq STORAGE_REPO ? undef : $strStanza,\n" - "},\n" - "&STORAGE_REPO_BACKUP =>\n" - "{\n" - "fnRule => \\&storageRepoRule,\n" - "xData => $strStanza eq STORAGE_REPO ? undef : $strStanza,\n" - "},\n" - "};\n" - "\n\n" - "my $oDriver;\n" - "\n" - "if (cfgOptionTest(CFGOPT_REPO_TYPE, CFGOPTVAL_REPO_TYPE_S3))\n" - "{\n" - "require pgBackRest::Storage::S3::Driver;\n" - "\n" - "$oDriver = new pgBackRest::Storage::S3::Driver(\n" - "cfgOption(CFGOPT_REPO_S3_BUCKET), cfgOption(CFGOPT_REPO_S3_ENDPOINT), cfgOption(CFGOPT_REPO_S3_REGION),\n" - "cfgOption(CFGOPT_REPO_S3_KEY), cfgOption(CFGOPT_REPO_S3_KEY_SECRET),\n" - "{strHost => cfgOption(CFGOPT_REPO_S3_HOST, false), bVerifySsl => cfgOption(CFGOPT_REPO_S3_VERIFY_TLS, false),\n" - "strCaPath => cfgOption(CFGOPT_REPO_S3_CA_PATH, false),\n" - "strCaFile => cfgOption(CFGOPT_REPO_S3_CA_FILE, false), lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE),\n" - "strSecurityToken => cfgOption(CFGOPT_REPO_S3_TOKEN, false)});\n" - "}\n" - "elsif (cfgOptionTest(CFGOPT_REPO_TYPE, CFGOPTVAL_REPO_TYPE_CIFS))\n" - "{\n" - "require pgBackRest::Storage::Cifs::Driver;\n" - "\n" - "$oDriver = new pgBackRest::Storage::Cifs::Driver();\n" - "}\n" - "else\n" - "{\n" - "$oDriver = new pgBackRest::Storage::Posix::Driver();\n" - "}\n" - "\n\n" - "my $strCipherType;\n" - "my $strCipherPass;\n" - "\n\n" - "if (cfgOption(CFGOPT_REPO_CIPHER_TYPE) ne CFGOPTVAL_REPO_CIPHER_TYPE_NONE)\n" - "{\n" 
- "$strCipherType = cfgOption(CFGOPT_REPO_CIPHER_TYPE);\n" - "$strCipherPass = cfgOption(CFGOPT_REPO_CIPHER_PASS);\n" - "}\n" - "\n\n" - "$hStorage->{&STORAGE_REPO}{$strStanza} = new pgBackRest::Storage::Local(\n" - "cfgOption(CFGOPT_REPO_PATH), $oDriver,\n" - "{strTempExtension => STORAGE_TEMP_EXT, hRule => $hRule, lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE),\n" - "strCipherType => $strCipherType, strCipherPassUser => $strCipherPass});\n" + "$hStorage->{&STORAGE_REPO} = new pgBackRest::Storage::Storage(\n" + "STORAGE_REPO, {lBufferMax => cfgOption(CFGOPT_BUFFER_SIZE)});\n" "}\n" "else\n" "{\n" "\n" - "$hStorage->{&STORAGE_REPO}{$strStanza} = new pgBackRest::Protocol::Storage::Remote(\n" + "$hStorage->{&STORAGE_REPO} = new pgBackRest::Protocol::Storage::Remote(\n" "protocolGet(CFGOPTVAL_REMOTE_TYPE_BACKUP));\n" "}\n" "}\n" @@ -13096,7 +11899,7 @@ "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'oStorageRepo', value => $hStorage->{&STORAGE_REPO}{$strStanza}, trace => true},\n" + "{name => 'oStorageRepo', value => $hStorage->{&STORAGE_REPO}, trace => true},\n" ");\n" "}\n" "\n" @@ -13108,6 +11911,8 @@ "my ($strOperation) = logDebugParam(__PACKAGE__ . 
'::storageRepoCacheClear');\n" "\n" "delete($hStorage->{&STORAGE_REPO});\n" + "\n" + "storageRepoFree();\n" "\n\n" "return logDebugReturn($strOperation);\n" "}\n" @@ -13134,7 +11939,6 @@ "use pgBackRest::Protocol::Helper;\n" "use pgBackRest::Protocol::Storage::File;\n" "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" "\n\n\n\n" "sub new\n" "{\n" @@ -13282,27 +12086,11 @@ "{name => 'strFileExp'},\n" "{name => 'rhParam', required => false},\n" ");\n" - "\n\n" - "my $bProtocolCompress = protocolCompress($rhParam);\n" - "\n\n" - "if ($bProtocolCompress)\n" - "{\n" - "push(\n" - "@{$rhParam->{rhyFilter}},\n" - "{strClass => STORAGE_FILTER_GZIP,\n" - "rxyParam => [{iLevel => cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK), bWantGzip => false}]});\n" - "}\n" "\n" "my $oSourceFileIo =\n" "$self->{oProtocol}->cmdExecute(OP_STORAGE_OPEN_READ, [$strFileExp, $rhParam]) ?\n" "new pgBackRest::Protocol::Storage::File($self->{oProtocol}) : undef;\n" "\n\n" - "if ($bProtocolCompress)\n" - "{\n" - "$oSourceFileIo = new pgBackRest::Storage::Filter::Gzip(\n" - "$oSourceFileIo, {strCompressType => STORAGE_DECOMPRESS, bWantGzip => false});\n" - "}\n" - "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" @@ -13327,24 +12115,9 @@ "{name => 'rhParam', required => false},\n" ");\n" "\n\n" - "my $bProtocolCompress = protocolCompress($rhParam);\n" - "\n\n" - "if ($bProtocolCompress)\n" - "{\n" - "push(\n" - "@{$rhParam->{rhyFilter}},\n" - "{strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS, bWantGzip => false}]});\n" - "}\n" - "\n\n" "$self->{oProtocol}->cmdWrite(OP_STORAGE_OPEN_WRITE, [$strFileExp, $rhParam]);\n" "my $oDestinationFileIo = new pgBackRest::Protocol::Storage::File($self->{oProtocol});\n" "\n\n" - "if ($bProtocolCompress)\n" - "{\n" - "$oDestinationFileIo = new pgBackRest::Storage::Filter::Gzip(\n" - "$oDestinationFileIo, {iLevel => cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK), bWantGzip => false});\n" - "}\n" - "\n\n" 
"return logDebugReturn\n" "(\n" "$strOperation,\n" @@ -13427,21 +12200,6 @@ ");\n" "}\n" "\n\n\n\n" - "sub protocolCompress\n" - "{\n" - "my $rhParam = shift;\n" - "\n" - "my $bProtocolCompress = false;\n" - "\n" - "if (defined($rhParam->{bProtocolCompress}))\n" - "{\n" - "$bProtocolCompress = $rhParam->{bProtocolCompress} && cfgOption(CFGOPT_COMPRESS_LEVEL_NETWORK) > 0 ? true : false;\n" - "delete($rhParam->{bProtocolCompress});\n" - "}\n" - "\n" - "return $bProtocolCompress;\n" - "}\n" - "\n\n\n\n" "sub protocol {shift->{oProtocol}};\n" "\n" "1;\n" @@ -13643,8 +12401,7 @@ "}\n" "\n\n" "storageDb()->copy(\n" - "storageRepo()->openRead(STORAGE_REPO_BACKUP . \"/$self->{strBackupSet}/\" . FILE_MANIFEST, {bProtocolCompress => true,\n" - "strCipherPass => $strCipherPass}),\n" + "storageRepo()->openRead(STORAGE_REPO_BACKUP . \"/$self->{strBackupSet}/\" . FILE_MANIFEST, {strCipherPass => $strCipherPass}),\n" "$self->{strDbClusterPath} . '/' . FILE_MANIFEST);\n" "\n\n" "my $oManifest = new pgBackRest::Manifest(\n" @@ -14400,7 +13157,7 @@ "}\n" "\n\n" "$oStorageDb->copy(\n" - "storageRepo()->openRead(STORAGE_REPO_BACKUP . qw(/) . FILE_BACKUP_INFO, {bProtocolCompress => true}),\n" + "storageRepo()->openRead(STORAGE_REPO_BACKUP . qw(/) . FILE_BACKUP_INFO),\n" "$self->{strDbClusterPath} . '/' . FILE_BACKUP_INFO);\n" "\n" "my $oBackupInfo = new pgBackRest::Backup::Info($self->{strDbClusterPath}, false, undef, {oStorage => storageDb()});\n" @@ -14659,9 +13416,6 @@ "use pgBackRest::Config::Config;\n" "use pgBackRest::Manifest;\n" "use pgBackRest::Protocol::Storage::Helper;\n" - "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Gzip;\n" - "use pgBackRest::Storage::Filter::Sha;\n" "use pgBackRest::Storage::Helper;\n" "\n\n\n\n\n\n" "sub restoreLog\n" @@ -14966,7 +13720,7 @@ "my ($strOperation) = logDebugParam(__PACKAGE__ . 
'->stanzaDelete');\n" "\n" "my $strStanza = cfgOption(CFGOPT_STANZA);\n" - "my $oStorageRepo = storageRepo({strStanza => $strStanza});\n" + "my $oStorageRepo = storageRepo();\n" "\n\n" "if ($oStorageRepo->pathExists(STORAGE_REPO_ARCHIVE) || $oStorageRepo->pathExists(STORAGE_REPO_BACKUP))\n" "{\n" @@ -15006,8 +13760,8 @@ "$oStorageRepo->remove(STORAGE_REPO_BACKUP . \"/${strBackup}/\" . FILE_MANIFEST_COPY, {bIgnoreMissing => true});\n" "}\n" "\n\n" - "$oStorageRepo->remove(STORAGE_REPO_ARCHIVE, {bRecurse => true, bIgnoreMissing => true});\n" - "$oStorageRepo->remove(STORAGE_REPO_BACKUP, {bRecurse => true, bIgnoreMissing => true});\n" + "$oStorageRepo->pathRemove(STORAGE_REPO_ARCHIVE, {bRecurse => true, bIgnoreMissing => true});\n" + "$oStorageRepo->pathRemove(STORAGE_REPO_BACKUP, {bRecurse => true, bIgnoreMissing => true});\n" "\n\n" "lockStart();\n" "}\n" @@ -15400,6 +14154,14 @@ "use pgBackRest::Common::Io::Base;\n" "use pgBackRest::Common::Log;\n" "\n\n\n\n" + "use constant STORAGE_LOCAL => '';\n" + "push @EXPORT, qw(STORAGE_LOCAL);\n" + "\n" + "use constant STORAGE_S3 => 's3';\n" + "push @EXPORT, qw(STORAGE_S3);\n" + "use constant STORAGE_POSIX => 'posix';\n" + "push @EXPORT, qw(STORAGE_POSIX);\n" + "\n\n\n\n" "use constant STORAGE_COMPRESS => 'compress';\n" "push @EXPORT, qw(STORAGE_COMPRESS);\n" "use constant STORAGE_DECOMPRESS => 'decompress';\n" @@ -15411,6 +14173,13 @@ "push @EXPORT, qw(STORAGE_DECRYPT);\n" "use constant CIPHER_MAGIC => 'Salted__';\n" "push @EXPORT, qw(CIPHER_MAGIC);\n" + "\n\n\n\n" + "use constant STORAGE_FILTER_CIPHER_BLOCK => 'pgBackRest::Storage::Filter::CipherBlock';\n" + "push @EXPORT, qw(STORAGE_FILTER_CIPHER_BLOCK);\n" + "use constant STORAGE_FILTER_GZIP => 'pgBackRest::Storage::Filter::Gzip';\n" + "push @EXPORT, qw(STORAGE_FILTER_GZIP);\n" + "use constant STORAGE_FILTER_SHA => 'pgBackRest::Storage::Filter::Sha';\n" + "push @EXPORT, qw(STORAGE_FILTER_SHA);\n" "\n\n\n\n\n\n\n" "use constant STORAGE_CAPABILITY_SIZE_DIFF => 
'size-diff';\n" "push @EXPORT, qw(STORAGE_CAPABILITY_SIZE_DIFF);\n" @@ -15443,7 +14212,7 @@ "{name => 'self', value => $self}\n" ");\n" "}\n" - "\n\n\n\n\n" + "\n\n\n\n\n\n" "sub copy\n" "{\n" "my $self = shift;\n" @@ -15453,41 +14222,56 @@ "$strOperation,\n" "$xSourceFile,\n" "$xDestinationFile,\n" + "$bSourceOpen,\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . '->copy', \\@_,\n" "{name => 'xSourceFile', required => false},\n" - "{name => 'xDestinationFile', required => false},\n" + "{name => 'xDestinationFile'},\n" + "{name => 'bSourceOpen', optional => true, default => false},\n" ");\n" "\n\n" - "my $bResult = false;\n" + "my $oSourceFileIo = defined($xSourceFile) ? (ref($xSourceFile) ? $xSourceFile : $self->openRead($xSourceFile)) : undef;\n" "\n\n" - "my $oSourceFileIo =\n" - "defined($xSourceFile) ?\n" - "(ref($xSourceFile) ? $xSourceFile : $self->openRead($self->pathGet($xSourceFile))) : undef;\n" + "my $bResult = false;\n" "\n\n" "if (defined($oSourceFileIo))\n" "{\n" - "\n" - "my $oDestinationFileIo = ref($xDestinationFile) ? $xDestinationFile : $self->openWrite($self->pathGet($xDestinationFile));\n" + "my $oDestinationFileIo = ref($xDestinationFile) ? $xDestinationFile : $self->openWrite($xDestinationFile);\n" "\n\n" - "my $lSizeRead;\n" + "if (defined($oSourceFileIo->{oStorageCRead}) && defined($oDestinationFileIo->{oStorageCWrite}))\n" + "{\n" + "$bResult = $self->{oStorageC}->copy(\n" + "$oSourceFileIo->{oStorageCRead}, $oDestinationFileIo->{oStorageCWrite}) ? true : false;\n" + "}\n" + "else\n" + "{\n" + "\n" + "$bResult = defined($oSourceFileIo->{oStorageCRead}) ? 
($bSourceOpen || $oSourceFileIo->open()) : true;\n" + "\n" + "if ($bResult)\n" + "{\n" "\n" + "if (defined($oDestinationFileIo->{oStorageCWrite}))\n" + "{\n" + "$oDestinationFileIo->open();\n" + "}\n" + "\n\n" "do\n" "{\n" "\n" "my $tBuffer = '';\n" "\n" - "$lSizeRead = $oSourceFileIo->read(\\$tBuffer, $self->{lBufferMax});\n" + "$oSourceFileIo->read(\\$tBuffer, $self->{lBufferMax});\n" "$oDestinationFileIo->write(\\$tBuffer);\n" "}\n" - "while ($lSizeRead != 0);\n" + "while (!$oSourceFileIo->eof());\n" "\n\n" "$oSourceFileIo->close();\n" "$oDestinationFileIo->close();\n" - "\n\n" - "$bResult = true;\n" + "}\n" + "}\n" "}\n" "\n" "return logDebugReturn\n" @@ -15650,109 +14434,105 @@ "1;\n" }, { - .name = "pgBackRest/Storage/Cifs/Driver.pm", + .name = "pgBackRest/Storage/Helper.pm", .data = - "\n\n\n\n\n" - "package pgBackRest::Storage::Cifs::Driver;\n" - "use parent 'pgBackRest::Storage::Posix::Driver';\n" + "\n\n\n" + "package pgBackRest::Storage::Helper;\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" "\n" "use Exporter qw(import);\n" "our @EXPORT = qw();\n" + "use File::Basename qw(basename);\n" "\n" "use pgBackRest::Common::Log;\n" + "use pgBackRest::Config::Config;\n" "use pgBackRest::Storage::Base;\n" + "use pgBackRest::Storage::Storage;\n" + "use pgBackRest::Version;\n" "\n\n\n\n" - "use constant STORAGE_CIFS_DRIVER => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_CIFS_DRIVER);\n" + "use constant COMPRESS_EXT => 'gz';\n" + "push @EXPORT, qw(COMPRESS_EXT);\n" "\n\n\n\n" - "sub pathSync\n" + "use constant STORAGE_TEMP_EXT => PROJECT_EXE . '.tmp';\n" + "push @EXPORT, qw(STORAGE_TEMP_EXT);\n" + "\n\n\n\n\n\n\n" + "sub storageLocal\n" "{\n" - "my $self = shift;\n" + "\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '::storageLocal');\n" "\n\n" - "my\n" + "return logDebugReturn\n" "(\n" "$strOperation,\n" - "$strPath,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->pathSync', \\@_,\n" - "{name => 'strPath', trace => true},\n" + "{name => 'oStorageLocal', value => new pgBackRest::Storage::Storage(STORAGE_LOCAL), trace => true},\n" ");\n" - "\n\n" - "return logDebugReturn($strOperation);\n" "}\n" - "\n\n\n\n" - "sub capability {shift eq STORAGE_CAPABILITY_SIZE_DIFF ? true : false}\n" - "sub className {STORAGE_CIFS_DRIVER}\n" + "\n" + "push @EXPORT, qw(storageLocal);\n" "\n" "1;\n" }, { - .name = "pgBackRest/Storage/Filter/CipherBlock.pm", + .name = "pgBackRest/Storage/Storage.pm", .data = "\n\n\n" - "package pgBackRest::Storage::Filter::CipherBlock;\n" - "use parent 'pgBackRest::Common::Io::Filter';\n" + "package pgBackRest::Storage::Storage;\n" + "use parent 'pgBackRest::Storage::Base';\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" "use Carp qw(confess);\n" "use English '-no_match_vars';\n" "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" + "use File::Basename qw(dirname);\n" + "use Fcntl qw(:mode);\n" + "use File::stat qw{lstat};\n" + "use JSON::PP;\n" "\n" "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Io::Base;\n" + "use pgBackRest::Common::Io::Handle;\n" "use pgBackRest::Common::Log;\n" - "use pgBackRest::LibC qw(:crypto);\n" "use pgBackRest::Storage::Base;\n" - "\n\n\n\n" - "use constant STORAGE_FILTER_CIPHER_BLOCK => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_FILTER_CIPHER_BLOCK);\n" + "use pgBackRest::Storage::StorageRead;\n" + "use pgBackRest::Storage::StorageWrite;\n" "\n\n\n\n" "sub new\n" "{\n" "my $class = shift;\n" "\n\n" - "my\n" + "my $self = {};\n" + "bless $self, $class;\n" + "\n\n" "(\n" - "$strOperation,\n" - "$oParent,\n" - "$strCipherType,\n" - "$tCipherPass,\n" - "$strMode,\n" + "my $strOperation,\n" + "$self->{strType},\n" + "$self->{strPath},\n" + "$self->{lBufferMax},\n" + "$self->{strDefaultPathMode},\n" + "$self->{strDefaultFileMode},\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'oParent', trace => true},\n" - "{name => 'strCipherType', trace => true},\n" - "{name => 'tCipherPass', trace => true},\n" - "{name => 'strMode', optional => true, default => STORAGE_ENCRYPT, trace => true},\n" + "{name => 'strType'},\n" + "{name => 'strPath', optional => true},\n" + "{name => 'lBufferMax', optional => true, default => 65536},\n" + "{name => 'strDefaultPathMode', optional => true, default => '0750'},\n" + "{name => 'strDefaultFileMode', optional => true, default => '0640'},\n" ");\n" "\n\n" - "my $self = $class->SUPER::new($oParent);\n" - "bless $self, $class;\n" + "$self->{oStorageC} = pgBackRest::LibC::Storage->new($self->{strType}, $self->{strPath});\n" "\n\n" - "$self->{strMode} = $strMode;\n" - "\n" - "if (!($self->{strMode} eq STORAGE_ENCRYPT || $self->{strMode} eq STORAGE_DECRYPT))\n" + "if ($self->{strType} eq '')\n" "{\n" - "confess &log(ASSERT, \"unknown cipher mode: $self->{strMode}\");\n" + "$self->{strCipherType} = $self->{oStorageC}->cipherType();\n" + "$self->{strCipherPass} = $self->{oStorageC}->cipherPass();\n" "}\n" "\n\n" - "$self->{bWrite} = false;\n" - "\n\n" - "$self->{oCipher} = new pgBackRest::LibC::Cipher::Block(\n" - "$self->{strMode} eq STORAGE_ENCRYPT ? 
CIPHER_MODE_ENCRYPT : CIPHER_MODE_DECRYPT, $strCipherType, $tCipherPass,\n" - "length($tCipherPass));\n" + "$self->{oJSON} = JSON::PP->new()->allow_nonref();\n" "\n\n" "return logDebugReturn\n" "(\n" @@ -15761,2837 +14541,553 @@ ");\n" "}\n" "\n\n\n\n" - "sub read\n" + "sub exists\n" "{\n" "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "my $iSize = shift;\n" - "\n\n" - "return 0 if $self->eof();\n" - "\n\n" - "my $tBufferRead = '';\n" - "my $iBufferReadSize = 0;\n" - "\n" - "do\n" - "{\n" - "\n" - "my $tCipherBuffer;\n" - "my $iActualSize = $self->SUPER::read(\\$tCipherBuffer, $iSize);\n" - "\n\n" - "if ($iActualSize > 0)\n" - "{\n" - "$tBufferRead .= $self->{oCipher}->process($tCipherBuffer);\n" - "}\n" - "\n\n" - "if ($self->eof())\n" - "{\n" - "$tBufferRead .= $self->{oCipher}->flush();\n" - "}\n" "\n\n" - "$iBufferReadSize = length($tBufferRead);\n" - "}\n" - "while ($iBufferReadSize < $iSize && !$self->eof());\n" + "my\n" + "(\n" + "$strOperation,\n" + "$strFileExp,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . '->exists', \\@_,\n" + "{name => 'strFileExp'},\n" + ");\n" "\n\n" - "$$rtBuffer .= $tBufferRead;\n" + "my $bExists = $self->{oStorageC}->exists($strFileExp);\n" "\n\n" - "return $iBufferReadSize;\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bExists', value => $bExists ? true : false}\n" + ");\n" "}\n" "\n\n\n\n" - "sub write\n" + "sub get\n" "{\n" "my $self = shift;\n" - "my $rtBuffer = shift;\n" "\n\n" - "$self->{bWrite} = true;\n" + "my\n" + "(\n" + "$strOperation,\n" + "$xFile,\n" + "$strCipherPass,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . '->get', \\@_,\n" + "{name => 'xFile', required => false, trace => true},\n" + "{name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true},\n" + ");\n" + "\n\n\n" + "my $oFileIo = defined($xFile) ? (ref($xFile) ? 
$xFile : $self->openRead($xFile, {strCipherPass => $strCipherPass})) : undef;\n" "\n\n" - "my $tCipherBuffer;\n" + "my $bEmpty = false;\n" + "my $tContent = $self->{oStorageC}->get($oFileIo->{oStorageCRead});\n" "\n" - "if (defined($$rtBuffer))\n" + "if (defined($tContent) && length($tContent) == 0)\n" "{\n" - "$tCipherBuffer = $self->{oCipher}->process($$rtBuffer);\n" + "$tContent = undef;\n" + "$bEmpty = true;\n" "}\n" "\n\n" - "$self->SUPER::write(\\$tCipherBuffer);\n" - "\n" - "return length($$rtBuffer);\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'rtContent', value => defined($tContent) || $bEmpty ? \\$tContent : undef, trace => true},\n" + ");\n" "}\n" - "\n\n\n\n" - "sub close\n" + "\n\n\n\n\n" + "sub hashSize\n" "{\n" "my $self = shift;\n" "\n\n" - "if ($self->{oCipher})\n" - "{\n" - "\n" - "if ($self->{bWrite})\n" - "{\n" - "my $tCipherBuffer = $self->{oCipher}->flush();\n" - "$self->SUPER::write(\\$tCipherBuffer);\n" - "}\n" - "\n" - "undef($self->{oCipher});\n" - "\n\n" - "return $self->SUPER::close();\n" - "}\n" - "\n" - "return false;\n" - "}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Filter/Gzip.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::Filter::Gzip;\n" - "use parent 'pgBackRest::Common::Io::Filter';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Compress::Raw::Zlib qw(WANT_GZIP MAX_WBITS Z_OK Z_BUF_ERROR Z_DATA_ERROR Z_STREAM_END);\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Io::Base;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Storage::Base;\n" - "\n\n\n\n" - "use constant STORAGE_FILTER_GZIP => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_FILTER_GZIP);\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$oParent,\n" - "$bWantGzip,\n" - 
"$strCompressType,\n" - "$iLevel,\n" - "$lCompressBufferMax,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'oParent', trace => true},\n" - "{name => 'bWantGzip', optional => true, default => true, trace => true},\n" - "{name => 'strCompressType', optional => true, default => STORAGE_COMPRESS, trace => true},\n" - "{name => 'iLevel', optional => true, default => 6, trace => true},\n" - "{name => 'lCompressBufferMax', optional => true, default => COMMON_IO_BUFFER_MAX, trace => true},\n" - ");\n" - "\n\n" - "my $self = $class->SUPER::new($oParent);\n" - "bless $self, $class;\n" - "\n\n" - "$self->{bWantGzip} = $bWantGzip;\n" - "$self->{iLevel} = $iLevel;\n" - "$self->{lCompressBufferMax} = $lCompressBufferMax;\n" - "$self->{strCompressType} = $strCompressType;\n" - "\n\n" - "$self->{bWrite} = false;\n" - "\n\n" - "my $iZLibStatus;\n" - "\n" - "if ($self->{strCompressType} eq STORAGE_COMPRESS)\n" - "{\n" - "($self->{oZLib}, $iZLibStatus) = new Compress::Raw::Zlib::Deflate(\n" - "WindowBits => $self->{bWantGzip} ? WANT_GZIP : MAX_WBITS, Level => $self->{iLevel},\n" - "Bufsize => $self->{lCompressBufferMax}, AppendOutput => 1);\n" - "\n" - "$self->{tCompressedBuffer} = undef;\n" - "}\n" - "else\n" - "{\n" - "($self->{oZLib}, $iZLibStatus) = new Compress::Raw::Zlib::Inflate(\n" - "WindowBits => $self->{bWantGzip} ? WANT_GZIP : MAX_WBITS, Bufsize => $self->{lCompressBufferMax},\n" - "LimitOutput => 1, AppendOutput => 1);\n" - "\n" - "$self->{tUncompressedBuffer} = undef;\n" - "$self->{lUncompressedBufferSize} = 0;\n" - "}\n" - "\n" - "$self->errorCheck($iZLibStatus);\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub errorCheck\n" - "{\n" - "my $self = shift;\n" - "my $iZLibStatus = shift;\n" - "\n" - "if (!($iZLibStatus == Z_OK || $iZLibStatus == Z_BUF_ERROR))\n" - "{\n" - "logErrorResult(\n" - "$self->{bWrite} ? 
ERROR_FILE_WRITE : ERROR_FILE_READ,\n" - "'unable to ' . ($self->{strCompressType} eq STORAGE_COMPRESS ? 'deflate' : 'inflate') . \" '\" .\n" - "$self->parent()->name() . \"'\",\n" - "$self->{oZLib}->msg());\n" - "}\n" - "\n" - "return Z_OK;\n" - "}\n" - "\n\n\n\n" - "sub read\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "my $iSize = shift;\n" - "\n" - "if ($self->{strCompressType} eq STORAGE_COMPRESS)\n" - "{\n" - "return 0 if $self->eof();\n" - "\n" - "my $lSizeBegin = defined($$rtBuffer) ? length($$rtBuffer) : 0;\n" - "my $lUncompressedSize;\n" - "my $lCompressedSize;\n" - "\n" - "do\n" - "{\n" - "my $tUncompressedBuffer;\n" - "$lUncompressedSize = $self->parent()->read(\\$tUncompressedBuffer, $iSize);\n" - "\n" - "if ($lUncompressedSize > 0)\n" - "{\n" - "$self->errorCheck($self->{oZLib}->deflate($tUncompressedBuffer, $$rtBuffer));\n" - "}\n" - "else\n" - "{\n" - "$self->errorCheck($self->{oZLib}->flush($$rtBuffer));\n" - "}\n" - "\n" - "$lCompressedSize = length($$rtBuffer) - $lSizeBegin;\n" - "}\n" - "while ($lUncompressedSize > 0 && $lCompressedSize < $iSize);\n" - "\n\n" - "return $lCompressedSize;\n" - "}\n" - "else\n" - "{\n" - "\n" - "while ($self->{lUncompressedBufferSize} < $iSize && !$self->parent()->eof())\n" - "{\n" - "if (!defined($self->{tCompressedBuffer}) || length($self->{tCompressedBuffer}) == 0)\n" - "{\n" - "$self->parent()->read(\\$self->{tCompressedBuffer}, $self->{lCompressBufferMax});\n" - "}\n" - "\n" - "my $iZLibStatus = $self->{oZLib}->inflate($self->{tCompressedBuffer}, $self->{tUncompressedBuffer});\n" - "$self->{lUncompressedBufferSize} = length($self->{tUncompressedBuffer});\n" - "\n" - "last if $iZLibStatus == Z_STREAM_END;\n" - "\n" - "$self->errorCheck($iZLibStatus);\n" - "}\n" - "\n\n\n" - "my $iActualSize = $self->{lUncompressedBufferSize} < $iSize ? 
$self->{lUncompressedBufferSize} : $iSize;\n" - "\n\n" - "$$rtBuffer .= substr($self->{tUncompressedBuffer}, 0, $iActualSize);\n" - "\n\n" - "$self->{tUncompressedBuffer} = substr($self->{tUncompressedBuffer}, $iActualSize);\n" - "$self->{lUncompressedBufferSize} -= $iActualSize;\n" - "\n\n" - "return $iActualSize;\n" - "}\n" - "}\n" - "\n\n\n\n" - "sub write\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "\n" - "$self->{bWrite} = true;\n" - "\n" - "if ($self->{strCompressType} eq STORAGE_COMPRESS)\n" - "{\n" - "\n" - "$self->errorCheck($self->{oZLib}->deflate($$rtBuffer, $self->{tCompressedBuffer}));\n" - "\n\n" - "if (length($self->{tCompressedBuffer}) > $self->{lCompressBufferMax})\n" - "{\n" - "$self->parent()->write(\\$self->{tCompressedBuffer});\n" - "$self->{tCompressedBuffer} = undef;\n" - "}\n" - "}\n" - "else\n" - "{\n" - "my $tCompressedBuffer = $$rtBuffer;\n" - "\n" - "while (length($tCompressedBuffer) > 0)\n" - "{\n" - "my $tUncompressedBuffer;\n" - "\n" - "my $iZLibStatus = $self->{oZLib}->inflate($tCompressedBuffer, $tUncompressedBuffer);\n" - "$self->parent()->write(\\$tUncompressedBuffer);\n" - "\n" - "last if $iZLibStatus == Z_STREAM_END;\n" - "\n" - "$self->errorCheck($iZLibStatus);\n" - "}\n" - "}\n" - "\n\n" - "return length($$rtBuffer);\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n" - "if (defined($self->{oZLib}))\n" - "{\n" - "\n" - "if ($self->{bWrite})\n" - "{\n" - "if ($self->{strCompressType} eq STORAGE_COMPRESS)\n" - "{\n" - "\n" - "$self->errorCheck($self->{oZLib}->flush($self->{tCompressedBuffer}));\n" - "\n\n" - "$self->parent()->write(\\$self->{tCompressedBuffer});\n" - "}\n" - "}\n" - "\n" - "undef($self->{oZLib});\n" - "\n\n" - "return $self->parent()->close();\n" - "}\n" - "\n" - "return false;\n" - "}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Filter/Sha.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::Filter::Sha;\n" - "use parent 
'pgBackRest::Common::Io::Filter';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "\n\n\n\n" - "use constant STORAGE_FILTER_SHA => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_FILTER_SHA);\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$oParent,\n" - "$strAlgorithm,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'oParent', trace => true},\n" - "{name => 'strAlgorithm', optional => true, default => 'sha1', trace => true},\n" - ");\n" - "\n\n" - "my $self = $class->SUPER::new($oParent);\n" - "bless $self, $class;\n" - "\n\n" - "$self->{strAlgorithm} = $strAlgorithm;\n" - "\n\n" - "$self->{oSha} = new pgBackRest::LibC::Crypto::Hash($self->{strAlgorithm});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub read\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "my $iSize = shift;\n" - "\n\n" - "my $tShaBuffer;\n" - "my $iActualSize = $self->parent()->read(\\$tShaBuffer, $iSize);\n" - "\n\n" - "if ($iActualSize > 0)\n" - "{\n" - "$self->{oSha}->process($tShaBuffer);\n" - "$$rtBuffer .= $tShaBuffer;\n" - "}\n" - "\n\n" - "return $iActualSize;\n" - "}\n" - "\n\n\n\n" - "sub write\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "\n\n" - "$self->{oSha}->process($$rtBuffer);\n" - "\n\n" - "return $self->parent()->write($rtBuffer);\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n" - "if (defined($self->{oSha}))\n" - "{\n" - "\n" - "$self->resultSet(STORAGE_FILTER_SHA, $self->{oSha}->result());\n" - "\n\n" - "delete($self->{oSha});\n" - "\n\n" - "return $self->parent->close();\n" - "}\n" - "\n" 
- "return false;\n" - "}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Helper.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::Helper;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use File::Basename qw(basename);\n" - "\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Config::Config;\n" - "use pgBackRest::Storage::Posix::Driver;\n" - "use pgBackRest::Storage::Local;\n" - "use pgBackRest::Version;\n" - "\n\n\n\n" - "use constant STORAGE_LOCAL => '';\n" - "push @EXPORT, qw(STORAGE_LOCAL);\n" - "\n\n\n\n" - "use constant COMPRESS_EXT => 'gz';\n" - "push @EXPORT, qw(COMPRESS_EXT);\n" - "\n\n\n\n" - "use constant STORAGE_TEMP_EXT => PROJECT_EXE . '.tmp';\n" - "push @EXPORT, qw(STORAGE_TEMP_EXT);\n" - "\n\n\n\n" - "my $hStorage;\n" - "\n\n\n\n\n\n\n" - "sub storageLocal\n" - "{\n" - "\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '::storageLocal', \\@_,\n" - "{name => 'strPath', default => '/', trace => true},\n" - ");\n" - "\n\n" - "if (!defined($hStorage->{&STORAGE_LOCAL}{$strPath}))\n" - "{\n" - "\n" - "$hStorage->{&STORAGE_LOCAL}{$strPath} = new pgBackRest::Storage::Local(\n" - "$strPath, new pgBackRest::Storage::Posix::Driver(),\n" - "{strTempExtension => STORAGE_TEMP_EXT,\n" - "lBufferMax => cfgOptionValid(CFGOPT_BUFFER_SIZE, false) ? 
cfgOption(CFGOPT_BUFFER_SIZE, false) : undef});\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oStorageLocal', value => $hStorage->{&STORAGE_LOCAL}{$strPath}, trace => true},\n" - ");\n" - "}\n" - "\n" - "push @EXPORT, qw(storageLocal);\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Local.pm", - .data = - "\n\n\n\n\n" - "package pgBackRest::Storage::Local;\n" - "use parent 'pgBackRest::Storage::Base';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use File::Basename qw(dirname);\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::String;\n" - "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Filter::Sha;\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathBase,\n" - "$oDriver,\n" - "$hRule,\n" - "$bAllowTemp,\n" - "$strTempExtension,\n" - "$strDefaultPathMode,\n" - "$strDefaultFileMode,\n" - "$lBufferMax,\n" - "$strCipherType,\n" - "$strCipherPassUser,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'strPathBase'},\n" - "{name => 'oDriver'},\n" - "{name => 'hRule', optional => true},\n" - "{name => 'bAllowTemp', optional => true, default => true},\n" - "{name => 'strTempExtension', optional => true, default => 'tmp'},\n" - "{name => 'strDefaultPathMode', optional => true, default => '0750'},\n" - "{name => 'strDefaultFileMode', optional => true, default => '0640'},\n" - "{name => 'lBufferMax', optional => true},\n" - "{name => 'strCipherType', optional => true},\n" - "{name => 'strCipherPassUser', optional => true, redact => true},\n" - ");\n" - "\n\n" - "my $self = $class->SUPER::new({lBufferMax => $lBufferMax});\n" - "bless $self, $class;\n" - "\n" - "$self->{strPathBase} = $strPathBase;\n" - "$self->{oDriver} = $oDriver;\n" - "$self->{hRule} = $hRule;\n" - "$self->{bAllowTemp} = $bAllowTemp;\n" - "$self->{strTempExtension} = $strTempExtension;\n" - "$self->{strDefaultPathMode} = $strDefaultPathMode;\n" - "$self->{strDefaultFileMode} = $strDefaultFileMode;\n" - "$self->{strCipherType} = $strCipherType;\n" - "$self->{strCipherPassUser} = $strCipherPassUser;\n" - "\n" - "if (defined($self->{strCipherType}))\n" - "{\n" - "require pgBackRest::Storage::Filter::CipherBlock;\n" - "pgBackRest::Storage::Filter::CipherBlock->import();\n" - "}\n" - "\n\n" - "$self->driver()->tempExtensionSet($self->{strTempExtension}) if $self->driver()->can('tempExtensionSet');\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub exists\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFileExp,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->exists', \\@_,\n" - "{name => 'strFileExp'},\n" - ");\n" - "\n\n" - "my $bExists = $self->driver()->exists($self->pathGet($strFileExp));\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bExists', value => $bExists}\n" - ");\n" - "}\n" - "\n\n\n\n\n" - "sub hashSize\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$xFileExp,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->hashSize', \\@_,\n" - "{name => 'xFileExp'},\n" - "{name => 'bIgnoreMissing', optional => true, default => false},\n" - ");\n" - "\n\n" - "my $strHash;\n" - "my $lSize;\n" - "\n\n" - "my $oFileIo =\n" - "defined($xFileExp) ? (ref($xFileExp) ? $xFileExp :\n" - "$self->openRead($self->pathGet($xFileExp), {bIgnoreMissing => $bIgnoreMissing})) : undef;\n" - "\n" - "if (defined($oFileIo))\n" - "{\n" - "$lSize = 0;\n" - "my $oShaIo = new pgBackRest::Storage::Filter::Sha($oFileIo);\n" - "my $lSizeRead;\n" - "\n" - "do\n" - "{\n" - "my $tContent;\n" - "$lSizeRead = $oShaIo->read(\\$tContent, $self->{lBufferMax});\n" - "$lSize += $lSizeRead;\n" - "}\n" - "while ($lSizeRead != 0);\n" - "\n\n" - "$oShaIo->close();\n" - "\n\n" - "$strHash = $oShaIo->result(STORAGE_FILTER_SHA);\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'strHash', value => $strHash},\n" - "{name => 'lSize', value => $lSize}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub info\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathFileExp,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'::fileStat', \\@_,\n" - "{name => 'strPathFileExp'},\n" - "{name => 'bIgnoreMissing', optional => true, default => false},\n" - ");\n" - "\n\n" - "my $oInfo = $self->driver()->info($self->pathGet($strPathFileExp), {bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oInfo', value => $oInfo, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub linkCreate\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strSourcePathFileExp,\n" - "$strDestinationLinkExp,\n" - "$bHard,\n" - "$bRelative,\n" - "$bPathCreate,\n" - "$bIgnoreExists,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->linkCreate', \\@_,\n" - "{name => 'strSourcePathFileExp'},\n" - "{name => 'strDestinationLinkExp'},\n" - "{name => 'bHard', optional=> true, default => false},\n" - "{name => 'bRelative', optional=> true, default => false},\n" - "{name => 'bPathCreate', optional=> true, default => true},\n" - "{name => 'bIgnoreExists', optional => true, default => false},\n" - ");\n" - "\n\n" - "my $strSourcePathFile = $self->pathGet($strSourcePathFileExp);\n" - "my $strDestinationLink = $self->pathGet($strDestinationLinkExp);\n" - "\n\n" - "if ($bRelative)\n" - "{\n" - "\n" - "my @strySource = split('/', $strSourcePathFile);\n" - "my @stryDestination = split('/', $strDestinationLink);\n" - "\n" - "while (defined($strySource[0]) && defined($stryDestination[0]) && $strySource[0] eq $stryDestination[0])\n" - "{\n" - "shift(@strySource);\n" - "shift(@stryDestination);\n" - "}\n" - "\n\n" - "$strSourcePathFile = '';\n" - "\n" - "for (my $iIndex = 0; $iIndex < @stryDestination - 1; $iIndex++)\n" - "{\n" - "$strSourcePathFile .= '../';\n" - "}\n" - "\n\n" - "$strSourcePathFile .= join('/', @strySource);\n" - "\n" - "logDebugMisc\n" - "(\n" - "$strOperation, 'apply relative path',\n" - "{name => 'strSourcePathFile', value => $strSourcePathFile, trace => true}\n" - ");\n" - "}\n" - "\n\n" - 
"$self->driver()->linkCreate(\n" - "$strSourcePathFile, $strDestinationLink, {bHard => $bHard, bPathCreate => $bPathCreate, bIgnoreExists => $bIgnoreExists});\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub list\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - "$strExpression,\n" - "$strSortOrder,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->list', \\@_,\n" - "{name => 'strPathExp', required => false},\n" - "{name => 'strExpression', optional => true},\n" - "{name => 'strSortOrder', optional => true, default => 'forward'},\n" - "{name => 'bIgnoreMissing', optional => true, default => false},\n" - ");\n" - "\n\n" - "my $rstryFileList = $self->driver()->list(\n" - "$self->pathGet($strPathExp), {strExpression => $strExpression, bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "if (defined($strExpression))\n" - "{\n" - "@{$rstryFileList} = grep(/$strExpression/i, @{$rstryFileList});\n" - "}\n" - "\n\n" - "if ($strSortOrder eq 'reverse')\n" - "{\n" - "@{$rstryFileList} = sort {$b cmp $a} @{$rstryFileList};\n" - "}\n" - "\n" - "else\n" - "{\n" - "@{$rstryFileList} = sort @{$rstryFileList};\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'stryFileList', value => $rstryFileList}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub manifest\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - "$strFilter,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->manifest', \\@_,\n" - "{name => 'strPathExp'},\n" - "{name => 'strFilter', optional => true, trace => true},\n" - ");\n" - "\n" - "my $hManifest = $self->driver()->manifest($self->pathGet($strPathExp), {strFilter => $strFilter});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hManifest', value => $hManifest, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub move\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strSourcePathFileExp,\n" - "$strDestinationPathFileExp,\n" - "$bPathCreate,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->move', \\@_,\n" - "{name => 'strSourcePathExp'},\n" - "{name => 'strDestinationPathExp'},\n" - "{name => 'bPathCreate', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "$self->driver()->move(\n" - "$self->pathGet($strSourcePathFileExp), $self->pathGet($strDestinationPathFileExp), {bPathCreate => $bPathCreate});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub openRead\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$xFileExp,\n" - "$bIgnoreMissing,\n" - "$rhyFilter,\n" - "$strCipherPass,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->openRead', \\@_,\n" - "{name => 'xFileExp'},\n" - "{name => 'bIgnoreMissing', optional => true, default => false},\n" - "{name => 'rhyFilter', optional => true},\n" - "{name => 'strCipherPass', optional => true, redact => true},\n" - ");\n" - "\n\n" - "my $oFileIo = $self->driver()->openRead($self->pathGet($xFileExp), {bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "if (defined($oFileIo))\n" - "{\n" - "\n\n" - "if (defined($self->cipherType()))\n" - "{\n" - "$oFileIo = &STORAGE_FILTER_CIPHER_BLOCK->new(\n" - "$oFileIo, $self->cipherType(), defined($strCipherPass) ? 
$strCipherPass : $self->cipherPassUser(),\n" - "{strMode => STORAGE_DECRYPT});\n" - "}\n" - "\n\n" - "if (defined($rhyFilter))\n" - "{\n" - "foreach my $rhFilter (@{$rhyFilter})\n" - "{\n" - "$oFileIo = $rhFilter->{strClass}->new($oFileIo, @{$rhFilter->{rxyParam}});\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oFileIo', value => $oFileIo, trace => true},\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub openWrite\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$xFileExp,\n" - "$strMode,\n" - "$strUser,\n" - "$strGroup,\n" - "$lTimestamp,\n" - "$bAtomic,\n" - "$bPathCreate,\n" - "$rhyFilter,\n" - "$strCipherPass,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->openWrite', \\@_,\n" - "{name => 'xFileExp'},\n" - "{name => 'strMode', optional => true, default => $self->{strDefaultFileMode}},\n" - "{name => 'strUser', optional => true},\n" - "{name => 'strGroup', optional => true},\n" - "{name => 'lTimestamp', optional => true},\n" - "{name => 'bAtomic', optional => true, default => false},\n" - "{name => 'bPathCreate', optional => true, default => false},\n" - "{name => 'rhyFilter', optional => true},\n" - "{name => 'strCipherPass', optional => true, redact => true},\n" - ");\n" - "\n\n" - "my $oFileIo = $self->driver()->openWrite($self->pathGet($xFileExp),\n" - "{strMode => $strMode, strUser => $strUser, strGroup => $strGroup, lTimestamp => $lTimestamp, bPathCreate => $bPathCreate,\n" - "bAtomic => $bAtomic});\n" - "\n\n" - "if (defined($self->cipherType()))\n" - "{\n" - "$oFileIo = &STORAGE_FILTER_CIPHER_BLOCK->new(\n" - "$oFileIo, $self->cipherType(), defined($strCipherPass) ? 
$strCipherPass : $self->cipherPassUser());\n" - "}\n" - "\n\n" - "if (defined($rhyFilter))\n" - "{\n" - "foreach my $rhFilter (reverse(@{$rhyFilter}))\n" - "{\n" - "$oFileIo = $rhFilter->{strClass}->new($oFileIo, @{$rhFilter->{rxyParam}});\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oFileIo', value => $oFileIo, trace => true},\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub owner\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathFileExp,\n" - "$strUser,\n" - "$strGroup\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->owner', \\@_,\n" - "{name => 'strPathFileExp'},\n" - "{name => 'strUser', required => false},\n" - "{name => 'strGroup', required => false}\n" - ");\n" - "\n\n" - "$self->driver()->owner($self->pathGet($strPathFileExp), {strUser => $strUser, strGroup => $strGroup});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathCreate\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - "$strMode,\n" - "$bIgnoreExists,\n" - "$bCreateParent,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->pathCreate', \\@_,\n" - "{name => 'strPathExp'},\n" - "{name => 'strMode', optional => true, default => $self->{strDefaultPathMode}},\n" - "{name => 'bIgnoreExists', optional => true, default => false},\n" - "{name => 'bCreateParent', optional => true, default => false},\n" - ");\n" - "\n\n" - "$self->driver()->pathCreate(\n" - "$self->pathGet($strPathExp), {strMode => $strMode, bIgnoreExists => $bIgnoreExists, bCreateParent => $bCreateParent});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathExists\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->pathExists', \\@_,\n" - "{name => 'strPathExp'},\n" - ");\n" - "\n\n" - "my $bExists = $self->driver()->pathExists($self->pathGet($strPathExp));\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bExists', value => $bExists}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathGet\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - "$bTemp,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->pathGet', \\@_,\n" - "{name => 'strPathExp', required => false, trace => true},\n" - "{name => 'bTemp', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my $strPath;\n" - "my $strFile;\n" - "\n\n" - "my $bAbsolute = false;\n" - "\n" - "if (defined($strPathExp) && index($strPathExp, qw(/)) == 0)\n" - "{\n" - "$bAbsolute = true;\n" - "$strPath = $strPathExp;\n" - "}\n" - "else\n" - "{\n" - "\n" - "if (defined($strPathExp) && index($strPathExp, qw(<)) == 0)\n" - "{\n" - "\n" - "my $iPos = index($strPathExp, qw(>));\n" - "\n" - "if ($iPos == -1)\n" - "{\n" - "confess &log(ASSERT, \"found < but not > in '${strPathExp}'\");\n" - "}\n" - "\n" - "my $strType = substr($strPathExp, 0, $iPos + 1);\n" - "\n\n" - "if ($iPos < length($strPathExp) - 1)\n" - "{\n" - "$strFile = substr($strPathExp, $iPos + 2);\n" - "}\n" - "\n\n" - "if (!defined($self->{hRule}->{$strType}))\n" - "{\n" - "confess &log(ASSERT, \"storage rule '${strType}' does not exist\");\n" - "}\n" - "\n\n" - "if (ref($self->{hRule}->{$strType}))\n" - "{\n" - "$strPath = $self->pathBase();\n" - "$strFile = $self->{hRule}{$strType}{fnRule}->($strType, $strFile, $self->{hRule}{$strType}{xData});\n" - "}\n" - "\n" - "else\n" - "{\n" - "$strPath = $self->pathBase() . ($self->pathBase() =~ /\\/$/ ? '' : qw{/}) . 
$self->{hRule}->{$strType};\n" - "}\n" - "}\n" - "\n" - "else\n" - "{\n" - "$strPath = $self->pathBase();\n" - "$strFile = $strPathExp;\n" - "}\n" - "}\n" - "\n\n" - "if ($bTemp)\n" - "{\n" - "\n" - "if (!$self->{bAllowTemp})\n" - "{\n" - "confess &log(ASSERT, \"temp file not supported for storage '\" . $self->pathBase() . \"'\");\n" - "}\n" - "\n\n" - "if (!$bAbsolute)\n" - "{\n" - "if (!defined($strFile))\n" - "{\n" - "confess &log(ASSERT, 'file part must be defined when temp file specified');\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "$strPath .= defined($strFile) ? ($strPath =~ /\\/$/ ? '' : qw{/}) . \"${strFile}\" : '';\n" - "\n\n" - "$strPath .= $bTemp ? \".$self->{strTempExtension}\" : '';\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'strPath', value => $strPath, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathSync\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathExp,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->pathSync', \\@_,\n" - "{name => 'strPathExp'},\n" - ");\n" - "\n" - "$self->driver()->pathSync($self->pathGet($strPathExp));\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub remove\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$xstryPathFileExp,\n" - "$bIgnoreMissing,\n" - "$bRecurse,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->remove', \\@_,\n" - "{name => 'xstryPathFileExp'},\n" - "{name => 'bIgnoreMissing', optional => true, default => true},\n" - "{name => 'bRecurse', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my @stryPathFileExp;\n" - "\n" - "if (ref($xstryPathFileExp))\n" - "{\n" - "foreach my $strPathFileExp (@{$xstryPathFileExp})\n" - "{\n" - "push(@stryPathFileExp, $self->pathGet($strPathFileExp));\n" - "}\n" - "}\n" - "\n\n" - "my $bRemoved = $self->driver()->remove(\n" - "ref($xstryPathFileExp) ? 
\\@stryPathFileExp : $self->pathGet($xstryPathFileExp),\n" - "{bIgnoreMissing => $bIgnoreMissing, bRecurse => $bRecurse});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bRemoved', value => $bRemoved}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub encrypted\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFileName,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->encrypted', \\@_,\n" - "{name => 'strFileName'},\n" - "{name => 'bIgnoreMissing', optional => true, default => false},\n" - ");\n" - "\n" - "my $tMagicSignature;\n" - "my $bEncrypted = false;\n" - "\n\n" - "my $oFile = $self->driver()->openRead($self->pathGet($strFileName), {bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n\n" - "if (!defined($oFile))\n" - "{\n" - "if (defined($self->{strCipherType}))\n" - "{\n" - "$bEncrypted = true;\n" - "}\n" - "}\n" - "else\n" - "{\n" - "\n" - "my $lSizeRead = $oFile->read(\\$tMagicSignature, length(CIPHER_MAGIC));\n" - "\n\n" - "$oFile->close();\n" - "\n\n\n" - "if (($lSizeRead > 0) && substr($tMagicSignature, 0, length(CIPHER_MAGIC)) eq CIPHER_MAGIC)\n" - "{\n" - "$bEncrypted = true;\n" - "}\n" - "\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bEncrypted', value => $bEncrypted}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub encryptionValid\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$bEncrypted,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->encryptionValid', \\@_,\n" - "{name => 'bEncrypted'},\n" - ");\n" - "\n" - "my $bValid = true;\n" - "\n\n" - "if ($bEncrypted)\n" - "{\n" - "if (!defined($self->{strCipherType}))\n" - "{\n" - "$bValid = false;\n" - "}\n" - "}\n" - "else\n" - "{\n" - "if (defined($self->{strCipherType}))\n" - "{\n" - "$bValid = false;\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bValid', value => $bValid}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathBase {shift->{strPathBase}}\n" - "sub driver {shift->{oDriver}}\n" - "sub cipherType {shift->{strCipherType}}\n" - "sub cipherPassUser {shift->{strCipherPassUser}}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Posix/Driver.pm", - .data = - "\n\n\n\n\n" - "package pgBackRest::Storage::Posix::Driver;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use File::Basename qw(basename dirname);\n" - "use Fcntl qw(:mode);\n" - "use File::stat qw{lstat};\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::Posix::FileRead;\n" - "use pgBackRest::Storage::Posix::FileWrite;\n" - "\n\n\n\n" - "use constant STORAGE_POSIX_DRIVER => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_POSIX_DRIVER);\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my $self = {};\n" - "bless $self, $class;\n" - "\n\n" - "(\n" - "my $strOperation,\n" - "$self->{bFileSync},\n" - "$self->{bPathSync},\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'bFileSync', optional => true, default => true},\n" - "{name => 'bPathSync', optional => true, default => true},\n" - ");\n" - "\n\n" - "$self->{strTempExtension} = 'tmp';\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub exists\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFile,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->exists', \\@_,\n" - "{name => 'strFile', trace => true},\n" - ");\n" - "\n\n" - "my $bExists = true;\n" - "my $oStat = lstat($strFile);\n" - "\n\n" - "if (defined($oStat))\n" - "{\n" - "\n" - "$bExists = !S_ISDIR($oStat->mode) ? true : false;\n" - "}\n" - "else\n" - "{\n" - "\n" - "if (!$OS_ERROR{ENOENT})\n" - "{\n" - "logErrorResult(ERROR_FILE_EXISTS, \"unable to test if file '${strFile}' exists\", $OS_ERROR);\n" - "}\n" - "\n" - "$bExists = false;\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bExists', value => $bExists, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub info\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathFile,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->info', \\@_,\n" - "{name => 'strFile', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my $oInfo = lstat($strPathFile);\n" - "\n\n" - "if (!defined($oInfo))\n" - "{\n" - "if (!($OS_ERROR{ENOENT} && $bIgnoreMissing))\n" - "{\n" - "logErrorResult($OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, \"unable to stat '${strPathFile}'\", $OS_ERROR);\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oInfo', value => $oInfo, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub linkCreate\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strSourcePathFile,\n" - "$strDestinationLink,\n" - "$bHard,\n" - "$bPathCreate,\n" - "$bIgnoreExists,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->linkCreate', \\@_,\n" - "{name => 'strSourcePathFile', trace => true},\n" - "{name => 'strDestinationLink', trace => true},\n" - "{name => 'bHard', optional=> true, default => false, trace => true},\n" - "{name => 'bPathCreate', optional=> true, default => true, trace => true},\n" - "{name => 'bIgnoreExists', optional => true, default => false, trace => true},\n" - ");\n" - "\n" - "if (!($bHard ? link($strSourcePathFile, $strDestinationLink) : symlink($strSourcePathFile, $strDestinationLink)))\n" - "{\n" - "my $strMessage = \"unable to create link '${strDestinationLink}'\";\n" - "\n\n" - "if ($OS_ERROR{ENOENT})\n" - "{\n" - "\n" - "if (!$self->exists($strSourcePathFile))\n" - "{\n" - "confess &log(ERROR, \"${strMessage} because source '${strSourcePathFile}' does not exist\", ERROR_FILE_MISSING);\n" - "}\n" - "\n" - "if (!$bPathCreate)\n" - "{\n" - "confess &log(ERROR, \"${strMessage} because parent does not exist\", ERROR_PATH_MISSING);\n" - "}\n" - "\n\n" - "$self->pathCreate(dirname($strDestinationLink), {bIgnoreExists => true, bCreateParent => true});\n" - "\n\n" - "$self->linkCreate($strSourcePathFile, $strDestinationLink, {bHard => $bHard});\n" - "}\n" - "\n" - "elsif ($OS_ERROR{EEXIST})\n" - "{\n" - "if (!$bIgnoreExists)\n" - "{\n" - "confess &log(ERROR, \"${strMessage} because it already exists\", ERROR_PATH_EXISTS);\n" - "}\n" - "}\n" - "else\n" - "{\n" - "logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR);\n" - "}\n" - "}\n" - 
"\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub linkDestination\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strLink,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->linkDestination', \\@_,\n" - "{name => 'strLink', trace => true},\n" - ");\n" - "\n\n" - "my $strLinkDestination = readlink($strLink);\n" - "\n\n" - "if (!defined($strLinkDestination))\n" - "{\n" - "logErrorResult(\n" - "$OS_ERROR{ENOENT} ? ERROR_FILE_MISSING : ERROR_FILE_OPEN, \"unable to get destination for link ${strLink}\", $OS_ERROR);\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'strLinkDestination', value => $strLinkDestination, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub list\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->list', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my @stryFileList;\n" - "my $hPath;\n" - "\n\n" - "if (opendir($hPath, $strPath))\n" - "{\n" - "@stryFileList = grep(!/^(\\.)|(\\.\\.)$/i, readdir($hPath));\n" - "close($hPath);\n" - "}\n" - "\n" - "else\n" - "{\n" - "\n" - "if (!($OS_ERROR{ENOENT} && $bIgnoreMissing))\n" - "{\n" - "logErrorResult($OS_ERROR{ENOENT} ? ERROR_FILE_MISSING : ERROR_FILE_OPEN, \"unable to read path '${strPath}'\", $OS_ERROR);\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'stryFileList', value => \\@stryFileList, ref => true, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub manifest\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - "$bIgnoreMissing,\n" - "$strFilter,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->manifest', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - "{name => 'strFilter', optional => true, trace => true},\n" - ");\n" - "\n\n" - "my $hManifest = {};\n" - "$self->manifestRecurse($strPath, undef, 0, $hManifest, $bIgnoreMissing, $strFilter);\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hManifest', value => $hManifest, trace => true}\n" - ");\n" - "}\n" - "\n" - "sub manifestRecurse\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - "$strSubPath,\n" - "$iDepth,\n" - "$hManifest,\n" - "$bIgnoreMissing,\n" - "$strFilter,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '::manifestRecurse', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'strSubPath', required => false, trace => true},\n" - "{name => 'iDepth', default => 0, trace => true},\n" - "{name => 'hManifest', required => false, trace => true},\n" - "{name => 'bIgnoreMissing', required => false, default => false, trace => true},\n" - "{name => 'strFilter', required => false, trace => true},\n" - ");\n" - "\n\n" - "my $strPathRead = $strPath . (defined($strSubPath) ? \"/${strSubPath}\" : '');\n" - "my $hPath;\n" - "\n\n" - "my $oPathInfo = $self->info($strPathRead, {bIgnoreMissing => $bIgnoreMissing});\n" - "\n" - "if (defined($oPathInfo))\n" - "{\n" - "\n" - "if ($iDepth == 0 && !S_ISDIR($oPathInfo->mode()))\n" - "{\n" - "$hManifest->{basename($strPathRead)} = $self->manifestStat($strPathRead);\n" - "}\n" - "\n" - "else\n" - "{\n" - "\n" - "my @stryFileList = @{$self->list($strPathRead, {bIgnoreMissing => $iDepth != 0})};\n" - "unshift(@stryFileList, '.');\n" - "my $hFileStat = $self->manifestList($strPathRead, \\@stryFileList, $strFilter);\n" - "\n\n" - "foreach my $strFile (keys(%{$hFileStat}))\n" - "{\n" - "my $strManifestFile = $iDepth == 0 ? $strFile : ($strSubPath . ($strFile eq qw(.) ? 
'' : \"/${strFile}\"));\n" - "$hManifest->{$strManifestFile} = $hFileStat->{$strFile};\n" - "\n\n" - "if ($hManifest->{$strManifestFile}{type} eq 'd' && $strFile ne qw(.))\n" - "{\n" - "$self->manifestRecurse($strPath, $strManifestFile, $iDepth + 1, $hManifest);\n" - "}\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n" - "sub manifestList\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - "$stryFile,\n" - "$strFilter,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->manifestList', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'stryFile', trace => true},\n" - "{name => 'strFilter', required => false, trace => true},\n" - ");\n" - "\n" - "my $hFileStat = {};\n" - "\n" - "foreach my $strFile (@{$stryFile})\n" - "{\n" - "if ($strFile ne '.' && defined($strFilter) && $strFilter ne $strFile)\n" - "{\n" - "next;\n" - "}\n" - "\n" - "$hFileStat->{$strFile} = $self->manifestStat(\"${strPath}\" . ($strFile eq qw(.) ? '' : \"/${strFile}\"));\n" - "\n" - "if (!defined($hFileStat->{$strFile}))\n" - "{\n" - "delete($hFileStat->{$strFile});\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hFileStat', value => $hFileStat, trace => true}\n" - ");\n" - "}\n" - "\n" - "sub manifestStat\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFile,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->manifestStat', \\@_,\n" - "{name => 'strFile', trace => true},\n" - ");\n" - "\n\n" - "my $oStat = $self->info($strFile, {bIgnoreMissing => true});\n" - "\n\n" - "my $hFile;\n" - "\n" - "if (defined($oStat))\n" - "{\n" - "\n" - "if (S_ISREG($oStat->mode))\n" - "{\n" - "$hFile->{type} = 'f';\n" - "\n\n" - "$hFile->{size} = $oStat->size;\n" - "\n\n" - "$hFile->{modification_time} = $oStat->mtime;\n" - "}\n" - "\n" - "elsif (S_ISDIR($oStat->mode))\n" - "{\n" - "$hFile->{type} = 'd';\n" - "}\n" - "\n" - "elsif (S_ISLNK($oStat->mode))\n" - "{\n" - "$hFile->{type} = 'l';\n" - "$hFile->{link_destination} = $self->linkDestination($strFile);\n" - "}\n" - "\n" - "else\n" - "{\n" - "confess &log(ERROR, \"${strFile} is not of type directory, file, or link\", ERROR_FILE_INVALID);\n" - "}\n" - "\n\n" - "$hFile->{user} = getpwuid($oStat->uid);\n" - "\n\n" - "$hFile->{group} = getgrgid($oStat->gid);\n" - "\n\n" - "if ($hFile->{type} ne 'l')\n" - "{\n" - "$hFile->{mode} = sprintf('%04o', S_IMODE($oStat->mode));\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hFile', value => $hFile, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub move\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strSourceFile,\n" - "$strDestinationFile,\n" - "$bPathCreate,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->move', \\@_,\n" - "{name => 'strSourceFile', trace => true},\n" - "{name => 'strDestinationFile', trace => true},\n" - "{name => 'bPathCreate', default => false, trace => true},\n" - ");\n" - "\n\n" - "my $strSourcePathFile = dirname($strSourceFile);\n" - "my $strDestinationPathFile = dirname($strDestinationFile);\n" - "\n\n" - "if (!rename($strSourceFile, $strDestinationFile))\n" - "{\n" - "my $strMessage = \"unable to move '${strSourceFile}'\";\n" - "\n\n" - "if ($OS_ERROR{ENOENT})\n" - "{\n" - "if (!$self->exists($strSourceFile))\n" - "{\n" - "logErrorResult(ERROR_FILE_MISSING, \"${strMessage} because it is missing\");\n" - "}\n" - "\n" - "if ($bPathCreate)\n" - "{\n" - "\n" - "$self->pathCreate($strDestinationPathFile, {bCreateParent => true, bIgnoreExists => true});\n" - "\n\n" - "$self->move($strSourceFile, $strDestinationFile);\n" - "}\n" - "else\n" - "{\n" - "logErrorResult(ERROR_PATH_MISSING, \"${strMessage} to missing path '${strDestinationPathFile}'\");\n" - "}\n" - "}\n" - "\n" - "else\n" - "{\n" - "logErrorResult(ERROR_FILE_MOVE, \"${strMessage} to '${strDestinationFile}'\", $OS_ERROR);\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub openRead\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFile,\n" - "$bIgnoreMissing,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->openRead', \\@_,\n" - "{name => 'strFile', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - ");\n" - "\n" - "my $oFileIO = new pgBackRest::Storage::Posix::FileRead($self, $strFile, {bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oFileIO', value => $oFileIO, trace => true},\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub openWrite\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFile,\n" - "$strMode,\n" - "$strUser,\n" - "$strGroup,\n" - "$lTimestamp,\n" - "$bPathCreate,\n" - "$bAtomic,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->openWrite', \\@_,\n" - "{name => 'strFile', trace => true},\n" - "{name => 'strMode', optional => true, trace => true},\n" - "{name => 'strUser', optional => true, trace => true},\n" - "{name => 'strGroup', optional => true, trace => true},\n" - "{name => 'lTimestamp', optional => true, trace => true},\n" - "{name => 'bPathCreate', optional => true, trace => true},\n" - "{name => 'bAtomic', optional => true, trace => true},\n" - ");\n" - "\n" - "my $oFileIO = new pgBackRest::Storage::Posix::FileWrite(\n" - "$self, $strFile,\n" - "{strMode => $strMode, strUser => $strUser, strGroup => $strGroup, lTimestamp => $lTimestamp, bPathCreate => $bPathCreate,\n" - "bAtomic => $bAtomic, bSync => $self->{bFileSync}});\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'oFileIO', value => $oFileIO, trace => true},\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub owner\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strFilePath,\n" - "$strUser,\n" - "$strGroup,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->owner', \\@_,\n" - "{name => 'strFilePath', trace => true},\n" - "{name => 'strUser', optional => true, trace => true},\n" - "{name => 'strGroup', optional => true, trace => true},\n" - ");\n" - "\n\n" - "if (defined($strUser) || defined($strGroup))\n" - "{\n" - "my $strMessage = \"unable to set ownership for '${strFilePath}'\";\n" - "my $iUserId;\n" - "my $iGroupId;\n" - "\n\n\n" - "my $oStat = $self->info($strFilePath);\n" - "\n" - "if (!defined($strUser))\n" - "{\n" - "$iUserId = $oStat->uid;\n" - "}\n" - "\n" - "if (!defined($strGroup))\n" - "{\n" - "$iGroupId = $oStat->gid;\n" - "}\n" - "\n\n" - "if (defined($strUser))\n" - "{\n" - "$iUserId = getpwnam($strUser);\n" - "\n" - "if (!defined($iUserId))\n" - "{\n" - "logErrorResult(ERROR_FILE_OWNER, \"${strMessage} because user '${strUser}' does not exist\");\n" - "}\n" - "}\n" - "\n\n" - "if (defined($strGroup))\n" - "{\n" - "$iGroupId = getgrnam($strGroup);\n" - "\n" - "if (!defined($iGroupId))\n" - "{\n" - "logErrorResult(ERROR_FILE_OWNER, \"${strMessage} because group '${strGroup}' does not exist\");\n" - "}\n" - "}\n" - "\n\n" - "if ($iUserId != $oStat->uid || $iGroupId != $oStat->gid)\n" - "{\n" - "if (!chown($iUserId, $iGroupId, $strFilePath))\n" - "{\n" - "logErrorResult(ERROR_FILE_OWNER, \"${strMessage}\", $OS_ERROR);\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub pathCreate\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - "$strMode,\n" - "$bIgnoreExists,\n" - "$bCreateParent,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->pathCreate', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'strMode', optional => true, default => '0750', trace => true},\n" - "{name => 'bIgnoreExists', optional => true, default => false, trace => true},\n" - "{name => 'bCreateParent', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "if (!mkdir($strPath, oct($strMode)))\n" - "{\n" - "my $strMessage = \"unable to create path '${strPath}'\";\n" - "\n\n" - "if ($OS_ERROR{ENOENT})\n" - "{\n" - "if (!$bCreateParent)\n" - "{\n" - "confess &log(ERROR, \"${strMessage} because parent does not exist\", ERROR_PATH_MISSING);\n" - "}\n" - "\n\n" - "$self->pathCreate(dirname($strPath), {strMode => $strMode, bIgnoreExists => true, bCreateParent => $bCreateParent});\n" - "\n\n" - "$self->pathCreate($strPath, {strMode => $strMode, bIgnoreExists => true});\n" - "}\n" - "\n" - "elsif ($OS_ERROR{EEXIST})\n" - "{\n" - "if (!$bIgnoreExists)\n" - "{\n" - "confess &log(ERROR, \"${strMessage} because it already exists\", ERROR_PATH_EXISTS);\n" - "}\n" - "}\n" - "else\n" - "{\n" - "logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR);\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub pathExists\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->pathExists', \\@_,\n" - "{name => 'strPath', trace => true},\n" - ");\n" - "\n\n" - "my $bExists = true;\n" - "my $oStat = lstat($strPath);\n" - "\n\n" - "if (defined($oStat))\n" - "{\n" - "\n" - "$bExists = S_ISDIR($oStat->mode) ? 
true : false;\n" - "}\n" - "else\n" - "{\n" - "\n" - "if (!$OS_ERROR{ENOENT})\n" - "{\n" - "logErrorResult(ERROR_FILE_EXISTS, \"unable to test if path '${strPath}' exists\", $OS_ERROR);\n" - "}\n" - "\n" - "$bExists = false;\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bExists', value => $bExists, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub pathSync\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPath,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->pathSync', \\@_,\n" - "{name => 'strPath', trace => true},\n" - ");\n" - "\n" - "open(my $hPath, \"<\", $strPath)\n" - "or confess &log(ERROR, \"unable to open '${strPath}' for sync\", ERROR_PATH_OPEN);\n" - "open(my $hPathDup, \">&\", $hPath)\n" - "or confess &log(ERROR, \"unable to duplicate '${strPath}' handle for sync\", ERROR_PATH_OPEN);\n" - "\n" - "$hPathDup->sync()\n" - "or confess &log(ERROR, \"unable to sync path '${strPath}'\", ERROR_PATH_SYNC);\n" - "\n" - "close($hPathDup);\n" - "close($hPath);\n" - "\n\n" - "return logDebugReturn($strOperation);\n" - "}\n" - "\n\n\n\n" - "sub remove\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$strPathFile,\n" - "$bIgnoreMissing,\n" - "$bRecurse,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . '->remove', \\@_,\n" - "{name => 'strPathFile', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - "{name => 'bRecurse', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my $bRemoved = true;\n" - "\n\n" - "if ($bRecurse)\n" - "{\n" - "\n" - "require pgBackRest::LibC;\n" - "pgBackRest::LibC->import(qw(:storage));\n" - "\n" - "storagePosixPathRemove($strPathFile, !$bIgnoreMissing, $bRecurse)\n" - "}\n" - "\n" - "else\n" - "{\n" - "foreach my $strFile (ref($strPathFile) ? 
@{$strPathFile} : ($strPathFile))\n" - "{\n" - "if (unlink($strFile) != 1)\n" - "{\n" - "$bRemoved = false;\n" - "\n\n" - "if (!($OS_ERROR{ENOENT} && $bIgnoreMissing))\n" - "{\n" - "logErrorResult(\n" - "$OS_ERROR{ENOENT} ? ERROR_FILE_MISSING : ERROR_FILE_OPEN, \"unable to remove file '${strFile}'\", $OS_ERROR);\n" - "}\n" - "}\n" - "}\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'bRemoved', value => $bRemoved, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub capability {true}\n" - "sub className {STORAGE_POSIX_DRIVER}\n" - "sub tempExtension {shift->{strTempExtension}}\n" - "sub tempExtensionSet {my $self = shift; $self->{strTempExtension} = shift}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Posix/FileRead.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::Posix::FileRead;\n" - "use parent 'pgBackRest::Common::Io::Handle';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Fcntl qw(O_RDONLY);\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" "my\n" "(\n" "$strOperation,\n" - "$oDriver,\n" - "$strName,\n" + "$xFileExp,\n" "$bIgnoreMissing,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'oDriver', trace => true},\n" - "{name => 'strName', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" - ");\n" - "\n\n" - "my $fhFile;\n" - "\n" - "if (!sysopen($fhFile, $strName, O_RDONLY))\n" - "{\n" - "if (!($OS_ERROR{ENOENT} && $bIgnoreMissing))\n" - "{\n" - "logErrorResult($OS_ERROR{ENOENT} ? 
ERROR_FILE_MISSING : ERROR_FILE_OPEN, \"unable to open '${strName}'\", $OS_ERROR);\n" - "}\n" - "\n" - "undef($fhFile);\n" - "}\n" - "\n\n" - "my $self;\n" - "\n" - "if (defined($fhFile))\n" - "{\n" - "\n" - "binmode($fhFile);\n" - "\n\n" - "$self = $class->SUPER::new(\"'${strName}'\", $fhFile);\n" - "bless $self, $class;\n" - "\n\n" - "$self->{oDriver} = $oDriver;\n" - "$self->{strName} = $strName;\n" - "$self->{fhFile} = $fhFile;\n" - "}\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n" - "if (defined($self->handle()))\n" - "{\n" - "\n" - "close($self->handle());\n" - "undef($self->{fhFile});\n" - "\n\n" - "$self->SUPER::close();\n" - "}\n" - "\n" - "return true;\n" - "}\n" - "\n\n\n\n" - "sub handle {shift->{fhFile}}\n" - "sub name {shift->{strName}}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/Posix/FileWrite.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::Posix::FileWrite;\n" - "use parent 'pgBackRest::Common::Io::Handle';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC);\n" - "use File::Basename qw(dirname);\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "\n" - "use pgBackRest::Common::Io::Handle;\n" - "use pgBackRest::Storage::Base;\n" - "\n\n\n\n" - "sub new\n" - "{\n" - "my $class = shift;\n" - "\n\n" - "my\n" - "(\n" - "$strOperation,\n" - "$oDriver,\n" - "$strName,\n" - "$strMode,\n" - "$strUser,\n" - "$strGroup,\n" - "$lTimestamp,\n" - "$bPathCreate,\n" - "$bAtomic,\n" - "$bSync,\n" - ") =\n" - "logDebugParam\n" - "(\n" - "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'oDriver', trace => true},\n" - "{name => 'strName', trace => true},\n" - "{name => 'strMode', optional => true, trace => true},\n" - "{name => 'strUser', optional => true, trace => true},\n" - "{name => 'strGroup', optional => true, trace => true},\n" - "{name => 'lTimestamp', optional => true, trace => true},\n" - "{name => 'bPathCreate', optional => true, default => false, trace => true},\n" - "{name => 'bAtomic', optional => true, default => false, trace => true},\n" - "{name => 'bSync', optional => true, default => true, trace => true},\n" - ");\n" - "\n\n" - "my $self = $class->SUPER::new(\"'${strName}'\");\n" - "bless $self, $class;\n" - "\n\n" - "$self->{oDriver} = $oDriver;\n" - "$self->{strName} = $strName;\n" - "$self->{strMode} = $strMode;\n" - "$self->{strUser} = $strUser;\n" - "$self->{strGroup} = $strGroup;\n" - "$self->{lTimestamp} = $lTimestamp;\n" - "$self->{bPathCreate} = $bPathCreate;\n" - "$self->{bAtomic} = $bAtomic;\n" - "$self->{bSync} = $bSync;\n" - "\n\n" - "if ($self->{bAtomic})\n" - "{\n" - "\n" - "$self->{strNameTmp} = \"$self->{strName}.\" . $self->{oDriver}->tempExtension();\n" - "}\n" - "\n\n" - "$self->{bOpened} = false;\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" - ");\n" - "}\n" - "\n\n\n\n" - "sub open\n" - "{\n" - "my $self = shift;\n" - "\n\n" - "my $strFile = $self->{bAtomic} ? $self->{strNameTmp} : $self->{strName};\n" - "\n\n" - "if (!sysopen(\n" - "$self->{fhFile}, $strFile, O_WRONLY | O_CREAT | O_TRUNC, oct(defined($self->{strMode}) ? $self->{strMode} : '0666')))\n" - "{\n" - "\n" - "if ($OS_ERROR{ENOENT} && $self->{bPathCreate})\n" - "{\n" - "$self->{oDriver}->pathCreate(dirname($strFile), {bIgnoreExists => true, bCreateParent => true});\n" - "$self->{bPathCreate} = false;\n" - "return $self->open();\n" - "}\n" - "\n" - "logErrorResult($OS_ERROR{ENOENT} ? 
ERROR_PATH_MISSING : ERROR_FILE_OPEN, \"unable to open '${strFile}'\", $OS_ERROR);\n" - "}\n" - "\n\n" - "binmode($self->{fhFile});\n" - "\n\n" - "$self->{oDriver}->owner($strFile, {strUser => $self->{strUser}, strGroup => $self->{strGroup}});\n" - "\n\n" - "$self->handleWriteSet($self->{fhFile});\n" - "\n\n" - "$self->{bOpened} = true;\n" - "\n" - "return true;\n" - "}\n" - "\n\n\n\n" - "sub write\n" - "{\n" - "my $self = shift;\n" - "my $rtBuffer = shift;\n" - "\n\n" - "$self->open() if !$self->opened();\n" - "\n" - "return $self->SUPER::write($rtBuffer);\n" - "}\n" - "\n\n\n\n" - "sub close\n" - "{\n" - "my $self = shift;\n" - "\n" - "if (defined($self->handle()))\n" - "{\n" - "\n" - "if ($self->{bSync})\n" - "{\n" - "$self->handle()->sync();\n" - "}\n" + "__PACKAGE__ . '->hashSize', \\@_,\n" + "{name => 'xFileExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => false},\n" + ");\n" "\n\n" - "close($self->handle());\n" - "undef($self->{fhFile});\n" + "my $strHash;\n" + "my $lSize;\n" "\n\n" - "my $strCurrentName = $self->{bAtomic} ? $self->{strNameTmp} : $self->{strName};\n" + "my $oFileIo = ref($xFileExp) ? 
$xFileExp : $self->openRead($xFileExp, {bIgnoreMissing => $bIgnoreMissing});\n" "\n\n" - "if (defined($self->{lTimestamp}))\n" - "{\n" - "utime(time(), $self->{lTimestamp}, $strCurrentName)\n" - "or logErrorResult(ERROR_FILE_WRITE, \"unable to set time for '${strCurrentName}'\", $OS_ERROR);\n" - "}\n" + "$oFileIo->{oStorageCRead}->filterAdd(COMMON_IO_HANDLE, undef);\n" + "$oFileIo->{oStorageCRead}->filterAdd(STORAGE_FILTER_SHA, undef);\n" "\n\n" - "if ($self->{bAtomic})\n" + "if ($self->{oStorageC}->readDrain($oFileIo->{oStorageCRead}))\n" "{\n" - "$self->{oDriver}->move($strCurrentName, $self->{strName});\n" + "$strHash = $oFileIo->result(STORAGE_FILTER_SHA);\n" + "$lSize = $oFileIo->result(COMMON_IO_HANDLE);\n" "}\n" "\n\n" - "$self->resultSet(COMMON_IO_HANDLE, $self->{lSize});\n" - "\n\n" - "$self->SUPER::close();\n" - "}\n" - "\n" - "return true;\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'strHash', value => $strHash},\n" + "{name => 'lSize', value => $lSize}\n" + ");\n" "}\n" "\n\n\n\n" - "sub DESTROY\n" + "sub info\n" "{\n" "my $self = shift;\n" - "\n" - "if (defined($self->handle()))\n" - "{\n" - "CORE::close($self->handle());\n" - "undef($self->{fhFile});\n" - "}\n" - "}\n" - "\n\n\n\n" - "sub handle {shift->{fhFile}}\n" - "sub opened {shift->{bOpened}}\n" - "sub name {shift->{strName}}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/S3/Auth.pm", - .data = - "\n\n\n\n\n\n" - "package pgBackRest::Storage::S3::Auth;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Digest::SHA qw(hmac_sha256 hmac_sha256_hex);\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use POSIX qw(strftime);\n" - "\n" - "use pgBackRest::Common::Http::Common;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::LibC qw(:crypto);\n" - "\n\n\n\n" - "use constant S3 => 's3';\n" - "use constant AWS4 => 'AWS4';\n" - "use constant AWS4_REQUEST => 
'aws4_request';\n" - "use constant AWS4_HMAC_SHA256 => 'AWS4-HMAC-SHA256';\n" - "\n" - "use constant S3_HEADER_AUTHORIZATION => 'authorization';\n" - "push @EXPORT, qw(S3_HEADER_AUTHORIZATION);\n" - "use constant S3_HEADER_DATE => 'x-amz-date';\n" - "push @EXPORT, qw(S3_HEADER_DATE);\n" - "use constant S3_HEADER_CONTENT_SHA256 => 'x-amz-content-sha256';\n" - "push @EXPORT, qw(S3_HEADER_CONTENT_SHA256);\n" - "use constant S3_HEADER_HOST => 'host';\n" - "push @EXPORT, qw(S3_HEADER_HOST);\n" - "use constant S3_HEADER_TOKEN => 'x-amz-security-token';\n" - "push @EXPORT, qw(S3_HEADER_TOKEN);\n" - "\n" - "use constant PAYLOAD_DEFAULT_HASH => cryptoHashOne('sha256', '');\n" - "push @EXPORT, qw(PAYLOAD_DEFAULT_HASH);\n" - "\n\n\n\n" - "sub s3DateTime\n" - "{\n" - "\n" + "\n\n" "my\n" "(\n" "$strOperation,\n" - "$lTime,\n" + "$strPathFileExp,\n" + "$bIgnoreMissing,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '::s3DateTime', \\@_,\n" - "{name => 'lTime', default => time(), trace => true},\n" + "__PACKAGE__ . 
'->info', \\@_,\n" + "{name => 'strPathFileExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => false},\n" ");\n" + "\n" + "my $rhInfo;\n" + "my $strJson = $self->{oStorageC}->info($strPathFileExp, $bIgnoreMissing);\n" + "\n" + "if (defined($strJson))\n" + "{\n" + "$rhInfo = $self->{oJSON}->decode($strJson);\n" + "}\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'strDateTime', value => strftime(\"%Y%m%dT%H%M%SZ\", gmtime($lTime)), trace => true}\n" + "{name => 'rhInfo', value => $rhInfo, trace => true}\n" ");\n" "}\n" - "\n" - "push @EXPORT, qw(s3DateTime);\n" "\n\n\n\n" - "sub s3CanonicalRequest\n" + "sub linkCreate\n" "{\n" - "\n" + "my $self = shift;\n" + "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strVerb,\n" - "$strUri,\n" - "$strQuery,\n" - "$hHeader,\n" - "$strPayloadHash,\n" + "$strSourcePathFileExp,\n" + "$strDestinationLinkExp,\n" + "$bHard,\n" + "$bRelative,\n" + "$bPathCreate,\n" + "$bIgnoreExists,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '::s3CanonicalRequest', \\@_,\n" - "{name => 'strVerb', trace => true},\n" - "{name => 'strUri', trace => true},\n" - "{name => 'strQuery', trace => true},\n" - "{name => 'hHeader', trace => true},\n" - "{name => 'strPayloadHash', trace => true},\n" + "__PACKAGE__ . 
'->linkCreate', \\@_,\n" + "{name => 'strSourcePathFileExp'},\n" + "{name => 'strDestinationLinkExp'},\n" + "{name => 'bHard', optional=> true, default => false},\n" + "{name => 'bRelative', optional=> true, default => false},\n" + "{name => 'bPathCreate', optional=> true, default => true},\n" + "{name => 'bIgnoreExists', optional => true, default => false},\n" ");\n" "\n\n" - "my $strCanonicalRequest =\n" - "\"${strVerb}\\n${strUri}\\n${strQuery}\\n\";\n" - "my $strSignedHeaders;\n" - "\n" - "foreach my $strHeader (sort(keys(%{$hHeader})))\n" + "my $strSourcePathFile = $self->pathGet($strSourcePathFileExp);\n" + "my $strDestinationLink = $self->pathGet($strDestinationLinkExp);\n" + "\n\n" + "if ($bRelative)\n" "{\n" - "if (lc($strHeader) ne $strHeader)\n" + "\n" + "my @strySource = split('/', $strSourcePathFile);\n" + "my @stryDestination = split('/', $strDestinationLink);\n" + "\n" + "while (defined($strySource[0]) && defined($stryDestination[0]) && $strySource[0] eq $stryDestination[0])\n" "{\n" - "confess &log(ASSERT, \"header '${strHeader}' must be lower case\");\n" + "shift(@strySource);\n" + "shift(@stryDestination);\n" "}\n" + "\n\n" + "$strSourcePathFile = '';\n" "\n" - "$strCanonicalRequest .= $strHeader . \":$hHeader->{$strHeader}\\n\";\n" - "$strSignedHeaders .= (defined($strSignedHeaders) ? qw(;) : '') . 
lc($strHeader);\n" + "for (my $iIndex = 0; $iIndex < @stryDestination - 1; $iIndex++)\n" + "{\n" + "$strSourcePathFile .= '../';\n" "}\n" - "\n" - "$strCanonicalRequest .= \"\\n${strSignedHeaders}\\n${strPayloadHash}\";\n" "\n\n" - "return logDebugReturn\n" + "$strSourcePathFile .= join('/', @strySource);\n" + "\n" + "logDebugMisc\n" "(\n" - "$strOperation,\n" - "{name => 'strCanonicalRequest', value => $strCanonicalRequest, trace => true},\n" - "{name => 'strSignedHeaders', value => $strSignedHeaders, trace => true},\n" + "$strOperation, 'apply relative path',\n" + "{name => 'strSourcePathFile', value => $strSourcePathFile, trace => true}\n" ");\n" "}\n" "\n" - "push @EXPORT, qw(s3CanonicalRequest);\n" - "\n\n\n\n" - "my $hSigningKeyCache;\n" + "if (!($bHard ? link($strSourcePathFile, $strDestinationLink) : symlink($strSourcePathFile, $strDestinationLink)))\n" + "{\n" + "my $strMessage = \"unable to create link '${strDestinationLink}'\";\n" + "\n\n" + "if ($OS_ERROR{ENOENT})\n" + "{\n" + "\n" + "if (!$self->exists($strSourcePathFile))\n" + "{\n" + "confess &log(ERROR, \"${strMessage} because source '${strSourcePathFile}' does not exist\", ERROR_FILE_MISSING);\n" + "}\n" "\n" - "sub s3SigningKey\n" + "if (!$bPathCreate)\n" "{\n" + "confess &log(ERROR, \"${strMessage} because parent does not exist\", ERROR_PATH_MISSING);\n" + "}\n" + "\n\n" + "$self->pathCreate(dirname($strDestinationLink), {bIgnoreExists => true, bCreateParent => true});\n" + "\n\n" + "$self->linkCreate($strSourcePathFile, $strDestinationLink, {bHard => $bHard});\n" + "}\n" "\n" + "elsif ($OS_ERROR{EEXIST})\n" + "{\n" + "if (!$bIgnoreExists)\n" + "{\n" + "confess &log(ERROR, \"${strMessage} because it already exists\", ERROR_PATH_EXISTS);\n" + "}\n" + "}\n" + "else\n" + "{\n" + "logErrorResult(ERROR_PATH_CREATE, ${strMessage}, $OS_ERROR);\n" + "}\n" + "}\n" + "\n\n" + "return logDebugReturn($strOperation);\n" + "}\n" + "\n\n\n\n" + "sub list\n" + "{\n" + "my $self = shift;\n" + "\n\n" "my\n" "(\n" 
"$strOperation,\n" - "$strDate,\n" - "$strRegion,\n" - "$strSecretAccessKey,\n" + "$strPathExp,\n" + "$strExpression,\n" + "$strSortOrder,\n" + "$bIgnoreMissing,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '::s3SigningKey', \\@_,\n" - "{name => 'strDate', trace => true},\n" - "{name => 'strRegion', trace => true},\n" - "{name => 'strSecretAccessKey', redact => true, trace => true},\n" + "__PACKAGE__ . '->list', \\@_,\n" + "{name => 'strPathExp', required => false},\n" + "{name => 'strExpression', optional => true},\n" + "{name => 'strSortOrder', optional => true, default => 'forward'},\n" + "{name => 'bIgnoreMissing', optional => true, default => false},\n" ");\n" "\n\n" - "my $strSigningKey = $hSigningKeyCache->{$strDate}{$strRegion}{$strSecretAccessKey};\n" - "\n\n" - "if (!defined($strSigningKey))\n" + "my $rstryFileList = [];\n" + "my $strFileList = $self->{oStorageC}->list($strPathExp, $bIgnoreMissing, $strSortOrder eq 'forward', $strExpression);\n" + "\n" + "if (defined($strFileList) && $strFileList ne '[]')\n" "{\n" - "my $strDateKey = hmac_sha256($strDate, AWS4 . 
$strSecretAccessKey);\n" - "my $strRegionKey = hmac_sha256($strRegion, $strDateKey);\n" - "my $strServiceKey = hmac_sha256(S3, $strRegionKey);\n" - "$strSigningKey = hmac_sha256(AWS4_REQUEST, $strServiceKey);\n" - "\n\n" - "$hSigningKeyCache->{$strDate}{$strRegion}{$strSecretAccessKey} = $strSigningKey;\n" + "$rstryFileList = $self->{oJSON}->decode($strFileList);\n" "}\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'strSigningKey', value => $strSigningKey, redact => true, trace => true}\n" + "{name => 'stryFileList', value => $rstryFileList}\n" ");\n" "}\n" - "\n" - "push @EXPORT, qw(s3SigningKey);\n" "\n\n\n\n" - "sub s3StringToSign\n" + "sub manifest\n" "{\n" - "\n" + "my $self = shift;\n" + "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strDateTime,\n" - "$strRegion,\n" - "$strCanonicalRequestHash,\n" + "$strPathExp,\n" + "$strFilter,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '::s3StringToSign', \\@_,\n" - "{name => 'strDateTime', trace => true},\n" - "{name => 'strRegion', trace => true},\n" - "{name => 'strCanonicalRequestHash', trace => true},\n" + "__PACKAGE__ . '->manifest', \\@_,\n" + "{name => 'strPathExp'},\n" + "{name => 'strFilter', optional => true, trace => true},\n" ");\n" "\n" - "my $strStringToSign =\n" - "AWS4_HMAC_SHA256 . \"\\n${strDateTime}\\n\" . substr($strDateTime, 0, 8) . \"/${strRegion}/\" . S3 . '/' . AWS4_REQUEST . 
\"\\n\" .\n" - "$strCanonicalRequestHash;\n" + "my $hManifest = $self->{oJSON}->decode($self->{oStorageC}->manifest($strPathExp, $strFilter));\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'strStringToSign', value => $strStringToSign, trace => true}\n" + "{name => 'hManifest', value => $hManifest, trace => true}\n" ");\n" "}\n" - "\n" - "push @EXPORT, qw(s3StringToSign);\n" "\n\n\n\n" - "sub s3AuthorizationHeader\n" + "sub move\n" "{\n" - "\n" + "my $self = shift;\n" + "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strRegion,\n" - "$strHost,\n" - "$strVerb,\n" - "$strUri,\n" - "$strQuery,\n" - "$strDateTime,\n" - "$hHeader,\n" - "$strAccessKeyId,\n" - "$strSecretAccessKey,\n" - "$strSecurityToken,\n" - "$strPayloadHash,\n" + "$strSourceFileExp,\n" + "$strDestinationFileExp,\n" + "$bPathCreate,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '::s3AuthorizationHeader', \\@_,\n" - "{name => 'strRegion', trace => true},\n" - "{name => 'strHost', trace => true},\n" - "{name => 'strVerb', trace => true},\n" - "{name => 'strUri', trace => true},\n" - "{name => 'strQuery', trace => true},\n" - "{name => 'strDateTime', trace => true},\n" - "{name => 'hHeader', required => false, trace => true},\n" - "{name => 'strAccessKeyId', redact => true, trace => true},\n" - "{name => 'strSecretAccessKey', redact => true, trace => true},\n" - "{name => 'strSecurityToken', required => false, redact => true, trace => true},\n" - "{name => 'strPayloadHash', trace => true},\n" + "__PACKAGE__ . 
'->move', \\@_,\n" + "{name => 'strSourceFileExp'},\n" + "{name => 'strDestinationFileExp'},\n" ");\n" "\n\n" - "delete($hHeader->{&S3_HEADER_AUTHORIZATION});\n" - "\n\n" - "$hHeader->{&S3_HEADER_HOST} = $strHost;\n" - "$hHeader->{&S3_HEADER_CONTENT_SHA256} = $strPayloadHash;\n" - "$hHeader->{&S3_HEADER_DATE} = $strDateTime;\n" + "my $strSourceFile = $self->pathGet($strSourceFileExp);\n" + "my $strDestinationFile = $self->pathGet($strDestinationFileExp);\n" "\n\n" - "if (defined($strSecurityToken))\n" + "if (!rename($strSourceFile, $strDestinationFile))\n" "{\n" - "$hHeader->{&S3_HEADER_TOKEN} = $strSecurityToken;\n" + "logErrorResult(ERROR_FILE_MOVE, \"unable to move '${strSourceFile}' to '${strDestinationFile}'\", $OS_ERROR);\n" "}\n" "\n\n" - "my ($strCanonicalRequest, $strSignedHeaders) = s3CanonicalRequest(\n" - "$strVerb, httpUriEncode($strUri, true), $strQuery, $hHeader, $strPayloadHash);\n" - "my $strStringToSign = s3StringToSign($strDateTime, $strRegion, cryptoHashOne('sha256', $strCanonicalRequest));\n" - "\n" - "$hHeader->{&S3_HEADER_AUTHORIZATION} =\n" - "AWS4_HMAC_SHA256 . \" Credential=${strAccessKeyId}/\" . substr($strDateTime, 0, 8) . \"/${strRegion}/\" . S3 . qw(/) .\n" - "AWS4_REQUEST . \",SignedHeaders=${strSignedHeaders},Signature=\" . 
hmac_sha256_hex($strStringToSign,\n" - "s3SigningKey(substr($strDateTime, 0, 8), $strRegion, $strSecretAccessKey));\n" - "\n\n" - "return logDebugReturn\n" - "(\n" - "$strOperation,\n" - "{name => 'hHeader', value => $hHeader, trace => true},\n" - "{name => 'strCanonicalRequest', value => $strCanonicalRequest, trace => true},\n" - "{name => 'strSignedHeaders', value => $strSignedHeaders, trace => true},\n" - "{name => 'strStringToSign', value => $strStringToSign, trace => true},\n" - ");\n" + "return logDebugReturn($strOperation);\n" "}\n" - "\n" - "push @EXPORT, qw(s3AuthorizationHeader);\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/S3/Driver.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::S3::Driver;\n" - "use parent 'pgBackRest::Storage::S3::Request';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use Digest::MD5 qw(md5_base64);\n" - "use File::Basename qw(basename dirname);\n" - "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::String;\n" - "use pgBackRest::Common::Xml;\n" - "use pgBackRest::Storage::S3::FileRead;\n" - "use pgBackRest::Storage::S3::FileWrite;\n" - "use pgBackRest::Storage::S3::Request;\n" - "use pgBackRest::Storage::S3::Info;\n" - "\n\n\n\n" - "use constant STORAGE_S3_DRIVER => __PACKAGE__;\n" - "push @EXPORT, qw(STORAGE_S3_DRIVER);\n" - "\n\n\n\n" - "use constant S3_QUERY_CONTINUATION_TOKEN => 'continuation-token';\n" - "use constant S3_QUERY_DELIMITER => 'delimiter';\n" - "use constant S3_QUERY_LIST_TYPE => 'list-type';\n" - "use constant S3_QUERY_PREFIX => 'prefix';\n" "\n\n\n\n" - "use constant S3_BATCH_MAX => 1000;\n" - "\n\n\n\n" - "sub openWrite\n" + "sub openRead\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strFile,\n" + "$xFileExp,\n" + "$bIgnoreMissing,\n" + "$rhyFilter,\n" + 
"$strCipherPass,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->openWrite', \\@_,\n" - "{name => 'strFile', trace => true},\n" + "__PACKAGE__ . '->openRead', \\@_,\n" + "{name => 'xFileExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => false},\n" + "{name => 'rhyFilter', optional => true},\n" + "{name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true},\n" ");\n" - "\n" - "my $oFileIO = new pgBackRest::Storage::S3::FileWrite($self, $strFile);\n" + "\n\n" + "my $oFileIo = pgBackRest::LibC::StorageRead->new($self->{oStorageC}, $xFileExp, $bIgnoreMissing);\n" + "\n\n" + "if (defined($self->cipherType()))\n" + "{\n" + "$oFileIo->filterAdd(STORAGE_FILTER_CIPHER_BLOCK, $self->{oJSON}->encode([false, $self->cipherType(), $strCipherPass]));\n" + "}\n" + "\n\n" + "if (defined($rhyFilter))\n" + "{\n" + "foreach my $rhFilter (@{$rhyFilter})\n" + "{\n" + "$oFileIo->filterAdd(\n" + "$rhFilter->{strClass}, defined($rhFilter->{rxyParam}) ? $self->{oJSON}->encode($rhFilter->{rxyParam}) : undef);\n" + "}\n" + "}\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'oFileIO', value => $oFileIO, trace => true},\n" + "{name => 'oFileIo', value => new pgBackRest::Storage::StorageRead($self, $oFileIo), trace => true},\n" ");\n" "}\n" "\n\n\n\n" - "sub openRead\n" + "sub openWrite\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strFile,\n" - "$bIgnoreMissing,\n" + "$xFileExp,\n" + "$strMode,\n" + "$strUser,\n" + "$strGroup,\n" + "$lTimestamp,\n" + "$bAtomic,\n" + "$bPathCreate,\n" + "$rhyFilter,\n" + "$strCipherPass,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->openRead', \\@_,\n" - "{name => 'strFile', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" + "__PACKAGE__ . 
'->openWrite', \\@_,\n" + "{name => 'xFileExp'},\n" + "{name => 'strMode', optional => true, default => $self->{strDefaultFileMode}},\n" + "{name => 'strUser', optional => true},\n" + "{name => 'strGroup', optional => true},\n" + "{name => 'lTimestamp', optional => true, default => '0'},\n" + "{name => 'bAtomic', optional => true, default => false},\n" + "{name => 'bPathCreate', optional => true, default => false},\n" + "{name => 'rhyFilter', optional => true},\n" + "{name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), redact => true},\n" ");\n" - "\n" - "my $oFileIO = new pgBackRest::Storage::S3::FileRead($self, $strFile, {bIgnoreMissing => $bIgnoreMissing});\n" + "\n\n" + "my $oFileIo = pgBackRest::LibC::StorageWrite->new(\n" + "$self->{oStorageC}, $xFileExp, oct($strMode), $strUser, $strGroup, $lTimestamp, $bAtomic, $bPathCreate);\n" + "\n\n" + "if (defined($rhyFilter))\n" + "{\n" + "foreach my $rhFilter (@{$rhyFilter})\n" + "{\n" + "$oFileIo->filterAdd(\n" + "$rhFilter->{strClass}, defined($rhFilter->{rxyParam}) ? $self->{oJSON}->encode($rhFilter->{rxyParam}) : undef);\n" + "}\n" + "}\n" + "\n\n" + "if (defined($self->cipherType()))\n" + "{\n" + "$oFileIo->filterAdd(STORAGE_FILTER_CIPHER_BLOCK, $self->{oJSON}->encode([true, $self->cipherType(), $strCipherPass]));\n" + "}\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'oFileIO', value => $oFileIO, trace => true},\n" + "{name => 'oFileIo', value => new pgBackRest::Storage::StorageWrite($self, $oFileIo), trace => true},\n" ");\n" "}\n" "\n\n\n\n" - "sub manifest\n" + "sub owner\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strPath,\n" - "$bRecurse,\n" - "$bPath,\n" + "$strPathFileExp,\n" + "$strUser,\n" + "$strGroup\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . 
'->manifest', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "\n" - "{name => 'bRecurse', optional => true, default => true, trace => true},\n" - "{name => 'bPath', optional => true, default => true, trace => true},\n" + "__PACKAGE__ . '->owner', \\@_,\n" + "{name => 'strPathFileExp'},\n" + "{name => 'strUser', required => false},\n" + "{name => 'strGroup', required => false}\n" ");\n" "\n\n" - "my $strPrefix = $strPath eq qw{/} ? undef : substr($strPath, 1) . ($bPath ? qw{/} : '');\n" - "\n\n" - "my $strDelimiter = $bRecurse ? undef : '/';\n" - "\n\n" - "my $hManifest = {};\n" - "\n\n" - "my $strContinuationToken;\n" - "\n" - "do\n" + "if (defined($strUser) || defined($strGroup))\n" "{\n" + "my $strPathFile = $self->pathGet($strPathFileExp);\n" + "my $strMessage = \"unable to set ownership for '${strPathFile}'\";\n" + "my $iUserId;\n" + "my $iGroupId;\n" + "\n\n\n" + "my $oStat = lstat($strPathFile);\n" "\n" - "my $oResponse = $self->request(\n" - "HTTP_VERB_GET, {hQuery =>\n" - "{&S3_QUERY_LIST_TYPE => 2, &S3_QUERY_PREFIX => $strPrefix, &S3_QUERY_DELIMITER => $strDelimiter,\n" - "&S3_QUERY_CONTINUATION_TOKEN => $strContinuationToken}, strResponseType => S3_RESPONSE_TYPE_XML});\n" - "\n\n" - "if (defined($strPrefix) && !$bPath)\n" + "if (!defined($oStat))\n" "{\n" + "confess &log(ERROR, \"unable to stat '${strPathFile}': No such file or directory\", ERROR_FILE_MISSING);\n" + "}\n" "\n" - "if (index($strPrefix, qw{/}) == -1)\n" + "if (!defined($strUser))\n" "{\n" - "undef($strPrefix);\n" + "$iUserId = $oStat->uid;\n" "}\n" - "else\n" + "\n" + "if (!defined($strGroup))\n" "{\n" - "$strPrefix = dirname($strPrefix) . 
qw{/};\n" - "}\n" + "$iGroupId = $oStat->gid;\n" "}\n" "\n\n" - "foreach my $oFile (xmlTagChildren($oResponse, \"Contents\"))\n" + "if (defined($strUser))\n" "{\n" - "my $strName = xmlTagText($oFile, \"Key\");\n" - "\n\n" - "if (defined($strPrefix))\n" + "$iUserId = getpwnam($strUser);\n" + "\n" + "if (!defined($iUserId))\n" "{\n" - "$strName = substr($strName, length($strPrefix));\n" + "logErrorResult(ERROR_FILE_OWNER, \"${strMessage} because user '${strUser}' does not exist\");\n" + "}\n" "}\n" - "\n" - "$hManifest->{$strName}->{type} = 'f';\n" - "$hManifest->{$strName}->{size} = xmlTagText($oFile, 'Size') + 0;\n" "\n\n" - "if ($bRecurse)\n" - "{\n" - "my @stryName = split(qw{/}, $strName);\n" - "\n" - "if (@stryName > 1)\n" + "if (defined($strGroup))\n" "{\n" - "$strName = undef;\n" + "$iGroupId = getgrnam($strGroup);\n" "\n" - "for (my $iIndex = 0; $iIndex < @stryName - 1; $iIndex++)\n" + "if (!defined($iGroupId))\n" "{\n" - "$strName .= (defined($strName) ? qw{/} : '') . $stryName[$iIndex];\n" - "$hManifest->{$strName}->{type} = 'd';\n" - "}\n" - "}\n" + "logErrorResult(ERROR_FILE_OWNER, \"${strMessage} because group '${strGroup}' does not exist\");\n" "}\n" "}\n" "\n\n" - "if ($bPath && !$bRecurse)\n" - "{\n" - "foreach my $oPath (xmlTagChildren($oResponse, \"CommonPrefixes\"))\n" + "if ($iUserId != $oStat->uid || $iGroupId != $oStat->gid)\n" "{\n" - "my $strName = xmlTagText($oPath, \"Prefix\");\n" - "\n\n" - "if (defined($strPrefix))\n" + "if (!chown($iUserId, $iGroupId, $strPathFile))\n" "{\n" - "$strName = substr($strName, length($strPrefix));\n" - "}\n" - "\n\n" - "$strName = substr($strName, 0, length($strName) - 1);\n" - "\n" - "$hManifest->{$strName}->{type} = 'd';\n" + "logErrorResult(ERROR_FILE_OWNER, \"${strMessage}\", $OS_ERROR);\n" "}\n" "}\n" - "\n" - "$strContinuationToken = xmlTagText($oResponse, \"NextContinuationToken\", false);\n" "}\n" - "while (defined($strContinuationToken));\n" "\n\n" - "if ($bPath)\n" - "{\n" - 
"$hManifest->{qw{.}}->{type} = 'd';\n" - "}\n" - "\n\n\n\n" "return logDebugReturn\n" "(\n" - "$strOperation,\n" - "{name => 'hManifest', value => $hManifest, trace => true}\n" + "$strOperation\n" ");\n" "}\n" "\n\n\n\n" - "sub list\n" + "sub pathAbsolute\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strPath,\n" - "$strExpression\n" + "$strBasePath,\n" + "$strPath\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->list', \\@_,\n" - "{name => 'strPath', trace => true},\n" - "{name => 'strExpression', optional => true, trace => true},\n" + "__PACKAGE__ . '->pathAbsolute', \\@_,\n" + "{name => 'strBasePath', trace => true},\n" + "{name => 'strPath', trace => true}\n" ");\n" "\n\n" - "my $strPrefix = regexPrefix($strExpression);\n" + "my $strAbsolutePath;\n" + "\n\n" + "if (index($strPath, '/') == 0)\n" + "{\n" + "$strAbsolutePath = $strPath;\n" + "}\n" + "\n" + "else\n" + "{\n" + "\n" + "if (index($strBasePath, '/') != 0 || index($strBasePath, '/..') != -1)\n" + "{\n" + "confess &log(ERROR, \"${strBasePath} is not an absolute path\", ERROR_PATH_TYPE);\n" + "}\n" + "\n" + "while (index($strPath, '..') == 0)\n" + "{\n" + "$strBasePath = dirname($strBasePath);\n" + "$strPath = substr($strPath, 2);\n" + "\n" + "if (index($strPath, '/') == 0)\n" + "{\n" + "$strPath = substr($strPath, 1);\n" + "}\n" + "}\n" + "\n" + "$strAbsolutePath = \"${strBasePath}/${strPath}\";\n" + "}\n" "\n\n" - "my @stryFileList = grep(\n" - "!/^\\.$/i, keys(%{$self->manifest(\n" - "$strPath . (defined($strPrefix) ? 
\"/${strPrefix}\" : ''), {bRecurse => false, bPath => !defined($strPrefix)})}));\n" + "if (index($strAbsolutePath, '/') != 0 || index($strAbsolutePath, '/..') != -1)\n" + "{\n" + "confess &log(ERROR, \"result ${strAbsolutePath} was not an absolute path\", ERROR_PATH_TYPE);\n" + "}\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'stryFileList', value => \\@stryFileList, ref => true, trace => true}\n" + "{name => 'strAbsolutePath', value => $strAbsolutePath, trace => true}\n" ");\n" "}\n" "\n\n\n\n" @@ -18602,116 +15098,147 @@ "my\n" "(\n" "$strOperation,\n" - "$strPath,\n" + "$strPathExp,\n" + "$strMode,\n" + "$bIgnoreExists,\n" + "$bCreateParent,\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . '->pathCreate', \\@_,\n" - "{name => 'strPath', trace => true},\n" + "{name => 'strPathExp'},\n" + "{name => 'strMode', optional => true},\n" + "{name => 'bIgnoreExists', optional => true, default => false},\n" + "{name => 'bCreateParent', optional => true, default => false},\n" ");\n" "\n\n" - "return logDebugReturn($strOperation);\n" + "$self->{oStorageC}->pathCreate($strPathExp, $strMode, $bIgnoreExists, $bCreateParent);\n" + "\n\n" + "return logDebugReturn\n" + "(\n" + "$strOperation\n" + ");\n" "}\n" "\n\n\n\n" - "sub pathSync\n" + "sub pathExists\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strPath,\n" + "$strPathExp,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->pathSync', \\@_,\n" - "{name => 'strPath', trace => true},\n" + "__PACKAGE__ . '->pathExists', \\@_,\n" + "{name => 'strPathExp'},\n" ");\n" "\n\n" - "return logDebugReturn($strOperation);\n" + "my $bExists = $self->{oStorageC}->pathExists($strPathExp);\n" + "\n\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bExists', value => $bExists ? 
true : false}\n" + ");\n" "}\n" "\n\n\n\n" - "sub exists\n" + "sub pathGet\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strFile,\n" + "$strPathExp,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->exists', \\@_,\n" - "{name => 'strFile', trace => true},\n" + "__PACKAGE__ . '->pathGet', \\@_,\n" + "{name => 'strPathExp'},\n" ");\n" "\n\n" - "my $bExists = defined($self->manifest($strFile, {bRecurse => false, bPath => false})->{basename($strFile)}) ? true : false;\n" + "my $strPath = $self->{oStorageC}->pathGet($strPathExp);\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'bExists', value => $bExists, trace => true}\n" + "{name => 'strPath', value => $strPath, trace => true}\n" ");\n" "}\n" "\n\n\n\n" - "sub pathExists\n" + "sub pathRemove\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strPath,\n" + "$strPathExp,\n" + "$bIgnoreMissing,\n" + "$bRecurse,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->pathExists', \\@_,\n" - "{name => 'strPath', trace => true},\n" + "__PACKAGE__ . '->pathRemove', \\@_,\n" + "{name => 'strPathExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => true},\n" + "{name => 'bRecurse', optional => true, default => false},\n" ");\n" "\n" - "my $bExists = true;\n" + "$self->{oStorageC}->pathRemove($strPathExp, $bIgnoreMissing, $bRecurse);\n" "\n\n" - "if ($strPath ne qw{/})\n" - "{\n" - "\n" - "my $rhInfo = $self->manifest(dirname($strPath), {bRecurse => false, bPath => true})->{basename($strPath)};\n" - "$bExists = defined($rhInfo) && $rhInfo->{type} eq 'd' ? true : false;\n" + "return logDebugReturn($strOperation);\n" "}\n" + "\n\n\n\n" + "sub pathSync\n" + "{\n" + "my $self = shift;\n" "\n\n" - "return logDebugReturn\n" + "my\n" "(\n" "$strOperation,\n" - "{name => 'bExists', value => $bExists, trace => true}\n" + "$strPathExp,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . 
'->pathSync', \\@_,\n" + "{name => 'strPathExp'},\n" ");\n" + "\n" + "$self->{oStorageC}->pathSync($strPathExp);\n" + "\n\n" + "return logDebugReturn($strOperation);\n" "}\n" "\n\n\n\n" - "sub info\n" + "sub put\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strFile,\n" + "$xFile,\n" + "$xContent,\n" + "$strCipherPass,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->info', \\@_,\n" - "{name => 'strFile', trace => true},\n" + "__PACKAGE__ . '->put', \\@_,\n" + "{name => 'xFile', trace => true},\n" + "{name => 'xContent', required => false, trace => true},\n" + "{name => 'strCipherPass', optional => true, default => $self->cipherPassUser(), trace => true, redact => true},\n" ");\n" + "\n\n\n" + "my $oFileIo = ref($xFile) ? $xFile : $self->openWrite($xFile, {strCipherPass => $strCipherPass});\n" "\n\n" - "my $rhFile = $self->manifest($strFile, {bRecurse => false, bPath => false})->{basename($strFile)};\n" - "\n" - "if (!defined($rhFile))\n" - "{\n" - "confess &log(ERROR, \"unable to get info for missing file ${strFile}\", ERROR_FILE_MISSING);\n" - "}\n" + "my $lSize = $self->{oStorageC}->put($oFileIo->{oStorageCWrite}, ref($xContent) ? $$xContent : $xContent);\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'oInfo', value => new pgBackRest::Storage::S3::Info($rhFile->{size}), trace => true}\n" + "{name => 'lSize', value => $lSize, trace => true},\n" ");\n" "}\n" "\n\n\n\n" @@ -18722,193 +15249,147 @@ "my\n" "(\n" "$strOperation,\n" - "$rstryFile,\n" - "$bRecurse,\n" + "$xFileExp,\n" + "$bIgnoreMissing,\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . 
'->remove', \\@_,\n" - "{name => 'rstryFile', trace => true},\n" - "{name => 'bRecurse', optional => true, default => false, trace => true},\n" + "{name => 'xFileExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => true},\n" ");\n" - "\n\n" - "if ($bRecurse)\n" - "{\n" - "my $rhManifest = $self->manifest($rstryFile);\n" - "my @stryRemoveFile;\n" - "\n\n" - "foreach my $strFile (sort({$b cmp $a} keys(%{$rhManifest})))\n" - "{\n" - "next if $rhManifest->{$strFile}->{type} eq 'd';\n" - "push(@stryRemoveFile, \"${rstryFile}/${strFile}\");\n" - "}\n" - "\n\n" - "if (@stryRemoveFile > 0)\n" - "{\n" - "$self->remove(\\@stryRemoveFile);\n" - "}\n" - "}\n" - "\n" - "else\n" - "{\n" - "\n" - "my $rstryFileAll = ref($rstryFile) ? $rstryFile : [$rstryFile];\n" - "\n" - "do\n" - "{\n" - "my $strFile = shift(@{$rstryFileAll});\n" - "my $iTotal = 0;\n" - "my $strXml = XML_HEADER . 'true';\n" "\n" - "while (defined($strFile))\n" + "foreach my $strFileExp (ref($xFileExp) ? @{$xFileExp} : ($xFileExp))\n" "{\n" - "$iTotal++;\n" - "$strXml .= '' . xmlFromText(substr($strFile, 1)) . '';\n" - "\n" - "$strFile = $iTotal < S3_BATCH_MAX ? shift(@{$rstryFileAll}) : undef;\n" + "$self->{oStorageC}->remove($strFileExp, $bIgnoreMissing);\n" "}\n" - "\n" - "$strXml .= '';\n" - "\n" - "my $hHeader = {'content-md5' => md5_base64($strXml) . '=='};\n" "\n\n" - "my $oResponse = $self->request(\n" - "HTTP_VERB_POST,\n" - "{hQuery => 'delete=', rstrBody => \\$strXml, hHeader => $hHeader, strResponseType => S3_RESPONSE_TYPE_XML});\n" - "}\n" - "while (@{$rstryFileAll} > 0);\n" + "return logDebugReturn($strOperation);\n" "}\n" + "\n\n\n\n" + "sub encrypted\n" + "{\n" + "my $self = shift;\n" "\n\n" - "return logDebugReturn\n" + "my\n" "(\n" "$strOperation,\n" - "{name => 'bResult', value => true, trace => true}\n" + "$strFileExp,\n" + "$bIgnoreMissing,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . 
'->encrypted', \\@_,\n" + "{name => 'strFileExp'},\n" + "{name => 'bIgnoreMissing', optional => true, default => false},\n" ");\n" - "}\n" - "\n\n\n\n\n" - "sub capability {false}\n" - "sub className {STORAGE_S3_DRIVER}\n" "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/S3/FileRead.pm", - .data = + "my $bEncrypted = false;\n" + "\n\n" + "my $oFileIo = new pgBackRest::Storage::StorageRead(\n" + "$self, pgBackRest::LibC::StorageRead->new($self->{oStorageC}, $strFileExp, $bIgnoreMissing));\n" "\n\n\n" - "package pgBackRest::Storage::S3::FileRead;\n" - "use parent 'pgBackRest::Common::Http::Client';\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" + "if (!$oFileIo->open())\n" + "{\n" + "if (defined($self->cipherType()))\n" + "{\n" + "$bEncrypted = true;\n" + "}\n" + "}\n" + "else\n" + "{\n" "\n" - "use Digest::MD5 qw(md5_base64);\n" - "use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC);\n" - "use File::Basename qw(dirname);\n" + "my $tMagicSignature = '';\n" + "my $lSizeRead = $oFileIo->read(\\$tMagicSignature, length(CIPHER_MAGIC));\n" + "$oFileIo->close();\n" "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::Xml;\n" - "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::S3::Request;\n" + "if (substr($tMagicSignature, 0, length(CIPHER_MAGIC)) eq CIPHER_MAGIC)\n" + "{\n" + "$bEncrypted = true;\n" + "}\n" + "}\n" + "\n\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bEncrypted', value => $bEncrypted}\n" + ");\n" + "}\n" "\n\n\n\n" - "sub new\n" + "sub encryptionValid\n" "{\n" - "my $class = shift;\n" + "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$oDriver,\n" - "$strName,\n" - "$bIgnoreMissing,\n" + "$bEncrypted,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . 
'->new', \\@_,\n" - "{name => 'oDriver', trace => true},\n" - "{name => 'strName', trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" + "__PACKAGE__ . '->encryptionValid', \\@_,\n" + "{name => 'bEncrypted'},\n" ");\n" - "\n\n" - "my $self = $oDriver->request(\n" - "HTTP_VERB_GET, {strUri => $strName, strResponseType => S3_RESPONSE_TYPE_IO, bIgnoreMissing => $bIgnoreMissing});\n" - "\n\n" - "if (defined($self))\n" - "{\n" - "bless $self, $class;\n" - "}\n" + "\n" + "my $bValid = ($bEncrypted && defined($self->cipherType())) || (!$bEncrypted && !defined($self->cipherType()));\n" "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" + "{name => 'bValid', value => $bValid ? true : false}\n" ");\n" "}\n" "\n\n\n\n" - "sub name {shift->{strName}}\n" + "sub capability {shift->type() eq STORAGE_POSIX}\n" + "sub type {shift->{oStorageC}->type()}\n" + "sub cipherType {shift->{strCipherType}}\n" + "sub cipherPassUser {shift->{strCipherPass}}\n" "\n" "1;\n" }, { - .name = "pgBackRest/Storage/S3/FileWrite.pm", + .name = "pgBackRest/Storage/StorageRead.pm", .data = "\n\n\n" - "package pgBackRest::Storage::S3::FileWrite;\n" - "use parent 'pgBackRest::Common::Io::Base';\n" + "package pgBackRest::Storage::StorageRead;\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" "use Carp qw(confess);\n" "use English '-no_match_vars';\n" "\n" - "use Digest::MD5 qw(md5_base64);\n" - "use Fcntl qw(O_RDONLY O_WRONLY O_CREAT O_TRUNC);\n" "use File::Basename qw(dirname);\n" + "use Fcntl qw(:mode);\n" + "use File::stat qw{lstat};\n" + "use JSON::PP;\n" "\n" "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Io::Handle;\n" "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::Xml;\n" "use pgBackRest::Storage::Base;\n" - "use pgBackRest::Storage::S3::Request;\n" - "\n\n\n\n" - "use constant S3_BUFFER_MAX => 16777216;\n" "\n\n\n\n" "sub new\n" "{\n" "my $class = 
shift;\n" "\n\n" - "my\n" + "my $self = {};\n" + "bless $self, $class;\n" + "\n\n" "(\n" - "$strOperation,\n" - "$oDriver,\n" - "$strName,\n" + "my $strOperation,\n" + "$self->{oStorage},\n" + "$self->{oStorageCRead},\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'oDriver', trace => true},\n" - "{name => 'strName', trace => true},\n" + "{name => 'oStorage'},\n" + "{name => 'oStorageCRead'},\n" ");\n" "\n\n" - "my $self = $class->SUPER::new(\"'${strName}'\");\n" - "bless $self, $class;\n" - "\n\n" - "$self->{oDriver} = $oDriver;\n" - "$self->{strName} = $strName;\n" - "\n\n" - "$self->{rtBuffer} = '';\n" - "\n\n" - "$self->{bWritten} = false;\n" - "\n\n" - "$self->{lSize} = 0;\n" - "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" + "{name => 'self', value => $self}\n" ");\n" "}\n" "\n\n\n\n" @@ -18916,118 +15397,135 @@ "{\n" "my $self = shift;\n" "\n\n" - "my $oResponse = $self->{oDriver}->request(\n" - "HTTP_VERB_POST, {strUri => $self->{strName}, hQuery => 'uploads=', strResponseType => S3_RESPONSE_TYPE_XML});\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '->open');\n" "\n" - "$self->{strUploadId} = xmlTagText($oResponse, 'UploadId');\n" - "\n\n" - "$self->{rstryMultiPart} = [];\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bResult', value => $self->{oStorageCRead}->open() ? true : false, trace => true},\n" + ");\n" "}\n" "\n\n\n\n" - "sub write\n" + "sub read\n" "{\n" "my $self = shift;\n" - "my $rtBuffer = shift;\n" "\n\n" - "$self->{bWritten} = true;\n" - "\n" - "if (defined($rtBuffer))\n" - "{\n" - "$self->{rtBuffer} .= $$rtBuffer;\n" - "$self->{lSize} += length($$rtBuffer);\n" + "my (\n" + "$strOperation,\n" + "$rtBuffer,\n" + "$iSize,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . 
'->read', \\@_,\n" + "{name => 'rtBuffer'},\n" + "{name => 'iSize'},\n" + ");\n" "\n\n" - "if (length($self->{rtBuffer}) >= S3_BUFFER_MAX)\n" - "{\n" - "$self->flush();\n" - "}\n" + "my $iActualSize = 0;\n" "\n" - "return length($$rtBuffer);\n" + "if (!$self->eof())\n" + "{\n" + "my $tBuffer = $self->{oStorageCRead}->read($iSize);\n" + "$iActualSize = length($tBuffer);\n" + "$$rtBuffer .= $tBuffer;\n" "}\n" - "\n" - "return 0;\n" + "\n\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'iActualSize', value => $iActualSize}\n" + ");\n" "}\n" "\n\n\n\n" - "sub flush\n" + "sub eof\n" "{\n" "my $self = shift;\n" "\n\n" - "$self->open() if !$self->opened();\n" - "\n\n" - "$self->{oDriver}->request(\n" - "HTTP_VERB_PUT,\n" - "{strUri => $self->{strName},\n" - "hQuery => {'partNumber' => @{$self->{rstryMultiPart}} + 1, 'uploadId' => $self->{strUploadId}},\n" - "rstrBody => \\$self->{rtBuffer}, hHeader => {'content-md5' => md5_base64($self->{rtBuffer}) . '=='}});\n" - "\n\n" - "push(@{$self->{rstryMultiPart}}, $self->{oDriver}->{hResponseHeader}{&S3_HEADER_ETAG});\n" - "\n\n" - "$self->{rtBuffer} = '';\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '->eof');\n" + "\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bResult', value => $self->{oStorageCRead}->eof() ? true : false, trace => true},\n" + ");\n" "}\n" "\n\n\n\n" "sub close\n" "{\n" "my $self = shift;\n" "\n\n" - "if ($self->{bWritten})\n" - "{\n" - "\n" - "$self->{bWritten} = false;\n" - "\n\n" - "if ($self->opened())\n" - "{\n" - "\n" - "$self->flush();\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '->close');\n" "\n" - "my $strXml = XML_HEADER . 
'';\n" - "my $iPartNo = 0;\n" - "\n" - "foreach my $strETag (@{$self->{rstryMultiPart}})\n" - "{\n" - "$iPartNo++;\n" + "$self->{oStorageCRead}->close();\n" "\n" - "$strXml .= \"${iPartNo}${strETag}\";\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bResult', value => true, trace => true},\n" + ");\n" "}\n" - "\n" - "$strXml .= '';\n" + "\n\n\n\n" + "sub result\n" + "{\n" + "my $self = shift;\n" "\n\n" - "my $oResponse = $self->{oDriver}->request(\n" - "HTTP_VERB_POST,\n" - "{strUri => $self->{strName}, hQuery => {'uploadId' => $self->{strUploadId}},\n" - "rstrBody => \\$strXml, hHeader => {'content-md5' => md5_base64($strXml) . '=='},\n" - "strResponseType => S3_RESPONSE_TYPE_XML});\n" - "}\n" + "my\n" + "(\n" + "$strOperation,\n" + "$strClass,\n" + ") =\n" + "logDebugParam\n" + "(\n" + "__PACKAGE__ . '->result', \\@_,\n" + "{name => 'strClass'},\n" + ");\n" "\n" - "else\n" - "{\n" - "$self->{oDriver}->request(\n" - "HTTP_VERB_PUT,\n" - "{strUri => $self->{strName}, rstrBody => \\$self->{rtBuffer},\n" - "hHeader => {'content-md5' => md5_base64($self->{rtBuffer}) . '=='}});\n" - "}\n" + "my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCRead}->result($strClass));\n" + "\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'xResult', value => $xResult, trace => true},\n" + ");\n" "}\n" "\n\n\n\n" - "$self->resultSet(COMMON_IO_HANDLE, $self->{lSize});\n" + "sub resultAll\n" + "{\n" + "my $self = shift;\n" + "\n\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . 
'->resultAll');\n" "\n" - "return true;\n" + "my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCRead}->resultAll());\n" + "\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'xResultAll', value => $xResult, trace => true},\n" + ");\n" "}\n" - "\n\n\n\n" - "sub opened {defined(shift->{strUploadId})}\n" - "sub name {shift->{strName}}\n" "\n" "1;\n" }, { - .name = "pgBackRest/Storage/S3/Info.pm", + .name = "pgBackRest/Storage/StorageWrite.pm", .data = "\n\n\n" - "package pgBackRest::Storage::S3::Info;\n" + "package pgBackRest::Storage::StorageWrite;\n" "\n" "use strict;\n" "use warnings FATAL => qw(all);\n" "use Carp qw(confess);\n" "use English '-no_match_vars';\n" "\n" + "use File::Basename qw(dirname);\n" + "use Fcntl qw(:mode);\n" + "use File::stat qw{lstat};\n" + "use JSON::PP;\n" + "\n" + "use pgBackRest::Common::Exception;\n" "use pgBackRest::Common::Log;\n" + "use pgBackRest::Storage::Base;\n" "\n\n\n\n" "sub new\n" "{\n" @@ -19038,12 +15536,14 @@ "\n\n" "(\n" "my $strOperation,\n" - "$self->{lSize},\n" + "$self->{oStorage},\n" + "$self->{oStorageCWrite},\n" ") =\n" "logDebugParam\n" "(\n" "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'lSize'},\n" + "{name => 'oStorage'},\n" + "{name => 'oStorageCWrite'},\n" ");\n" "\n\n" "return logDebugReturn\n" @@ -19053,246 +15553,93 @@ ");\n" "}\n" "\n\n\n\n" - "sub size {shift->{lSize}}\n" - "\n" - "1;\n" - }, - { - .name = "pgBackRest/Storage/S3/Request.pm", - .data = - "\n\n\n" - "package pgBackRest::Storage::S3::Request;\n" - "\n" - "use strict;\n" - "use warnings FATAL => qw(all);\n" - "use Carp qw(confess);\n" - "use English '-no_match_vars';\n" - "\n" - "use Exporter qw(import);\n" - "our @EXPORT = qw();\n" - "use IO::Socket::SSL;\n" + "sub open\n" + "{\n" + "my $self = shift;\n" + "\n\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . 
'->open');\n" "\n" - "use pgBackRest::Common::Exception;\n" - "use pgBackRest::Common::Http::Client;\n" - "use pgBackRest::Common::Http::Common;\n" - "use pgBackRest::Common::Io::Base;\n" - "use pgBackRest::Common::Log;\n" - "use pgBackRest::Common::String;\n" - "use pgBackRest::Common::Xml;\n" - "use pgBackRest::LibC qw(:crypto);\n" - "use pgBackRest::Storage::S3::Auth;\n" - "\n\n\n\n" - "use constant HTTP_VERB_GET => 'GET';\n" - "push @EXPORT, qw(HTTP_VERB_GET);\n" - "use constant HTTP_VERB_POST => 'POST';\n" - "push @EXPORT, qw(HTTP_VERB_POST);\n" - "use constant HTTP_VERB_PUT => 'PUT';\n" - "push @EXPORT, qw(HTTP_VERB_PUT);\n" - "\n" - "use constant S3_HEADER_CONTENT_LENGTH => 'content-length';\n" - "push @EXPORT, qw(S3_HEADER_CONTENT_LENGTH);\n" - "use constant S3_HEADER_TRANSFER_ENCODING => 'transfer-encoding';\n" - "push @EXPORT, qw(S3_HEADER_TRANSFER_ENCODING);\n" - "use constant S3_HEADER_ETAG => 'etag';\n" - "push @EXPORT, qw(S3_HEADER_ETAG);\n" - "\n" - "use constant S3_RESPONSE_TYPE_IO => 'io';\n" - "push @EXPORT, qw(S3_RESPONSE_TYPE_IO);\n" - "use constant S3_RESPONSE_TYPE_NONE => 'none';\n" - "push @EXPORT, qw(S3_RESPONSE_TYPE_NONE);\n" - "use constant S3_RESPONSE_TYPE_XML => 'xml';\n" - "push @EXPORT, qw(S3_RESPONSE_TYPE_XML);\n" - "\n" - "use constant S3_RESPONSE_CODE_SUCCESS => 200;\n" - "use constant S3_RESPONSE_CODE_ERROR_AUTH => 403;\n" - "use constant S3_RESPONSE_CODE_ERROR_NOT_FOUND => 404;\n" - "use constant S3_RESPONSE_CODE_ERROR_RETRY_CLASS => 5;\n" + "$self->{oStorageCWrite}->open();\n" "\n" - "use constant S3_RETRY_MAX => 4;\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'bResult', value => true, trace => true},\n" + ");\n" + "}\n" "\n\n\n\n" - "sub new\n" + "sub write\n" "{\n" - "my $class = shift;\n" - "\n\n" - "my $self = {};\n" - "bless $self, $class;\n" + "my $self = shift;\n" "\n\n" - "(\n" - "my $strOperation,\n" - "$self->{strBucket},\n" - "$self->{strEndPoint},\n" - "$self->{strRegion},\n" - 
"$self->{strAccessKeyId},\n" - "$self->{strSecretAccessKey},\n" - "$self->{strSecurityToken},\n" - "$self->{strHost},\n" - "$self->{iPort},\n" - "$self->{bVerifySsl},\n" - "$self->{strCaPath},\n" - "$self->{strCaFile},\n" - "$self->{lBufferMax},\n" + "my (\n" + "$strOperation,\n" + "$rtBuffer,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . '->new', \\@_,\n" - "{name => 'strBucket'},\n" - "{name => 'strEndPoint'},\n" - "{name => 'strRegion'},\n" - "{name => 'strAccessKeyId', redact => true},\n" - "{name => 'strSecretAccessKey', redact => true},\n" - "{name => 'strSecurityToken', optional => true, redact => true},\n" - "{name => 'strHost', optional => true},\n" - "{name => 'iPort', optional => true},\n" - "{name => 'bVerifySsl', optional => true, default => true},\n" - "{name => 'strCaPath', optional => true},\n" - "{name => 'strCaFile', optional => true},\n" - "{name => 'lBufferMax', optional => true, default => COMMON_IO_BUFFER_MAX},\n" + "__PACKAGE__ . '->write', \\@_,\n" + "{name => 'rtBuffer'},\n" ");\n" "\n\n" - "$self->{strHost} = defined($self->{strHost}) ? $self->{strHost} : \"$self->{strBucket}.$self->{strEndPoint}\";\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'iActualSize', value => $self->{oStorageCWrite}->write($$rtBuffer)}\n" + ");\n" + "}\n" + "\n\n\n\n" + "sub close\n" + "{\n" + "my $self = shift;\n" "\n\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '->close');\n" + "\n" + "$self->{oStorageCWrite}->close();\n" + "\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'self', value => $self, trace => true}\n" + "{name => 'bResult', value => true, trace => true},\n" ");\n" "}\n" "\n\n\n\n" - "sub request\n" + "sub result\n" "{\n" "my $self = shift;\n" "\n\n" "my\n" "(\n" "$strOperation,\n" - "$strVerb,\n" - "$strUri,\n" - "$hQuery,\n" - "$hHeader,\n" - "$rstrBody,\n" - "$strResponseType,\n" - "$bIgnoreMissing,\n" + "$strClass,\n" ") =\n" "logDebugParam\n" "(\n" - "__PACKAGE__ . 
'->request', \\@_,\n" - "{name => 'strVerb', trace => true},\n" - "{name => 'strUri', optional => true, default => '/', trace => true},\n" - "{name => 'hQuery', optional => true, trace => true},\n" - "{name => 'hHeader', optional => true, trace => true},\n" - "{name => 'rstrBody', optional => true, trace => true},\n" - "{name => 'strResponseType', optional => true, default => S3_RESPONSE_TYPE_NONE, trace => true},\n" - "{name => 'bIgnoreMissing', optional => true, default => false, trace => true},\n" + "__PACKAGE__ . '->result', \\@_,\n" + "{name => 'strClass'},\n" ");\n" - "\n\n" - "my $oResponse;\n" - "\n\n" - "my $bRetry;\n" - "my $iRetryTotal = 0;\n" - "\n" - "do\n" - "{\n" - "\n" - "$bRetry = false;\n" - "\n\n" - "$hHeader->{&S3_HEADER_CONTENT_SHA256} = defined($rstrBody) ? cryptoHashOne('sha256', $$rstrBody) : PAYLOAD_DEFAULT_HASH;\n" - "$hHeader->{&S3_HEADER_CONTENT_LENGTH} = defined($rstrBody) ? length($$rstrBody) : 0;\n" - "\n\n" - "($hHeader, my $strCanonicalRequest, my $strSignedHeaders, my $strStringToSign) = s3AuthorizationHeader(\n" - "$self->{strRegion}, \"$self->{strBucket}.$self->{strEndPoint}\", $strVerb, $strUri, httpQuery($hQuery), s3DateTime(),\n" - "$hHeader, $self->{strAccessKeyId}, $self->{strSecretAccessKey}, $self->{strSecurityToken},\n" - "$hHeader->{&S3_HEADER_CONTENT_SHA256});\n" - "\n\n" - "my $oHttpClient = new pgBackRest::Common::Http::Client(\n" - "$self->{strHost}, $strVerb,\n" - "{iPort => $self->{iPort}, strUri => $strUri, hQuery => $hQuery, hRequestHeader => $hHeader,\n" - "rstrRequestBody => $rstrBody, bVerifySsl => $self->{bVerifySsl}, strCaPath => $self->{strCaPath},\n" - "strCaFile => $self->{strCaFile}, bResponseBodyPrefetch => $strResponseType eq S3_RESPONSE_TYPE_XML,\n" - "lBufferMax => $self->{lBufferMax}});\n" - "\n\n" - "my $iResponseCode = $oHttpClient->responseCode();\n" - "\n" - "if ($iResponseCode == S3_RESPONSE_CODE_SUCCESS)\n" - "{\n" - "\n" - "$self->{hResponseHeader} = $oHttpClient->responseHeader();\n" - 
"\n\n" - "if ($strResponseType eq S3_RESPONSE_TYPE_XML)\n" - "{\n" - "my $rtResponseBody = $oHttpClient->responseBody();\n" - "\n" - "if ($oHttpClient->contentLength() == 0 || !defined($$rtResponseBody))\n" - "{\n" - "confess &log(ERROR,\n" - "\"response type '${strResponseType}' was requested but content length is zero or content is missing\",\n" - "ERROR_PROTOCOL);\n" - "}\n" - "\n" - "$oResponse = xmlParse($$rtResponseBody);\n" - "}\n" - "\n" - "elsif ($strResponseType eq S3_RESPONSE_TYPE_IO)\n" - "{\n" - "$oResponse = $oHttpClient;\n" - "}\n" - "}\n" - "else\n" - "{\n" - "\n" - "if ($iResponseCode == S3_RESPONSE_CODE_ERROR_NOT_FOUND)\n" - "{\n" - "\n" - "if (!$bIgnoreMissing)\n" - "{\n" - "confess &log(ERROR, \"unable to open '${strUri}': No such file or directory\", ERROR_FILE_MISSING);\n" - "}\n" "\n" - "$bRetry = false;\n" - "}\n" - "\n" - "else\n" - "{\n" + "my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCWrite}->result($strClass));\n" "\n" - "if (int($iResponseCode / 100) == S3_RESPONSE_CODE_ERROR_RETRY_CLASS)\n" - "{\n" - "\n" - "$iRetryTotal++;\n" - "$bRetry = $iRetryTotal <= S3_RETRY_MAX;\n" - "\n\n" - "if ($iRetryTotal > 1)\n" - "{\n" - "sleep(5);\n" - "}\n" + "return logDebugReturn\n" + "(\n" + "$strOperation,\n" + "{name => 'xResult', value => $xResult, trace => true},\n" + ");\n" "}\n" - "\n\n" - "if (!$bRetry)\n" + "\n\n\n\n" + "sub resultAll\n" "{\n" - "my $rstrResponseBody = $oHttpClient->responseBody();\n" + "my $self = shift;\n" "\n\n" - "my $strRequestHeader = $oHttpClient->requestHeaderText();\n" - "$strRequestHeader =~ s/^${\\S3_HEADER_AUTHORIZATION}:.*$/${\\S3_HEADER_AUTHORIZATION}: /mg;\n" + "my ($strOperation) = logDebugParam(__PACKAGE__ . '->resultAll');\n" + "\n" + "my $xResult = $self->{oStorage}->{oJSON}->decode($self->{oStorageCWrite}->resultAll());\n" "\n" - "confess &log(ERROR,\n" - "'S3 request error' . ($iRetryTotal > 0 ? \" after \" . (S3_RETRY_MAX + 1) . \" tries\" : '') .\n" - "\" [$iResponseCode] \" . 
$oHttpClient->responseMessage() .\n" - "\"\\n*** request header ***\\n${strRequestHeader}\" .\n" - "($iResponseCode == S3_RESPONSE_CODE_ERROR_AUTH ?\n" - "\"\\n*** canonical request ***\\n\" . $strCanonicalRequest .\n" - "\"\\n*** signed headers ***\\n\" . $strSignedHeaders .\n" - "\"\\n*** string to sign ***\\n\" . $strStringToSign : '') .\n" - "\"\\n*** response header ***\\n\" . $oHttpClient->responseHeaderText() .\n" - "(defined($$rstrResponseBody) ? \"\\n*** response body ***\\n${$rstrResponseBody}\" : ''),\n" - "ERROR_PROTOCOL);\n" - "}\n" - "}\n" - "}\n" - "}\n" - "while ($bRetry);\n" - "\n\n" "return logDebugReturn\n" "(\n" "$strOperation,\n" - "{name => 'oResponse', value => $oResponse, trace => true, ref => true}\n" + "{name => 'xResultAll', value => $xResult, trace => true},\n" ");\n" "}\n" "\n" @@ -19325,7 +15672,7 @@ "\n" "push @EXPORT, qw(projectBin projectBinSet);\n" "\n\n\n\n\n\n" - "use constant PROJECT_VERSION => '2.15';\n" + "use constant PROJECT_VERSION => '2.16';\n" "push @EXPORT, qw(PROJECT_VERSION);\n" "\n\n\n\n\n\n" "use constant REPOSITORY_FORMAT => 5;\n" diff -Nru pgbackrest-2.15.1/src/perl/libc.auto.c pgbackrest-2.16/src/perl/libc.auto.c --- pgbackrest-2.15.1/src/perl/libc.auto.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/perl/libc.auto.c 2019-08-05 16:03:04.000000000 +0000 @@ -58,6 +58,7 @@ ***********************************************************************************************************************************/ #include "common/crypto/common.h" #include "common/error.h" +#include "common/io/io.h" #include "common/lock.h" #include "config/config.h" #include "config/define.h" @@ -77,9 +78,12 @@ These includes define data structures that are required for the C to Perl interface but are not part of the regular C source. 
***********************************************************************************************************************************/ -#include "xs/crypto/cipherBlock.xsh" #include "xs/crypto/hash.xsh" #include "xs/common/encode.xsh" +#include "xs/postgres/client.xsh" +#include "xs/storage/storage.xsh" +#include "xs/storage/storageRead.xsh" +#include "xs/storage/storageWrite.xsh" /*********************************************************************************************************************************** Module definition @@ -259,527 +263,1536 @@ /* INCLUDE: Including 'xs/config/define.xs' from 'xs/config/configTest.xs' */ -/* INCLUDE: Including 'xs/crypto/cipherBlock.xs' from 'xs/config/define.xs' */ +/* INCLUDE: Including 'xs/crypto/hash.xs' from 'xs/config/define.xs' */ -/* INCLUDE: Including 'xs/crypto/hash.xs' from 'xs/crypto/cipherBlock.xs' */ +/* INCLUDE: Including 'xs/crypto/random.xs' from 'xs/crypto/hash.xs' */ -/* INCLUDE: Including 'xs/crypto/random.xs' from 'xs/crypto/hash.xs' */ +/* INCLUDE: Including 'xs/postgres/client.xs' from 'xs/crypto/random.xs' */ -/* INCLUDE: Including 'xs/postgres/pageChecksum.xs' from 'xs/crypto/random.xs' */ +/* INCLUDE: Including 'xs/postgres/pageChecksum.xs' from 'xs/postgres/client.xs' */ /* INCLUDE: Including 'xs/storage/storage.xs' from 'xs/postgres/pageChecksum.xs' */ -XS_EUPXS(XS_pgBackRest__LibC_storagePosixPathRemove); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_storagePosixPathRemove) -{ - dVAR; dXSARGS; - if (items != 3) - croak_xs_usage(cv, "path, errorOnMissing, recurse"); - { - const char * path = (const char *)SvPV_nolen(ST(0)) -; - bool errorOnMissing = (bool)SvTRUE(ST(1)) -; - bool recurse = (bool)SvTRUE(ST(2)) -; - MEM_CONTEXT_XS_TEMP_BEGIN() - { - storagePathRemoveP( - storagePosixNew(strNew("/"), 0640, 750, true, NULL), strNew(path), .errorOnMissing = errorOnMissing, - .recurse = recurse); - } - MEM_CONTEXT_XS_TEMP_END(); - } - XSRETURN_EMPTY; -} +/* INCLUDE: Including 
'xs/storage/storageRead.xs' from 'xs/storage/storage.xs' */ -/* INCLUDE: Returning to 'xs/postgres/pageChecksum.xs' from 'xs/storage/storage.xs' */ +/* INCLUDE: Including 'xs/storage/storageWrite.xs' from 'xs/storage/storageRead.xs' */ -XS_EUPXS(XS_pgBackRest__LibC_pageChecksum); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_pageChecksum) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_new); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_new) { dVAR; dXSARGS; - if (items != 3) - croak_xs_usage(cv, "page, blockNo, pageSize"); + if (items != 9) + croak_xs_usage(cv, "class, storage, file, mode, user, group, timeModified, atomic, pathCreate"); { - const char * page = (const char *)SvPV_nolen(ST(0)) + MEM_CONTEXT_XS_TEMP_BEGIN() + { + const String * class = STR_NEW_SV(ST(0)); + pgBackRest__LibC__Storage storage; + const String * file = STR_NEW_SV(ST(2)); + U32 mode = (unsigned long)SvUV(ST(3)) ; - U32 blockNo = (unsigned long)SvUV(ST(1)) + const String * user = STR_NEW_SV(ST(4)); + const String * group = STR_NEW_SV(ST(5)); + IV timeModified = (IV)SvIV(ST(6)) ; - U32 pageSize = (unsigned long)SvUV(ST(2)) + bool atomic = (bool)SvTRUE(ST(7)) ; - U16 RETVAL; - dXSTARG; - RETVAL = 0; + bool pathCreate = (bool)SvTRUE(ST(8)) +; + pgBackRest__LibC__StorageWrite RETVAL; - ERROR_XS_BEGIN() - { - RETVAL = pageChecksum( - (const unsigned char *)page, blockNo, pageSize); + if (SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + storage = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageWrite::new", + "storage", "pgBackRest::LibC::Storage") +; + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::StorageWrite")); + + RETVAL = storageWriteMove( + storageNewWriteP( + storage, file, .modeFile = mode, .user = user, .group = group, .timeModified = (time_t)timeModified, + .noCreatePath = 
storageFeature(storage, storageFeaturePath) ? !pathCreate : false, .noSyncPath = !atomic, + .noAtomic = !atomic), + MEM_CONTEXT_XS_OLD()); + { + SV * RETVALSV; + RETVALSV = sv_newmortal(); + sv_setref_pv(RETVALSV, "pgBackRest::LibC::StorageWrite", (void*)RETVAL); + ST(0) = RETVALSV; + } } - ERROR_XS_END(); - XSprePUSH; PUSHu((UV)RETVAL); + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN(1); } -XS_EUPXS(XS_pgBackRest__LibC_pageChecksumTest); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_pageChecksumTest) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_filterAdd); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_filterAdd) { dVAR; dXSARGS; - if (items != 5) - croak_xs_usage(cv, "page, blockNo, pageSize, ignoreWalId, ignoreWalOffset"); + if (items != 3) + croak_xs_usage(cv, "self, filter, param"); { - const char * page = (const char *)SvPV_nolen(ST(0)) -; - U32 blockNo = (unsigned long)SvUV(ST(1)) -; - U32 pageSize = (unsigned long)SvUV(ST(2)) -; - U32 ignoreWalId = (unsigned long)SvUV(ST(3)) -; - U32 ignoreWalOffset = (unsigned long)SvUV(ST(4)) -; - bool RETVAL; - RETVAL = false; - - ERROR_XS_BEGIN() + MEM_CONTEXT_XS_TEMP_BEGIN() { - RETVAL = pageChecksumTest( - (const unsigned char *)page, blockNo, pageSize, ignoreWalId, ignoreWalOffset); + pgBackRest__LibC__StorageWrite self; + const String * filter = STR_NEW_SV(ST(1)); + const String * param = STR_NEW_SV(ST(2)); + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageWrite::filterAdd", + "self", "pgBackRest::LibC::StorageWrite") +; + IoFilterGroup *filterGroup = ioWriteFilterGroup(storageWriteIo(self)); + storageFilterXsAdd(filterGroup, filter, param); } - ERROR_XS_END(); - ST(0) = boolSV(RETVAL); + MEM_CONTEXT_XS_TEMP_END(); } - XSRETURN(1); + 
XSRETURN_EMPTY; } -XS_EUPXS(XS_pgBackRest__LibC_pageChecksumBufferTest); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_pageChecksumBufferTest) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_open); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_open) { dVAR; dXSARGS; - if (items != 6) - croak_xs_usage(cv, "pageBuffer, pageBufferSize, blockNoBegin, pageSize, ignoreWalId, ignoreWalOffset"); + if (items != 1) + croak_xs_usage(cv, "self"); { - const char * pageBuffer = (const char *)SvPV_nolen(ST(0)) -; - U32 pageBufferSize = (unsigned long)SvUV(ST(1)) -; - U32 blockNoBegin = (unsigned long)SvUV(ST(2)) -; - U32 pageSize = (unsigned long)SvUV(ST(3)) -; - U32 ignoreWalId = (unsigned long)SvUV(ST(4)) -; - U32 ignoreWalOffset = (unsigned long)SvUV(ST(5)) -; - bool RETVAL; - RETVAL = false; - - ERROR_XS_BEGIN() + MEM_CONTEXT_XS_TEMP_BEGIN() { - RETVAL = pageChecksumBufferTest( - (const unsigned char *)pageBuffer, pageBufferSize, blockNoBegin, pageSize, ignoreWalId, ignoreWalOffset); + pgBackRest__LibC__StorageWrite self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageWrite::open", + "self", "pgBackRest::LibC::StorageWrite") +; + ioWriteOpen(storageWriteIo(self)); } - ERROR_XS_END(); - ST(0) = boolSV(RETVAL); + MEM_CONTEXT_XS_TEMP_END(); } - XSRETURN(1); + XSRETURN_EMPTY; } -/* INCLUDE: Returning to 'xs/crypto/random.xs' from 'xs/postgres/pageChecksum.xs' */ - - -XS_EUPXS(XS_pgBackRest__LibC_cryptoRandomBytes); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_cryptoRandomBytes) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_write); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_write) { dVAR; dXSARGS; - if (items != 1) - 
croak_xs_usage(cv, "size"); + if (items != 2) + croak_xs_usage(cv, "self, buffer"); { - I32 size = (I32)SvIV(ST(0)) -; - SV * RETVAL; - RETVAL = newSV(size); - SvPOK_only(RETVAL); - - cryptoRandomBytes((unsigned char *)SvPV_nolen(RETVAL), size); + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageWrite self; + const Buffer * buffer = BUF_CONST_SV(ST(1)); + UV RETVAL; + dXSTARG; - SvCUR_set(RETVAL, size); - RETVAL = sv_2mortal(RETVAL); - ST(0) = RETVAL; + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageWrite::write", + "self", "pgBackRest::LibC::StorageWrite") +; + ioWrite(storageWriteIo(self), buffer); + RETVAL = bufUsed(buffer); + XSprePUSH; PUSHu((UV)RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN(1); } -/* INCLUDE: Returning to 'xs/crypto/hash.xs' from 'xs/crypto/random.xs' */ - - -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_new); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_new) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_close); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_close) { dVAR; dXSARGS; - if (items != 2) - croak_xs_usage(cv, "class, type"); + if (items != 1) + croak_xs_usage(cv, "self"); { - const char * class = (const char *)SvPV_nolen(ST(0)) -; - const char * type = (const char *)SvPV_nolen(ST(1)) -; - pgBackRest__LibC__Crypto__Hash RETVAL; - RETVAL = NULL; - - // Don't warn when class param is used - (void)class; - - MEM_CONTEXT_XS_NEW_BEGIN("cryptoHashXs") + MEM_CONTEXT_XS_TEMP_BEGIN() { - RETVAL = memNew(sizeof(CryptoHashXs)); - RETVAL->memContext = MEM_CONTEXT_XS(); - RETVAL->pxPayload = cryptoHashNew(strNew(type)); - } - MEM_CONTEXT_XS_NEW_END(); - { - SV * RETVALSV; - RETVALSV = sv_newmortal(); - sv_setref_pv(RETVALSV, 
"pgBackRest::LibC::Crypto::Hash", (void*)RETVAL); - ST(0) = RETVALSV; + pgBackRest__LibC__StorageWrite self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageWrite::close", + "self", "pgBackRest::LibC::StorageWrite") +; + ioWriteClose(storageWriteIo(self)); } - XSRETURN(1); + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; } -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_process); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_process) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_result); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_result) { dVAR; dXSARGS; if (items != 2) - croak_xs_usage(cv, "self, message"); + croak_xs_usage(cv, "self, filter"); { - pgBackRest__LibC__Crypto__Hash self; - SV * message = ST(1) -; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageWrite self; + const String * filter = STR_NEW_SV(ST(1)); + const char * RETVAL; + dXSTARG; - if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Crypto::Hash")) { + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Crypto__Hash,tmp); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); } else Perl_croak_nocontext("%s: %s is not of type %s", - "pgBackRest::LibC::Crypto::Hash::process", - "self", "pgBackRest::LibC::Crypto::Hash") + "pgBackRest::LibC::StorageWrite::result", + "self", "pgBackRest::LibC::StorageWrite") ; - MEM_CONTEXT_XS_TEMP_BEGIN() - { - STRLEN messageSize; - const void *messagePtr = SvPV(message, messageSize); - - if (messageSize > 0) - ioFilterProcessIn(self->pxPayload, BUF(messagePtr, messageSize)); + RETVAL = strPtr(storageFilterXsResult(ioWriteFilterGroup(storageWriteIo(self)), 
filter)); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; } MEM_CONTEXT_XS_TEMP_END(); } - XSRETURN_EMPTY; + XSRETURN(1); } -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_result); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_result) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_resultAll); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_resultAll) { dVAR; dXSARGS; if (items != 1) croak_xs_usage(cv, "self"); { - pgBackRest__LibC__Crypto__Hash self; - SV * RETVAL; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageWrite self; + const char * RETVAL; + dXSTARG; - if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Crypto::Hash")) { + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageWrite")) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Crypto__Hash,tmp); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); } else Perl_croak_nocontext("%s: %s is not of type %s", - "pgBackRest::LibC::Crypto::Hash::result", - "self", "pgBackRest::LibC::Crypto::Hash") + "pgBackRest::LibC::StorageWrite::resultAll", + "self", "pgBackRest::LibC::StorageWrite") ; - RETVAL = NULL; - - MEM_CONTEXT_XS_TEMP_BEGIN() - { - const String *hash = varStr(ioFilterResult(self->pxPayload)); - - RETVAL = newSV(strSize(hash)); - SvPOK_only(RETVAL); - strcpy((char *)SvPV_nolen(RETVAL), strPtr(hash)); - SvCUR_set(RETVAL, strSize(hash)); + RETVAL = strPtr(storageFilterXsResultAll(ioWriteFilterGroup(storageWriteIo(self)))); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; } MEM_CONTEXT_XS_TEMP_END(); - RETVAL = sv_2mortal(RETVAL); - ST(0) = RETVAL; } XSRETURN(1); } -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_DESTROY); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Crypto__Hash_DESTROY) +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_DESTROY); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageWrite_DESTROY) { dVAR; 
dXSARGS; if (items != 1) croak_xs_usage(cv, "self"); { - pgBackRest__LibC__Crypto__Hash self; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageWrite self; if (SvROK(ST(0))) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Crypto__Hash,tmp); + self = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); } else Perl_croak_nocontext("%s: %s is not a reference", - "pgBackRest::LibC::Crypto::Hash::DESTROY", + "pgBackRest::LibC::StorageWrite::DESTROY", "self") ; - MEM_CONTEXT_XS_DESTROY(self->memContext); + storageWriteFree(self); + } + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN_EMPTY; } -XS_EUPXS(XS_pgBackRest__LibC_cryptoHashOne); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC_cryptoHashOne) +/* INCLUDE: Returning to 'xs/storage/storageRead.xs' from 'xs/storage/storageWrite.xs' */ + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_new); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_new) { dVAR; dXSARGS; - if (items != 2) - croak_xs_usage(cv, "type, message"); + if (items != 4) + croak_xs_usage(cv, "class, storage, file, ignoreMissing"); { - const char * type = (const char *)SvPV_nolen(ST(0)) -; - SV * message = ST(1) -; - SV * RETVAL; - RETVAL = NULL; - MEM_CONTEXT_XS_TEMP_BEGIN() { - STRLEN messageSize; - const void *messagePtr = SvPV(message, messageSize); - - String *hash = bufHex(cryptoHashOne(strNew(type), BUF(messagePtr, messageSize))); + const String * class = STR_NEW_SV(ST(0)); + pgBackRest__LibC__Storage storage; + const String * file = STR_NEW_SV(ST(2)); + bool ignoreMissing = (bool)SvTRUE(ST(3)) +; + pgBackRest__LibC__StorageRead RETVAL; + + if (SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + storage = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::new", + "storage", "pgBackRest::LibC::Storage") +; + CHECK(strEqZ(class, 
PACKAGE_NAME_LIBC "::StorageRead")); - RETVAL = newSV(strSize(hash)); - SvPOK_only(RETVAL); - strcpy((char *)SvPV_nolen(RETVAL), strPtr(hash)); - SvCUR_set(RETVAL, strSize(hash)); + RETVAL = storageReadMove(storageNewReadP(storage, file, .ignoreMissing = ignoreMissing), MEM_CONTEXT_XS_OLD()); + { + SV * RETVALSV; + RETVALSV = sv_newmortal(); + sv_setref_pv(RETVALSV, "pgBackRest::LibC::StorageRead", (void*)RETVAL); + ST(0) = RETVALSV; + } } MEM_CONTEXT_XS_TEMP_END(); - RETVAL = sv_2mortal(RETVAL); - ST(0) = RETVAL; } XSRETURN(1); } -/* INCLUDE: Returning to 'xs/crypto/cipherBlock.xs' from 'xs/crypto/hash.xs' */ - - -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_new); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_new) +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_filterAdd); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_filterAdd) { dVAR; dXSARGS; - if (items < 5 || items > 6) - croak_xs_usage(cv, "class, mode, type, key, keySize, digest = NULL"); + if (items != 3) + croak_xs_usage(cv, "self, filter, param"); { - const char * class = (const char *)SvPV_nolen(ST(0)) -; - U32 mode = (unsigned long)SvUV(ST(1)) -; - const char * type = (const char *)SvPV_nolen(ST(2)) -; - unsigned char * key = (unsigned char *)SvPV_nolen(ST(3)) -; - I32 keySize = (I32)SvIV(ST(4)) -; - const char * digest; - pgBackRest__LibC__Cipher__Block RETVAL; - - if (items < 6) - digest = NULL; - else { - digest = (const char *)SvPV_nolen(ST(5)) -; - } - RETVAL = NULL; - - CHECK(type != NULL); - CHECK(key != NULL); - CHECK(keySize != 0); - - // Not much point to this but it keeps the var from being unused - if (strcmp(class, PACKAGE_NAME_LIBC "::Cipher::Block") != 0) - croak("unexpected class name '%s'", class); - - MEM_CONTEXT_XS_NEW_BEGIN("cipherBlockXs") + MEM_CONTEXT_XS_TEMP_BEGIN() { - RETVAL = memNew(sizeof(CipherBlockXs)); - RETVAL->memContext = MEM_CONTEXT_XS(); + pgBackRest__LibC__StorageRead self; + const 
String * filter = STR_NEW_SV(ST(1)); + const String * param = STR_NEW_SV(ST(2)); - RETVAL->pxPayload = cipherBlockNew(mode, cipherType(STR(type)), BUF(key, keySize), digest == NULL ? NULL : STR(digest)); - } - MEM_CONTEXT_XS_NEW_END(); - { - SV * RETVALSV; - RETVALSV = sv_newmortal(); - sv_setref_pv(RETVALSV, "pgBackRest::LibC::Cipher::Block", (void*)RETVAL); - ST(0) = RETVALSV; + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::filterAdd", + "self", "pgBackRest::LibC::StorageRead") +; + IoFilterGroup *filterGroup = ioReadFilterGroup(storageReadIo(self)); + storageFilterXsAdd(filterGroup, filter, param); } - XSRETURN(1); + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; } -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_process); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_process) +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_open); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_open) { dVAR; dXSARGS; - if (items != 2) - croak_xs_usage(cv, "self, source"); + if (items != 1) + croak_xs_usage(cv, "self"); { - pgBackRest__LibC__Cipher__Block self; - SV * source = ST(1) -; - SV * RETVAL; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + bool RETVAL; - if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Cipher::Block")) { + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Cipher__Block,tmp); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); } else Perl_croak_nocontext("%s: %s is not of type %s", - "pgBackRest::LibC::Cipher::Block::process", - "self", "pgBackRest::LibC::Cipher::Block") + "pgBackRest::LibC::StorageRead::open", + "self", 
"pgBackRest::LibC::StorageRead") +; + RETVAL = ioReadOpen(storageReadIo(self)); + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_read); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_read) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, bufferSize"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + U32 bufferSize = (unsigned long)SvUV(ST(1)) +; + SV * RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::read", + "self", "pgBackRest::LibC::StorageRead") +; + RETVAL = NEWSV(0, bufferSize); + SvPOK_only(RETVAL); + + Buffer *bufferRead = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL), bufferSize); + ioRead(storageReadIo(self), bufferRead); + + SvCUR_set(RETVAL, bufUsed(bufferRead)); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_eof); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_eof) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + bool RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::eof", + "self", "pgBackRest::LibC::StorageRead") +; + RETVAL = ioReadEof(storageReadIo(self)); + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_close); /* prototype to 
pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_close) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::close", + "self", "pgBackRest::LibC::StorageRead") +; + ioReadClose(storageReadIo(self)); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_result); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_result) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, filter"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + const String * filter = STR_NEW_SV(ST(1)); + const char * RETVAL; + dXSTARG; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::result", + "self", "pgBackRest::LibC::StorageRead") +; + RETVAL = strPtr(storageFilterXsResult(ioReadFilterGroup(storageReadIo(self)), filter)); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_resultAll); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_resultAll) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + const char * RETVAL; + dXSTARG; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = 
INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::StorageRead::resultAll", + "self", "pgBackRest::LibC::StorageRead") +; + RETVAL = strPtr(storageFilterXsResultAll(ioReadFilterGroup(storageReadIo(self)))); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_DESTROY); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__StorageRead_DESTROY) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead self; + + if (SvROK(ST(0))) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not a reference", + "pgBackRest::LibC::StorageRead::DESTROY", + "self") +; + storageReadFree(self); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +/* INCLUDE: Returning to 'xs/storage/storage.xs' from 'xs/storage/storageRead.xs' */ + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_new); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_new) +{ + dVAR; dXSARGS; + if (items != 3) + croak_xs_usage(cv, "class, type, path"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + const String * class = STR_NEW_SV(ST(0)); + const String * type = STR_NEW_SV(ST(1)); + const String * path = STR_NEW_SV(ST(2)); + pgBackRest__LibC__Storage RETVAL; + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::Storage")); + + if (strEqZ(type, "")) + { + memContextSwitch(MEM_CONTEXT_XS_OLD()); + RETVAL = storagePosixNew( + path == NULL ? 
STRDEF("/") : path, STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); + storagePathEnforceSet((Storage *)RETVAL, false); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); + } + else if (strEqZ(type, "")) + { + CHECK(path == NULL); + RETVAL = (Storage *)storageRepoWrite(); + } + else if (strEqZ(type, "")) + { + CHECK(path == NULL); + + memContextSwitch(MEM_CONTEXT_XS_OLD()); + RETVAL = storagePosixNew(cfgOptionStr(cfgOptPgPath), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); + storagePathEnforceSet((Storage *)RETVAL, false); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); + } + else + THROW_FMT(AssertError, "unexpected storage type '%s'", strPtr(type)); + { + SV * RETVALSV; + RETVALSV = sv_newmortal(); + sv_setref_pv(RETVALSV, "pgBackRest::LibC::Storage", (void*)RETVAL); + ST(0) = RETVALSV; + } + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_bucketCreate); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_bucketCreate) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::bucketCreate", + "self", "pgBackRest::LibC::Storage") +; + if (strEq(storageType(self), STORAGE_S3_TYPE_STR)) + storageS3Request((StorageS3 *)storageDriver(self), HTTP_VERB_PUT_STR, FSLASH_STR, NULL, NULL, true, false); + else + THROW_FMT(AssertError, "unable to create bucket on '%s' storage", strPtr(storageType(self))); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_copy); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_copy) +{ + dVAR; dXSARGS; + if (items != 3) + 
croak_xs_usage(cv, "self, source, destination"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead source; + pgBackRest__LibC__StorageWrite destination; + bool RETVAL; + + if (SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + source = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::copy", + "source", "pgBackRest::LibC::StorageRead") +; + + if (SvROK(ST(2)) && sv_derived_from(ST(2), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(2))); + destination = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::copy", + "destination", "pgBackRest::LibC::StorageWrite") +; + RETVAL = storageCopyNP(source, destination); + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_exists); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_exists) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, fileExp"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * fileExp = STR_NEW_SV(ST(1)); + bool RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::exists", + "self", "pgBackRest::LibC::Storage") +; + RETVAL = storageExistsNP(self, fileExp); + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_get); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_get) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, read"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() 
+ { + pgBackRest__LibC__StorageRead read; + SV * RETVAL; + + if (SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + read = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::get", + "read", "pgBackRest::LibC::StorageRead") ; RETVAL = NULL; + Buffer *buffer = storageGetNP(read); + + if (buffer != NULL) + { + if (bufUsed(buffer) == 0) + RETVAL = newSVpv("", 0); + else + RETVAL = newSVpv((char *)bufPtr(buffer), bufUsed(buffer)); + } + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_info); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_info) +{ + dVAR; dXSARGS; + if (items != 3) + croak_xs_usage(cv, "self, pathExp, ignoreMissing"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + bool ignoreMissing = (bool)SvTRUE(ST(2)) +; + SV * RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::info", + "self", "pgBackRest::LibC::Storage") +; + RETVAL = NULL; + + StorageInfo info = storageInfoP(self, pathExp, .ignoreMissing = ignoreMissing); + + if (info.exists) + { + String *json = storageManifestXsInfo(NULL, &info); + RETVAL = newSVpv((char *)strPtr(json), strSize(json)); + } + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_list); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_list) +{ + dVAR; dXSARGS; + if (items != 5) + croak_xs_usage(cv, "self, pathExp, ignoreMissing, sortAsc, expression"); + { 
+ MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + bool ignoreMissing = (bool)SvTRUE(ST(2)) +; + bool sortAsc = (bool)SvTRUE(ST(3)) +; + const String * expression = STR_NEW_SV(ST(4)); + SV * RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::list", + "self", "pgBackRest::LibC::Storage") +; + StringList *fileList = strLstSort( + storageListP(self, pathExp, .errorOnMissing = storageFeature(self, storageFeaturePath) ? !ignoreMissing : false, + .expression = expression), sortAsc ? sortOrderAsc : sortOrderDesc); + + const String *fileListJson = jsonFromVar(varNewVarLst(varLstNewStrLst(fileList)), 0); + + RETVAL = newSVpv(strPtr(fileListJson), strSize(fileListJson)); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_manifest); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_manifest) +{ + dVAR; dXSARGS; + if (items < 2 || items > 3) + croak_xs_usage(cv, "self, pathExp, filter=NULL"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + const String * filter = STR_NEW_SV(ST(2)); + SV * RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::manifest", + "self", "pgBackRest::LibC::Storage") +; + StorageManifestXsCallbackData data = {.storage = self, .json = strNew("{"), .pathRoot = pathExp, .filter = filter}; + + // If a path is specified + StorageInfo info = storageInfoP(self, pathExp, .ignoreMissing 
= true); + + if (!info.exists || info.type == storageTypePath) + { + storageInfoListP( + self, data.pathRoot, storageManifestXsCallback, &data, + .errorOnMissing = storageFeature(self, storageFeaturePath) ? true : false); + } + // Else a file is specified + else + { + info.name = strBase(storagePath(self, pathExp)); + strCat(data.json, strPtr(storageManifestXsInfo(NULL, &info))); + } + + strCat(data.json, "}"); + + RETVAL = newSVpv((char *)strPtr(data.json), strSize(data.json)); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathCreate); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathCreate) +{ + dVAR; dXSARGS; + if (items != 5) + croak_xs_usage(cv, "self, pathExp, mode, ignoreExists, createParent"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + const String * mode = STR_NEW_SV(ST(2)); + bool ignoreExists = (bool)SvTRUE(ST(3)) +; + bool createParent = (bool)SvTRUE(ST(4)) +; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::pathCreate", + "self", "pgBackRest::LibC::Storage") +; + if (storageFeature(self, storageFeaturePath)) + storagePathCreateP( + self, pathExp, .mode = mode ? 
cvtZToIntBase(strPtr(mode), 8) : 0, .errorOnExists = !ignoreExists, + .noParentCreate = !createParent); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathExists); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathExists) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, pathExp"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + bool RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::pathExists", + "self", "pgBackRest::LibC::Storage") +; + RETVAL = true; + + if (storageFeature(self, storageFeaturePath)) + RETVAL = storagePathExistsNP(self, pathExp); + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathGet); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathGet) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, pathExp"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + SV * RETVAL; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::pathGet", + "self", "pgBackRest::LibC::Storage") +; + String *path = storagePathNP(self, pathExp); + RETVAL = newSVpv((char *)strPtr(path), strSize(path)); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathRemove); /* prototype to pass -Wmissing-prototypes 
*/ +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathRemove) +{ + dVAR; dXSARGS; + if (items != 4) + croak_xs_usage(cv, "self, pathExp, ignoreMissing, recurse"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + bool ignoreMissing = (bool)SvTRUE(ST(2)) +; + bool recurse = (bool)SvTRUE(ST(3)) +; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::pathRemove", + "self", "pgBackRest::LibC::Storage") +; + storagePathRemoveP( + self, pathExp, .errorOnMissing = storageFeature(self, storageFeaturePath) ? !ignoreMissing : false, .recurse = recurse); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathSync); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_pathSync) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, pathExp"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * pathExp = STR_NEW_SV(ST(1)); + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::pathSync", + "self", "pgBackRest::LibC::Storage") +; + storagePathSyncNP(self, pathExp); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_put); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_put) +{ + dVAR; dXSARGS; + if (items != 3) + croak_xs_usage(cv, "self, write, buffer"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageWrite write; + const Buffer * buffer = BUF_CONST_SV(ST(2)); + UV RETVAL; + dXSTARG; + + if 
(SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::StorageWrite")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + write = INT2PTR(pgBackRest__LibC__StorageWrite,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::put", + "write", "pgBackRest::LibC::StorageWrite") +; + storagePutNP(write, buffer); + RETVAL = buffer ? bufUsed(buffer) : 0; + XSprePUSH; PUSHu((UV)RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_readDrain); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_readDrain) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, read"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__StorageRead read; + bool RETVAL; + + if (SvROK(ST(1)) && sv_derived_from(ST(1), "pgBackRest::LibC::StorageRead")) { + IV tmp = SvIV((SV*)SvRV(ST(1))); + read = INT2PTR(pgBackRest__LibC__StorageRead,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::readDrain", + "read", "pgBackRest::LibC::StorageRead") +; + RETVAL = false; + + // Read and discard all IO (this is useful for processing filters) + if (ioReadOpen(storageReadIo(read))) + { + Buffer *buffer = bufNew(ioBufferSize()); + + do + { + ioRead(storageReadIo(read), buffer); + bufUsedZero(buffer); + } + while (!ioReadEof(storageReadIo(read))); - MEM_CONTEXT_XS_BEGIN(self->memContext) + ioReadClose(storageReadIo(read)); + RETVAL = true; + } + ST(0) = boolSV(RETVAL); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__Storage_remove); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_remove) +{ + dVAR; dXSARGS; + if (items != 3) + croak_xs_usage(cv, "self, fileExp, ignoreMissing"); { - STRLEN tSize; - const unsigned char *sourcePtr = (const unsigned char *)SvPV(source, tSize); + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const String * 
fileExp = STR_NEW_SV(ST(1)); + bool ignoreMissing = (bool)SvTRUE(ST(2)) +; - RETVAL = NEWSV(0, ioBufferSize()); - SvPOK_only(RETVAL); + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::Storage::remove", + "self", "pgBackRest::LibC::Storage") +; + storageRemoveP(self, fileExp, .errorOnMissing = storageFeature(self, storageFeaturePath) ? !ignoreMissing : false); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} - if (tSize > 0) - { - size_t outBufferUsed = 0; - do - { - SvGROW(RETVAL, outBufferUsed + ioBufferSize()); - Buffer *outBuffer = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL) + outBufferUsed, ioBufferSize()); - - ioFilterProcessInOut(self->pxPayload, BUF(sourcePtr, tSize), outBuffer); - outBufferUsed += bufUsed(outBuffer); - } - while (ioFilterInputSame(self->pxPayload)); +XS_EUPXS(XS_pgBackRest__LibC__Storage_cipherType); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_cipherType) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + const char * RETVAL; + dXSTARG; + if (cfgOptionStr(cfgOptRepoCipherType) == NULL || cipherType(cfgOptionStr(cfgOptRepoCipherType)) == cipherTypeNone) + RETVAL = NULL; + else + RETVAL = strPtr(cfgOptionStr(cfgOptRepoCipherType)); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + - SvCUR_set(RETVAL, outBufferUsed); - } - else - SvCUR_set(RETVAL, 0); +XS_EUPXS(XS_pgBackRest__LibC__Storage_cipherPass); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_cipherPass) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + const char * RETVAL; + dXSTARG; + RETVAL = strPtr(cfgOptionStr(cfgOptRepoCipherPass)); + 
sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; } - MEM_CONTEXT_XS_END(); - RETVAL = sv_2mortal(RETVAL); - ST(0) = RETVAL; + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN(1); } -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_flush); /* prototype to pass -Wmissing-prototypes */ -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_flush) +XS_EUPXS(XS_pgBackRest__LibC__Storage_type); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__Storage_type) { dVAR; dXSARGS; if (items != 1) croak_xs_usage(cv, "self"); { - pgBackRest__LibC__Cipher__Block self; - SV * RETVAL; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__Storage self; + const char * RETVAL; + dXSTARG; - if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Cipher::Block")) { + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::Storage")) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Cipher__Block,tmp); + self = INT2PTR(pgBackRest__LibC__Storage,tmp); } else Perl_croak_nocontext("%s: %s is not of type %s", - "pgBackRest::LibC::Cipher::Block::flush", - "self", "pgBackRest::LibC::Cipher::Block") + "pgBackRest::LibC::Storage::type", + "self", "pgBackRest::LibC::Storage") ; - RETVAL = NULL; + RETVAL = strPtr(storageType(self)); + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} - MEM_CONTEXT_XS_BEGIN(self->memContext) + +XS_EUPXS(XS_pgBackRest__LibC_storageRepoFree); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC_storageRepoFree) +{ + dVAR; dXSARGS; + if (items != 0) + croak_xs_usage(cv, ""); { - RETVAL = NEWSV(0, ioBufferSize()); - SvPOK_only(RETVAL); + storageHelperFree(); + } + XSRETURN_EMPTY; +} - size_t outBufferUsed = 0; - do - { - SvGROW(RETVAL, outBufferUsed + ioBufferSize()); - Buffer *outBuffer = bufNewUseC((unsigned char *)SvPV_nolen(RETVAL) + outBufferUsed, ioBufferSize()); +/* INCLUDE: Returning to 'xs/postgres/pageChecksum.xs' from 'xs/storage/storage.xs' */ - 
ioFilterProcessInOut(self->pxPayload, NULL, outBuffer); - outBufferUsed += bufUsed(outBuffer); - } - while (!ioFilterDone(self->pxPayload)); - SvCUR_set(RETVAL, outBufferUsed); +XS_EUPXS(XS_pgBackRest__LibC_pageChecksum); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC_pageChecksum) +{ + dVAR; dXSARGS; + if (items != 3) + croak_xs_usage(cv, "page, blockNo, pageSize"); + { + const char * page = (const char *)SvPV_nolen(ST(0)) +; + U32 blockNo = (unsigned long)SvUV(ST(1)) +; + U32 pageSize = (unsigned long)SvUV(ST(2)) +; + U16 RETVAL; + dXSTARG; + RETVAL = 0; + + ERROR_XS_BEGIN() + { + RETVAL = pageChecksum( + (const unsigned char *)page, blockNo, pageSize); } - MEM_CONTEXT_XS_END(); - RETVAL = sv_2mortal(RETVAL); - ST(0) = RETVAL; + ERROR_XS_END(); + XSprePUSH; PUSHu((UV)RETVAL); + } + XSRETURN(1); +} + + +/* INCLUDE: Returning to 'xs/postgres/client.xs' from 'xs/postgres/pageChecksum.xs' */ + + +XS_EUPXS(XS_pgBackRest__LibC__PgClient_new); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__PgClient_new) +{ + dVAR; dXSARGS; + if (items != 5) + croak_xs_usage(cv, "class, host, port, database, queryTimeout"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + const String * class = STR_NEW_SV(ST(0)); + const String * host = STR_NEW_SV(ST(1)); + U32 port = (unsigned long)SvUV(ST(2)) +; + const String * database = STR_NEW_SV(ST(3)); + UV queryTimeout = (UV)SvUV(ST(4)) +; + pgBackRest__LibC__PgClient RETVAL; + CHECK(strEqZ(class, PACKAGE_NAME_LIBC "::PgClient")); + + memContextSwitch(MEM_CONTEXT_XS_OLD()); + RETVAL = pgClientNew(host, port, database, NULL, queryTimeout); + memContextSwitch(MEM_CONTEXT_XS_TEMP()); + { + SV * RETVALSV; + RETVALSV = sv_newmortal(); + sv_setref_pv(RETVALSV, "pgBackRest::LibC::PgClient", (void*)RETVAL); + ST(0) = RETVALSV; + } + } + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN(1); } -XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_DESTROY); /* prototype to pass -Wmissing-prototypes */ 
-XS_EUPXS(XS_pgBackRest__LibC__Cipher__Block_DESTROY) +XS_EUPXS(XS_pgBackRest__LibC__PgClient_open); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__PgClient_open) { dVAR; dXSARGS; if (items != 1) croak_xs_usage(cv, "self"); { - pgBackRest__LibC__Cipher__Block self; + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__PgClient self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::PgClient")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__PgClient,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::PgClient::open", + "self", "pgBackRest::LibC::PgClient") +; + pgClientOpen(self); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__PgClient_query); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__PgClient_query) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "self, query"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__PgClient self; + const String * query = STR_NEW_SV(ST(1)); + const char * RETVAL; + dXSTARG; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::PgClient")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__PgClient,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::PgClient::query", + "self", "pgBackRest::LibC::PgClient") +; + VariantList *result = pgClientQuery(self, query); + RETVAL = result ? 
strPtr(jsonFromVar(varNewVarLst(result), 0)) : NULL; + sv_setpv(TARG, RETVAL); XSprePUSH; PUSHTARG; + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN(1); +} + + +XS_EUPXS(XS_pgBackRest__LibC__PgClient_close); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__PgClient_close) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__PgClient self; + + if (SvROK(ST(0)) && sv_derived_from(ST(0), "pgBackRest::LibC::PgClient")) { + IV tmp = SvIV((SV*)SvRV(ST(0))); + self = INT2PTR(pgBackRest__LibC__PgClient,tmp); + } + else + Perl_croak_nocontext("%s: %s is not of type %s", + "pgBackRest::LibC::PgClient::close", + "self", "pgBackRest::LibC::PgClient") +; + pgClientClose(self); + } + MEM_CONTEXT_XS_TEMP_END(); + } + XSRETURN_EMPTY; +} + + +XS_EUPXS(XS_pgBackRest__LibC__PgClient_DESTROY); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC__PgClient_DESTROY) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "self"); + { + MEM_CONTEXT_XS_TEMP_BEGIN() + { + pgBackRest__LibC__PgClient self; if (SvROK(ST(0))) { IV tmp = SvIV((SV*)SvRV(ST(0))); - self = INT2PTR(pgBackRest__LibC__Cipher__Block,tmp); + self = INT2PTR(pgBackRest__LibC__PgClient,tmp); } else Perl_croak_nocontext("%s: %s is not a reference", - "pgBackRest::LibC::Cipher::Block::DESTROY", + "pgBackRest::LibC::PgClient::DESTROY", "self") ; - MEM_CONTEXT_XS_DESTROY(self->memContext); + pgClientFree(self); + } + MEM_CONTEXT_XS_TEMP_END(); } XSRETURN_EMPTY; } -/* INCLUDE: Returning to 'xs/config/define.xs' from 'xs/crypto/cipherBlock.xs' */ +/* INCLUDE: Returning to 'xs/crypto/random.xs' from 'xs/postgres/client.xs' */ + + +XS_EUPXS(XS_pgBackRest__LibC_cryptoRandomBytes); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC_cryptoRandomBytes) +{ + dVAR; dXSARGS; + if (items != 1) + croak_xs_usage(cv, "size"); + { + I32 size = (I32)SvIV(ST(0)) +; + SV * RETVAL; + RETVAL = 
newSV(size); + SvPOK_only(RETVAL); + + cryptoRandomBytes((unsigned char *)SvPV_nolen(RETVAL), size); + + SvCUR_set(RETVAL, size); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + XSRETURN(1); +} + + +/* INCLUDE: Returning to 'xs/crypto/hash.xs' from 'xs/crypto/random.xs' */ + + +XS_EUPXS(XS_pgBackRest__LibC_cryptoHashOne); /* prototype to pass -Wmissing-prototypes */ +XS_EUPXS(XS_pgBackRest__LibC_cryptoHashOne) +{ + dVAR; dXSARGS; + if (items != 2) + croak_xs_usage(cv, "type, message"); + { + const char * type = (const char *)SvPV_nolen(ST(0)) +; + SV * message = ST(1) +; + SV * RETVAL; + RETVAL = NULL; + + MEM_CONTEXT_XS_TEMP_BEGIN() + { + STRLEN messageSize; + const void *messagePtr = SvPV(message, messageSize); + + String *hash = bufHex(cryptoHashOne(strNew(type), BUF(messagePtr, messageSize))); + + RETVAL = newSV(strSize(hash)); + SvPOK_only(RETVAL); + strcpy((char *)SvPV_nolen(RETVAL), strPtr(hash)); + SvCUR_set(RETVAL, strSize(hash)); + } + MEM_CONTEXT_XS_TEMP_END(); + RETVAL = sv_2mortal(RETVAL); + ST(0) = RETVAL; + } + XSRETURN(1); +} + + +/* INCLUDE: Returning to 'xs/config/define.xs' from 'xs/crypto/hash.xs' */ XS_EUPXS(XS_pgBackRest__LibC_cfgCommandId); /* prototype to pass -Wmissing-prototypes */ @@ -1250,20 +2263,51 @@ #endif newXS_deffile("pgBackRest::LibC::libcUvSize", XS_pgBackRest__LibC_libcUvSize); - newXS_deffile("pgBackRest::LibC::storagePosixPathRemove", XS_pgBackRest__LibC_storagePosixPathRemove); + newXS_deffile("pgBackRest::LibC::StorageWrite::new", XS_pgBackRest__LibC__StorageWrite_new); + newXS_deffile("pgBackRest::LibC::StorageWrite::filterAdd", XS_pgBackRest__LibC__StorageWrite_filterAdd); + newXS_deffile("pgBackRest::LibC::StorageWrite::open", XS_pgBackRest__LibC__StorageWrite_open); + newXS_deffile("pgBackRest::LibC::StorageWrite::write", XS_pgBackRest__LibC__StorageWrite_write); + newXS_deffile("pgBackRest::LibC::StorageWrite::close", XS_pgBackRest__LibC__StorageWrite_close); + 
newXS_deffile("pgBackRest::LibC::StorageWrite::result", XS_pgBackRest__LibC__StorageWrite_result); + newXS_deffile("pgBackRest::LibC::StorageWrite::resultAll", XS_pgBackRest__LibC__StorageWrite_resultAll); + newXS_deffile("pgBackRest::LibC::StorageWrite::DESTROY", XS_pgBackRest__LibC__StorageWrite_DESTROY); + newXS_deffile("pgBackRest::LibC::StorageRead::new", XS_pgBackRest__LibC__StorageRead_new); + newXS_deffile("pgBackRest::LibC::StorageRead::filterAdd", XS_pgBackRest__LibC__StorageRead_filterAdd); + newXS_deffile("pgBackRest::LibC::StorageRead::open", XS_pgBackRest__LibC__StorageRead_open); + newXS_deffile("pgBackRest::LibC::StorageRead::read", XS_pgBackRest__LibC__StorageRead_read); + newXS_deffile("pgBackRest::LibC::StorageRead::eof", XS_pgBackRest__LibC__StorageRead_eof); + newXS_deffile("pgBackRest::LibC::StorageRead::close", XS_pgBackRest__LibC__StorageRead_close); + newXS_deffile("pgBackRest::LibC::StorageRead::result", XS_pgBackRest__LibC__StorageRead_result); + newXS_deffile("pgBackRest::LibC::StorageRead::resultAll", XS_pgBackRest__LibC__StorageRead_resultAll); + newXS_deffile("pgBackRest::LibC::StorageRead::DESTROY", XS_pgBackRest__LibC__StorageRead_DESTROY); + newXS_deffile("pgBackRest::LibC::Storage::new", XS_pgBackRest__LibC__Storage_new); + newXS_deffile("pgBackRest::LibC::Storage::bucketCreate", XS_pgBackRest__LibC__Storage_bucketCreate); + newXS_deffile("pgBackRest::LibC::Storage::copy", XS_pgBackRest__LibC__Storage_copy); + newXS_deffile("pgBackRest::LibC::Storage::exists", XS_pgBackRest__LibC__Storage_exists); + newXS_deffile("pgBackRest::LibC::Storage::get", XS_pgBackRest__LibC__Storage_get); + newXS_deffile("pgBackRest::LibC::Storage::info", XS_pgBackRest__LibC__Storage_info); + newXS_deffile("pgBackRest::LibC::Storage::list", XS_pgBackRest__LibC__Storage_list); + newXS_deffile("pgBackRest::LibC::Storage::manifest", XS_pgBackRest__LibC__Storage_manifest); + newXS_deffile("pgBackRest::LibC::Storage::pathCreate", 
XS_pgBackRest__LibC__Storage_pathCreate); + newXS_deffile("pgBackRest::LibC::Storage::pathExists", XS_pgBackRest__LibC__Storage_pathExists); + newXS_deffile("pgBackRest::LibC::Storage::pathGet", XS_pgBackRest__LibC__Storage_pathGet); + newXS_deffile("pgBackRest::LibC::Storage::pathRemove", XS_pgBackRest__LibC__Storage_pathRemove); + newXS_deffile("pgBackRest::LibC::Storage::pathSync", XS_pgBackRest__LibC__Storage_pathSync); + newXS_deffile("pgBackRest::LibC::Storage::put", XS_pgBackRest__LibC__Storage_put); + newXS_deffile("pgBackRest::LibC::Storage::readDrain", XS_pgBackRest__LibC__Storage_readDrain); + newXS_deffile("pgBackRest::LibC::Storage::remove", XS_pgBackRest__LibC__Storage_remove); + newXS_deffile("pgBackRest::LibC::Storage::cipherType", XS_pgBackRest__LibC__Storage_cipherType); + newXS_deffile("pgBackRest::LibC::Storage::cipherPass", XS_pgBackRest__LibC__Storage_cipherPass); + newXS_deffile("pgBackRest::LibC::Storage::type", XS_pgBackRest__LibC__Storage_type); + newXS_deffile("pgBackRest::LibC::storageRepoFree", XS_pgBackRest__LibC_storageRepoFree); newXS_deffile("pgBackRest::LibC::pageChecksum", XS_pgBackRest__LibC_pageChecksum); - newXS_deffile("pgBackRest::LibC::pageChecksumTest", XS_pgBackRest__LibC_pageChecksumTest); - newXS_deffile("pgBackRest::LibC::pageChecksumBufferTest", XS_pgBackRest__LibC_pageChecksumBufferTest); + newXS_deffile("pgBackRest::LibC::PgClient::new", XS_pgBackRest__LibC__PgClient_new); + newXS_deffile("pgBackRest::LibC::PgClient::open", XS_pgBackRest__LibC__PgClient_open); + newXS_deffile("pgBackRest::LibC::PgClient::query", XS_pgBackRest__LibC__PgClient_query); + newXS_deffile("pgBackRest::LibC::PgClient::close", XS_pgBackRest__LibC__PgClient_close); + newXS_deffile("pgBackRest::LibC::PgClient::DESTROY", XS_pgBackRest__LibC__PgClient_DESTROY); newXS_deffile("pgBackRest::LibC::cryptoRandomBytes", XS_pgBackRest__LibC_cryptoRandomBytes); - newXS_deffile("pgBackRest::LibC::Crypto::Hash::new", XS_pgBackRest__LibC__Crypto__Hash_new); 
- newXS_deffile("pgBackRest::LibC::Crypto::Hash::process", XS_pgBackRest__LibC__Crypto__Hash_process); - newXS_deffile("pgBackRest::LibC::Crypto::Hash::result", XS_pgBackRest__LibC__Crypto__Hash_result); - newXS_deffile("pgBackRest::LibC::Crypto::Hash::DESTROY", XS_pgBackRest__LibC__Crypto__Hash_DESTROY); newXS_deffile("pgBackRest::LibC::cryptoHashOne", XS_pgBackRest__LibC_cryptoHashOne); - newXS_deffile("pgBackRest::LibC::Cipher::Block::new", XS_pgBackRest__LibC__Cipher__Block_new); - newXS_deffile("pgBackRest::LibC::Cipher::Block::process", XS_pgBackRest__LibC__Cipher__Block_process); - newXS_deffile("pgBackRest::LibC::Cipher::Block::flush", XS_pgBackRest__LibC__Cipher__Block_flush); - newXS_deffile("pgBackRest::LibC::Cipher::Block::DESTROY", XS_pgBackRest__LibC__Cipher__Block_DESTROY); newXS_deffile("pgBackRest::LibC::cfgCommandId", XS_pgBackRest__LibC_cfgCommandId); newXS_deffile("pgBackRest::LibC::cfgOptionId", XS_pgBackRest__LibC_cfgOptionId); newXS_deffile("pgBackRest::LibC::cfgDefOptionDefault", XS_pgBackRest__LibC_cfgDefOptionDefault); diff -Nru pgbackrest-2.15.1/src/postgres/client.c pgbackrest-2.16/src/postgres/client.c --- pgbackrest-2.15.1/src/postgres/client.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/postgres/client.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,389 @@ +/*********************************************************************************************************************************** +Postgres Client +***********************************************************************************************************************************/ +#include "build.auto.h" + +#include + +#include "common/debug.h" +#include "common/log.h" +#include "common/memContext.h" +#include "common/object.h" +#include "common/type/list.h" +#include "common/wait.h" +#include "postgres/client.h" + +/*********************************************************************************************************************************** +Object type 
+***********************************************************************************************************************************/ +struct PgClient +{ + MemContext *memContext; + const String *host; + unsigned int port; + const String *database; + const String *user; + TimeMSec queryTimeout; + + PGconn *connection; +}; + +OBJECT_DEFINE_FREE(PG_CLIENT); + +/*********************************************************************************************************************************** +Close protocol connection +***********************************************************************************************************************************/ +OBJECT_DEFINE_FREE_RESOURCE_BEGIN(PG_CLIENT, LOG, logLevelTrace) +{ + PQfinish(this->connection); +} +OBJECT_DEFINE_FREE_RESOURCE_END(LOG); + +/*********************************************************************************************************************************** +Create object +***********************************************************************************************************************************/ +PgClient * +pgClientNew(const String *host, const unsigned int port, const String *database, const String *user, const TimeMSec queryTimeout) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(STRING, host); + FUNCTION_LOG_PARAM(UINT, port); + FUNCTION_LOG_PARAM(STRING, database); + FUNCTION_LOG_PARAM(STRING, user); + FUNCTION_LOG_PARAM(TIME_MSEC, queryTimeout); + FUNCTION_LOG_END(); + + ASSERT(port >= 1 && port <= 65535); + ASSERT(database != NULL); + + PgClient *this = NULL; + + MEM_CONTEXT_NEW_BEGIN("PgClient") + { + this = memNew(sizeof(PgClient)); + this->memContext = memContextCurrent(); + + this->host = strDup(host); + this->port = port; + this->database = strDup(database); + this->user = strDup(user); + this->queryTimeout = queryTimeout; + } + MEM_CONTEXT_NEW_END(); + + FUNCTION_LOG_RETURN(PG_CLIENT, this); +} + 
+/*********************************************************************************************************************************** +Just ignore notices and warnings +***********************************************************************************************************************************/ +static void +pgClientNoticeProcessor(void *arg, const char *message) +{ + (void)arg; + (void)message; +} + +/*********************************************************************************************************************************** +Encode string to escape ' and \ +***********************************************************************************************************************************/ +static String * +pgClientEscape(const String *string) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(STRING, string); + FUNCTION_TEST_END(); + + ASSERT(string != NULL); + + String *result = strNew("'"); + + // Iterate all characters in the string + for (unsigned stringIdx = 0; stringIdx < strSize(string); stringIdx++) + { + char stringChar = strPtr(string)[stringIdx]; + + // These characters are escaped + if (stringChar == '\'' || stringChar == '\\') + strCatChr(result, '\\'); + + strCatChr(result, stringChar); + } + + strCatChr(result, '\''); + + FUNCTION_TEST_RETURN(result); +} + +/*********************************************************************************************************************************** +Open connection to PostgreSQL +***********************************************************************************************************************************/ +PgClient * +pgClientOpen(PgClient *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(PG_CLIENT, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + CHECK(this->connection == NULL); + + MEM_CONTEXT_TEMP_BEGIN() + { + // Base connection string + String *connInfo = strNewFmt("dbname=%s port=%u", strPtr(pgClientEscape(this->database)), this->port); + + // Add user if 
specified + if (this->user != NULL) + strCatFmt(connInfo, " user=%s", strPtr(pgClientEscape(this->user))); + + // Add host if specified + if (this->host != NULL) + strCatFmt(connInfo, " host=%s", strPtr(pgClientEscape(this->host))); + + // Make the connection + this->connection = PQconnectdb(strPtr(connInfo)); + + // Set a callback to shutdown the connection + memContextCallbackSet(this->memContext, pgClientFreeResource, this); + + // Handle errors + if (PQstatus(this->connection) != CONNECTION_OK) + { + THROW_FMT( + DbConnectError, "unable to connect to '%s': %s", strPtr(connInfo), + strPtr(strTrim(strNew(PQerrorMessage(this->connection))))); + } + + // Set notice and warning processor + PQsetNoticeProcessor(this->connection, pgClientNoticeProcessor, NULL); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(PG_CLIENT, this); +} + +/*********************************************************************************************************************************** +Execute a query and return results +***********************************************************************************************************************************/ +VariantList * +pgClientQuery(PgClient *this, const String *query) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(PG_CLIENT, this); + FUNCTION_LOG_PARAM(STRING, query); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + CHECK(this->connection != NULL); + ASSERT(query != NULL); + + VariantList *result = NULL; + + MEM_CONTEXT_TEMP_BEGIN() + { + // Send the query without waiting for results so we can timeout if needed + if (!PQsendQuery(this->connection, strPtr(query))) + { + THROW_FMT( + DbQueryError, "unable to send query '%s': %s", strPtr(query), + strPtr(strTrim(strNew(PQerrorMessage(this->connection))))); + } + + // Wait for a result + Wait *wait = waitNew(this->queryTimeout); + bool busy = false; + + do + { + PQconsumeInput(this->connection); + busy = PQisBusy(this->connection); + } + while (busy && waitMore(wait)); + + 
// If the query is still busy after the timeout attempt to cancel + if (busy) + { + PGcancel *cancel = PQgetCancel(this->connection); + CHECK(cancel != NULL); + + TRY_BEGIN() + { + char error[256]; + + if (!PQcancel(cancel, error, sizeof(error))) + THROW_FMT(DbQueryError, "unable to cancel query '%s': %s", strPtr(query), strPtr(strTrim(strNew(error)))); + } + FINALLY() + { + PQfreeCancel(cancel); + } + TRY_END(); + } + + // Get the result (even if query was cancelled -- to prevent the connection being left in a bad state) + PGresult *pgResult = PQgetResult(this->connection); + + TRY_BEGIN() + { + // Throw timeout error if cancelled + if (busy) + THROW_FMT(DbQueryError, "query '%s' timed out after %" PRIu64 "ms", strPtr(query), this->queryTimeout); + + // If this was a command that returned no results then we are done + int resultStatus = PQresultStatus(pgResult); + + if (resultStatus != PGRES_COMMAND_OK) + { + // Expect some rows to be returned + if (resultStatus != PGRES_TUPLES_OK) + { + THROW_FMT( + DbQueryError, "unable to execute query '%s': %s", strPtr(query), + strPtr(strTrim(strNew(PQresultErrorMessage(pgResult))))); + } + + // Fetch row and column values + result = varLstNew(); + + MEM_CONTEXT_BEGIN(lstMemContext((List *)result)) + { + int rowTotal = PQntuples(pgResult); + int columnTotal = PQnfields(pgResult); + + // Get column types + Oid *columnType = memNew(sizeof(int) * (size_t)columnTotal); + + for (int columnIdx = 0; columnIdx < columnTotal; columnIdx++) + columnType[columnIdx] = PQftype(pgResult, columnIdx); + + // Get values + for (int rowIdx = 0; rowIdx < rowTotal; rowIdx++) + { + VariantList *resultRow = varLstNew(); + + for (int columnIdx = 0; columnIdx < columnTotal; columnIdx++) + { + char *value = PQgetvalue(pgResult, rowIdx, columnIdx); + + // If value is zero-length then check if it is null + if (value[0] == '\0' && PQgetisnull(pgResult, rowIdx, columnIdx)) + { + varLstAdd(resultRow, NULL); + } + // Else convert the value to a variant + 
else + { + // Convert column type. Not all PostgreSQL types are supported but these should suffice. + switch (columnType[columnIdx]) + { + // Boolean type + case 16: // bool + { + varLstAdd(resultRow, varNewBool(varBoolForce(varNewStrZ(value)))); + break; + } + + // Text/char types + case 18: // char + case 19: // name + case 25: // text + { + varLstAdd(resultRow, varNewStrZ(value)); + break; + } + + // Integer types + case 20: // int8 + case 21: // int2 + case 23: // int4 + case 26: // oid + { + varLstAdd(resultRow, varNewInt64(cvtZToInt64(value))); + break; + } + + default: + { + THROW_FMT( + FormatError, "unable to parse type %u in column %d for query '%s'", + columnType[columnIdx], columnIdx, strPtr(query)); + } + } + } + } + + varLstAdd(result, varNewVarLst(resultRow)); + } + } + MEM_CONTEXT_END(); + } + } + FINALLY() + { + // Free the result + PQclear(pgResult); + + // Need to get a NULL result to complete the request + CHECK(PQgetResult(this->connection) == NULL); + } + TRY_END(); + + varLstMove(result, MEM_CONTEXT_OLD()); + } + MEM_CONTEXT_TEMP_END(); + + FUNCTION_LOG_RETURN(VARIANT_LIST, result); +} + +/*********************************************************************************************************************************** +Close connection to PostgreSQL +***********************************************************************************************************************************/ +void +pgClientClose(PgClient *this) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(PG_CLIENT, this); + FUNCTION_LOG_END(); + + ASSERT(this != NULL); + + if (this->connection != NULL) + { + memContextCallbackClear(this->memContext); + PQfinish(this->connection); + this->connection = NULL; + } + + FUNCTION_LOG_RETURN_VOID(); +} + +/*********************************************************************************************************************************** +Move the pg client object to a new context 
+***********************************************************************************************************************************/ +PgClient * +pgClientMove(PgClient *this, MemContext *parentNew) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(PG_CLIENT, this); + FUNCTION_TEST_PARAM(MEM_CONTEXT, parentNew); + FUNCTION_TEST_END(); + + ASSERT(parentNew != NULL); + + if (this != NULL) + memContextMove(this->memContext, parentNew); + + FUNCTION_TEST_RETURN(this); +} + +/*********************************************************************************************************************************** +Render as string for logging +***********************************************************************************************************************************/ +String * +pgClientToLog(const PgClient *this) +{ + return strNewFmt( + "{host: %s, port: %u, database: %s, user: %s, queryTimeout %" PRIu64 "}", strPtr(strToLog(this->host)), this->port, + strPtr(strToLog(this->database)), strPtr(strToLog(this->user)), this->queryTimeout); +} diff -Nru pgbackrest-2.15.1/src/postgres/client.h pgbackrest-2.16/src/postgres/client.h --- pgbackrest-2.15.1/src/postgres/client.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/src/postgres/client.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,53 @@ +/*********************************************************************************************************************************** +PostgreSQL Client + +Connect to a PostgreSQL database and run queries. This is not intended to be a general purpose client but is suitable for +pgBackRest's limited needs. In particular, data type support is limited to text, int, and bool types so it may be necessary to add +casts to queries to output one of these types. 
+***********************************************************************************************************************************/ +#ifndef POSTGRES_QUERY_H +#define POSTGRES_QUERY_H + +#include "common/type/string.h" +#include "common/type/variantList.h" +#include "common/time.h" + +/*********************************************************************************************************************************** +Object type +***********************************************************************************************************************************/ +#define PG_CLIENT_TYPE PgClient +#define PG_CLIENT_PREFIX pgClient + +typedef struct PgClient PgClient; + +/*********************************************************************************************************************************** +Constructor +***********************************************************************************************************************************/ +PgClient *pgClientNew( + const String *host, const unsigned int port, const String *database, const String *user, const TimeMSec queryTimeout); + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +PgClient *pgClientOpen(PgClient *this); +VariantList *pgClientQuery(PgClient *this, const String *query); +void pgClientClose(PgClient *this); + +PgClient *pgClientMove(PgClient *this, MemContext *parentNew); + +/*********************************************************************************************************************************** +Destructor +***********************************************************************************************************************************/ +void pgClientFree(PgClient *this); + 
+/*********************************************************************************************************************************** +Macros for function logging +***********************************************************************************************************************************/ +String *pgClientToLog(const PgClient *this); + +#define FUNCTION_LOG_PG_CLIENT_TYPE \ + PgClient * +#define FUNCTION_LOG_PG_CLIENT_FORMAT(value, buffer, bufferSize) \ + FUNCTION_LOG_STRING_OBJECT_FORMAT(value, pgClientToLog, buffer, bufferSize) + +#endif diff -Nru pgbackrest-2.15.1/src/postgres/interface.c pgbackrest-2.16/src/postgres/interface.c --- pgbackrest-2.15.1/src/postgres/interface.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/postgres/interface.c 2019-08-05 16:03:04.000000000 +0000 @@ -15,6 +15,12 @@ #include "storage/helper.h" /*********************************************************************************************************************************** +Defines for various Postgres paths and files +***********************************************************************************************************************************/ +STRING_EXTERN(PG_NAME_WAL_STR, PG_NAME_WAL); +STRING_EXTERN(PG_NAME_XLOG_STR, PG_NAME_XLOG); + +/*********************************************************************************************************************************** Define default wal segment size Before PostgreSQL 11 WAL segment size could only be changed at compile time and is not known to be well-tested, so only the default @@ -36,6 +42,11 @@ #define PG_WAL_HEADER_SIZE ((unsigned int)(512)) /*********************************************************************************************************************************** +Name of default PostgreSQL database used for running all queries and commands +***********************************************************************************************************************************/ 
+STRING_EXTERN(PG_DB_POSTGRES_STR, PG_DB_POSTGRES); + +/*********************************************************************************************************************************** PostgreSQL interface definitions Each supported version of PostgreSQL must have interface files named postgres/interface/vXXX.c/h that implement the functions @@ -408,6 +419,19 @@ } /*********************************************************************************************************************************** +Get WAL name (wal/xlog) for a PostgreSQL version +***********************************************************************************************************************************/ +const String * +pgWalName(unsigned int pgVersion) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(UINT, pgVersion); + FUNCTION_TEST_END(); + + FUNCTION_TEST_RETURN(pgVersion >= PG_VERSION_WAL_RENAME ? PG_NAME_WAL_STR : PG_NAME_XLOG_STR); +} + +/*********************************************************************************************************************************** Create pg_control for testing ***********************************************************************************************************************************/ #ifdef DEBUG diff -Nru pgbackrest-2.15.1/src/postgres/interface.h pgbackrest-2.16/src/postgres/interface.h --- pgbackrest-2.15.1/src/postgres/interface.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/postgres/interface.h 2019-08-05 16:03:04.000000000 +0000 @@ -17,6 +17,17 @@ #define PG_PATH_ARCHIVE_STATUS "archive_status" #define PG_PATH_GLOBAL "global" +#define PG_NAME_WAL "wal" + STRING_DECLARE(PG_NAME_WAL_STR); +#define PG_NAME_XLOG "xlog" + STRING_DECLARE(PG_NAME_XLOG_STR); + +/*********************************************************************************************************************************** +Name of default PostgreSQL database used for running all queries and commands 
+***********************************************************************************************************************************/ +#define PG_DB_POSTGRES "postgres" + STRING_DECLARE(PG_DB_POSTGRES_STR); + /*********************************************************************************************************************************** Define default page size @@ -69,6 +80,8 @@ PgWal pgWalFromFile(const String *walFile); PgWal pgWalFromBuffer(const Buffer *walBuffer); +const String *pgWalName(unsigned int pgVersion); + /*********************************************************************************************************************************** Test Functions ***********************************************************************************************************************************/ diff -Nru pgbackrest-2.15.1/src/postgres/version.h pgbackrest-2.16/src/postgres/version.h --- pgbackrest-2.15.1/src/postgres/version.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/postgres/version.h 2019-08-05 16:03:04.000000000 +0000 @@ -27,6 +27,21 @@ #define PG_VERSION_MAX PG_VERSION_11 /*********************************************************************************************************************************** +Version where various PostgreSQL capabilities were introduced +***********************************************************************************************************************************/ +// application_name can be set to show the application name in pg_stat_activity +#define PG_VERSION_APPLICATION_NAME PG_VERSION_90 + +// pg_is_in_recovery() supported +#define PG_VERSION_HOT_STANDBY PG_VERSION_91 + +// pg_create_restore_point() supported +#define PG_VERSION_RESTORE_POINT PG_VERSION_91 + +// xlog was renamed to wal +#define PG_VERSION_WAL_RENAME PG_VERSION_10 + +/*********************************************************************************************************************************** PostgreSQL version string constants 
for use in error messages ***********************************************************************************************************************************/ #define PG_VERSION_83_STR "8.3" diff -Nru pgbackrest-2.15.1/src/protocol/client.c pgbackrest-2.16/src/protocol/client.c --- pgbackrest-2.15.1/src/protocol/client.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/protocol/client.c 2019-08-05 16:03:04.000000000 +0000 @@ -244,7 +244,7 @@ } /*********************************************************************************************************************************** -Move the file object to a new context +Move the protocol client object to a new context ***********************************************************************************************************************************/ ProtocolClient * protocolClientMove(ProtocolClient *this, MemContext *parentNew) diff -Nru pgbackrest-2.15.1/src/protocol/helper.c pgbackrest-2.16/src/protocol/helper.c --- pgbackrest-2.15.1/src/protocol/helper.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/protocol/helper.c 2019-08-05 16:03:04.000000000 +0000 @@ -69,6 +69,19 @@ } /*********************************************************************************************************************************** +Is pg local? 
+***********************************************************************************************************************************/ +bool +pgIsLocal(unsigned int hostId) +{ + FUNCTION_LOG_BEGIN(logLevelDebug); + FUNCTION_LOG_PARAM(UINT, hostId); + FUNCTION_LOG_END(); + + FUNCTION_LOG_RETURN(BOOL, !cfgOptionTest(cfgOptPgHost + hostId - 1)); +} + +/*********************************************************************************************************************************** Get the command line required for local protocol execution ***********************************************************************************************************************************/ static StringList * @@ -79,8 +92,6 @@ FUNCTION_LOG_PARAM(UINT, protocolId); FUNCTION_LOG_END(); - ASSERT(protocolStorageType == protocolStorageTypeRepo); // ??? Hard-coded until the function supports pg remotes - StringList *result = NULL; MEM_CONTEXT_TEMP_BEGIN() @@ -173,14 +184,16 @@ Get the command line required for remote protocol execution ***********************************************************************************************************************************/ static StringList * -protocolRemoteParam(ProtocolStorageType protocolStorageType, unsigned int protocolId) +protocolRemoteParam(ProtocolStorageType protocolStorageType, unsigned int protocolId, unsigned int hostIdx) { FUNCTION_LOG_BEGIN(logLevelDebug); FUNCTION_LOG_PARAM(ENUM, protocolStorageType); FUNCTION_LOG_PARAM(UINT, protocolId); + FUNCTION_LOG_PARAM(UINT, hostIdx); FUNCTION_LOG_END(); - ASSERT(protocolStorageType == protocolStorageTypeRepo); // ??? Hard-coded until the function supports pg remotes + // Is this a repo remote? 
+ bool isRepo = protocolStorageType == protocolStorageTypeRepo; // Fixed parameters for ssh command StringList *result = strLstNew(); @@ -192,31 +205,64 @@ strLstAddZ(result, "PasswordAuthentication=no"); // Append port if specified - if (cfgOptionTest(cfgOptRepoHostPort)) + ConfigOption optHostPort = isRepo ? cfgOptRepoHostPort : cfgOptPgHostPort + hostIdx; + + if (cfgOptionTest(optHostPort)) { strLstAddZ(result, "-p"); - strLstAdd(result, strNewFmt("%u", cfgOptionUInt(cfgOptRepoHostPort))); + strLstAdd(result, strNewFmt("%u", cfgOptionUInt(optHostPort))); } // Append user/host - strLstAdd(result, strNewFmt("%s@%s", strPtr(cfgOptionStr(cfgOptRepoHostUser)), strPtr(cfgOptionStr(cfgOptRepoHost)))); + strLstAdd( + result, + strNewFmt( + "%s@%s", strPtr(cfgOptionStr(isRepo ? cfgOptRepoHostUser : cfgOptPgHostUser + hostIdx)), + strPtr(cfgOptionStr(isRepo ? cfgOptRepoHost : cfgOptPgHost + hostIdx)))); // Option replacements KeyValue *optionReplace = kvNew(); // Replace config options with the host versions - if (cfgOptionSource(cfgOptRepoHostConfig) != cfgSourceDefault) - kvPut(optionReplace, VARSTR(CFGOPT_CONFIG_STR), cfgOption(cfgOptRepoHostConfig)); + unsigned int optConfig = isRepo ? cfgOptRepoHostConfig : cfgOptPgHostConfig + hostIdx; + + kvPut(optionReplace, VARSTR(CFGOPT_CONFIG_STR), cfgOptionSource(optConfig) != cfgSourceDefault ? cfgOption(optConfig) : NULL); + + unsigned int optConfigIncludePath = isRepo ? cfgOptRepoHostConfigIncludePath : cfgOptPgHostConfigIncludePath + hostIdx; + + kvPut( + optionReplace, VARSTR(CFGOPT_CONFIG_INCLUDE_PATH_STR), + cfgOptionSource(optConfigIncludePath) != cfgSourceDefault ? cfgOption(optConfigIncludePath) : NULL); - if (cfgOptionSource(cfgOptRepoHostConfigIncludePath) != cfgSourceDefault) - kvPut(optionReplace, VARSTR(CFGOPT_CONFIG_INCLUDE_PATH_STR), cfgOption(cfgOptRepoHostConfigIncludePath)); + unsigned int optConfigPath = isRepo ? 
cfgOptRepoHostConfigPath : cfgOptPgHostConfigPath + hostIdx; - if (cfgOptionSource(cfgOptRepoHostConfigPath) != cfgSourceDefault) - kvPut(optionReplace, VARSTR(CFGOPT_CONFIG_PATH_STR), cfgOption(cfgOptRepoHostConfigPath)); + kvPut( + optionReplace, VARSTR(CFGOPT_CONFIG_PATH_STR), + cfgOptionSource(optConfigPath) != cfgSourceDefault ? cfgOption(optConfigPath) : NULL); // Use a C remote kvPut(optionReplace, VARSTR(CFGOPT_C_STR), VARBOOL(true)); + // Copy pg options to index 0 since that's what the remote will be expecting + if (hostIdx != 0) + { + kvPut(optionReplace, VARSTR(CFGOPT_PG1_PATH_STR), cfgOption(cfgOptPgPath + hostIdx)); + + if (cfgOptionSource(cfgOptPgSocketPath + hostIdx) != cfgSourceDefault) + kvPut(optionReplace, VARSTR(CFGOPT_PG1_SOCKET_PATH_STR), cfgOption(cfgOptPgSocketPath + hostIdx)); + + if (cfgOptionSource(cfgOptPgPort + hostIdx) != cfgSourceDefault) + kvPut(optionReplace, VARSTR(CFGOPT_PG1_PORT_STR), cfgOption(cfgOptPgPort + hostIdx)); + } + + // Remove pg options that are not needed on the remote. This is to reduce clutter and make debugging options easier. + for (unsigned int pgIdx = 1; pgIdx < cfgOptionIndexTotal(cfgOptPgPath); pgIdx++) + { + kvPut(optionReplace, VARSTRZ(cfgOptionName(cfgOptPgPath + pgIdx)), NULL); + kvPut(optionReplace, VARSTRZ(cfgOptionName(cfgOptPgSocketPath + pgIdx)), NULL); + kvPut(optionReplace, VARSTRZ(cfgOptionName(cfgOptPgPort + pgIdx)), NULL); + } + // Add the command option (or use the current command option if it is valid) if (!cfgOptionTest(cfgOptCommand)) kvPut(optionReplace, VARSTR(CFGOPT_COMMAND_STR), VARSTRZ(cfgCommandName(cfgCommand()))); @@ -238,10 +284,10 @@ kvPut(optionReplace, VARSTR(CFGOPT_LOG_LEVEL_STDERR_STR), VARSTRDEF("error")); // Add the type - kvPut(optionReplace, VARSTR(CFGOPT_TYPE_STR), VARSTRDEF("backup")); + kvPut(optionReplace, VARSTR(CFGOPT_TYPE_STR), isRepo ? 
VARSTRDEF("backup") : VARSTRDEF("db")); StringList *commandExec = cfgExecParam(cfgCmdRemote, optionReplace); - strLstInsert(commandExec, 0, cfgOptionStr(cfgOptRepoHostCmd)); + strLstInsert(commandExec, 0, cfgOptionStr(isRepo ? cfgOptRepoHostCmd : cfgOptPgHostCmd + hostIdx)); strLstAdd(result, strLstJoin(commandExec, " ")); FUNCTION_LOG_RETURN(STRING_LIST, result); @@ -251,12 +297,16 @@ Get the remote protocol client ***********************************************************************************************************************************/ ProtocolClient * -protocolRemoteGet(ProtocolStorageType protocolStorageType) +protocolRemoteGet(ProtocolStorageType protocolStorageType, unsigned int hostId) { FUNCTION_LOG_BEGIN(logLevelDebug); FUNCTION_LOG_PARAM(ENUM, protocolStorageType); + FUNCTION_LOG_PARAM(UINT, hostId); FUNCTION_LOG_END(); + // Is this a repo remote? + bool isRepo = protocolStorageType == protocolStorageTypeRepo; + protocolHelperInit(); // Allocate the client cache @@ -264,7 +314,7 @@ { MEM_CONTEXT_BEGIN(protocolHelper.memContext) { - // The number of remotes allowed is the greater of allowed repo or db configs + 1 (0 is reserved for connections from + // The number of remotes allowed is the greater of allowed repo or pg configs + 1 (0 is reserved for connections from // the main process). Since these are static and only one will be true it presents a problem for coverage. We think // that pg remotes will always be greater but we'll protect that assumption with an assertion. ASSERT(cfgDefOptionIndexTotal(cfgDefOptPgPath) >= cfgDefOptionIndexTotal(cfgDefOptRepoPath)); @@ -276,11 +326,13 @@ MEM_CONTEXT_END(); } - // Determine protocol id for the remote. If the process option is set then use that since we want to remote protocol id to - // match the local protocol id (but we'll still save it in position 0 or we'd need to allocated up to process-max slots). - // Otherwise set to 0 since the remote is being started from a main process. 
+ // Determine protocol id for the remote. If the process option is set then use that since we want the remote protocol id to + // match the local protocol id. Otherwise set to 0 since the remote is being started from a main process and there should only + // be one remote per host. unsigned int protocolId = 0; - unsigned int protocolIdx = 0; + + // Use hostId to determine where to cache the remote + unsigned int protocolIdx = hostId - 1; if (cfgOptionTest(cfgOptProcess)) protocolId = cfgOptionUInt(cfgOptProcess); @@ -294,20 +346,22 @@ { MEM_CONTEXT_BEGIN(protocolHelper.memContext) { + unsigned int optHost = isRepo ? cfgOptRepoHost : cfgOptPgHost + hostId - 1; + + // Execute the protocol command protocolHelperClient->exec = execNew( - cfgOptionStr(cfgOptCmdSsh), protocolRemoteParam(protocolStorageType, protocolId), - strNewFmt(PROTOCOL_SERVICE_REMOTE "-%u process on '%s'", protocolId, strPtr(cfgOptionStr(cfgOptRepoHost))), + cfgOptionStr(cfgOptCmdSsh), protocolRemoteParam(protocolStorageType, protocolId, hostId - 1), + strNewFmt(PROTOCOL_SERVICE_REMOTE "-%u process on '%s'", protocolId, strPtr(cfgOptionStr(optHost))), (TimeMSec)(cfgOptionDbl(cfgOptProtocolTimeout) * 1000)); execOpen(protocolHelperClient->exec); // Create protocol object protocolHelperClient->client = protocolClientNew( - strNewFmt(PROTOCOL_SERVICE_REMOTE "-%u protocol on '%s'", protocolId, strPtr(cfgOptionStr(cfgOptRepoHost))), + strNewFmt(PROTOCOL_SERVICE_REMOTE "-%u protocol on '%s'", protocolId, strPtr(cfgOptionStr(optHost))), PROTOCOL_SERVICE_REMOTE_STR, execIoRead(protocolHelperClient->exec), execIoWrite(protocolHelperClient->exec)); // Get cipher options from the remote if none are locally configured - if (strEq(cfgOptionStr(cfgOptRepoCipherType), CIPHER_TYPE_NONE_STR)) + if (isRepo && strEq(cfgOptionStr(cfgOptRepoCipherType), CIPHER_TYPE_NONE_STR)) { // Options to query VariantList *param = varLstNew(); diff -Nru pgbackrest-2.15.1/src/protocol/helper.h pgbackrest-2.16/src/protocol/helper.h --- 
pgbackrest-2.15.1/src/protocol/helper.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/protocol/helper.h 2019-08-05 16:03:04.000000000 +0000 @@ -28,11 +28,12 @@ ***********************************************************************************************************************************/ void protocolKeepAlive(void); ProtocolClient *protocolLocalGet(ProtocolStorageType protocolStorageType, unsigned int protocolId); -ProtocolClient *protocolRemoteGet(ProtocolStorageType protocolStorageType); +ProtocolClient *protocolRemoteGet(ProtocolStorageType protocolStorageType, unsigned int hostId); /*********************************************************************************************************************************** Getters ***********************************************************************************************************************************/ +bool pgIsLocal(unsigned int hostId); bool repoIsLocal(void); /*********************************************************************************************************************************** diff -Nru pgbackrest-2.15.1/src/protocol/server.c pgbackrest-2.16/src/protocol/server.c --- pgbackrest-2.15.1/src/protocol/server.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/protocol/server.c 2019-08-05 16:03:04.000000000 +0000 @@ -157,8 +157,13 @@ // Get the next handler ProtocolServerProcessHandler handler = *(ProtocolServerProcessHandler *)lstGet(this->handlerList, handlerIdx); - // Send the command to the handler - found = handler(command, paramList, this); + // Send the command to the handler. Run the handler in the server's memory context in case any persistent data + // needs to be stored by the handler. 
+ MEM_CONTEXT_BEGIN(this->memContext) + { + found = handler(command, paramList, this); + } + MEM_CONTEXT_END(); // If the handler processed the command then exit the handler loop if (found) diff -Nru pgbackrest-2.15.1/src/storage/helper.c pgbackrest-2.16/src/storage/helper.c --- pgbackrest-2.15.1/src/storage/helper.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/helper.c 2019-08-05 16:03:04.000000000 +0000 @@ -138,6 +138,37 @@ FUNCTION_TEST_RETURN(storageHelper.storageLocalWrite); } +/*********************************************************************************************************************************** +Get the pg storage +***********************************************************************************************************************************/ +static Storage * +storagePgGet(bool write) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(BOOL, write); + FUNCTION_TEST_END(); + + Storage *result = NULL; + + // Determine which host to use + unsigned int hostId = cfgOptionTest(cfgOptHostId) ? 
cfgOptionUInt(cfgOptHostId) : 1; + + // Use remote storage + if (!pgIsLocal(hostId)) + { + result = storageRemoteNew( + STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, write, NULL, + protocolRemoteGet(protocolStorageTypePg, hostId), cfgOptionUInt(cfgOptCompressLevelNetwork)); + } + // Use Posix storage + else + { + result = storagePosixNew( + cfgOptionStr(cfgOptPgPath + hostId - 1), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, write, NULL); + } + + FUNCTION_TEST_RETURN(result); +} /*********************************************************************************************************************************** Get ready-only PostgreSQL storage @@ -153,8 +184,7 @@ MEM_CONTEXT_BEGIN(storageHelper.memContext) { - storageHelper.storagePg = storagePosixNew( - cfgOptionStr(cfgOptPgPath), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, false, NULL); + storageHelper.storagePg = storagePgGet(false); } MEM_CONTEXT_END(); } @@ -176,8 +206,7 @@ MEM_CONTEXT_BEGIN(storageHelper.memContext) { - storageHelper.storagePgWrite = storagePosixNew( - cfgOptionStr(cfgOptPgPath), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); + storageHelper.storagePgWrite = storagePgGet(true); } MEM_CONTEXT_END(); } @@ -278,7 +307,7 @@ { result = storageRemoteNew( STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, write, storageRepoPathExpression, - protocolRemoteGet(protocolStorageTypeRepo), cfgOptionUInt(cfgOptCompressLevelNetwork)); + protocolRemoteGet(protocolStorageTypeRepo, 1), cfgOptionUInt(cfgOptCompressLevelNetwork)); } // Use CIFS storage else if (strEqZ(type, STORAGE_TYPE_CIFS)) @@ -296,12 +325,16 @@ else if (strEqZ(type, STORAGE_TYPE_S3)) { // Set the default port - unsigned int port = STORAGE_S3_PORT_DEFAULT; + unsigned int port = cfgOptionUInt(cfgOptRepoS3Port); // Extract port from the endpoint and host if it is present const String *endPoint = cfgOptionHostPort(cfgOptRepoS3Endpoint, &port); const String *host = 
cfgOptionHostPort(cfgOptRepoS3Host, &port); + // If the port option was set explicitly then use it in preference to appended ports + if (cfgOptionSource(cfgOptRepoS3Port) != cfgSourceDefault) + port = cfgOptionUInt(cfgOptRepoS3Port); + result = storageS3New( cfgOptionStr(cfgOptRepoPath), write, storageRepoPathExpression, cfgOptionStr(cfgOptRepoS3Bucket), endPoint, cfgOptionStr(cfgOptRepoS3Region), cfgOptionStr(cfgOptRepoS3Key), cfgOptionStr(cfgOptRepoS3KeySecret), diff -Nru pgbackrest-2.15.1/src/storage/posix/storage.c pgbackrest-2.16/src/storage/posix/storage.c --- pgbackrest-2.15.1/src/storage/posix/storage.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/posix/storage.c 2019-08-05 16:03:04.000000000 +0000 @@ -686,11 +686,11 @@ driver->interface = (StorageInterface) { - .feature = (1 << storageFeaturePath), .exists = storagePosixExists, .info = storagePosixInfo, - .infoList = storagePosixInfoList, .list = storagePosixList, .move = storagePosixMove, .newRead = storagePosixNewRead, - .newWrite = storagePosixNewWrite, .pathCreate = storagePosixPathCreate, .pathExists = storagePosixPathExists, - .pathRemove = storagePosixPathRemove, .pathSync = pathSync ? storagePosixPathSync : NULL, - .remove = storagePosixRemove + .feature = (1 << storageFeaturePath | 1 << storageFeatureCompress), .exists = storagePosixExists, + .info = storagePosixInfo, .infoList = storagePosixInfoList, .list = storagePosixList, .move = storagePosixMove, + .newRead = storagePosixNewRead, .newWrite = storagePosixNewWrite, .pathCreate = storagePosixPathCreate, + .pathExists = storagePosixPathExists, .pathRemove = storagePosixPathRemove, + .pathSync = pathSync ? 
storagePosixPathSync : NULL, .remove = storagePosixRemove }; this = storageNew(type, path, modeFile, modePath, write, pathExpressionFunction, driver, driver->interface); diff -Nru pgbackrest-2.15.1/src/storage/remote/protocol.c pgbackrest-2.16/src/storage/remote/protocol.c --- pgbackrest-2.15.1/src/storage/remote/protocol.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/remote/protocol.c 2019-08-05 16:03:04.000000000 +0000 @@ -3,13 +3,19 @@ ***********************************************************************************************************************************/ #include "build.auto.h" +#include "command/backup/pageChecksum.h" #include "common/compress/gzip/compress.h" #include "common/compress/gzip/decompress.h" +#include "common/crypto/cipherBlock.h" +#include "common/crypto/hash.h" #include "common/debug.h" +#include "common/io/filter/sink.h" +#include "common/io/filter/size.h" #include "common/io/io.h" #include "common/log.h" #include "common/memContext.h" #include "common/regExp.h" +#include "config/config.h" #include "storage/remote/protocol.h" #include "storage/helper.h" #include "storage/storage.intern.h" @@ -56,17 +62,26 @@ ASSERT(filterGroup != NULL); ASSERT(filterList != NULL); - const VariantList *filterKeyList = kvKeyList(varKv(filterList)); - - for (unsigned int filterIdx = 0; filterIdx < varLstSize(filterKeyList); filterIdx++) + for (unsigned int filterIdx = 0; filterIdx < varLstSize(varVarLst(filterList)); filterIdx++) { - const String *filterKey = varStr(varLstGet(filterKeyList, filterIdx)); - const VariantList *filterParam = varVarLst(kvGet(varKv(filterList), varLstGet(filterKeyList, filterIdx))); + const KeyValue *filterKv = varKv(varLstGet(varVarLst(filterList), filterIdx)); + const String *filterKey = varStr(varLstGet(kvKeyList(filterKv), 0)); + const VariantList *filterParam = varVarLst(kvGet(filterKv, VARSTR(filterKey))); if (strEq(filterKey, GZIP_COMPRESS_FILTER_TYPE_STR)) ioFilterGroupAdd(filterGroup, 
gzipCompressNewVar(filterParam)); else if (strEq(filterKey, GZIP_DECOMPRESS_FILTER_TYPE_STR)) ioFilterGroupAdd(filterGroup, gzipDecompressNewVar(filterParam)); + else if (strEq(filterKey, CIPHER_BLOCK_FILTER_TYPE_STR)) + ioFilterGroupAdd(filterGroup, cipherBlockNewVar(filterParam)); + else if (strEq(filterKey, CRYPTO_HASH_FILTER_TYPE_STR)) + ioFilterGroupAdd(filterGroup, cryptoHashNewVar(filterParam)); + else if (strEq(filterKey, PAGE_CHECKSUM_FILTER_TYPE_STR)) + ioFilterGroupAdd(filterGroup, pageChecksumNewVar(filterParam)); + else if (strEq(filterKey, SINK_FILTER_TYPE_STR)) + ioFilterGroupAdd(filterGroup, ioSinkNew()); + else if (strEq(filterKey, SIZE_FILTER_TYPE_STR)) + ioFilterGroupAdd(filterGroup, ioSizeNew()); else THROW_FMT(AssertError, "unable to add filter '%s'", strPtr(filterKey)); } @@ -88,8 +103,8 @@ ASSERT(command != NULL); - // Determine which storage should be used (??? for now this is only repo) - const Storage *storage = storageRepo(); + // Determine which storage should be used + const Storage *storage = strEqZ(cfgOptionStr(cfgOptType), "backup") ? 
storageRepo() : storagePg(); StorageInterface interface = storageInterface(storage); void *driver = storageDriver(storage); @@ -151,9 +166,14 @@ } while (!ioReadEof(fileRead)); + ioReadClose(fileRead); + // Write a zero block to show file is complete ioWriteLine(protocolServerIoWrite(server), BUFSTRDEF(PROTOCOL_BLOCK_HEADER "0")); ioWriteFlush(protocolServerIoWrite(server)); + + // Push filter results + protocolServerResponse(server, ioFilterGroupResultAll(ioReadFilterGroup(fileRead))); } } else if (strEq(command, PROTOCOL_COMMAND_STORAGE_OPEN_WRITE_STR)) @@ -204,14 +224,19 @@ else if (remaining == 0) { ioWriteClose(fileWrite); + + // Push filter results + protocolServerResponse(server, ioFilterGroupResultAll(ioWriteFilterGroup(fileWrite))); } // Write was aborted so free the file else + { ioWriteFree(fileWrite); + protocolServerResponse(server, NULL); + } } while (remaining > 0); - protocolServerResponse(server, NULL); } else if (strEq(command, PROTOCOL_COMMAND_STORAGE_PATH_CREATE_STR)) { diff -Nru pgbackrest-2.15.1/src/storage/remote/read.c pgbackrest-2.16/src/storage/remote/read.c --- pgbackrest-2.15.1/src/storage/remote/read.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/remote/read.c 2019-08-05 16:03:04.000000000 +0000 @@ -63,27 +63,26 @@ MEM_CONTEXT_TEMP_BEGIN() { - IoFilterGroup *filterGroup = ioFilterGroupNew(); - // If the file is compressible add compression filter on the remote if (this->interface.compressible) - ioFilterGroupAdd(filterGroup, gzipCompressNew((int)this->interface.compressLevel, true)); + { + ioFilterGroupAdd( + ioReadFilterGroup(storageReadIo(this->read)), gzipCompressNew((int)this->interface.compressLevel, true)); + } ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_STORAGE_OPEN_READ_STR); protocolCommandParamAdd(command, VARSTR(this->interface.name)); protocolCommandParamAdd(command, VARBOOL(this->interface.ignoreMissing)); - protocolCommandParamAdd(command, ioFilterGroupParamAll(filterGroup)); + 
protocolCommandParamAdd(command, ioFilterGroupParamAll(ioReadFilterGroup(storageReadIo(this->read)))); + + result = varBool(protocolClientExecute(this->client, command, true)); + + // Clear filters since they will be run on the remote side + ioFilterGroupClear(ioReadFilterGroup(storageReadIo(this->read))); // If the file is compressible add decompression filter locally if (this->interface.compressible) - { - // Since we can't insert filters yet we'll just error if there are already filters in the list - CHECK(ioFilterGroupSize(ioReadFilterGroup(storageReadIo(this->read))) == 0); - ioFilterGroupAdd(ioReadFilterGroup(storageReadIo(this->read)), gzipDecompressNew(true)); - } - - result = varBool(protocolClientExecute(this->client, command, true)); } MEM_CONTEXT_TEMP_END(); @@ -122,7 +121,11 @@ this->remaining = (size_t)storageRemoteProtocolBlockSize(ioReadLine(protocolClientIoRead(this->client))); if (this->remaining == 0) + { + ioFilterGroupResultAllSet( + ioReadFilterGroup(storageReadIo(this->read)), protocolClientReadOutput(this->client, true)); this->eof = true; + } #ifdef DEBUG this->protocolReadBytes += this->remaining; diff -Nru pgbackrest-2.15.1/src/storage/remote/storage.c pgbackrest-2.16/src/storage/remote/storage.c --- pgbackrest-2.15.1/src/storage/remote/storage.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/remote/storage.c 2019-08-05 16:03:04.000000000 +0000 @@ -264,7 +264,7 @@ } /*********************************************************************************************************************************** -Sync a path. There's no need for this on S3 so just return success. 
+Sync a path ***********************************************************************************************************************************/ static void storageRemotePathSync(THIS_VOID, const String *path) diff -Nru pgbackrest-2.15.1/src/storage/remote/write.c pgbackrest-2.16/src/storage/remote/write.c --- pgbackrest-2.15.1/src/storage/remote/write.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/remote/write.c 2019-08-05 16:03:04.000000000 +0000 @@ -68,11 +68,9 @@ MEM_CONTEXT_TEMP_BEGIN() { - IoFilterGroup *filterGroup = ioFilterGroupNew(); - // If the file is compressible add decompression filter on the remote if (this->interface.compressible) - ioFilterGroupAdd(filterGroup, gzipDecompressNew(true)); + ioFilterGroupInsert(ioWriteFilterGroup(storageWriteIo(this->write)), 0, gzipDecompressNew(true)); ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_STORAGE_OPEN_WRITE_STR); protocolCommandParamAdd(command, VARSTR(this->interface.name)); @@ -85,17 +83,20 @@ protocolCommandParamAdd(command, VARBOOL(this->interface.syncFile)); protocolCommandParamAdd(command, VARBOOL(this->interface.syncPath)); protocolCommandParamAdd(command, VARBOOL(this->interface.atomic)); - protocolCommandParamAdd(command, ioFilterGroupParamAll(filterGroup)); + protocolCommandParamAdd(command, ioFilterGroupParamAll(ioWriteFilterGroup(storageWriteIo(this->write)))); + + protocolClientExecute(this->client, command, false); - // If the file is compressible add compression filter locally + // Clear filters since they will be run on the remote side + ioFilterGroupClear(ioWriteFilterGroup(storageWriteIo(this->write))); + + // If the file is compressible add compression filter locally if (this->interface.compressible) { ioFilterGroupAdd( ioWriteFilterGroup(storageWriteIo(this->write)), gzipCompressNew((int)this->interface.compressLevel, true)); } - protocolClientExecute(this->client, command, false); - // Set free callback to ensure remote file is freed 
memContextCallbackSet(this->memContext, storageWriteRemoteFreeResource, this); } @@ -150,7 +151,7 @@ { ioWriteLine(protocolClientIoWrite(this->client), BUFSTRDEF(PROTOCOL_BLOCK_HEADER "0")); ioWriteFlush(protocolClientIoWrite(this->client)); - protocolClientReadOutput(this->client, false); + ioFilterGroupResultAllSet(ioWriteFilterGroup(storageWriteIo(this->write)), protocolClientReadOutput(this->client, true)); this->client = NULL; memContextCallbackClear(this->memContext); diff -Nru pgbackrest-2.15.1/src/storage/s3/storage.c pgbackrest-2.16/src/storage/s3/storage.c --- pgbackrest-2.15.1/src/storage/s3/storage.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/s3/storage.c 2019-08-05 16:03:04.000000000 +0000 @@ -46,6 +46,11 @@ STRING_STATIC(S3_QUERY_VALUE_LIST_TYPE_2_STR, "2"); /*********************************************************************************************************************************** +S3 errors +***********************************************************************************************************************************/ +STRING_STATIC(S3_ERROR_REQUEST_TIME_TOO_SKEWED_STR, "RequestTimeTooSkewed"); + +/*********************************************************************************************************************************** XML tags ***********************************************************************************************************************************/ STRING_STATIC(S3_XML_TAG_CODE_STR, "Code"); @@ -249,98 +254,139 @@ ASSERT(uri != NULL); StorageS3RequestResult result = {0}; + unsigned int retryRemaining = 2; + bool done; - MEM_CONTEXT_TEMP_BEGIN() + do { - // Create header list and add content length - HttpHeader *requestHeader = httpHeaderNew(this->headerRedactList); - - // Set content length - httpHeaderAdd( - requestHeader, HTTP_HEADER_CONTENT_LENGTH_STR, - body == NULL || bufUsed(body) == 0 ? 
ZERO_STR : strNewFmt("%zu", bufUsed(body))); - - // Calculate content-md5 header if there is content - if (body != NULL) - { - char md5Hash[HASH_TYPE_MD5_SIZE_HEX]; - encodeToStr(encodeBase64, bufPtr(cryptoHashOne(HASH_TYPE_MD5_STR, body)), HASH_TYPE_M5_SIZE, md5Hash); - httpHeaderAdd(requestHeader, HTTP_HEADER_CONTENT_MD5_STR, STR(md5Hash)); - } + done = true; - // Generate authorization header - storageS3Auth( - this, verb, httpUriEncode(uri, true), query, storageS3DateTime(time(NULL)), requestHeader, - body == NULL || bufUsed(body) == 0 ? - STRDEF("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") : - bufHex(cryptoHashOne(HASH_TYPE_SHA256_STR, body))); - - // Get an http client - HttpClient *httpClient = httpClientCacheGet(this->httpClientCache); - - // Process request - Buffer *response = httpClientRequest(httpClient, verb, uri, query, requestHeader, body, returnContent); - - // Error if the request was not successful - if (!httpClientResponseCodeOk(httpClient) && - (!allowMissing || httpClientResponseCode(httpClient) != HTTP_RESPONSE_CODE_NOT_FOUND)) + MEM_CONTEXT_TEMP_BEGIN() { - // General error message - String *error = strNewFmt( - "S3 request failed with %u: %s", httpClientResponseCode(httpClient), - strPtr(httpClientResponseMessage(httpClient))); - - // Output uri/query - strCat(error, "\n*** URI/Query ***:"); - - strCatFmt(error, "\n%s", strPtr(httpUriEncode(uri, true))); - - if (query != NULL) - strCatFmt(error, "?%s", strPtr(httpQueryRender(query))); + // Create header list and add content length + HttpHeader *requestHeader = httpHeaderNew(this->headerRedactList); - // Output request headers - const StringList *requestHeaderList = httpHeaderList(requestHeader); + // Set content length + httpHeaderAdd( + requestHeader, HTTP_HEADER_CONTENT_LENGTH_STR, + body == NULL || bufUsed(body) == 0 ? 
ZERO_STR : strNewFmt("%zu", bufUsed(body))); - strCat(error, "\n*** Request Headers ***:"); - - for (unsigned int requestHeaderIdx = 0; requestHeaderIdx < strLstSize(requestHeaderList); requestHeaderIdx++) + // Calculate content-md5 header if there is content + if (body != NULL) { - const String *key = strLstGet(requestHeaderList, requestHeaderIdx); - - strCatFmt( - error, "\n%s: %s", strPtr(key), - httpHeaderRedact(requestHeader, key) || strEq(key, S3_HEADER_DATE_STR) ? - "" : strPtr(httpHeaderGet(requestHeader, key))); + char md5Hash[HASH_TYPE_MD5_SIZE_HEX]; + encodeToStr(encodeBase64, bufPtr(cryptoHashOne(HASH_TYPE_MD5_STR, body)), HASH_TYPE_M5_SIZE, md5Hash); + httpHeaderAdd(requestHeader, HTTP_HEADER_CONTENT_MD5_STR, STR(md5Hash)); } - // Output response headers - const HttpHeader *responseHeader = httpClientReponseHeader(httpClient); - const StringList *responseHeaderList = httpHeaderList(responseHeader); - - if (strLstSize(responseHeaderList) > 0) + // Generate authorization header + storageS3Auth( + this, verb, httpUriEncode(uri, true), query, storageS3DateTime(time(NULL)), requestHeader, + body == NULL || bufUsed(body) == 0 ? 
+ STRDEF("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") : + bufHex(cryptoHashOne(HASH_TYPE_SHA256_STR, body))); + + // Get an http client + HttpClient *httpClient = httpClientCacheGet(this->httpClientCache); + + // Process request + Buffer *response = httpClientRequest(httpClient, verb, uri, query, requestHeader, body, returnContent); + + // Error if the request was not successful + if (!httpClientResponseCodeOk(httpClient) && + (!allowMissing || httpClientResponseCode(httpClient) != HTTP_RESPONSE_CODE_NOT_FOUND)) { - strCat(error, "\n*** Response Headers ***:"); + // If there are retries remaining and a response parse it as XML to extract the S3 error code + if (response != NULL && retryRemaining > 0) + { + // Attempt to parse the XML and extract the S3 error code + TRY_BEGIN() + { + XmlNode *error = xmlDocumentRoot(xmlDocumentNewBuf(response)); + const String *errorCode = xmlNodeContent(xmlNodeChild(error, S3_XML_TAG_CODE_STR, true)); + + if (strEq(errorCode, S3_ERROR_REQUEST_TIME_TOO_SKEWED_STR)) + { + LOG_DEBUG( + "retry %s: %s", strPtr(errorCode), + strPtr(xmlNodeContent(xmlNodeChild(error, S3_XML_TAG_MESSAGE_STR, true)))); + + retryRemaining--; + done = false; + } + } + // On failure just drop through and report the error as usual + CATCH_ANY() + { + } + TRY_END(); + } - for (unsigned int responseHeaderIdx = 0; responseHeaderIdx < strLstSize(responseHeaderList); responseHeaderIdx++) + // If not done then retry instead of reporting the error + if (done) { - const String *key = strLstGet(responseHeaderList, responseHeaderIdx); - strCatFmt(error, "\n%s: %s", strPtr(key), strPtr(httpHeaderGet(responseHeader, key))); + // General error message + String *error = strNewFmt( + "S3 request failed with %u: %s", httpClientResponseCode(httpClient), + strPtr(httpClientResponseMessage(httpClient))); + + // Output uri/query + strCat(error, "\n*** URI/Query ***:"); + + strCatFmt(error, "\n%s", strPtr(httpUriEncode(uri, true))); + + if (query != NULL) + 
strCatFmt(error, "?%s", strPtr(httpQueryRender(query))); + + // Output request headers + const StringList *requestHeaderList = httpHeaderList(requestHeader); + + strCat(error, "\n*** Request Headers ***:"); + + for (unsigned int requestHeaderIdx = 0; requestHeaderIdx < strLstSize(requestHeaderList); requestHeaderIdx++) + { + const String *key = strLstGet(requestHeaderList, requestHeaderIdx); + + strCatFmt( + error, "\n%s: %s", strPtr(key), + httpHeaderRedact(requestHeader, key) || strEq(key, S3_HEADER_DATE_STR) ? + "" : strPtr(httpHeaderGet(requestHeader, key))); + } + + // Output response headers + const HttpHeader *responseHeader = httpClientReponseHeader(httpClient); + const StringList *responseHeaderList = httpHeaderList(responseHeader); + + if (strLstSize(responseHeaderList) > 0) + { + strCat(error, "\n*** Response Headers ***:"); + + for (unsigned int responseHeaderIdx = 0; responseHeaderIdx < strLstSize(responseHeaderList); responseHeaderIdx++) + { + const String *key = strLstGet(responseHeaderList, responseHeaderIdx); + strCatFmt(error, "\n%s: %s", strPtr(key), strPtr(httpHeaderGet(responseHeader, key))); + } + } + + // If there was content then output it + if (response!= NULL) + strCatFmt(error, "\n*** Response Content ***:\n%s", strPtr(strNewBuf(response))); + + THROW(ProtocolError, strPtr(error)); } } + else + { + // On success move the buffer to the calling context + result.httpClient = httpClient; + result.responseHeader = httpHeaderMove(httpHeaderDup(httpClientReponseHeader(httpClient), NULL), MEM_CONTEXT_OLD()); + result.response = bufMove(response, MEM_CONTEXT_OLD()); + } - // If there was content then output it - if (response!= NULL) - strCatFmt(error, "\n*** Response Content ***:\n%s", strPtr(strNewBuf(response))); - - THROW(ProtocolError, strPtr(error)); } - - // On success move the buffer to the calling context - result.httpClient = httpClient; - result.responseHeader = httpHeaderMove(httpHeaderDup(httpClientReponseHeader(httpClient), NULL), 
MEM_CONTEXT_OLD()); - result.response = bufMove(response, MEM_CONTEXT_OLD()); + MEM_CONTEXT_TEMP_END(); } - MEM_CONTEXT_TEMP_END(); + while (!done); FUNCTION_LOG_RETURN(STORAGE_S3_REQUEST_RESULT, result); } diff -Nru pgbackrest-2.15.1/src/storage/s3/storage.h pgbackrest-2.16/src/storage/s3/storage.h --- pgbackrest-2.15.1/src/storage/s3/storage.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/s3/storage.h 2019-08-05 16:03:04.000000000 +0000 @@ -15,7 +15,6 @@ /*********************************************************************************************************************************** Defaults ***********************************************************************************************************************************/ -#define STORAGE_S3_PORT_DEFAULT 443 #define STORAGE_S3_TIMEOUT_DEFAULT 60000 #define STORAGE_S3_PARTSIZE_MIN ((size_t)5 * 1024 * 1024) #define STORAGE_S3_DELETE_MAX 1000 diff -Nru pgbackrest-2.15.1/src/storage/storage.c pgbackrest-2.16/src/storage/storage.c --- pgbackrest-2.15.1/src/storage/storage.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/storage.c 2019-08-05 16:03:04.000000000 +0000 @@ -28,7 +28,7 @@ mode_t modeFile; mode_t modePath; bool write; - bool pathResolve; + bool pathEnforce; StoragePathExpressionCallback pathExpressionFunction; }; @@ -73,6 +73,7 @@ this->path = strDup(path); this->modeFile = modeFile; this->modePath = modePath; + this->pathEnforce = true; this->write = write; this->pathExpressionFunction = pathExpressionFunction; @@ -493,8 +494,8 @@ // Make sure the base storage path is contained within the path expression if (this->path != NULL && !strEqZ(this->path, "/")) { - if (!strBeginsWith(pathExp, this->path) || - !(strSize(pathExp) == strSize(this->path) || *(strPtr(pathExp) + strSize(this->path)) == '/')) + if (this->pathEnforce && (!strBeginsWith(pathExp, this->path) || + !(strSize(pathExp) == strSize(this->path) || *(strPtr(pathExp) + strSize(this->path)) == '/'))) { 
THROW_FMT(AssertError, "absolute path '%s' is not in base path '%s'", strPtr(pathExp), strPtr(this->path)); } @@ -525,7 +526,7 @@ String *expression = strNewN(strPtr(pathExp), (size_t)(end - strPtr(pathExp) + 1)); // Create a string from the path if there is anything left after the expression - const String *path = NULL; + String *path = NULL; if (strSize(expression) < strSize(pathExp)) { @@ -537,7 +538,7 @@ if (end[2] == 0) THROW_FMT(AssertError, "path '%s' should not end in '/'", strPtr(pathExp)); - path = STR(end + 2); + path = strNew(end + 2); } // Evaluate the path @@ -552,6 +553,7 @@ // Free temp vars strFree(expression); + strFree(path); } if (this->path == NULL) @@ -586,10 +588,6 @@ ASSERT(this->interface.pathCreate != NULL && storageFeature(this, storageFeaturePath)); ASSERT(this->write); - // It doesn't make sense to combine these parameters because if we are creating missing parent paths why error when they exist? - // If this somehow wasn't caught in testing, the worst case is that the path would not be created and an error would be thrown. - ASSERT(!(param.noParentCreate && param.errorOnExists)); - MEM_CONTEXT_TEMP_BEGIN() { // Build the path @@ -782,6 +780,22 @@ } /*********************************************************************************************************************************** +Set whether absolute paths are required to be in the base path +***********************************************************************************************************************************/ +void +storagePathEnforceSet(Storage *this, bool enforce) +{ + FUNCTION_TEST_BEGIN(); + FUNCTION_TEST_PARAM(STORAGE, this); + FUNCTION_TEST_PARAM(BOOL, enforce); + FUNCTION_TEST_END(); + + this->pathEnforce = enforce; + + FUNCTION_TEST_RETURN_VOID(); +} + +/*********************************************************************************************************************************** Get the storage type (posix, cifs, etc.) 
***********************************************************************************************************************************/ const String * diff -Nru pgbackrest-2.15.1/src/storage/storage.h pgbackrest-2.16/src/storage/storage.h --- pgbackrest-2.15.1/src/storage/storage.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/storage.h 2019-08-05 16:03:04.000000000 +0000 @@ -33,6 +33,10 @@ // noop. We'll error on any path operation (e.g. pathExists(), pathCreate(), non-recursive removes, error on missing paths, // etc.) for storage that does not support paths. storageFeaturePath, + + // Is the storage able to do compression and therefore store the file more efficiently than what was written? If so, the size + // will need to checked after write to see if it is different. + storageFeatureCompress, } StorageFeature; /*********************************************************************************************************************************** diff -Nru pgbackrest-2.15.1/src/storage/storage.intern.h pgbackrest-2.16/src/storage/storage.intern.h --- pgbackrest-2.15.1/src/storage/storage.intern.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/storage/storage.intern.h 2019-08-05 16:03:04.000000000 +0000 @@ -82,11 +82,15 @@ StoragePathExpressionCallback pathExpressionFunction, void *driver, StorageInterface interface); /*********************************************************************************************************************************** -Getters +Getters/Setters ***********************************************************************************************************************************/ void *storageDriver(const Storage *this); StorageInterface storageInterface(const Storage *this); +// The option is intended to be used only with the Perl interface since Perl is not tidy about where it reads. It should be +// removed when the Perl interface is removed. 
+void storagePathEnforceSet(Storage *this, bool enforce); + /*********************************************************************************************************************************** Macros for function logging ***********************************************************************************************************************************/ diff -Nru pgbackrest-2.15.1/src/version.h pgbackrest-2.16/src/version.h --- pgbackrest-2.15.1/src/version.h 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/src/version.h 2019-08-05 16:03:04.000000000 +0000 @@ -23,6 +23,6 @@ /*********************************************************************************************************************************** Software version. Currently this value is maintained in Version.pm and updated by test.pl. ***********************************************************************************************************************************/ -#define PROJECT_VERSION "2.15" +#define PROJECT_VERSION "2.16" #endif diff -Nru pgbackrest-2.15.1/test/code-count/file-type.yaml pgbackrest-2.16/test/code-count/file-type.yaml --- pgbackrest-2.15.1/test/code-count/file-type.yaml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/code-count/file-type.yaml 2019-08-05 16:03:04.000000000 +0000 @@ -191,10 +191,6 @@ class: core type: perl -lib/pgBackRest/Backup/Filter/PageChecksum.pm: - class: core - type: perl - lib/pgBackRest/Backup/Info.pm: class: core type: perl @@ -215,14 +211,6 @@ class: core/auto type: perl -lib/pgBackRest/Common/Http/Client.pm: - class: core - type: perl - -lib/pgBackRest/Common/Http/Common.pm: - class: core - type: perl - lib/pgBackRest/Common/Ini.pm: class: core type: perl @@ -263,10 +251,6 @@ class: core type: perl -lib/pgBackRest/Common/Xml.pm: - class: core - type: perl - lib/pgBackRest/Config/Config.pm: class: core type: perl @@ -323,10 +307,6 @@ class: core type: perl -lib/pgBackRest/Protocol/Local/Minion.pm: - class: core - type: perl - 
lib/pgBackRest/Protocol/Local/Process.pm: class: core type: perl @@ -367,63 +347,19 @@ class: core type: perl -lib/pgBackRest/Storage/Cifs/Driver.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Filter/CipherBlock.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Filter/Gzip.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Filter/Sha.pm: - class: core - type: perl - lib/pgBackRest/Storage/Helper.pm: class: core type: perl -lib/pgBackRest/Storage/Local.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Posix/Driver.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Posix/FileRead.pm: - class: core - type: perl - -lib/pgBackRest/Storage/Posix/FileWrite.pm: - class: core - type: perl - -lib/pgBackRest/Storage/S3/Auth.pm: - class: core - type: perl - -lib/pgBackRest/Storage/S3/Driver.pm: - class: core - type: perl - -lib/pgBackRest/Storage/S3/FileRead.pm: - class: core - type: perl - -lib/pgBackRest/Storage/S3/FileWrite.pm: +lib/pgBackRest/Storage/Storage.pm: class: core type: perl -lib/pgBackRest/Storage/S3/Info.pm: +lib/pgBackRest/Storage/StorageRead.pm: class: core type: perl -lib/pgBackRest/Storage/S3/Request.pm: +lib/pgBackRest/Storage/StorageWrite.pm: class: core type: perl @@ -471,25 +407,25 @@ class: core type: xs -libc/xs/crypto/cipherBlock.xs: +libc/xs/crypto/hash.xs: class: core type: xs -libc/xs/crypto/cipherBlock.xsh: +libc/xs/crypto/hash.xsh: class: core type: c/h -libc/xs/crypto/hash.xs: +libc/xs/crypto/random.xs: class: core type: xs -libc/xs/crypto/hash.xsh: +libc/xs/postgres/client.xs: class: core - type: c/h + type: xs -libc/xs/crypto/random.xs: +libc/xs/postgres/client.xsh: class: core - type: xs + type: c/h libc/xs/postgres/pageChecksum.xs: class: core @@ -499,6 +435,26 @@ class: core type: xs +libc/xs/storage/storage.xsh: + class: core + type: c/h + +libc/xs/storage/storageRead.xs: + class: core + type: xs + +libc/xs/storage/storageRead.xsh: + class: core + type: c/h + +libc/xs/storage/storageWrite.xs: + class: 
core + type: xs + +libc/xs/storage/storageWrite.xsh: + class: core + type: c/h + src/Makefile.in: class: build type: make @@ -571,6 +527,14 @@ class: core type: c/h +src/command/backup/file.c: + class: core + type: c + +src/command/backup/file.h: + class: core + type: c/h + src/command/backup/pageChecksum.c: class: core type: c @@ -579,6 +543,22 @@ class: core type: c/h +src/command/backup/protocol.c: + class: core + type: c + +src/command/backup/protocol.h: + class: core + type: c/h + +src/command/check/check.c: + class: core + type: c + +src/command/check/check.h: + class: core + type: c/h + src/command/command.c: class: core type: c @@ -587,11 +567,11 @@ class: core type: c/h -src/command/control/control.c: +src/command/control/common.c: class: core type: c -src/command/control/control.h: +src/command/control/common.h: class: core type: c/h @@ -827,6 +807,14 @@ class: core type: c/h +src/common/io/filter/sink.c: + class: core + type: c + +src/common/io/filter/sink.h: + class: core + type: c/h + src/common/io/filter/size.c: class: core type: c @@ -1155,6 +1143,30 @@ class: build type: make +src/db/db.c: + class: core + type: c + +src/db/db.h: + class: core + type: c/h + +src/db/helper.c: + class: core + type: c + +src/db/helper.h: + class: core + type: c/h + +src/db/protocol.c: + class: core + type: c + +src/db/protocol.h: + class: core + type: c/h + src/info/info.c: class: core type: c @@ -1223,6 +1235,14 @@ class: core/auto type: c +src/postgres/client.c: + class: core + type: c + +src/postgres/client.h: + class: core + type: c/h + src/postgres/interface.c: class: core type: c @@ -1619,10 +1639,6 @@ class: test/harness type: perl -test/lib/pgBackRestTest/Env/S3EnvTest.pm: - class: test/harness - type: perl - test/lib/pgBackRestTest/Module/Backup/BackupFileUnitPerlTest.pm: class: test/module type: perl @@ -1643,10 +1659,6 @@ class: test/module type: perl -test/lib/pgBackRestTest/Module/Common/CommonHttpClientPerlTest.pm: - class: test/module - type: perl - 
test/lib/pgBackRestTest/Module/Common/CommonIniPerlTest.pm: class: test/module type: perl @@ -1703,10 +1715,6 @@ class: test/module type: perl -test/lib/pgBackRestTest/Module/Performance/PerformanceIoTest.pm: - class: test/module - type: perl - test/lib/pgBackRestTest/Module/Protocol/ProtocolCommonMinionPerlTest.pm: class: test/module type: perl @@ -1723,43 +1731,11 @@ class: test/module type: perl -test/lib/pgBackRestTest/Module/Storage/StorageFilterCipherBlockPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageFilterGzipPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageFilterShaPerlTest.pm: - class: test/module - type: perl - test/lib/pgBackRestTest/Module/Storage/StorageHelperPerlTest.pm: class: test/module type: perl -test/lib/pgBackRestTest/Module/Storage/StorageLocalPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StoragePosixPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageS3AuthPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageS3CertPerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageS3PerlTest.pm: - class: test/module - type: perl - -test/lib/pgBackRestTest/Module/Storage/StorageS3RequestPerlTest.pm: +test/lib/pgBackRestTest/Module/Storage/StoragePerlTest.pm: class: test/module type: perl @@ -1795,6 +1771,14 @@ class: test/harness type: c/h +test/src/common/harnessPq.c: + class: test/harness + type: c + +test/src/common/harnessPq.h: + class: test/harness + type: c/h + test/src/common/harnessTest.c: class: test/harness type: c @@ -1827,6 +1811,14 @@ class: test/module type: c +test/src/module/command/backupTest.c: + class: test/module + type: c + +test/src/module/command/checkTest.c: + class: test/module + type: c + test/src/module/command/commandTest.c: class: test/module type: c @@ -2015,6 
+2007,10 @@ class: test/module type: c +test/src/module/db/dbTest.c: + class: test/module + type: c + test/src/module/info/infoArchiveTest.c: class: test/module type: c @@ -2039,6 +2035,10 @@ class: test/module type: c +test/src/module/postgres/clientTest.c: + class: test/module + type: c + test/src/module/postgres/interfaceTest.c: class: test/module type: c diff -Nru pgbackrest-2.15.1/test/container.yaml pgbackrest-2.16/test/container.yaml --- pgbackrest-2.15.1/test/container.yaml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/container.yaml 2019-08-05 16:03:04.000000000 +0000 @@ -13,8 +13,8 @@ # - docker tag pgbackrest/test:{vm}-base pgbackrest/test:{vm}-base-YYYYMMDDA # - docker push pgbackrest/test:{vm}-base-YYYYMMDDA # ********************************************************************************************************************************** -20190424A: - u18: 72f9a453557e21dd3269b8b3fba1b4b8d6dc9d77 - co6: 27a1efc146c200ad7592575808443bd086cf3e0d - co7: 5816e9219e7b5fb188331021c82559f9062de8b0 - u12: 20b1b60cf8f456c03dc8ebadaf7d264fe1d9f957 +20190725A: + u18: 91252e3e21ff553b231094194e13966065c32d6b + co6: 194df5323239146733c51e77e9324b495bb2c727 + co7: 3373903192a6dcf7c0b88c66ce9223da54ee2deb + u12: d3f1c8123b747b479586c2bbbf544c9a9cc42619 diff -Nru pgbackrest-2.15.1/test/define.yaml pgbackrest-2.16/test/define.yaml --- pgbackrest-2.15.1/test/define.yaml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/define.yaml 2019-08-05 16:03:04.000000000 +0000 @@ -21,6 +21,8 @@ # Some options are unique to tests: # * total - total runs in the test # * vm - VMs that the test will be run on +# * include - modules to include directly into test.c (all files in coverage are automatically included) +# This is useful when a module's internal data needs to be manipulated for testing but no coverage is added by the test. 
# ********************************************************************************************************************************** # ********************************************************************************************************************************** @@ -214,6 +216,7 @@ common/io/filter/buffer: full common/io/filter/filter: full common/io/filter/group: full + common/io/filter/sink: full common/io/filter/size: full common/io/handleRead: full common/io/handleWrite: full @@ -223,7 +226,7 @@ # ---------------------------------------------------------------------------------------------------------------------------- - name: io-tls - total: 3 + total: 4 coverage: common/io/tls/client: full @@ -277,13 +280,6 @@ total: 1 # ---------------------------------------------------------------------------------------------------------------------------- - - name: http-client-perl - total: 2 - - coverage: - Common/Http/Client: partial - - # ---------------------------------------------------------------------------------------------------------------------------- - name: reg-exp total: 3 @@ -301,44 +297,36 @@ - name: ini-perl total: 10 - coverage: - Common/Ini: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: io-handle-perl total: 6 - coverage: - Common/Io/Handle: full - # ---------------------------------------------------------------------------------------------------------------------------- - name: io-buffered-perl total: 3 - coverage: - Common/Io/Buffered: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: io-process-perl total: 3 - coverage: - Common/Io/Process: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: log-perl total: 1 - coverage: - Common/Log: partial - # 
******************************************************************************************************************************** - name: postgres test: # ---------------------------------------------------------------------------------------------------------------------------- + - name: client + total: 1 + + coverage: + postgres/client: full + + # ---------------------------------------------------------------------------------------------------------------------------- - name: interface - total: 5 + total: 6 coverage: postgres/interface: full @@ -423,82 +411,15 @@ test: # ---------------------------------------------------------------------------------------------------------------------------- - - name: filter-cipher-block-perl - total: 2 - - coverage: - Storage/Filter/CipherBlock: full - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: filter-gzip-perl - total: 3 - - coverage: - Storage/Filter/Gzip: full - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: filter-sha-perl - total: 2 - - coverage: - Storage/Filter/Sha: full - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: posix-perl - total: 10 - - coverage: - Storage/Posix/Driver: partial - Storage/Posix/FileRead: partial - Storage/Posix/FileWrite: partial - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: s3-auth-perl - total: 5 - - coverage: - Storage/S3/Auth: full - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: s3-cert-perl - total: 1 - - # ---------------------------------------------------------------------------------------------------------------------------- 
- - name: s3-request-perl - total: 2 - - coverage: - Storage/S3/Request: partial - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: s3-perl - total: 7 - - coverage: - Storage/S3/Driver: partial - Storage/S3/FileRead: partial - Storage/S3/FileWrite: full - - vm: - - co7 - - u14 - - u16 - - u18 - - d8 - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: local-perl - total: 10 + - name: perl + total: 13 coverage: - Storage/Local: partial + Storage/Storage: partial # ---------------------------------------------------------------------------------------------------------------------------- - name: helper-perl - total: 4 - - coverage: - Storage/Helper: partial + total: 3 # ---------------------------------------------------------------------------------------------------------------------------- - name: cifs @@ -508,7 +429,9 @@ storage/cifs/storage: full storage/posix/storage: full storage/helper: full - storage/storage: full + + include: + - storage/storage # ---------------------------------------------------------------------------------------------------------------------------- - name: posix @@ -534,10 +457,12 @@ storage/remote/storage: full storage/remote/write: full storage/helper: full - storage/read: full - storage/write: full storage/storage: full + include: + - storage/read + - storage/write + # ---------------------------------------------------------------------------------------------------------------------------- - name: s3 total: 3 @@ -557,16 +482,10 @@ - name: common-minion-perl total: 1 - coverage: - Protocol/Base/Minion: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: helper-perl total: 2 - coverage: - Protocol/Helper: partial - # 
---------------------------------------------------------------------------------------------------------------------------- - name: protocol total: 8 @@ -609,9 +528,6 @@ - name: info-archive-perl total: 4 - coverage: - Archive/Info: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: info-backup total: 2 @@ -623,8 +539,19 @@ - name: info-backup-perl total: 3 + # ******************************************************************************************************************************** + - name: db + + test: + # ---------------------------------------------------------------------------------------------------------------------------- + - name: db + total: 2 + perlReq: true + coverage: - Backup/Info: partial + db/db: full + db/helper: full + db/protocol: full # ******************************************************************************************************************************** - name: command @@ -641,9 +568,6 @@ - name: archive-common-perl total: 4 - coverage: - Archive/Common: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: archive-get total: 5 @@ -658,9 +582,6 @@ - name: archive-get-perl total: 1 - coverage: - Archive/Get/File: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: archive-push total: 4 @@ -680,6 +601,24 @@ command/backup/pageChecksum: full # ---------------------------------------------------------------------------------------------------------------------------- + - name: backup + total: 3 + + coverage: + command/backup/file: full + command/backup/protocol: full + + include: + - storage/storage + + # ---------------------------------------------------------------------------------------------------------------------------- + - name: check + total: 1 + + 
coverage: + command/check/check: full + + # ---------------------------------------------------------------------------------------------------------------------------- - name: command total: 1 @@ -691,7 +630,7 @@ total: 2 coverage: - command/control/control: full + command/control/common: full # ---------------------------------------------------------------------------------------------------------------------------- - name: expire @@ -751,17 +690,10 @@ - name: unit-perl total: 4 - coverage: - Backup/Common: full - Backup/Backup: partial - # ---------------------------------------------------------------------------------------------------------------------------- - name: file-unit-perl total: 2 - coverage: - Backup/File: partial - # ******************************************************************************************************************************** - name: manifest @@ -770,9 +702,6 @@ - name: all-perl total: 11 - coverage: - Manifest: partial - # ******************************************************************************************************************************** - name: stanza @@ -781,9 +710,6 @@ - name: all-perl total: 9 - coverage: - Stanza: full - # ********************************************************************************************************************************** # Integration tests # @@ -840,7 +766,3 @@ # ---------------------------------------------------------------------------------------------------------------------------- - name: archive total: 1 - - # ---------------------------------------------------------------------------------------------------------------------------- - - name: io - total: 1 diff -Nru pgbackrest-2.15.1/test/expect/mock-all-001.log pgbackrest-2.16/test/expect/mock-all-001.log --- pgbackrest-2.15.1/test/expect/mock-all-001.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-all-001.log 2019-08-05 16:03:04.000000000 +0000 @@ -99,9 +99,9 @@ P00 INFO: exclude 
postgresql.auto.conf from backup using 'postgresql.auto.conf' exclusion P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b P00 WARN: invalid page checksums found in file [TEST_PATH]/db-master/db/base/base/32768/33001 at pages 0, 3-5, 7 -P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P00 WARN: invalid page checksum found in file [TEST_PATH]/db-master/db/base/base/16384/17000 at page 1 P01 INFO: backup file [TEST_PATH]/db-master/db/base/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c @@ -113,7 +113,7 @@ P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/16384/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: backup file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: backup file [TEST_PATH]/db-master/db/base/PG_VERSION (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 -P01 INFO: backup file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 
100%) +P01 INFO: backup file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? (0B, 100%) P00 INFO: full backup size = 192KB P00 INFO: new backup label = [BACKUP-FULL-1] P00 INFO: backup command end: completed successfully @@ -195,16 +195,16 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","size":[SIZE],"timestamp":[TIMESTAMP-1],"user":false} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","group":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_hba.conf={"checksum":"dd4cea0cae348309f9de28ad4ded8ee2cc2e6d5b","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} [target:file:default] group="[GROUP-1]" @@ -358,9 +358,9 @@ P00 TEST: PgBaCkReStTeSt-BACKUP-RESUME-PgBaCkReStTeSt P00 DETAIL: clean resumed backup path: [TEST_PATH]/db-master/repo/backup/db/[BACKUP-FULL-2] P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: checksum resumed file 
[TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -374,7 +374,7 @@ P01 DETAIL: checksum resumed file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: backup file [TEST_PATH]/db-master/db/base/PG_VERSION (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: backup file [TEST_PATH]/db-master/db/base/zero_from_start (0B, 100%) -P01 INFO: backup file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 100%) +P01 INFO: backup file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? (0B, 100%) P00 INFO: full backup size = 192KB P00 INFO: new backup label = [BACKUP-FULL-2] P00 INFO: backup command end: completed successfully @@ -456,10 +456,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","size":[SIZE],"timestamp":[TIMESTAMP-1],"user":false} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","group":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"238a131a3e8eb98d1fc5b27d882ca40b7618fd2a","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changetime.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -467,7 +467,7 @@ pg_data/pg_hba.conf={"checksum":"dd4cea0cae348309f9de28ad4ded8ee2cc2e6d5b","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} [target:file:default] @@ -584,9 +584,9 @@ P00 DETAIL: remove file [TEST_PATH]/db-master/db/base/backup_label.old P00 INFO: cleanup removed 18 files, 1 link, 3 paths P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 - exists and matches backup (64KB, 33%) checksum 
6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/12000 - exists and matches backup (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -600,7 +600,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/PG_VERSION - exists and matches backup (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/zero_from_start - exists and is zero size (0B, 100%) -P01 DETAIL: restore file 
[TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 100%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? - exists and is zero size (0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -628,9 +628,9 @@ P00 DETAIL: set ownership [USER-1]:[GROUP-1] on [TEST_PATH]/db-master/db/base/base/16384/PG_VERSION P00 DETAIL: set ownership [USER-1]:[GROUP-1] on [TEST_PATH]/db-master/db/base/base/1/PG_VERSION P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 - exists and matches backup (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 - exists and matches backup (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file 
[TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/12000 - exists and matches backup (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -644,7 +644,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/PG_VERSION - exists and matches backup (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/zero_from_start - exists and is zero size (0B, 100%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 100%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? - exists and is zero size (0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -671,9 +671,9 @@ P00 DETAIL: preserve file [TEST_PATH]/db-master/db/base/recovery.conf P00 DETAIL: remove link [TEST_PATH]/db-master/db/base/pg_stat - destination changed P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 - exists and matches backup (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 
4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 - exists and matches backup (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/12000 - exists and matches backup (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -687,7 +687,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/PG_VERSION - exists and matches backup (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/zero_from_start - exists and is zero size (0B, 100%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 100%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? 
- exists and is zero size (0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -716,9 +716,9 @@ P00 INFO: remove invalid files/paths/links from [TEST_PATH]/db-master/db/base P00 DETAIL: preserve file [TEST_PATH]/db-master/db/base/recovery.conf P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 - exists and matches backup (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 - exists and matches backup (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/12000 - exists and matches backup (8KB, 99%) 
checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -732,7 +732,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/PG_VERSION - exists and matches backup (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/zero_from_start - exists and is zero size (0B, 100%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 100%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? - exists and is zero size (0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -784,9 +784,9 @@ P00 DETAIL: check [TEST_PATH]/db-master/db/pg_config exists P00 INFO: remove invalid files/paths/links from [TEST_PATH]/db-master/db/pg_config P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: restore file 
[TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -800,7 +800,7 @@ P01 INFO: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: restore file [TEST_PATH]/db-master/db/base/PG_VERSION (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: restore file [TEST_PATH]/db-master/db/base/zero_from_start (0B, 100%) -P01 INFO: restore file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 100%) +P01 INFO: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? 
(0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -844,9 +844,9 @@ P00 DETAIL: remove link [TEST_PATH]/db-master/db/base/pg_hba.conf P00 INFO: cleanup removed 3 links P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33001 - exists and matches size 65536 and modification time [MODIFICATION-TIME-1] (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/44000_init - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000.32767 - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/32768/33000 - exists and matches size 32768 and modification time [MODIFICATION-TIME-1] (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/16384/17000 - exists and matches size 16384 and modification time [MODIFICATION-TIME-1] (16KB, 
91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/12000 - exists and matches size 8192 and modification time [MODIFICATION-TIME-1] (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -860,7 +860,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/base/1/PG_VERSION - exists and matches size 3 and modification time [MODIFICATION-TIME-1] (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: restore file [TEST_PATH]/db-master/db/base/PG_VERSION (3B, 100%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/zero_from_start - exists and matches size 0 and modification time [MODIFICATION-TIME-1] (0B, 100%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and matches size 0 and modification time [MODIFICATION-TIME-1] (0B, 100%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base/special-!_.*'()&!@;:+,? - exists and matches size 0 and modification time [MODIFICATION-TIME-1] (0B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base/recovery.conf P00 INFO: restore global/pg_control (performed last to ensure aborted restores cannot be started) P00 INFO: restore command end: completed successfully @@ -994,8 +994,8 @@ HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the maximum. 
P00 INFO: last backup label = [BACKUP-FULL-2], version = [VERSION-1] P00 WARN: incr backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] -P00 ERROR: [055]: unable to stat '[TEST_PATH]/db-master/db/base_tbs': No such file or directory -P00 INFO: backup command end: aborted with exception [055] +P00 ERROR: [073]: unable to list file info for missing path '[TEST_PATH]/db-master/db/base_tbs' +P00 INFO: backup command end: aborted with exception [073] incr backup - invalid tablespace in $PGDATA (db-master host) > [CONTAINER-EXEC] db-master [BACKREST-BIN] --config=[TEST_PATH]/db-master/pgbackrest.conf --no-online --stanza=db backup @@ -1102,10 +1102,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"238a131a3e8eb98d1fc5b27d882ca40b7618fd2a","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changesize.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1113,7 +1113,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"14c44cef6287269b08d41de489fd492bb9fc795d","checksum-page":false,"master":false,"size":[SIZE],"timestamp":[TIMESTAMP-2]} @@ -1193,9 +1193,9 @@ P00 TEST: PgBaCkReStTeSt-BACKUP-RESUME-PgBaCkReStTeSt P00 DETAIL: clean resumed backup path: [TEST_PATH]/db-master/repo/backup/db/[BACKUP-INCR-2] P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/1/12000 (8KB, 99%) checksum 
22c98d248ff548311eda88559e4a8405ed77c003 @@ -1299,10 +1299,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changesize.txt={"checksum":"3905d5be2ec8d67f41435dab5e0dcda3ae47455d","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1310,7 +1310,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1397,9 +1397,9 @@ P00 TEST: PgBaCkReStTeSt-BACKUP-NORESUME-PgBaCkReStTeSt P00 WARN: diff backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 
4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -1499,17 +1499,17 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changetime.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1593,9 +1593,9 @@ P00 TEST: PgBaCkReStTeSt-BACKUP-NORESUME-PgBaCkReStTeSt P00 WARN: diff backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/32768/33000 (32KB, 83%) checksum 
7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -1695,17 +1695,17 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changetime.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1798,9 +1798,9 @@ P00 DETAIL: check [TEST_PATH]/db-master/db/tablespace/ts1-2 
exists P00 DETAIL: check [TEST_PATH]/db-master/db/tablespace/ts2-2 exists P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/16384/17000 (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -1815,7 +1815,7 @@ P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/zerosize.txt (0B, 99%) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/zero_from_start (0B, 99%) -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 99%) +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/special-!_.*'()&!@;:+,? 
(0B, 99%) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt (7B, 99%) checksum dc7f76e43c46101b47acc55ae4d593a9e6983578 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt (7B, 100%) checksum d85de07d6421d90aa9191c11c889bfde43680f0f P00 INFO: write [TEST_PATH]/db-master/db/base-2/recovery.conf @@ -1842,9 +1842,9 @@ P00 INFO: remove invalid files/paths/links from [TEST_PATH]/db-master/db/base-2 P00 DETAIL: preserve file [TEST_PATH]/db-master/db/base-2/recovery.conf P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33001 - exists and matches backup (64KB, 33%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init - exists and matches backup (32KB, 49%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 - exists and matches backup (32KB, 66%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000 - exists and matches backup (32KB, 83%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/16384/17000 - exists and matches backup (16KB, 91%) checksum e0101dd8ffb910c9c202ca35b5f828bcb9697bed P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/global/pg_control.pgbackrest.tmp 
(8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/1/12000 - exists and matches backup (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 @@ -1859,7 +1859,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zerosize.txt - exists and is zero size (0B, 99%) P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zero_from_start - exists and is zero size (0B, 99%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 99%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-!_.*'()&!@;:+,? - exists and is zero size (0B, 99%) P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt - exists and matches backup (7B, 99%) checksum dc7f76e43c46101b47acc55ae4d593a9e6983578 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt - exists and matches backup (7B, 100%) checksum d85de07d6421d90aa9191c11c889bfde43680f0f P00 INFO: write [TEST_PATH]/db-master/db/base-2/recovery.conf @@ -1967,10 +1967,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1978,7 +1978,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2134,9 +2134,9 @@ P00 WARN: incr backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-INCR-3] P00 WARN: file pg_data/base/16384/17000 timestamp in the past or size changed but timestamp did not, enabling delta checksum P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 
7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/postgresql.conf (21B, 99%) checksum 6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -2238,10 +2238,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"reference":"[BACKUP-INCR-3]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2249,7 +2249,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2328,9 +2328,9 @@ P00 INFO: last backup label = [BACKUP-FULL-2], version = [VERSION-1] P00 WARN: diff backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/postgresql.conf (21B, 99%) checksum 
6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -2436,10 +2436,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2447,7 +2447,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2613,10 +2613,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"reference":"[BACKUP-DIFF-3]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"reference":"[BACKUP-DIFF-3]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-DIFF-3]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2624,7 +2624,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-3]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-3]","size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2708,9 +2708,9 @@ P00 WARN: diff backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] P00 TEST: PgBaCkReStTeSt-MANIFEST-BUILD-PgBaCkReStTeSt P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 DETAIL: match file from prior 
backup [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 DETAIL: match file from prior backup [TEST_PATH]/db-master/db/base-2/postgresql.conf (21B, 99%) checksum 6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -2813,17 +2813,17 @@ pg_data/base/1/12000={"checksum":"22c98d248ff548311eda88559e4a8405ed77c003","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changetime.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2903,9 +2903,9 @@ P00 WARN: option repo1-retention-full is not set, the repository may run out of space HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the maximum. P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/global/pg_control (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/postgresql.conf (21B, 99%) checksum 6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -2920,7 +2920,7 @@ P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/zerosize.txt (0B, 99%) P01 INFO: backup file 
[TEST_PATH]/db-master/db/base-2/zero_from_start (0B, 99%) -P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 99%) +P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/special-!_.*'()&!@;:+,? (0B, 99%) P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2c.txt (12B, 99%) checksum dfcb8679956b734706cf87259d50c88f83e80e66 P01 INFO: backup file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt (7B, 100%) checksum dc7f76e43c46101b47acc55ae4d593a9e6983578 P00 INFO: full backup size = 176KB @@ -3008,17 +3008,17 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changetime.txt={"checksum":"88087292ed82e26f3eb824d0bffc05ccf7a30f8d","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3475,7 +3475,7 @@ P00 DETAIL: hardlink pg_data/global/pg_control to [BACKUP-FULL-3] P00 DETAIL: hardlink pg_data/pg_stat/global.stat to [BACKUP-FULL-3] P00 DETAIL: hardlink pg_data/postgresql.conf to [BACKUP-FULL-3] -P00 DETAIL: hardlink pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% to [BACKUP-FULL-3] +P00 DETAIL: hardlink 
pg_data/special-!_.*'()&!@;:+,? to [BACKUP-FULL-3] P00 DETAIL: hardlink pg_data/zero_from_start to [BACKUP-FULL-3] P00 DETAIL: hardlink pg_data/zerosize.txt to [BACKUP-FULL-3] P00 DETAIL: hardlink pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt to [BACKUP-FULL-3] @@ -3566,10 +3566,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3577,7 +3577,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3677,7 
+3677,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zerosize.txt - exists and is zero size (0B, 99%) P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zero_from_start - exists and is zero size (0B, 99%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 99%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-!_.*'()&!@;:+,? - exists and is zero size (0B, 99%) P01 DETAIL: restore zeroed file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2c.txt (12B, 99%) P01 DETAIL: restore zeroed file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt (7B, 100%) P00 INFO: write [TEST_PATH]/db-master/db/base-2/recovery.conf @@ -3702,9 +3702,9 @@ P00 DETAIL: databases for include/exclude (1, 16384, 32768) P00 DETAIL: database filter: (^pg_data\/base\/16384\/)|(^pg_tblspc/2\/[TS_PATH-1]\/16384\/) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/44000_init (32KB, 54%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: restore file 
[TEST_PATH]/db-master/db/base-2/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/base/1/12000 - exists and matches backup (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/postgresql.conf - exists and matches backup (21B, 99%) checksum 6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -3720,7 +3720,7 @@ P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/PG_VERSION - exists and matches backup (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zerosize.txt - exists and is zero size (0B, 99%) P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/zero_from_start - exists and is zero size (0B, 99%) -P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% - exists and is zero size (0B, 99%) +P01 DETAIL: restore file [TEST_PATH]/db-master/db/base-2/special-!_.*'()&!@;:+,? 
- exists and is zero size (0B, 99%) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2c.txt (12B, 99%) checksum dfcb8679956b734706cf87259d50c88f83e80e66 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt (7B, 100%) checksum dc7f76e43c46101b47acc55ae4d593a9e6983578 P00 INFO: write [TEST_PATH]/db-master/db/base-2/recovery.conf @@ -3763,9 +3763,9 @@ P00 DETAIL: check [TEST_PATH]/db-master/db/base-2/base exists P00 DETAIL: check [TEST_PATH]/db-master/db/base-2/tablespace exists P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/33001 (64KB, 36%) checksum 6bf316f11d28c28914ea9be92c00de9bea6d9a6b -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/44000_init (32KB, 54%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/33000.32767 (32KB, 72%) checksum 21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5 -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/33000 (32KB, 90%) checksum 4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/44000_init (32KB, 54%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/33000.32767 (32KB, 72%) checksum 6e99b589e550e68e934fd235ccba59fe5b592a9e +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/32768/33000 (32KB, 90%) checksum 7a16d165e4775f7c92e8cdf60c0af57313f0bf90 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/global/pg_control.pgbackrest.tmp (8KB, 95%) checksum 4c77c900f7af0d9ab13fa9982051a42e0b637f6c P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/base/1/12000 (8KB, 99%) checksum 22c98d248ff548311eda88559e4a8405ed77c003 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/postgresql.conf (21B, 99%) checksum 
6721d92c9fcdf4248acff1f9a1377127d9064807 @@ -3781,7 +3781,7 @@ P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/PG_VERSION (3B, 99%) checksum 184473f470864e067ee3a22e64b47b0a1c356f29 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/zerosize.txt (0B, 99%) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/zero_from_start (0B, 99%) -P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?% (0B, 99%) +P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/special-!_.*'()&!@;:+,? (0B, 99%) P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2c.txt (12B, 99%) checksum dfcb8679956b734706cf87259d50c88f83e80e66 P01 INFO: restore file [TEST_PATH]/db-master/db/base-2/base/pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt (7B, 100%) checksum dc7f76e43c46101b47acc55ae4d593a9e6983578 P00 INFO: write [TEST_PATH]/db-master/db/base-2/base/recovery.conf @@ -4058,10 +4058,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -4069,7 +4069,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
-pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -4231,10 +4231,10 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/44000_init={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/44000_init={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -4242,7 +4242,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} diff -Nru pgbackrest-2.15.1/test/expect/mock-all-002.log pgbackrest-2.16/test/expect/mock-all-002.log --- pgbackrest-2.15.1/test/expect/mock-all-002.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-all-002.log 2019-08-05 16:03:04.000000000 +0000 @@ -188,15 +188,15 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_hba.conf={"checksum":"dd4cea0cae348309f9de28ad4ded8ee2cc2e6d5b","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} [target:file:default] group="[GROUP-1]" @@ -257,7 +257,7 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 WARN: option repo1-retention-full is not set, the repository may run out of space HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the maximum. 
-P00 WARN: backup [BACKUP-FULL-1] missing in repository removed from backup.info +P00 WARN: backup [BACKUP-FULL-1] missing manifest removed from backup.info P00 WARN: --no-online passed and postmaster.pid exists but --force was passed so backup will continue though it looks like the postmaster is running and the backup will probably not be consistent P00 WARN: aborted backup [BACKUP-FULL-2] of same type exists, will be cleaned to remove invalid files and resumed P00 TEST: PgBaCkReStTeSt-BACKUP-RESUME-PgBaCkReStTeSt @@ -375,8 +375,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"238a131a3e8eb98d1fc5b27d882ca40b7618fd2a","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -385,7 +385,7 @@ pg_data/pg_hba.conf={"checksum":"dd4cea0cae348309f9de28ad4ded8ee2cc2e6d5b","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} [target:file:default] @@ -666,8 +666,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"238a131a3e8eb98d1fc5b27d882ca40b7618fd2a","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -676,7 +676,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
-pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"14c44cef6287269b08d41de489fd492bb9fc795d","checksum-page":false,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} @@ -743,7 +743,7 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 WARN: option repo1-retention-full is not set, the repository may run out of space HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the maximum. 
-P00 WARN: backup [BACKUP-INCR-1] missing in repository removed from backup.info +P00 WARN: backup [BACKUP-INCR-1] missing manifest removed from backup.info P00 WARN: incr backup cannot alter 'checksum-page' option to 'false', reset to 'true' from [BACKUP-FULL-2] P00 WARN: file pg_data/changetime.txt timestamp in the past or size changed but timestamp did not, enabling delta checksum P00 WARN: aborted backup [BACKUP-INCR-2] of same type exists, will be cleaned to remove invalid files and resumed @@ -866,8 +866,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -876,7 +876,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1073,8 +1073,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1083,7 +1083,7 @@ 
pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1277,8 +1277,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
-pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1287,7 +1287,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
-pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/1/[TS_PATH-1]/16384/tablespace1.txt={"checksum":"d85de07d6421d90aa9191c11c889bfde43680f0f","checksum-page":false,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1501,8 +1501,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"e0101dd8ffb910c9c202ca35b5f828bcb9697bed","checksum-page":false,"checksum-page-error":[1],"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1512,7 +1512,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1725,8 +1725,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"reference":"[BACKUP-INCR-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1736,7 +1736,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1930,8 +1930,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -1941,7 +1941,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2133,8 +2133,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"9a53d532e27785e681766c98516a5e93f096a501","checksum-page":false,"reference":"[BACKUP-DIFF-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"09b5e31766be1dba1ec27de82f975c1b6eea2a92","checksum-page":false,"reference":"[BACKUP-DIFF-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2144,7 +2144,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-DIFF-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"reference":"[BACKUP-DIFF-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2339,8 +2339,8 @@ pg_data/base/1/12000={"checksum":"22c98d248ff548311eda88559e4a8405ed77c003","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","checksum-page":true,"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","checksum-page":false,"checksum-page-error":[0,[3,5],7],"master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2349,7 +2349,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"reference":"[BACKUP-FULL-2]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","checksum-page":false,"master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2539,8 +2539,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"mode":"0660","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/changecontent.txt={"checksum":"a094d94583e209556d03c3c5da33131a065f1689","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -2549,7 +2549,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
+pg_data/special-!_.*'()&!@;:+,?={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","master":false,"repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3090,8 +3090,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3101,7 +3101,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3350,6 +3350,7 @@ P01 INFO: backup file db-master:[TEST_PATH]/db-master/db/base-2/base/base/base2.txt (9B, 100%) checksum cafac3c59553f2cfde41ce2e62e7662295f108c0 P00 INFO: diff backup size = 9B P00 INFO: new backup label = [BACKUP-DIFF-6] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: backup command end: completed successfully P00 INFO: expire command begin 
P00 INFO: option 'repo1-retention-archive' is not set - archive logs will not be expired @@ -3472,8 +3473,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3483,7 +3484,7 @@ 
pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3558,6 +3559,7 @@ P01 INFO: backup file db-master:[TEST_PATH]/db-master/db/base-2/base/base/base2.txt (9B, 100%) checksum cafac3c59553f2cfde41ce2e62e7662295f108c0 P00 INFO: diff backup size = 9B P00 INFO: new backup label = [BACKUP-DIFF-7] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: backup command end: completed successfully P00 INFO: expire command begin P00 INFO: option 'repo1-retention-archive' is not set - archive logs will not be expired @@ -3679,8 +3681,8 @@ pg_data/base/1/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","mode":"0660","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} 
pg_data/base/16384/17000={"checksum":"7579ada0808d7f98087a0a586d0df9de009cdc33","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/16384/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000={"checksum":"4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} -pg_data/base/32768/33000.32767={"checksum":"21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000={"checksum":"7a16d165e4775f7c92e8cdf60c0af57313f0bf90","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/base/32768/33000.32767={"checksum":"6e99b589e550e68e934fd235ccba59fe5b592a9e","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/33001={"checksum":"6bf316f11d28c28914ea9be92c00de9bea6d9a6b","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/32768/PG_VERSION={"checksum":"184473f470864e067ee3a22e64b47b0a1c356f29","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/base/base2.txt={"checksum":"cafac3c59553f2cfde41ce2e62e7662295f108c0","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} @@ -3690,7 +3692,7 @@ pg_data/global/pg_control={"checksum":"4c77c900f7af0d9ab13fa9982051a42e0b637f6c","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_data/pg_stat/global.stat={"checksum":"e350d5ce0153f3e22d5db21cf2a4eff00f3ee877","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} 
pg_data/postgresql.conf={"checksum":"6721d92c9fcdf4248acff1f9a1377127d9064807","master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} -pg_data/special-@!#$^&*()-_+~`{}[]\|:;"<>',.?%={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} +pg_data/special-!_.*'()&!@;:+,?={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zero_from_start={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} pg_data/zerosize.txt={"master":true,"reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-2]} pg_tblspc/2/[TS_PATH-1]/32768/tablespace2.txt={"checksum":"dc7f76e43c46101b47acc55ae4d593a9e6983578","reference":"[BACKUP-FULL-3]","repo-size":[SIZE],"size":[SIZE],"timestamp":[TIMESTAMP-1]} diff -Nru pgbackrest-2.15.1/test/expect/mock-archive-002.log pgbackrest-2.16/test/expect/mock-archive-002.log --- pgbackrest-2.15.1/test/expect/mock-archive-002.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-archive-002.log 2019-08-05 16:03:04.000000000 +0000 @@ -29,6 +29,8 @@ > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online --force stanza-create ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --force --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-2] 
--pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: completed successfully + supplemental file: /backup/db/backup.info diff -Nru pgbackrest-2.15.1/test/expect/mock-archive-stop-002.log pgbackrest-2.16/test/expect/mock-archive-stop-002.log --- pgbackrest-2.15.1/test/expect/mock-archive-stop-002.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-archive-stop-002.log 2019-08-05 16:03:04.000000000 +0000 @@ -5,6 +5,8 @@ > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online stanza-create ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: completed successfully + supplemental file: /backup/db/backup.info diff 
-Nru pgbackrest-2.15.1/test/expect/mock-stanza-001.log pgbackrest-2.16/test/expect/mock-stanza-001.log --- pgbackrest-2.15.1/test/expect/mock-stanza-001.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-stanza-001.log 2019-08-05 16:03:04.000000000 +0000 @@ -209,7 +209,7 @@ > [CONTAINER-EXEC] db-master [BACKREST-BIN] --config=[TEST_PATH]/db-master/pgbackrest.conf --stanza=db --no-online --force stanza-create ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --config=[TEST_PATH]/db-master/pgbackrest.conf --db-timeout=45 --force --lock-path=[TEST_PATH]/db-master/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/db-master/log --log-subprocess --no-log-timestamp --no-online --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-path=[TEST_PATH]/db-master/repo --stanza=db -P00 ERROR: [041]: unable to open '[TEST_PATH]/db-master/repo/archive/db/9.3-1/0000000100000001/000000010000000100000001-488ba4b8b98acc510bce86b8f16e3c1ed9886a29.gz': Permission denied +P00 ERROR: [041]: unable to open file '[TEST_PATH]/db-master/repo/archive/db/9.3-1/0000000100000001/000000010000000100000001-488ba4b8b98acc510bce86b8f16e3c1ed9886a29.gz' for read: [13] Permission denied P00 INFO: stanza-create command end: aborted with exception [041] + supplemental file: [TEST_PATH]/db-master/repo/backup/db/backup.info @@ -858,12 +858,12 @@ P00 INFO: stanza-delete command end: aborted with exception [055] db must not exist for successful delete -> ls [TEST_PATH]/db-master/repo/backup/ +> list backup ------------------------------------------------------------------------------------------------------------------------------------ db db must not exist for successful delete -> ls [TEST_PATH]/db-master/repo/archive/ +> list archive 
------------------------------------------------------------------------------------------------------------------------------------ db @@ -880,9 +880,9 @@ P00 INFO: stanza-delete command end: completed successfully db must not exist for successful delete -> ls [TEST_PATH]/db-master/repo/backup/ +> list backup ------------------------------------------------------------------------------------------------------------------------------------ db must not exist for successful delete -> ls [TEST_PATH]/db-master/repo/archive/ +> list archive ------------------------------------------------------------------------------------------------------------------------------------ diff -Nru pgbackrest-2.15.1/test/expect/mock-stanza-002.log pgbackrest-2.16/test/expect/mock-stanza-002.log --- pgbackrest-2.15.1/test/expect/mock-stanza-002.log 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/expect/mock-stanza-002.log 2019-08-05 16:03:04.000000000 +0000 @@ -16,12 +16,16 @@ HINT: is archive_command configured in postgresql.conf? HINT: has a stanza-create been performed? HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving scheme. 
+P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-upgrade command end: aborted with exception [055] stanza-create db - successfully create the stanza (backup host) > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online stanza-create ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: completed successfully + supplemental file: /backup/db/backup.info @@ -67,6 +71,8 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online 
--pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 INFO: stanza-create was already performed +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: completed successfully + supplemental file: /backup/db/backup.info @@ -113,6 +119,8 @@ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 ERROR: [028]: backup info file or archive info file invalid HINT: use stanza-upgrade if the database has been upgraded or use --force +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: aborted with exception [028] + supplemental file: /backup/db/backup.info @@ -158,6 +166,8 @@ 
------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-upgrade command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 INFO: the stanza data is already up to date +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-upgrade command end: completed successfully + supplemental file: /backup/db/backup.info @@ -209,6 +219,8 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --force --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev 
--repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 ERROR: [055]: archive information missing and repo is encrypted and info file(s) are missing, --force cannot be used +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: aborted with exception [055] + supplemental file: /backup/db/backup.info @@ -246,6 +258,8 @@ > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online stanza-upgrade ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-upgrade command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-upgrade command end: completed successfully + supplemental file: /backup/db/backup.info @@ -309,6 +323,8 @@ P01 INFO: backup file db-master:[TEST_PATH]/db-master/db/base/pg_xlog/archive_status/000000010000000100000001.ready (0B, 100%) P00 INFO: full backup size = 48MB P00 INFO: new backup label = [BACKUP-FULL-1] +P00 DETAIL: 
tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: backup command end: completed successfully P00 INFO: expire command begin P00 INFO: remove archive path: /archive/db/9.3-1 @@ -381,6 +397,8 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 ERROR: [055]: backup information missing and repo is encrypted and info file(s) are missing, --force cannot be used +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: aborted with exception [055] + supplemental file: /archive/db/archive.info @@ -407,6 +425,8 @@ ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-create command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --force --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp 
--no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db P00 ERROR: [055]: backup information missing and repo is encrypted and info file(s) are missing, --force cannot be used +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-create command end: aborted with exception [055] + supplemental file: /archive/db/archive.info @@ -432,6 +452,8 @@ > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online stanza-upgrade ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-upgrade command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --no-online --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= --repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-upgrade command 
end: completed successfully + supplemental file: /backup/db/backup.info @@ -495,6 +517,8 @@ P01 INFO: backup file db-master:[TEST_PATH]/db-master/db/base/pg_xlog/archive_status/000000010000000100000001.ready (0B, 100%) P00 INFO: full backup size = 48MB P00 INFO: new backup label = [BACKUP-FULL-2] +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: backup command end: completed successfully P00 INFO: expire command begin P00 DETAIL: tls statistics:[TLS-STATISTICS] @@ -678,12 +702,14 @@ P00 INFO: stanza-delete command end: aborted with exception [055] db must not exist for successful delete -> ls //backup/ +> list backup ------------------------------------------------------------------------------------------------------------------------------------ +db db must not exist for successful delete -> ls //archive/ +> list archive ------------------------------------------------------------------------------------------------------------------------------------ +db stop db stanza (backup host) > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db stop @@ -695,12 +721,14 @@ > [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db stanza-delete ------------------------------------------------------------------------------------------------------------------------------------ P00 INFO: stanza-delete command begin [BACKREST-VERSION]: --compress-level=3 --compress-level-network=1 --config=[TEST_PATH]/backup/pgbackrest.conf --db-timeout=45 --lock-path=[TEST_PATH]/backup/lock --log-level-console=detail --log-level-file=trace --log-level-stderr=off --log-path=[TEST_PATH]/backup/log --log-subprocess --no-log-timestamp --pg1-host=db-master --pg1-host-cmd=[BACKREST-BIN] --pg1-host-config=[TEST_PATH]/db-master/pgbackrest.conf --pg1-host-user=[USER-1] --pg1-path=[TEST_PATH]/db-master/db/base --protocol-timeout=60 --repo1-cipher-pass= 
--repo1-cipher-type=aes-256-cbc --repo1-path=/ --repo1-s3-bucket=pgbackrest-dev --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --no-repo1-s3-verify-tls --repo1-type=s3 --stanza=db +P00 DETAIL: tls statistics:[TLS-STATISTICS] +P00 INFO: http statistics:[HTTP-STATISTICS] P00 INFO: stanza-delete command end: completed successfully db must not exist for successful delete -> ls //backup/ +> list backup ------------------------------------------------------------------------------------------------------------------------------------ db must not exist for successful delete -> ls //archive/ +> list archive ------------------------------------------------------------------------------------------------------------------------------------ diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/CiTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/CiTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/CiTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/CiTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -100,10 +100,8 @@ $strConfig .= "\n" . "before_install:\n" . - " - sudo apt-get -qq update && sudo apt-get install libxml-checker-perl libdbd-pg-perl libperl-critic-perl" . - " libtemplate-perl libpod-coverage-perl libtest-differences-perl libhtml-parser-perl lintian debhelper txt2man" . - " devscripts libjson-perl libio-socket-ssl-perl libxml-libxml-perl libyaml-libyaml-perl python-pip lcov" . - " libjson-maybexs-perl libperl-dev\n" . + " - sudo apt-get -qq update && sudo apt-get install libxml-checker-perl libdbd-pg-perl libyaml-libyaml-perl python-pip" . + " lcov libperl-dev\n" . " - |\n" . " # Install & Configure AWS CLI\n" . " pip install --upgrade --user awscli\n" . @@ -112,11 +110,6 @@ " aws configure set aws_secret_access_key verySecretKey1\n" . " aws help --version\n" . " aws configure list\n" . - " - |\n" . - " # Install Devel::Cover\n" . 
- " sudo dpkg -i \${TRAVIS_BUILD_DIR?}/test/package/u14-" . packageDevelCover(VM_ARCH_AMD64) . "\n" . - " sudo apt-get -f install\n" . - ' ' . LIB_COVER_EXE . " -v\n" . "\n" . "install:\n" . " - |\n" . diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/ContainerTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/ContainerTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/ContainerTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/ContainerTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -70,11 +70,6 @@ use constant CONTAINER_DEBUG => false; #################################################################################################################################### -# Container Debug - speeds container debugging by splitting each section into a separate intermediate container -#################################################################################################################################### -use constant CONTAINER_S3_SERVER_TAG => 's3-server-20180612A'; - -#################################################################################################################################### # Store cache container checksums #################################################################################################################################### my $hContainerCache; @@ -304,51 +299,6 @@ } #################################################################################################################################### -# S3 server setup -#################################################################################################################################### -sub s3ServerSetup -{ - my $strOS = shift; - - # Install node.js - my $strScript = sectionHeader() . - "# Install node.js\n"; - - if ($strOS eq VM_CO7) - { - $strScript .= - " wget -O /root/nodejs.sh https://rpm.nodesource.com/setup_6.x && \\\n" . - " bash /root/nodejs.sh && \\\n" . 
- " yum install -y nodejs"; - } - else - { - $strScript .= - " wget -O /root/nodejs.sh https://deb.nodesource.com/setup_6.x && \\\n" . - " bash /root/nodejs.sh && \\\n" . - " wget -qO- https://deb.nodesource.com/setup_8.x | bash - && \\\n" . - " apt-get install -y nodejs"; - } - - # Install Scality S3 - $strScript .= sectionHeader() . - "# Install Scality S3\n"; - - $strScript .= - " wget -O /root/scalitys3.tar.gz https://github.com/scality/S3/archive/GA6.4.2.1.tar.gz && \\\n" . - " mkdir /root/scalitys3 && \\\n" . - " tar -C /root/scalitys3 --strip-components 1 -xvf /root/scalitys3.tar.gz && \\\n" . - " cd /root/scalitys3 && \\\n" . - " npm install && \\\n" . - ' sed -i "0,/,/s//,\n \"certFilePaths\":{\"key\":\"\/etc\/fake\-cert\/server.key\",\"cert\":' . - '\"\/etc\/fake\-cert\/server.crt\",\"ca\":\"\/etc\/fake\-cert\/ca.crt\"},/"' . " \\\n" . - ' ./config.json' . " && \\\n" . - ' sed -i "s/ort\"\: 8000/ort\"\: 443/" ./config.json'; - - return $strScript; -} - -#################################################################################################################################### # Entry point setup #################################################################################################################################### sub entryPointSetup @@ -426,17 +376,22 @@ #--------------------------------------------------------------------------------------------------------------------------- my $strScript = sectionHeader() . - "# Install base packages\n"; + "# Install packages\n"; if ($$oVm{$strOS}{&VM_OS_BASE} eq VM_OS_BASE_RHEL) { + if ($strOS eq VM_CO6 || $strOS eq VM_CO7) + { + $strScript .= + " yum -y install epel-release && \\\n"; + } + $strScript .= - " yum -y install epel-release && \\\n" . " yum -y update && \\\n" . - " yum -y install openssh-server openssh-clients wget sudo python-pip build-essential valgrind git \\\n" . - " perl perl-Digest-SHA perl-DBD-Pg perl-XML-LibXML perl-IO-Socket-SSL perl-YAML-LibYAML \\\n" . 
+ " yum -y install openssh-server openssh-clients wget sudo valgrind git \\\n" . + " perl perl-Digest-SHA perl-DBD-Pg perl-YAML-LibYAML openssl \\\n" . " gcc make perl-ExtUtils-MakeMaker perl-Test-Simple openssl-devel perl-ExtUtils-Embed rpm-build \\\n" . - " zlib-devel libxml2-devel lz4-devel"; + " zlib-devel libxml2-devel lz4-devel lcov"; if ($strOS eq VM_CO6) { @@ -446,24 +401,16 @@ { $strScript .= ' perl-JSON-PP'; } - - if (vmCoverageC($strOS)) - { - $strScript .= ' lcov'; - } } else { $strScript .= " export DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_FRONTEND=noninteractive && \\\n" . " apt-get update && \\\n" . - " apt-get -y install wget python && \\\n" . - " wget --no-check-certificate -O /root/get-pip.py https://bootstrap.pypa.io/get-pip.py && \\\n" . - " python /root/get-pip.py && \\\n" . - " apt-get -y install openssh-server wget sudo python-pip build-essential valgrind git \\\n" . - " libdbd-pg-perl libhtml-parser-perl libio-socket-ssl-perl libxml-libxml-perl libssl-dev libperl-dev \\\n" . + " apt-get -y install openssh-server wget sudo gcc make valgrind git \\\n" . + " libdbd-pg-perl libhtml-parser-perl libssl-dev libperl-dev \\\n" . " libyaml-libyaml-perl tzdata devscripts lintian libxml-checker-perl txt2man debhelper \\\n" . - " libppi-html-perl libtemplate-perl libtest-differences-perl zlib1g-dev libxml2-dev"; + " libppi-html-perl libtemplate-perl libtest-differences-perl zlib1g-dev libxml2-dev lcov"; if ($strOS eq VM_U12) { @@ -471,17 +418,7 @@ } else { - $strScript .= ' liblz4-dev'; - } - - if (vmLintC($strOS)) - { - $strScript .= ' clang-6.0 clang-tools-6.0'; - } - - if (vmCoverageC($strOS)) - { - $strScript .= ' lcov'; + $strScript .= ' libjson-pp-perl liblz4-dev'; } } @@ -536,24 +473,28 @@ if ($$oVm{$strOS}{&VM_OS_BASE} eq VM_OS_BASE_RHEL) { + $strScript .= + " rpm --import http://yum.postgresql.org/RPM-GPG-KEY-PGDG && \\\n"; + if ($strOS eq VM_CO6) { $strScript .= - " rpm --import http://yum.postgresql.org/RPM-GPG-KEY-PGDG-10 && \\\n" . 
" rpm -ivh \\\n" . - " http://yum.postgresql.org/9.0/redhat/rhel-6-x86_64/pgdg-centos90-9.0-5.noarch.rpm \\\n" . " http://yum.postgresql.org/9.1/redhat/rhel-6-x86_64/pgdg-centos91-9.1-6.noarch.rpm \\\n" . " http://yum.postgresql.org/9.2/redhat/rhel-6-x86_64/pgdg-centos92-9.2-8.noarch.rpm \\\n" . - " https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-6-x86_64/pgdg-redhat-repo-latest.noarch.rpm"; + " https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-6-x86_64/" . + "pgdg-redhat-repo-latest.noarch.rpm && \\\n"; } elsif ($strOS eq VM_CO7) { $strScript .= - " rpm --import http://yum.postgresql.org/RPM-GPG-KEY-PGDG-10 && \\\n" . " rpm -ivh \\\n" . " http://yum.postgresql.org/9.2/redhat/rhel-7-x86_64/pgdg-centos92-9.2-3.noarch.rpm \\\n" . - " https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm"; + " https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/" . + "pgdg-redhat-repo-latest.noarch.rpm && \\\n"; } + + $strScript .= " yum -y install postgresql-devel"; } else { @@ -563,7 +504,7 @@ "' >> /etc/apt/sources.list.d/pgdg.list && \\\n" . " wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \\\n" . " apt-get update && \\\n" . - " apt-get install -y postgresql-common && \\\n" . + " apt-get install -y postgresql-common libpq-dev && \\\n" . " sed -i 's/^\\#create\\_main\\_cluster.*\$/create\\_main\\_cluster \\= false/' " . "/etc/postgresql-common/createcluster.conf"; } @@ -600,19 +541,14 @@ } } - #--------------------------------------------------------------------------------------------------------------------------- - if (!$bDeprecated) - { - $strScript .= sectionHeader() . - "# Install AWS CLI\n" . - " pip install --upgrade --no-cache-dir pip==9.0.3 && \\\n" . 
- " pip install --upgrade awscli"; - } #--------------------------------------------------------------------------------------------------------------------------- - if (!$bDeprecated && $strOS ne VM_CO6 && $strOS ne VM_U12) + if ($$oVm{$strOS}{&VM_OS_BASE} eq VM_OS_BASE_DEBIAN) { - $strScript .= s3ServerSetup($strOS); + $strScript .= sectionHeader() . + "# Cleanup\n"; + + $strScript .= " apt-get clean"; } containerWrite( @@ -625,7 +561,6 @@ $strCopy = undef; my $strPkgDevelCover = packageDevelCover($oVm->{$strOS}{&VM_ARCH}); - my $bPkgDevelCoverBuild = vmCoveragePerl($strOS) && !$oStorageDocker->exists("test/package/${strOS}-${strPkgDevelCover}"); $strScript = sectionHeader() . "# Create test user\n" . @@ -638,18 +573,6 @@ $strScript .= sectionHeader() . "# Install pgBackRest package source\n" . " git clone https://salsa.debian.org/postgresql/pgbackrest.git /root/package-src"; - - # Build only when a new version has been specified - if ($bPkgDevelCoverBuild) - { - $strScript .= sectionHeader() . - "# Install Devel::Cover package source & build\n" . - " git clone https://salsa.debian.org/perl-team/modules/packages/libdevel-cover-perl.git" . - " /root/libdevel-cover-perl && \\\n" . - " cd /root/libdevel-cover-perl && \\\n" . - " git checkout debian/" . LIB_COVER_VERSION . " && \\\n" . - " debuild -i -us -uc -b"; - } } else { @@ -668,50 +591,6 @@ containerWrite($oStorageDocker, $strTempPath, $strOS, 'Build', $strImageParent, $strImage, $strCopy, $strScript, $bVmForce); - # Copy Devel::Cover to host so it can be installed in other containers (if it doesn't already exist) - if ($bPkgDevelCoverBuild) - { - executeTest('docker rm -f test-build', {bSuppressError => true}); - executeTest( - "docker run -itd -h test-build --name=test-build" . - " -v ${strTempPath}:${strTempPath} " . containerRepo() . ":${strOS}-build", - {bSuppressStdErr => true}); - executeTest( - "docker exec -i test-build " . 
- "bash -c 'cp /root/${strPkgDevelCover} ${strTempPath}/${strOS}-${strPkgDevelCover}'"); - executeTest('docker rm -f test-build'); - - $oStorageDocker->move( - "test/.vagrant/docker/${strOS}-${strPkgDevelCover}", "test/package/${strOS}-${strPkgDevelCover}"); - } - - # S3 image - ########################################################################################################################### - if (!$bDeprecated) - { - $strImage = "${strOS}-s3-server"; - $strScript = ''; - $strCopy = undef; - - $strScript = sectionHeader() . - "# Set worker clusters lower than the default for testing\n" . - " cd /root/scalitys3 && \\\n" . - ' sed -i "s/clusters\"\: [0-9]*/clusters\"\: 2/" ./config.json'; - - if ($strOS ne VM_CO6 && $strOS ne VM_U12) - { - $strImageParent = containerRepo() . ":${strOS}-base"; - $strScript .= "\n\nENTRYPOINT npm start --prefix /root/scalitys3"; - } - else - { - $strImageParent = containerRepo() . ':' . CONTAINER_S3_SERVER_TAG; - } - - containerWrite( - $oStorageDocker, $strTempPath, $strOS, 'S3 Server', $strImageParent, $strImage, $strCopy, $strScript, $bVmForce); - } - # Test image ######################################################################################################################## if (!$bDeprecated) @@ -719,28 +598,8 @@ $strImageParent = containerRepo() . ":${strOS}-base"; $strImage = "${strOS}-test"; - if (vmCoveragePerl($strOS)) - { - $oStorageDocker->copy( - "test/package/${strOS}-${strPkgDevelCover}", "test/.vagrant/docker/${strOS}-${strPkgDevelCover}"); - - $strCopy = - "# Copy Devel::Cover\n" . - "COPY ${strOS}-${strPkgDevelCover} /tmp/${strPkgDevelCover}"; - - $strScript = sectionHeader() . - "# Install packages\n" . - " apt-get install -y libjson-maybexs-perl"; - - $strScript .= sectionHeader() . - "# Install Devel::Cover\n" . 
- " dpkg -i /tmp/${strPkgDevelCover}"; - } - else - { - $strCopy = undef; - $strScript = ''; - } + $strCopy = undef; + $strScript = ''; #--------------------------------------------------------------------------------------------------------------------------- $strScript .= sectionHeader() . @@ -777,15 +636,6 @@ $strScript .= sshSetup($strOS, TEST_USER, TEST_GROUP, $$oVm{$strOS}{&VM_CONTROL_MASTER}); - if (!$bDeprecated) - { - $strScript .= sectionHeader() . - "# Config AWS CLI\n" . - ' sudo -i -u ' . TEST_USER . " aws configure set region us-east-1 && \\\n" . - ' sudo -i -u ' . TEST_USER . " aws configure set aws_access_key_id accessKey1 && \\\n" . - ' sudo -i -u ' . TEST_USER . " aws configure set aws_secret_access_key verySecretKey1"; - } - $strScript .= sectionHeader() . "# Create pgbackrest user\n" . ' ' . userCreate($strOS, BACKREST_USER, BACKREST_USER_ID, TEST_GROUP); diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/DefineTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/DefineTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/DefineTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/DefineTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -53,6 +53,8 @@ push @EXPORT, qw(TESTDEF_DEFINE_TEST); use constant TESTDEF_DEBUG_UNIT_SUPPRESS => 'debugUnitSuppress'; push @EXPORT, qw(TESTDEF_DEBUG_UNIT_SUPPRESS); +use constant TESTDEF_INCLUDE => 'include'; + push @EXPORT, qw(TESTDEF_INCLUDE); use constant TESTDEF_INDIVIDUAL => 'individual'; push @EXPORT, qw(TESTDEF_INDIVIDUAL); use constant TESTDEF_TOTAL => 'total'; @@ -136,7 +138,7 @@ # Set module type variables $hTestDefHash->{$strModule}{$strTest}{&TESTDEF_C} = - $strModuleType eq TESTDEF_UNIT && $strTest !~ /\-perl$/ ? true : false; + $strModuleType eq TESTDEF_UNIT && $strTest !~ /perl$/ ? true : false; $hTestDefHash->{$strModule}{$strTest}{&TESTDEF_INTEGRATION} = $strModuleType eq TESTDEF_INTEGRATION ? 
true : false; $hTestDefHash->{$strModule}{$strTest}{&TESTDEF_EXPECT} = $bExpect; $hTestDefHash->{$strModule}{$strTest}{&TESTDEF_CONTAINER} = $bContainer; @@ -180,6 +182,9 @@ push(@{$hCoverageList->{$strCodeModule}}, {strModule=> $strModule, strTest => $strTest}); } } + + # Set include list + $hTestDefHash->{$strModule}{$strTest}{&TESTDEF_INCLUDE} = $hModuleTest->{&TESTDEF_INCLUDE}; } $hModuleTest->{$strModule} = \@stryModuleTest; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/FileTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/FileTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/FileTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/FileTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -27,8 +27,7 @@ use pgBackRest::Common::Wait; use pgBackRest::Config::Config; use pgBackRest::Manifest; -use pgBackRest::Storage::Local; -use pgBackRest::Storage::S3::Driver; +use pgBackRest::Storage::Base; use pgBackRestTest::Common::ExecuteTest; use pgBackRestTest::Common::HostGroupTest; @@ -168,7 +167,7 @@ ); # Mode commands are ignored on S3 - if ($oStorage->driver()->className() ne STORAGE_S3_DRIVER) + if ($oStorage->type() ne STORAGE_S3) { executeTest('sudo chmod ' . ($bRecurse ? '-R ' : '') . "${strMode} " . $oStorage->pathGet($strPathExp)); } @@ -203,7 +202,7 @@ ); # If S3 then use storage commands to remove - if ($oStorage->driver()->className() eq STORAGE_S3_DRIVER) + if ($oStorage->type() eq STORAGE_S3) { hostGroupGet()->hostGet(HOST_S3)->executeS3( 'mv' . ($bRecurse ? ' --recursive' : '') . ' s3://' . HOST_S3_BUCKET . $oStorage->pathGet($strSourcePathExp) . @@ -244,8 +243,8 @@ {name => 'bRecurse', optional => true, default => false}, ); - # Mode commands are ignored on S3 - if ($oStorage->driver()->className() ne STORAGE_S3_DRIVER) + # Owner commands are ignored on S3 + if ($oStorage->type() ne STORAGE_S3) { executeTest('sudo chown ' . ($bRecurse ? '-R ' : '') . "${strOwner} " . 
$oStorage->pathGet($strPathExp)); } @@ -278,11 +277,19 @@ ); # If S3 then use storage commands to remove - if ($oStorage->driver()->className() eq STORAGE_S3_DRIVER) + if ($oStorage->type() eq STORAGE_S3) { - $oStorage->remove($strPathExp, {bRecurse => $bRecurse}); + my $oInfo = $oStorage->info($strPathExp, {bIgnoreMissing => true}); + + if (defined($oInfo) && $oInfo->{type} eq 'f') + { + $oStorage->remove($strPathExp); + } + else + { + $oStorage->pathRemove($strPathExp, {bRecurse => true}); + } } - # Else remove using filesystem commands else { executeTest('sudo rm -f' . ($bRecurse ? 'r ' : ' ') . $oStorage->pathGet($strPathExp)); diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/JobTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/JobTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/JobTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/JobTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -214,7 +214,9 @@ { executeTest( 'rsync -rt --delete --exclude=*.o --exclude=test.c --exclude=test.gcno --exclude=LibC.h --exclude=xs' . - " --exclude=test --exclude=buildflags --exclude=testflags --exclude=harnessflags" . + ' --exclude=test --exclude=buildflags --exclude=testflags --exclude=harnessflags' . + ' --exclude=build.auto.h --exclude=build.auto.h.in --exclude=Makefile --exclude=Makefile.in' . + ' --exclude=configure --exclude=configure.ac' . " $self->{strBackRestBase}/src/ $self->{strGCovPath} && " . "rsync -t $self->{strBackRestBase}/libc/LibC.h $self->{strGCovPath} && " . "rsync -rt --delete $self->{strBackRestBase}/libc/xs/ $self->{strGCovPath}/xs && " . @@ -261,9 +263,7 @@ { $strCommand = ($self->{oTest}->{&TEST_CONTAINER} ? 'docker exec -i -u ' . TEST_USER . " ${strImage} " : '') . - testRunExe( - vmCoverageC($self->{oTest}->{&TEST_VM}), undef, abs_path($0), dirname($self->{strCoveragePath}), - $self->{strBackRestBase}, $self->{oTest}->{&TEST_MODULE}, $self->{oTest}->{&TEST_NAME}) . + abs_path($0) . 
" --test-path=${strVmTestPath}" . " --vm=$self->{oTest}->{&TEST_VM}" . " --vm-id=$self->{iVmIdx}" . @@ -291,6 +291,8 @@ foreach my $strFile (sort(keys(%{$self->{oStorageTest}->manifest($self->{strGCovPath})}))) { + my $strFileNoExt = substr($strFile, 0, length($strFile) - 2); + # Skip all files except .c files (including .auto.c) next if $strFile !~ /(?{substr($strFile, 0, length($strFile) - 2)}) && + if (!defined($hTestCoverage->{$strFileNoExt}) && !grep(/^$strFileNoExt$/, @{$hTest->{&TESTDEF_INCLUDE}}) && $strFile !~ /^test\/module\/[^\/]*\/.*Test\.c$/) { push(@stryCFile, "${strFile}"); @@ -313,7 +315,7 @@ "test/module/$self->{oTest}->{&TEST_MODULE}/" . testRunName($self->{oTest}->{&TEST_NAME}, false) . 'Test.c'; my $strCInclude; - foreach my $strFile (sort(keys(%{$hTestCoverage}))) + foreach my $strFile (sort(keys(%{$hTestCoverage}), @{$hTest->{&TESTDEF_INCLUDE}})) { # Don't include the test file as it is already included below next if $strFile =~ /Test$/; @@ -411,6 +413,7 @@ '-I. -Itest -std=c99 -fPIC -g -Wno-clobbered -D_POSIX_C_SOURCE=200112L' . ' `perl -MExtUtils::Embed -e ccopts`' . ' `xml2-config --cflags`' . ($self->{bProfile} ? " -pg" : '') . + ' -I`pg_config --includedir`' . ($self->{oTest}->{&TEST_DEBUG_UNIT_SUPPRESS} ? '' : " -DDEBUG_UNIT") . (vmWithBackTrace($self->{oTest}->{&TEST_VM}) && $self->{bBackTrace} ? ' -DWITH_BACKTRACE' : '') . ($self->{oTest}->{&TEST_CDEF} ? " $self->{oTest}->{&TEST_CDEF}" : '') . @@ -494,7 +497,7 @@ " -c $strCFile -o " . substr($strCFile, 0, length($strCFile) - 2) . ".o\n"; } - $self->{oStorageTest}->put($self->{strGCovPath} . "/Makefile", $strMakefile); + buildPutDiffers($self->{oStorageTest}, $self->{strGCovPath} . 
"/Makefile", $strMakefile); } my $oExec = new pgBackRestTest::Common::ExecuteTest( diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/RunTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/RunTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/RunTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/RunTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -19,10 +19,11 @@ use pgBackRest::Common::Log; use pgBackRest::Common::String; use pgBackRest::Common::Wait; -use pgBackRest::Storage::Posix::Driver; -use pgBackRest::Storage::Local; +use pgBackRest::Storage::Base; +use pgBackRest::Storage::Storage; use pgBackRest::Version; +use pgBackRestTest::Common::BuildTest; use pgBackRestTest::Common::DefineTest; use pgBackRestTest::Common::ExecuteTest; use pgBackRestTest::Common::LogTest; @@ -155,12 +156,10 @@ $self->{bFirstTest} = true; # Initialize test storage - $oStorage = new pgBackRest::Storage::Local($self->testPath(), new pgBackRest::Storage::Posix::Driver()); + $oStorage = new pgBackRest::Storage::Storage(STORAGE_LOCAL, {strPath => $self->testPath()}); # Generate backrest exe - $self->{strBackRestExe} = testRunExe( - $self->coverage(), $self->{strBackRestExeC}, $self->{strBackRestExeHelper}, dirname($self->testPath()), $self->basePath(), - $self->module(), $self->moduleTest(), true); + $self->{strBackRestExe} = defined($self->{strBackRestExeC}) ? $self->{strBackRestExeC} : $self->{strBackRestExeHelper}; projectBinSet($self->{strBackRestExe}); @@ -507,8 +506,7 @@ 'pgBackRestTest::Module::' . testRunName($strModule) . '::' . testRunName($strModule) . testRunName($strModuleTest) . 
'Test'; - $oTestRun = eval( ## no critic (BuiltinFunctions::ProhibitStringyEval) - "require ${strModuleName}; ${strModuleName}->import(); return new ${strModuleName}();") + $oTestRun = eval("require ${strModuleName}; ${strModuleName}->import(); return new ${strModuleName}();") or do {confess $EVAL_ERROR}; # Return from function and log return values if any @@ -532,64 +530,6 @@ push @EXPORT, qw(testRunGet); #################################################################################################################################### -# Generate test executable -#################################################################################################################################### -sub testRunExe -{ - my $bCoverage = shift; - my $strExeC = shift; - my $strExeHelper = shift; - my $strCoveragePath = shift; - my $strBackRestBasePath = shift; - my $strModule = shift; - my $strTest = shift; - my $bLog = shift; - - my $strExe = defined($strExeC) ? $strExeC : undef; - my $strPerlModule; - - if ($bCoverage) - { - # Limit Perl modules tested to what is defined in the test coverage (if it exists) - my $hTestCoverage = (testDefModuleTest($strModule, $strTest))->{&TESTDEF_COVERAGE}; - my $strPerlModuleLog; - - if (defined($hTestCoverage)) - { - foreach my $strCoverageModule (sort(keys(%{$hTestCoverage}))) - { - $strPerlModule .= ',.*/' . $strCoverageModule . '\.p.$'; - $strPerlModuleLog .= (defined($strPerlModuleLog) ? ', ' : '') . $strCoverageModule; - } - } - - # Build the exe - if (defined($strPerlModule)) - { - $strExe .= - (defined($strExeC) ? ' --perl-option=' : 'perl ') . - "-MDevel::Cover=-silent,1,-dir,${strCoveragePath},-select${strPerlModule},+inc" . - ",${strBackRestBasePath},-coverage,statement,branch,condition,path,subroutine" . - (defined($strExeC) ? 
'' : " ${strExeHelper}"); - - if (defined($bLog) && $bLog) - { - &log(INFO, " coverage: ${strPerlModuleLog}"); - } - } - } - - if (!defined($strExeC) && !defined($strPerlModule)) - { - $strExe = $strExeHelper; - } - - return $strExe; -} - -push(@EXPORT, qw(testRunExe)); - -#################################################################################################################################### # storageTest - get the storage for the current test #################################################################################################################################### sub storageTest @@ -606,7 +546,6 @@ sub backrestExe {return shift->{strBackRestExe}} sub backrestUser {return shift->{strBackRestUser}} sub basePath {return shift->{strBasePath}} -sub coverage {vmCoveragePerl(shift->{strVm})} sub dataPath {return shift->basePath() . '/test/data'} sub doCleanup {return shift->{bCleanup}} sub doExpect {return shift->{bExpect}} diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/VmTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Common/VmTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Common/VmTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Common/VmTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -31,14 +31,10 @@ push @EXPORT, qw(VM_CONTROL_MASTER); # Will coverage testing be run for C? use constant VMDEF_COVERAGE_C => 'coverage-c'; -# Will coverage testing be run for Perl? -use constant VMDEF_COVERAGE_PERL => 'coverage-perl'; use constant VM_DEPRECATED => 'deprecated'; push @EXPORT, qw(VM_DEPRECATED); use constant VM_IMAGE => 'image'; push @EXPORT, qw(VM_IMAGE); -# Will static code analysis be run for C? 
-use constant VMDEF_LINT_C => 'lint-c'; use constant VM_OS => 'os'; push @EXPORT, qw(VM_OS); use constant VM_OS_BASE => 'os-base'; @@ -137,7 +133,6 @@ &VM_DB => [ - PG_VERSION_90, PG_VERSION_91, PG_VERSION_92, PG_VERSION_94, @@ -148,8 +143,8 @@ &VM_DB_TEST => [ - PG_VERSION_90, PG_VERSION_91, + PG_VERSION_92, PG_VERSION_94, PG_VERSION_95, ], @@ -266,7 +261,7 @@ [ PG_VERSION_83, PG_VERSION_84, - PG_VERSION_92, + PG_VERSION_90, PG_VERSION_93, ], }, @@ -343,8 +338,6 @@ &VM_IMAGE => 'ubuntu:18.04', &VM_ARCH => VM_ARCH_AMD64, &VMDEF_COVERAGE_C => true, - &VMDEF_COVERAGE_PERL => true, - &VMDEF_LINT_C => true, &VMDEF_PGSQL_BIN => '/usr/lib/postgresql/{[version]}/bin', &VMDEF_PERL_ARCH_PATH => '/usr/local/lib/x86_64-linux-gnu/perl/5.26.1', @@ -385,7 +378,6 @@ foreach my $strPgVersion (versionSupport()) { my $strVmPgVersionRun; - my $bVmCoveragePerl = false; my $bVmCoverageC = false; foreach my $strVm (VM_LIST) @@ -395,11 +387,6 @@ $bVmCoverageC = true; } - if (vmCoveragePerl($strVm)) - { - $bVmCoveragePerl = true; - } - foreach my $strVmPgVersion (@{$oyVm->{$strVm}{&VM_DB_TEST}}) { if ($strPgVersion eq $strVmPgVersion) @@ -421,11 +408,6 @@ confess &log(ASSERT, "C coverage ${strErrorSuffix}"); } - if (!$bVmCoveragePerl) - { - confess &log(ASSERT, "Perl coverage ${strErrorSuffix}"); - } - if (!defined($strVmPgVersionRun)) { confess &log(ASSERT, "PostgreSQL ${strPgVersion} ${strErrorSuffix}"); @@ -468,30 +450,6 @@ push @EXPORT, qw(vmCoverageC); #################################################################################################################################### -# vmCoveragePerl -#################################################################################################################################### -sub vmCoveragePerl -{ - my $strVm = shift; - - return $oyVm->{$strVm}{&VMDEF_COVERAGE_PERL} ? 
true : false; -} - -push @EXPORT, qw(vmCoveragePerl); - -#################################################################################################################################### -# vmLintC -#################################################################################################################################### -sub vmLintC -{ - my $strVm = shift; - - return $oyVm->{$strVm}{&VMDEF_LINT_C} ? true : false; -} - -push @EXPORT, qw(vmLintC); - -#################################################################################################################################### # Get vm architecture bits #################################################################################################################################### sub vmArchBits diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostBackupTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostBackupTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostBackupTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostBackupTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -15,6 +15,7 @@ our @EXPORT = qw(); use Fcntl ':mode'; use File::Basename qw(dirname); +use File::stat qw{lstat}; use Storable qw(dclone); use pgBackRest::Archive::Info; @@ -27,9 +28,8 @@ use pgBackRest::DbVersion; use pgBackRest::Manifest; use pgBackRest::Protocol::Storage::Helper; -use pgBackRest::Storage::Posix::Driver; -use pgBackRest::Storage::S3::Driver; use pgBackRest::Version; +use pgBackRest::Storage::Base; use pgBackRestTest::Env::Host::HostBaseTest; use pgBackRestTest::Env::Host::HostS3Test; @@ -262,64 +262,67 @@ } # Make sure tablespace links are correct - if (($strType eq CFGOPTVAL_BACKUP_TYPE_FULL || $self->hardLink()) && $self->hasLink()) + if ($self->hasLink()) { - my $hTablespaceManifest = storageRepo()->manifest( - STORAGE_REPO_BACKUP . "/${strBackup}/" . MANIFEST_TARGET_PGDATA . '/' . DB_PATH_PGTBLSPC); - - # Remove . and .. 
- delete($hTablespaceManifest->{'.'}); - delete($hTablespaceManifest->{'..'}); - - # Iterate file links - for my $strFile (sort(keys(%{$hTablespaceManifest}))) + if ($strType eq CFGOPTVAL_BACKUP_TYPE_FULL || $self->hardLink()) { - # Make sure the link is in the expected manifest - my $hManifestTarget = - $oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}{&MANIFEST_TARGET_PGTBLSPC . "/${strFile}"}; + my $hTablespaceManifest = storageRepo()->manifest( + STORAGE_REPO_BACKUP . "/${strBackup}/" . MANIFEST_TARGET_PGDATA . '/' . DB_PATH_PGTBLSPC); - if (!defined($hManifestTarget) || $hManifestTarget->{&MANIFEST_SUBKEY_TYPE} ne MANIFEST_VALUE_LINK || - $hManifestTarget->{&MANIFEST_SUBKEY_TABLESPACE_ID} ne $strFile) - { - confess &log(ERROR, "'${strFile}' is not in expected manifest as a link with the correct tablespace id"); - } + # Remove . and .. + delete($hTablespaceManifest->{'.'}); + delete($hTablespaceManifest->{'..'}); - # Make sure the link really is a link - if ($hTablespaceManifest->{$strFile}{type} ne 'l') + # Iterate file links + for my $strFile (sort(keys(%{$hTablespaceManifest}))) { - confess &log(ERROR, "'${strFile}' in tablespace directory is not a link"); - } + # Make sure the link is in the expected manifest + my $hManifestTarget = + $oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}{&MANIFEST_TARGET_PGTBLSPC . "/${strFile}"}; - # Make sure the link destination is correct - my $strLinkDestination = '../../' . MANIFEST_TARGET_PGTBLSPC . "/${strFile}"; + if (!defined($hManifestTarget) || $hManifestTarget->{&MANIFEST_SUBKEY_TYPE} ne MANIFEST_VALUE_LINK || + $hManifestTarget->{&MANIFEST_SUBKEY_TABLESPACE_ID} ne $strFile) + { + confess &log(ERROR, "'${strFile}' is not in expected manifest as a link with the correct tablespace id"); + } - if ($hTablespaceManifest->{$strFile}{link_destination} ne $strLinkDestination) - { - confess &log(ERROR, - "'${strFile}' link should reference '${strLinkDestination}' but actually references " . 
- "'$hTablespaceManifest->{$strFile}{link_destination}'"); - } - } + # Make sure the link really is a link + if ($hTablespaceManifest->{$strFile}{type} ne 'l') + { + confess &log(ERROR, "'${strFile}' in tablespace directory is not a link"); + } - # Iterate manifest targets - for my $strTarget (sort(keys(%{$oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}}))) - { - my $hManifestTarget = $oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}{$strTarget}; - my $strTablespaceId = $hManifestTarget->{&MANIFEST_SUBKEY_TABLESPACE_ID}; + # Make sure the link destination is correct + my $strLinkDestination = '../../' . MANIFEST_TARGET_PGTBLSPC . "/${strFile}"; + + if ($hTablespaceManifest->{$strFile}{link_destination} ne $strLinkDestination) + { + confess &log(ERROR, + "'${strFile}' link should reference '${strLinkDestination}' but actually references " . + "'$hTablespaceManifest->{$strFile}{link_destination}'"); + } + } - # Make sure the target exists as a link on disk - if ($hManifestTarget->{&MANIFEST_SUBKEY_TYPE} eq MANIFEST_VALUE_LINK && defined($strTablespaceId) && - !defined($hTablespaceManifest->{$strTablespaceId})) + # Iterate manifest targets + for my $strTarget (sort(keys(%{$oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}}))) { - confess &log(ERROR, - "target '${strTarget}' does not have a link at '" . DB_PATH_PGTBLSPC. "/${strTablespaceId}'"); + my $hManifestTarget = $oExpectedManifest->{&MANIFEST_SECTION_BACKUP_TARGET}{$strTarget}; + my $strTablespaceId = $hManifestTarget->{&MANIFEST_SUBKEY_TABLESPACE_ID}; + + # Make sure the target exists as a link on disk + if ($hManifestTarget->{&MANIFEST_SUBKEY_TYPE} eq MANIFEST_VALUE_LINK && defined($strTablespaceId) && + !defined($hTablespaceManifest->{$strTablespaceId})) + { + confess &log(ERROR, + "target '${strTarget}' does not have a link at '" . DB_PATH_PGTBLSPC. 
"/${strTablespaceId}'"); + } } } - } - # Else there should not be a tablespace directory at all - elsif (storageRepo()->pathExists(STORAGE_REPO_BACKUP . "/${strBackup}/" . MANIFEST_TARGET_PGDATA . '/' . DB_PATH_PGTBLSPC)) - { - confess &log(ERROR, 'backup must be full or hard-linked to have ' . DB_PATH_PGTBLSPC . ' directory'); + # Else there should not be a tablespace directory at all + elsif (storageRepo()->pathExists(STORAGE_REPO_BACKUP . "/${strBackup}/" . MANIFEST_TARGET_PGDATA . '/' . DB_PATH_PGTBLSPC)) + { + confess &log(ERROR, 'backup must be full or hard-linked to have ' . DB_PATH_PGTBLSPC . ' directory'); + } } # Check that latest link exists unless repo links are disabled @@ -495,7 +498,8 @@ my $lRepoSize = $oActualManifest->test(MANIFEST_SECTION_TARGET_FILE, $strFileKey, MANIFEST_SUBKEY_REFERENCE) ? $oActualManifest->numericGet(MANIFEST_SECTION_TARGET_FILE, $strFileKey, MANIFEST_SUBKEY_REPO_SIZE, false) : - (storageRepo()->info(STORAGE_REPO_BACKUP . "/${strBackup}/${strFileKey}" . ($bCompressed ? '.gz' : '')))->size; + (storageRepo()->info(STORAGE_REPO_BACKUP . + "/${strBackup}/${strFileKey}" . ($bCompressed ? '.gz' : '')))->{size}; if (defined($lRepoSize) && $lRepoSize != $oExpectedManifest->{&MANIFEST_SECTION_TARGET_FILE}{$strFileKey}{&MANIFEST_SUBKEY_SIZE}) @@ -909,11 +913,11 @@ if (defined($self->{oLogTest}) && $self->synthetic()) { $self->{oLogTest}->logAdd( - 'ls ' . $self->repoPath() . '/backup/', $self->stanza() . ' must not exist for successful delete', - join("\n", storageRepo()->list('backup/'))); + 'list backup', $self->stanza() . ' must not exist for successful delete', + join("\n", storageRepo()->list('backup'))); $self->{oLogTest}->logAdd( - 'ls ' . $self->repoPath() . '/archive/', $self->stanza() . ' must not exist for successful delete', - join("\n", storageRepo()->list('archive/'))); + 'list archive', $self->stanza() . 
' must not exist for successful delete', + join("\n", storageRepo()->list('archive'))); } # Return from function and log return values if any @@ -1882,7 +1886,7 @@ if ($oActualManifest->get(MANIFEST_SECTION_TARGET_FILE, $strName, MANIFEST_SUBKEY_SIZE) != 0) { - my $oStat = storageTest()->info($oActualManifest->dbPathGet($strSectionPath, $strName)); + my $oStat = lstat($oActualManifest->dbPathGet($strSectionPath, $strName)); # When performing a selective restore, the files for the database(s) that are not restored are still copied but as empty # sparse files (blocks == 0). If the file is not a sparse file or is a link, then get the actual checksum for comparison @@ -2049,8 +2053,8 @@ sub backrestExe {return testRunGet()->backrestExe()} sub bogusHost {return shift->{bBogusHost}} sub hardLink {return shift->{bHardLink}} -sub hasLink {storageRepo()->driver()->className() eq STORAGE_POSIX_DRIVER} -sub isFS {storageRepo()->driver()->className() ne STORAGE_S3_DRIVER} +sub hasLink {storageRepo()->capability(STORAGE_CAPABILITY_LINK)} +sub isFS {storageRepo()->type() ne STORAGE_S3} sub isHostBackup {my $self = shift; return $self->backupDestination() eq $self->nameGet()} sub isHostDbMaster {return shift->nameGet() eq HOST_DB_MASTER} sub isHostDbStandby {return shift->nameGet() eq HOST_DB_STANDBY} diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostDbCommonTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostDbCommonTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostDbCommonTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostDbCommonTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -11,7 +11,6 @@ use warnings FATAL => qw(all); use Carp qw(confess); -use DBI; use Exporter qw(import); our @EXPORT = qw(); use File::Basename qw(dirname); diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostDbSyntheticTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostDbSyntheticTest.pm --- 
pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostDbSyntheticTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostDbSyntheticTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -11,7 +11,6 @@ use warnings FATAL => qw(all); use Carp qw(confess); -use DBI; use Exporter qw(import); our @EXPORT = qw(); use Fcntl ':mode'; @@ -224,7 +223,7 @@ my $strPathFile = $self->dbFileCreate($oManifestRef, $strTarget, $strFile, $strContent, $lTime, $strMode); # Stat the file - my $oStat = storageTest()->info($strPathFile); + my $oStat = lstat($strPathFile); ${$oManifestRef}{&MANIFEST_SECTION_TARGET_FILE}{$strManifestKey}{&MANIFEST_SUBKEY_GROUP} = getgrgid($oStat->gid); ${$oManifestRef}{&MANIFEST_SECTION_TARGET_FILE}{$strManifestKey}{&MANIFEST_SUBKEY_USER} = getpwuid($oStat->uid); @@ -245,7 +244,7 @@ if (!$bChecksumPage && $strChecksumPageError ne '0') { - my @iyChecksumPageError = eval($strChecksumPageError); ## no critic (BuiltinFunctions::ProhibitStringyEval) + my @iyChecksumPageError = eval($strChecksumPageError); $oManifestRef->{&MANIFEST_SECTION_TARGET_FILE}{$strManifestKey}{&MANIFEST_SUBKEY_CHECKSUM_PAGE_ERROR} = \@iyChecksumPageError; @@ -338,7 +337,7 @@ my $strDbFile = $self->dbLinkCreate($oManifestRef, $strPath, $strFile, $strDestination); # Stat the link - my $oStat = storageTest()->info($strDbFile); + my $oStat = lstat($strDbFile); # Check for errors in stat if (!defined($oStat)) @@ -360,7 +359,7 @@ (defined(dirname($strPath)) ? dirname($strPath) : '') . "/${strDestination}"; } - $oStat = storageTest()->info($strDestinationFile); + $oStat = lstat($strDestinationFile); my $strSection = MANIFEST_SECTION_TARGET_PATH; @@ -556,7 +555,7 @@ # Load linked path into manifest my $strLinkPath = $self->tablespacePath($iOid); my $strTarget = MANIFEST_TARGET_PGTBLSPC . 
"/${iOid}"; - my $oStat = storageTest()->info($strLinkPath); + my $oStat = lstat($strLinkPath); ${$oManifestRef}{&MANIFEST_SECTION_TARGET_PATH}{$strTarget}{&MANIFEST_SUBKEY_GROUP} = getgrgid($oStat->gid); ${$oManifestRef}{&MANIFEST_SECTION_TARGET_PATH}{$strTarget}{&MANIFEST_SUBKEY_USER} = getpwuid($oStat->uid); @@ -582,7 +581,7 @@ } # Load tablespace path into manifest - $oStat = storageTest()->info($strTablespacePath); + $oStat = lstat($strTablespacePath); ${$oManifestRef}{&MANIFEST_SECTION_TARGET_PATH}{&MANIFEST_TARGET_PGTBLSPC} = ${$oManifestRef}{&MANIFEST_SECTION_TARGET_PATH}{&MANIFEST_TARGET_PGDATA}; @@ -599,7 +598,7 @@ or confess "unable to link ${strLink} to ${strLinkPath}"; # Load link into the manifest - $oStat = storageTest()->info($strLink); + $oStat = lstat($strLink); my $strLinkTarget = MANIFEST_TARGET_PGDATA . "/${strTarget}"; ${$oManifestRef}{&MANIFEST_SECTION_TARGET_LINK}{$strLinkTarget}{&MANIFEST_SUBKEY_GROUP} = getgrgid($oStat->gid); diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostS3Test.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostS3Test.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/Host/HostS3Test.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/Host/HostS3Test.pm 2019-08-05 16:03:04.000000000 +0000 @@ -11,6 +11,7 @@ use warnings FATAL => qw(all); use Carp qw(confess); +use Cwd qw(abs_path); use Exporter qw(import); our @EXPORT = qw(); use File::Basename qw(dirname); @@ -64,9 +65,16 @@ ); # Create the host + my $strProjectPath = dirname(dirname(abs_path($0))); + my $strFakeCertPath = "${strProjectPath}/doc/resource/fake-cert"; + my $self = $class->SUPER::new( - HOST_S3, 'test-' . testRunGet()->vmId() . '-s3-server', containerRepo() . ':' . testRunGet()->vm() . '-s3-server', - 'root', testRunGet()->vm()); + HOST_S3, 'test-' . testRunGet()->vmId() . 
'-s3-server', 'minio/minio:RELEASE.2019-06-04T01-15-58Z', 'root', 'u18', + ["${strFakeCertPath}/s3-server.crt:/root/.minio/certs/public.crt:ro", + "${strFakeCertPath}/s3-server.key:/root/.minio/certs/private.key:ro"], + '-e MINIO_REGION=' . HOST_S3_REGION . ' -e MINIO_DOMAIN=' . HOST_S3_ENDPOINT . ' -e MINIO_BROWSER=off' . + ' -e MINIO_ACCESS_KEY=' . HOST_S3_ACCESS_KEY . ' -e MINIO_SECRET_KEY=' . HOST_S3_ACCESS_SECRET_KEY, + 'server /data --address :443 --compat', false); bless $self, $class; # Return from function and log return values if any diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/HostEnvTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/HostEnvTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/HostEnvTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/HostEnvTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -122,9 +122,6 @@ if (defined($oHostS3)) { $oHostGroup->hostAdd($oHostS3, {rstryHostName => ['pgbackrest-dev.s3.amazonaws.com', 's3.amazonaws.com']}); - - # Wait for server to start - $oHostS3->executeS3('mb s3://' . 
HOST_S3_BUCKET); } # Create db master config @@ -186,6 +183,12 @@ $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); + # Create S3 bucket + if (defined($oHostS3)) + { + storageRepo()->{oStorageC}->bucketCreate(); + } + return $oHostDbMaster, $oHostDbStandby, $oHostBackup, $oHostS3; } diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/S3EnvTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Env/S3EnvTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Env/S3EnvTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Env/S3EnvTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ -#################################################################################################################################### -# S3 Test Environment -#################################################################################################################################### -package pgBackRestTest::Env::S3EnvTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::Storage::S3::Driver; - -use pgBackRestTest::Common::ContainerTest; -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; -use pgBackRestTest::Common::VmTest; - -#################################################################################################################################### -# initS3 -#################################################################################################################################### -sub initS3 -{ - my $self = shift; - - my ($strBucket, $strEndPoint, 
$strRegion, $strAccessKeyId, $strSecretAccessKey) = - ('pgbackrest-dev', 's3.amazonaws.com', 'us-east-1', 'accessKey1', 'verySecretKey1'); - - my $strS3ServerPath = $self->testPath() . '/s3server'; - my $strS3ServerDataPath = "${strS3ServerPath}/data"; - my $strS3ServerMetaPath = "${strS3ServerPath}/meta"; - my $strS3ServerLogFile = "${strS3ServerPath}/server.log"; - storageTest()->pathCreate($strS3ServerDataPath, {bCreateParent => true}); - storageTest()->pathCreate($strS3ServerMetaPath, {bCreateParent => true}); - - $self->{strS3Command} = 'export PYTHONWARNINGS="ignore" && aws s3 --no-verify-ssl'; - - executeTest("echo '127.0.0.1 ${strBucket}.${strEndPoint} ${strEndPoint}' | sudo tee -a /etc/hosts"); - executeTest('sudo sed -i "s/logLevel\"\: \"info\"/logLevel\"\: \"trace\"/" /root/scalitys3/config.json'); - executeTest("sudo npm start --prefix /root/scalitys3 > ${strS3ServerLogFile} 2>&1 &"); - executeTest("tail -f ${strS3ServerLogFile} | grep -m 1 \"server started\""); - executeTest("$self->{strS3Command} mb s3://pgbackrest-dev"); - - # Test variables - my $strFile = 'file.txt'; - my $strFileContent = 'TESTDATA'; - - # Initialize the driver - return new pgBackRest::Storage::S3::Driver( - $strBucket, $strEndPoint, $strRegion, $strAccessKeyId, $strSecretAccessKey, - {strCaFile => $self->vm() eq VM_CO7 ? CERT_FAKE_CA : undef, - bVerifySsl => $self->vm() eq VM_CO7 ? 
undef : false, lBufferMax => 1048576}); -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Backup/BackupFileUnitPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Backup/BackupFileUnitPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Backup/BackupFileUnitPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Backup/BackupFileUnitPerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -125,16 +125,8 @@ if ($self->begin('backupFile(), backupManifestUpdate()')) { #--------------------------------------------------------------------------------------------------------------------------- - # Copy pg_control and confirm manifestUpdate does not save the manifest yet - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($self->{strPgControl}, MANIFEST_FILE_PGCONTROL, $lPgControlSize, undef, false, $strBackupLabel, false, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lPgControlTime, true, undef, false, false, undef); - - $self->testResult(sub {storageTest()->exists($strPgControlRepo)}, true, 'pg_control file exists in repo'); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strPgControlHash && - $lResultCopySize == $lPgControlSize && $lResultRepoSize == $lPgControlSize), true, - 'pg_control file copied to repo successfully'); + # Create backup path so manifest can be saved + storageRepo->pathCreate(storageRepo()->pathGet(STORAGE_REPO_BACKUP . 
"/$strBackupLabel")); ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, @@ -145,15 +137,15 @@ $lPgControlSize, undef, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); + BACKUP_FILE_COPY, + 8192, + 8192, + $strPgControlHash, + undef, + 16785408, + 0, + 167854, + 0); # Accumulators should be same size as pg_control $self->testResult(($lSizeCurrent == $lPgControlSize && $lManifestSaveCurrent == $lPgControlSize), true, @@ -163,26 +155,12 @@ MANIFEST_SUBKEY_CHECKSUM, $strPgControlHash)}, true, "manifest updated for pg_control"); # Neither backup.manifest nor backup.manifest.copy written because size threshold not met - $self->testException(sub {storageRepo()->openRead("$strBackupPath/" . FILE_MANIFEST . INI_COPY_EXT)}, ERROR_FILE_MISSING, - "unable to open '$strBackupPath/" . FILE_MANIFEST . INI_COPY_EXT . "': No such file or directory"); - $self->testException(sub {storageRepo()->openRead("$strBackupPath/" . FILE_MANIFEST)}, ERROR_FILE_MISSING, - "unable to open '$strBackupPath/" . FILE_MANIFEST . "': No such file or directory"); + $self->testResult(sub {storageRepo()->exists("$strBackupPath/" . FILE_MANIFEST)}, false, "backup.manifest missing"); + $self->testResult( + sub {storageRepo()->exists("$strBackupPath/" . FILE_MANIFEST . 
INI_COPY_EXT)}, false, "backup.manifest.copy missing"); #--------------------------------------------------------------------------------------------------------------------------- # No prior checksum, no compression, no page checksum, no extra, no delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, undef, false, $strBackupLabel, false, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, false, false, undef); - - $self->testResult(sub {storageTest()->exists($strFileRepo)}, true, 'non-compressed file exists in repo'); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize && $lResultRepoSize == $lFileSize), true, - 'file copied to repo successfully'); - - $self->testException(sub {storageRepo()->openRead("$strFileRepo.gz")}, ERROR_FILE_MISSING, - "unable to open '$strFileRepo.gz': No such file or directory"); - ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, $strHost, @@ -192,15 +170,15 @@ $lFileSize, $strFileHash, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); + BACKUP_FILE_COPY, + 16777216, + 16777216, + '1c7e00fd09b9dd11fc2966590b3e3274645dd031', + undef, + 16785408, + 8192, + 167854, + 8192); # Accumulator includes size of pg_control and file. Manifest saved so ManifestSaveCurrent returns to 0 $self->testResult(($lSizeCurrent == ($lPgControlSize + $lFileSize) && $lManifestSaveCurrent == 0), true, @@ -212,39 +190,12 @@ # Backup.manifest not written but backup.manifest.copy written because size threshold met $self->testResult(sub {storageTest()->exists("$strBackupPath/" . FILE_MANIFEST . 
INI_COPY_EXT)}, true, 'backup.manifest.copy exists in repo'); - $self->testException(sub {storageRepo()->openRead("$strBackupPath/" . FILE_MANIFEST)}, ERROR_FILE_MISSING, - "unable to open '$strBackupPath/" . FILE_MANIFEST . "': No such file or directory"); - - storageTest()->remove($strFileRepo); - storageTest()->remove($strPgControlRepo); - - #--------------------------------------------------------------------------------------------------------------------------- - # No prior checksum, yes compression, yes page checksum, no extra, no delta, no hasReference - $self->testException(sub {backupFile($strFileDb, $strRepoFile, $lFileSize, undef, true, - $strBackupLabel, true, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, false, false, undef)}, ERROR_ASSERT, - "iWalId is required in Backup::Filter::PageChecksum->new"); - - # Build the lsn start parameter to pass to the extra function - my $hStartLsnParam = - { - iWalId => 0xFFFF, - iWalOffset => 0xFFFF, - }; + $self->testResult( + sub {storageRepo()->exists("$strBackupPath/" . 
FILE_MANIFEST)}, false, 'backup.manifest.copy missing in repo'); #--------------------------------------------------------------------------------------------------------------------------- - # No prior checksum, yes compression, yes page checksum, yes extra, no delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, undef, true, $strBackupLabel, true, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, $hStartLsnParam, false, false, undef); - - $self->testResult(sub {storageTest()->exists("$strFileRepo.gz")}, true, 'compressed file exists in repo'); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultRepoSize == $lRepoFileCompressSize && $rResultExtra->{bValid}), true, 'file copied to repo successfully'); - - # Only the compressed version of the file exists - $self->testException(sub {storageRepo()->openRead("$strFileRepo")}, ERROR_FILE_MISSING, - "unable to open '$strFileRepo': No such file or directory"); + # Set up page checksum result + $rResultExtra = {'valid' => true,'align' => true}; ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, @@ -255,15 +206,15 @@ $lFileSize, $strFileHash, true, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); + BACKUP_FILE_COPY, + 16777216, + 3646899, + '1c7e00fd09b9dd11fc2966590b3e3274645dd031', + $rResultExtra, + 16785408, + 16785408, + 167854, + 0); # File is compressed in repo so make sure repo-size added to manifest $self->testResult(sub {$oBackupManifest->test( @@ -273,45 +224,15 @@ MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_CHECKSUM_PAGE, $rResultExtra->{bValid})}, true, "checksum page set"); - # Save the compressed file for later test - executeTest('mv ' . 
"$strFileRepo.gz $strFileRepo.gz.SAVE"); - - #--------------------------------------------------------------------------------------------------------------------------- - # Add a segment number for bChecksumPage code coverage - executeTest('cp ' . "$strFileDb $strFileDb.1"); - - # No prior checksum, no compression, yes page checksum, yes extra, no delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile("$strFileDb.1", "$strRepoFile.1", $lFileSize, undef, true, $strBackupLabel, false, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, $hStartLsnParam, false, false, undef); - - $self->testResult(sub {storageTest()->exists("$strFileRepo.1")}, true, 'non-compressed segment file exists in repo'); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultRepoSize == $lFileSize && $rResultExtra->{bValid}), true, 'segment file copied to repo successfully'); - # Set a section in the manifest to ensure it is removed in the next test $oBackupManifest->set( MANIFEST_SECTION_TARGET_FILE, "$strRepoFile.1", MANIFEST_SUBKEY_CHECKSUM, $strResultCopyChecksum); $self->testResult(sub {$oBackupManifest->test(MANIFEST_SECTION_TARGET_FILE, MANIFEST_TARGET_PGDATA . "/$strFileName.1")}, - true, MANIFEST_TARGET_PGDATA . "/$strFileName.1 section exists in manifest"); + true, MANIFEST_TARGET_PGDATA . 
"/$strFileName.1 section exists in manifest - skip file"); #--------------------------------------------------------------------------------------------------------------------------- - # Remove the db file and try to back it up - storageTest()->remove("$strFileDb.1"); - - # No prior checksum, no compression, no page checksum, no extra, No delta, no hasReference, no db file - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile("$strFileDb.1", "$strRepoFile.1", $lFileSize, undef, false, $strBackupLabel, - false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, false, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_SKIP && !defined($strResultCopyChecksum) && - !defined($lResultRepoSize) && !defined($lResultCopySize)), true, "db file missing - $strRepoFile.1 file skipped"); - - # Delta not set so file still exists in repo - $self->testResult(sub {storageTest()->exists("$strFileRepo.1")}, true, ' delta not set - file exists in repo'); - + # Removed db file is removed from manifest ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, $strHost, @@ -321,30 +242,23 @@ $lFileSize, $strFileHash, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); + BACKUP_FILE_SKIP, + undef, + undef, + undef, + undef, + 16785408, + 33562624, + 167854, + 0); $self->testResult(sub {$oBackupManifest->test(MANIFEST_SECTION_TARGET_FILE, "$strRepoFile.1")}, false, " $strRepoFile.1 section removed from manifest"); - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, no hasReference, no db file - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile("$strFileDb.1", MANIFEST_TARGET_PGDATA . 
"/$strFileName.1", $lFileSize, $strFileHash, false, $strBackupLabel, - false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_SKIP && !defined($strResultCopyChecksum) && - !defined($lResultRepoSize)), true, "db file missing - delta $strRepoFile.1 file skipped"); + # Add back the section + $oBackupManifest->set(MANIFEST_SECTION_TARGET_FILE, "$strRepoFile.1"); - $self->testResult(sub {storageTest()->exists("$strFileRepo.1")}, false, ' delta set - file removed from repo'); - - # Code path for host not defined for logged message of skipped file + # Code coverage for code path when host not defined for logged message of skipped file ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, undef, @@ -354,100 +268,21 @@ $lFileSize, $strFileHash, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); - - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, no hasReference, no db file, - # do not ignoreMissing - $self->testException(sub {backupFile("$strFileDb.1", "$strRepoFile.1", $lFileSize, $strFileHash, - false, $strBackupLabel, false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, false, undef, true, false, undef)}, - ERROR_FILE_MISSING, "unable to open '$strFileDb.1': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - # Restore the compressed file - executeTest('mv ' . 
"$strFileRepo.gz.SAVE $strFileRepo.gz"); - - # Yes prior checksum, yes compression, no page checksum, no extra, yes delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, $strFileHash, false, $strBackupLabel, - true, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_CHECKSUM && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize), true, 'db checksum and repo same - no copy file'); - - #--------------------------------------------------------------------------------------------------------------------------- - # DB Checksum mismatch - storageTest()->remove("$strFileRepo", {bIgnoreMissing => true}); - # Save the compressed file for later test - executeTest('mv ' . "$strFileRepo.gz $strFileRepo.gz.SAVE"); - - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, $strFileHash . 
"ff", false, - $strBackupLabel, false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize && $lResultRepoSize == $lFileSize), true, 'db checksum mismatch - copy file'); - - #--------------------------------------------------------------------------------------------------------------------------- - # DB file size mismatch - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize + 1, $strFileHash, false, $strBackupLabel, false, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize && $lResultRepoSize == $lFileSize), true, 'db file size mismatch - copy file'); - - #--------------------------------------------------------------------------------------------------------------------------- - # Repo mismatch - - # Restore the compressed file as if non-compressed so checksum won't match - executeTest('cp ' . 
"$strFileRepo.gz.SAVE $strFileRepo"); - - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, $strFileHash, false, $strBackupLabel, false, - cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_RECOPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize && $lResultRepoSize == $lFileSize), true, 'repo checksum mismatch - recopy file'); - - # Restore the compressed file - executeTest('mv ' . "$strFileRepo.gz.SAVE $strFileRepo.gz"); - - # Yes prior checksum, yes compression, no page checksum, no extra, no delta, no hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize + 1, $strFileHash, false, - $strBackupLabel, true, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, false, false, undef); + BACKUP_FILE_SKIP, + undef, + undef, + undef, + undef, + 16785408, + 50339840, + 167854, + 0); - $self->testResult(($iResultCopyResult == BACKUP_FILE_RECOPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize), true, 'repo size mismatch - recopy file'); + $self->testResult(sub {$oBackupManifest->test(MANIFEST_SECTION_TARGET_FILE, "$strRepoFile.1")}, + false, " $strRepoFile.1 section removed from manifest on undef host"); #--------------------------------------------------------------------------------------------------------------------------- - # Has reference - # Set a reference in the manifest to ensure it is removed after backupManifestUpdate - $oBackupManifest->set(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, MANIFEST_SUBKEY_REFERENCE, BOGUS); - - $self->testResult(sub {$oBackupManifest->test(MANIFEST_SECTION_TARGET_FILE, $strRepoFile, 
MANIFEST_SUBKEY_REFERENCE, - BOGUS)}, true, "$strRepoFile reference section exists in manifest"); - - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, yes hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize + 1, $strFileHash, false, - $strBackupLabel, false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, true, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_COPY && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize && $lResultRepoSize == $lFileSize), true, 'db file size mismatch has reference - copy'); - - # Code path to ensure reference is removed + # Has reference - Code path to ensure reference is removed ($lSizeCurrent, $lManifestSaveCurrent) = backupManifestUpdate( $oBackupManifest, $strHost, @@ -457,15 +292,15 @@ $lFileSize, $strFileHash, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, - $lManifestSaveSize, - $lManifestSaveCurrent); + BACKUP_FILE_COPY, + 16777216, + 16777216, + '1c7e00fd09b9dd11fc2966590b3e3274645dd031', + undef, + 16785408, + 67117056, + 167854, + 0); # Confirm reference to prior backup removed $self->testResult(sub {$oBackupManifest->test(MANIFEST_SECTION_TARGET_FILE, MANIFEST_TARGET_PGDATA . 
"/$strFileName.", @@ -474,13 +309,6 @@ #--------------------------------------------------------------------------------------------------------------------------- # BACKUP_FILE_NOOP - # Yes prior checksum, no compression, no page checksum, no extra, yes delta, yes hasReference - ($iResultCopyResult, $lResultCopySize, $lResultRepoSize, $strResultCopyChecksum, $rResultExtra) = - backupFile($strFileDb, $strRepoFile, $lFileSize, $strFileHash, false, - $strBackupLabel, false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, true, undef); - - $self->testResult(($iResultCopyResult == BACKUP_FILE_NOOP && $strResultCopyChecksum eq $strFileHash && - $lResultCopySize == $lFileSize), true, 'db file same has reference - noop'); # Calculate running counts my $lSizeCurrentAfter = $lSizeCurrent + $lFileSize; @@ -498,27 +326,18 @@ $lFileSize, $strFileHash, false, - $iResultCopyResult, - $lResultCopySize, - $lResultRepoSize, - $strResultCopyChecksum, - $rResultExtra, - $lSizeTotal, - $lSizeCurrent, + BACKUP_FILE_NOOP, + 16777216, + undef, + '1c7e00fd09b9dd11fc2966590b3e3274645dd031', + undef, + 16785408, + 83894272, $lManifestSaveSize, - $lManifestSaveCurrent); + 0); $self->testResult(($lSizeCurrent ==$lSizeCurrentAfter && $lManifestSaveCurrent == $lManifestSaveCurrentAfter), true, ' running counts updated'); - - #--------------------------------------------------------------------------------------------------------------------------- - # Remove file from repo. No reference so should hard error since this means sometime between the building of the manifest - # for the aborted backup, the file went missing from the aborted backup dir. 
- storageTest()->remove("$strFileRepo", {bIgnoreMissing => true}); - - $self->testException(sub {backupFile($strFileDb, $strRepoFile, $lFileSize, $strFileHash, - false, $strBackupLabel, false, cfgOption(CFGOPT_COMPRESS_LEVEL), $lFileTime, true, undef, true, false, undef)}, - ERROR_FILE_MISSING, "unable to open '$strFileRepo': No such file or directory"); } ################################################################################################################################ @@ -537,6 +356,7 @@ $lSizeCurrent = 0; $lManifestSaveSize = $lFileSize * 2; $lManifestSaveCurrent = 0; + $rResultExtra = undef; $self->testResult(sub {backupManifestUpdate( $oBackupManifest, @@ -597,7 +417,7 @@ $lManifestSaveCurrent)}, ERROR_ASSERT, "$strFileDb should have calculated page checksums"); - $rResultExtra->{bValid} = false; + $rResultExtra->{valid} = false; $self->testException(sub {backupManifestUpdate( $oBackupManifest, $strHost, @@ -616,9 +436,9 @@ $lSizeCurrent, $lManifestSaveSize, $lManifestSaveCurrent)}, - ERROR_ASSERT, "bAlign flag should have been set for misaligned page"); + ERROR_ASSERT, "align flag should have been set for misaligned page"); - $rResultExtra->{bAlign} = true; + $rResultExtra->{align} = true; $self->testException(sub {backupManifestUpdate( $oBackupManifest, $strHost, @@ -637,9 +457,9 @@ $lSizeCurrent, $lManifestSaveSize, $lManifestSaveCurrent)}, - ERROR_ASSERT, "bAlign flag should have been set for misaligned page"); + ERROR_ASSERT, "align flag should have been set for misaligned page"); - $rResultExtra->{bAlign} = false; + $rResultExtra->{align} = false; $self->testResult(sub {backupManifestUpdate( $oBackupManifest, $strHost, diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Common/CommonHttpClientPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Common/CommonHttpClientPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Common/CommonHttpClientPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ 
pgbackrest-2.16/test/lib/pgBackRestTest/Module/Common/CommonHttpClientPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,173 +0,0 @@ -#################################################################################################################################### -# S3 Request Tests -#################################################################################################################################### -package pgBackRestTest::Module::Common::CommonHttpClientPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - - use IO::Socket::SSL; -use POSIX qw(strftime); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Http::Client; -use pgBackRest::Common::Log; -use pgBackRest::Common::Wait; - -use pgBackRestTest::Common::ContainerTest; -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# Port to use for testing -#################################################################################################################################### -use constant HTTPS_TEST_PORT => 9443; - -#################################################################################################################################### -# httpsServerResponse -#################################################################################################################################### -sub httpsServerResponse -{ - my $self = shift; - my $iResponseCode = shift; - my $strContent = shift; - - # Write header - 
$self->{oConnection}->write("HTTP/1.1 ${iResponseCode} GenericMessage\r\n"); - $self->{oConnection}->write(HTTP_HEADER_CONTENT_LENGTH . ': ' . (defined($strContent) ? length($strContent) : 0) . "\r\n"); - - # Write new line before content (even if there isn't any) - $self->{oConnection}->write("\r\n"); - - # Write content - if (defined($strContent)) - { - $self->{oConnection}->write($strContent); - } - - # This will block until the connection is closed by the client - $self->{oConnection}->read(); -} - -#################################################################################################################################### -# httpsServerAccept -#################################################################################################################################### -sub httpsServerAccept -{ - my $self = shift; - - # Wait for a connection - $self->{oConnection} = $self->{oSocketServer}->accept() - or confess "failed to accept or handshake $!, $SSL_ERROR"; - &log(INFO, " * socket server connected"); -} - -#################################################################################################################################### -# httpsServer -#################################################################################################################################### -sub httpsServer -{ - my $self = shift; - my $fnServer = shift; - - # Fork off the server - if (fork() == 0) - { - # Run server function - $fnServer->(); - - exit 0; - } -} - -#################################################################################################################################### -# Start the https testing server -#################################################################################################################################### -sub initModule -{ - my $self = shift; - - # Open the domain socket - $self->{oSocketServer} = IO::Socket::SSL->new( - LocalAddr => '127.0.0.1', LocalPort => HTTPS_TEST_PORT, Listen => 1, 
SSL_cert_file => CERT_FAKE_SERVER, - SSL_key_file => CERT_FAKE_SERVER_KEY) - or confess "unable to open https server for testing: $!"; - &log(INFO, " * socket server open"); -} - -#################################################################################################################################### -# Stop the https testing server -#################################################################################################################################### -sub cleanModule -{ - my $self = shift; - - # Shutdown server - $self->{oSocketServer}->close(); - &log(INFO, " * socket server closed"); -} - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Test variables - my $strTestHost = '127.0.0.1'; - my $strTestData = 'TESTDATA'; - - ################################################################################################################################ - if ($self->begin('content-length defined')) - { - $self->httpsServer(sub - { - $self->httpsServerAccept(); - $self->httpsServerResponse(200, $strTestData); - }); - - #--------------------------------------------------------------------------------------------------------------------------- - my $oHttpClient = $self->testResult( - sub {new pgBackRest::Common::Http::Client( - $strTestHost, HTTP_VERB_GET, {iPort => HTTPS_TEST_PORT, bVerifySsl => false})}, - '[object]', 'new http client'); - - $self->testResult(sub {${$oHttpClient->responseBody()}}, $strTestData, 'response body read'); - } - - ################################################################################################################################ - if ($self->begin('retry')) - { - $self->httpsServer(sub - { - $self->httpsServerAccept(); - 
$self->{oConnection}->write("HTTP/1.1 404 Error\r\nBogus-Header\r\n\r\n"); - - $self->httpsServerAccept(); - $self->{oConnection}->write("HTTP/1.1 404 Error\r\nBogus-Header\r\n\r\n"); - - $self->httpsServerAccept(); - $self->httpsServerResponse(200, $strTestData); - }); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {new pgBackRest::Common::Http::Client( - $strTestHost, HTTP_VERB_GET, {iPort => HTTPS_TEST_PORT, bVerifySsl => false, iTryTotal => 1})}, - ERROR_PROTOCOL, "http header 'Bogus-Header' requires colon separator"); - - $self->testResult( - sub {new pgBackRest::Common::Http::Client( - $strTestHost, HTTP_VERB_GET, {iPort => HTTPS_TEST_PORT, bVerifySsl => false, iTryTotal => 2})}, - '[object]', 'successful retries'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Common/CommonIniPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Common/CommonIniPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Common/CommonIniPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Common/CommonIniPerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -297,42 +297,6 @@ $self->testException(sub {new pgBackRest::Common::Ini($strTestFile)}, ERROR_CRYPTO, "unable to parse '$strTestFile'" . 
"\nHINT: Is or was the repo encrypted?"); - - # Encryption - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("rm -rf ${strTestFile}*"); - - my $strCipherPass = 'x'; - my $strCipherPassSub = 'y'; - - # Unencrypted storage but a passphrase passed - $self->testException(sub {new pgBackRest::Common::Ini($strTestFile, {bLoad => false, - strCipherPass => $strCipherPass})}, ERROR_ASSERT, - "a user passphrase and sub passphrase are both required when encrypting"); - - # Unencrypted storage but a sub passphrase passed - $self->testException(sub {new pgBackRest::Common::Ini($strTestFile, {bLoad => false, - strCipherPassSub => $strCipherPassSub})}, ERROR_ASSERT, - "a user passphrase and sub passphrase are both required when encrypting"); - - # Create Encrypted storage - my $oStorage = new pgBackRest::Storage::Local($self->testPath(), new pgBackRest::Storage::Posix::Driver(), - {strCipherType => CFGOPTVAL_REPO_CIPHER_TYPE_AES_256_CBC, strCipherPassUser => $strCipherPass}); - - $self->testException(sub {new pgBackRest::Common::Ini($strTestFile, {oStorage => $oStorage})}, ERROR_CRYPTO, - "passphrase is required when storage is encrypted"); - - $self->testException(sub {new pgBackRest::Common::Ini($strTestFile, {bLoad => false, oStorage => $oStorage, - strCipherPass => $strCipherPass})}, ERROR_ASSERT, - "a user passphrase and sub passphrase are both required when encrypting"); - - $oIni = $self->testResult(sub { - new pgBackRest::Common::Ini( - $strTestFile, - {bLoad => false, oStorage => $oStorage, strCipherPass => $strCipherPass, strCipherPassSub => $strCipherPassSub})}, - '[object]', 'create new ini with encryption passphrases'); - $self->testResult(sub {($oIni->cipherPassSub() eq $strCipherPassSub) && - ($oIni->cipherPass() eq $strCipherPass)}, true, ' new ini has encryption passphrases'); } 
################################################################################################################################ diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Manifest/ManifestAllPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Manifest/ManifestAllPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Manifest/ManifestAllPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Manifest/ManifestAllPerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -198,8 +198,8 @@ # Attempt to save without proper path #--------------------------------------------------------------------------------------------------------------------------- - $self->testException(sub {$oManifest->save()}, ERROR_PATH_MISSING, - "unable to open '" . $strBackupManifestFile . "': No such file or directory"); + $self->testException(sub {$oManifest->save()}, ERROR_FILE_MISSING, + "unable to open file '${strBackupManifestFile}' for write in missing path"); # Create path and save #--------------------------------------------------------------------------------------------------------------------------- @@ -226,8 +226,8 @@ # Build error if offline = true and no tablespace path #--------------------------------------------------------------------------------------------------------------------------- - $self->testException(sub {$oManifest->build(storageDb(), $self->{strDbPath}, undef, false, false)}, ERROR_FILE_MISSING, - "unable to stat '" . $self->{strDbPath} . "/" . MANIFEST_TARGET_PGTBLSPC . "': No such file or directory"); + $self->testException(sub {$oManifest->build(storageDb(), $self->{strDbPath}, undef, false, false)}, ERROR_PATH_MISSING, + "unable to list file info for missing path '" . $self->{strDbPath} . "/" . MANIFEST_TARGET_PGTBLSPC . 
"'"); # bOnline = true tests #--------------------------------------------------------------------------------------------------------------------------- @@ -236,7 +236,11 @@ {bLoad => false, strDbVersion => PG_VERSION_94, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_94)}); # Create expected manifest from base + my $oStorageTemp = $oManifestBase->{oStorage}; + $oManifestBase->{oStorage} = undef; my $oManifestExpected = dclone($oManifestBase); + $oManifestBase->{oStorage} = $oStorageTemp; + $oManifestExpected->{oStorage} = $oStorageTemp; # Add global/pg_control file and PG_VERSION file and create a directory with a different modes than default storageDb()->put(storageDb()->openWrite($self->{strDbPath} . '/' . DB_FILE_PGCONTROL, @@ -523,7 +527,11 @@ # Unskip code path coverage #--------------------------------------------------------------------------------------------------------------------------- + $oStorageTemp = $oManifestExpected->{oStorage}; + $oManifestExpected->{oStorage} = undef; my $oManifestExpectedUnskip = dclone($oManifestExpected); + $oManifestExpected->{oStorage} = $oStorageTemp; + $oManifestExpectedUnskip->{oStorage} = $oStorageTemp; # Change DB version to 93 $oManifest = new pgBackRest::Manifest( @@ -557,7 +565,7 @@ $oManifest = new pgBackRest::Manifest( $strBackupManifestFile, {bLoad => false, strDbVersion => PG_VERSION_91, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_91)}); - $oManifestExpectedUnskip->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_91); + $oManifestExpectedUnskip->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_91 . 
''); $oManifestExpectedUnskip->numericSet(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_CATALOG, undef, $self->dbCatalogVersion(PG_VERSION_91)); @@ -642,7 +650,7 @@ $oManifest = new pgBackRest::Manifest( $strBackupManifestFile, {bLoad => false, strDbVersion => PG_VERSION_84, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_84)}); - $oManifestExpectedUnskip->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_84); + $oManifestExpectedUnskip->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_84 . ''); $oManifestExpectedUnskip->numericSet(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_CATALOG, undef, $self->dbCatalogVersion(PG_VERSION_84)); @@ -954,7 +962,7 @@ storageDb()->remove('postgresql.auto.conf'); storageDb()->remove('hosts'); storageDb()->remove('pg_log/logfile'); - storageDb()->remove('global/exclude', {bRecurse => true}); + storageDb()->pathRemove('global/exclude', {bRecurse => true}); # Reload the manifest with version < 9.0 #--------------------------------------------------------------------------------------------------------------------------- @@ -963,7 +971,7 @@ {bLoad => false, strDbVersion => PG_VERSION_84, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_84)}); # Catalog not stored in < 9.0 - $oManifestExpected->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_84); + $oManifestExpected->set(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_DB_VERSION, undef, PG_VERSION_84 . 
''); $oManifestExpected->numericSet(MANIFEST_SECTION_BACKUP_DB, MANIFEST_KEY_CATALOG, undef, $self->dbCatalogVersion(PG_VERSION_84)); @@ -1229,7 +1237,11 @@ {bLoad => false, strDbVersion => PG_VERSION_94, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_94)}); # Create expected manifest from base + my $oStorageTemp = $oManifestBase->{oStorage}; + $oManifestBase->{oStorage} = undef; my $oManifestExpected = dclone($oManifestBase); + $oManifestBase->{oStorage} = $oStorageTemp; + $oManifestExpected->{oStorage} = $oStorageTemp; # Future timestamp on file #--------------------------------------------------------------------------------------------------------------------------- @@ -1255,7 +1267,11 @@ # Future timestamp in last manifest #--------------------------------------------------------------------------------------------------------------------------- + $oStorageTemp = $oManifestExpected->{oStorage}; + $oManifestExpected->{oStorage} = undef; my $oLastManifest = dclone($oManifestExpected); + $oManifestExpected->{oStorage} = $oStorageTemp; + $oLastManifest->{oStorage} = $oStorageTemp; # Set a backup label $oLastManifest->set(MANIFEST_SECTION_BACKUP, MANIFEST_KEY_LABEL, undef, BOGUS); @@ -1507,7 +1523,12 @@ { my $oManifest = new pgBackRest::Manifest($strBackupManifestFile, {bLoad => false, strDbVersion => PG_VERSION_94, iDbCatalogVersion => $self->dbCatalogVersion(PG_VERSION_94)}); + + my $oStorageTemp = $oManifestBase->{oStorage}; + $oManifestBase->{oStorage} = undef; my $oManifestExpected = dclone($oManifestBase); + $oManifestBase->{oStorage} = $oStorageTemp; + $oManifestExpected->{oStorage} = $oStorageTemp; # Add a bogus file - all traces to be removed after the manifest has been built to simulate an inital manifest and avoid # missing files error diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockAllTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockAllTest.pm --- 
pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockAllTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockAllTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -184,10 +184,10 @@ pageBuild($tBasePage, 0) . pageBuild($tBasePage, 1) . pageBuild($tBasePage, 2) . - pageBuild($tBasePage, 0, 0xFFFF, 0xFFFF); + pageBuild($tBasePage, 0, 0xFFFFFFFF, 0xFFFFFFFF); $oHostDbMaster->manifestFileCreate( - \%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/33000', $tPageValid, '4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f', + \%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/33000', $tPageValid, '7a16d165e4775f7c92e8cdf60c0af57313f0bf90', $lTime); my $iBlockOffset = 32767 * 131072; @@ -196,11 +196,11 @@ pageBuild($tBasePage, $iBlockOffset + 0) . pageBuild($tBasePage, $iBlockOffset + 1) . ("\0" x 8192) . - pageBuild($tBasePage, 0, 0xFFFF, 0xFFFF); + pageBuild($tBasePage, 0, 0xFFFFFFFF, 0xFFFFFFFF); $oHostDbMaster->manifestFileCreate( \%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/33000.32767', $tPageValidSeg32767, - '21e2c7c1a326682c07053b7d6a5a40dbd49c2ec5', $lTime); + '6e99b589e550e68e934fd235ccba59fe5b592a9e', $lTime); my $tPageInvalid33001 = pageBuild($tBasePage, 1) . 
@@ -282,7 +282,7 @@ # Unlog and temp files to ignore (unlog _init will NOT be ignored) $oHostDbMaster->manifestFileCreate(\%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/44000_init', $tPageValid, - '4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f', $lTime); + '7a16d165e4775f7c92e8cdf60c0af57313f0bf90', $lTime); $oHostDbMaster->dbFileCreate(\%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/44000', 'IGNORE'); $oHostDbMaster->dbFileCreate(\%oManifest, MANIFEST_TARGET_PGDATA, 'base/32768/t333_44000', 'IGNORE'); @@ -381,7 +381,7 @@ # Create file with special characters $oHostDbMaster->manifestFileCreate( - \%oManifest, MANIFEST_TARGET_PGDATA, 'special-@!#$^&*()-_+~`{}[]\|:;"<>\',.?%', undef, undef, $lTime, undef, true); + \%oManifest, MANIFEST_TARGET_PGDATA, 'special-!_.*\'()&!@;:+,?', undef, undef, $lTime, undef, true); $oManifest{&MANIFEST_SECTION_BACKUP_OPTION}{&MANIFEST_KEY_PROCESS_MAX} = 1; @@ -878,7 +878,7 @@ $oHostBackup->backup( $strType, '$PGDATA is a substring of valid tblspc excluding / (file missing err expected)', - {oExpectedManifest => \%oManifest, iExpectedExitStatus => ERROR_FILE_MISSING}); + {oExpectedManifest => \%oManifest, iExpectedExitStatus => ERROR_PATH_MISSING}); testFileRemove("${strTblSpcPath}/99999"); } @@ -1311,7 +1311,7 @@ # Restore checksum values for next test $oManifest{&MANIFEST_SECTION_TARGET_FILE}{'pg_data/base/32768/33000'}{&MANIFEST_SUBKEY_CHECKSUM} = - '4a383e4fb8b5cd2a4e8fab91ef63dce48e532a2f'; + '7a16d165e4775f7c92e8cdf60c0af57313f0bf90'; $oManifest{&MANIFEST_SECTION_TARGET_FILE}{'pg_data/base/32768/33001'}{&MANIFEST_SUBKEY_CHECKSUM} = '6bf316f11d28c28914ea9be92c00de9bea6d9a6b'; $oManifest{&MANIFEST_SECTION_TARGET_FILE}{'pg_tblspc/2/PG_9.4_201409291/32768/tablespace2.txt'} diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockArchiveStopTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockArchiveStopTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockArchiveStopTest.pm 2019-06-25 
12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockArchiveStopTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -73,14 +73,12 @@ true, $self->expect(), {bHostBackup => $bRemote, bCompress => $bCompress, bArchiveAsync => true, bS3 => $bS3, bRepoEncrypt => $bEncrypt}); - my $oStorage = storageRepo(); - # Create compression extension my $strCompressExt = $bCompress ? qw{.} . COMPRESS_EXT : ''; # Create the wal path my $strWalPath = $oHostDbMaster->dbBasePath() . '/pg_xlog'; - $oStorage->pathCreate($strWalPath, {bCreateParent => true}); + storageTest()->pathCreate($strWalPath, {bCreateParent => true}); # Create the test path for pg_control and generate pg_control for stanza-create storageTest()->pathCreate($oHostDbMaster->dbBasePath() . '/' . DB_PATH_GLOBAL, {bCreateParent => true}); @@ -97,7 +95,7 @@ if ($iError == 0) { $oHostBackup->infoMunge( - $oStorage->pathGet(STORAGE_REPO_ARCHIVE . qw{/} . ARCHIVE_INFO_FILE), + storageRepo()->pathGet(STORAGE_REPO_ARCHIVE . qw{/} . ARCHIVE_INFO_FILE), {&INFO_ARCHIVE_SECTION_DB => {&INFO_ARCHIVE_KEY_DB_VERSION => '8.0'}, &INFO_ARCHIVE_SECTION_DB_HISTORY => {1 => {&INFO_ARCHIVE_KEY_DB_VERSION => '8.0'}}}); } @@ -121,12 +119,12 @@ # Fix the database version if ($iError == 0) { - $oHostBackup->infoRestore($oStorage->pathGet(STORAGE_REPO_ARCHIVE . qw{/} . ARCHIVE_INFO_FILE)); + $oHostBackup->infoRestore(storageRepo()->pathGet(STORAGE_REPO_ARCHIVE . qw{/} . ARCHIVE_INFO_FILE)); } #--------------------------------------------------------------------------------------------------------------------------- $self->testResult( - sub {$oStorage->list( + sub {storageRepo()->list( STORAGE_REPO_ARCHIVE . qw{/} . PG_VERSION_94 . 
'-1/0000000100000001')}, "000000010000000100000001-${strWalHash}${strCompressExt}", 'segment 2-4 not pushed', {iWaitSeconds => 5}); @@ -135,7 +133,7 @@ $oHostDbMaster->archivePush($strWalPath, $strWalTestFile, 5); $self->testResult( - sub {$oStorage->list( + sub {storageRepo()->list( STORAGE_REPO_ARCHIVE . qw{/} . PG_VERSION_94 . '-1/0000000100000001')}, "(000000010000000100000001-${strWalHash}${strCompressExt}, " . "000000010000000100000005-${strWalHash}${strCompressExt})", diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockStanzaTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockStanzaTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Mock/MockStanzaTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Mock/MockStanzaTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -25,7 +25,6 @@ use pgBackRest::Manifest; use pgBackRest::Protocol::Storage::Helper; use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; use pgBackRest::Storage::Helper; use pgBackRestTest::Env::HostEnvTest; @@ -198,7 +197,7 @@ storageRepo()->copy( storageRepo()->openRead( STORAGE_REPO_ARCHIVE . "/${strArchiveTest}.gz", - {rhyFilter => [{strClass => STORAGE_FILTER_GZIP, rxyParam => [{strCompressType => STORAGE_DECOMPRESS}]}]}), + {rhyFilter => [{strClass => STORAGE_FILTER_GZIP, rxyParam => [STORAGE_DECOMPRESS, false]}]}), STORAGE_REPO_ARCHIVE . 
"/${strArchiveTest}"); $oHostBackup->stanzaCreate('force create archive.info from uncompressed file', diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Performance/PerformanceIoTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Performance/PerformanceIoTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Performance/PerformanceIoTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Performance/PerformanceIoTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,127 +0,0 @@ -#################################################################################################################################### -# I/O Performance Tests -#################################################################################################################################### -package pgBackRestTest::Module::Performance::PerformanceIoTest; - use parent 'pgBackRestTest::Env::ConfigEnvTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Storable qw(dclone); -use Time::HiRes qw(gettimeofday); - -use pgBackRest::Common::Log; -use pgBackRest::Config::Config; -use pgBackRest::Protocol::Helper; -use pgBackRest::Protocol::Storage::Helper; -use pgBackRest::Storage::Filter::Gzip; -use pgBackRest::Storage::Filter::Sha; -use pgBackRest::Storage::Helper; - -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# initModule 
-#################################################################################################################################### -sub initModule -{ - my $self = shift; - - # Load reference page data - my $tPageBin = ${storageTest()->get($self->dataPath() . '/filecopy.table.bin')}; - - # Create large test file - $self->{iTableLargeSize} = 32; - $self->{strTableLargeFile} = 'table-large.bin'; - - my $oFileWrite = storageTest()->openWrite($self->{strTableLargeFile}); - - for (my $iIndex = 0; $iIndex < $self->{iTableLargeSize}; $iIndex++) - { - $oFileWrite->write(\$tPageBin); - } - - $oFileWrite->close(); -} - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - ################################################################################################################################ - if ($self->begin("copy")) - { - # Setup the remote for testing remote storage - $self->optionTestSet(CFGOPT_STANZA, $self->stanza()); - $self->optionTestSet(CFGOPT_PG_PATH, $self->testPath()); - $self->optionTestSet(CFGOPT_REPO_PATH, $self->testPath()); - $self->optionTestSet(CFGOPT_LOG_PATH, $self->testPath()); - $self->optionTestSet(CFGOPT_REPO_HOST, 'localhost'); - $self->optionTestSet(CFGOPT_REPO_HOST_USER, $self->backrestUser()); - $self->configTestLoad(CFGCMD_RESTORE); - - protocolGet(CFGOPTVAL_REMOTE_TYPE_BACKUP, undef, {strBackRestBin => $self->backrestExe()}); - storageRepo(); - - # Setup file info - my $strFile = $self->{strTableLargeFile}; - my $strFileCopy = "${strFile}.copy"; - - my $iRunTotal = 1; - &log(INFO, "time is average of ${iRunTotal} run(s)"); - - foreach my $bGzip (false, true) - { - foreach my $bRemote (false, true) - { - my $rhyFilter; - - push(@{$rhyFilter}, {strClass => STORAGE_FILTER_SHA}); - 
push(@{$rhyFilter}, {strClass => STORAGE_FILTER_GZIP, rxyParam => [{iLevel => 6}]}) if ($bGzip); - - my $lTimeTotal = 0; - my $lTimeBegin; - - for (my $iIndex = 0; $iIndex < $iRunTotal; $iIndex++) - { - # Get the remote or local for writing - my $oStorageWrite = $bRemote ? storageRepo() : storageTest(); - - # Start the timer - $lTimeBegin = gettimeofday(); - - # Copy the file - storageTest()->copy( - storageTest()->openRead($strFile, {rhyFilter => $rhyFilter}), - $oStorageWrite->openWrite($strFileCopy)); - - # Record time - $lTimeTotal += gettimeofday() - $lTimeBegin; - - # Remove file so it can be copied again - executeTest("sudo rm " . $oStorageWrite->pathGet($strFileCopy)); - } - - # Calculate out output metrics - my $fExecutionTime = int($lTimeTotal * 1000 / $iRunTotal) / 1000; - my $fGbPerHour = int((60 * 60) * 1000 / ((1024 / $self->{iTableLargeSize}) * $fExecutionTime)) / 1000; - - &log(INFO, "sha1 1, gz ${bGzip}, rmt ${bRemote}: ${fExecutionTime}s, ${fGbPerHour} GB/hr"); - } - } - - # Destroy protocol object - protocolDestroy(); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Real/RealAllTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Real/RealAllTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Real/RealAllTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Real/RealAllTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -36,6 +36,8 @@ use pgBackRestTest::Env::Host::HostBackupTest; use pgBackRestTest::Env::Host::HostDbTest; use pgBackRestTest::Env::HostEnvTest; +use pgBackRestTest::Common::Storage; +use pgBackRestTest::Common::StoragePosix; #################################################################################################################################### # run @@ -270,7 +272,7 @@ executeTest("sudo chmod 400 ${strDir}"); $strComment = 'confirm master manifest->build executed'; - $oHostDbMaster->check($strComment, {iTimeout => 5, iExpectedExitStatus => 
ERROR_FILE_OPEN}); + $oHostDbMaster->check($strComment, {iTimeout => 5, iExpectedExitStatus => ERROR_PATH_OPEN}); executeTest("sudo rmdir ${strDir}"); # Providing a sufficient archive-timeout, verify that the check command runs successfully now with valid @@ -503,10 +505,10 @@ my $strComment = 'confirm standby manifest->build executed'; # If there is an invalid host, the final error returned from check will be the inability to resolve the name which is - # a read error instead of an open error + # an open error instead of a read error if (!$oHostDbStandby->bogusHost()) { - $oHostDbStandby->check($strComment, {iTimeout => 5, iExpectedExitStatus => ERROR_FILE_OPEN}); + $oHostDbStandby->check($strComment, {iTimeout => 5, iExpectedExitStatus => ERROR_PATH_OPEN}); } else { @@ -746,13 +748,19 @@ { my ($strSHA1, $lSize) = storageTest()->hashSize($strDb1TablePath); - # Create a zeroed sparse file in the test directory that is the same size as the filenode.map + # Create a zeroed sparse file in the test directory that is the same size as the filenode.map. We need to use the + # posix driver directly to do this because handles cannot be passed back from the C code. + my $oStorageTrunc = new pgBackRestTest::Common::Storage($self->testPath(), new pgBackRestTest::Common::StoragePosix()); + my $strTestTable = $self->testPath() . "/testtable"; - my $oDestinationFileIo = storageTest()->openWrite($strTestTable); + my $oDestinationFileIo = $oStorageTrunc->openWrite($strTestTable); $oDestinationFileIo->open(); # Truncate to the original size which will create a sparse file. - truncate($oDestinationFileIo->handle(), $lSize); + if (!truncate($oDestinationFileIo->handle(), $lSize)) + { + confess "unable to truncate '$strTestTable' with handle " . 
$oDestinationFileIo->handle(); + } $oDestinationFileIo->close(); # Confirm the test filenode.map and the database test1 filenode.map are zeroed diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Stanza/StanzaAllPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Stanza/StanzaAllPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Stanza/StanzaAllPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Stanza/StanzaAllPerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -98,8 +98,7 @@ $self->optionTestSetBool(CFGOPT_ONLINE, true); $self->configTestLoad(CFGCMD_STANZA_CREATE); - $self->testException(sub {(new pgBackRest::Stanza())}, ERROR_DB_CONNECT, - "could not connect to server: No such file or directory\n"); + $self->testException(sub {(new pgBackRest::Stanza())}, ERROR_DB_CONNECT, "unable to connect to.*"); $self->optionTestSetBool(CFGOPT_ONLINE, false); } @@ -280,7 +279,7 @@ # Change the permissions on the archived file so reconstruction fails executeTest('sudo chmod 220 ' . $strArchivedFile); $self->testException(sub {(new pgBackRest::Stanza())->stanzaCreate()}, ERROR_FILE_OPEN, - "unable to open '" . $strArchivedFile . "': Permission denied"); + "unable to open file '${strArchivedFile}' for read"); executeTest('sudo chmod 644 ' . $strArchivedFile); # Clear the cached repo settings and change repo settings to encrypted @@ -312,7 +311,10 @@ #--------------------------------------------------------------------------------------------------------------------------- storageRepo()->pathCreate(STORAGE_REPO_ARCHIVE . "/" . PG_VERSION_94 . "-1"); storageRepo()->pathCreate(STORAGE_REPO_ARCHIVE . "/" . PG_VERSION_94 . 
"-1/0000000100000001"); - storageRepo()->put($strArchivedFile, $tUnencryptedArchiveContent); + storageRepo()->put( + storageRepo()->openWrite( + $strArchivedFile, {strCipherPass => new pgBackRest::Archive::Info($self->{strArchivePath})->cipherPassSub()}), + $tUnencryptedArchiveContent); storageRepo()->pathCreate($strBackupPath); # Empty backup path - no backup in progress # Confirm encrypted and create the stanza with force @@ -477,7 +479,7 @@ #--------------------------------------------------------------------------------------------------------------------------- executeTest('sudo chmod 220 ' . $self->{strArchivePath}); $self->testException(sub {$oStanza->infoObject(STORAGE_REPO_ARCHIVE, $self->{strArchivePath})}, ERROR_FILE_OPEN, - "unable to open '" . $self->{strArchivePath} . "/archive.info': Permission denied"); + "unable to open file '" . $self->{strArchivePath} . "/archive.info' for read"); executeTest('sudo chmod 640 ' . $self->{strArchivePath}); # Reset force option -------- @@ -492,8 +494,8 @@ forceStorageRemove(storageRepo(), storageRepo()->pathGet(STORAGE_REPO_BACKUP . qw{/} . FILE_BACKUP_INFO . INI_COPY_EXT)); executeTest('sudo chmod 220 ' . storageRepo()->pathGet(STORAGE_REPO_BACKUP . qw{/} . FILE_BACKUP_INFO)); $self->testException(sub {$oStanza->infoObject(STORAGE_REPO_BACKUP, $self->{strBackupPath})}, ERROR_FILE_OPEN, - "unable to open '" . storageRepo()->pathGet(STORAGE_REPO_BACKUP . qw{/} . FILE_BACKUP_INFO) . - "': Permission denied"); + "unable to open file '" . storageRepo()->pathGet(STORAGE_REPO_BACKUP . qw{/} . FILE_BACKUP_INFO) . + "' for read"); executeTest('sudo chmod 640 ' . storageRepo()->pathGet(STORAGE_REPO_BACKUP . qw{/} . FILE_BACKUP_INFO)); } @@ -762,8 +764,8 @@ executeTest("sudo chown 7777 " . $self->{strArchivePath}); lockStop(); - $self->testException(sub {$oStanza->stanzaDelete()}, ERROR_FILE_OPEN, - "unable to remove file '" . $self->{strArchivePath} . "/" . ARCHIVE_INFO_FILE . 
"': Permission denied"); + $self->testException(sub {$oStanza->stanzaDelete()}, ERROR_FILE_REMOVE, + "unable to remove '" . $self->{strArchivePath} . "/" . ARCHIVE_INFO_FILE . "'"); # Remove the repo executeTest("sudo rm -rf " . $self->{strArchivePath}); diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterCipherBlockPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterCipherBlockPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterCipherBlockPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterCipherBlockPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,285 +0,0 @@ -#################################################################################################################################### -# Tests for Block Cipher -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageFilterCipherBlockPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Fcntl qw(O_RDONLY); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:random :crypto); -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::CipherBlock; -use pgBackRest::Storage::Posix::Driver; - -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - 
-#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Test data - my $strFile = $self->testPath() . qw{/} . 'file.txt'; - my $strFileEncrypt = $self->testPath() . qw{/} . 'file.enc.txt'; - my $strFileDecrypt = $self->testPath() . qw{/} . 'file.dcr.txt'; - my $strFileBin = $self->testPath() . qw{/} . 'file.bin'; - my $strFileBinEncrypt = $self->testPath() . qw{/} . 'file.enc.bin'; - my $strFileContent = 'TESTDATA'; - my $iFileLength = length($strFileContent); - my $oDriver = new pgBackRest::Storage::Posix::Driver(); - my $tCipherPass = 'areallybadkey'; - my $strCipherType = 'aes-256-cbc'; - my $tContent; - - ################################################################################################################################ - if ($self->begin('new()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - # Create an unencrypted file - executeTest("echo -n '${strFileContent}' | tee ${strFile}"); - - $self->testException( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFile), $strCipherType, $tCipherPass, {strMode => BOGUS})}, - ERROR_ASSERT, 'unknown cipher mode: ' . BOGUS); - - $self->testException( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFile), BOGUS, $tCipherPass)}, - ERROR_ASSERT, "invalid cipher name '" . BOGUS . "'"); - - $self->testException( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openWrite($strFile), $strCipherType, $tCipherPass, {strMode => BOGUS})}, - ERROR_ASSERT, 'unknown cipher mode: ' . 
BOGUS); - - $self->testException( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openWrite($strFile), BOGUS, $tCipherPass)}, - ERROR_ASSERT, "invalid cipher name '" . BOGUS . "'"); - } - - ################################################################################################################################ - if ($self->begin('read() and write()')) - { - my $tBuffer; - - #--------------------------------------------------------------------------------------------------------------------------- - # Create an plaintext file - executeTest("echo -n '${strFileContent}' | tee ${strFile}"); - - # Instantiate the cipher object - default action encrypt - my $oEncryptIo = $self->testResult(sub {new pgBackRest::Storage::Filter::CipherBlock($oDriver->openRead($strFile), - $strCipherType, $tCipherPass)}, '[object]', 'new encrypt file'); - - $self->testResult(sub {$oEncryptIo->read(\$tBuffer, 2)}, 16, ' read 16 bytes (header)'); - $self->testResult(sub {$oEncryptIo->read(\$tBuffer, 2)}, 16, ' read 16 bytes (data)'); - $self->testResult(sub {$oEncryptIo->read(\$tBuffer, 2)}, 0, ' read 0 bytes'); - - $self->testResult(sub {$tBuffer ne $strFileContent}, true, ' data read is encrypted'); - - $self->testResult(sub {$oEncryptIo->close()}, true, ' close'); - $self->testResult(sub {$oEncryptIo->close()}, false, ' close again'); - - # tBuffer is now encrypted - test write decrypts correctly - my $oDecryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock($oDriver->openWrite($strFileDecrypt), - $strCipherType, $tCipherPass, {strMode => STORAGE_DECRYPT})}, - '[object]', ' new decrypt file'); - - $self->testResult(sub {$oDecryptFileIo->write(\$tBuffer)}, 32, ' write decrypted'); - $self->testResult(sub {$oDecryptFileIo->close()}, true, ' close'); - - $self->testResult(sub {${$self->storageTest()->get($strFileDecrypt)}}, $strFileContent, ' data written is decrypted'); - - 
#--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = $strFileContent; - my $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock($oDriver->openWrite($strFileEncrypt), - $strCipherType, $tCipherPass)}, - '[object]', 'new write encrypt'); - - $tContent = ''; - $self->testResult(sub {$oEncryptFileIo->write(\$tContent)}, 0, ' attempt empty buffer write'); - - undef($tContent); - $self->testException( - sub {$oEncryptFileIo->write(\$tContent)}, ERROR_FILE_WRITE, - "unable to write to '${strFileEncrypt}': Use of uninitialized value"); - - # Encrypted length is not known so use tBuffer then test that tBuffer was encrypted - my $iWritten = $self->testResult(sub {$oEncryptFileIo->write(\$tBuffer)}, length($tBuffer), ' write encrypted'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - - $tContent = $self->storageTest()->get($strFileDecrypt); - $self->testResult(sub {defined($tContent) && $tContent ne $strFileContent}, true, ' data written is encrypted'); - - #--------------------------------------------------------------------------------------------------------------------------- - undef($tBuffer); - # Open encrypted file for decrypting - $oEncryptFileIo = - $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileEncrypt), $strCipherType, $tCipherPass, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'new read encrypted file, decrypt'); - - # Try to read more than the length of the data expected to be output from the decrypt and confirm the decrypted length is - # the same as the original decrypted content. 
- $self->testResult(sub {$oEncryptFileIo->read(\$tBuffer, $iFileLength+4)}, $iFileLength, ' read all bytes'); - - # Just because length is the same does not mean content is so confirm - $self->testResult($tBuffer, $strFileContent, ' data read is decrypted'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - - #--------------------------------------------------------------------------------------------------------------------------- - undef($tContent); - undef($tBuffer); - my $strFileBinHash = '1c7e00fd09b9dd11fc2966590b3e3274645dd031'; - - executeTest('cp ' . $self->dataPath() . "/filecopy.archive2.bin ${strFileBin}"); - $self->testResult( - sub {cryptoHashOne('sha1', ${storageTest()->get($strFileBin)})}, $strFileBinHash, 'bin test - check sha1'); - - $tContent = ${storageTest()->get($strFileBin)}; - - $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openWrite($strFileBinEncrypt), $strCipherType, $tCipherPass)}, - '[object]', ' new write encrypt'); - - $self->testResult(sub {$oEncryptFileIo->write(\$tContent)}, length($tContent), ' write encrypted'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - $self->testResult( - sub {cryptoHashOne('sha1', ${storageTest()->get($strFileBinEncrypt)}) ne $strFileBinHash}, true, - ' check sha1 different'); - - my $oEncryptBinFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileBinEncrypt), $strCipherType, $tCipherPass, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'new read encrypted bin file'); - - $self->testResult(sub {$oEncryptBinFileIo->read(\$tBuffer, 16777216)}, 16777216, ' read 16777216 bytes'); - $self->testResult(sub {cryptoHashOne('sha1', $tBuffer)}, $strFileBinHash, ' check sha1 same as original'); - $self->testResult(sub {$oEncryptBinFileIo->close()}, true, ' close'); - - # Try to read the file with the wrong passphrase - undef($tBuffer); - 
undef($oEncryptBinFileIo); - - $oEncryptBinFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileBinEncrypt), $strCipherType, BOGUS, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'new read Encrypted bin file with wrong passphrase'); - - $self->testResult(sub {$oEncryptBinFileIo->read(\$tBuffer, 16777216)}, 16777216, ' read all bytes'); - $self->testResult(sub {cryptoHashOne('sha1', $tBuffer) ne $strFileBinHash}, true, ' check sha1 NOT same as original'); - - # Test file against openssl to make sure they are compatible - #--------------------------------------------------------------------------------------------------------------------------- - undef($tBuffer); - - $self->storageTest()->put($strFile, $strFileContent); - - executeTest( - "openssl enc -k ${tCipherPass} -md sha1 -aes-256-cbc -in ${strFile} -out ${strFileEncrypt}"); - - $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileEncrypt), $strCipherType, $tCipherPass, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'read file encrypted by openssl'); - - $self->testResult(sub {$oEncryptFileIo->read(\$tBuffer, 16)}, 8, ' read 8 bytes'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - $self->testResult(sub {$tBuffer}, $strFileContent, ' check content same as original'); - - $self->storageTest()->remove($strFile); - $self->storageTest()->remove($strFileEncrypt); - - $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openWrite($strFileEncrypt), $strCipherType, $tCipherPass)}, - '[object]', 'write file to be read by openssl'); - - $self->testResult(sub {$oEncryptFileIo->write(\$tBuffer)}, 8, ' write 8 bytes'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - - executeTest( - "openssl enc -d -k ${tCipherPass} -md sha1 -aes-256-cbc -in ${strFileEncrypt} -out ${strFile}"); - - $self->testResult(sub 
{${$self->storageTest()->get($strFile)}}, $strFileContent, ' check content same as original'); - - # Test empty file against openssl to make sure they are compatible - #--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = ''; - - $self->storageTest()->put($strFile); - - executeTest( - "openssl enc -k ${tCipherPass} -md sha1 -aes-256-cbc -in ${strFile} -out ${strFileEncrypt}"); - - $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileEncrypt), $strCipherType, $tCipherPass, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'read empty file encrypted by openssl'); - - $self->testResult(sub {$oEncryptFileIo->read(\$tBuffer, 16)}, 0, ' read 0 bytes'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - $self->testResult(sub {$tBuffer}, '', ' check content same as original'); - - $self->storageTest()->remove($strFile); - $self->storageTest()->remove($strFileEncrypt); - - $oEncryptFileIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openWrite($strFileEncrypt), $strCipherType, $tCipherPass)}, - '[object]', 'write file to be read by openssl'); - - $self->testResult(sub {$oEncryptFileIo->write(\$tBuffer)}, 0, ' write 0 bytes'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, ' close'); - - executeTest( - "openssl enc -d -k ${tCipherPass} -md sha1 -aes-256-cbc -in ${strFileEncrypt} -out ${strFile}"); - - $self->testResult(sub {${$self->storageTest()->get($strFile)}}, undef, ' check content same as original'); - - # Error on empty file decrypt - an empty file that has been encrypted will be 32 bytes - #--------------------------------------------------------------------------------------------------------------------------- - undef($tBuffer); - $self->storageTest()->put($strFileEncrypt); - - $oEncryptFileIo = - $self->testResult( - sub {new 
pgBackRest::Storage::Filter::CipherBlock( - $oDriver->openRead($strFileEncrypt), $strCipherType, $tCipherPass, - {strMode => STORAGE_DECRYPT})}, - '[object]', 'new read empty attempt decrypt'); - - $self->testException(sub {$oEncryptFileIo->read(\$tBuffer, 16)}, ERROR_CRYPTO, 'cipher header missing'); - $self->testResult(sub {$oEncryptFileIo->close()}, true, 'close'); - - # OpenSSL should error on the empty file - executeTest( - "openssl enc -d -k ${tCipherPass} -md sha1 -aes-256-cbc -in ${strFileEncrypt} -out ${strFile}", - {iExpectedExitStatus => 1}); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterGzipPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterGzipPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterGzipPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterGzipPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,222 +0,0 @@ -#################################################################################################################################### -# Tests for Storage::Filter::Gzip module -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageFilterGzipPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Compress::Raw::Zlib qw(Z_OK Z_BUF_ERROR Z_DATA_ERROR); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:crypto); 
-use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Gzip; -use pgBackRest::Storage::Posix::Driver; - -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Test data - my $strFile = $self->testPath() . qw{/} . 'file.txt'; - my $strFileGz = "${strFile}.gz"; - my $strFileContent = 'TESTDATA'; - my $iFileLength = length($strFileContent); - my $oDriver = new pgBackRest::Storage::Posix::Driver(); - - ################################################################################################################################ - if ($self->begin('errorCheck()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openWrite($strFileGz))}, '[object]', 'new write'); - - $oGzipIo->{bWrite} = true; - $self->testException(sub {$oGzipIo->errorCheck(Z_DATA_ERROR)}, ERROR_FILE_WRITE, "unable to deflate '${strFileGz}'"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oGzipIo->errorCheck(Z_OK)}, Z_OK, 'Z_OK'); - $self->testResult(sub {$oGzipIo->errorCheck(Z_BUF_ERROR)}, Z_OK, 'Z_BUF_ERROR'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oGzipIo->{bWrite} = false; - $oGzipIo->{strCompressType} = STORAGE_DECOMPRESS; - $self->testException(sub {$oGzipIo->errorCheck(Z_DATA_ERROR)}, ERROR_FILE_READ, "unable to inflate '${strFileGz}'"); - } - - 
################################################################################################################################ - if ($self->begin('write()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openWrite($strFileGz), {lCompressBufferMax => 4})}, - '[object]', 'new write compress'); - - my $tBuffer = substr($strFileContent, 0, 2); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 2, ' write 2 bytes'); - $tBuffer = substr($strFileContent, 2, 2); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 2, ' write 2 bytes'); - $tBuffer = substr($strFileContent, 4, 2); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 2, ' write 2 bytes'); - $tBuffer = substr($strFileContent, 6, 2); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 2, ' write 2 bytes'); - $tBuffer = ''; - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 0, ' write 0 bytes'); - - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - - executeTest("gzip -d ${strFileGz}"); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, ' check content'); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("gzip ${strFile}"); - my $tFile = ${storageTest()->get($strFileGz)}; - - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip( - $oDriver->openWrite($strFile), {strCompressType => STORAGE_DECOMPRESS})}, '[object]', 'new write decompress'); - - $tBuffer = substr($tFile, 0, 10); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, 10, ' write bytes'); - $tBuffer = substr($tFile, 10); - $self->testResult(sub {$oGzipIo->write(\$tBuffer)}, length($tFile) - 10, ' write bytes'); - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - - $self->testResult(sub 
{${storageTest()->get($strFile)}}, $strFileContent, ' check content'); - - } - - ################################################################################################################################ - if ($self->begin('read()')) - { - my $tBuffer; - - #--------------------------------------------------------------------------------------------------------------------------- - my $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openWrite($strFileGz), {bWantGzip => false, iLevel => 3})}, - '[object]', 'new write compress'); - $self->testResult($oGzipIo->{iLevel}, 3, ' check level'); - $self->testResult(sub {$oGzipIo->write(\$strFileContent, $iFileLength)}, $iFileLength, ' write'); - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip( - $oDriver->openRead($strFileGz), {bWantGzip => false, strCompressType => STORAGE_DECOMPRESS})}, - '[object]', 'new read decompress'); - - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 4)}, 4, ' read 4 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 2, ' read 2 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 2, ' read 2 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 0, ' read 0 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 0, ' read 0 bytes'); - - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - $self->testResult(sub {$oGzipIo->close()}, false, ' close again'); - $self->testResult($tBuffer, $strFileContent, ' check content'); - - storageTest()->remove($strFileGz); - - #--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = 'AA'; - storageTest()->put($strFile, $strFileContent); - - $oGzipIo = $self->testResult( - 
sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openRead($strFile))}, - '[object]', 'new read compress'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 10, ' read 10 bytes (request 2)'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 18, ' read 18 bytes (request 2)'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2)}, 0, ' read 0 bytes (request 2)'); - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - - $self->testResult(sub {storageTest()->put($strFileGz, substr($tBuffer, 2))}, 28, ' put content'); - executeTest("gzip -df ${strFileGz}"); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, ' check content'); - - #--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = undef; - - executeTest('cat ' . $self->dataPath() . "/filecopy.archive2.bin | gzip -c > ${strFileGz}"); - - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip( - $oDriver->openRead($strFileGz), {lCompressBufferMax => 4096, strCompressType => STORAGE_DECOMPRESS})}, - '[object]', 'new read decompress'); - - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 8388608)}, 8388608, ' read 8388608 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 4194304)}, 4194304, ' read 4194304 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 4194304)}, 4194304, ' read 4194304 bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 1)}, 0, ' read 0 bytes'); - - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - $self->testResult(cryptoHashOne('sha1', $tBuffer), '1c7e00fd09b9dd11fc2966590b3e3274645dd031', ' check content'); - - storageTest()->remove($strFileGz); - - #--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = undef; - - executeTest('cp ' . $self->dataPath() . 
"/filecopy.archive2.bin ${strFile}"); - - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openRead($strFile), {strCompressType => STORAGE_COMPRESS})}, - '[object]', 'new read compress'); - - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2000000) > 0}, true, ' read bytes'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2000000) > 0}, true, ' read bytes'); - $self->testResult(sub {$oGzipIo->close()}, true, ' close'); - - $self->testResult(sub {storageTest()->put($strFileGz, $tBuffer) > 0}, true, ' put content'); - executeTest("gzip -df ${strFileGz}"); - $self->testResult( - sub {cryptoHashOne('sha1', ${storageTest()->get($strFile)})}, '1c7e00fd09b9dd11fc2966590b3e3274645dd031', - ' check content'); - - #--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = undef; - - my $oFile = $self->testResult( - sub {storageTest()->openWrite($strFile)}, '[object]', 'open file to extend during compression'); - - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openRead($strFile), {lCompressBufferMax => 4194304})}, - '[object]', ' new read compress'); - - $self->testResult(sub {$oFile->write(\$strFileContent)}, length($strFileContent), ' write first block'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2000000) > 0}, true, ' read compressed first block (compression done)'); - - $self->testResult(sub {$oFile->write(\$strFileContent)}, length($strFileContent), ' write second block'); - $self->testResult(sub {$oGzipIo->read(\$tBuffer, 2000000)}, 0, ' read compressed = 0'); - - $self->testResult(sub {storageTest()->put($strFileGz, $tBuffer) > 0}, true, ' put content'); - executeTest("gzip -df ${strFileGz}"); - $self->testResult( - sub {${storageTest()->get($strFile)}}, $strFileContent, ' check content'); - - 
#--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFileGz, $strFileContent); - - $oGzipIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Gzip($oDriver->openRead($strFileGz), {strCompressType => STORAGE_DECOMPRESS})}, - '[object]', 'new read decompress'); - - $self->testException( - sub {$oGzipIo->read(\$tBuffer, 1)}, ERROR_FILE_READ, "unable to inflate '${strFileGz}': incorrect header check"); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterShaPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterShaPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageFilterShaPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageFilterShaPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -#################################################################################################################################### -# Tests for StorageFilterSha module -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageFilterShaPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:crypto); -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Filter::Sha; -use pgBackRest::Storage::Posix::Driver; - -use 
pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Test data - my $strFile = $self->testPath() . qw{/} . 'file.txt'; - my $strFileContent = 'TESTDATA'; - my $iFileLength = length($strFileContent); - my $oDriver = new pgBackRest::Storage::Posix::Driver(); - - ################################################################################################################################ - if ($self->begin('read()')) - { - my $tBuffer; - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("echo -n '${strFileContent}' | tee ${strFile}"); - - my $oFileIo = $self->testResult(sub {$oDriver->openRead($strFile)}, '[object]', 'open read'); - my $oShaIo = $self->testResult(sub {new pgBackRest::Storage::Filter::Sha($oFileIo)}, '[object]', 'new read'); - - $self->testResult(sub {$oShaIo->read(\$tBuffer, 2, undef)}, 2, 'read 2 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 2, 2)}, 2, 'read 2 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 2, 4)}, 2, 'read 2 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 2, 6)}, 2, 'read 2 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 2, 8)}, 0, 'read 0 bytes'); - - $self->testResult(sub {$oShaIo->close()}, true, 'close'); - my $strSha = $self->testResult( - sub {$oShaIo->result(STORAGE_FILTER_SHA)}, cryptoHashOne('sha1', $strFileContent), - 'check hash against original content'); - $self->testResult($strSha, cryptoHashOne('sha1', $tBuffer), 'check hash against buffer'); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, 'check content'); - - 
#--------------------------------------------------------------------------------------------------------------------------- - $tBuffer = undef; - - $oFileIo = $self->testResult( - sub {$oDriver->openRead($self->dataPath() . '/filecopy.archive2.bin')}, '[object]', 'open read'); - $oShaIo = $self->testResult(sub {new pgBackRest::Storage::Filter::Sha($oFileIo)}, '[object]', 'new read'); - - $self->testResult(sub {$oShaIo->read(\$tBuffer, 8388608)}, 8388608, ' read 8388608 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 4194304)}, 4194304, ' read 4194304 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 4194304)}, 4194304, ' read 4194304 bytes'); - $self->testResult(sub {$oShaIo->read(\$tBuffer, 1)}, 0, ' read 0 bytes'); - - $self->testResult(sub {$oShaIo->close()}, true, ' close'); - $self->testResult(sub {$oShaIo->close()}, false, ' close again to make sure nothing bad happens'); - $self->testResult($oShaIo->result(STORAGE_FILTER_SHA), '1c7e00fd09b9dd11fc2966590b3e3274645dd031', ' check hash'); - $self->testResult(cryptoHashOne('sha1', $tBuffer), '1c7e00fd09b9dd11fc2966590b3e3274645dd031', ' check content'); - } - - ################################################################################################################################ - if ($self->begin('write()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $oFileIo = $self->testResult(sub {$oDriver->openWrite($strFile, {bAtomic => true})}, '[object]', 'open write'); - my $oShaIo = $self->testResult( - sub {new pgBackRest::Storage::Filter::Sha($oFileIo)}, '[object]', 'new'); - - my $tBuffer = substr($strFileContent, 0, 2); - $self->testResult(sub {$oShaIo->write(\$tBuffer)}, 2, 'write 2 bytes'); - $tBuffer = substr($strFileContent, 2, 2); - $self->testResult(sub {$oShaIo->write(\$tBuffer)}, 2, 'write 2 bytes'); - $tBuffer = substr($strFileContent, 4, 2); - $self->testResult(sub 
{$oShaIo->write(\$tBuffer)}, 2, 'write 2 bytes'); - $tBuffer = substr($strFileContent, 6, 2); - $self->testResult(sub {$oShaIo->write(\$tBuffer)}, 2, 'write 2 bytes'); - $tBuffer = ''; - $self->testResult(sub {$oShaIo->write(\$tBuffer)}, 0, 'write 0 bytes'); - - $self->testResult(sub {$oShaIo->close()}, true, 'close'); - my $strSha = $self->testResult( - sub {$oShaIo->result(STORAGE_FILTER_SHA)}, cryptoHashOne('sha1', $strFileContent), - 'check hash against original content'); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, 'check content'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageHelperPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageHelperPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageHelperPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageHelperPerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -61,11 +61,12 @@ #------------------------------------------------------------------------------------------------------------------------------- if ($self->begin("storageLocal()")) { - $self->testResult(sub {storageLocal($self->testPath())->put($strFile, $strFileContent)}, $iFileSize, 'put'); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, ' check put'); + $self->testResult(sub {storageLocal($self->testPath())->put("/tmp/${strFile}", $strFileContent)}, $iFileSize, 'put'); + $self->testResult(sub {${storageTest()->get("/tmp/${strFile}")}}, $strFileContent, ' check put'); - $self->testResult(sub {storageLocal($self->testPath())->put($strFile, $strFileContent)}, $iFileSize, 'put cache storage'); - $self->testResult(sub {${storageTest()->get($strFile)}}, $strFileContent, ' check put'); + $self->testResult( + sub {storageLocal($self->testPath())->put("/tmp/${strFile}", $strFileContent)}, $iFileSize, 'put cache storage'); + $self->testResult(sub 
{${storageTest()->get("/tmp/${strFile}")}}, $strFileContent, ' check put'); } #------------------------------------------------------------------------------------------------------------------------------- @@ -109,44 +110,6 @@ $self->testResult( sub {storageRepo()->pathGet(STORAGE_REPO_BACKUP . '/file')}, $self->testPath() . '/repo/backup/db/file', 'check backup file'); - - #--------------------------------------------------------------------------------------------------------------------------- - # Insert a bogus rule to generate an error - storageRepo()->{hRule}{''} = - { - fnRule => storageRepo()->{hRule}{&STORAGE_REPO_ARCHIVE}{fnRule}, - }; - - $self->testException(sub {storageRepo()->pathGet('')}, ERROR_ASSERT, 'invalid storage rule '); - } - - #------------------------------------------------------------------------------------------------------------------------------- - if ($self->begin("storageRepo() encryption")) - { - my $strStanzaEncrypt = 'test-encrypt'; - $self->optionTestSet(CFGOPT_REPO_CIPHER_TYPE, CFGOPTVAL_REPO_CIPHER_TYPE_AES_256_CBC); - $self->testException( - sub {$self->configTestLoad(CFGCMD_ARCHIVE_PUSH)}, ERROR_OPTION_REQUIRED, - 'archive-push command requires option: repo1-cipher-pass'); - - # Set the encryption passphrase and confirm passphrase and type have been set in the storage object - $self->optionTestSet(CFGOPT_REPO_CIPHER_PASS, 'x'); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - - $self->testResult(sub {storageRepo({strStanza => $strStanzaEncrypt})->cipherType() eq - CFGOPTVAL_REPO_CIPHER_TYPE_AES_256_CBC}, true, 'encryption type set'); - $self->testResult(sub {storageRepo({strStanza => $strStanzaEncrypt})->cipherPassUser() eq 'x'}, true, - 'encryption passphrase set'); - - # Cannot change encryption after it has been set (cached values not reset) - $self->optionTestClear(CFGOPT_REPO_CIPHER_TYPE); - $self->optionTestClear(CFGOPT_REPO_CIPHER_PASS); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - - $self->testResult(sub 
{storageRepo({strStanza => $strStanzaEncrypt})->cipherType() eq - CFGOPTVAL_REPO_CIPHER_TYPE_AES_256_CBC}, true, 'encryption type not reset'); - $self->testResult(sub {storageRepo({strStanza => $strStanzaEncrypt})->cipherPassUser() eq 'x'}, true, - 'encryption passphrase not reset'); } } diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageLocalPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageLocalPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageLocalPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageLocalPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,475 +0,0 @@ -#################################################################################################################################### -# Tests for Storage::Local module -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageLocalPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use pgBackRest::Config::Config; -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::LibC qw(:crypto); -use pgBackRest::Storage::Filter::Sha; -use pgBackRest::Storage::Base; -use pgBackRest::Storage::Local; - -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Env::Host::HostBackupTest; -use pgBackRestTest::Common::RunTest; - 
-#################################################################################################################################### -# initModule - common objects and variables used by all tests. -#################################################################################################################################### -sub initModule -{ - my $self = shift; - - # Local path - $self->{strPathLocal} = $self->testPath() . '/local'; - - # Create the dynamic rule - my $fnRule = sub - { - my $strRule = shift; - my $strFile = shift; - my $xData = shift; - - if ($strRule eq '') - { - return "fn-rule-1/${xData}" . (defined($strFile) ? "/${strFile}" : ''); - } - else - { - return 'fn-rule-2/' . (defined($strFile) ? "${strFile}/${strFile}" : 'no-file'); - } - }; - - # Create the rule hash - my $hRule = - { - '' => 'static-rule-path', - '' => - { - fnRule => $fnRule, - xData => 'test', - }, - '' => - { - fnRule => $fnRule, - }, - }; - - # Create local storage - $self->{oStorageLocal} = new pgBackRest::Storage::Local( - $self->pathLocal(), new pgBackRest::Storage::Posix::Driver(), {hRule => $hRule, bAllowTemp => false}); - - # Create encrypted storage - $self->{oStorageEncrypt} = new pgBackRest::Storage::Local( - $self->testPath(), new pgBackRest::Storage::Posix::Driver(), - {hRule => $hRule, bAllowTemp => false, strCipherType => CFGOPTVAL_REPO_CIPHER_TYPE_AES_256_CBC}); - - # Remote path - $self->{strPathRemote} = $self->testPath() . '/remote'; - - # Create the repo path so the remote won't complain that it's missing - mkdir($self->pathRemote()) - or confess &log(ERROR, "unable to create repo directory '" . $self->pathRemote() . qw{'}); - - # Remove repo path now that the remote is created - rmdir($self->{strPathRemote}) - or confess &log(ERROR, "unable to remove repo directory '" . $self->pathRemote() . 
qw{'}); - - # Create remote storage - $self->{oStorageRemote} = new pgBackRest::Storage::Local( - $self->pathRemote(), new pgBackRest::Storage::Posix::Driver(), {hRule => $hRule}); -} - -#################################################################################################################################### -# initTest - initialization before each test -#################################################################################################################################### -sub initTest -{ - my $self = shift; - - executeTest( - 'ssh ' . $self->backrestUser() . '\@' . $self->host() . ' mkdir -m 700 ' . $self->pathRemote(), {bSuppressStdErr => true}); - - executeTest('mkdir -m 700 ' . $self->pathLocal()); -} - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Define test file - my $strFile = 'file.txt'; - my $strFileCopy = 'file.txt.copy'; - my $strFileHash = 'bbbcf2c59433f68f22376cd2439d6cd309378df6'; - my $strFileContent = 'TESTDATA'; - my $iFileSize = length($strFileContent); - - ################################################################################################################################ - if ($self->begin("pathGet()")) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$self->storageLocal()->pathGet('/test', {bTemp => true})}, - ERROR_ASSERT, "temp file not supported for storage '" . $self->storageLocal()->pathBase() . 
"'"); - $self->testException( - sub {$self->storageRemote()->pathGet('', {bTemp => true})}, - ERROR_ASSERT, 'file part must be defined when temp file specified'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageRemote()->pathGet('/file', {bTemp => true})}, "/file.tmp", 'absolute path temp'); - $self->testResult(sub {$self->storageRemote()->pathGet('/file')}, "/file", 'absolute path file'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->pathGet('file')}, $self->storageLocal()->pathBase() . '/file', 'relative path'); - $self->testResult( - sub {$self->storageRemote()->pathGet('file', {bTemp => true})}, - $self->storageRemote()->pathBase() . '/file.tmp', 'relative path temp'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$self->storageLocal()->pathGet(' in 'testException( - sub {$self->storageLocal()->pathGet('')}, ERROR_ASSERT, "storage rule '' does not exist"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->pathGet('/file')}, - $self->storageLocal()->pathBase() . '/static-rule-path/file', 'static rule file'); - $self->testResult( - sub {$self->storageLocal()->pathGet('')}, - $self->storageLocal()->pathBase() . '/static-rule-path', 'static rule path'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->pathGet('/file')}, - $self->storageLocal()->pathBase() . 
'/fn-rule-1/test/file', 'function rule 1 file'); - $self->testResult( - sub {$self->storageLocal()->pathGet('/file')}, - $self->storageLocal()->pathBase() . '/fn-rule-2/file/file', 'function rule 2 file'); - $self->testResult( - sub {$self->storageLocal()->pathGet('')}, - $self->storageLocal()->pathBase() . '/fn-rule-1/test', 'function rule 1 path'); - $self->testResult( - sub {$self->storageLocal()->pathGet('')}, - $self->storageLocal()->pathBase() . '/fn-rule-2/no-file', 'function rule 2 no file'); - } - - ################################################################################################################################ - if ($self->begin('openWrite()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $oFileIo = $self->testResult(sub {$self->storageLocal()->openWrite($strFile)}, '[object]', 'open write'); - - $self->testResult(sub {$oFileIo->write(\$strFileContent)}, $iFileSize, "write $iFileSize bytes"); - $self->testResult(sub {$oFileIo->close()}, true, 'close'); - - # Check that it is not encrypted - $self->testResult(sub {$self->storageLocal()->encrypted($strFile)}, false, 'test storage not encrypted'); - } - - ################################################################################################################################ - if ($self->begin('put()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile))}, 0, 'put empty'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->put($strFile)}, 0, 'put empty (all defaults)'); - - #--------------------------------------------------------------------------------------------------------------------------- 
- $self->testResult( - sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile), $strFileContent)}, $iFileSize, 'put'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile), \$strFileContent)}, $iFileSize, - 'put reference'); - } - - ################################################################################################################################ - if ($self->begin('openRead()')) - { - my $tContent; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->openRead($strFile, {bIgnoreMissing => true})}, undef, 'ignore missing'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$self->storageLocal()->openRead($strFile)}, ERROR_FILE_MISSING, - "unable to open '" . $self->storageLocal()->pathBase() . "/${strFile}': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest('sudo touch ' . $self->pathLocal() . "/${strFile} && sudo chmod 700 " . $self->pathLocal() . "/${strFile}"); - - $self->testException( - sub {$self->storageLocal()->openRead($strFile)}, ERROR_FILE_OPEN, - "unable to open '" . $self->storageLocal()->pathBase() . "/${strFile}': Permission denied"); - - executeTest('sudo rm ' . $self->pathLocal() . 
"/${strFile}"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->put($self->storageLocal()->openWrite($strFile), $strFileContent); - - my $oFileIo = $self->testResult(sub {$self->storageLocal()->openRead($strFile)}, '[object]', 'open read'); - - $self->testResult(sub {$oFileIo->read(\$tContent, $iFileSize)}, $iFileSize, "read $iFileSize bytes"); - $self->testResult($tContent, $strFileContent, ' check read'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oFileIo = $self->testResult( - sub {$self->storageLocal()->openRead($strFile, {rhyFilter => [{strClass => STORAGE_FILTER_SHA}]})}, '[object]', - 'open read + checksum'); - - undef($tContent); - $self->testResult(sub {$oFileIo->read(\$tContent, $iFileSize)}, $iFileSize, "read $iFileSize bytes"); - $self->testResult(sub {$oFileIo->close()}, true, 'close'); - $self->testResult($tContent, $strFileContent, ' check read'); - $self->testResult($oFileIo->result(STORAGE_FILTER_SHA), cryptoHashOne('sha1', $strFileContent), ' check hash'); - } - - ################################################################################################################################ - if ($self->begin('get()')) - { - my $tBuffer; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->get($self->storageLocal()->openRead($strFile, {bIgnoreMissing => true}))}, undef, - 'get missing'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->put($strFile); - $self->testResult(sub {${$self->storageLocal()->get($strFile)}}, undef, 'get empty'); - - 
#--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->put($strFile, $strFileContent); - $self->testResult(sub {${$self->storageLocal()->get($strFile)}}, $strFileContent, 'get'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {${$self->storageLocal()->get($self->storageLocal()->openRead($strFile))}}, $strFileContent, 'get from io'); - } - - ################################################################################################################################ - if ($self->begin('hashSize()')) - { - my $tBuffer; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->put($strFile, $strFileContent)}, 8, 'put'); - - $self->testResult( - sub {$self->storageLocal()->hashSize($strFile)}, - qw{(} . cryptoHashOne('sha1', $strFileContent) . ', ' . $iFileSize . qw{)}, ' check hash/size'); - $self->testResult( - sub {$self->storageLocal()->hashSize(BOGUS, {bIgnoreMissing => true})}, "([undef], [undef])", - ' check missing hash/size'); - } - - ################################################################################################################################ - if ($self->begin('copy()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, ERROR_FILE_MISSING, - "unable to open '" . $self->storageLocal()->pathBase() . 
"/${strFile}': No such file or directory"); - $self->testResult( - sub {$self->storageLocal()->exists($strFileCopy)}, false, ' destination does not exist'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageLocal()->copy( - $self->storageLocal()->openRead($strFile, {bIgnoreMissing => true}), - $self->storageLocal()->openWrite($strFileCopy))}, - false, 'missing source io'); - $self->testResult( - sub {$self->storageLocal()->exists($strFileCopy)}, false, ' destination does not exist'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, ERROR_FILE_MISSING, - "unable to open '" . $self->storageLocal()->pathBase() . "/${strFile}': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->put($strFile, $strFileContent); - - $self->testResult(sub {$self->storageLocal()->copy($strFile, $strFileCopy)}, true, 'copy filename->filename'); - $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->remove($strFileCopy); - - $self->testResult( - sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, true, 'copy io->filename'); - $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->storageLocal()->remove($strFileCopy); - - 
$self->testResult( - sub {$self->storageLocal()->copy( - $self->storageLocal()->openRead($strFile), $self->storageLocal()->openWrite($strFileCopy))}, - true, 'copy io->io'); - $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); - } - - ################################################################################################################################ - if ($self->begin('info()')) - { - $self->testResult(sub {$self->storageLocal()->info($self->{strPathLocal})}, "[object]", 'stat dir successfully'); - - $self->testException(sub {$self->storageLocal()->info($strFile)}, ERROR_FILE_MISSING, - "unable to stat '". $self->{strPathLocal} . "/" . $strFile ."': No such file or directory"); - } - - ################################################################################################################################ - if ($self->begin('pathCreate()')) - { - my $strTestPath = $self->{strPathLocal} . "/" . BOGUS; - - $self->testResult(sub {$self->storageLocal()->pathCreate($strTestPath)}, "[undef]", - "test creation of path " . $strTestPath); - - $self->testException(sub {$self->storageLocal()->pathCreate($strTestPath)}, ERROR_PATH_EXISTS, - "unable to create path '". $strTestPath. 
"' because it already exists"); - - $self->testResult(sub {$self->storageLocal()->pathCreate($strTestPath, {bIgnoreExists => true})}, "[undef]", - "ignore path exists"); - } - - ################################################################################################################################ - if ($self->begin('encryption')) - { - my $strCipherPass = 'x'; - $self->testResult(sub {cryptoHashOne('sha1', $strFileContent)}, $strFileHash, 'hash check contents to be written'); - - # Error when passphrase not passed - #--------------------------------------------------------------------------------------------------------------------------- - my $oFileIo = $self->testException(sub {$self->storageEncrypt()->openWrite($strFile)}, - ERROR_ASSERT, 'tCipherPass is required in Storage::Filter::CipherBlock->new'); - - # Write an encrypted file - #--------------------------------------------------------------------------------------------------------------------------- - $oFileIo = $self->testResult(sub {$self->storageEncrypt()->openWrite($strFile, {strCipherPass => $strCipherPass})}, - '[object]', 'open write'); - - my $iWritten = $oFileIo->write(\$strFileContent); - $self->testResult(sub {$oFileIo->close()}, true, ' close'); - - # Check that it is encrypted and valid for the repo encryption type - $self->testResult(sub {$self->storageEncrypt()->encryptionValid($self->storageEncrypt()->encrypted($strFile))}, true, - ' test storage encrypted and valid'); - - $self->testResult( - sub {cryptoHashOne('sha1', ${storageTest()->get($strFile)}) ne $strFileHash}, true, ' check written sha1 different'); - - # Error when passphrase not passed - #--------------------------------------------------------------------------------------------------------------------------- - $oFileIo = $self->testException(sub {$self->storageEncrypt()->openRead($strFile)}, - ERROR_ASSERT, 'tCipherPass is required in Storage::Filter::CipherBlock->new'); - - # Read it and confirm it decrypts and is 
same as original content - #--------------------------------------------------------------------------------------------------------------------------- - $oFileIo = $self->testResult(sub {$self->storageEncrypt()->openRead($strFile, {strCipherPass => $strCipherPass})}, - '[object]', 'open read and decrypt'); - my $strContent; - $oFileIo->read(\$strContent, $iWritten); - $self->testResult(sub {$oFileIo->close()}, true, ' close'); - $self->testResult($strContent, $strFileContent, ' decrypt read equal orginal contents'); - - # Copy - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$self->storageEncrypt()->copy( - $self->storageEncrypt()->openRead($strFile, {strCipherPass => $strCipherPass}), - $self->storageEncrypt()->openWrite($strFileCopy, {strCipherPass => $strCipherPass}))}, - true, 'copy - decrypt/encrypt'); - - $self->testResult( - sub {cryptoHashOne('sha1', ${$self->storageEncrypt()->get($strFileCopy, {strCipherPass => $strCipherPass})})}, - $strFileHash, ' check decrypted copy file sha1 same as original plaintext file'); - - # Write an empty encrypted file - #--------------------------------------------------------------------------------------------------------------------------- - my $strFileZero = 'file-0.txt'; - my $strZeroContent = ''; - $oFileIo = $self->testResult( - sub {$self->storageEncrypt()->openWrite($strFileZero, {strCipherPass => $strCipherPass})}, '[object]', - 'open write for zero'); - - $self->testResult(sub {$oFileIo->write(\$strZeroContent)}, 0, ' zero written'); - $self->testResult(sub {$oFileIo->close()}, true, ' close'); - - $self->testResult(sub {$self->storageEncrypt()->encrypted($strFile)}, true, ' test empty file encrypted'); - - # Write an unencrypted file to the encrypted storage and check if the file is valid for that storage - 
#--------------------------------------------------------------------------------------------------------------------------- - my $strFileTest = $self->testPath() . qw{/} . 'test.file.txt'; - - # Create empty file - executeTest("touch ${strFileTest}"); - $self->testResult(sub {$self->storageEncrypt()->encrypted($strFileTest)}, false, 'empty file so not encrypted'); - - # Add unencrypted content to the file - executeTest("echo -n '${strFileContent}' | tee ${strFileTest}"); - $self->testResult(sub {$self->storageEncrypt()->encryptionValid($self->storageEncrypt()->encrypted($strFileTest))}, false, - 'storage encryption and unencrypted file format do not match'); - - # Unencrypted file valid in unencrypted storage - $self->testResult(sub {$self->storageLocal()->encryptionValid($self->storageLocal()->encrypted($strFileTest))}, true, - 'unencrypted file valid in unencrypted storage'); - - # Prepend encryption Magic Signature and test encrypted file in unencrypted storage not valid - executeTest('echo "' . CIPHER_MAGIC . '$(cat ' . $strFileTest . ')" > ' . $strFileTest); - $self->testResult(sub {$self->storageLocal()->encryptionValid($self->storageLocal()->encrypted($strFileTest))}, false, - 'storage unencrypted and encrypted file format do not match'); - - # Test a file that does not exist - #--------------------------------------------------------------------------------------------------------------------------- - $strFileTest = $self->testPath() . qw{/} . 'testfile'; - $self->testException(sub {$self->storageEncrypt()->encrypted($strFileTest)}, ERROR_FILE_MISSING, - "unable to open '" . $strFileTest . 
"': No such file or directory"); - - $self->testResult(sub {$self->storageEncrypt()->encrypted($strFileTest, {bIgnoreMissing => true})}, true, - 'encryption for ignore missing file returns encrypted for encrypted storage'); - - $self->testResult(sub {$self->storageLocal()->encrypted($strFileTest, {bIgnoreMissing => true})}, false, - 'encryption for ignore missing file returns unencrypted for unencrypted storage'); - } -} - -#################################################################################################################################### -# Getters -#################################################################################################################################### -sub host {return '127.0.0.1'} -sub pathLocal {return shift->{strPathLocal}}; -sub pathRemote {return shift->{strPathRemote}}; -sub protocolLocal {return shift->{oProtocolLocal}}; -sub protocolRemote {return shift->{oProtocolRemote}}; -sub storageLocal {return shift->{oStorageLocal}}; -sub storageEncrypt {return shift->{oStorageEncrypt}}; -sub storageRemote {return shift->{oStorageRemote}}; - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StoragePerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StoragePerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StoragePerlTest.pm 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StoragePerlTest.pm 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,341 @@ +#################################################################################################################################### +# Tests for Storage::Local module +#################################################################################################################################### +package pgBackRestTest::Module::Storage::StoragePerlTest; +use parent 'pgBackRestTest::Common::RunTest'; + 
+#################################################################################################################################### +# Perl includes +#################################################################################################################################### +use strict; +use warnings FATAL => qw(all); +use Carp qw(confess); +use English '-no_match_vars'; + +use pgBackRest::Config::Config; +use pgBackRest::Common::Exception; +use pgBackRest::Common::Log; +use pgBackRest::LibC qw(:crypto); +use pgBackRest::Storage::Base; + +use pgBackRestTest::Common::ContainerTest; +use pgBackRestTest::Common::ExecuteTest; +use pgBackRestTest::Common::RunTest; +use pgBackRestTest::Env::Host::HostBackupTest; + +#################################################################################################################################### +# run +#################################################################################################################################### +sub run +{ + my $self = shift; + + # Define test file + my $strFile = $self->testPath() . '/file.txt'; + my $strFileCopy = $self->testPath() . 
'/file.txt.copy'; + my $strFileHash = 'bbbcf2c59433f68f22376cd2439d6cd309378df6'; + my $strFileContent = 'TESTDATA'; + my $iFileSize = length($strFileContent); + + # Create local storage + $self->{oStorageLocal} = new pgBackRest::Storage::Storage(''); + + ################################################################################################################################ + if ($self->begin("pathGet()")) + { + $self->testResult(sub {$self->storageLocal()->pathGet('file')}, '/file', 'relative path'); + $self->testResult(sub {$self->storageLocal()->pathGet('/file2')}, '/file2', 'absolute path'); + } + + ################################################################################################################################ + if ($self->begin('put()')) + { + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile))}, 0, 'put empty'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->put($strFile)}, 0, 'put empty (all defaults)'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile), $strFileContent)}, $iFileSize, 'put'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->put($self->storageLocal()->openWrite($strFile), \$strFileContent)}, $iFileSize, + 'put reference'); + } + + ################################################################################################################################ + if ($self->begin('get()')) + { + 
#--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->get($self->storageLocal()->openRead($strFile, {bIgnoreMissing => true}))}, undef, + 'get missing'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->storageLocal()->put($strFile); + $self->testResult(sub {${$self->storageLocal()->get($strFile)}}, undef, 'get empty'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->storageLocal()->put($strFile, $strFileContent); + $self->testResult(sub {${$self->storageLocal()->get($strFile)}}, $strFileContent, 'get'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {${$self->storageLocal()->get($self->storageLocal()->openRead($strFile))}}, $strFileContent, 'get from io'); + } + + ################################################################################################################################ + if ($self->begin('hashSize()')) + { + $self->testResult( + sub {$self->storageLocal()->put($strFile, $strFileContent)}, 8, 'put'); + + $self->testResult( + sub {$self->storageLocal()->hashSize($strFile)}, + qw{(} . cryptoHashOne('sha1', $strFileContent) . ', ' . $iFileSize . 
qw{)}, ' check hash/size'); + $self->testResult( + sub {$self->storageLocal()->hashSize(BOGUS, {bIgnoreMissing => true})}, "([undef], [undef])", + ' check missing hash/size'); + } + + ################################################################################################################################ + if ($self->begin('copy()')) + { + #--------------------------------------------------------------------------------------------------------------------------- + $self->testException( + sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, ERROR_FILE_MISSING, + "unable to open missing file '${strFile}' for read"); + $self->testResult( + sub {$self->storageLocal()->exists($strFileCopy)}, false, ' destination does not exist'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testResult( + sub {$self->storageLocal()->copy( + $self->storageLocal()->openRead($strFile, {bIgnoreMissing => true}), + $self->storageLocal()->openWrite($strFileCopy))}, + false, 'missing source io'); + $self->testResult( + sub {$self->storageLocal()->exists($strFileCopy)}, false, ' destination does not exist'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testException( + sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, ERROR_FILE_MISSING, + "unable to open missing file '${strFile}' for read"); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->storageLocal()->put($strFile, $strFileContent); + + $self->testResult(sub {$self->storageLocal()->copy($strFile, $strFileCopy)}, true, 'copy filename->filename'); + $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); + + 
#--------------------------------------------------------------------------------------------------------------------------- + $self->storageLocal()->remove($strFileCopy); + + $self->testResult( + sub {$self->storageLocal()->copy($self->storageLocal()->openRead($strFile), $strFileCopy)}, true, 'copy io->filename'); + $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); + + #--------------------------------------------------------------------------------------------------------------------------- + $self->storageLocal()->remove($strFileCopy); + + $self->testResult( + sub {$self->storageLocal()->copy( + $self->storageLocal()->openRead($strFile), $self->storageLocal()->openWrite($strFileCopy))}, + true, 'copy io->io'); + $self->testResult(sub {${$self->storageLocal()->get($strFileCopy)}}, $strFileContent, ' check copy'); + } + + ################################################################################################################################ + if ($self->begin('exists()')) + { + $self->storageLocal()->put($self->testPath() . "/test.file"); + + $self->testResult(sub {$self->storageLocal()->exists($self->testPath() . "/test.file")}, true, 'existing file'); + $self->testResult(sub {$self->storageLocal()->exists($self->testPath() . "/test.missing")}, false, 'missing file'); + $self->testResult(sub {$self->storageLocal()->exists($self->testPath())}, false, 'path'); + } + + ################################################################################################################################ + if ($self->begin('info()')) + { + $self->testResult( + sub {$self->storageLocal()->info($self->testPath())}, + "{group => " . $self->group() . ", mode => 0770, type => d, user => " . $self->pgUser() . 
"}", + 'stat dir successfully'); + + $self->testException(sub {$self->storageLocal()->info(BOGUS)}, ERROR_FILE_OPEN, + "unable to get info for missing path/file '/bogus'"); + } + + ################################################################################################################################ + if ($self->begin("manifest() and list()")) + { + #--------------------------------------------------------------------------------------------------------------------------- + $self->testException( + sub {$self->storageLocal()->manifest($self->testPath() . '/missing')}, + ERROR_PATH_MISSING, "unable to list file info for missing path '" . $self->testPath() . "/missing'"); + + #--------------------------------------------------------------------------------------------------------------------------- + # Setup test data + executeTest('mkdir -m 750 ' . $self->testPath() . '/sub1'); + executeTest('mkdir -m 750 ' . $self->testPath() . '/sub1/sub2'); + executeTest('mkdir -m 750 ' . $self->testPath() . '/sub2'); + + executeTest("echo 'TESTDATA' > " . $self->testPath() . '/test.txt'); + utime(1111111111, 1111111111, $self->testPath() . '/test.txt'); + executeTest('chmod 1640 ' . $self->testPath() . '/test.txt'); + + executeTest("echo 'TESTDATA_' > ". $self->testPath() . '/sub1/test-sub1.txt'); + utime(1111111112, 1111111112, $self->testPath() . '/sub1/test-sub1.txt'); + executeTest('chmod 0640 ' . $self->testPath() . '/sub1/test-sub1.txt'); + + executeTest("echo 'TESTDATA__' > " . $self->testPath() . '/sub1/sub2/test-sub2.txt'); + utime(1111111113, 1111111113, $self->testPath() . '/sub1/sub2/test-sub2.txt'); + executeTest('chmod 0646 ' . $self->testPath() . '/sub1/test-sub1.txt'); + + executeTest('ln ' . $self->testPath() . '/test.txt ' . $self->testPath() . '/sub1/test-hardlink.txt'); + executeTest('ln ' . $self->testPath() . '/test.txt ' . $self->testPath() . '/sub1/sub2/test-hardlink.txt'); + + executeTest('ln -s .. ' . $self->testPath() . 
'/sub1/test'); + executeTest('chmod 0700 ' . $self->testPath() . '/sub1/test'); + executeTest('ln -s ../.. ' . $self->testPath() . '/sub1/sub2/test'); + executeTest('chmod 0750 ' . $self->testPath() . '/sub1/sub2/test'); + + executeTest('chmod 0770 ' . $self->testPath()); + + $self->testResult( + sub {$self->storageLocal()->manifest($self->testPath())}, + '{. => {group => ' . $self->group() . ', mode => 0770, type => d, user => ' . $self->pgUser() . '}, ' . + 'sub1 => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . + 'sub1/sub2 => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . + 'sub1/sub2/test => {group => ' . $self->group() . ', link_destination => ../.., type => l, user => ' . + $self->pgUser() . '}, ' . + 'sub1/sub2/test-hardlink.txt => ' . + '{group => ' . $self->group() . ', mode => 0640, modification_time => 1111111111, size => 9, type => f, user => ' . + $self->pgUser() . '}, ' . + 'sub1/sub2/test-sub2.txt => ' . + '{group => ' . $self->group() . ', mode => 0666, modification_time => 1111111113, size => 11, type => f, user => ' . + $self->pgUser() . '}, ' . + 'sub1/test => {group => ' . $self->group() . ', link_destination => .., type => l, user => ' . $self->pgUser() . '}, ' . + 'sub1/test-hardlink.txt => ' . + '{group => ' . $self->group() . ', mode => 0640, modification_time => 1111111111, size => 9, type => f, user => ' . + $self->pgUser() . '}, ' . + 'sub1/test-sub1.txt => ' . + '{group => ' . $self->group() . ', mode => 0646, modification_time => 1111111112, size => 10, type => f, user => ' . + $self->pgUser() . '}, ' . + 'sub2 => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . + 'test.txt => ' . + '{group => ' . $self->group() . ', mode => 0640, modification_time => 1111111111, size => 9, type => f, user => ' . + $self->pgUser() . 
'}}', + 'complete manifest'); + + $self->testResult(sub {$self->storageLocal()->list($self->testPath())}, "(sub1, sub2, test.txt)", "list"); + $self->testResult(sub {$self->storageLocal()->list($self->testPath(), {strExpression => "2\$"})}, "sub2", "list"); + $self->testResult( + sub {$self->storageLocal()->list($self->testPath(), {strSortOrder => 'reverse'})}, "(test.txt, sub2, sub1)", + "list reverse"); + $self->testResult(sub {$self->storageLocal()->list($self->testPath() . "/sub2")}, "[undef]", "list empty"); + $self->testResult( + sub {$self->storageLocal()->list($self->testPath() . "/sub99", {bIgnoreMissing => true})}, "[undef]", "list missing"); + $self->testException( + sub {$self->storageLocal()->list($self->testPath() . "/sub99")}, ERROR_PATH_MISSING, + "unable to list files for missing path '" . $self->testPath() . "/sub99'"); + } + + ################################################################################################################################ + if ($self->begin('move()')) + { + my $strFileCopy = "${strFile}.copy"; + my $strFileSub = $self->testPath() . '/sub/file.txt'; + + #--------------------------------------------------------------------------------------------------------------------------- + $self->testException( + sub {$self->storageLocal()->move($strFile, $strFileCopy)}, ERROR_FILE_MOVE, + "unable to move '${strFile}' to '${strFile}.copy': No such file or directory"); + } + + ################################################################################################################################ + if ($self->begin('owner()')) + { + my $strFile = $self->testPath() . 
"/test.txt"; + + $self->testException( + sub {$self->storageLocal()->owner($strFile, 'root')}, ERROR_FILE_MISSING, + "unable to stat '${strFile}': No such file or directory"); + + executeTest("touch ${strFile}"); + + $self->testException( + sub {$self->storageLocal()->owner($strFile, BOGUS)}, ERROR_FILE_OWNER, + "unable to set ownership for '${strFile}' because user 'bogus' does not exist"); + $self->testException( + sub {$self->storageLocal()->owner($strFile, undef, BOGUS)}, ERROR_FILE_OWNER, + "unable to set ownership for '${strFile}' because group 'bogus' does not exist"); + + $self->testResult(sub {$self->storageLocal()->owner($strFile)}, undef, "no ownership changes"); + $self->testResult(sub {$self->storageLocal()->owner($strFile, TEST_USER)}, undef, "same user"); + $self->testResult(sub {$self->storageLocal()->owner($strFile, undef, TEST_GROUP)}, undef, "same group"); + $self->testResult( + sub {$self->storageLocal()->owner($strFile, TEST_USER, TEST_GROUP)}, undef, + "same user, group"); + + $self->testException( + sub {$self->storageLocal()->owner($strFile, 'root', undef)}, ERROR_FILE_OWNER, + "unable to set ownership for '${strFile}': Operation not permitted"); + $self->testException( + sub {$self->storageLocal()->owner($strFile, undef, 'root')}, ERROR_FILE_OWNER, + "unable to set ownership for '${strFile}': Operation not permitted"); + + executeTest("sudo chown :root ${strFile}"); + $self->testResult( + sub {$self->storageLocal()->owner($strFile, undef, TEST_GROUP)}, undef, "change group back from root"); + } + + ################################################################################################################################ + if ($self->begin('pathCreate()')) + { + my $strTestPath = $self->testPath() . "/" . BOGUS; + + $self->testResult(sub {$self->storageLocal()->pathCreate($strTestPath)}, "[undef]", + "test creation of path " . 
$strTestPath); + + $self->testException(sub {$self->storageLocal()->pathCreate($strTestPath)}, ERROR_PATH_CREATE, + "unable to create path '". $strTestPath. "'"); + + $self->testResult(sub {$self->storageLocal()->pathCreate($strTestPath, {bIgnoreExists => true})}, "[undef]", + "ignore path exists"); + } + + ################################################################################################################################ + if ($self->begin('pathExists()')) + { + $self->storageLocal()->put($self->testPath() . "/test.file"); + + $self->testResult(sub {$self->storageLocal()->pathExists($self->testPath() . "/test.file")}, false, 'existing file'); + $self->testResult(sub {$self->storageLocal()->pathExists($self->testPath() . "/test.missing")}, false, 'missing file'); + $self->testResult(sub {$self->storageLocal()->pathExists($self->testPath())}, true, 'path'); + } + + ################################################################################################################################ + if ($self->begin('pathSync()')) + { + $self->testResult(sub {$self->storageLocal()->pathSync($self->testPath())}, "[undef]", "test path sync"); + } +} + +#################################################################################################################################### +# Getters +#################################################################################################################################### +# sub host {return '127.0.0.1'} +# sub pathLocal {return shift->{strPathLocal}}; +# sub pathRemote {return shift->{strPathRemote}}; +sub storageLocal {return shift->{oStorageLocal}}; +# sub storageEncrypt {return shift->{oStorageEncrypt}}; +# sub storageRemote {return shift->{oStorageRemote}}; + +1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StoragePosixPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StoragePosixPerlTest.pm --- 
pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StoragePosixPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StoragePosixPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,444 +0,0 @@ -#################################################################################################################################### -# Posix Driver Tests -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StoragePosixPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use File::Basename qw(basename dirname); -use IO::Socket::UNIX; - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Storage::Posix::Driver; - -use pgBackRestTest::Common::ContainerTest; -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Test data - my $strFile = $self->testPath() . 
'/file.txt'; - my $strFileContent = 'TESTDATA'; - my $iFileLength = length($strFileContent); - my $iFileLengthHalf = int($iFileLength / 2); - - # Test driver - my $oPosix = new pgBackRest::Storage::Posix::Driver(); - - ################################################################################################################################ - if ($self->begin('exists()')) - { - my $strPathSub = $self->testPath() . '/sub'; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$oPosix->exists($strFile)}, false, 'file'); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("sudo mkdir ${strPathSub} && sudo chmod 700 ${strPathSub}"); - - $self->testResult( - sub {$oPosix->pathExists($strPathSub)}, true, 'path'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$oPosix->exists("${strPathSub}/file")}, ERROR_FILE_EXISTS, - "unable to test if file '${strPathSub}/file' exists: Permission denied"); - } - - ################################################################################################################################ - if ($self->begin("manifestList()")) - { - #--------------------------------------------------------------------------------------------------------------------------- - my @stryFile = ('.', 'test.txt'); - - $self->testResult( - sub {$oPosix->manifestList($self->testPath(), \@stryFile)}, - '{. => {group => ' . $self->group() . ', mode => 0770, type => d, user => ' . $self->pgUser() . 
'}}', - 'skip missing file'); - } - - ################################################################################################################################ - if ($self->begin("manifestStat()")) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $strFile = $self->testPath() . '/test.txt'; - - $self->testResult(sub {$oPosix->manifestStat($strFile)}, '[undef]', 'ignore missing file'); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFile, "TEST"); - utime(1111111111, 1111111111, $strFile); - executeTest('chmod 1640 ' . $strFile); - - $self->testResult( - sub {$oPosix->manifestStat($strFile)}, - '{group => ' . $self->group() . - ', mode => 1640, modification_time => 1111111111, size => 4, type => f, user => ' . $self->pgUser() . '}', - 'stat file'); - - #--------------------------------------------------------------------------------------------------------------------------- - my $strSocketFile = $self->testPath() . '/test.socket'; - - # Create a socket to test invalid files - my $oSocket = IO::Socket::UNIX->new(Type => SOCK_STREAM(), Local => $strSocketFile, Listen => 1); - - $self->testException( - sub {$oPosix->manifestStat($strSocketFile)}, ERROR_FILE_INVALID, - "${strSocketFile} is not of type directory, file, or link"); - - # Cleanup socket - $oSocket->close(); - storageTest()->remove($strSocketFile); - - #--------------------------------------------------------------------------------------------------------------------------- - my $strTestPath = $self->testPath() . '/public_dir'; - storageTest()->pathCreate($strTestPath, {strMode => '0750'}); - - $self->testResult( - sub {$oPosix->manifestStat($strTestPath)}, - '{group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . 
'}', - 'stat directory'); - - #--------------------------------------------------------------------------------------------------------------------------- - my $strTestLink = $self->testPath() . '/public_dir_link'; - - symlink($strTestPath, $strTestLink) - or confess &log(ERROR, "unable to create symlink from ${strTestPath} to ${strTestLink}"); - - $self->testResult( - sub {$oPosix->manifestStat($strTestLink)}, - '{group => ' . $self->group() . ", link_destination => ${strTestPath}, type => l, user => " . $self->pgUser() . '}', - 'stat link'); - } - - ################################################################################################################################ - if ($self->begin("manifestRecurse()")) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $strTestPath = $self->testPath() . '/public_dir'; - my $strTestFile = "${strTestPath}/test.txt"; - - $self->testException( - sub {my $hManifest = {}; $oPosix->manifestRecurse($strTestFile, undef, 0, $hManifest); $hManifest}, - ERROR_FILE_MISSING, "unable to stat '${strTestFile}': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->pathCreate($strTestPath, {strMode => '0750'}); - - $self->testResult( - sub {my $hManifest = {}; $oPosix->manifestRecurse($strTestPath, undef, 0, $hManifest); $hManifest}, - '{. => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}}', - 'empty directory manifest'); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strTestFile, "TEST"); - utime(1111111111, 1111111111, $strTestFile); - executeTest('chmod 0750 ' . 
$strTestFile); - - storageTest()->pathCreate("${strTestPath}/sub", {strMode => '0750'}); - - $self->testResult( - sub {my $hManifest = {}; $oPosix->manifestRecurse( - $self->testPath(), basename($strTestPath), 1, $hManifest); $hManifest}, - '{public_dir => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . - 'public_dir/sub => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . - 'public_dir/' . basename($strTestFile) . ' => {group => ' . $self->group() . - ', mode => 0750, modification_time => 1111111111, size => 4, type => f, user => ' . $self->pgUser() . '}}', - 'directory and file manifest'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {my $hManifest = {}; $oPosix->manifestRecurse($strTestFile, undef, 0, $hManifest); $hManifest}, - '{' . basename($strTestFile) . ' => {group => ' . $self->group() . - ', mode => 0750, modification_time => 1111111111, size => 4, type => f, user => ' . $self->pgUser() . '}}', - 'single file manifest'); - } - - ################################################################################################################################ - if ($self->begin("manifest()")) - { - #--------------------------------------------------------------------------------------------------------------------------- - my $strMissingFile = $self->testPath() . '/missing'; - - $self->testException( - sub {$oPosix->manifest($strMissingFile)}, - ERROR_FILE_MISSING, "unable to stat '${strMissingFile}': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - # Setup test data - executeTest('mkdir -m 750 ' . $self->testPath() . '/sub1'); - executeTest('mkdir -m 750 ' . $self->testPath() . '/sub1/sub2'); - - executeTest("echo 'TESTDATA' > " . 
$self->testPath() . '/test.txt'); - utime(1111111111, 1111111111, $self->testPath() . '/test.txt'); - executeTest('chmod 1640 ' . $self->testPath() . '/test.txt'); - - executeTest("echo 'TESTDATA_' > ". $self->testPath() . '/sub1/test-sub1.txt'); - utime(1111111112, 1111111112, $self->testPath() . '/sub1/test-sub1.txt'); - executeTest('chmod 0640 ' . $self->testPath() . '/sub1/test-sub1.txt'); - - executeTest("echo 'TESTDATA__' > " . $self->testPath() . '/sub1/sub2/test-sub2.txt'); - utime(1111111113, 1111111113, $self->testPath() . '/sub1/sub2/test-sub2.txt'); - executeTest('chmod 0646 ' . $self->testPath() . '/sub1/test-sub1.txt'); - - executeTest('ln ' . $self->testPath() . '/test.txt ' . $self->testPath() . '/sub1/test-hardlink.txt'); - executeTest('ln ' . $self->testPath() . '/test.txt ' . $self->testPath() . '/sub1/sub2/test-hardlink.txt'); - - executeTest('ln -s .. ' . $self->testPath() . '/sub1/test'); - executeTest('chmod 0700 ' . $self->testPath() . '/sub1/test'); - executeTest('ln -s ../.. ' . $self->testPath() . '/sub1/sub2/test'); - executeTest('chmod 0750 ' . $self->testPath() . '/sub1/sub2/test'); - - executeTest('chmod 0770 ' . $self->testPath()); - - $self->testResult( - sub {$oPosix->manifest($self->testPath())}, - '{. => {group => ' . $self->group() . ', mode => 0770, type => d, user => ' . $self->pgUser() . '}, ' . - 'sub1 => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . - 'sub1/sub2 => {group => ' . $self->group() . ', mode => 0750, type => d, user => ' . $self->pgUser() . '}, ' . - 'sub1/sub2/test => {group => ' . $self->group() . ', link_destination => ../.., type => l, user => ' . - $self->pgUser() . '}, ' . - 'sub1/sub2/test-hardlink.txt => ' . - '{group => ' . $self->group() . ', mode => 1640, modification_time => 1111111111, size => 9, type => f, user => ' . - $self->pgUser() . '}, ' . - 'sub1/sub2/test-sub2.txt => ' . - '{group => ' . $self->group() . 
', mode => 0666, modification_time => 1111111113, size => 11, type => f, user => ' . - $self->pgUser() . '}, ' . - 'sub1/test => {group => ' . $self->group() . ', link_destination => .., type => l, user => ' . $self->pgUser() . '}, ' . - 'sub1/test-hardlink.txt => ' . - '{group => ' . $self->group() . ', mode => 1640, modification_time => 1111111111, size => 9, type => f, user => ' . - $self->pgUser() . '}, ' . - 'sub1/test-sub1.txt => ' . - '{group => ' . $self->group() . ', mode => 0646, modification_time => 1111111112, size => 10, type => f, user => ' . - $self->pgUser() . '}, ' . - 'test.txt => ' . - '{group => ' . $self->group() . ', mode => 1640, modification_time => 1111111111, size => 9, type => f, user => ' . - $self->pgUser() . '}}', - 'complete manifest'); - } - - ################################################################################################################################ - if ($self->begin('openRead() & Posix::FileRead')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$oPosix->openRead($strFile)}, ERROR_FILE_MISSING, "unable to open '${strFile}': No such file or directory"); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("echo -n '${strFileContent}' | tee ${strFile}"); - - $self->testResult( - sub {$oPosix->openRead($strFile)}, '[object]', 'open read'); - } - - ################################################################################################################################ - if ($self->begin('openWrite() & Posix::FileWrite')) - { - my $tContent = $strFileContent; - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("echo -n '${strFileContent}' | tee ${strFile}"); - executeTest("chmod 600 ${strFile} && sudo chown root:root 
${strFile}"); - - $self->testException( - sub {new pgBackRest::Storage::Posix::FileRead($oPosix, $strFile)}, ERROR_FILE_OPEN, - "unable to open '${strFile}': Permission denied"); - - executeTest("sudo rm -rf ${strFile}"); - - #--------------------------------------------------------------------------------------------------------------------------- - my $oPosixIo = $self->testResult( - sub {new pgBackRest::Storage::Posix::FileWrite($oPosix, $strFile)}, '[object]', 'open'); - - $tContent = undef; - $self->testException( - sub {$oPosixIo->write(\$tContent)}, ERROR_FILE_WRITE, "unable to write to '${strFile}': Use of uninitialized value"); - - $tContent = substr($strFileContent, 0, $iFileLengthHalf); - $self->testResult( - sub {$oPosixIo->write(\$tContent)}, $iFileLengthHalf, 'write part 1'); - - $tContent = substr($strFileContent, $iFileLengthHalf); - $self->testResult( - sub {$oPosixIo->write(\$tContent)}, $iFileLength - $iFileLengthHalf, - 'write part 2'); - $oPosixIo->close(); - - $tContent = undef; - $self->testResult( - sub {(new pgBackRest::Storage::Posix::FileRead($oPosix, $strFile))->read(\$tContent, $iFileLength)}, - $iFileLength, 'check write content length'); - $self->testResult($tContent, $strFileContent, 'check write content'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oPosixIo = $self->testResult( - sub {new pgBackRest::Storage::Posix::FileWrite( - $oPosix, "${strFile}.atomic", {bAtomic => true, strMode => '0666', lTimestamp => time(), bSync => false})}, - '[object]', 'open'); - - $self->testResult(sub {$oPosixIo->write(\$tContent, $iFileLength)}, $iFileLength, 'write'); - $self->testResult(sub {$oPosixIo->close()}, true, 'close'); - - $self->testResult(sub {${storageTest()->get("${strFile}.atomic")}}, $strFileContent, 'check content'); - - #--------------------------------------------------------------------------------------------------------------------------- - 
$oPosixIo = $self->testResult( - sub {new pgBackRest::Storage::Posix::FileWrite($oPosix, $strFile)}, '[object]', 'open'); - - $self->testResult(sub {$oPosixIo->close()}, true, 'close'); - - undef($oPosixIo); - - # Test that a premature destroy (from error or otherwise) does not rename the file - #--------------------------------------------------------------------------------------------------------------------------- - my $strFileAbort = $self->testPath() . '/file-abort.txt'; - my $strFileAbortTmp = "${strFileAbort}.tmp"; - - $oPosixIo = $self->testResult( - sub {new pgBackRest::Storage::Posix::FileWrite($oPosix, $strFileAbort, {bAtomic => true})}, '[object]', 'open'); - - $oPosixIo->write(\$strFileContent); - undef($oPosixIo); - - $self->testResult(sub {$oPosix->exists($strFileAbort)}, false, 'destination file does not exist'); - $self->testResult(sub {$oPosix->exists($strFileAbortTmp)}, true, 'destination file tmp exists'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oPosixIo = $self->testResult( - sub {new pgBackRest::Storage::Posix::FileWrite($oPosix, $strFile, {lTimestamp => time()})}, '[object]', 'open'); - $self->testResult(sub {$oPosixIo->write(\$strFileContent, $iFileLength)}, $iFileLength, 'write'); - executeTest("rm -f $strFile"); - - $self->testException( - sub {$oPosixIo->close()}, ERROR_FILE_WRITE, "unable to set time for '${strFile}': No such file or directory"); - } - - ################################################################################################################################ - if ($self->begin('owner()')) - { - my $strFile = $self->testPath() . 
"/test.txt"; - - $self->testException( - sub {$oPosix->owner($strFile, {strUser => 'root'})}, ERROR_FILE_MISSING, - "unable to stat '${strFile}': No such file or directory"); - - executeTest("touch ${strFile}"); - - $self->testException( - sub {$oPosix->owner($strFile, {strUser => BOGUS})}, ERROR_FILE_OWNER, - "unable to set ownership for '${strFile}' because user 'bogus' does not exist"); - $self->testException( - sub {$oPosix->owner($strFile, {strGroup => BOGUS})}, ERROR_FILE_OWNER, - "unable to set ownership for '${strFile}' because group 'bogus' does not exist"); - - $self->testResult(sub {$oPosix->owner($strFile)}, undef, "no ownership changes"); - $self->testResult(sub {$oPosix->owner($strFile, {strUser => TEST_USER})}, undef, "same user"); - $self->testResult(sub {$oPosix->owner($strFile, {strGroup => TEST_GROUP})}, undef, "same group"); - $self->testResult( - sub {$oPosix->owner($strFile, {strUser => TEST_USER, strGroup => TEST_GROUP})}, undef, "same user, group"); - - $self->testException( - sub {$oPosix->owner($strFile, {strUser => 'root'})}, ERROR_FILE_OWNER, - "unable to set ownership for '${strFile}': Operation not permitted"); - $self->testException( - sub {$oPosix->owner($strFile, {strGroup => 'root'})}, ERROR_FILE_OWNER, - "unable to set ownership for '${strFile}': Operation not permitted"); - - executeTest("sudo chown :root ${strFile}"); - $self->testResult( - sub {$oPosix->owner($strFile, {strGroup => TEST_GROUP})}, undef, "change group back from root"); - } - - ################################################################################################################################ - if ($self->begin('pathCreate()')) - { - my $strPathParent = $self->testPath() . 
'/parent'; - my $strPathSub = "${strPathParent}/sub1/sub2"; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oPosix->pathCreate($strPathParent)}, undef, 'parent path'); - $self->testResult( - sub {$oPosix->pathExists($strPathParent)}, true, ' check path'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$oPosix->pathCreate($strPathParent)}, ERROR_PATH_EXISTS, - "unable to create path '${strPathParent}' because it already exists"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$oPosix->pathCreate($strPathParent, {bIgnoreExists => true})}, undef, 'path already exists'); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("sudo chown root:root ${strPathParent} && sudo chmod 700 ${strPathParent}"); - - $self->testException( - sub {$oPosix->pathCreate($strPathSub)}, ERROR_PATH_CREATE, - "unable to create path '${strPathSub}': Permission denied"); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("rmdir ${strPathParent}"); - - $self->testException( - sub {$oPosix->pathCreate($strPathSub)}, ERROR_PATH_MISSING, - "unable to create path '${strPathSub}' because parent does not exist"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$oPosix->pathCreate($strPathSub, {bCreateParent => true})}, undef, 'path with parents'); - $self->testResult( - sub {$oPosix->pathExists($strPathSub)}, true, ' check path'); - } - - 
################################################################################################################################ - if ($self->begin('move()')) - { - my $strFileCopy = "${strFile}.copy"; - my $strFileSub = $self->testPath() . '/sub/file.txt'; - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$oPosix->move($strFile, $strFileCopy)}, ERROR_FILE_MISSING, - "unable to move '${strFile}' because it is missing"); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {storageTest()->put($strFile, $strFileContent)}, $iFileLength, 'put'); - $self->testResult( - sub {$oPosix->move($strFile, $strFileCopy)}, undef, 'simple move'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {$oPosix->move($strFileCopy, $strFileSub)}, ERROR_PATH_MISSING, - "unable to move '${strFileCopy}' to missing path '" . dirname($strFileSub) . "'"); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest('sudo mkdir ' . dirname($strFileSub) . ' && sudo chmod 700 ' . dirname($strFileSub)); - - $self->testException( - sub {$oPosix->move($strFileCopy, $strFileSub)}, ERROR_FILE_MOVE, - "unable to move '${strFileCopy}' to '${strFileSub}': Permission denied"); - - executeTest('sudo rmdir ' . 
dirname($strFileSub)); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$oPosix->move($strFileCopy, $strFileSub, {bCreatePath => true})}, undef, 'create parent path'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3AuthPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3AuthPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3AuthPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3AuthPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,167 +0,0 @@ -#################################################################################################################################### -# S3 Authentication Tests -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageS3AuthPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use POSIX qw(strftime); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Common::Wait; -use pgBackRest::Storage::S3::Auth; - -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# run -#################################################################################################################################### -sub 
run -{ - my $self = shift; - - ################################################################################################################################ - if ($self->begin('s3DateTime')) - { - $self->testResult(sub {s3DateTime(1491267845)}, '20170404T010405Z', 'format date/time'); - - #--------------------------------------------------------------------------------------------------------------------------- - waitRemainder(); - $self->testResult(sub {s3DateTime()}, strftime("%Y%m%dT%H%M%SZ", gmtime()), 'format current date/time'); - } - - ################################################################################################################################ - if ($self->begin('s3CanonicalRequest')) - { - $self->testResult( - sub {s3CanonicalRequest( - 'GET', qw(/), 'list-type=2', - {'host' => 'bucket.s3.amazonaws.com', 'x-amz-date' => '20170606T121212Z', - 'x-amz-content-sha256' => '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9'}, - '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9')}, - "(GET\n/\nlist-type=2\nhost:bucket.s3.amazonaws.com\n" . - "x-amz-content-sha256:705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9\n" . - "x-amz-date:20170606T121212Z\n\nhost;x-amz-content-sha256;x-amz-date\n" . - '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9' . 
- ', host;x-amz-content-sha256;x-amz-date)', - 'canonical request'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testException( - sub {s3CanonicalRequest( - 'GET', qw(/), 'list-type=2', {'Host' => 'bucket.s3.amazonaws.com'}, - '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9')}, - ERROR_ASSERT, "header 'Host' must be lower case"); - } - - ################################################################################################################################ - if ($self->begin('s3SigningKey')) - { - $self->testResult( - sub {unpack('H*', s3SigningKey('20170412', 'us-east-1', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))}, - '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9', 'signing key'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {unpack('H*', s3SigningKey('20170412', 'us-east-1', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))}, - '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9', 'same signing key from cache'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {unpack('H*', s3SigningKey('20170505', 'us-west-1', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))}, - 'c1a1cb590bbc38ba789c8e5695a1ec0cd7fd44c6949f922e149005a221524c09', 'new signing key'); - } - - ################################################################################################################################ - if ($self->begin('s3StringToSign')) - { - $self->testResult( - sub {s3StringToSign( - '20170412T141414Z', 'us-east-1', '705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9')}, - "AWS4-HMAC-SHA256\n20170412T141414Z\n20170412/us-east-1/s3/aws4_request\n" . 
- "705636ecdedffc09f140497bcac3be1e8d069008ecc6a8029e104d6291b4e4e9", - 'string to sign'); - } - - ################################################################################################################################ - if ($self->begin('s3AuthorizationHeader')) - { - $self->testResult( - sub {s3AuthorizationHeader( - 'us-east-1', 'bucket.s3.amazonaws.com', 'GET', qw(/), 'list-type=2', '20170606T121212Z', - {'authorization' => BOGUS, 'host' => 'bucket.s3.amazonaws.com', 'x-amz-date' => '20170606T121212Z'}, - 'AKIAIOSFODNN7EXAMPLE', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY', undef, - 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')}, - '({authorization => AWS4-HMAC-SHA256 Credential=AKIAIOSFODNN7EXAMPLE/20170606/us-east-1/s3/aws4_request,' . - 'SignedHeaders=host;x-amz-content-sha256;x-amz-date,' . - 'Signature=cb03bf1d575c1f8904dabf0e573990375340ab293ef7ad18d049fc1338fd89b3,' . - ' host => bucket.s3.amazonaws.com,' . - ' x-amz-content-sha256 => e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,' . - ' x-amz-date => 20170606T121212Z}, ' . - "GET\n" . - "/\n" . - "list-type=2\n" . - "host:bucket.s3.amazonaws.com\n" . - "x-amz-content-sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n" . - "x-amz-date:20170606T121212Z\n" . - "\n" . - "host;x-amz-content-sha256;x-amz-date\n" . - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855, " . - "host;x-amz-content-sha256;x-amz-date, " . - "AWS4-HMAC-SHA256\n" . - "20170606T121212Z\n" . - "20170606/us-east-1/s3/aws4_request\n" . 
- "4f2d4ee971f579e60ba6b3895e87434e17b1260f04392f02b512c1e8bada72dd)", - 'authorization header request'); - - $self->testResult( - sub {s3AuthorizationHeader( - 'us-east-1', 'bucket.s3.amazonaws.com', 'GET', qw(/), 'list-type=2', '20170606T121212Z', - {'authorization' => BOGUS, 'host' => 'bucket.s3.amazonaws.com', 'x-amz-date' => '20170606T121212Z'}, - 'AKIAIOSFODNN7EXAMPLE', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY', - 'AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQW' . - 'LWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGd' . - 'QrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU' . - '9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz' . - '+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==', - 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')}, - '({authorization => AWS4-HMAC-SHA256 Credential=AKIAIOSFODNN7EXAMPLE/20170606/us-east-1/s3/aws4_request,' . - 'SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,' . - 'Signature=c12565bf5d7e0ef623f76d66e09e5431aebef803f6a25a01c586525f17e474a3,' . - ' host => bucket.s3.amazonaws.com,' . - ' x-amz-content-sha256 => e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,' . - ' x-amz-date => 20170606T121212Z, x-amz-security-token => AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4H' . - 'IZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZ' . - 'ampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+sc' . - 'qKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==}, ' . - "GET\n" . - "/\n" . - "list-type=2\n" . - "host:bucket.s3.amazonaws.com\n" . - "x-amz-content-sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n" . - "x-amz-date:20170606T121212Z\n" . 
- "x-amz-security-token:AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIe" . - "oIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlR" . - "d8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJab" . - "IQwj2ICCR/oLxBA==\n" . - "\n" . - "host;x-amz-content-sha256;x-amz-date;x-amz-security-token\n" . - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855, " . - "host;x-amz-content-sha256;x-amz-date;x-amz-security-token, " . - "AWS4-HMAC-SHA256\n" . - "20170606T121212Z\n" . - "20170606/us-east-1/s3/aws4_request\n" . - "c171e7a68355ef4e0e6e1003d2d4a79a7b06e7424e3000ba619f5f7882a3251e)", - 'authorization header request with token'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3CertPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3CertPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3CertPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3CertPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,113 +0,0 @@ -#################################################################################################################################### -# S3 SSL Certificate Tests -# -# Verify that SSL certificate validation works on live S3 servers. 
-#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageS3CertPerlTest; -use parent 'pgBackRestTest::Env::ConfigEnvTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use Storable qw(dclone); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Log; -use pgBackRest::Common::Wait; -use pgBackRest::Config::Config; -use pgBackRest::Protocol::Storage::Helper; - -use pgBackRestTest::Common::RunTest; -use pgBackRestTest::Common::VmTest; - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Use long random string so bucket lookups will fail and expose access errors - my $strBucket = 'bnBfyKpXR8ZqQY5RXszxemRgvtmjXd4tf5HkFYhTpT9BndUCYMDy5NCCyRz'; - my $strEndpoint = 's3-us-west-2.amazonaws.com'; - my $strRegion = 'us-west-2'; - - # Options - $self->optionTestSet(CFGOPT_REPO_TYPE, CFGOPTVAL_REPO_TYPE_S3); - $self->optionTestSet(CFGOPT_REPO_S3_KEY, BOGUS); - $self->optionTestSet(CFGOPT_REPO_S3_KEY_SECRET, BOGUS); - $self->optionTestSet(CFGOPT_REPO_S3_TOKEN, BOGUS); - $self->optionTestSet(CFGOPT_REPO_S3_BUCKET, $strBucket); - $self->optionTestSet(CFGOPT_REPO_S3_ENDPOINT, $strEndpoint); - $self->optionTestSet(CFGOPT_REPO_S3_REGION, $strRegion); - $self->optionTestSet(CFGOPT_STANZA, $self->stanza()); - - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - - 
################################################################################################################################ - if ($self->begin('validation')) - { - if ($self->vm eq VM_U12) - { - &log(INFO, 'cannot test - certificates are no longer maintained for ' . $self->vm()); - } - else - { - #----------------------------------------------------------------------------------------------------------------------- - if ($self->vm() eq VM_CO7) - { - # Tests fails on co7 because by default certs cannot be located. This logic may need to be changed in the future if - # this bug gets fixed by Red Hat. UPDATE: The behavior changed here but it does not seems to be fixed. - $self->testException( - sub {storageRepo({strStanza => 'test1'})->list('/')}, ERROR_HOST_CONNECT, - 'SSL connect attempt failed with unknown error error.*certificate verify failed', - 'cert verify fails on ' . VM_CO7); - - # It should work when verification is disabled - $self->optionTestSetBool(CFGOPT_REPO_S3_VERIFY_TLS, false); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - - $self->testException( - sub {storageRepo({strStanza => 'test2'})->list('/')}, ERROR_PROTOCOL, 'S3 request error \[403\] Forbidden.*', - 'connection succeeds with verification disabled, (expected) error on invalid access key'); - - $self->optionTestClear(CFGOPT_REPO_S3_VERIFY_TLS); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - } - - #----------------------------------------------------------------------------------------------------------------------- - # CO7 doesn't locate certs automatically so specify the path - if ($self->vm() eq VM_CO7) - { - $self->optionTestSet(CFGOPT_REPO_S3_CA_FILE, '/etc/pki/tls/certs/ca-bundle.crt'); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - } - - $self->testException( - sub {storageRepo({strStanza => 'test3'})->list('/')}, ERROR_PROTOCOL, 'S3 request error \[403\] Forbidden.*', - 'connection succeeds, (expected) error on invalid access key'); - - if ($self->vm() eq VM_CO7) - { - 
$self->optionTestClear(CFGOPT_REPO_S3_CA_FILE); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - } - - #----------------------------------------------------------------------------------------------------------------------- - $self->optionTestSet(CFGOPT_REPO_S3_CA_PATH, '/bogus'); - $self->configTestLoad(CFGCMD_ARCHIVE_PUSH); - - $self->testException( - sub {storageRepo({strStanza => 'test4'})->list('/')}, ERROR_HOST_CONNECT, - $self->vm() eq VM_CO6 ? 'SSL connect attempt failed with unknown error.*certificate verify failed' : 'No such file or directory', - 'invalid ca path'); - } - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3PerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3PerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3PerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3PerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,219 +0,0 @@ -#################################################################################################################################### -# S3 Storage Tests -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageS3PerlTest; -use parent 'pgBackRestTest::Env::S3EnvTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English '-no_match_vars'; - -use pgBackRest::Common::Log; -use pgBackRest::Common::String; -use pgBackRest::LibC qw(:crypto); -use pgBackRest::Storage::S3::Driver; - -use pgBackRestTest::Common::ExecuteTest; -use 
pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# initTest -#################################################################################################################################### -sub initTest -{ - my $self = shift; - - executeTest("$self->{strS3Command} rm --recursive s3://pgbackrest-dev"); -} - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Initialize the driver - my $oS3 = $self->initS3(); - my $oStorage = new pgBackRest::Storage::Local('', $oS3); - - # Test variables - my $strFile = 'file.txt'; - my $strFileContent = 'TESTDATA'; - my $iFileLength = length($strFileContent); - - ################################################################################################################################ - if ($self->begin('exists()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oStorage->exists($strFile)}, false, 'root file does not exist'); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFile, $strFileContent); - executeTest("$self->{strS3Command} cp " . $self->testPath() . 
"/${strFile} s3://pgbackrest-dev"); - - $self->testResult(sub {$oStorage->exists($strFile)}, true, 'root file exists'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oStorage->pathExists('/path/to')}, false, 'sub path does not exist'); - $self->testResult(sub {$oStorage->exists("/path/to/${strFile}")}, false, 'sub file does not exist'); - - #--------------------------------------------------------------------------------------------------------------------------- - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile} s3://pgbackrest-dev/path/to/${strFile}"); - - $self->testResult(sub {$oStorage->pathExists('/path/to')}, true, 'sub path exists'); - # $oStorage->pathExists('/path/to'); - $self->testResult(sub {$oStorage->exists("/path/to/${strFile}")}, true, 'sub file exists'); - } - - ################################################################################################################################ - if ($self->begin('manifest()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oStorage->manifest('')}, '{. => {type => d}}', 'no files'); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFile, $strFileContent); - storageTest()->put("${strFile}2", $strFileContent . '2'); - - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile} s3://pgbackrest-dev"); - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile}2 s3://pgbackrest-dev/path/to/${strFile}2"); - - $self->testResult( - sub {$oStorage->manifest('')}, - '{. => {type => d}, file.txt => {size => 8, type => f}, path => {type => d}, path/to => {type => d},' . 
- ' path/to/file.txt2 => {size => 9, type => f}}', - 'root path'); - $self->testResult( - sub {$oStorage->manifest('/path/to')}, '{. => {type => d}, file.txt2 => {size => 9, type => f}}', 'sub path'); - } - - ################################################################################################################################ - if ($self->begin('list()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult( - sub {$oStorage->list('')}, '[undef]', 'no files'); - - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFile, $strFileContent); - storageTest()->put("${strFile}2", $strFileContent . '2'); - - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile} s3://pgbackrest-dev"); - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile}2 s3://pgbackrest-dev/path/to/${strFile}2"); - - $self->testResult(sub {$oStorage->list('')}, '(file.txt, path)', 'root path'); - $self->testResult(sub {$oStorage->list('/path/to')}, 'file.txt2', 'sub path'); - } - - ################################################################################################################################ - if ($self->begin('remove()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - $oStorage->put($strFile, $strFileContent); - $oStorage->put("/path/to/${strFile}2", $strFileContent); - $oStorage->put("/path/to/${strFile}3", $strFileContent); - $oStorage->put("/path/to/${strFile}4 \@+", $strFileContent); - - $self->testResult( - sub {$oStorage->manifest('/')}, - '{. => {type => d}, file.txt => {size => 8, type => f}, path => {type => d}, path/to => {type => d},' . - ' path/to/file.txt2 => {size => 8, type => f}, path/to/file.txt3 => {size => 8, type => f},' . 
- ' path/to/file.txt4 @+ => {size => 8, type => f}}', - 'check manifest'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oStorage->remove('/path/to', {bRecurse => true})}, true, 'remove subpath'); - - $self->testResult( - sub {$oStorage->manifest('/')}, - '{. => {type => d}, file.txt => {size => 8, type => f}}', 'check manifest'); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oStorage->remove($strFile)}, true, 'remove file'); - - $self->testResult(sub {$oStorage->manifest('/')}, '{. => {type => d}}', 'check manifest'); - } - - ################################################################################################################################ - if ($self->begin('info()')) - { - #--------------------------------------------------------------------------------------------------------------------------- - storageTest()->put($strFile, $strFileContent); - storageTest()->put("${strFile}2", $strFileContent . '2'); - - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile} s3://pgbackrest-dev"); - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile}2 s3://pgbackrest-dev"); - executeTest("$self->{strS3Command} cp " . $self->testPath() . "/${strFile}2 s3://pgbackrest-dev/path/to/${strFile}2"); - - $self->testResult(sub {$oStorage->info($strFile)->size()}, 8, 'file size'); - $self->testResult(sub {$oStorage->info("/path/to/${strFile}2")->size()}, 9, 'file 2 size'); - } - - ################################################################################################################################ - if ($self->begin('openRead() && S3::FileRead')) - { - # Create a random 1mb file - my $strRandomFile = $self->testPath() . 
'/random@1mb.bin'; - executeTest("dd if=/dev/urandom of=${strRandomFile} bs=1024k count=1", {bSuppressStdErr => true}); - my $strRandom = ${storageTest()->get($strRandomFile)}; - - executeTest("$self->{strS3Command} cp ${strRandomFile} s3://pgbackrest-dev/path/to/${strFile}"); - - #--------------------------------------------------------------------------------------------------------------------------- - my $tBuffer; - my $oFileRead = $self->testResult(sub {$oS3->openRead("/path/to/${strFile}")}, '[object]', 'open read'); - $self->testResult(sub {$oFileRead->read(\$tBuffer, 524288)}, 524288, ' read half'); - $self->testResult(sub {$oFileRead->read(\$tBuffer, 524288)}, 524288, ' read half'); - $self->testResult(sub {$oFileRead->read(\$tBuffer, 512)}, 0, ' read 0'); - $self->testResult(length($tBuffer), 1048576, ' check length'); - $self->testResult(cryptoHashOne('sha1', $tBuffer), cryptoHashOne('sha1', $strRandom), ' check hash'); - } - - ################################################################################################################################ - if ($self->begin('openWrite() && S3::FileWrite')) - { - # Create a random 1mb file - my $strRandomFile = $self->testPath() . 
'/random1mb.bin'; - executeTest("dd if=/dev/urandom of=${strRandomFile} bs=1024k count=1", {bSuppressStdErr => true}); - my $strRandom = ${storageTest()->get($strRandomFile)}; - - #--------------------------------------------------------------------------------------------------------------------------- - my $oFileWrite = $self->testResult(sub {$oS3->openWrite("/path/to/${strFile}")}, '[object]', 'open write'); - $self->testResult(sub {$oFileWrite->name()}, "/path/to/${strFile}", ' check filename'); - $self->testResult(sub {$oFileWrite->close()}, true, ' close without writing'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oFileWrite = $self->testResult(sub {$oS3->openWrite("/path/to/${strFile}" . '.@')}, '[object]', 'open write'); - $self->testResult(sub {$oFileWrite->write()}, 0, ' write undef'); - $self->testResult(sub {$oFileWrite->write(\$strFileContent)}, $iFileLength, ' write'); - $oFileWrite->close(); - - $self->testResult(sub {$oS3->exists("/path/to/${strFile}" . '.@')}, true, 'destination file exists'); - - # Test that a premature destroy (from error or otherwise) does not rename the file - #--------------------------------------------------------------------------------------------------------------------------- - $oFileWrite = $self->testResult(sub {$oS3->openWrite("/path/to/abort.file" . 
'.@')}, '[object]', 'open write'); - $self->testResult(sub {$oFileWrite->write()}, 0, ' write undef'); - $self->testResult(sub {$oFileWrite->write(\$strFileContent)}, $iFileLength, ' write'); - - undef($oFileWrite); - $self->testResult(sub {$oS3->exists("/path/to/abort.file")}, false, 'destination file does not exist'); - - #--------------------------------------------------------------------------------------------------------------------------- - $oFileWrite = $self->testResult(sub {$oS3->openWrite("/path/to/${strFile}")}, '[object]', 'open write'); - - for (my $iIndex = 1; $iIndex <= 17; $iIndex++) - { - $self->testResult(sub {$oFileWrite->write(\$strRandom)}, 1024 * 1024, ' write 1mb'); - } - - $self->testResult(sub {$oFileWrite->close()}, true, ' close'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3RequestPerlTest.pm pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3RequestPerlTest.pm --- pgbackrest-2.15.1/test/lib/pgBackRestTest/Module/Storage/StorageS3RequestPerlTest.pm 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lib/pgBackRestTest/Module/Storage/StorageS3RequestPerlTest.pm 1970-01-01 00:00:00.000000000 +0000 @@ -1,184 +0,0 @@ -#################################################################################################################################### -# S3 Request Tests -#################################################################################################################################### -package pgBackRestTest::Module::Storage::StorageS3RequestPerlTest; -use parent 'pgBackRestTest::Common::RunTest'; - -#################################################################################################################################### -# Perl includes -#################################################################################################################################### -use strict; -use warnings FATAL => qw(all); -use Carp qw(confess); -use English 
'-no_match_vars'; - - use IO::Socket::SSL; -use POSIX qw(strftime); - -use pgBackRest::Common::Exception; -use pgBackRest::Common::Http::Client; -use pgBackRest::Common::Log; -use pgBackRest::Common::Wait; -use pgBackRest::Storage::S3::Request; - -use pgBackRestTest::Common::ContainerTest; -use pgBackRestTest::Common::ExecuteTest; -use pgBackRestTest::Common::RunTest; - -#################################################################################################################################### -# Port to use for testing -#################################################################################################################################### -use constant HTTPS_TEST_PORT => 9443; - -#################################################################################################################################### -# httpsServerResponse -#################################################################################################################################### -sub httpsServerResponse -{ - my $self = shift; - my $iResponseCode = shift; - my $strContent = shift; - - # Write header - $self->{oConnection}->write("HTTP/1.1 ${iResponseCode} GenericMessage\r\n"); - $self->{oConnection}->write(HTTP_HEADER_CONTENT_LENGTH . ': ' . (defined($strContent) ? length($strContent) : 0) . 
"\r\n"); - - # Write new line before content (even if there isn't any) - $self->{oConnection}->write("\r\n"); - - # Write content - if (defined($strContent)) - { - $self->{oConnection}->write($strContent); - } - - # This will block until the connection is closed by the client - $self->{oConnection}->read(); -} - -#################################################################################################################################### -# httpsServerAccept -#################################################################################################################################### -sub httpsServerAccept -{ - my $self = shift; - - # Wait for a connection - $self->{oConnection} = $self->{oSocketServer}->accept() - or confess "failed to accept or handshake $!, $SSL_ERROR"; - &log(INFO, " * socket server connected"); -} - -#################################################################################################################################### -# httpsServer -#################################################################################################################################### -sub httpsServer -{ - my $self = shift; - my $fnServer = shift; - - # Fork off the server - if (fork() == 0) - { - # Run server function - $fnServer->(); - - exit 0; - } -} - -#################################################################################################################################### -# Start the https testing server -#################################################################################################################################### -sub initModule -{ - my $self = shift; - - # Open the domain socket - $self->{oSocketServer} = IO::Socket::SSL->new( - LocalAddr => '127.0.0.1', LocalPort => HTTPS_TEST_PORT, Listen => 1, SSL_cert_file => CERT_FAKE_SERVER, - SSL_key_file => CERT_FAKE_SERVER_KEY) - or confess "unable to open https server for testing: $!"; - &log(INFO, " * socket server open"); -} - 
-#################################################################################################################################### -# Stop the https testing server -#################################################################################################################################### -sub cleanModule -{ - my $self = shift; - - # Shutdown server - $self->{oSocketServer}->close(); - &log(INFO, " * socket server closed"); -} - -#################################################################################################################################### -# run -#################################################################################################################################### -sub run -{ - my $self = shift; - - # Initialize request object - my $oS3Request = new pgBackRest::Storage::S3::Request( - BOGUS, BOGUS, BOGUS, BOGUS, BOGUS, {strHost => '127.0.0.1', iPort => HTTPS_TEST_PORT, bVerifySsl => false}); - - ################################################################################################################################ - if ($self->begin('success')) - { - $self->httpsServer(sub - { - $self->httpsServerAccept(); - $self->httpsServerResponse(200); - - #----------------------------------------------------------------------------------------------------------------------- - $self->httpsServerAccept(); - $self->httpsServerResponse(200); - }); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oS3Request->request(HTTP_VERB_GET)}, undef, 'successful request'); - $self->testResult(sub {$oS3Request->request(HTTP_VERB_GET)}, undef, 'successful request'); - } - - ################################################################################################################################ - if ($self->begin('retry')) - { - $self->httpsServer(sub - { - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - - 
$self->httpsServerAccept(); - $self->httpsServerResponse(500); - - $self->httpsServerAccept(); - $self->httpsServerResponse(200); - - #----------------------------------------------------------------------------------------------------------------------- - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - - $self->httpsServerAccept(); - $self->httpsServerResponse(500); - }); - - #--------------------------------------------------------------------------------------------------------------------------- - $self->testResult(sub {$oS3Request->request(HTTP_VERB_GET)}, undef, 'successful request after retries'); - $self->testException( - sub {$oS3Request->request(HTTP_VERB_GET)}, ERROR_PROTOCOL, 'S3 request error after 5 tries \[500\] GenericMessage.*'); - } -} - -1; diff -Nru pgbackrest-2.15.1/test/lint/perlcritic.policy pgbackrest-2.16/test/lint/perlcritic.policy --- pgbackrest-2.15.1/test/lint/perlcritic.policy 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/lint/perlcritic.policy 1970-01-01 00:00:00.000000000 +0000 @@ -1,186 +0,0 @@ -# Main Perl Critic Policy Applied to Entire Code Base -#----------------------------------------------------------------------------------------------------------------------------------- - -# Important policies that should always be checked -#----------------------------------------------------------------------------------------------------------------------------------- -[TestingAndDebugging::RequireUseStrict] -severity = 5 - -[TestingAndDebugging::RequireUseWarnings] -severity = 5 - -# Permanent Exceptions -#----------------------------------------------------------------------------------------------------------------------------------- - -# Requires all local variables to be all lower/upper case -- can't 
see how this is a good thing. -[-NamingConventions::Capitalization] - -# Requires @_ to be immediately unpacked but won't work with param logging scheme. -[-Subroutines::RequireArgUnpacking] - -# Requires all exports to be configurable by caller. This is fine for independent libraries but -# overly burdensome for modules integrated as part of an application. Maybe apply to certain modules -# that are also used by doc and test programs? -[-Modules::ProhibitAutomaticExportation] - -# Requires built-in functions to not have parens, but in this project it is preferred to wrap all -# function calls in parens for consistency. -[-CodeLayout::ProhibitParensWithBuiltins] - -# Requires module version vars. Probably not practical for built-in modules. -[-Modules::RequireVersionVar] - -# Requires extended formatting for all regexps. Seems overly burdensome, or at least something to look -# at a lot further down the road. -[-RegularExpressions::RequireExtendedFormatting] - -# Requires Unicode safe expressions. May be worth looking at sometime. -[-RegularExpressions::ProhibitEnumeratedClasses] - -# S2 - Requires List::MoreUtils instead of boolean grep. Not worth it to load another module. -[-BuiltinFunctions::ProhibitBooleanGrep] - -# Provisional Exceptions for Test & Documentation Code -#----------------------------------------------------------------------------------------------------------------------------------- - -# S2 - Requires complete POD sections but these are being removed anyway in favor of Config.pm. -[-Documentation::RequirePodSections] -[-Documentation::RequirePodAtEnd] - -# S3 - Requires regexps to be below a certain length. Seems burdensome. -[-RegularExpressions::ProhibitComplexRegexes] - -# To Be Fixed or Evaluated -# -# Natural ordering here indicates the order in which they should be addressed. 
-#----------------------------------------------------------------------------------------------------------------------------------- - -# S2 - Requires all long numbers to have thousand separators. Probably a good idea bit need to change a fair amount of code. -[-ValuesAndExpressions::RequireNumberSeparators] - -# S4 - Requires parens when logical and bitwise booleans are mixed. -[-ValuesAndExpressions::ProhibitMixedBooleanOperators] - -# S4 - Requires that sub names not overlap with built-ins - a bummer for object members. -[-Subroutines::ProhibitBuiltinHomonyms] - -# S4 - Requires block form of grep for readability. Needs to be fixed in about 15 places. -[-BuiltinFunctions::RequireBlockGrep] - -# S4 - Requires modification of certain vars (e.g. $SIG) to have local scope. Needs to be fixed in about 20 places. -[-Variables::RequireLocalizedPunctuationVars] - -# S4 - Requires close() to be called soon after open but seems arbitrary. -[-InputOutput::RequireBriefOpen] - -# S1 - Requires reverse keyword for reverse sorts instead of block. May not be able to since $a $b are passed as a parameter. -[-BuiltinFunctions::ProhibitReverseSortBlock] - -# S3 - Requires use of Carp instead of die or warn. Doesn't seem useful. -[-ErrorHandling::RequireCarping] - -# S3 - Requires use of local vars in packages. Can't use as it prohibits use of $DBI::errstr. -[-Variables::ProhibitPackageVars] - -# S3 - Requires that certain operators not be mixed. -[-ValuesAndExpressions::ProhibitMismatchedOperators] - -# S2 - Requires use of if instead of unless. -[-ControlStructures::ProhibitUnlessBlocks] - -# S1 - Requires true literals to use single quotes. -[-ValuesAndExpressions::ProhibitInterpolationOfLiterals] - -# S2 - Requires split expressions to be regexp for clarity. -[-BuiltinFunctions::ProhibitStringySplit] - -# S4 - Requires use of Readonly instead of const. Has performance and syntax advantages. 
-[-ValuesAndExpressions::ProhibitConstantPragma] - -# S2 - Requires all numbers to be defined as constants -[-ValuesAndExpressions::ProhibitMagicNumbers] - -# S4 - Requires all subs to have a return, even if there is not value to return. -[-Subroutines::RequireFinalReturn] - -# S4 - Requires new to be called as Object->new(). -[-Objects::ProhibitIndirectSyntax] - -# S2 - Requires that & not be used in functions calls. Currently this is used a lot for &log() calls. -[-Subroutines::ProhibitAmpersandSigils] - -# S2 - Requires use of eq instead of regexp when possible. -[-RegularExpressions::ProhibitFixedStringMatches] - -# S2 - Requires that sigils be separated by braces, eg %$var becomes %{$var}. -[-References::ProhibitDoubleSigils] - -# S2 - Requires use English instead a puctuation vars such as $!. -[-Variables::ProhibitPunctuationVars] - -# S3 - Requires nested if/else have limited depth and recommends using given/when instead. -[-ControlStructures::ProhibitCascadingIfElse] - -# S2 - Requires empty strings to be represented with qw{}. -[-ValuesAndExpressions::ProhibitEmptyQuotes] - -# S2 - Requires non letter and number strings to be represented with something like qw{/}. -[-ValuesAndExpressions::ProhibitNoisyQuotes] - -# S2 - Requires expanded matching for . in regular expressions. -[-RegularExpressions::RequireDotMatchAnything] - -# S2 - Requires sed-style boundary matching. May not be appropriate for reg exps in this project, though. -[-RegularExpressions::RequireLineBoundaryMatching] - -# S1 - Requires use of Perl::Tidy. -[-CodeLayout::RequireTidyCode] - -# S2 - Requires use of Perl syntax for simple loops, e.g. for (0..$max). -[-ControlStructures::ProhibitCStyleForLoops] - -# S2 - Require standard if structures rather than postfix for readability. -[-ControlStructures::ProhibitPostfixControls] - -# S3 - Requires code have a McCabe score of no more than 20 but is configurable. 
-[-Subroutines::ProhibitExcessComplexity] -[-Modules::ProhibitExcessMainComplexity] - -# S3 - Requires low level of code nesting (may require a lot of refactoring). -[-ControlStructures::ProhibitDeepNests] - -# S3 - Requires subs to have <= 6 args but is configurable. -[-Subroutines::ProhibitManyArgs] - -# S3 - Requires arbitrary unambigious names but is configurable. -[-NamingConventions::ProhibitAmbiguousNames] - -# S3 - Requires that var names never be resused in a sub, not sure about this one. -[-Variables::ProhibitReusedNames] - -# S3 - Requires non-capturing groups in regexp, primarily a performance optimization. -[-RegularExpressions::ProhibitUnusedCapture] - -# S1 - Requires trailing commas on all lists. -[-CodeLayout::RequireTrailingCommas] - -# S2 - Requires chained calls be less than four. -[-ValuesAndExpressions::ProhibitLongChainsOfMethodCalls] - -# S2 - Requires check for success of close() function. -[-InputOutput::RequireCheckedClose] - -# S1 - Requires use Fatal or autodie with syscalls. -[-InputOutput::RequireCheckedSyscalls] - -# S1 - Requires less abiguity for metacharacters in strings. -[-ValuesAndExpressions::RequireInterpolationOfMetachars] - -# S4 - Requires character classes to reduce escapes in regexps. -[-RegularExpressions::ProhibitEscapedMetacharacters] - -# S1 - Require character classes rather than single character alternation. 
-[-RegularExpressions::ProhibitSingleCharAlternation] - -# S2 - Require qw{} syntax for quoted string lists -[-CodeLayout::ProhibitQuotedWordLists] Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/d8-libdevel-cover-perl_1.23-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/d8-libdevel-cover-perl_1.23-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/d9-libdevel-cover-perl_1.23-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/d9-libdevel-cover-perl_1.23-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/u12-libdevel-cover-perl_1.23-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/u12-libdevel-cover-perl_1.23-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/u14-libdevel-cover-perl_1.23-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/u14-libdevel-cover-perl_1.23-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/u14-libdevel-cover-perl_1.29-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/u14-libdevel-cover-perl_1.29-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/u16-libdevel-cover-perl_1.23-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/u16-libdevel-cover-perl_1.23-2_amd64.deb differ Binary files /tmp/tmpz5C9Hb/mIU64TTomz/pgbackrest-2.15.1/test/package/u18-libdevel-cover-perl_1.29-2_amd64.deb and /tmp/tmpz5C9Hb/vNYhWKempS/pgbackrest-2.16/test/package/u18-libdevel-cover-perl_1.29-2_amd64.deb differ diff -Nru pgbackrest-2.15.1/test/patch/debian-package.patch pgbackrest-2.16/test/patch/debian-package.patch --- pgbackrest-2.15.1/test/patch/debian-package.patch 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/patch/debian-package.patch 2019-08-05 16:03:04.000000000 +0000 @@ -1,20 +1,30 @@ ---- rules -+++ rules -@@ 
-13,7 +13,7 @@ +--- control ++++ control +@@ -4,11 +4,10 @@ + Maintainer: Debian PostgreSQL Maintainers + Uploaders: Adrian Vondendriesch + Build-Depends: debhelper (>= 9), +- libio-socket-ssl-perl, + libperl-dev, ++ libpq-dev, + libssl-dev, + libxml-checker-perl, +- libxml-libxml-perl, + libxml2-dev, + txt2man, + zlib1g-dev +@@ -19,12 +18,12 @@ - override_dh_auto_configure: - # src contains a seperate configure script. -- cd $(CURDIR)/src && ./configure -+ cd $(CURDIR)/src && ./configure --prefix=/usr - dh_auto_configure - - override_dh_auto_build: -@@ -36,7 +36,7 @@ - dh_auto_clean - - override_dh_auto_install: -- make -C src install DESTDIR=$(CURDIR)/debian/pgbackrest/usr/bin -+ make -C src install DESTDIR=$(CURDIR)/debian/pgbackrest - dh_auto_install - - .PHONY: build + Package: pgbackrest + Architecture: any +-Depends: libdbd-pg-perl, ++Depends: perl, + postgresql-common, + ${misc:Depends}, + ${perl:Depends}, + ${shlibs:Depends} +-Suggests: libio-socket-ssl-perl, libxml-libxml-perl, pgbackrest-doc ++Suggests: pgbackrest-doc + Description: Reliable PostgreSQL Backup & Restore + pgBackRest is a simple, reliable backup and restore system for PostgreSQL + that can seamlessly scale up to the largest databases and workloads. 
diff -Nru pgbackrest-2.15.1/test/patch/rhel-package.patch pgbackrest-2.16/test/patch/rhel-package.patch --- pgbackrest-2.15.1/test/patch/rhel-package.patch 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/patch/rhel-package.patch 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,20 @@ +--- pgbackrest.spec ++++ pgbackrest.spec +@@ -10,15 +10,14 @@ + Source0: https://github.com/pgbackrest/pgbackrest/archive/release/%{version}.tar.gz + Source1: pgbackrest-conf.patch + BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +-Requires: perl-XML-LibXML perl-IO-Socket-SSL + %if 0%{?rhel} && 0%{?rhel} <= 6 + Requires: perl-parent perl-JSON perl-Time-HiRes + %else + Requires: perl-JSON-PP + %endif +-Requires: perl-Digest-SHA perl-DBD-Pg perl-Time-HiRes zlib ++Requires: perl-Digest-SHA perl-Time-HiRes zlib libxml2 + Requires: perl(:MODULE_COMPAT_%(eval "`%{__perl} -V:version`"; echo $version)) +-BuildRequires: openssl-devel zlib-devel perl-ExtUtils-Embed ++BuildRequires: openssl-devel zlib-devel postgresql-libs perl-ExtUtils-Embed + + %description + pgBackRest aims to be a simple, reliable backup and restore system that can diff -Nru pgbackrest-2.15.1/test/src/common/harnessPq.c pgbackrest-2.16/test/src/common/harnessPq.c --- pgbackrest-2.15.1/test/src/common/harnessPq.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/src/common/harnessPq.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,300 @@ +/*********************************************************************************************************************************** +Pq Test Harness +***********************************************************************************************************************************/ +#ifndef HARNESS_PQ_REAL + +#include + +#include + +#include "common/type/json.h" +#include "common/type/string.h" +#include "common/type/variantList.h" + +#include "common/harnessPq.h" + 
+/*********************************************************************************************************************************** +Script that defines how shim functions operate +***********************************************************************************************************************************/ +HarnessPq *harnessPqScript; +unsigned int harnessPqScriptIdx; + +// If there is a script failure change the behavior of cleanup functions to return immediately so the real error will be reported +// rather than a bogus scripting error during cleanup +bool harnessPqScriptFail; + +/*********************************************************************************************************************************** +Set pq script +***********************************************************************************************************************************/ +void +harnessPqScriptSet(HarnessPq *harnessPqScriptParam) +{ + if (harnessPqScript != NULL) + THROW(AssertError, "previous pq script has not yet completed"); + + if (harnessPqScriptParam[0].function == NULL) + THROW(AssertError, "pq script must have entries"); + + harnessPqScript = harnessPqScriptParam; + harnessPqScriptIdx = 0; +} + +/*********************************************************************************************************************************** +Run pq script +***********************************************************************************************************************************/ +static HarnessPq * +harnessPqScriptRun(const char *function, const VariantList *param, HarnessPq *parent) +{ + // Convert params to json for comparison and reporting + String *paramStr = param ? 
jsonFromVar(varNewVarLst(param), 0) : strNew(""); + + // Ensure script has not ended + if (harnessPqScript == NULL) + { + harnessPqScriptFail = true; + THROW_FMT(AssertError, "pq script ended before %s (%s)", function, strPtr(paramStr)); + } + + // Get current script item + HarnessPq *result = &harnessPqScript[harnessPqScriptIdx]; + + // Check that expected function was called + if (strcmp(result->function, function) != 0) + { + harnessPqScriptFail = true; + + THROW_FMT( + AssertError, "pq script [%u] expected function %s (%s) but got %s (%s)", harnessPqScriptIdx, result->function, + result->param == NULL ? "" : result->param, function, strPtr(paramStr)); + } + + // Check that parameters match + if ((param != NULL && result->param == NULL) || (param == NULL && result->param != NULL) || + (param != NULL && result->param != NULL && !strEqZ(paramStr, result->param))) + { + harnessPqScriptFail = true; + + THROW_FMT( + AssertError, "pq script [%u] function '%s', expects param '%s' but got '%s'", harnessPqScriptIdx, result->function, + result->param ? result->param : "NULL", param ? 
strPtr(paramStr) : "NULL"); + } + + // Make sure the session matches with the parent as a sanity check + if (parent != NULL && result->session != parent->session) + { + THROW_FMT( + AssertError, "pq script [%u] function '%s', expects session '%u' but got '%u'", harnessPqScriptIdx, result->function, + result->session, parent->session); + } + + // Sleep if requested + if (result->sleep > 0) + sleepMSec(result->sleep); + + harnessPqScriptIdx++; + + if (harnessPqScript[harnessPqScriptIdx].function == NULL) + harnessPqScript = NULL; + + return result; +} + +/*********************************************************************************************************************************** +Shim for PQconnectdb() +***********************************************************************************************************************************/ +PGconn *PQconnectdb(const char *conninfo) +{ + return (PGconn *)harnessPqScriptRun(HRNPQ_CONNECTDB, varLstAdd(varLstNew(), varNewStrZ(conninfo)), NULL); +} + +/*********************************************************************************************************************************** +Shim for PQstatus() +***********************************************************************************************************************************/ +ConnStatusType PQstatus(const PGconn *conn) +{ + return (ConnStatusType)harnessPqScriptRun(HRNPQ_STATUS, NULL, (HarnessPq *)conn)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQerrorMessage() +***********************************************************************************************************************************/ +char *PQerrorMessage(const PGconn *conn) +{ + return (char *)harnessPqScriptRun(HRNPQ_ERRORMESSAGE, NULL, (HarnessPq *)conn)->resultZ; +} + 
+/*********************************************************************************************************************************** +Shim for PQsetNoticeProcessor() +***********************************************************************************************************************************/ +PQnoticeProcessor +PQsetNoticeProcessor(PGconn *conn, PQnoticeProcessor proc, void *arg) +{ + (void)conn; + + // Call the processor that was passed so we have coverage + proc(arg, "test notice"); + return NULL; +} + +/*********************************************************************************************************************************** +Shim for PQsendQuery() +***********************************************************************************************************************************/ +int +PQsendQuery(PGconn *conn, const char *query) +{ + return harnessPqScriptRun(HRNPQ_SENDQUERY, varLstAdd(varLstNew(), varNewStrZ(query)), (HarnessPq *)conn)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQconsumeInput() +***********************************************************************************************************************************/ +int +PQconsumeInput(PGconn *conn) +{ + return harnessPqScriptRun(HRNPQ_CONSUMEINPUT, NULL, (HarnessPq *)conn)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQisBusy() +***********************************************************************************************************************************/ +int +PQisBusy(PGconn *conn) +{ + return harnessPqScriptRun(HRNPQ_ISBUSY, NULL, (HarnessPq *)conn)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQgetCancel() 
+***********************************************************************************************************************************/ +PGcancel * +PQgetCancel(PGconn *conn) +{ + return (PGcancel *)harnessPqScriptRun(HRNPQ_GETCANCEL, NULL, (HarnessPq *)conn); +} + +/*********************************************************************************************************************************** +Shim for PQcancel() +***********************************************************************************************************************************/ +int +PQcancel(PGcancel *cancel, char *errbuf, int errbufsize) +{ + HarnessPq *harnessPq = harnessPqScriptRun(HRNPQ_CANCEL, NULL, (HarnessPq *)cancel); + + if (!harnessPq->resultInt) + { + strncpy(errbuf, harnessPq->resultZ, (size_t)errbufsize); + errbuf[errbufsize - 1] = '\0'; + } + + return harnessPq->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQfreeCancel() +***********************************************************************************************************************************/ +void +PQfreeCancel(PGcancel *cancel) +{ + harnessPqScriptRun(HRNPQ_FREECANCEL, NULL, (HarnessPq *)cancel); +} + +/*********************************************************************************************************************************** +Shim for PQgetResult() +***********************************************************************************************************************************/ +PGresult * +PQgetResult(PGconn *conn) +{ + if (!harnessPqScriptFail) + { + HarnessPq *harnessPq = harnessPqScriptRun(HRNPQ_GETRESULT, NULL, (HarnessPq *)conn); + return harnessPq->resultNull ? 
NULL : (PGresult *)harnessPq; + } + + return NULL; +} + +/*********************************************************************************************************************************** +Shim for PQresultStatus() +***********************************************************************************************************************************/ +ExecStatusType +PQresultStatus(const PGresult *res) +{ + return (ExecStatusType)harnessPqScriptRun(HRNPQ_RESULTSTATUS, NULL, (HarnessPq *)res)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQresultErrorMessage() +***********************************************************************************************************************************/ +char * +PQresultErrorMessage(const PGresult *res) +{ + return (char *)harnessPqScriptRun(HRNPQ_RESULTERRORMESSAGE, NULL, (HarnessPq *)res)->resultZ; +} + +/*********************************************************************************************************************************** +Shim for PQntuples() +***********************************************************************************************************************************/ +int +PQntuples(const PGresult *res) +{ + return harnessPqScriptRun(HRNPQ_NTUPLES, NULL, (HarnessPq *)res)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQnfields() +***********************************************************************************************************************************/ +int +PQnfields(const PGresult *res) +{ + return harnessPqScriptRun(HRNPQ_NFIELDS, NULL, (HarnessPq *)res)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQgetisnull() 
+***********************************************************************************************************************************/ +int +PQgetisnull(const PGresult *res, int tup_num, int field_num) +{ + return harnessPqScriptRun( + HRNPQ_GETISNULL, varLstAdd(varLstAdd(varLstNew(), varNewInt(tup_num)), varNewInt(field_num)), (HarnessPq *)res)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQftype() +***********************************************************************************************************************************/ +Oid +PQftype(const PGresult *res, int field_num) +{ + return (Oid)harnessPqScriptRun(HRNPQ_FTYPE, varLstAdd(varLstNew(), varNewInt(field_num)), (HarnessPq *)res)->resultInt; +} + +/*********************************************************************************************************************************** +Shim for PQgetvalue() +***********************************************************************************************************************************/ +char * +PQgetvalue(const PGresult *res, int tup_num, int field_num) +{ + return (char *)harnessPqScriptRun( + HRNPQ_GETVALUE, varLstAdd(varLstAdd(varLstNew(), varNewInt(tup_num)), varNewInt(field_num)), (HarnessPq *)res)->resultZ; +} + +/*********************************************************************************************************************************** +Shim for PQclear() +***********************************************************************************************************************************/ +void +PQclear(PGresult *res) +{ + if (!harnessPqScriptFail) + harnessPqScriptRun(HRNPQ_CLEAR, NULL, (HarnessPq *)res); +} + +/*********************************************************************************************************************************** +Shim for PQfinish() 
+***********************************************************************************************************************************/ +void PQfinish(PGconn *conn) +{ + if (!harnessPqScriptFail) + harnessPqScriptRun(HRNPQ_FINISH, NULL, (HarnessPq *)conn); +} + +#endif // HARNESS_PQ_REAL diff -Nru pgbackrest-2.15.1/test/src/common/harnessPq.h pgbackrest-2.16/test/src/common/harnessPq.h --- pgbackrest-2.15.1/test/src/common/harnessPq.h 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/src/common/harnessPq.h 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,178 @@ +/*********************************************************************************************************************************** +Pq Test Harness + +Scripted testing for PostgreSQL pqlib so exact results can be returned for unit testing. See PostgreSQL client unit tests for +usage examples. +***********************************************************************************************************************************/ +#ifndef TEST_COMMON_HARNESS_PQ_H +#define TEST_COMMON_HARNESS_PQ_H + +#ifndef HARNESS_PQ_REAL + +#include + +#include "common/macro.h" +#include "common/time.h" +#include "version.h" + +/*********************************************************************************************************************************** +Function constants +***********************************************************************************************************************************/ +#define HRNPQ_CANCEL "PQcancel" +#define HRNPQ_CLEAR "PQclear" +#define HRNPQ_CONNECTDB "PQconnectdb" +#define HRNPQ_CONSUMEINPUT "PQconsumeInput" +#define HRNPQ_ERRORMESSAGE "PQerrorMessage" +#define HRNPQ_FINISH "PQfinish" +#define HRNPQ_FREECANCEL "PQfreeCancel" +#define HRNPQ_FTYPE "PQftype" +#define HRNPQ_GETCANCEL "PQgetCancel" +#define HRNPQ_GETISNULL "PQgetisnull" +#define HRNPQ_GETRESULT "PQgetResult" +#define HRNPQ_GETVALUE "PQgetvalue" +#define HRNPQ_ISBUSY "PQisbusy" +#define HRNPQ_NFIELDS "PQnfields" 
+#define HRNPQ_NTUPLES "PQntuples" +#define HRNPQ_RESULTERRORMESSAGE "PQresultErrorMessage" +#define HRNPQ_RESULTSTATUS "PQresultStatus" +#define HRNPQ_SENDQUERY "PQsendQuery" +#define HRNPQ_STATUS "PQstatus" + +/*********************************************************************************************************************************** +Macros for defining groups of functions that implement various queries and commands +***********************************************************************************************************************************/ +#define HRNPQ_MACRO_OPEN(sessionParam, connectParam) \ + {.session = sessionParam, .function = HRNPQ_CONNECTDB, .param = "[\"" connectParam "\"]"}, \ + {.session = sessionParam, .function = HRNPQ_STATUS, .resultInt = CONNECTION_OK} + +#define HRNPQ_MACRO_SET_SEARCH_PATH(sessionParam) \ + {.session = sessionParam, .function = HRNPQ_SENDQUERY, .param = "[\"set search_path = 'pg_catalog'\"]", .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_COMMAND_OK}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_VALIDATE_QUERY(sessionParam, versionParam, pgPathParam) \ + {.session = sessionParam, .function = HRNPQ_SENDQUERY, .param = \ + "[\"select (select setting from pg_catalog.pg_settings where name = 'server_version_num')::int4," \ + " (select setting from pg_catalog.pg_settings where name = 'data_directory')::text\"]", \ + .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt 
= PGRES_TUPLES_OK}, \ + {.session = sessionParam, .function = HRNPQ_NTUPLES, .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_NFIELDS, .resultInt = 2}, \ + {.session = sessionParam, .function = HRNPQ_FTYPE, .param = "[0]", .resultInt = HRNPQ_TYPE_INT}, \ + {.session = sessionParam, .function = HRNPQ_FTYPE, .param = "[1]", .resultInt = HRNPQ_TYPE_TEXT}, \ + {.session = sessionParam, .function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = STRINGIFY(versionParam)}, \ + {.session = sessionParam, .function = HRNPQ_GETVALUE, .param = "[0,1]", .resultZ = pgPathParam}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_SET_APPLICATION_NAME(sessionParam) \ + {.session = sessionParam, .function = HRNPQ_SENDQUERY, \ + .param = strPtr(strNewFmt("[\"set application_name = '" PROJECT_NAME " [%s]'\"]", cfgCommandName(cfgCommand()))), \ + .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_COMMAND_OK}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_IS_STANDBY_QUERY(sessionParam, standbyParam) \ + {.session = sessionParam, .function = HRNPQ_SENDQUERY, .param = "[\"select pg_catalog.pg_is_in_recovery()\"]", .resultInt = 1},\ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_TUPLES_OK}, \ + {.session = sessionParam, .function = HRNPQ_NTUPLES, .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_NFIELDS, .resultInt 
= 1}, \ + {.session = sessionParam, .function = HRNPQ_FTYPE, .param = "[0]", .resultInt = HRNPQ_TYPE_BOOL}, \ + {.session = sessionParam, .function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = STRINGIFY(standbyParam)}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_CREATE_RESTORE_POINT(sessionParam, lsnParam) \ + {.session = sessionParam, \ + .function = HRNPQ_SENDQUERY, .param = "[\"select pg_catalog.pg_create_restore_point('pgBackRest Archive Check')::text\"]", \ + .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_TUPLES_OK}, \ + {.session = sessionParam, .function = HRNPQ_NTUPLES, .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_NFIELDS, .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_FTYPE, .param = "[0]", .resultInt = HRNPQ_TYPE_TEXT}, \ + {.session = sessionParam, .function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = lsnParam}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_WAL_SWITCH(sessionParam, walNameParam, walFileNameParam) \ + {.session = sessionParam, .function = HRNPQ_SENDQUERY, \ + .param = "[\"select pg_catalog.pg_" walNameParam "file_name(pg_catalog.pg_switch_" walNameParam "())::text\"]", \ + .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_CONSUMEINPUT}, \ + {.session = sessionParam, .function = HRNPQ_ISBUSY}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT}, \ + {.session = sessionParam, .function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_TUPLES_OK}, \ + {.session = sessionParam, .function = HRNPQ_NTUPLES, .resultInt = 1}, \ + {.session = 
sessionParam, .function = HRNPQ_NFIELDS, .resultInt = 1}, \ + {.session = sessionParam, .function = HRNPQ_FTYPE, .param = "[0]", .resultInt = HRNPQ_TYPE_TEXT}, \ + {.session = sessionParam, .function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = walFileNameParam}, \ + {.session = sessionParam, .function = HRNPQ_CLEAR}, \ + {.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true} + +#define HRNPQ_MACRO_CLOSE(sessionParam) \ + {.session = sessionParam, .function = HRNPQ_FINISH} + +#define HRNPQ_MACRO_DONE() \ + {.function = NULL} + +/*********************************************************************************************************************************** +Macros to simplify dbOpen() for specific database versions +***********************************************************************************************************************************/ +#define HRNPQ_MACRO_OPEN_84(sessionParam, connectParam, pgPathParam) \ + HRNPQ_MACRO_OPEN(sessionParam, connectParam), \ + HRNPQ_MACRO_SET_SEARCH_PATH(sessionParam), \ + HRNPQ_MACRO_VALIDATE_QUERY(sessionParam, PG_VERSION_84, pgPathParam) + +#define HRNPQ_MACRO_OPEN_92(sessionParam, connectParam, pgPathParam, standbyParam) \ + HRNPQ_MACRO_OPEN(sessionParam, connectParam), \ + HRNPQ_MACRO_SET_SEARCH_PATH(sessionParam), \ + HRNPQ_MACRO_VALIDATE_QUERY(sessionParam, PG_VERSION_92, pgPathParam), \ + HRNPQ_MACRO_SET_APPLICATION_NAME(sessionParam), \ + HRNPQ_MACRO_IS_STANDBY_QUERY(sessionParam, standbyParam) + +/*********************************************************************************************************************************** +Data type constants +***********************************************************************************************************************************/ +#define HRNPQ_TYPE_BOOL 16 +#define HRNPQ_TYPE_INT 20 +#define HRNPQ_TYPE_TEXT 25 + 
+/*********************************************************************************************************************************** +Structure for scripting pq responses +***********************************************************************************************************************************/ +typedef struct HarnessPq +{ + unsigned int session; // Session number when mutliple sessions are run concurrently + const char *function; // Function call expected + const char *param; // Params expected by the function for verification + int resultInt; // Int result value + const char *resultZ; // Zero-terminated result value + bool resultNull; // Return null from function that normally returns a struct ptr + TimeMSec sleep; // Sleep specified milliseconds before returning from function +} HarnessPq; + +/*********************************************************************************************************************************** +Functions +***********************************************************************************************************************************/ +void harnessPqScriptSet(HarnessPq *harnessPqScriptParam); + +#endif // HARNESS_PQ_REAL + +#endif diff -Nru pgbackrest-2.15.1/test/src/module/command/archiveCommonTest.c pgbackrest-2.16/test/src/module/command/archiveCommonTest.c --- pgbackrest-2.15.1/test/src/module/command/archiveCommonTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/command/archiveCommonTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -7,6 +7,7 @@ #include "storage/posix/storage.h" #include "common/harnessConfig.h" +#include "common/harnessFork.h" /*********************************************************************************************************************************** Test Run @@ -196,20 +197,39 @@ strLstAddZ(argList, "archive-get"); harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); - TEST_RESULT_PTR(walSegmentFind(storageRepo(), strNew("9.6-2"), 
strNew("123456781234567812345678")), NULL, "no path"); + TEST_RESULT_PTR(walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"), 0), NULL, "no path"); storagePathCreateNP(storageTest, strNew("archive/db/9.6-2/1234567812345678")); - TEST_RESULT_PTR(walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678")), NULL, "no segment"); - - storagePutNP( - storageNewWriteNP( - storageTest, - strNew("archive/db/9.6-2/1234567812345678/123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")), - NULL); - - TEST_RESULT_STR( - strPtr(walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"))), - "123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "found segment"); + TEST_RESULT_PTR(walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"), 0), NULL, "no segment"); + TEST_RESULT_PTR( + walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"), 500), NULL, + "no segment after 500ms"); + + // Check timeout by making the wal segment appear after 250ms + HARNESS_FORK_BEGIN() + { + HARNESS_FORK_CHILD_BEGIN(0, false) + { + sleepMSec(250); + + storagePutNP( + storageNewWriteNP( + storageTest, + strNew( + "archive/db/9.6-2/1234567812345678/123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")), + NULL); + } + HARNESS_FORK_CHILD_END(); + + HARNESS_FORK_PARENT_BEGIN() + { + TEST_RESULT_STR( + strPtr(walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"), 1000)), + "123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "found segment"); + } + HARNESS_FORK_PARENT_END(); + } + HARNESS_FORK_END(); storagePutNP( storageNewWriteNP( @@ -218,7 +238,7 @@ NULL); TEST_ERROR( - walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678")), + walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678"), 0), ArchiveDuplicateError, "duplicates found in archive 
for WAL segment 123456781234567812345678:" " 123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" @@ -226,7 +246,7 @@ "\nHINT: are multiple primaries archiving to this stanza?"); TEST_RESULT_STR( - walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678.partial")), NULL, + walSegmentFind(storageRepo(), strNew("9.6-2"), strNew("123456781234567812345678.partial"), 0), NULL, "did not find partial segment"); } diff -Nru pgbackrest-2.15.1/test/src/module/command/backupCommonTest.c pgbackrest-2.16/test/src/module/command/backupCommonTest.c --- pgbackrest-2.15.1/test/src/module/command/backupCommonTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/command/backupCommonTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -161,8 +161,13 @@ ((PageHeaderData *)(bufPtr(buffer) + (PG_PAGE_SIZE_DEFAULT * 0x00)))->pd_lsn.xrecoff = 0xF0F0F0F0; write = ioBufferWriteNew(bufferOut); + ioFilterGroupAdd( - ioWriteFilterGroup(write), pageChecksumNew(0, PG_SEGMENT_PAGE_DEFAULT, PG_PAGE_SIZE_DEFAULT, 0xFACEFACE00000000)); + ioWriteFilterGroup(write), + pageChecksumNewVar( + varVarLst( + jsonToVar( + strNewFmt("[0,%u,%u,%" PRIu64 "]", PG_SEGMENT_PAGE_DEFAULT, PG_PAGE_SIZE_DEFAULT, 0xFACEFACE00000000))))); ioWriteOpen(write); ioWrite(write, buffer); ioWriteClose(write); diff -Nru pgbackrest-2.15.1/test/src/module/command/backupTest.c pgbackrest-2.16/test/src/module/command/backupTest.c --- pgbackrest-2.15.1/test/src/module/command/backupTest.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/src/module/command/backupTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,457 @@ +/*********************************************************************************************************************************** +Test Backup Command +***********************************************************************************************************************************/ +#include "common/io/bufferRead.h" +#include 
"common/io/bufferWrite.h" +#include "common/io/io.h" +#include "storage/helper.h" +#include "storage/posix/storage.h" + +#include "common/harnessConfig.h" + +/*********************************************************************************************************************************** +Test Run +***********************************************************************************************************************************/ +void +testRun(void) +{ + FUNCTION_HARNESS_VOID(); + + // Start a protocol server to test the protocol directly + Buffer *serverWrite = bufNew(8192); + IoWrite *serverWriteIo = ioBufferWriteNew(serverWrite); + ioWriteOpen(serverWriteIo); + + ProtocolServer *server = protocolServerNew(strNew("test"), strNew("test"), ioBufferReadNew(bufNew(0)), serverWriteIo); + bufUsedSet(serverWrite, 0); + + const String *pgFile = strNew("testfile"); + const String *missingFile = strNew("missing"); + const String *backupLabel = strNew("20190718-155825F"); + const String *backupPathFile = strNewFmt(STORAGE_REPO_BACKUP "/%s/%s", strPtr(backupLabel), strPtr(pgFile)); + BackupFileResult result = {0}; + VariantList *paramList = varLstNew(); + + // ***************************************************************************************************************************** + if (testBegin("segmentNumber()")) + { + TEST_RESULT_UINT(segmentNumber(pgFile), 0, "No segment number"); + TEST_RESULT_UINT(segmentNumber(strNewFmt("%s.123", strPtr(pgFile))), 123, "Segment number"); + } + + // ***************************************************************************************************************************** + if (testBegin("backupFile(), backupProtocol")) + { + // Load Parameters + StringList *argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAdd(argList, strNewFmt("--repo1-path=%s/repo", testPath())); + strLstAdd(argList, strNewFmt("--pg1-path=%s/pg", testPath())); + strLstAddZ(argList, 
"--repo1-retention-full=1"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + // Create the pg path + storagePathCreateP(storagePgWrite(), NULL, .mode = 0700); + + // Pg file missing - ignoreMissing=true + // ------------------------------------------------------------------------------------------------------------------------- + TEST_ASSIGN( + result, + backupFile( + missingFile, true, 0, NULL, false, 0, missingFile, false, false, 1, backupLabel, false, cipherTypeNone, NULL), + "pg file missing, ignoreMissing=true, no delta"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 0, " copy/repo size 0"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultSkip, " skip file"); + + // Check protocol function directly + // ------------------------------------------------------------------------------------------------------------------------- + // NULL, zero param values, ignoreMissing=true + varLstAdd(paramList, varNewStr(missingFile)); // pgFile + varLstAdd(paramList, varNewBool(true)); // pgFileIgnoreMissing + varLstAdd(paramList, varNewUInt64(0)); // pgFileSize + varLstAdd(paramList, NULL); // pgFileChecksum + varLstAdd(paramList, varNewBool(false)); // pgFileChecksumPage + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 1 + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 2 + varLstAdd(paramList, varNewStr(missingFile)); // repoFile + varLstAdd(paramList, varNewBool(false)); // repoFileHasReference + varLstAdd(paramList, varNewBool(false)); // repoFileCompress + varLstAdd(paramList, varNewUInt(0)); // repoFileCompressLevel + varLstAdd(paramList, varNewStr(backupLabel)); // backupLabel + varLstAdd(paramList, varNewBool(false)); // delta + + TEST_RESULT_BOOL( + backupProtocol(PROTOCOL_COMMAND_BACKUP_FILE_STR, paramList, server), true, "protocol backup file - skip"); + TEST_RESULT_STR(strPtr(strNewBuf(serverWrite)), "{\"out\":[3,0,0,null,null]}\n", " check result"); + 
bufUsedSet(serverWrite, 0); + + // Pg file missing - ignoreMissing=false + // ------------------------------------------------------------------------------------------------------------------------- + TEST_ERROR_FMT( + backupFile( + missingFile, false, 0, NULL, false, 0, missingFile, false, false, 1, backupLabel, false, cipherTypeNone, NULL), + FileMissingError, "unable to open missing file '%s/pg/missing' for read", testPath()); + + // Create a pg file to backup + storagePutNP(storageNewWriteNP(storagePgWrite(), pgFile), BUFSTRDEF("atestfile")); + + // ------------------------------------------------------------------------------------------------------------------------- + // No prior checksum, no compression, no pageChecksum, no delta, no hasReference + + // With the expected backupCopyResultCopy, unset the storageFeatureCompress bit for the storageRepo for code coverage + uint64_t feature = storageRepo()->interface.feature; + ((Storage *)storageRepo())->interface.feature = feature && ((1 << storageFeatureCompress) ^ 0xFFFFFFFFFFFFFFFF); + + TEST_ASSIGN( + result, + backupFile(pgFile, false, 9, NULL, false, 0, pgFile, false, false, 1, backupLabel, false, cipherTypeNone, NULL), + "pg file exists, no repo file, no ignoreMissing, no pageChecksum, no delta, no hasReference"); + + ((Storage *)storageRepo())->interface.feature = feature; + + TEST_RESULT_UINT(result.copySize + result.repoSize, 18, " copy=repo=pgFile size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " copy file to repo success"); + + TEST_RESULT_VOID(storageRemoveNP(storageRepoWrite(), backupPathFile), " remove repo file"); + + // ------------------------------------------------------------------------------------------------------------------------- + // Test pagechecksum 
+ TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, NULL, true, 0xFFFFFFFFFFFFFFFF, pgFile, false, false, 1, backupLabel, false, cipherTypeNone, + NULL), + "file checksummed with pageChecksum enabled"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 18, " copy=repo=pgFile size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile)), + true," copy file to repo success"); + TEST_RESULT_PTR_NE(result.pageChecksumResult, NULL, " pageChecksumResult is set"); + TEST_RESULT_BOOL( + varBool(kvGet(result.pageChecksumResult, VARSTRDEF("valid"))), false, " pageChecksumResult valid=false"); + TEST_RESULT_VOID(storageRemoveNP(storageRepoWrite(), backupPathFile), " remove repo file"); + + // Check protocol function directly + // ------------------------------------------------------------------------------------------------------------------------- + // pgFileSize, ignoreMissing=false, backupLabel, pgFileChecksumPage, pgFileChecksumPageLsnLimit + paramList = varLstNew(); + varLstAdd(paramList, varNewStr(pgFile)); // pgFile + varLstAdd(paramList, varNewBool(false)); // pgFileIgnoreMissing + varLstAdd(paramList, varNewUInt64(9)); // pgFileSize + varLstAdd(paramList, NULL); // pgFileChecksum + varLstAdd(paramList, varNewBool(true)); // pgFileChecksumPage + varLstAdd(paramList, varNewUInt64(0xFFFFFFFF)); // pgFileChecksumPageLsnLimit 1 + varLstAdd(paramList, varNewUInt64(0xFFFFFFFF)); // pgFileChecksumPageLsnLimit 2 + varLstAdd(paramList, varNewStr(pgFile)); // repoFile + varLstAdd(paramList, varNewBool(false)); // repoFileHasReference + varLstAdd(paramList, varNewBool(false)); // repoFileCompress + varLstAdd(paramList, varNewUInt(1)); // repoFileCompressLevel + varLstAdd(paramList, varNewStr(backupLabel)); // backupLabel + varLstAdd(paramList, varNewBool(false)); // delta + + TEST_RESULT_BOOL( + 
backupProtocol(PROTOCOL_COMMAND_BACKUP_FILE_STR, paramList, server), true, "protocol backup file - pageChecksum"); + TEST_RESULT_STR( + strPtr(strNewBuf(serverWrite)), + "{\"out\":[1,9,9,\"9bc8ab2dda60ef4beed07d1e19ce0676d5edde67\",{\"align\":false,\"valid\":false}]}\n", + " check result"); + bufUsedSet(serverWrite, 0); + + // ------------------------------------------------------------------------------------------------------------------------- + // File exists in repo and db, checksum match, delta set, ignoreMissing false, hasReference - NOOP + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, true, false, 1, backupLabel, + true, cipherTypeNone, NULL), + "file in db and repo, checksum equal, no ignoreMissing, no pageChecksum, delta, hasReference"); + TEST_RESULT_UINT(result.copySize, 9, " copy size set"); + TEST_RESULT_UINT(result.repoSize, 0, " repo size not set since already exists in repo"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultNoOp, " noop file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " noop"); + + // Check protocol function directly + // ------------------------------------------------------------------------------------------------------------------------- + // pgFileChecksum, hasReference, delta + paramList = varLstNew(); + varLstAdd(paramList, varNewStr(pgFile)); // pgFile + varLstAdd(paramList, varNewBool(false)); // pgFileIgnoreMissing + varLstAdd(paramList, varNewUInt64(9)); // pgFileSize + varLstAdd(paramList, varNewStrZ("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67")); // pgFileChecksum + varLstAdd(paramList, varNewBool(false)); // pgFileChecksumPage + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 1 + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 2 + 
varLstAdd(paramList, varNewStr(pgFile)); // repoFile + varLstAdd(paramList, varNewBool(true)); // repoFileHasReference + varLstAdd(paramList, varNewBool(false)); // repoFileCompress + varLstAdd(paramList, varNewUInt(1)); // repoFileCompressLevel + varLstAdd(paramList, varNewStr(backupLabel)); // backupLabel + varLstAdd(paramList, varNewBool(true)); // delta + + TEST_RESULT_BOOL( + backupProtocol(PROTOCOL_COMMAND_BACKUP_FILE_STR, paramList, server), true, "protocol backup file - noop"); + TEST_RESULT_STR( + strPtr(strNewBuf(serverWrite)), "{\"out\":[4,9,0,\"9bc8ab2dda60ef4beed07d1e19ce0676d5edde67\",null]}\n", + " check result"); + bufUsedSet(serverWrite, 0); + + // ------------------------------------------------------------------------------------------------------------------------- + // File exists in repo and db, pg checksum mismatch, delta set, ignoreMissing false, hasReference - COPY + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, strNew("1234567890123456789012345678901234567890"), false, 0, pgFile, true, false, 1, backupLabel, + true, cipherTypeNone, NULL), + "file in db and repo, pg checksum not equal, no ignoreMissing, no pageChecksum, delta, hasReference"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 18, " copy=repo=pgFile size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " copy"); + + // ------------------------------------------------------------------------------------------------------------------------- + // File exists in repo and db, pg checksum same, pg size different, delta set, ignoreMissing false, hasReference - COPY + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 8, strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, true, false, 1, backupLabel, + true, cipherTypeNone, 
NULL), + "db & repo file, pg checksum same, pg size different, no ignoreMissing, no pageChecksum, delta, hasReference"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 18, " copy=repo=pgFile size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " copy"); + + // ------------------------------------------------------------------------------------------------------------------------- + // File exists in repo and db, checksum not same in repo, delta set, ignoreMissing false, no hasReference - RECOPY + TEST_RESULT_VOID( + storagePutNP(storageNewWriteNP(storageRepoWrite(), backupPathFile), BUFSTRDEF("adifferentfile")), + "create different file (size and checksum) with same name in repo"); + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, false, false, 1, + backupLabel, true, cipherTypeNone, NULL), + " db & repo file, pgFileMatch, repo checksum no match, no ignoreMissing, no pageChecksum, delta, no hasReference"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 18, " copy=repo=pgFile size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultReCopy, " recopy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " recopy"); + + // ------------------------------------------------------------------------------------------------------------------------- + // File exists in repo but missing from db, checksum same in repo, delta set, ignoreMissing true, no hasReference - SKIP + TEST_RESULT_VOID( + storagePutNP(storageNewWriteNP(storageRepoWrite(), backupPathFile), BUFSTRDEF("adifferentfile")), + "create 
different file with same name in repo"); + TEST_ASSIGN( + result, + backupFile( + missingFile, true, 9, strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, false, false, 1, + backupLabel, true, cipherTypeNone, NULL), + " file in repo only, checksum in repo equal, ignoreMissing=true, no pageChecksum, delta, no hasReference"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 0, " copy=repo=0 size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultSkip, " skip file"); + TEST_RESULT_BOOL( + (result.copyChecksum == NULL && !storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " skip and remove file from repo"); + + // ------------------------------------------------------------------------------------------------------------------------- + // No prior checksum, compression, no page checksum, no pageChecksum, no delta, no hasReference + TEST_ASSIGN( + result, + backupFile(pgFile, false, 9, NULL, false, 0, pgFile, false, true, 3, backupLabel, false, cipherTypeNone, NULL), + "pg file exists, no checksum, no ignoreMissing, compression, no pageChecksum, no delta, no hasReference"); + + TEST_RESULT_UINT(result.copySize, 9, " copy=pgFile size"); + TEST_RESULT_UINT(result.repoSize, 29, " repo compress size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), strNewFmt(STORAGE_REPO_BACKUP "/%s/%s.gz", strPtr(backupLabel), strPtr(pgFile))) && + result.pageChecksumResult == NULL), + true, " copy file to repo compress success"); + + // ------------------------------------------------------------------------------------------------------------------------- + // Pg and repo file exist & match, prior checksum, compression, no page checksum, no pageChecksum, no delta, no hasReference + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, 
strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, false, true, 3, backupLabel, + false, cipherTypeNone, NULL), + "pg file & repo exists, match, checksum, no ignoreMissing, compression, no pageChecksum, no delta, no hasReference"); + + TEST_RESULT_UINT(result.copySize, 9, " copy=pgFile size"); + TEST_RESULT_UINT(result.repoSize, 29, " repo compress size"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultChecksum, " checksum file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), strNewFmt(STORAGE_REPO_BACKUP "/%s/%s.gz", strPtr(backupLabel), strPtr(pgFile))) && + result.pageChecksumResult == NULL), + true, " compressed repo file matches"); + + // Check protocol function directly + // ------------------------------------------------------------------------------------------------------------------------- + // compression + paramList = varLstNew(); + varLstAdd(paramList, varNewStr(pgFile)); // pgFile + varLstAdd(paramList, varNewBool(false)); // pgFileIgnoreMissing + varLstAdd(paramList, varNewUInt64(9)); // pgFileSize + varLstAdd(paramList, varNewStrZ("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67")); // pgFileChecksum + varLstAdd(paramList, varNewBool(false)); // pgFileChecksumPage + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 1 + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 2 + varLstAdd(paramList, varNewStr(pgFile)); // repoFile + varLstAdd(paramList, varNewBool(false)); // repoFileHasReference + varLstAdd(paramList, varNewBool(true)); // repoFileCompress + varLstAdd(paramList, varNewUInt(3)); // repoFileCompressLevel + varLstAdd(paramList, varNewStr(backupLabel)); // backupLabel + varLstAdd(paramList, varNewBool(false)); // delta + + TEST_RESULT_BOOL( + backupProtocol(PROTOCOL_COMMAND_BACKUP_FILE_STR, paramList, server), true, "protocol backup file - copy, compress"); + TEST_RESULT_STR( + 
strPtr(strNewBuf(serverWrite)), "{\"out\":[0,9,29,\"9bc8ab2dda60ef4beed07d1e19ce0676d5edde67\",null]}\n", + " check result"); + bufUsedSet(serverWrite, 0); + + // ------------------------------------------------------------------------------------------------------------------------- + // Create a zero sized file - checksum will be set but in backupManifestUpdate it will not be copied + storagePutNP(storageNewWriteNP(storagePgWrite(), strNew("zerofile")), BUFSTRDEF("")); + + // No prior checksum, no compression, no pageChecksum, no delta, no hasReference + TEST_ASSIGN( + result, + backupFile( + strNew("zerofile"), false, 0, NULL, false, 0, strNew("zerofile"), false, false, 1, backupLabel, false, + cipherTypeNone, NULL), + "zero-sized pg file exists, no repo file, no ignoreMissing, no pageChecksum, no delta, no hasReference"); + TEST_RESULT_UINT(result.copySize + result.repoSize, 0, " copy=repo=pgFile size 0"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_PTR_NE(result.copyChecksum, NULL, " checksum set"); + TEST_RESULT_BOOL( + (storageExistsNP(storageRepo(), strNewFmt(STORAGE_REPO_BACKUP "/%s/zerofile", strPtr(backupLabel))) && + result.pageChecksumResult == NULL), + true, " copy zero file to repo success"); + + // Check invalid protocol function + // ------------------------------------------------------------------------------------------------------------------------- + TEST_RESULT_BOOL(backupProtocol(strNew(BOGUS_STR), paramList, server), false, "invalid function"); + } + + // ***************************************************************************************************************************** + if (testBegin("backupFile() - encrypt")) + { + // Load Parameters + StringList *argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAdd(argList, strNewFmt("--repo1-path=%s/repo", testPath())); + strLstAdd(argList, strNewFmt("--pg1-path=%s/pg", testPath())); 
+ strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "--repo1-cipher-type=aes-256-cbc"); + strLstAddZ(argList, "backup"); + setenv("PGBACKREST_REPO1_CIPHER_PASS", "12345678", true); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + unsetenv("PGBACKREST_REPO1_CIPHER_PASS"); + + // Create the pg path + storagePathCreateP(storagePgWrite(), NULL, .mode = 0700); + + // Create a pg file to backup + storagePutNP(storageNewWriteNP(storagePgWrite(), pgFile), BUFSTRDEF("atestfile")); + + // ------------------------------------------------------------------------------------------------------------------------- + // No prior checksum, no compression, no pageChecksum, no delta, no hasReference + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, NULL, false, 0, pgFile, false, false, 1, backupLabel, false, cipherTypeAes256Cbc, + strNew("12345678")), + "pg file exists, no repo file, no ignoreMissing, no pageChecksum, no delta, no hasReference"); + + TEST_RESULT_UINT(result.copySize, 9, " copy size set"); + TEST_RESULT_UINT(result.repoSize, 32, " repo size set"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " copy file to encrypted repo success"); + + // ------------------------------------------------------------------------------------------------------------------------- + // Delta but pgMatch false (pg File size different), prior checksum, no compression, no pageChecksum, delta, no hasReference + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 8, strNew("9bc8ab2dda60ef4beed07d1e19ce0676d5edde67"), false, 0, pgFile, false, false, 1, + backupLabel, true, cipherTypeAes256Cbc, strNew("12345678")), + "pg and repo file exists, pgFileMatch false, no ignoreMissing, no pageChecksum, delta, no hasReference"); + 
TEST_RESULT_UINT(result.copySize, 9, " copy size set"); + TEST_RESULT_UINT(result.repoSize, 32, " repo size set"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultCopy, " copy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " copy file (size missmatch) to encrypted repo success"); + + // ------------------------------------------------------------------------------------------------------------------------- + // Check repo with cipher filter. + // pg/repo file size same but checksum different, prior checksum, no compression, no pageChecksum, no delta, no hasReference + TEST_ASSIGN( + result, + backupFile( + pgFile, false, 9, strNew("1234567890123456789012345678901234567890"), false, 0, pgFile, false, false, 0, + backupLabel, false, cipherTypeAes256Cbc, strNew("12345678")), + "pg and repo file exists, repo checksum no match, no ignoreMissing, no pageChecksum, no delta, no hasReference"); + TEST_RESULT_UINT(result.copySize, 9, " copy size set"); + TEST_RESULT_UINT(result.repoSize, 32, " repo size set"); + TEST_RESULT_UINT(result.backupCopyResult, backupCopyResultReCopy, " recopy file"); + TEST_RESULT_BOOL( + (strEqZ(result.copyChecksum, "9bc8ab2dda60ef4beed07d1e19ce0676d5edde67") && + storageExistsNP(storageRepo(), backupPathFile) && result.pageChecksumResult == NULL), + true, " recopy file to encrypted repo success"); + + // Check protocol function directly + // ------------------------------------------------------------------------------------------------------------------------- + // cipherType, cipherPass + paramList = varLstNew(); + varLstAdd(paramList, varNewStr(pgFile)); // pgFile + varLstAdd(paramList, varNewBool(false)); // pgFileIgnoreMissing + varLstAdd(paramList, varNewUInt64(9)); // pgFileSize + varLstAdd(paramList, varNewStrZ("1234567890123456789012345678901234567890")); // pgFileChecksum 
+ varLstAdd(paramList, varNewBool(false)); // pgFileChecksumPage + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 1 + varLstAdd(paramList, varNewUInt64(0)); // pgFileChecksumPageLsnLimit 2 + varLstAdd(paramList, varNewStr(pgFile)); // repoFile + varLstAdd(paramList, varNewBool(false)); // repoFileHasReference + varLstAdd(paramList, varNewBool(false)); // repoFileCompress + varLstAdd(paramList, varNewUInt(0)); // repoFileCompressLevel + varLstAdd(paramList, varNewStr(backupLabel)); // backupLabel + varLstAdd(paramList, varNewBool(false)); // delta + varLstAdd(paramList, varNewStrZ("12345678")); // cipherPass + + TEST_RESULT_BOOL( + backupProtocol(PROTOCOL_COMMAND_BACKUP_FILE_STR, paramList, server), true, "protocol backup file - recopy, encrypt"); + TEST_RESULT_STR( + strPtr(strNewBuf(serverWrite)), "{\"out\":[2,9,32,\"9bc8ab2dda60ef4beed07d1e19ce0676d5edde67\",null]}\n", + " check result"); + bufUsedSet(serverWrite, 0); + } + + FUNCTION_HARNESS_RESULT_VOID(); +} diff -Nru pgbackrest-2.15.1/test/src/module/command/checkTest.c pgbackrest-2.16/test/src/module/command/checkTest.c --- pgbackrest-2.15.1/test/src/module/command/checkTest.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/src/module/command/checkTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,130 @@ +/*********************************************************************************************************************************** +Test Check Command +***********************************************************************************************************************************/ +#include "postgres/version.h" +#include "storage/helper.h" +#include "storage/storage.intern.h" + +#include "common/harnessConfig.h" +#include "common/harnessInfo.h" +#include "common/harnessPq.h" + +/*********************************************************************************************************************************** +Test Run 
+***********************************************************************************************************************************/ +void +testRun(void) +{ + FUNCTION_HARNESS_VOID(); + + // ***************************************************************************************************************************** + if (testBegin("cmdCheck()")) + { + String *pg1Path = strNewFmt("--pg1-path=%s/pg1", testPath()); + + StringList *argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAdd(argList, pg1Path); + strLstAdd(argList, strNewFmt("--repo1-path=%s/repo", testPath())); + strLstAddZ(argList, "--archive-timeout=.5"); + strLstAddZ(argList, "check"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_ERROR_FMT( + cmdCheck(), FileMissingError, + "unable to load info file '%s/repo/archive/test1/archive.info' or '%s/repo/archive/test1/archive.info.copy':\n" + "FileMissingError: " STORAGE_ERROR_READ_MISSING "\n" + "FileMissingError: " STORAGE_ERROR_READ_MISSING "\n" + "HINT: archive.info cannot be opened but is required to push/get WAL segments.\n" + "HINT: is archive_command configured correctly in postgresql.conf?\n" + "HINT: has a stanza-create been performed?\n" + "HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving scheme.", + testPath(), testPath(), strPtr(strNewFmt("%s/repo/archive/test1/archive.info", testPath())), + strPtr(strNewFmt("%s/repo/archive/test1/archive.info.copy", testPath()))); + + // Create archive.info file + storagePutNP( + storageNewWriteNP(storageRepoWrite(), INFO_ARCHIVE_PATH_FILE_STR), + harnessInfoChecksum( + strNew( + "[db]\n" + "db-id=1\n" + "db-system-id=6569239123849665679\n" + "db-version=\"9.2\"\n" + "\n" + "[db:history]\n" + "1={\"db-id\":6569239123849665679,\"db-version\":\"9.2\"}\n"))); + + // Single primary + // 
------------------------------------------------------------------------------------------------------------------------- + // Error when WAL segment not found + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", strPtr(pg1Path), false), + HRNPQ_MACRO_CREATE_RESTORE_POINT(1, "1/1"), + HRNPQ_MACRO_WAL_SWITCH(1, "xlog", "000000010000000100000001"), + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_DONE() + }); + + TEST_ERROR( + cmdCheck(), ArchiveTimeoutError, + "WAL segment 000000010000000100000001 was not archived before the 500ms timeout\n" + "HINT: Check the archive_command to ensure that all options are correct (especially --stanza).\n" + "HINT: Check the PostgreSQL server log for errors."); + + // Create WAL segment + Buffer *buffer = bufNew(16 * 1024 * 1024); + memset(bufPtr(buffer), 0, bufSize(buffer)); + bufUsedSet(buffer, bufSize(buffer)); + + // WAL segment is found + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", strPtr(pg1Path), false), + HRNPQ_MACRO_CREATE_RESTORE_POINT(1, "1/1"), + HRNPQ_MACRO_WAL_SWITCH(1, "xlog", "000000010000000100000001"), + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_DONE() + }); + + storagePutNP( + storageNewWriteNP( + storageRepoWrite(), + strNew(STORAGE_REPO_ARCHIVE "/9.2-1/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")), + buffer); + + TEST_RESULT_VOID(cmdCheck(), "check"); + harnessLogResult( + strPtr( + strNewFmt( + "P00 INFO: WAL segment 000000010000000100000001 successfully archived to '%s/repo/archive/test1/9.2-1/" + "0000000100000001/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'", + testPath()))); + + // Single standby + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAdd(argList, pg1Path); + strLstAdd(argList, 
strNewFmt("--repo1-path=%s/repo", testPath())); + strLstAddZ(argList, "--archive-timeout=.5"); + strLstAddZ(argList, "check"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + // Set script + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", strPtr(pg1Path), true), + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_DONE() + }); + + TEST_RESULT_VOID(cmdCheck(), "check"); + harnessLogResult("P00 INFO: switch wal not performed because no primary was found"); + } + + FUNCTION_HARNESS_RESULT_VOID(); +} diff -Nru pgbackrest-2.15.1/test/src/module/command/helpTest.c pgbackrest-2.16/test/src/module/command/helpTest.c --- pgbackrest-2.15.1/test/src/module/command/helpTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/command/helpTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -208,6 +208,7 @@ " --repo-s3-host s3 repository host\n" " --repo-s3-key s3 repository access key\n" " --repo-s3-key-secret s3 repository secret access key\n" + " --repo-s3-port s3 repository port [default=443]\n" " --repo-s3-region s3 repository region\n" " --repo-s3-token s3 repository security token\n" " --repo-s3-verify-tls verify S3 server certificate [default=y]\n" diff -Nru pgbackrest-2.15.1/test/src/module/common/cryptoTest.c pgbackrest-2.16/test/src/module/common/cryptoTest.c --- pgbackrest-2.15.1/test/src/module/common/cryptoTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/cryptoTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -1,7 +1,9 @@ /*********************************************************************************************************************************** Test Block Cipher ***********************************************************************************************************************************/ +#include "common/io/filter/filter.intern.h" #include "common/io/io.h" +#include "common/type/json.h" 
/*********************************************************************************************************************************** Data for testing @@ -104,6 +106,7 @@ Buffer *encryptBuffer = bufNew(TEST_BUFFER_SIZE); IoFilter *blockEncryptFilter = cipherBlockNew(cipherModeEncrypt, cipherTypeAes256Cbc, testPass, NULL); + blockEncryptFilter = cipherBlockNewVar(ioFilterParamList(blockEncryptFilter)); CipherBlock *blockEncrypt = (CipherBlock *)ioFilterDriver(blockEncryptFilter); TEST_RESULT_INT( @@ -153,7 +156,8 @@ // ------------------------------------------------------------------------------------------------------------------------- Buffer *decryptBuffer = bufNew(TEST_BUFFER_SIZE); - IoFilter *blockDecryptFilter = cipherBlockNew(cipherModeDecrypt, cipherTypeAes256Cbc, testPass, NULL); + IoFilter *blockDecryptFilter = cipherBlockNew(cipherModeDecrypt, cipherTypeAes256Cbc, testPass, HASH_TYPE_SHA1_STR); + blockDecryptFilter = cipherBlockNewVar(ioFilterParamList(blockDecryptFilter)); CipherBlock *blockDecrypt = (CipherBlock *)ioFilterDriver(blockDecryptFilter); TEST_RESULT_INT( @@ -295,7 +299,7 @@ TEST_RESULT_VOID(ioFilterFree(hash), " free hash"); // ------------------------------------------------------------------------------------------------------------------------- - TEST_ASSIGN(hash, cryptoHashNew(strNew(HASH_TYPE_SHA1)), "create sha1 hash"); + TEST_ASSIGN(hash, cryptoHashNewVar(varVarLst(jsonToVar(strNewFmt("[\"%s\"]", HASH_TYPE_SHA1)))), "create sha1 hash"); TEST_RESULT_STR( strPtr(bufHex(cryptoHash((CryptoHash *)ioFilterDriver(hash)))), "da39a3ee5e6b4b0d3255bfef95601890afd80709", " check empty hash"); diff -Nru pgbackrest-2.15.1/test/src/module/common/ioHttpTest.c pgbackrest-2.16/test/src/module/common/ioHttpTest.c --- pgbackrest-2.15.1/test/src/module/common/ioHttpTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/ioHttpTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -142,7 +142,7 @@ "Connection:ack\r\n" "\r\n"); - // 
Head request with no content-length but no content + // Head request with content-length but no content harnessTlsServerExpect( "HEAD / HTTP/1.1\r\n" "\r\n"); @@ -152,6 +152,16 @@ "content-length:380\r\n" "\r\n"); + // Head request with transfer encoding but no content + harnessTlsServerExpect( + "HEAD / HTTP/1.1\r\n" + "\r\n"); + + harnessTlsServerReply( + "HTTP/1.1 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n"); + // Error with content length 0 (with a few slow down errors) harnessTlsServerExpect( "GET / HTTP/1.1\r\n" @@ -409,7 +419,7 @@ TEST_ERROR( httpClientRequest(client, strNew("GET"), strNew("/"), NULL, NULL, NULL, false), FileReadError, - "unable to read data from '" TLS_TEST_HOST ":9443' after 500ms"); + "timeout after 500ms waiting for read from '" TLS_TEST_HOST ":9443'"); // Test invalid http version TEST_ERROR( @@ -463,7 +473,7 @@ strPtr(httpHeaderToLog(httpClientReponseHeader(client))), "{connection: 'ack', key1: '0', key2: 'value2'}", " check response headers"); - // Head request with no content-length but no content + // Head request with content-length but no content TEST_RESULT_VOID( httpClientRequest(client, strNew("HEAD"), strNew("/"), NULL, httpHeaderNew(NULL), NULL, true), "head request with content-length"); @@ -474,6 +484,18 @@ TEST_RESULT_STR( strPtr(httpHeaderToLog(httpClientReponseHeader(client))), "{content-length: '380'}", " check response headers"); + // Head request with transfer encoding but no content + TEST_RESULT_VOID( + httpClientRequest(client, strNew("HEAD"), strNew("/"), NULL, httpHeaderNew(NULL), NULL, true), + "head request with transfer encoding"); + TEST_RESULT_UINT(httpClientResponseCode(client), 200, " check response code"); + TEST_RESULT_STR(strPtr(httpClientResponseMessage(client)), "OK", " check response message"); + TEST_RESULT_BOOL(httpClientEof(client), true, " io is eof"); + TEST_RESULT_BOOL(httpClientBusy(client), false, " client is not busy"); + TEST_RESULT_STR( + 
strPtr(httpHeaderToLog(httpClientReponseHeader(client))), "{transfer-encoding: 'chunked'}", + " check response headers"); + // Error with content length 0 TEST_RESULT_VOID( httpClientRequest(client, strNew("GET"), strNew("/"), NULL, NULL, NULL, false), "error with content length 0"); diff -Nru pgbackrest-2.15.1/test/src/module/common/ioTest.c pgbackrest-2.16/test/src/module/common/ioTest.c --- pgbackrest-2.15.1/test/src/module/common/ioTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/ioTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -292,19 +292,25 @@ bufferOriginal = bufNewC("123", 3); TEST_ASSIGN(bufferRead, ioBufferReadNew(bufferOriginal), "create buffer read object"); + + TEST_RESULT_VOID(ioFilterGroupClear(ioReadFilterGroup(bufferRead)), " clear does nothing when no filters"); + TEST_RESULT_VOID(ioFilterGroupAdd(ioReadFilterGroup(bufferRead), ioSizeNew()), " add filter to be cleared"); + TEST_RESULT_VOID(ioFilterGroupClear(ioReadFilterGroup(bufferRead)), " clear size filter"); + IoFilter *sizeFilter = ioSizeNew(); - TEST_RESULT_PTR( - ioFilterGroupAdd(ioReadFilterGroup(bufferRead), sizeFilter), bufferRead->filterGroup, " add filter to filter group"); TEST_RESULT_VOID( ioFilterGroupAdd(ioReadFilterGroup(bufferRead), ioTestFilterMultiplyNew("double", 2, 3, 'X')), " add filter to filter group"); + TEST_RESULT_PTR( + ioFilterGroupInsert(ioReadFilterGroup(bufferRead), 0, sizeFilter), bufferRead->filterGroup, + " add filter to filter group"); TEST_RESULT_VOID(ioFilterGroupAdd(ioReadFilterGroup(bufferRead), ioSizeNew()), " add filter to filter group"); IoFilter *bufferFilter = ioBufferNew(); TEST_RESULT_VOID(ioFilterGroupAdd(ioReadFilterGroup(bufferRead), bufferFilter), " add filter to filter group"); TEST_RESULT_PTR(ioFilterMove(NULL, memContextTop()), NULL, " move NULL filter to top context"); TEST_RESULT_STR( strPtr(jsonFromVar(ioFilterGroupParamAll(ioReadFilterGroup(bufferRead)), 0)), - 
"{\"buffer\":null,\"double\":[\"double\",2,3],\"size\":null}", " check filter params"); + "[{\"size\":null},{\"double\":[\"double\",2,3]},{\"size\":null},{\"buffer\":null}]", " check filter params"); TEST_RESULT_BOOL(ioReadOpen(bufferRead), true, " open"); TEST_RESULT_INT(ioReadHandle(bufferRead), -1, " handle invalid"); @@ -351,6 +357,15 @@ TEST_RESULT_VOID(ioFilterFree(bufferFilter), " free buffer filter"); TEST_RESULT_VOID(ioFilterGroupFree(ioReadFilterGroup(bufferRead)), " free filter group object"); + // Set filter group results + // ------------------------------------------------------------------------------------------------------------------------- + IoFilterGroup *filterGroup = ioFilterGroupNew(); + filterGroup->opened = true; + TEST_RESULT_VOID(ioFilterGroupResultAllSet(filterGroup, NULL), "null result"); + TEST_RESULT_VOID(ioFilterGroupResultAllSet(filterGroup, jsonToVar(strNew("{\"test\":777}"))), "add result"); + filterGroup->closed = true; + TEST_RESULT_UINT(varUInt64(ioFilterGroupResult(filterGroup, strNew("test"))), 777, " check filter result"); + // Read a zero-size buffer to ensure filters are still processed even when there is no input. Some filters (e.g. encryption // and compression) will produce output even if there is no input. 
// ------------------------------------------------------------------------------------------------------------------------- @@ -424,6 +439,20 @@ ioReadOpen(bufferRead); TEST_RESULT_STR(strPtr(strNewBuf(ioReadBuf(bufferRead))), "a test string", "read into buffer"); + + // Drain read IO + // ------------------------------------------------------------------------------------------------------------------------- + bufferRead = ioBufferReadNew(BUFSTRDEF("a better test string")); + ioFilterGroupAdd(ioReadFilterGroup(bufferRead), ioSizeNew()); + + TEST_RESULT_BOOL(ioReadDrain(bufferRead), true, "drain read io"); + TEST_RESULT_UINT(varUInt64(ioFilterGroupResult(ioReadFilterGroup(bufferRead), SIZE_FILTER_TYPE_STR)), 20, "check length"); + + // Cannot open file + TEST_ASSIGN( + read, ioReadNewP((void *)998, .close = testIoReadClose, .open = testIoReadOpen, .read = testIoRead), + "create io read object"); + TEST_RESULT_BOOL(ioReadDrain(read), false, "cannot open"); } // ***************************************************************************************************************************** diff -Nru pgbackrest-2.15.1/test/src/module/common/ioTlsTest.c pgbackrest-2.16/test/src/module/common/ioTlsTest.c --- pgbackrest-2.15.1/test/src/module/common/ioTlsTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/ioTlsTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -81,7 +81,11 @@ // Second protocol exchange harnessTlsServerExpect("more protocol info"); harnessTlsServerReply("0123456789AB"); + harnessTlsServerClose(); + // Need data in read buffer to test tlsWriteContinue() + harnessTlsServerAccept(); + harnessTlsServerReply("0123456789AB"); harnessTlsServerClose(); exit(0); @@ -113,6 +117,19 @@ TEST_RESULT_BOOL(tlsClientHostVerifyName(strNew("a.bogus.host.com"), strNew("*.host.com")), false, "invalid host"); } + // Additional coverage not provided by other tests + // 
***************************************************************************************************************************** + if (testBegin("tlsError()")) + { + TlsClient *client = NULL; + + TEST_ASSIGN(client, tlsClientNew(strNew("99.99.99.99.99"), 9443, 0, true, NULL, NULL), "new client"); + + TEST_RESULT_BOOL(tlsError(client, SSL_ERROR_WANT_READ), true, "continue after want read"); + TEST_RESULT_BOOL(tlsError(client, SSL_ERROR_ZERO_RETURN), false, "check connection closed error"); + TEST_ERROR(tlsError(client, SSL_ERROR_WANT_X509_LOOKUP), ServiceError, "tls error [4]"); + } + // ***************************************************************************************************************************** if (testBegin("TlsClient verification")) { @@ -207,7 +224,7 @@ output = bufNew(12); TEST_ERROR( ioRead(tlsClientIoRead(client), output), FileReadError, - "unable to read data from 'tls.test.pgbackrest.org:9443' after 500ms"); + "timeout after 500ms waiting for read from 'tls.test.pgbackrest.org:9443'"); // ------------------------------------------------------------------------------------------------------------------------- input = BUFSTRDEF("more protocol info"); @@ -224,6 +241,16 @@ TEST_RESULT_INT(ioRead(tlsClientIoRead(client), output), 0, "read no output after eof"); TEST_RESULT_BOOL(ioReadEof(tlsClientIoRead(client)), true, " check eof = true"); + // ------------------------------------------------------------------------------------------------------------------------- + TEST_RESULT_VOID(tlsClientOpen(client), "open client again (was closed by server)"); + TEST_RESULT_BOOL(tlsWriteContinue(client, -1, SSL_ERROR_WANT_READ, 1), true, "continue on WANT_READ"); + TEST_RESULT_BOOL(tlsWriteContinue(client, 0, SSL_ERROR_NONE, 1), true, "continue on WANT_READ"); + TEST_ERROR( + tlsWriteContinue(client, 77, 0, 88), FileWriteError, + "unable to write to tls, write size 77 does not match expected size 88"); + TEST_ERROR(tlsWriteContinue(client, 0, 
SSL_ERROR_ZERO_RETURN, 1), FileWriteError, "unable to write to tls [6]"); + + // ------------------------------------------------------------------------------------------------------------------------- TEST_RESULT_BOOL(tlsClientStatStr() != NULL, true, "check statistics exist"); TEST_RESULT_VOID(tlsClientFree(client), "free client"); diff -Nru pgbackrest-2.15.1/test/src/module/common/typeJsonTest.c pgbackrest-2.16/test/src/module/common/typeJsonTest.c --- pgbackrest-2.15.1/test/src/module/common/typeJsonTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/typeJsonTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -288,11 +288,17 @@ TEST_ASSIGN(varListOuter, varNewVarLst(varLstNew()), "new variant list with keyValues"); varLstAdd(varVarLst(varListOuter), varNewStrZ("ASTRING")); + varLstAdd(varVarLst(varListOuter), varNewInt64(9223372036854775807LL)); + varLstAdd(varVarLst(varListOuter), varNewInt(2147483647)); + varLstAdd(varVarLst(varListOuter), varNewBool(true)); + varLstAdd(varVarLst(varListOuter), varNewVarLst(varLstNew())); + varLstAdd(varVarLst(varListOuter), NULL); varLstAdd(varVarLst(varListOuter), keyValue); TEST_ASSIGN(json, jsonFromVar(varListOuter, 0), "VariantList - no indent"); TEST_RESULT_STR(strPtr(json), - "[\"ASTRING\",{\"backup-info-size-delta\":1982702,\"backup-prior\":\"20161219-212741F_20161219-212803I\"," + "[\"ASTRING\",9223372036854775807,2147483647,true,[],null,{\"backup-info-size-delta\":1982702," + "\"backup-prior\":\"20161219-212741F_20161219-212803I\"," "\"backup-reference\":[\"20161219-212741F\",\"20161219-212741F_20161219-212803I\",null]," "\"backup-timestamp-start\":1482182951,\"checksum-page-error\":[1]," "\"section\":{\"escape\":\"\\\"\\\\/\\b\\n\\r\\t\\f\",\"key1\":\"value1\",\"key2\":null,\"key3\":\"value2\"}}]", @@ -303,7 +309,8 @@ TEST_ASSIGN(json, jsonFromVar(varListOuter, 0), "VariantList - no indent - multiple elements"); TEST_RESULT_STR(strPtr(json), - 
"[\"ASTRING\",{\"backup-info-size-delta\":1982702,\"backup-prior\":\"20161219-212741F_20161219-212803I\"," + "[\"ASTRING\",9223372036854775807,2147483647,true,[],null,{\"backup-info-size-delta\":1982702," + "\"backup-prior\":\"20161219-212741F_20161219-212803I\"," "\"backup-reference\":[\"20161219-212741F\",\"20161219-212741F_20161219-212803I\",null]," "\"backup-timestamp-start\":1482182951,\"checksum-page-error\":[1]," "\"section\":{\"escape\":\"\\\"\\\\/\\b\\n\\r\\t\\f\",\"key1\":\"value1\",\"key2\":null,\"key3\":\"value2\"}}," @@ -317,6 +324,11 @@ TEST_RESULT_STR(strPtr(json), "[\n" " \"ASTRING\",\n" + " 9223372036854775807,\n" + " 2147483647,\n" + " true,\n" + " []\n,\n" + " null,\n" " {\n" " \"backup-info-size-delta\" : 1982702,\n" " \"backup-prior\" : \"20161219-212741F_20161219-212803I\",\n" diff -Nru pgbackrest-2.15.1/test/src/module/common/typeListTest.c pgbackrest-2.16/test/src/module/common/typeListTest.c --- pgbackrest-2.15.1/test/src/module/common/typeListTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/common/typeListTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -37,12 +37,15 @@ TEST_RESULT_INT(list->listSize, 0, "list size"); TEST_RESULT_INT(list->listSizeMax, 0, "list size max"); TEST_RESULT_PTR(lstMemContext(list), list->memContext, "list mem context"); + TEST_RESULT_VOID(lstClear(list), "clear list"); void *ptr = NULL; TEST_RESULT_PTR(lstAdd(list, &ptr), list, "add item"); - TEST_RESULT_STR(strPtr(lstToLog(list)), "{size: 1}", "check log"); + TEST_RESULT_VOID(lstClear(list), "clear list"); + TEST_RESULT_STR(strPtr(lstToLog(list)), "{size: 0}", "check log after clear"); + TEST_RESULT_VOID(lstFree(list), "free list"); TEST_RESULT_VOID(lstFree(lstNew(1)), "free empty list"); TEST_RESULT_VOID(lstFree(NULL), "free null list"); diff -Nru pgbackrest-2.15.1/test/src/module/db/dbTest.c pgbackrest-2.16/test/src/module/db/dbTest.c --- pgbackrest-2.15.1/test/src/module/db/dbTest.c 1970-01-01 00:00:00.000000000 +0000 +++ 
pgbackrest-2.16/test/src/module/db/dbTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,279 @@ +/*********************************************************************************************************************************** +Test Database +***********************************************************************************************************************************/ +#include "common/harnessConfig.h" +#include "common/harnessFork.h" +#include "common/harnessLog.h" +#include "common/harnessPq.h" + +#include "common/io/handleRead.h" +#include "common/io/handleWrite.h" + +/*********************************************************************************************************************************** +Test Run +***********************************************************************************************************************************/ +void +testRun(void) +{ + FUNCTION_HARNESS_VOID(); + + // ***************************************************************************************************************************** + if (testBegin("Db and dbProtocol()")) + { + HARNESS_FORK_BEGIN() + { + HARNESS_FORK_CHILD_BEGIN(0, true) + { + IoRead *read = ioHandleReadNew(strNew("client read"), HARNESS_FORK_CHILD_READ(), 2000); + ioReadOpen(read); + IoWrite *write = ioHandleWriteNew(strNew("client write"), HARNESS_FORK_CHILD_WRITE()); + ioWriteOpen(write); + + // Set options + StringList *argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--pg1-path=/path/to/pg"); + strLstAddZ(argList, "--command=backup"); + strLstAddZ(argList, "--type=db"); + strLstAddZ(argList, "--process=0"); + strLstAddZ(argList, "remote"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + // Set script + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN(1, "dbname='postgres' port=5432"), + HRNPQ_MACRO_SET_SEARCH_PATH(1), + HRNPQ_MACRO_VALIDATE_QUERY(1, PG_VERSION_84, "/pgdata"), + 
HRNPQ_MACRO_CLOSE(1), + + HRNPQ_MACRO_OPEN(1, "dbname='postgres' port=5432"), + HRNPQ_MACRO_SET_SEARCH_PATH(1), + HRNPQ_MACRO_VALIDATE_QUERY(1, PG_VERSION_84, "/pgdata"), + HRNPQ_MACRO_WAL_SWITCH(1, "xlog", "000000030000000200000003"), + HRNPQ_MACRO_CLOSE(1), + + HRNPQ_MACRO_DONE() + }); + + // Create server + ProtocolServer *server = NULL; + + TEST_ASSIGN(server, protocolServerNew(strNew("db test server"), strNew("test"), read, write), "create server"); + TEST_RESULT_VOID(protocolServerHandlerAdd(server, dbProtocol), "add handler"); + TEST_RESULT_VOID(protocolServerProcess(server), "run process loop"); + TEST_RESULT_VOID(protocolServerFree(server), "free server"); + } + HARNESS_FORK_CHILD_END(); + + HARNESS_FORK_PARENT_BEGIN() + { + IoRead *read = ioHandleReadNew(strNew("server read"), HARNESS_FORK_PARENT_READ_PROCESS(0), 2000); + ioReadOpen(read); + IoWrite *write = ioHandleWriteNew(strNew("server write"), HARNESS_FORK_PARENT_WRITE_PROCESS(0)); + ioWriteOpen(write); + + // Create client + ProtocolClient *client = NULL; + Db *db = NULL; + + TEST_ASSIGN(client, protocolClientNew(strNew("db test client"), strNew("test"), read, write), "create client"); + + // Open and free database + TEST_ASSIGN(db, dbNew(NULL, client, strNew("test")), "create db"); + TEST_RESULT_VOID(dbOpen(db), "open db"); + TEST_RESULT_VOID(dbFree(db), "free db"); + + // Open the database, but don't free it so the server is force to do it on shutdown + TEST_ASSIGN(db, dbNew(NULL, client, strNew("test")), "create db"); + TEST_RESULT_VOID(dbOpen(db), "open db"); + TEST_RESULT_STR(strPtr(dbWalSwitch(db)), "000000030000000200000003", " wal switch"); + TEST_RESULT_VOID(memContextCallbackClear(db->memContext), "clear context so close is not called"); + + TEST_RESULT_VOID(protocolClientFree(client), "free client"); + } + HARNESS_FORK_PARENT_END(); + } + HARNESS_FORK_END(); + } + + // 
***************************************************************************************************************************** + if (testBegin("dbGet()")) + { + DbGetResult result = {0}; + + // Error connecting to primary + // ------------------------------------------------------------------------------------------------------------------------- + StringList *argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "--pg1-path=/path/to/pg"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_CONNECTDB, .param = "[\"dbname='postgres' port=5432\"]"}, + {.function = HRNPQ_STATUS, .resultInt = CONNECTION_BAD}, + {.function = HRNPQ_ERRORMESSAGE, .resultZ = "error"}, + {.function = HRNPQ_FINISH}, + {.function = NULL} + }); + + TEST_ERROR(dbGet(true, true), DbConnectError, "unable to find primary cluster - cannot proceed"); + harnessLogResult( + "P00 WARN: unable to check pg-1: [DbConnectError] unable to connect to 'dbname='postgres' port=5432': error"); + + // Only cluster is a standby + // ------------------------------------------------------------------------------------------------------------------------- + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN(1, "dbname='postgres' port=5432"), + HRNPQ_MACRO_SET_SEARCH_PATH(1), + HRNPQ_MACRO_VALIDATE_QUERY(1, PG_VERSION_94, "/pgdata"), + HRNPQ_MACRO_SET_APPLICATION_NAME(1), + HRNPQ_MACRO_IS_STANDBY_QUERY(1, true), + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_DONE() + }); + + TEST_ERROR(dbGet(true, true), DbConnectError, "unable to find primary cluster - cannot proceed"); + + // Primary cluster found + // ------------------------------------------------------------------------------------------------------------------------- + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_84(1, 
"dbname='postgres' port=5432", "/pgdata"), + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_DONE() + }); + + TEST_ASSIGN(result, dbGet(true, true), "get primary only"); + + TEST_RESULT_INT(result.primaryId, 1, " check primary id"); + TEST_RESULT_BOOL(result.primary != NULL, true, " check primary"); + TEST_RESULT_INT(result.standbyId, 0, " check standby id"); + TEST_RESULT_BOOL(result.standby == NULL, true, " check standby"); + + TEST_RESULT_VOID(dbFree(result.primary), "free primary"); + + // More than one primary found + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "--pg1-path=/path/to/pg1"); + strLstAddZ(argList, "--pg8-path=/path/to/pg2"); + strLstAddZ(argList, "--pg8-port=5433"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_84(1, "dbname='postgres' port=5432", "/pgdata"), + HRNPQ_MACRO_OPEN_84(8, "dbname='postgres' port=5433", "/pgdata"), + + HRNPQ_MACRO_CLOSE(1), + HRNPQ_MACRO_CLOSE(8), + + HRNPQ_MACRO_DONE() + }); + + TEST_ERROR(dbGet(true, true), DbConnectError, "more than one primary cluster found"); + + // Two standbys found but no primary + // ------------------------------------------------------------------------------------------------------------------------- + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", "/pgdata", true), + HRNPQ_MACRO_OPEN_92(8, "dbname='postgres' port=5433", "/pgdata", true), + + HRNPQ_MACRO_CLOSE(8), + HRNPQ_MACRO_CLOSE(1), + + HRNPQ_MACRO_DONE() + }); + + TEST_ERROR(dbGet(false, true), DbConnectError, "unable to find primary cluster - cannot proceed"); + + // Two standbys and primary not required + // 
------------------------------------------------------------------------------------------------------------------------- + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", "/pgdata", true), + HRNPQ_MACRO_OPEN_92(8, "dbname='postgres' port=5433", "/pgdata", true), + + HRNPQ_MACRO_CLOSE(8), + HRNPQ_MACRO_CLOSE(1), + + HRNPQ_MACRO_DONE() + }); + + TEST_ASSIGN(result, dbGet(false, false), "get standbys"); + + TEST_RESULT_INT(result.primaryId, 0, " check primary id"); + TEST_RESULT_BOOL(result.primary == NULL, true, " check primary"); + TEST_RESULT_INT(result.standbyId, 1, " check standby id"); + TEST_RESULT_BOOL(result.standby != NULL, true, " check standby"); + + TEST_RESULT_VOID(dbFree(result.standby), "free standby"); + + // Primary and standby found + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "--pg1-path=/path/to/pg1"); + strLstAddZ(argList, "--pg4-path=/path/to/pg4"); + strLstAddZ(argList, "--pg4-port=5433"); + strLstAddZ(argList, "--pg5-host=localhost"); + strLstAdd(argList, strNewFmt("--pg5-host-user=%s", testUser())); + strLstAddZ(argList, "--pg5-path=/path/to/pg5"); + strLstAddZ(argList, "--pg8-path=/path/to/pg8"); + strLstAddZ(argList, "--pg8-port=5434"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + harnessPqScriptSet((HarnessPq []) + { + HRNPQ_MACRO_OPEN_92(1, "dbname='postgres' port=5432", "/pgdata", true), + + // pg-4 error + {.session = 4, .function = HRNPQ_CONNECTDB, .param = "[\"dbname='postgres' port=5433\"]"}, + {.session = 4, .function = HRNPQ_STATUS, .resultInt = CONNECTION_BAD}, + {.session = 4, .function = HRNPQ_ERRORMESSAGE, .resultZ = "error"}, + {.session = 4, .function = HRNPQ_FINISH}, + + 
HRNPQ_MACRO_OPEN_92(8, "dbname='postgres' port=5434", "/pgdata", false), + + HRNPQ_MACRO_CREATE_RESTORE_POINT(8, "2/3"), + HRNPQ_MACRO_WAL_SWITCH(8, "xlog", "000000010000000200000003"), + + HRNPQ_MACRO_CLOSE(8), + HRNPQ_MACRO_CLOSE(1), + + HRNPQ_MACRO_DONE() + }); + + TEST_ASSIGN(result, dbGet(false, true), "get primary and standy"); + harnessLogResultRegExp( + "P00 WARN: unable to check pg-4: \\[DbConnectError\\] unable to connect to 'dbname='postgres' port=5433': error\n" + "P00 WARN: unable to check pg-5: \\[DbConnectError\\] raised from remote-0 protocol on 'localhost':" + " unable to connect to 'dbname='postgres' port=5432': could not connect to server: No such file or directory.*"); + + TEST_RESULT_INT(result.primaryId, 8, " check primary id"); + TEST_RESULT_BOOL(result.primary != NULL, true, " check primary"); + TEST_RESULT_STR(strPtr(dbWalSwitch(result.primary)), "000000010000000200000003", " wal switch"); + TEST_RESULT_INT(result.standbyId, 1, " check standby id"); + TEST_RESULT_BOOL(result.standby != NULL, true, " check standby"); + + TEST_RESULT_VOID(dbFree(result.primary), "free primary"); + TEST_RESULT_VOID(dbFree(result.standby), "free standby"); + } + + FUNCTION_HARNESS_RESULT_VOID(); +} diff -Nru pgbackrest-2.15.1/test/src/module/info/infoBackupTest.c pgbackrest-2.16/test/src/module/info/infoBackupTest.c --- pgbackrest-2.15.1/test/src/module/info/infoBackupTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/info/infoBackupTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -20,7 +20,7 @@ InfoBackup *infoBackup = NULL; // ***************************************************************************************************************************** - if (testBegin("infoBackupNewLoad(), infoBackupDataTotal(), infoBackupCheckPg(), infoBackupFree()")) + if (testBegin("infoBackupNewLoad(), infoBackupDataTotal(), infoBackupFree()")) { // File missing 
//-------------------------------------------------------------------------------------------------------------------------- @@ -59,34 +59,6 @@ TEST_RESULT_PTR(infoBackup->backup, NULL, " backupCurrent NULL"); TEST_RESULT_INT(infoBackupDataTotal(infoBackup), 0, " infoBackupDataTotal returns 0"); - // infoBackupCheckPg - //-------------------------------------------------------------------------------------------------------------------------- - TEST_RESULT_INT(infoBackupCheckPg(infoBackup, 90400, 6569239123849665679, 201409291, 942), 1, "check PG data"); - - TEST_ERROR_FMT( - infoBackupCheckPg(infoBackup, 90500, 6569239123849665679, 201409291, 942), BackupMismatchError, - "database version = 9.5, system-id 6569239123849665679 does not match " - "backup version = 9.4, system-id = 6569239123849665679\n" - "HINT: is this the correct stanza?"); - - TEST_ERROR_FMT( - infoBackupCheckPg(infoBackup, 90400, 6569239123849665999, 201409291, 942), BackupMismatchError, - "database version = 9.4, system-id 6569239123849665999 does not match " - "backup version = 9.4, system-id = 6569239123849665679\n" - "HINT: is this the correct stanza?"); - - TEST_ERROR_FMT( - infoBackupCheckPg(infoBackup, 90400, 6569239123849665679, 201409291, 941), BackupMismatchError, - "database control-version = 941, catalog-version 201409291" - " does not match backup control-version = 942, catalog-version = 201409291\n" - "HINT: this may be a symptom of database or repository corruption!"); - - TEST_ERROR_FMT( - infoBackupCheckPg(infoBackup, 90400, 6569239123849665679, 201509291, 942), BackupMismatchError, - "database control-version = 942, catalog-version 201509291" - " does not match backup control-version = 942, catalog-version = 201409291\n" - "HINT: this may be a symptom of database or repository corruption!"); - // Free //-------------------------------------------------------------------------------------------------------------------------- TEST_RESULT_VOID(infoBackupFree(infoBackup), 
"infoBackupFree() - free backup info"); diff -Nru pgbackrest-2.15.1/test/src/module/info/infoPgTest.c pgbackrest-2.16/test/src/module/info/infoPgTest.c --- pgbackrest-2.15.1/test/src/module/info/infoPgTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/info/infoPgTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -10,7 +10,7 @@ testRun(void) { // ***************************************************************************************************************************** - if (testBegin("infoPgNewLoad(), infoPgFree(), infoPgDataCurrent(), infoPgDataToLog(), infoPgAdd(), infoPgIni()")) + if (testBegin("infoPgNewLoad(), infoPgFree(), infoPgDataCurrent(), infoPgDataToLog(), infoPgAdd(), infoPgIni(), infoPgSave()")) { String *content = NULL; String *fileName = strNewFmt("%s/test.ini", testPath()); @@ -108,12 +108,12 @@ // Save the file and verify it ini = iniNew(); - TEST_RESULT_VOID(infoPgSave(infoPg, ini, storageLocalWrite(), fileName2, cipherTypeNone, NULL), "save file"); + TEST_RESULT_VOID(infoPgSave(infoPg, ini, storageLocalWrite(), fileName2, cipherTypeNone, NULL), "infoPgSave"); TEST_RESULT_BOOL( bufEq( storageGetNP(storageNewReadNP(storageLocal(), fileName)), storageGetNP(storageNewReadNP(storageLocal(), fileName2))), - true, "files are equal"); + true, " saved files are equal"); TEST_RESULT_INT(lstSize(infoPg->history), 2, "history record added"); diff -Nru pgbackrest-2.15.1/test/src/module/postgres/clientTest.c pgbackrest-2.16/test/src/module/postgres/clientTest.c --- pgbackrest-2.15.1/test/src/module/postgres/clientTest.c 1970-01-01 00:00:00.000000000 +0000 +++ pgbackrest-2.16/test/src/module/postgres/clientTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -0,0 +1,299 @@ +/*********************************************************************************************************************************** +Test PostgreSQL Client + +This test can be run two ways: + +1) The default uses a pqlib shim to simulate a PostgreSQL connection. 
This will work with all VM types. + +2) Optionally use a real cluster for testing (only works with debian/pg11). The test Makefile must be manually updated with the +-DHARNESS_PQ_REAL flag and -lpq must be added to the libs list. This method does not have 100% coverage but is very close. +***********************************************************************************************************************************/ +#include "common/type/json.h" + +#include "common/harnessPq.h" + +/*********************************************************************************************************************************** +Test Run +***********************************************************************************************************************************/ +void +testRun(void) +{ + FUNCTION_HARNESS_VOID(); + + // ***************************************************************************************************************************** + if (testBegin("pgClient")) + { + // Create and start the test database + // ------------------------------------------------------------------------------------------------------------------------- +#ifdef HARNESS_PQ_REAL + if (system("sudo pg_createcluster 11 test") != 0) + THROW(AssertError, "unable to create cluster"); + + if (system("sudo pg_ctlcluster 11 test start") != 0) + THROW(AssertError, "unable to start cluster"); + + if (system(strPtr(strNewFmt("sudo -u postgres psql -c 'create user %s superuser'", testUser()))) != 0) + THROW(AssertError, "unable to create superuser"); +#endif + + // Test connection error + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_CONNECTDB, .param = "[\"dbname='postg \\\\'\\\\\\\\res' port=5433\"]"}, + {.function = HRNPQ_STATUS, .resultInt = CONNECTION_BAD}, + {.function = HRNPQ_ERRORMESSAGE, .resultZ = + "could not connect to 
server: No such file or directory\n" + "\tIs the server running locally and accepting\n" + "\tconnections on Unix domain socket \"/var/run/postgresql/.s.PGSQL.5433\"?\n"}, + {.function = HRNPQ_FINISH}, + {.function = NULL} + }); +#endif + + PgClient *client = NULL; + + MEM_CONTEXT_TEMP_BEGIN() + { + TEST_ASSIGN(client, pgClientNew(NULL, 5433, strNew("postg '\\res"), NULL, 3000), "new client"); + TEST_RESULT_VOID(pgClientMove(client, MEM_CONTEXT_OLD()), "move client"); + TEST_RESULT_VOID(pgClientMove(NULL, MEM_CONTEXT_OLD()), "move null client"); + } + MEM_CONTEXT_TEMP_END(); + + TEST_ERROR( + pgClientOpen(client), DbConnectError, + "unable to connect to 'dbname='postg \\'\\\\res' port=5433': could not connect to server: No such file or directory\n" + "\tIs the server running locally and accepting\n" + "\tconnections on Unix domain socket \"/var/run/postgresql/.s.PGSQL.5433\"?"); + TEST_RESULT_VOID(pgClientFree(client), "free client"); + + // Test send error + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_CONNECTDB, .param = "[\"dbname='postgres' port=5432\"]"}, + {.function = HRNPQ_STATUS, .resultInt = CONNECTION_OK}, + {.function = HRNPQ_SENDQUERY, .param = "[\"select bogus from pg_class\"]", .resultInt = 0}, + {.function = HRNPQ_ERRORMESSAGE, .resultZ = "another command is already in progress\n"}, + {.function = HRNPQ_FINISH}, + {.function = NULL} + }); +#endif + + TEST_ASSIGN(client, pgClientOpen(pgClientNew(NULL, 5432, strNew("postgres"), NULL, 3000)), "new client"); + +#ifdef HARNESS_PQ_REAL + PQsendQuery(client->connection, "select bogus from pg_class"); +#endif + + String *query = strNew("select bogus from pg_class"); + + TEST_ERROR( + pgClientQuery(client, query), DbQueryError, + "unable to send query 'select bogus from pg_class': another command is already in progress"); + + 
TEST_RESULT_VOID(pgClientFree(client), "free client"); + + // Connect + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_CONNECTDB, .param = strPtr( + strNewFmt("[\"dbname='postgres' port=5432 user='%s' host='/var/run/postgresql'\"]", testUser()))}, + {.function = HRNPQ_STATUS, .resultInt = CONNECTION_OK}, + {.function = NULL} + }); +#endif + + TEST_ASSIGN( + client, pgClientOpen(pgClientNew(strNew("/var/run/postgresql"), 5432, strNew("postgres"), strNew(testUser()), 500)), + "new client"); + + // Invalid query + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = "[\"select bogus from pg_class\"]", .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY}, + {.function = HRNPQ_GETRESULT}, + {.function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_FATAL_ERROR}, + {.function = HRNPQ_RESULTERRORMESSAGE, .resultZ = + "ERROR: column \"bogus\" does not exist\n" + "LINE 1: select bogus from pg_class\n" + " ^ \n"}, + {.function = HRNPQ_CLEAR}, + {.function = HRNPQ_GETRESULT, .resultNull = true}, + {.function = NULL} + }); +#endif + + query = strNew("select bogus from pg_class"); + + TEST_ERROR( + pgClientQuery(client, query), DbQueryError, + "unable to execute query 'select bogus from pg_class': ERROR: column \"bogus\" does not exist\n" + "LINE 1: select bogus from pg_class\n" + " ^"); + + // Timeout query + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = "[\"select pg_sleep(3000)\"]", .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT, 
.sleep = 600}, + {.function = HRNPQ_ISBUSY, .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY, .resultInt = 1}, + {.function = HRNPQ_GETCANCEL}, + {.function = HRNPQ_CANCEL, .resultInt = 1}, + {.function = HRNPQ_FREECANCEL}, + {.function = HRNPQ_GETRESULT}, + {.function = HRNPQ_CLEAR}, + {.function = HRNPQ_GETRESULT, .resultNull = true}, + {.function = NULL} + }); +#endif + + query = strNew("select pg_sleep(3000)"); + + TEST_ERROR(pgClientQuery(client, query), DbQueryError, "query 'select pg_sleep(3000)' timed out after 500ms"); + + // Cancel error (can only be run with the scripted tests + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = "[\"select pg_sleep(3000)\"]", .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT, .sleep = 600}, + {.function = HRNPQ_ISBUSY, .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY, .resultInt = 1}, + {.function = HRNPQ_GETCANCEL}, + {.function = HRNPQ_CANCEL, .resultInt = 0, .resultZ = "test error"}, + {.function = HRNPQ_FREECANCEL}, + {.function = NULL} + }); + + query = strNew("select pg_sleep(3000)"); + + TEST_ERROR(pgClientQuery(client, query), DbQueryError, "unable to cancel query 'select pg_sleep(3000)': test error"); +#endif + + // Execute do block and raise notice + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = "[\"do $$ begin raise notice 'mememe'; end $$\"]", .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY}, + {.function = HRNPQ_GETRESULT}, + {.function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_COMMAND_OK}, + {.function = HRNPQ_CLEAR}, + {.function = HRNPQ_GETRESULT, 
.resultNull = true}, + {.function = NULL} + }); +#endif + + query = strNew("do $$ begin raise notice 'mememe'; end $$"); + + TEST_RESULT_PTR(pgClientQuery(client, query), NULL, "execute do block"); + + // Unsupported type + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = "[\"select clock_timestamp()\"]", .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY}, + {.function = HRNPQ_GETRESULT}, + {.function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_TUPLES_OK}, + {.function = HRNPQ_NTUPLES, .resultInt = 1}, + {.function = HRNPQ_NFIELDS, .resultInt = 1}, + {.function = HRNPQ_FTYPE, .param = "[0]", .resultInt = 1184}, + {.function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = "2019-07-25 12:06:09.000282+00"}, + {.function = HRNPQ_CLEAR}, + {.function = HRNPQ_GETRESULT, .resultNull = true}, + {.function = NULL} + }); +#endif + + query = strNew("select clock_timestamp()"); + + TEST_ERROR( + pgClientQuery(client, query), FormatError, + "unable to parse type 1184 in column 0 for query 'select clock_timestamp()'"); + + // Successful query + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_SENDQUERY, .param = + "[\"select oid, case when relname = 'pg_class' then null::text else '' end, relname, relname = 'pg_class'" + " from pg_class where relname in ('pg_class', 'pg_proc')" + " order by relname\"]", + .resultInt = 1}, + {.function = HRNPQ_CONSUMEINPUT}, + {.function = HRNPQ_ISBUSY}, + {.function = HRNPQ_GETRESULT}, + {.function = HRNPQ_RESULTSTATUS, .resultInt = PGRES_TUPLES_OK}, + + {.function = HRNPQ_NTUPLES, .resultInt = 2}, + {.function = HRNPQ_NFIELDS, .resultInt = 4}, + {.function = HRNPQ_FTYPE, .param = 
"[0]", .resultInt = HRNPQ_TYPE_INT}, + {.function = HRNPQ_FTYPE, .param = "[1]", .resultInt = HRNPQ_TYPE_TEXT}, + {.function = HRNPQ_FTYPE, .param = "[2]", .resultInt = HRNPQ_TYPE_TEXT}, + {.function = HRNPQ_FTYPE, .param = "[3]", .resultInt = HRNPQ_TYPE_BOOL}, + + {.function = HRNPQ_GETVALUE, .param = "[0,0]", .resultZ = "1259"}, + {.function = HRNPQ_GETVALUE, .param = "[0,1]", .resultZ = ""}, + {.function = HRNPQ_GETISNULL, .param = "[0,1]", .resultInt = 1}, + {.function = HRNPQ_GETVALUE, .param = "[0,2]", .resultZ = "pg_class"}, + {.function = HRNPQ_GETVALUE, .param = "[0,3]", .resultZ = "t"}, + + {.function = HRNPQ_GETVALUE, .param = "[1,0]", .resultZ = "1255"}, + {.function = HRNPQ_GETVALUE, .param = "[1,1]", .resultZ = ""}, + {.function = HRNPQ_GETISNULL, .param = "[1,1]", .resultInt = 0}, + {.function = HRNPQ_GETVALUE, .param = "[1,2]", .resultZ = "pg_proc"}, + {.function = HRNPQ_GETVALUE, .param = "[1,3]", .resultZ = "f"}, + + {.function = HRNPQ_CLEAR}, + {.function = HRNPQ_GETRESULT, .resultNull = true}, + {.function = NULL} + }); +#endif + + query = strNew( + "select oid, case when relname = 'pg_class' then null::text else '' end, relname, relname = 'pg_class'" + " from pg_class where relname in ('pg_class', 'pg_proc')" + " order by relname"); + + TEST_RESULT_STR( + strPtr(jsonFromVar(varNewVarLst(pgClientQuery(client, query)), 0)), + "[[1259,null,\"pg_class\",true],[1255,\"\",\"pg_proc\",false]]", "simple query"); + + // Close connection + // ------------------------------------------------------------------------------------------------------------------------- +#ifndef HARNESS_PQ_REAL + harnessPqScriptSet((HarnessPq []) + { + {.function = HRNPQ_FINISH}, + {.function = HRNPQ_GETRESULT, .resultNull = true}, + {.function = NULL} + }); +#endif + TEST_RESULT_VOID(pgClientClose(client), "close client"); + TEST_RESULT_VOID(pgClientClose(client), "close client again"); + } + + FUNCTION_HARNESS_RESULT_VOID(); +} diff -Nru 
pgbackrest-2.15.1/test/src/module/postgres/interfaceTest.c pgbackrest-2.16/test/src/module/postgres/interfaceTest.c --- pgbackrest-2.15.1/test/src/module/postgres/interfaceTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/postgres/interfaceTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -95,6 +95,13 @@ } // ***************************************************************************************************************************** + if (testBegin("pgWalName()")) + { + TEST_RESULT_STR(strPtr(pgWalName(PG_VERSION_96)), "xlog", "check xlog name"); + TEST_RESULT_STR(strPtr(pgWalName(PG_VERSION_10)), "wal", "check wal name"); + } + + // ***************************************************************************************************************************** if (testBegin("pgWalFromBuffer() and pgWalFromFile()")) { String *walFile = strNewFmt("%s/0000000F0000000F0000000F", testPath()); @@ -157,7 +164,6 @@ "{version: 110000, systemId: 1030522662895, walSegmentSize: 16777216, pageChecksum: true}", "check log"); } - // ***************************************************************************************************************************** if (testBegin("pgWalToLog()")) { diff -Nru pgbackrest-2.15.1/test/src/module/protocol/protocolTest.c pgbackrest-2.16/test/src/module/protocol/protocolTest.c --- pgbackrest-2.15.1/test/src/module/protocol/protocolTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/protocol/protocolTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -66,7 +66,7 @@ strNew(testPath()), STORAGE_MODE_FILE_DEFAULT, STORAGE_MODE_PATH_DEFAULT, true, NULL); // ***************************************************************************************************************************** - if (testBegin("repoIsLocal()")) + if (testBegin("repoIsLocal() and pgIsLocal()")) { StringList *argList = strLstNew(); strLstAddZ(argList, "pgbackrest"); @@ -85,6 +85,32 @@ harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); 
TEST_RESULT_BOOL(repoIsLocal(), false, "repo is remote"); + + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--pg1-path=/path/to"); + strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_RESULT_BOOL(pgIsLocal(1), true, "pg is local"); + + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--pg7-path=/path/to"); + strLstAddZ(argList, "--pg7-host=test1"); + strLstAddZ(argList, "--host-id=7"); + strLstAddZ(argList, "--command=backup"); + strLstAddZ(argList, "--type=db"); + strLstAddZ(argList, "--process=0"); + strLstAddZ(argList, "local"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_RESULT_BOOL(pgIsLocal(7), false, "pg is remote"); } // ***************************************************************************************************************************** @@ -124,16 +150,22 @@ // ***************************************************************************************************************************** if (testBegin("protocolRemoteParam()")) { + storagePutNP(storageNewWriteNP(storageTest, strNew("pgbackrest.conf")), bufNew(0)); + StringList *argList = strLstNew(); strLstAddZ(argList, "pgbackrest"); strLstAddZ(argList, "--stanza=test1"); strLstAddZ(argList, "--repo1-host=repo-host"); strLstAddZ(argList, "--repo1-host-user=repo-host-user"); + // Local config settings should never be passed to the remote + strLstAdd(argList, strNewFmt("--config=%s/pgbackrest.conf", testPath())); + strLstAdd(argList, strNewFmt("--config-include-path=%s", testPath())); 
+ strLstAdd(argList, strNewFmt("--config-path=%s", testPath())); strLstAddZ(argList, "archive-get"); harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); TEST_RESULT_STR( - strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 0), "|")), + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 0, 0), "|")), strPtr( strNew( "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|repo-host-user@repo-host" @@ -156,7 +188,7 @@ harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); TEST_RESULT_STR( - strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 1), "|")), + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 1, 0), "|")), strPtr( strNew( "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|-p|444|repo-host-user@repo-host" @@ -172,19 +204,86 @@ strLstAddZ(argList, "--command=archive-get"); strLstAddZ(argList, "--process=3"); strLstAddZ(argList, "--host-id=1"); - strLstAddZ(argList, "--type=db"); + strLstAddZ(argList, "--type=backup"); strLstAddZ(argList, "--repo1-host=repo-host"); strLstAddZ(argList, "local"); harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); TEST_RESULT_STR( - strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 66), "|")), + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypeRepo, 66, 0), "|")), strPtr( strNew( "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|pgbackrest@repo-host" "|pgbackrest --c --command=archive-get --log-level-file=off --log-level-stderr=error --process=3" " --stanza=test1 --type=backup remote")), - "remote protocol params for local"); + "remote protocol params for backup local"); + + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--pg1-path=/path/to/1"); + strLstAddZ(argList, "--pg1-host=pg1-host"); + 
strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_RESULT_STR( + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypePg, 1, 0), "|")), + strPtr( + strNew( + "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|postgres@pg1-host" + "|pgbackrest --c --command=backup --log-level-file=off --log-level-stderr=error --pg1-path=/path/to/1" + " --process=1 --stanza=test1 --type=db remote")), + "remote protocol params for db backup"); + + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--command=backup"); + strLstAddZ(argList, "--process=4"); + strLstAddZ(argList, "--host-id=2"); + strLstAddZ(argList, "--pg1-path=/path/to/1"); + strLstAddZ(argList, "--pg2-path=/path/to/2"); + strLstAddZ(argList, "--pg2-host=pg2-host"); + strLstAddZ(argList, "--type=db"); + strLstAddZ(argList, "local"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_RESULT_STR( + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypePg, 1, 1), "|")), + strPtr( + strNew( + "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|postgres@pg2-host" + "|pgbackrest --c --command=backup --log-level-file=off --log-level-stderr=error --pg1-path=/path/to/2" + " --process=4 --stanza=test1 --type=db remote")), + "remote protocol params for db local"); + + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=test1"); + strLstAddZ(argList, "--command=backup"); + strLstAddZ(argList, "--process=4"); + strLstAddZ(argList, "--host-id=3"); + strLstAddZ(argList, "--pg1-path=/path/to/1"); + strLstAddZ(argList, 
"--pg3-path=/path/to/3"); + strLstAddZ(argList, "--pg3-host=pg3-host"); + strLstAddZ(argList, "--pg3-socket-path=/socket3"); + strLstAddZ(argList, "--pg3-port=3333"); + strLstAddZ(argList, "--type=db"); + strLstAddZ(argList, "local"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_RESULT_STR( + strPtr(strLstJoin(protocolRemoteParam(protocolStorageTypePg, 1, 2), "|")), + strPtr( + strNew( + "-o|LogLevel=error|-o|Compression=no|-o|PasswordAuthentication=no|postgres@pg3-host" + "|pgbackrest --c --command=backup --log-level-file=off --log-level-stderr=error --pg1-path=/path/to/3" + " --pg1-port=3333 --pg1-socket-path=/socket3 --process=4 --stanza=test1 --type=db remote")), + "remote protocol params for db local"); } // ***************************************************************************************************************************** @@ -664,8 +763,8 @@ TEST_RESULT_VOID(protocolFree(), "free protocol objects before anything has been created"); - TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo), "get remote protocol"); - TEST_RESULT_PTR(protocolRemoteGet(protocolStorageTypeRepo), client, "get remote cached protocol"); + TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo, 1), "get remote protocol"); + TEST_RESULT_PTR(protocolRemoteGet(protocolStorageTypeRepo, 1), client, "get remote cached protocol"); TEST_RESULT_PTR(protocolHelper.clientRemote[0].client, client, "check position in cache"); TEST_RESULT_VOID(protocolKeepAlive(), "keep alive"); TEST_RESULT_VOID(protocolFree(), "free remote protocol objects"); @@ -695,7 +794,7 @@ harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); TEST_RESULT_STR(strPtr(cfgOptionStr(cfgOptRepoCipherPass)), "acbd", "check cipher pass before"); - TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo), "get remote protocol"); + TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo, 1), "get remote protocol"); TEST_RESULT_PTR(protocolHelper.clientRemote[0].client, 
client, "check position in cache"); TEST_RESULT_STR(strPtr(cfgOptionStr(cfgOptRepoCipherPass)), "acbd", "check cipher pass after"); @@ -721,9 +820,26 @@ harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); TEST_RESULT_PTR(cfgOptionStr(cfgOptRepoCipherPass), NULL, "check cipher pass before"); - TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo), "get remote protocol"); + TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypeRepo, 1), "get remote protocol"); TEST_RESULT_STR(strPtr(cfgOptionStr(cfgOptRepoCipherPass)), "dcba", "check cipher pass after"); + TEST_RESULT_VOID(protocolFree(), "free remote protocol objects"); + + // Start db protocol + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "/usr/bin/pgbackrest"); + strLstAddZ(argList, "--stanza=db"); + strLstAddZ(argList, "--protocol-timeout=10"); + strLstAddZ(argList, "--repo1-retention-full=1"); + strLstAddZ(argList, "--pg1-host=localhost"); + strLstAdd(argList, strNewFmt("--pg1-host-user=%s", testUser())); + strLstAdd(argList, strNewFmt("--pg1-path=%s", testPath())); + strLstAddZ(argList, "backup"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_ASSIGN(client, protocolRemoteGet(protocolStorageTypePg, 1), "get remote protocol"); + // Start local protocol // ------------------------------------------------------------------------------------------------------------------------- argList = strLstNew(); diff -Nru pgbackrest-2.15.1/test/src/module/storage/cifsTest.c pgbackrest-2.16/test/src/module/storage/cifsTest.c --- pgbackrest-2.15.1/test/src/module/storage/cifsTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/storage/cifsTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -26,6 +26,8 @@ const Storage *storage = NULL; TEST_ASSIGN(storage, storageRepoGet(strNew(STORAGE_TYPE_CIFS), true), "get cifs repo storage"); 
TEST_RESULT_STR(strPtr(storage->type), "cifs", "check storage type"); + TEST_RESULT_BOOL(storageFeature(storage, storageFeaturePath), true, " check path feature"); + TEST_RESULT_BOOL(storageFeature(storage, storageFeatureCompress), true, " check compress feature"); // Create a FileWrite object with path sync enabled and ensure that path sync is false in the write object // ------------------------------------------------------------------------------------------------------------------------- diff -Nru pgbackrest-2.15.1/test/src/module/storage/posixTest.c pgbackrest-2.16/test/src/module/storage/posixTest.c --- pgbackrest-2.15.1/test/src/module/storage/posixTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/storage/posixTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -101,6 +101,8 @@ TEST_RESULT_PTR(storageInterface(storageTest).exists, storageTest->interface.exists, " check interface"); TEST_RESULT_PTR(storageDriver(storageTest), storageTest->driver, " check driver"); TEST_RESULT_PTR(storageType(storageTest), storageTest->type, " check type"); + TEST_RESULT_BOOL(storageFeature(storageTest, storageFeaturePath), true, " check path feature"); + TEST_RESULT_BOOL(storageFeature(storageTest, storageFeatureCompress), true, " check compress feature"); TEST_RESULT_VOID(storageFree(storageTest), "free storage"); } @@ -465,6 +467,11 @@ storagePathNP(storageTest, strNew("/path/toot")), AssertError, "absolute path '/path/toot' is not in base path '/path/to'"); + // Path enforcement disabled + storagePathEnforceSet(storageTest, false); + TEST_RESULT_STR(strPtr(storagePathNP(storageTest, strNew("/bogus"))), "/bogus", "path enforce disabled"); + storagePathEnforceSet(storageTest, true); + TEST_ERROR(storagePathNP(storageTest, strNew(" not found in path expression '" BOGUS_STR)), AssertError, @@ -1077,6 +1084,7 @@ strLstAddZ(argList, "--archive-async"); strLstAdd(argList, strNewFmt("--spool-path=%s", testPath())); strLstAdd(argList, 
strNewFmt("--pg1-path=%s/db", testPath())); + strLstAdd(argList, strNewFmt("--pg2-path=%s/db2", testPath())); strLstAddZ(argList, "archive-get"); harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); @@ -1130,6 +1138,13 @@ TEST_RESULT_STR(strPtr(storage->path), strPtr(strNewFmt("%s/db", testPath())), "check pg write storage path"); TEST_RESULT_BOOL(storage->write, true, "check pg write storage write"); + // Pg storage from another host id + // ------------------------------------------------------------------------------------------------------------------------- + cfgOptionSet(cfgOptHostId, cfgSourceParam, VARUINT64(2)); + cfgOptionValidSet(cfgOptHostId, true); + + TEST_RESULT_STR(strPtr(storagePgGet(false)->path), strPtr(strNewFmt("%s/db2", testPath())), "check pg-2 storage path"); + // Change the stanza to NULL, stanzaInit flag to false and make sure helper fails because stanza is required // ------------------------------------------------------------------------------------------------------------------------- storageHelper.storageSpool = NULL; diff -Nru pgbackrest-2.15.1/test/src/module/storage/remoteTest.c pgbackrest-2.16/test/src/module/storage/remoteTest.c --- pgbackrest-2.15.1/test/src/module/storage/remoteTest.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/storage/remoteTest.c 2019-08-05 16:03:04.000000000 +0000 @@ -1,9 +1,11 @@ /*********************************************************************************************************************************** Test Remote Storage ***********************************************************************************************************************************/ +#include "command/backup/pageChecksum.h" #include "common/crypto/cipherBlock.h" #include "common/io/bufferRead.h" #include "common/io/bufferWrite.h" +#include "postgres/interface.h" #include "common/harnessConfig.h" @@ -30,6 +32,16 @@ strLstAddZ(argList, "info"); harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); 
+ // Set type since we'll be running local and remote tests here + cfgOptionSet(cfgOptType, cfgSourceParam, VARSTRDEF("backup")); + cfgOptionValidSet(cfgOptType, true); + + // Set pg settings so we can run both db and backup remotes + cfgOptionSet(cfgOptPgHost, cfgSourceParam, VARSTRDEF("localhost")); + cfgOptionValidSet(cfgOptPgHost, true); + cfgOptionSet(cfgOptPgPath, cfgSourceParam, VARSTR(strNewFmt("%s/pg", testPath()))); + cfgOptionValidSet(cfgOptPgPath, true); + // Start a protocol server to test the remote protocol Buffer *serverRead = bufNew(8192); Buffer *serverWrite = bufNew(8192); @@ -48,6 +60,8 @@ Storage *storageRemote = NULL; TEST_ASSIGN(storageRemote, storageRepoGet(strNew(STORAGE_TYPE_POSIX), false), "get remote repo storage"); TEST_RESULT_UINT(storageInterface(storageRemote).feature, storageInterface(storageTest).feature, " check features"); + TEST_RESULT_BOOL(storageFeature(storageRemote, storageFeaturePath), true, " check path feature"); + TEST_RESULT_BOOL(storageFeature(storageRemote, storageFeatureCompress), true, " check compress feature"); // Check protocol function directly // ------------------------------------------------------------------------------------------------------------------------- @@ -64,12 +78,13 @@ TEST_RESULT_BOOL(storageRemoteProtocol(strNew(BOGUS_STR), varLstNew(), server), false, "invalid function"); } + // Do these tests against a db remote for coverage // ***************************************************************************************************************************** if (testBegin("storageExists()")) { Storage *storageRemote = NULL; - TEST_ASSIGN(storageRemote, storageRepoGet(strNew(STORAGE_TYPE_POSIX), false), "get remote repo storage"); - storagePathCreateNP(storageTest, strNew("repo")); + TEST_ASSIGN(storageRemote, storagePgGet(false), "get remote pg storage"); + storagePathCreateNP(storageTest, strNew("pg")); TEST_RESULT_BOOL(storageExistsNP(storageRemote, strNew("test.txt")), false, "file does not 
exist"); @@ -78,6 +93,9 @@ // Check protocol function directly // ------------------------------------------------------------------------------------------------------------------------- + cfgOptionSet(cfgOptType, cfgSourceParam, VARSTRDEF("db")); + cfgOptionValidSet(cfgOptType, true); + VariantList *paramList = varLstNew(); varLstAdd(paramList, varNewStr(strNew("test.txt"))); @@ -86,6 +104,9 @@ TEST_RESULT_STR(strPtr(strNewBuf(serverWrite)), "{\"out\":true}\n", "check result"); bufUsedSet(serverWrite, 0); + + cfgOptionSet(cfgOptType, cfgSourceParam, VARSTRDEF("db")); + cfgOptionValidSet(cfgOptType, true); } // ***************************************************************************************************************************** @@ -177,7 +198,7 @@ VariantList *paramList = varLstNew(); varLstAdd(paramList, varNewStr(strNew("missing.txt"))); varLstAdd(paramList, varNewBool(true)); - varLstAdd(paramList, varNewKv(kvNew())); + varLstAdd(paramList, varNewVarLst(varLstNew())); TEST_RESULT_BOOL( storageRemoteProtocol(PROTOCOL_COMMAND_STORAGE_OPEN_READ_STR, paramList, server), true, @@ -197,6 +218,11 @@ // Create filters to test filter logic IoFilterGroup *filterGroup = ioFilterGroupNew(); + ioFilterGroupAdd(filterGroup, ioSizeNew()); + ioFilterGroupAdd(filterGroup, cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioFilterGroupAdd(filterGroup, pageChecksumNew(0, PG_SEGMENT_PAGE_DEFAULT, PG_PAGE_SIZE_DEFAULT, 0)); + ioFilterGroupAdd(filterGroup, cipherBlockNew(cipherModeEncrypt, cipherTypeAes256Cbc, BUFSTRZ("x"), NULL)); + ioFilterGroupAdd(filterGroup, cipherBlockNew(cipherModeDecrypt, cipherTypeAes256Cbc, BUFSTRZ("x"), NULL)); ioFilterGroupAdd(filterGroup, gzipCompressNew(3, false)); ioFilterGroupAdd(filterGroup, gzipDecompressNew(false)); varLstAdd(paramList, ioFilterGroupParamAll(filterGroup)); @@ -208,13 +234,16 @@ "{\"out\":true}\n" "BRBLOCK4\n" "TESTBRBLOCK4\n" - "DATABRBLOCK0\n", + "DATABRBLOCK0\n" + 
"{\"out\":{\"buffer\":null,\"cipherBlock\":null,\"gzipCompress\":null,\"gzipDecompress\":null" + ",\"hash\":\"bbbcf2c59433f68f22376cd2439d6cd309378df6\",\"pageChecksum\":{\"align\":false,\"valid\":false}" + ",\"size\":8}}\n", "check result"); bufUsedSet(serverWrite, 0); ioBufferSizeSet(8192); - // Check for error on a bogus filter + // Check protocol function directly (file exists but all data goes to sink) // ------------------------------------------------------------------------------------------------------------------------- paramList = varLstNew(); varLstAdd(paramList, varNewStr(strNew("test.txt"))); @@ -222,12 +251,32 @@ // Create filters to test filter logic filterGroup = ioFilterGroupNew(); - ioFilterGroupAdd(filterGroup, cipherBlockNew(cipherModeEncrypt, cipherTypeAes256Cbc, BUFSTRDEF("X"), NULL)); + ioFilterGroupAdd(filterGroup, ioSizeNew()); + ioFilterGroupAdd(filterGroup, cryptoHashNew(HASH_TYPE_SHA1_STR)); + ioFilterGroupAdd(filterGroup, ioSinkNew()); varLstAdd(paramList, ioFilterGroupParamAll(filterGroup)); + TEST_RESULT_BOOL( + storageRemoteProtocol(PROTOCOL_COMMAND_STORAGE_OPEN_READ_STR, paramList, server), true, "protocol open read (sink)"); + TEST_RESULT_STR( + strPtr(strNewBuf(serverWrite)), + "{\"out\":true}\n" + "BRBLOCK0\n" + "{\"out\":{\"buffer\":null,\"hash\":\"bbbcf2c59433f68f22376cd2439d6cd309378df6\",\"sink\":null,\"size\":8}}\n", + "check result"); + + bufUsedSet(serverWrite, 0); + + // Check for error on a bogus filter + // ------------------------------------------------------------------------------------------------------------------------- + paramList = varLstNew(); + varLstAdd(paramList, varNewStr(strNew("test.txt"))); + varLstAdd(paramList, varNewBool(false)); + varLstAdd(paramList, varNewVarLst(varLstAdd(varLstNew(), varNewKv(kvAdd(kvNew(), varNewStrZ("bogus"), NULL))))); + TEST_ERROR( storageRemoteProtocol( - PROTOCOL_COMMAND_STORAGE_OPEN_READ_STR, paramList, server), AssertError, "unable to add filter 'cipherBlock'"); + 
PROTOCOL_COMMAND_STORAGE_OPEN_READ_STR, paramList, server), AssertError, "unable to add filter 'bogus'"); } // ***************************************************************************************************************************** @@ -312,7 +361,7 @@ varLstAdd(paramList, varNewBool(true)); varLstAdd(paramList, varNewBool(true)); varLstAdd(paramList, varNewBool(true)); - varLstAdd(paramList, varNewKv(kvNew())); + varLstAdd(paramList, ioFilterGroupParamAll(ioFilterGroupAdd(ioFilterGroupNew(), ioSizeNew()))); // Generate input (includes the input for the test below -- need a way to reset this for better testing) bufCat( @@ -329,7 +378,7 @@ TEST_RESULT_STR( strPtr(strNewBuf(serverWrite)), "{}\n" - "{}\n", + "{\"out\":{\"buffer\":null,\"size\":18}}\n", "check result"); TEST_RESULT_STR( @@ -353,7 +402,7 @@ varLstAdd(paramList, varNewBool(true)); varLstAdd(paramList, varNewBool(true)); varLstAdd(paramList, varNewBool(true)); - varLstAdd(paramList, varNewKv(kvNew())); + varLstAdd(paramList, varNewVarLst(varLstNew())); TEST_RESULT_BOOL( storageRemoteProtocol(PROTOCOL_COMMAND_STORAGE_OPEN_WRITE_STR, paramList, server), true, "protocol open write"); @@ -560,7 +609,6 @@ // ------------------------------------------------------------------------------------------------------------------------- VariantList *paramList = varLstNew(); varLstAdd(paramList, varNewStr(path)); - varLstAdd(paramList, varNewBool(false)); // ignoreMissing TEST_RESULT_BOOL( storageRemoteProtocol(PROTOCOL_COMMAND_STORAGE_PATH_SYNC_STR, paramList, server), true, diff -Nru pgbackrest-2.15.1/test/src/module/storage/s3Test.c pgbackrest-2.16/test/src/module/storage/s3Test.c --- pgbackrest-2.15.1/test/src/module/storage/s3Test.c 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/src/module/storage/s3Test.c 2019-08-05 16:03:04.000000000 +0000 @@ -113,6 +113,21 @@ // ------------------------------------------------------------------------------------------------------------------------- // File is 
written all at once harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_PUT, "/file.txt", "ABCD")); + harnessTlsServerReply(testS3ServerResponse( + 403, "Forbidden", NULL, + "" + "" + "RequestTimeTooSkewed" + "The difference between the request time and the current time is too large." + "20190726T221748Z" + "2019-07-26T22:33:27Z" + "900000" + "601AA1A7F7E37AE9" + "KYMys77PoloZrGCkiQRyOIl0biqdHsk4T2EdTkhzkH1l8x00D4lvv/py5uUuHwQXG9qz6NRuldQ=" + "")); + + harnessTlsServerAccept(); + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_PUT, "/file.txt", "ABCD")); harnessTlsServerReply(testS3ServerResponse(200, "OK", NULL, NULL)); // Zero-length file @@ -209,9 +224,42 @@ // storageDriverList() // ------------------------------------------------------------------------------------------------------------------------- - // Throw error + // Throw errors + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); + harnessTlsServerReply(testS3ServerResponse( 344, "Another bad status", NULL, NULL)); + + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); + harnessTlsServerReply(testS3ServerResponse( + 344, "Another bad status with xml", NULL, + "" + "" + "SomeOtherCode" + "")); + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); - harnessTlsServerReply(testS3ServerResponse(344, "Another bad status", NULL, NULL)); + harnessTlsServerReply(testS3ServerResponse( + 403, "Forbidden", NULL, + "" + "" + "RequestTimeTooSkewed" + "The difference between the request time and the current time is too large." + "")); + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); + harnessTlsServerReply(testS3ServerResponse( + 403, "Forbidden", NULL, + "" + "" + "RequestTimeTooSkewed" + "The difference between the request time and the current time is too large." 
+ "")); + harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); + harnessTlsServerReply(testS3ServerResponse( + 403, "Forbidden", NULL, + "" + "" + "RequestTimeTooSkewed" + "The difference between the request time and the current time is too large." + "")); // list a file/path in root harnessTlsServerExpect(testS3ServerRequest(HTTP_VERB_GET, "/?delimiter=%2F&list-type=2", NULL)); @@ -502,6 +550,8 @@ TEST_RESULT_STR( strPtr(((StorageS3 *)storage->driver)->secretAccessKey), strPtr(secretAccessKey), " check secret access key"); TEST_RESULT_PTR(((StorageS3 *)storage->driver)->securityToken, NULL, " check security token"); + TEST_RESULT_BOOL(storageFeature(storage, storageFeaturePath), false, " check path feature"); + TEST_RESULT_BOOL(storageFeature(storage, storageFeatureCompress), false, " check compress feature"); // Add default options // ------------------------------------------------------------------------------------------------------------------------- @@ -597,6 +647,39 @@ strPtr(((StorageS3 *)storage->driver)->secretAccessKey), strPtr(secretAccessKey), " check secret access key"); TEST_RESULT_STR( strPtr(((StorageS3 *)storage->driver)->securityToken), strPtr(securityToken), " check security token"); + + // Use the port option to override both + // ------------------------------------------------------------------------------------------------------------------------- + argList = strLstNew(); + strLstAddZ(argList, "pgbackrest"); + strLstAddZ(argList, "--stanza=db"); + strLstAddZ(argList, "--repo1-type=s3"); + strLstAdd(argList, strNewFmt("--repo1-path=%s", strPtr(path))); + strLstAdd(argList, strNewFmt("--repo1-s3-bucket=%s", strPtr(bucket))); + strLstAdd(argList, strNewFmt("--repo1-s3-region=%s", strPtr(region))); + strLstAdd(argList, strNewFmt("--repo1-s3-endpoint=%s:999", strPtr(endPoint))); + strLstAdd(argList, strNewFmt("--repo1-s3-host=%s:7777", strPtr(host))); + strLstAddZ(argList, "--repo1-s3-port=9001"); + 
strLstAddZ(argList, "--repo1-s3-ca-path=" TLS_CERT_FAKE_PATH); + strLstAddZ(argList, "--repo1-s3-ca-file=" TLS_CERT_FAKE_PATH "/pgbackrest-test.crt"); + setenv("PGBACKREST_REPO1_S3_KEY", strPtr(accessKey), true); + setenv("PGBACKREST_REPO1_S3_KEY_SECRET", strPtr(secretAccessKey), true); + setenv("PGBACKREST_REPO1_S3_TOKEN", strPtr(securityToken), true); + strLstAddZ(argList, "archive-get"); + harnessCfgLoad(strLstSize(argList), strLstPtr(argList)); + + TEST_ASSIGN(storage, storageRepoGet(strNew(STORAGE_TYPE_S3), false), "get S3 repo storage with options"); + TEST_RESULT_STR(strPtr(((StorageS3 *)storage->driver)->bucket), strPtr(bucket), " check bucket"); + TEST_RESULT_STR(strPtr(((StorageS3 *)storage->driver)->region), strPtr(region), " check region"); + TEST_RESULT_STR( + strPtr(((StorageS3 *)storage->driver)->bucketEndpoint), strPtr(strNewFmt("%s.%s", strPtr(bucket), strPtr(endPoint))), + " check host"); + TEST_RESULT_UINT(((StorageS3 *)storage->driver)->port, 9001, " check port"); + TEST_RESULT_STR(strPtr(((StorageS3 *)storage->driver)->accessKey), strPtr(accessKey), " check access key"); + TEST_RESULT_STR( + strPtr(((StorageS3 *)storage->driver)->secretAccessKey), strPtr(secretAccessKey), " check secret access key"); + TEST_RESULT_STR( + strPtr(((StorageS3 *)storage->driver)->securityToken), strPtr(securityToken), " check security token"); } // ***************************************************************************************************************************** @@ -802,6 +885,35 @@ "host: " S3_TEST_HOST "\n" "x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n" "x-amz-date: "); + TEST_ERROR(storageListNP(s3, strNew("/")), ProtocolError, + "S3 request failed with 344: Another bad status with xml\n" + "*** URI/Query ***:\n" + "/?delimiter=%2F&list-type=2\n" + "*** Request Headers ***:\n" + "authorization: \n" + "content-length: 0\n" + "host: " S3_TEST_HOST "\n" + "x-amz-content-sha256: 
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n" + "x-amz-date: \n" + "*** Response Headers ***:\n" + "content-length: 79\n" + "*** Response Content ***:\n" + "SomeOtherCode"); + TEST_ERROR(storageListNP(s3, strNew("/")), ProtocolError, + "S3 request failed with 403: Forbidden\n" + "*** URI/Query ***:\n" + "/?delimiter=%2F&list-type=2\n" + "*** Request Headers ***:\n" + "authorization: \n" + "content-length: 0\n" + "host: " S3_TEST_HOST "\n" + "x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n" + "x-amz-date: \n" + "*** Response Headers ***:\n" + "content-length: 179\n" + "*** Response Content ***:\n" + "RequestTimeTooSkewed" + "The difference between the request time and the current time is too large."); TEST_RESULT_STR(strPtr(strLstJoin(storageListNP(s3, strNew("/")), ",")), "path1,test1.txt", "list a file/path in root"); TEST_RESULT_STR( diff -Nru pgbackrest-2.15.1/test/test.pl pgbackrest-2.16/test/test.pl --- pgbackrest-2.15.1/test/test.pl 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/test.pl 2019-08-05 16:03:04.000000000 +0000 @@ -77,7 +77,6 @@ --no-cleanup don't cleaup after the last test is complete - useful for debugging --pg-version version of postgres to test (all, defaults to minimal) --log-force force overwrite of current test log files - --no-lint disable static source code analysis --build-only compile the test library / packages and run tests only --build-max max processes to use for builds (default 4) --coverage-only only run coverage tests (as a subset of selected tests) @@ -88,9 +87,9 @@ --smart perform libc/package builds only when source timestamps have changed --no-package do not build packages --no-ci-config don't overwrite the current continuous integration config - --dev --no-lint --smart --no-package --no-optimize - --dev-test --no-lint --no-package - --expect --no-lint --no-package --vm=co7 --db=9.6 --log-force + --dev --smart --no-package --no-optimize + --dev-test 
--no-package + --expect --no-package --vm=co7 --db=9.6 --log-force --no-valgrind don't run valgrind on C unit tests (saves time) --no-coverage don't run coverage on C unit tests (saves time) --no-optimize don't do compile optimization for C (saves compile time) @@ -145,7 +144,6 @@ my $strVmHost = VM_HOST_DEFAULT; my $bVmBuild = false; my $bVmForce = false; -my $bNoLint = false; my $bBuildOnly = false; my $iBuildMax = 4; my $bCoverageOnly = false; @@ -192,7 +190,6 @@ 'no-cleanup' => \$bNoCleanup, 'pg-version=s' => \$strPgVersion, 'log-force' => \$bLogForce, - 'no-lint' => \$bNoLint, 'build-only' => \$bBuildOnly, 'build-max=s' => \$iBuildMax, 'no-package' => \$bNoPackage, @@ -264,7 +261,6 @@ if ($bDev) { - $bNoLint = true; $bSmart = true; $bNoPackage = true; $bNoOptimize = true; @@ -273,7 +269,6 @@ if ($bDevTest) { $bNoPackage = true; - $bNoLint = true; } ################################################################################################################################ @@ -290,7 +285,6 @@ ################################################################################################################################ if ($bExpect) { - $bNoLint = true; $bNoPackage = true; $strVm = VM_EXPECT; $strPgVersion = '9.6'; @@ -342,10 +336,6 @@ { confess &log(ERROR, "select a single Debian-based VM for coverage testing"); } - elsif (!vmCoveragePerl($strVm)) - { - confess &log(ERROR, "only Debian-based VMs can be used for coverage testing"); - } } # If VM is not defined then set it to all @@ -540,7 +530,7 @@ # Auto-generate Perl code #----------------------------------------------------------------------------------------------------------------------- use lib dirname(dirname($0)) . 
'/libc/build/lib'; - use pgBackRestLibC::Build; ## no critic (Modules::ProhibitConditionalUseStatements) + use pgBackRestLibC::Build; if (!$bSmart || grep(/^(build|libc\/build)\//, @stryModifiedList)) { @@ -737,7 +727,7 @@ $oStorageTest->pathCreate($strCoveragePath, {strMode => '0770', bIgnoreExists => true, bCreateParent => true}); # Remove old coverage dirs -- do it this way so the dirs stay open in finder/explorer, etc. - executeTest("rm -rf ${strBackRestBase}/test/coverage/c/* ${strBackRestBase}/test/coverage/perl/*"); + executeTest("rm -rf ${strBackRestBase}/test/coverage/c/*"); # Overwrite the C coverage report so it will load but not show old coverage $oStorageTest->pathCreate("${strBackRestBase}/test/coverage", {strMode => '0770', bIgnoreExists => true}); @@ -849,9 +839,9 @@ # Build configure/compile options and see if they have changed from the previous build my $strCFlags = "-Wfatal-errors -g -fPIC -D_FILE_OFFSET_BITS=64" . - (vmWithBackTrace($strBuildVM) && $bNoLint && $bBackTrace ? ' -DWITH_BACKTRACE' : '') . + (vmWithBackTrace($strBuildVM) && $bBackTrace ? ' -DWITH_BACKTRACE' : '') . ($bDebugTestTrace ? ' -DDEBUG_TEST_TRACE' : ''); - my $strLdFlags = vmWithBackTrace($strBuildVM) && $bNoLint && $bBackTrace ? '-lbacktrace' : ''; + my $strLdFlags = vmWithBackTrace($strBuildVM) && $bBackTrace ? '-lbacktrace' : ''; my $strConfigOptions = (vmDebugIntegration($strBuildVM) ? 
' --enable-test' : ''); my $strBuildFlags = "CFLAGS=${strCFlags}\nLDFLAGS=${strLdFlags}\nCONFIGURE=${strConfigOptions}"; my $strBuildFlagFile = "${strBinPath}/${strBuildVM}/build.flags"; @@ -893,11 +883,6 @@ " ${strBackRestBase}/ ${strBinPath}/${strBuildVM}"); buildPutDiffers($oStorageBackRest, $strBuildFlagFile, $strBuildFlags); - if (vmLintC($strVm) && !$bNoLint) - { - &log(INFO, " clang static analyzer ${strBuildVM} (${strBuildPath})"); - } - if ($bBuildOptionsDiffer || !$oStorageBackRest->exists("${strBuildPath}/Makefile")) { executeTest( @@ -907,7 +892,6 @@ executeTest( 'docker exec -i test-build' . - (vmLintC($strVm) && !$bNoLint ? ' scan-build-6.0' : '') . " make -j ${iBuildMax}" . ($bLogDetail ? '' : ' --silent') . " --directory ${strBuildPath} CFLAGS='${strCFlags}' LDFLAGS='${strLdFlags}'", {bShowOutputAsync => $bLogDetail}); @@ -1229,21 +1213,6 @@ #--------------------------------------------------------------------------------------------------------------------------- if (!$bDryRun) { - # Run Perl critic - if (!$bNoLint && !$bBuildOnly) - { - my $strBasePath = dirname(dirname(abs_path($0))); - - &log(INFO, "Performing static code analysis using perlcritic"); - - executeTest('perlcritic --quiet --verbose=8 --brutal --top=10' . - ' --verbose "[%p] %f: %m at line %l, column %c. %e. (Severity: %s)\n"' . - " \"--profile=${strBasePath}/test/lint/perlcritic.policy\"" . - " ${strBasePath}/lib/*" . - " ${strBasePath}/test/test.pl ${strBasePath}/test/lib/*" . - " ${strBasePath}/doc/doc.pl ${strBasePath}/doc/lib/*"); - } - logFileSet($oStorageTest, cwd() . 
"/test"); } @@ -1346,7 +1315,7 @@ #--------------------------------------------------------------------------------------------------------------------------- my $iUncoveredCodeModuleTotal = 0; - if ((vmCoverageC($strVm) || vmCoveragePerl($strVm)) && !$bNoCoverage && !$bDryRun && $iTestFail == 0) + if (vmCoverageC($strVm) && !$bNoCoverage && !$bDryRun && $iTestFail == 0) { # Determine which modules were covered (only check coverage if all tests were successful) #----------------------------------------------------------------------------------------------------------------------- @@ -1403,104 +1372,6 @@ &log(INFO, 'no code modules had all tests run required for coverage'); } - # Generate Perl coverage report - #----------------------------------------------------------------------------------------------------------------------- - if (vmCoveragePerl($strVm)) - { - &log(INFO, 'writing Perl coverage report'); - - executeTest("cp -rp ${strCoveragePath} ${strCoveragePath}_temp"); - executeTest( - "cd ${strCoveragePath}_temp && " . - LIB_COVER_EXE . " -report json -outputdir ${strBackRestBase}/test/coverage/perl ${strCoveragePath}_temp", - {bSuppressStdErr => true}); - executeTest("sudo rm -rf ${strCoveragePath}_temp"); - executeTest("sudo cp -rp ${strCoveragePath} ${strCoveragePath}_temp"); - executeTest( - "cd ${strCoveragePath}_temp && " . - LIB_COVER_EXE . 
" -outputdir ${strBackRestBase}/test/coverage/perl ${strCoveragePath}_temp", - {bSuppressStdErr => true}); - executeTest("sudo rm -rf ${strCoveragePath}_temp"); - - # Load the results of coverage testing from JSON - my $oJSON = JSON::PP->new()->allow_nonref(); - my $hCoverageResult = $oJSON->decode(${$oStorageBackRest->get('test/coverage/perl/cover.json')}); - - foreach my $strCodeModule (sort(keys(%{$hCoverageActual}))) - { - # If the first char of the module is lower case then it's a c module - if (substr($strCodeModule, 0, 1) eq lc(substr($strCodeModule, 0, 1))) - { - next; - } - - # Create code module path -- where the file is located on disk - my $strCodeModulePath = "${strBackRestBase}/lib/" . PROJECT_NAME . "/${strCodeModule}.pm"; - - # Get summary results - my $hCoverageResultAll = $hCoverageResult->{'summary'}{$strCodeModulePath}{total}; - - # Try an extra / if the module is not found - if (!defined($hCoverageResultAll)) - { - $strCodeModulePath = "/${strCodeModulePath}"; - $hCoverageResultAll = $hCoverageResult->{'summary'}{$strCodeModulePath}{total}; - } - - # If module is marked as having no code - if ($hCoverageActual->{$strCodeModule} eq TESTDEF_COVERAGE_NOCODE) - { - # Error if it really does have coverage - if ($hCoverageResultAll) - { - confess &log(ERROR, "perl module ${strCodeModule} is marked 'no code' but has code"); - } - - # Skip to next module - next; - } - - if (!defined($hCoverageResultAll)) - { - confess &log(ERROR, "unable to find coverage results for ${strCodeModule}"); - } - - # Check that all code has been covered - my $iCoverageTotal = $hCoverageResultAll->{total}; - my $iCoverageUncoverable = coalesce($hCoverageResultAll->{uncoverable}, 0); - my $iCoverageCovered = coalesce($hCoverageResultAll->{covered}, 0); - - if ($hCoverageActual->{$strCodeModule} eq TESTDEF_COVERAGE_FULL) - { - my $iUncoveredLines = $iCoverageTotal - $iCoverageCovered - $iCoverageUncoverable; - - if ($iUncoveredLines != 0) - { - &log(ERROR, "perl module 
${strCodeModule} is not fully covered"); - $iUncoveredCodeModuleTotal++; - - &log(ERROR, ('-' x 80)); - executeTest( - "/usr/bin/cover -report text ${strCoveragePath} --select ${strBackRestBase}/lib/" . - PROJECT_NAME . "/${strCodeModule}.pm", - {bShowOutputAsync => true}); - &log(ERROR, ('-' x 80)); - } - } - # Else test how much partial coverage there was - elsif ($hCoverageActual->{$strCodeModule} eq TESTDEF_COVERAGE_PARTIAL) - { - my $iCoveragePercent = int(($iCoverageCovered + $iCoverageUncoverable) * 100 / $iCoverageTotal); - - if ($iCoveragePercent == 100) - { - &log(ERROR, "perl module ${strCodeModule} has 100% coverage but is not marked fully covered"); - $iUncoveredCodeModuleTotal++; - } - } - } - } - # Generate C coverage report #--------------------------------------------------------------------------------------------------------------------------- if (vmCoverageC($strVm)) diff -Nru pgbackrest-2.15.1/test/travis.pl pgbackrest-2.16/test/travis.pl --- pgbackrest-2.15.1/test/travis.pl 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/travis.pl 2019-08-05 16:03:04.000000000 +0000 @@ -134,23 +134,13 @@ confess &log(ERROR, '--vm is required'); } - # Only lint on U18 - my $strParam = undef; - - if ($strVm ne VM_U18) - { - $strParam .= '--no-lint'; - } - processBegin("${strVm} build"); executeTest("${strTestExe} --vm-build --vm=${strVm}", {bShowOutputAsync => true}); processEnd(); - processBegin("${strVm} test" . (defined($strParam) ? ": ${strParam}" : '')); + processBegin("${strVm} test"); executeTest( - "${strTestExe} --no-gen --no-ci-config --vm-host=" . VM_U14 . " --vm-max=2 --vm=${strVm}" . - (defined($strParam) ? " ${strParam}" : ''), - {bShowOutputAsync => true}); + "${strTestExe} --no-gen --no-ci-config --vm-host=" . VM_U14 . 
" --vm-max=2 --vm=${strVm}", {bShowOutputAsync => true}); processEnd(); } diff -Nru pgbackrest-2.15.1/test/Vagrantfile pgbackrest-2.16/test/Vagrantfile --- pgbackrest-2.15.1/test/Vagrantfile 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/test/Vagrantfile 2019-08-05 16:03:04.000000000 +0000 @@ -55,13 +55,12 @@ #--------------------------------------------------------------------------------------------------------------------------- echo 'Install Perl Modules' && date - apt-get install -y libdbd-pg-perl libio-socket-ssl-perl libxml-libxml-perl libxml-checker-perl libperl-critic-perl \ - libdevel-nytprof-perl libyaml-libyaml-perl + apt-get install -y libdbd-pg-perl libxml-checker-perl libyaml-libyaml-perl #--------------------------------------------------------------------------------------------------------------------------- echo 'Install Build Tools' && date apt-get install -y devscripts build-essential lintian git lcov cloc txt2man debhelper libssl-dev zlib1g-dev libperl-dev \ - libxml2-dev liblz4-dev + libxml2-dev liblz4-dev libpq-dev #--------------------------------------------------------------------------------------------------------------------------- echo 'Install AWS CLI' && date @@ -74,10 +73,6 @@ sudo -i -u vagrant aws configure set aws_secret_access_key verySecretKey1 #--------------------------------------------------------------------------------------------------------------------------- - echo 'Install Devel::Cover' && date - dpkg -i /backrest/test/package/u18-libdevel-cover-perl_1.29-2_amd64.deb - - #--------------------------------------------------------------------------------------------------------------------------- echo 'Install Docker' && date curl -fsSL https://get.docker.com | sh sudo usermod -aG docker vagrant diff -Nru pgbackrest-2.15.1/.travis.yml pgbackrest-2.16/.travis.yml --- pgbackrest-2.15.1/.travis.yml 2019-06-25 12:29:06.000000000 +0000 +++ pgbackrest-2.16/.travis.yml 2019-08-05 16:03:04.000000000 +0000 @@ -19,7 
+19,7 @@ - PGB_CI="doc" before_install: - - sudo apt-get -qq update && sudo apt-get install libxml-checker-perl libdbd-pg-perl libperl-critic-perl libtemplate-perl libpod-coverage-perl libtest-differences-perl libhtml-parser-perl lintian debhelper txt2man devscripts libjson-perl libio-socket-ssl-perl libxml-libxml-perl libyaml-libyaml-perl python-pip lcov libjson-maybexs-perl libperl-dev + - sudo apt-get -qq update && sudo apt-get install libxml-checker-perl libdbd-pg-perl libyaml-libyaml-perl python-pip lcov libperl-dev - | # Install & Configure AWS CLI pip install --upgrade --user awscli @@ -28,11 +28,6 @@ aws configure set aws_secret_access_key verySecretKey1 aws help --version aws configure list - - | - # Install Devel::Cover - sudo dpkg -i ${TRAVIS_BUILD_DIR?}/test/package/u14-libdevel-cover-perl_1.29-2_amd64.deb - sudo apt-get -f install - /usr/bin/cover -v install: - |