Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 9af4f5f

Browse files
committed
Merge branch 'master' into issue_63
2 parents 7f1fb47 + 87f28e7 commit 9af4f5f

File tree

3 files changed

+104
-6
lines changed

3 files changed

+104
-6
lines changed

README.md

-1
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,6 @@ Regardless of the chosen backup type, all backups taken with `pg_probackup` supp
4343
`pg_probackup` currently has the following limitations:
4444
* The server from which the backup was taken and the restored server must be compatible by the [block_size](https://postgrespro.com/docs/postgresql/current/runtime-config-preset#GUC-BLOCK-SIZE) and [wal_block_size](https://postgrespro.com/docs/postgresql/current/runtime-config-preset#GUC-WAL-BLOCK-SIZE) parameters and have the same major release number.
4545
* Remote mode is in beta stage.
46-
* Microsoft Windows operating system support is in beta stage.
4746

4847
## Installation and Setup
4948
### Linux Installation

tests/helpers/ptrack_helpers.py

+29
Original file line numberDiff line numberDiff line change
@@ -551,6 +551,35 @@ def check_ptrack_sanity(self, idx_dict):
551551
# success, 'Ptrack has failed to register changes in data files'
552552
# )
553553

554+
def get_backup_filelist(self, backup_dir, instance, backup_id):
    """Parse a backup's backup_content.control and return its file list.

    Each non-empty line of the control file is a JSON object describing
    one backed-up file.  The result maps each file's relative path to its
    parsed entry, so callers can look files up by path.

    Args:
        backup_dir: root directory of the backup catalog.
        instance: instance name inside the catalog.
        backup_id: id of the backup whose file list is requested.

    Returns:
        dict: {path: entry_dict} for every file recorded in the backup.
    """
    filelist_path = os.path.join(
        backup_dir, 'backups',
        instance, backup_id, 'backup_content.control')

    filelist = {}
    # iterate the file lazily instead of slurping it whole
    with open(filelist_path, 'r') as f:
        for line in f:
            line = line.strip()
            # tolerate blank/trailing lines in the control file
            if not line:
                continue
            entry = json.loads(line)
            filelist[entry['path']] = entry

    return filelist
571+
572+
def get_backup_filelist_diff(self, filelist_A, filelist_B):
    """Return entries of filelist_A whose paths do not exist in filelist_B.

    Args:
        filelist_A: dict {path: entry}, as returned by get_backup_filelist().
        filelist_B: dict {path: entry} to compare against.

    Returns:
        dict: the subset of filelist_A whose keys are absent from filelist_B
        (e.g. files present in a FULL backup but gone by a later PAGE backup).
    """
    return {
        path: entry
        for path, entry in filelist_A.items()
        if path not in filelist_B}
582+
554583
def check_ptrack_recovery(self, idx_dict):
555584
size = idx_dict['size']
556585
for PageNum in range(size):

tests/restore.py

+75-5
Original file line numberDiff line numberDiff line change
@@ -1732,11 +1732,12 @@ def test_restore_backup_from_future(self):
17321732
self.backup_node(backup_dir, 'node', node)
17331733

17341734
node.pgbench_init(scale=3)
1735-
#pgbench = node.pgbench(options=['-T', '20', '-c', '2'])
1736-
#pgbench.wait()
1735+
# pgbench = node.pgbench(options=['-T', '20', '-c', '2'])
1736+
# pgbench.wait()
17371737

17381738
# Take PAGE from future
1739-
backup_id = self.backup_node(backup_dir, 'node', node, backup_type='page')
1739+
backup_id = self.backup_node(
1740+
backup_dir, 'node', node, backup_type='page')
17401741

17411742
with open(
17421743
os.path.join(
@@ -1755,7 +1756,8 @@ def test_restore_backup_from_future(self):
17551756
pgbench = node.pgbench(options=['-T', '3', '-c', '2', '--no-vacuum'])
17561757
pgbench.wait()
17571758

1758-
backup_id = self.backup_node(backup_dir, 'node', node, backup_type='page')
1759+
backup_id = self.backup_node(
1760+
backup_dir, 'node', node, backup_type='page')
17591761
pgdata = self.pgdata_content(node.data_dir)
17601762

17611763
node.cleanup()
@@ -1964,7 +1966,8 @@ def test_restore_target_new_options(self):
19641966

19651967
target_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
19661968
with node.connect("postgres") as con:
1967-
res = con.execute("INSERT INTO tbl0005 VALUES ('inserted') RETURNING (xmin)")
1969+
res = con.execute(
1970+
"INSERT INTO tbl0005 VALUES ('inserted') RETURNING (xmin)")
19681971
con.commit()
19691972
target_xid = res[0][0]
19701973

@@ -2101,3 +2104,70 @@ def test_restore_target_new_options(self):
21012104

21022105
# Clean after yourself
21032106
self.del_test_dir(module_name, fname)
2107+
2108+
# @unittest.skip("skip")
def test_smart_restore(self):
    """
    Verify that restore skips files belonging to a dropped database:
    take a FULL backup with a database present, drop it, take a PAGE
    backup, restore the PAGE backup with verbose file logging, and
    check that none of the dropped files appear in the restore log.
    https://github.com/postgrespro/pg_probackup/issues/63
    """
    test_name = self.id().split('.')[3]
    node = self.make_simple_node(
        base_dir=os.path.join(module_name, test_name, 'node'),
        set_replication=True,
        initdb_params=['--data-checksums'])

    backup_dir = os.path.join(
        self.tmp_path, module_name, test_name, 'backup')
    self.init_pb(backup_dir)
    self.add_instance(backup_dir, 'node', node)
    self.set_archiving(backup_dir, 'node', node)
    node.slow_start()

    # database that will later be dropped
    node.safe_psql(
        "postgres",
        "CREATE DATABASE testdb")

    # FULL backup still contains testdb files
    full_backup_id = self.backup_node(backup_dir, 'node', node)

    # drop the database before the incremental backup
    node.safe_psql(
        "postgres",
        "DROP DATABASE testdb")

    # PAGE backup no longer references testdb files
    page_backup_id = self.backup_node(
        backup_dir, 'node', node, backup_type='page')

    # restore the PAGE backup with per-file verbose logging
    node.cleanup()
    self.restore_node(
        backup_dir, 'node', node, backup_id=page_backup_id,
        options=['--no-validate', '--log-level-file=VERBOSE'])

    log_path = os.path.join(backup_dir, 'log', 'pg_probackup.log')
    with open(log_path, 'r') as f:
        log_content = f.read()

    # files present in FULL but absent from PAGE = the dropped database
    files_in_full = self.get_backup_filelist(
        backup_dir, 'node', full_backup_id)

    files_in_page = self.get_backup_filelist(
        backup_dir, 'node', page_backup_id)

    dropped_files = self.get_backup_filelist_diff(
        files_in_full, files_in_page)

    # none of the dropped files may have been touched during restore
    for dropped_file in dropped_files:
        self.assertNotIn(dropped_file, log_content)

    # Clean after yourself
    self.del_test_dir(module_name, test_name)
2171+
2172+
2173+
# smart restore of external dirs

0 commit comments

Comments
 (0)