Skip to content

Commit 2e9a5b9

Browse files
committed
tests: added test_page_backup_with_alien_wal_segment
1 parent 5b55a57 commit 2e9a5b9

File tree

1 file changed

+111
-0
lines changed

1 file changed

+111
-0
lines changed

tests/page.py

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -846,3 +846,114 @@ def test_page_backup_with_corrupted_wal_segment(self):
846846

847847
# Clean after yourself
848848
self.del_test_dir(module_name, fname)
849+
850+
# @unittest.skip("skip")
def test_page_backup_with_alien_wal_segment(self):
    """PAGE backup must fail when the archive contains an alien WAL segment.

    Make two nodes with archiving, take an archive FULL backup from both,
    generate some WAL on both nodes, then move the latest archived WAL
    segment from the second node's archive into the first node's archive.
    A subsequent PAGE backup of the first node must fail (the segment
    belongs to a different timeline/history), and the backup status must
    be 'ERROR' — for both single-threaded and multi-threaded backup.
    """
    fname = self.id().split('.')[3]
    node = self.make_simple_node(
        base_dir="{0}/{1}/node".format(module_name, fname),
        initdb_params=['--data-checksums'],
        pg_options={'wal_level': 'replica'}
    )
    alien_node = self.make_simple_node(
        base_dir="{0}/{1}/alien_node".format(module_name, fname)
    )

    backup_dir = os.path.join(self.tmp_path, module_name, fname, 'backup')
    self.init_pb(backup_dir)
    self.add_instance(backup_dir, 'node', node)
    self.set_archiving(backup_dir, 'node', node)
    node.start()

    self.add_instance(backup_dir, 'alien_node', alien_node)
    self.set_archiving(backup_dir, 'alien_node', alien_node)
    alien_node.start()

    # FULL backups so that PAGE backups are possible afterwards.
    self.backup_node(backup_dir, 'node', node)
    self.backup_node(backup_dir, 'alien_node', alien_node)

    # Generate some WAL on the first node.
    node.safe_psql(
        "postgres",
        "create sequence t_seq; "
        "create table t_heap as select i as id, "
        "md5(i::text) as text, "
        "md5(repeat(i::text,10))::tsvector as tsvector "
        "from generate_series(0,1000) i;")

    # Generate some WAL on the alien node (in a separate database).
    alien_node.safe_psql(
        "postgres",
        "create database alien")

    alien_node.safe_psql(
        "alien",
        "create sequence t_seq; "
        "create table t_heap_alien as select i as id, "
        "md5(i::text) as text, "
        "md5(repeat(i::text,10))::tsvector as tsvector "
        "from generate_series(0,1000) i;")

    # Move the latest archived WAL segment from the alien node's archive
    # into the first node's archive.  '.backup' history files are skipped;
    # WAL segment names sort lexicographically in LSN order, so max() picks
    # the newest segment.  os.listdir already yields str, no conversion
    # needed.
    wals_dir = os.path.join(backup_dir, 'wal', 'alien_node')
    wals = [
        f for f in os.listdir(wals_dir)
        if os.path.isfile(os.path.join(wals_dir, f))
        and not f.endswith('.backup')
    ]
    segment = max(wals)
    source = os.path.join(wals_dir, segment)
    destination = os.path.join(backup_dir, 'wal', 'node', segment)
    os.rename(source, destination)

    # Single-thread PAGE backup: must fail on the alien segment.
    try:
        self.backup_node(
            backup_dir, 'node', node,
            backup_type='page')
        self.assertEqual(
            1, 0,
            "Expecting Error because of alien wal segment.\n "
            "Output: {0} \n CMD: {1}".format(
                self.output, self.cmd))
    except ProbackupException:
        # Expected: backup failed because of the alien WAL segment.
        pass

    self.assertEqual(
        'ERROR',
        self.show_pb(backup_dir, 'node')[1]['status'],
        'Single-thread PAGE backup should have STATUS "ERROR"')

    # Multi-thread PAGE backup: must fail the same way.
    try:
        self.backup_node(
            backup_dir, 'node', node,
            backup_type='page', options=["-j", "4"])
        self.assertEqual(
            1, 0,
            "Expecting Error because of alien wal segment.\n "
            "Output: {0} \n CMD: {1}".format(
                self.output, self.cmd))
    except ProbackupException:
        # Expected: backup failed because of the alien WAL segment.
        pass

    self.assertEqual(
        'ERROR',
        self.show_pb(backup_dir, 'node')[2]['status'],
        'Multi-thread PAGE backup should have STATUS "ERROR"')

    # Clean after yourself
    self.del_test_dir(module_name, fname)

0 commit comments

Comments
 (0)