Skip to content
This repository was archived by the owner on May 16, 2019. It is now read-only.

Commit ec1caf4

Browse files
committed
Updated backup to use generated OpenBazaar folder
1 parent cd97a39 commit ec1caf4

1 file changed

Lines changed: 85 additions & 84 deletions

File tree

backupTool.py

Lines changed: 85 additions & 84 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
__author__ = 'marc'
2+
from constants import DATA_FOLDER
23
import csv
34
import db.datastore as db
45
import os
@@ -9,103 +10,103 @@
910
import time
1011

1112
# (table_name, [column, ...]) pairs for every table exported by backup()
# and re-imported by restore(); the column lists become the CSV headers.
TABLES = [
    ('hashmap', ['hash', 'filepath']),
    ('profile', ['id', 'serializedUserInfo']),
    ('listings', ['id', 'serializedListings']),
    ('keys', ['type', 'privkey', 'pubkey']),
    ('followers', ['id', 'serializedFollowers']),
    ('following', ['id', 'serializedFollowing']),
    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
    ('dht', ['keyword', 'id', 'value', 'birthday'])
]
2627

27-
# TODO: Add all files and directories to back up together with the database
28-
# Ex: ['file', 'path/to/file2', ...]
29-
FILES = []
30-
3128
def _getDatabase():
    """Return the filesystem path of the application's SQLite database."""
    datastore = db.Database()
    return datastore.DATABASE
3431

3532
def _exportDatabaseToCsv(tablesAndColumns):
    """Dump each given table to a CSV file in the 'backup' directory.

    tablesAndColumns: iterable of (table_name, [column, ...]) pairs,
        typically the module-level TABLES constant.

    Each table is written to backup/table_<name>.csv with the column
    names as the header row. Assumes the 'backup' directory already
    exists (backup() creates it before calling this).
    """
    dbFile = _getDatabase()
    with lite.connect(dbFile) as dbConnection:
        # Keep raw byte strings instead of decoding to unicode.
        dbConnection.text_factory = str
        cursor = dbConnection.cursor()
        for tableName, columns in tablesAndColumns:
            # Table/column names come from the trusted TABLES constant,
            # so string interpolation (not SQL parameters) is acceptable.
            query = "SELECT {0} FROM {1}".format(', '.join(columns), tableName)
            data = cursor.execute(query)
            filePath = os.path.join('backup', 'table_{0}.csv'.format(tableName))
            # 'wb' is the correct mode for the csv module on Python 2.
            with open(filePath, 'wb') as f:
                writer = csv.writer(f)
                writer.writerow(columns)
                writer.writerows(data)
50+
51+
def backup(tablesAndColumns=None, output=None):
    """Archive the database tables and the OpenBazaar data folder.

    tablesAndColumns: (table_name, [column, ...]) pairs to export;
        defaults to the module-level TABLES constant.
    output: name of the tar.gz archive to create; defaults to
        'backup_<YYYY-MM-DD>.tar.gz'.

    Side effect: changes the process working directory to DATA_FOLDER.
    """
    # Fix: the old default of None crashed inside _exportDatabaseToCsv;
    # fall back to the full table list instead.
    if tablesAndColumns is None:
        tablesAndColumns = TABLES

    os.chdir(DATA_FOLDER)

    # Remove any stale CSV dump and regenerate it from the database.
    if os.path.exists('backup'):
        shutil.rmtree('backup')
    os.makedirs('backup')
    _exportDatabaseToCsv(tablesAndColumns)

    # Archive everything in the data folder.
    files = os.listdir(DATA_FOLDER)
    if not output:
        output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
    archiveName = os.path.basename(output)
    with tarfile.open(output, 'w:gz') as tar:
        for f in files:
            # Fix: never tar the output archive into itself (a previous
            # run's archive with the same name would otherwise be included).
            if f == archiveName:
                continue
            tar.add(f)
    # Redundant tar.close() after the with-block removed: the context
    # manager already closed the archive.
7069

7170
def _importCsvToTable(fileName, deleteDataFirst=False):
    """Import a CSV dump produced by _exportDatabaseToCsv into its table.

    fileName: path to a 'table_<name>.csv' file; the table name is
        derived from the file name.
    deleteDataFirst: if True, empty the table before importing.

    Raises ValueError if fileName does not match the expected pattern.
    Rows whose length differs from the header are silently skipped,
    matching the original best-effort behavior.
    """
    # Fix: raw string and escaped dot; previously '.' matched any character.
    match = re.search(r'table_(\w+)\.csv', fileName)
    if match is None:
        # Fix: fail with a clear message instead of an AttributeError
        # from calling .group(1) on None.
        raise ValueError('Not a table CSV file: {0}'.format(fileName))
    tableName = match.group(1)

    dbFile = _getDatabase()
    with lite.connect(dbFile) as dbConnection:
        dbConnection.text_factory = str
        cursor = dbConnection.cursor()
        if deleteDataFirst:
            cursor.execute('DELETE FROM {0}'.format(tableName))
        # 'rb' is the correct mode for the csv module on Python 2.
        with open(fileName, 'rb') as f:
            reader = csv.reader(f)
            insertsql = None
            rowlen = 0
            for row in reader:
                if insertsql is None:
                    # First row is the header: build the INSERT statement
                    # with one placeholder per column.
                    placeholders = ', '.join('?' for _ in row)
                    insertsql = 'INSERT INTO {0} VALUES ({1})'.format(tableName, placeholders)
                    rowlen = len(row)
                elif len(row) == rowlen:
                    cursor.execute(insertsql, row)
9291

9392

9493
def restore(input, deleteTableDataFirst=False):
    """Restore files and database tables from a backup() archive.

    input: path to a tar.gz archive produced by backup().
    deleteTableDataFirst: if True, each table is emptied before its
        CSV data is re-imported.

    Side effect: changes the process working directory to DATA_FOLDER.
    """
    os.chdir(DATA_FOLDER)

    # Remove any stale CSV dump so only the archive's contents are imported.
    if os.path.exists('backup'):
        shutil.rmtree('backup')

    # SECURITY: tarfile.extractall() trusts member paths, so a hostile
    # archive could write outside DATA_FOLDER via '..' members. Only
    # restore archives created by this tool.
    with tarfile.open(input, 'r:gz') as tar:
        tar.extractall()

    # Re-import any table dumps that came out of the archive.
    if os.path.exists('backup'):
        for name in os.listdir('backup'):
            # os.path.join instead of 'backup/{0}'.format(...) keeps the
            # path correct on every platform.
            _importCsvToTable(os.path.join('backup', name), deleteTableDataFirst)
109110

110111
if __name__ == '__main__':
    # This module is meant to be imported, not run directly.
    # (Python 2 print statement — the whole file is Python 2.)
    print 'Backup tool works as a library.'

0 commit comments

Comments
 (0)