This repository has been archived by the owner on Aug 29, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
dump.py
77 lines (68 loc) · 1.75 KB
/
dump.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
#!/usr/bin/env python2
import os
import subprocess
from conf import *
from common import *
from loggers import logger
def run_dump_schema(db, print_output=None):
    """Dump only the schema (DDL) of one database to a local plain SQL file.

    Produces ``{local_dir}/{database}.schema.sql`` via ``pg_dump --schema-only``.

    Args:
        db: mapping with at least the keys ``"database"`` (database name)
            and ``"local_dir"`` (directory that receives the dump file).
        print_output: optional; forwarded to ``run_cmd`` when given. Added
            for signature consistency with run_dump_database/run_upload_gcs.
            The ``None`` default keeps the original behavior (run_cmd is
            called without the keyword, so its own default applies).

    Returns:
        Whatever ``run_cmd`` returns for the pg_dump invocation.
    """
    cmd = """
    pg_dump \
        --verbose \
        --no-owner \
        --no-acl \
        --no-privileges \
        --disable-triggers \
        --no-security-labels \
        --create \
        --schema-only \
        --format plain \
        --file {LOCALDUMPDIR}/{DATABASENAME}.schema.sql {DATABASENAME}
    """.format(
        DATABASENAME=db.get("database"),
        LOCALDUMPDIR=db.get("local_dir")
    )
    # Preserve legacy behavior exactly when the caller does not opt in.
    if print_output is None:
        return run_cmd(cmd)
    return run_cmd(cmd, print_output=print_output)
def run_dump_database(db, print_output=True):
    """Dump the full database (schema + data) in pg_dump directory format.

    Writes the dump into ``{local_dir}/data`` using ``--format=d`` so it can
    later be uploaded as a tree of files (see run_upload_gcs).

    Args:
        db: mapping with at least the keys ``"database"`` (database name)
            and ``"local_dir"`` (directory that receives the ``data`` dir).
        print_output: forwarded to ``run_cmd``; controls whether command
            output is echoed.

    Returns:
        Whatever ``run_cmd`` returns for the pg_dump invocation.
    """
    cmd = """
    pg_dump \
        --verbose \
        --no-owner \
        --no-acl \
        --no-privileges \
        --disable-dollar-quoting \
        --no-security-labels \
        --no-tablespaces \
        --quote-all-identifiers \
        --disable-triggers \
        --format=d --create \
        --file {LOCALDUMPDIR}/data {DATABASENAME}
    """.format(
        DATABASENAME=db.get("database"),
        LOCALDUMPDIR=db.get("local_dir")
    )
    # BUG FIX: the parameter was previously ignored -- the call hardcoded
    # print_output=True. Forward the caller's choice instead (default is
    # still True, so existing callers see no change).
    return run_cmd(cmd, print_output=print_output)
def run_upload_gcs(db, print_output=True):
    """Upload the local dump directory to Google Cloud Storage via gsutil.

    Recursively copies everything under ``{local_dir}`` into
    ``{GCS_BUCKETNAME}/{gcs_dir}/data`` using parallel (``-m``) transfers.

    Args:
        db: mapping with at least the keys ``"gcs_dir"`` (destination path
            inside the bucket) and ``"local_dir"`` (source directory).
        print_output: forwarded to ``run_cmd``; controls whether command
            output is echoed.

    Returns:
        Whatever ``run_cmd`` returns for the gsutil invocation.
    """
    cmd = """
    gsutil -m \
        cp -r {LOCALDUMPDIR}/* \
        {GCS_BUCKETNAME}/{GCSDIR}/data
    """.format(
        GCSDIR=db.get("gcs_dir"),
        LOCALDUMPDIR=db.get("local_dir"),
        GCS_BUCKETNAME=GCS_BUCKETNAME
    )
    # BUG FIX: the parameter was previously ignored -- the call hardcoded
    # print_output=True. Forward the caller's choice instead (default is
    # still True, so existing callers see no change).
    return run_cmd(cmd, print_output=print_output)
def run():
    """Entry point: for every database assigned to this host, dump the
    schema and data locally, upload to GCS, and drop a completion flag."""
    # run_cmd("hostname") returns an indexable result whose first element is
    # the command's stdout; strip the trailing newline to get the bare name.
    hostname = run_cmd("hostname")[0].strip("\n")
    # NOTE(review): db_kind="MYSQL" looks inconsistent with this script's use
    # of pg_dump (PostgreSQL). Presumably this should select Postgres
    # databases -- confirm against databases_to_process() and conf before
    # changing, since the valid db_kind values are defined elsewhere.
    dbs = databases_to_process(hostname, db_kind="MYSQL")
    for db in dbs:
        dbname = db.get("database")
        logger.info("==> Processing %s" % dbname)
        # Helpers below come from common/conf (not visible in this file):
        # run_mkdirs presumably creates the local dump directories -- verify.
        run_mkdirs(db)
        run_dump_schema(db)
        run_dump_database(db, print_output=True)
        run_upload_gcs(db, print_output=True)
        # Presumably writes a marker object in GCS signalling the dump is
        # complete for downstream consumers -- confirm in common.
        run_make_gcs_flag(db, DUMP_DONE_FLAG)
        logger.info("<== Finished Processing %s" % dbname)


if __name__ == "__main__":
    run()