Skip to content

Commit 1ccc195

Browse files
committed
added GCP Cloud Function for Cloud SQL export backups
1 parent 3d24281 commit 1ccc195

File tree

4 files changed

+149
-0
lines changed

4 files changed

+149
-0
lines changed
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#
2+
# Author: Hari Sekhon
3+
# Date: 2020-10-16 11:44:51 +0100 (Fri, 16 Oct 2020)
4+
#
5+
# vim:ts=4:sts=4:sw=4:et
6+
#
7+
# https://github.com/HariSekhon/pytools
8+
#
9+
# License: see accompanying Hari Sekhon LICENSE file
10+
#
11+
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help steer this or other code I publish
12+
#
13+
# https://www.linkedin.com/in/HariSekhon
14+
#
15+
16+
# see also: massive generic .gcloudignore at https://github.com/HariSekhon/DevOps-Bash-tools/blob/master/.gcloudignore
17+
18+
deploy.sh
19+
test/
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
#!/usr/bin/env bash
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2020-10-16 10:12:26 +0100 (Fri, 16 Oct 2020)
#
# Deploys the Cloud SQL export backup Cloud Function, triggered via PubSub
# (the function source lives alongside this script).

# fail fast on errors, unset variables, and pipeline failures
set -euo pipefail
# enable shell command tracing when DEBUG is set to any non-empty value
[ -n "${DEBUG:-}" ] && set -x
# absolute path to the directory containing this script, so it can be run from anywhere
srcdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

cd "$srcdir"

# deploy into the currently configured gcloud project
project="$(gcloud config list --format="value(core.project)")"
region="europe-west3" # not available in all regions yet

# Cloud Function name and the PubSub topic that triggers it
name="cloud-sql-backups"
topic="cloud-sql-backups"
# dedicated service account the function runs as (must have Cloud SQL + GCS export permissions)
service_account="cloud-function-sql-backup@$project.iam.gserviceaccount.com"
vpc_connector="cloud-sql-backups" # for triggering across regions since Cloud Function may not be in the same region as the Cloud SQL instances to back up

# deploy with minimal memory (128MB) since the function only issues an API call,
# entry point 'main' matches the function defined in main.py
gcloud functions deploy "$name" --trigger-topic "$topic" --runtime python37 --entry-point main --service-account "$service_account" --region "$region" --timeout 60 --vpc-connector "$vpc_connector" --memory 128MB
Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
#!/usr/bin/env python
2+
# coding=utf-8
3+
# vim:ts=4:sts=4:sw=4:et
4+
#
5+
# Author: Hari Sekhon
6+
# Date: 2020-10-14 15:29:38 +0100 (Wed, 14 Oct 2020)
7+
#
8+
# https://github.com/HariSekhon/pytools
9+
#
10+
# License: see accompanying Hari Sekhon LICENSE file
11+
#
12+
# If you're using my code you're welcome to connect with me on LinkedIn
13+
# and optionally send me feedback to help steer this or other code I publish
14+
#
15+
# https://www.linkedin.com/in/HariSekhon
16+
#
17+
18+
"""
19+
20+
GCP Cloud Function to export a given Cloud SQL database to GCS via PubSub notifications from Cloud Scheduler
21+
22+
Solution Documentation:
23+
24+
https://cloud.google.com/solutions/scheduling-cloud-sql-database-exports-using-cloud-scheduler
25+
26+
GCP Cloud PubSub should be sent payloads like this by Cloud Scheduler (replacing the env vars with your literals):
27+
28+
{
29+
"project": "${GOOGLE_PROJECT_ID}",
30+
"instance": "${SQL_INSTANCE_HOST}",
31+
"database": "${DATABASE}",
32+
"bucket": "${BUCKET_NAME}"
33+
}
34+
35+
Tested on GCP Cloud Functions with Python 3.7
36+
37+
"""
38+
39+
# https://cloud.google.com/functions/docs/writing/specifying-dependencies-python
40+
41+
# Code below is based on solution sample code from the link above
42+
43+
#import os
44+
import base64
45+
import logging
46+
import json
47+
48+
from datetime import datetime
49+
from httplib2 import Http
50+
51+
from googleapiclient import discovery
52+
from googleapiclient.errors import HttpError
53+
from oauth2client.client import GoogleCredentials
54+
55+
56+
# pylint: disable=unused-argument
def main(event, context):
    """Cloud Function entry point: trigger a Cloud SQL export of one database to GCS.

    Triggered by a PubSub message (sent by Cloud Scheduler) whose base64-encoded
    'data' payload is a JSON object with keys:

        project  - GCP project ID containing the Cloud SQL instance
        instance - Cloud SQL instance name
        database - database name to export
        bucket   - destination GCS bucket (with or without a leading 'gs://')

    Args:
        event (dict): PubSub event; event['data'] is the base64-encoded JSON payload.
        context (google.cloud.functions.Context): event metadata (unused).

    Returns:
        None - success/failure is reported via logging only; HttpError from the
        SQL Admin API is caught and logged rather than raised, so a failed export
        does not mark the function invocation as failed.
    """
    # NOTE: debug-level logs don't appear in the function details logs tab even
    # with a DEBUG=1 runtime env var and the logs severity filter set to >= DEBUG,
    # so any ad-hoc debugging of event/context should use logging.info()
    data = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    credentials = GoogleCredentials.get_application_default()

    service = discovery.build('sqladmin', 'v1beta4', http=credentials.authorize(Http()), cache_discovery=False)

    project = data['project']
    instance = data['instance']
    database = data['database']
    bucket = data['bucket']
    # Strip an optional 'gs://' scheme prefix from the bucket name.
    # NB: bucket.lstrip('gs://') would be wrong here - str.lstrip() strips any
    # leading characters from the set {'g','s',':','/'}, so it would mangle
    # bucket names that start with those letters (e.g. 'gsuite-backups' -> 'uite-backups')
    if bucket.startswith('gs://'):
        bucket = bucket[len('gs://'):]
    timestamp = datetime.now().strftime("%Y-%m-%d_%H%M")

    # .gz extension so it is auto-compressed, saving storage space + billing
    backup_uri = "gs://{bucket}/backups/sql/{instance}--{database}--{timestamp}.sql.gz".format(
        bucket=bucket,
        instance=instance,
        database=database,
        timestamp=timestamp)

    # request body per the SQL Admin API instances.export schema
    instances_export_request_body = {
        "exportContext": {
            "kind": "sql#exportContext",
            "fileType": "SQL",
            "uri": backup_uri,
            "databases": [
                database
            ]
        }
    }

    try:
        request = service.instances().export(
            project=project,
            instance=instance,
            body=instances_export_request_body
        )
        response = request.execute()
    except HttpError as err:
        # log-and-continue: PubSub would otherwise retry the invocation on failure
        logging.error("Backup FAILED. Reason: %s", err)
    else:
        logging.info("Backup task status: %s", response)
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# https://cloud.google.com/functions/docs/writing/specifying-dependencies-python
2+
google-api-python-client
3+
oauth2client

0 commit comments

Comments
 (0)