|
| 1 | +#!/usr/bin/env python |
| 2 | +# coding=utf-8 |
| 3 | +# vim:ts=4:sts=4:sw=4:et |
| 4 | +# |
| 5 | +# Author: Hari Sekhon |
| 6 | +# Date: 2020-10-14 15:29:38 +0100 (Wed, 14 Oct 2020) |
| 7 | +# |
| 8 | +# https://github.com/HariSekhon/pytools |
| 9 | +# |
| 10 | +# License: see accompanying Hari Sekhon LICENSE file |
| 11 | +# |
| 12 | +# If you're using my code you're welcome to connect with me on LinkedIn |
| 13 | +# and optionally send me feedback to help steer this or other code I publish |
| 14 | +# |
| 15 | +# https://www.linkedin.com/in/HariSekhon |
| 16 | +# |
| 17 | + |
| 18 | +""" |
| 19 | +
|
| 20 | +GCP Cloud Function to export a given Cloud SQL database to GCS via PubSub notifications from Cloud Scheduler |
| 21 | +
|
| 22 | +Solution Documentation: |
| 23 | +
|
| 24 | + https://cloud.google.com/solutions/scheduling-cloud-sql-database-exports-using-cloud-scheduler |
| 25 | +
|
| 26 | +GCP Cloud PubSub should be sent payloads like this by Cloud Scheduler (replacing the env vars with your literals): |
| 27 | +
|
| 28 | +{ |
| 29 | + "project": "${GOOGLE_PROJECT_ID}", |
| 30 | + "instance": "${SQL_INSTANCE_HOST}", |
| 31 | + "database": "${DATABASE}", |
| 32 | + "bucket": "${BUCKET_NAME}" |
| 33 | +} |
| 34 | +
|
| 35 | +Tested on GCP Cloud Functions with Python 3.7 |
| 36 | +
|
| 37 | +""" |
| 38 | + |
| 39 | +# https://cloud.google.com/functions/docs/writing/specifying-dependencies-python |
| 40 | + |
| 41 | +# Code below is based on solution sample code from the link above |
| 42 | + |
| 43 | +#import os |
| 44 | +import base64 |
| 45 | +import logging |
| 46 | +import json |
| 47 | + |
| 48 | +from datetime import datetime |
| 49 | +from httplib2 import Http |
| 50 | + |
| 51 | +from googleapiclient import discovery |
| 52 | +from googleapiclient.errors import HttpError |
| 53 | +from oauth2client.client import GoogleCredentials |
| 54 | + |
| 55 | + |
# pylint: disable=unused-argument
def main(event, context):
    """GCP Cloud Function entry point: export a Cloud SQL database to GCS.

    Triggered by a PubSub message (e.g. published by Cloud Scheduler) whose
    base64-encoded ``data`` field is a JSON payload with these keys:

        project   - GCP project id containing the Cloud SQL instance
        instance  - Cloud SQL instance name
        database  - name of the database to export
        bucket    - destination GCS bucket (with or without a gs:// prefix)

    Args:
        event (dict): PubSub event; ``event['data']`` holds the base64 payload
        context: event metadata supplied by the Cloud Functions runtime (unused)

    Side effects:
        Submits a Cloud SQL Admin API instances.export operation and logs the
        outcome; returns nothing. API failures are logged, not re-raised.
    """
    # NOTE: debug-level logs don't appear in the function details logs tab
    # even with a DEBUG=1 runtime env var and severity filter >= DEBUG set,
    # so any diagnostics added here should use logging.info()
    data = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    credentials = GoogleCredentials.get_application_default()

    # cache_discovery=False avoids discovery file-cache warnings under Cloud Functions
    service = discovery.build('sqladmin', 'v1beta4', http=credentials.authorize(Http()), cache_discovery=False)

    project = data['project']
    bucket = data['bucket']
    # Strip an optional "gs://" scheme prefix from the bucket name.
    # The previous bucket.lstrip('gs://') was a bug: lstrip() removes any
    # leading run of the characters {g, s, :, /}, which would mangle bucket
    # names such as "gs-backups" or "staging-db".
    if bucket.startswith('gs://'):
        bucket = bucket[len('gs://'):]
    instance = data['instance']
    database = data['database']
    timestamp = datetime.now().strftime("%Y-%m-%d_%H%M")

    # .gz extension so it is auto-compressed, saving storage space + billing
    backup_uri = "gs://{bucket}/backups/sql/{instance}--{database}--{timestamp}.sql.gz".format(
        bucket=bucket,
        instance=instance,
        database=database,
        timestamp=timestamp)

    instances_export_request_body = {
        "exportContext": {
            "kind": "sql#exportContext",
            "fileType": "SQL",
            "uri": backup_uri,
            "databases": [
                database
            ]
        }
    }

    try:
        request = service.instances().export(
            project=project,
            instance=instance,
            body=instances_export_request_body
        )
        response = request.execute()
    except HttpError as err:
        # best-effort: log and swallow so the function doesn't retry-storm
        logging.error("Backup FAILED. Reason: %s", err)
    else:
        logging.info("Backup task status: %s", response)
0 commit comments