50 changes: 0 additions & 50 deletions .github/workflows/docker-image.yml

This file was deleted.

36 changes: 0 additions & 36 deletions .github/workflows/python-package.yml

This file was deleted.

41 changes: 0 additions & 41 deletions .github/workflows/python-publish.yml

This file was deleted.

4 changes: 2 additions & 2 deletions Dockerfile
@@ -14,8 +14,8 @@ ADD . /opt/grafana-backup-tool
RUN chmod -R a+r /opt/grafana-backup-tool \
&& find /opt/grafana-backup-tool -type d -print0 | xargs -0 chmod a+rx

RUN pip3 --no-cache-dir install .
RUN pip3 --no-cache-dir install . --break-system-packages

RUN chown -R 1337:1337 /opt/grafana-backup-tool
USER 1337
CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi'
CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! -z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi'
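Note: the updated CMD, unrolled for readability (an equivalent sketch by the editor, not part of the diff):

    if [ "$RESTORE" = true ]; then
        if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! -z "$GCS_BUCKET_NAME" ]; then
            grafana-backup restore $ARCHIVE_FILE            # any cloud storage configured: pass the object name as given
        else
            grafana-backup restore _OUTPUT_/$ARCHIVE_FILE   # otherwise: read the local archive under _OUTPUT_
        fi
    else
        grafana-backup save
    fi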
1 change: 1 addition & 0 deletions grafana_backup/cli.py
@@ -14,6 +14,7 @@
Usage:
    grafana-backup save [--config=<filename>] [--components=<>] [--no-archive]
    grafana-backup restore [--config=<filename>] [--components=<>] <archive_file>
    grafana-backup restore [--config=<filename>] [--components=<>] latest (to restore the latest GCS file)
    grafana-backup delete [--config=<filename>] [--components=<>]
    grafana-backup tools [-h | --help] [--config=<filename>] [<optional-command>] [<optional-argument>]
    grafana-backup [--config=<filename>]
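For example, with a GCS bucket configured, the new `latest` literal is used as follows (config path illustrative):

    grafana-backup restore --config=/etc/grafana-backup/grafanaSettings.json latest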
3 changes: 1 addition & 2 deletions grafana_backup/conf/grafanaSettings.json
@@ -4,8 +4,7 @@
"verify_ssl": true,
"api_health_check": true,
"api_auth_check": true,
"backup_dir": "_OUTPUT_",
"backup_file_format": "%Y%m%d%H%M",
"backup_file_format": "%b_%Y_Week%V",
"uid_dashboard_slug_suffix": false,
"pretty_print": false
},
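Note: a quick sketch of what the new strftime pattern produces (dates illustrative). Because %V is the ISO week number, all backups taken in the same ISO week share one filename, so the archive effectively rotates weekly:

    from datetime import datetime

    # %V is the ISO week number: backups within one ISO week reuse the same name.
    print(datetime(2024, 2, 14).strftime('%b_%Y_Week%V'))   # Feb_2024_Week07
    # Caveat: %Y is the calendar year while %V follows the ISO week-numbering
    # year, so the two can disagree around New Year.
    print(datetime(2024, 12, 30).strftime('%b_%Y_Week%V'))  # Dec_2024_Week01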
7 changes: 6 additions & 1 deletion grafana_backup/dashboardApi.py
@@ -515,7 +515,12 @@ def send_grafana_get(url, http_get_headers, verify_ssl, client_cert, debug):
                     verify=verify_ssl, cert=client_cert)
    if debug:
        log_response(r)
    return (r.status_code, r.json())
    try:
        json_data = r.json()
    except json.decoder.JSONDecodeError as e:
        print("Failed to parse JSON response: {0}".format(e))
        json_data = r.text
    return (r.status_code, json_data)
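Note: callers of send_grafana_get may now receive a raw string body instead of parsed JSON; a defensive pattern on the caller side could look like this (a sketch only, variable names assumed from the signature above):

    status, data = send_grafana_get(url, http_get_headers, verify_ssl, client_cert, debug)
    if isinstance(data, str):
        # Non-JSON body (e.g. an HTML error page from a proxy): log it instead of indexing into it
        print("Unexpected non-JSON response ({0}): {1}".format(status, data[:200]))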


def send_grafana_post(url, json_payload, http_post_headers, verify_ssl=False, client_cert=None, debug=True):
22 changes: 18 additions & 4 deletions grafana_backup/gcs_download.py
@@ -2,17 +2,31 @@
import io
from google.cloud import storage

def get_latest_file(bucket, prefix=''):
    # List objects in the bucket with the specified prefix
    blobs = bucket.list_blobs(prefix=prefix)

    # Get the latest file based on the last modification time
    latest_blob = None
    for blob in blobs:
        if not latest_blob or blob.updated > latest_blob.updated:
            latest_blob = blob

    return latest_blob
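Note: the loop is equivalent to a single max() call, should a more compact form be preferred (sketch):

    # Same selection; default=None covers an empty prefix/bucket.
    latest_blob = max(bucket.list_blobs(prefix=prefix), key=lambda b: b.updated, default=None)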

def main(args, settings):
    arg_archive_file = args.get('<archive_file>', None)

    bucket_name = settings.get('GCS_BUCKET_NAME')

    gcs_backup_dir = settings.get('GCS_BACKUP_DIR')
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)

    blob = bucket.blob(arg_archive_file)

    if arg_archive_file.lower() == "latest":
        blob = get_latest_file(bucket, gcs_backup_dir)
    else:
        if gcs_backup_dir:
            arg_archive_file = '{0}/{1}'.format(gcs_backup_dir, arg_archive_file)
        blob = bucket.blob(arg_archive_file)
    try:
        gcs_data = io.BytesIO(blob.download_as_bytes())
        print("Download from GCS: '{0}' was successful".format(bucket_name))
9 changes: 7 additions & 2 deletions grafana_backup/gcs_upload.py
@@ -5,12 +5,17 @@
def main(args, settings):
    bucket_name = settings.get('GCS_BUCKET_NAME')
    backup_dir = settings.get('BACKUP_DIR')
    gcs_backup_dir = settings.get('GCS_BACKUP_DIR')
    timestamp = settings.get('TIMESTAMP')

    storage_client = storage.Client()

    gcs_file_name = '{0}.tar.gz'.format(timestamp)
    archive_file = '{0}/{1}'.format(backup_dir, gcs_file_name)
    file_name = '{0}.tar.gz'.format(timestamp)
    archive_file = '{0}/{1}'.format(backup_dir, file_name)
    if gcs_backup_dir:
        gcs_file_name = '{0}/{1}'.format(gcs_backup_dir, file_name)
    else:
        gcs_file_name = file_name

    try:
        bucket = storage_client.bucket(bucket_name)
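Note: for illustration, if TIMESTAMP resolves to 'Feb_2024_Week07' (per the new backup_file_format) and GCS_BACKUP_DIR='backups' (hypothetical values):

    # GCS_BACKUP_DIR set:   gcs_file_name == 'backups/Feb_2024_Week07.tar.gz'
    # GCS_BACKUP_DIR unset: gcs_file_name == 'Feb_2024_Week07.tar.gz'
    # The local archive path is '<BACKUP_DIR>/Feb_2024_Week07.tar.gz' either way.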
9 changes: 7 additions & 2 deletions grafana_backup/grafanaSettings.py
@@ -43,6 +43,7 @@ def main(config_path):
    # Cloud storage settings - GCP
    gcp_config = config.get('gcp', {})
    gcs_bucket_name = gcp_config.get('gcs_bucket_name', '')
    gcs_backup_dir = gcp_config.get('gcs_backup_dir', '')
    google_application_credentials = gcp_config.get('google_application_credentials', '')

    influxdb_measurement = config.get('influxdb', {}).get('measurement', 'grafana_backup')
@@ -72,6 +73,7 @@
    AZURE_STORAGE_CONNECTION_STRING = os.getenv('AZURE_STORAGE_CONNECTION_STRING', azure_storage_connection_string)

    GCS_BUCKET_NAME = os.getenv('GCS_BUCKET_NAME', gcs_bucket_name)
    GCS_BACKUP_DIR = os.getenv('GCS_BACKUP_DIR', gcs_backup_dir)
    if not os.getenv('GOOGLE_APPLICATION_CREDENTIALS') and google_application_credentials:
        os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = google_application_credentials

@@ -146,6 +148,10 @@ def main(config_path):
        HTTP_GET_HEADERS_BASIC_AUTH.update({'Authorization': 'Basic {0}'.format(GRAFANA_BASIC_AUTH)})
        HTTP_POST_HEADERS_BASIC_AUTH = HTTP_POST_HEADERS.copy()
        HTTP_POST_HEADERS_BASIC_AUTH.update({'Authorization': 'Basic {0}'.format(GRAFANA_BASIC_AUTH)})
        if not HTTP_GET_HEADERS:
            config_dict['HTTP_GET_HEADERS'] = HTTP_GET_HEADERS_BASIC_AUTH
        if len(HTTP_POST_HEADERS.keys()) == 1:
            config_dict['HTTP_POST_HEADERS'] = {**HTTP_POST_HEADERS, **HTTP_GET_HEADERS_BASIC_AUTH}
    else:
        HTTP_GET_HEADERS_BASIC_AUTH = None
        HTTP_POST_HEADERS_BASIC_AUTH = None
@@ -163,8 +169,6 @@
    config_dict['PRETTY_PRINT'] = PRETTY_PRINT
    config_dict['UID_DASHBOARD_SLUG_SUFFIX'] = UID_DASHBOARD_SLUG_SUFFIX
    config_dict['EXTRA_HEADERS'] = EXTRA_HEADERS
    config_dict['HTTP_GET_HEADERS'] = HTTP_GET_HEADERS
    config_dict['HTTP_POST_HEADERS'] = HTTP_POST_HEADERS
    config_dict['HTTP_GET_HEADERS_BASIC_AUTH'] = HTTP_GET_HEADERS_BASIC_AUTH
    config_dict['HTTP_POST_HEADERS_BASIC_AUTH'] = HTTP_POST_HEADERS_BASIC_AUTH
    config_dict['TIMESTAMP'] = TIMESTAMP
Expand All @@ -177,6 +181,7 @@ def main(config_path):
    config_dict['AZURE_STORAGE_CONTAINER_NAME'] = AZURE_STORAGE_CONTAINER_NAME
    config_dict['AZURE_STORAGE_CONNECTION_STRING'] = AZURE_STORAGE_CONNECTION_STRING
    config_dict['GCS_BUCKET_NAME'] = GCS_BUCKET_NAME
    config_dict['GCS_BACKUP_DIR'] = GCS_BACKUP_DIR
    config_dict['INFLUXDB_MEASUREMENT'] = INFLUXDB_MEASUREMENT
    config_dict['INFLUXDB_HOST'] = INFLUXDB_HOST
    config_dict['INFLUXDB_PORT'] = INFLUXDB_PORT
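Note: a minimal config sketch wiring the new key (key names as read above; values illustrative); the GCS_BACKUP_DIR environment variable takes precedence over the file value:

    "gcp": {
        "gcs_bucket_name": "my-grafana-backups",
        "gcs_backup_dir": "backups",
        "google_application_credentials": "/path/to/service-account.json"
    },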
15 changes: 11 additions & 4 deletions grafana_backup/restore.py
@@ -83,22 +83,29 @@ def open_compressed_backup(compressed_backup):
    restore_functions = collections.OrderedDict()
    # Folders must be restored before Library-Elements
    restore_functions['folder'] = create_folder
    restore_functions['datasource'] = create_datasource

    # Disable restoration of datasources since these are loaded from file.
    # The backups lack sensitive data like HTTP headers, which are required to pull data.
    # restore_functions['datasource'] = create_datasource

    # Library-Elements must be restored before dashboards
    restore_functions['library_element'] = create_library_element
    restore_functions['dashboard'] = create_dashboard
    restore_functions['alert_channel'] = create_alert_channel
    restore_functions['organization'] = create_org
    restore_functions['user'] = create_user
    # Disable restoration of users due to an error when Google users
    # attempt to log in again after restoration.
    # restore_functions['user'] = create_user
    restore_functions['snapshot'] = create_snapshot
    restore_functions['annotation'] = create_annotation
    restore_functions['team'] = create_team
    restore_functions['team_member'] = create_team_member
    restore_functions['folder_permission'] = update_folder_permissions
    restore_functions['alert_rule'] = create_alert_rule
    restore_functions['contact_point'] = create_contact_point
    # Disable restoration of contact points since sensitive data such as webhook URLs is redacted.
    # restore_functions['contact_point'] = create_contact_point
    # There are some issues with the notification policy restore API: it can lock the notification policy page so it cannot be edited.
    # restore_functions['notification_policys'] = update_notification_policy
    restore_functions['notification_policys'] = update_notification_policy

    if sys.version_info >= (3,):
        with tempfile.TemporaryDirectory() as tmpdir:
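Note: with datasource, user, and contact-point restores commented out, selective restores remain possible via the existing --components flag (component tokens illustrative; see the usage text in cli.py for the exact names):

    grafana-backup restore --components=folders,dashboards my_backup.tar.gz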