diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml deleted file mode 100644 index bf2bc965..00000000 --- a/.github/workflows/docker-image.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: Docker Image CI with Tag - -on: - push: - tags: - - '*' - -jobs: - push_to_registry: - name: Push tagged docker image to dockerhub - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@v2 - - - name: Log in to Docker Hub - uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@v4 - with: - images: ysde/docker-grafana-backup-tool - - - name: Extract metadata (tags, labels) for Docker (Slim version) - id: meta_slim - uses: docker/metadata-action@v4 - with: - images: ysde/docker-grafana-backup-tool - flavor: | - latest=auto - suffix=-slim,onlatest=true - - - name: Build and push Docker image - uses: docker/build-push-action@v4 - with: - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - - name: Build and push Docker image slim version - uses: docker/build-push-action@v4 - with: - file: ./DockerfileSlim - push: true - tags: ${{ steps.meta_slim.outputs.tags }} - labels: ${{ steps.meta_slim.outputs.labels }} diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml deleted file mode 100644 index a0de69bb..00000000 --- a/.github/workflows/python-package.yml +++ /dev/null @@ -1,36 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - -jobs: - build: - - runs-on: ubuntu-latest 
- strategy: - matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install flake8 - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --builtins unicode --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --builtins unicode --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml deleted file mode 100644 index 0fe5d22c..00000000 --- a/.github/workflows/python-publish.yml +++ /dev/null @@ -1,41 +0,0 @@ -# This workflow will upload a Python Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -name: Upload Python Package - -on: - release: - types: [created] - -jobs: - deploy: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.x' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine build - - name: Build a binary wheel and a source tarball - run: >- - python -m - build - --sdist - --wheel - --outdir dist/ - . 
- - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository-url: https://test.pypi.org/legacy/ - - name: Build and publish - if: startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@master - with: - password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/Dockerfile b/Dockerfile index 5eebebbf..f25e514e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,8 +14,8 @@ ADD . /opt/grafana-backup-tool RUN chmod -R a+r /opt/grafana-backup-tool \ && find /opt/grafana-backup-tool -type d -print0 | xargs -0 chmod a+rx -RUN pip3 --no-cache-dir install . +RUN pip3 --no-cache-dir install . --break-system-packages RUN chown -R 1337:1337 /opt/grafana-backup-tool USER 1337 -CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi' +CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! 
-z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi' diff --git a/grafana_backup/cli.py b/grafana_backup/cli.py index 051ed31b..54d7c3b4 100755 --- a/grafana_backup/cli.py +++ b/grafana_backup/cli.py @@ -14,6 +14,7 @@ Usage: grafana-backup save [--config=] [--components=<>] [--no-archive] grafana-backup restore [--config=] [--components=<>] + grafana-backup restore [--config=] [--components=<>] latest (To restore latest GCS file) grafana-backup delete [--config=] [--components=<>] grafana-backup tools [-h | --help] [--config=] [] [] grafana-backup [--config=] diff --git a/grafana_backup/conf/grafanaSettings.json b/grafana_backup/conf/grafanaSettings.json index 346423dc..454a7825 100644 --- a/grafana_backup/conf/grafanaSettings.json +++ b/grafana_backup/conf/grafanaSettings.json @@ -4,8 +4,7 @@ "verify_ssl": true, "api_health_check": true, "api_auth_check": true, - "backup_dir": "_OUTPUT_", - "backup_file_format": "%Y%m%d%H%M", + "backup_file_format": "%b_%Y_Week%V", "uid_dashboard_slug_suffix": false, "pretty_print": false }, diff --git a/grafana_backup/dashboardApi.py b/grafana_backup/dashboardApi.py index 8fbdaf89..2dead90d 100755 --- a/grafana_backup/dashboardApi.py +++ b/grafana_backup/dashboardApi.py @@ -515,7 +515,12 @@ def send_grafana_get(url, http_get_headers, verify_ssl, client_cert, debug): verify=verify_ssl, cert=client_cert) if debug: log_response(r) - return (r.status_code, r.json()) + try: + json_data = r.json() + except json.decoder.JSONDecodeError as e: + print(f"Failed to parse JSON response: {e}") + json_data = r.text + return (r.status_code, json_data) def send_grafana_post(url, json_payload, http_post_headers, verify_ssl=False, client_cert=None, debug=True): diff --git a/grafana_backup/gcs_download.py b/grafana_backup/gcs_download.py index 492713fc..cd72407a 100644 --- a/grafana_backup/gcs_download.py +++ b/grafana_backup/gcs_download.py @@ -2,17
+2,31 @@ import io from google.cloud import storage +def get_latest_file(bucket, prefix=''): + # List objects in the bucket with the specified prefix + blobs = bucket.list_blobs(prefix=prefix) + + # Get the latest file based on the last modification time + latest_blob = None + for blob in blobs: + if not latest_blob or blob.updated > latest_blob.updated: + latest_blob = blob + + return latest_blob def main(args, settings): arg_archive_file = args.get('<archive_file>', None) bucket_name = settings.get('GCS_BUCKET_NAME') - + gcs_backup_dir = settings.get('GCS_BACKUP_DIR') storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) - - blob = bucket.blob(arg_archive_file) - + if arg_archive_file.lower() == "latest": + blob = get_latest_file(bucket, gcs_backup_dir) + else: + if gcs_backup_dir: + arg_archive_file = '{0}/{1}'.format(gcs_backup_dir, arg_archive_file) + blob = bucket.blob(arg_archive_file) try: gcs_data = io.BytesIO(blob.download_as_bytes()) print("Download from GCS: '{0}' was successful".format(bucket_name)) diff --git a/grafana_backup/gcs_upload.py b/grafana_backup/gcs_upload.py index 970676ad..8afd8839 100644 --- a/grafana_backup/gcs_upload.py +++ b/grafana_backup/gcs_upload.py @@ -5,12 +5,17 @@ def main(args, settings): bucket_name = settings.get('GCS_BUCKET_NAME') backup_dir = settings.get('BACKUP_DIR') + gcs_backup_dir = settings.get('GCS_BACKUP_DIR') timestamp = settings.get('TIMESTAMP') storage_client = storage.Client() - gcs_file_name = '{0}.tar.gz'.format(timestamp) - archive_file = '{0}/{1}'.format(backup_dir, gcs_file_name) + file_name = '{0}.tar.gz'.format(timestamp) + archive_file = '{0}/{1}'.format(backup_dir, file_name) + if gcs_backup_dir: + gcs_file_name = '{0}/{1}'.format(gcs_backup_dir, file_name) + else: + gcs_file_name = file_name try: bucket = storage_client.bucket(bucket_name) diff --git a/grafana_backup/grafanaSettings.py b/grafana_backup/grafanaSettings.py index ea2708dc..6600abde 100755 --- a/grafana_backup/grafanaSettings.py
+++ b/grafana_backup/grafanaSettings.py @@ -43,6 +43,7 @@ def main(config_path): # Cloud storage settings - GCP gcp_config = config.get('gcp', {}) gcs_bucket_name = gcp_config.get('gcs_bucket_name', '') + gcs_backup_dir = gcp_config.get('gcs_backup_dir', '') google_application_credentials = gcp_config.get('google_application_credentials', '') influxdb_measurement = config.get('influxdb', {}).get('measurement', 'grafana_backup') @@ -72,6 +73,7 @@ def main(config_path): AZURE_STORAGE_CONNECTION_STRING = os.getenv('AZURE_STORAGE_CONNECTION_STRING', azure_storage_connection_string) GCS_BUCKET_NAME = os.getenv('GCS_BUCKET_NAME', gcs_bucket_name) + GCS_BACKUP_DIR = os.getenv('GCS_BACKUP_DIR', gcs_backup_dir) if not os.getenv('GOOGLE_APPLICATION_CREDENTIALS') and google_application_credentials: os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = google_application_credentials @@ -146,6 +148,10 @@ def main(config_path): HTTP_GET_HEADERS_BASIC_AUTH.update({'Authorization': 'Basic {0}'.format(GRAFANA_BASIC_AUTH)}) HTTP_POST_HEADERS_BASIC_AUTH = HTTP_POST_HEADERS.copy() HTTP_POST_HEADERS_BASIC_AUTH.update({'Authorization': 'Basic {0}'.format(GRAFANA_BASIC_AUTH)}) + if not HTTP_GET_HEADERS: + config_dict['HTTP_GET_HEADERS'] = HTTP_GET_HEADERS_BASIC_AUTH + if len(HTTP_POST_HEADERS.keys()) == 1: + config_dict['HTTP_POST_HEADERS'] = {**HTTP_POST_HEADERS, **HTTP_GET_HEADERS_BASIC_AUTH} else: HTTP_GET_HEADERS_BASIC_AUTH = None HTTP_POST_HEADERS_BASIC_AUTH = None @@ -163,8 +169,6 @@ def main(config_path): config_dict['PRETTY_PRINT'] = PRETTY_PRINT config_dict['UID_DASHBOARD_SLUG_SUFFIX'] = UID_DASHBOARD_SLUG_SUFFIX config_dict['EXTRA_HEADERS'] = EXTRA_HEADERS - config_dict['HTTP_GET_HEADERS'] = HTTP_GET_HEADERS - config_dict['HTTP_POST_HEADERS'] = HTTP_POST_HEADERS config_dict['HTTP_GET_HEADERS_BASIC_AUTH'] = HTTP_GET_HEADERS_BASIC_AUTH config_dict['HTTP_POST_HEADERS_BASIC_AUTH'] = HTTP_POST_HEADERS_BASIC_AUTH config_dict['TIMESTAMP'] = TIMESTAMP @@ -177,6 +181,7 @@ def 
main(config_path): config_dict['AZURE_STORAGE_CONTAINER_NAME'] = AZURE_STORAGE_CONTAINER_NAME config_dict['AZURE_STORAGE_CONNECTION_STRING'] = AZURE_STORAGE_CONNECTION_STRING config_dict['GCS_BUCKET_NAME'] = GCS_BUCKET_NAME + config_dict['GCS_BACKUP_DIR'] = GCS_BACKUP_DIR config_dict['INFLUXDB_MEASUREMENT'] = INFLUXDB_MEASUREMENT config_dict['INFLUXDB_HOST'] = INFLUXDB_HOST config_dict['INFLUXDB_PORT'] = INFLUXDB_PORT diff --git a/grafana_backup/restore.py b/grafana_backup/restore.py index cc805e4c..44bdd163 100755 --- a/grafana_backup/restore.py +++ b/grafana_backup/restore.py @@ -83,22 +83,29 @@ def open_compressed_backup(compressed_backup): restore_functions = collections.OrderedDict() # Folders must be restored before Library-Elements restore_functions['folder'] = create_folder - restore_functions['datasource'] = create_datasource + + # Disable restoration of datasources since these are loaded from file + # The backups lack sensitive data like HTTP headers which are required to pull data + # restore_functions['datasource'] = create_datasource + # Library-Elements must be restored before dashboards restore_functions['library_element'] = create_library_element restore_functions['dashboard'] = create_dashboard restore_functions['alert_channel'] = create_alert_channel restore_functions['organization'] = create_org - restore_functions['user'] = create_user + # Disable restoration of users due to error when Google users + # attempt to login again after restoration + # restore_functions['user'] = create_user restore_functions['snapshot'] = create_snapshot restore_functions['annotation'] = create_annotation restore_functions['team'] = create_team restore_functions['team_member'] = create_team_member restore_functions['folder_permission'] = update_folder_permissions restore_functions['alert_rule'] = create_alert_rule - restore_functions['contact_point'] = create_contact_point + # Disable restoration of contact points since sensitive data such as webhook URL is redacted 
+ # restore_functions['contact_point'] = create_contact_point # There are some issues of notification policy restore api, it will lock the notification policy page and cannot be edited. - # restore_functions['notification_policys'] = update_notification_policy + restore_functions['notification_policys'] = update_notification_policy if sys.version_info >= (3,): with tempfile.TemporaryDirectory() as tmpdir: