diff --git a/Dockerfile b/Dockerfile index 4bed155..7ca6946 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,12 @@ ADD . /opt/grafana-backup-tool RUN chmod -R a+r /opt/grafana-backup-tool \ && find /opt/grafana-backup-tool -type d -print0 | xargs -0 chmod a+rx -RUN pip3 --no-cache-dir install . +# Create a Python virtual environment and put it on PATH +RUN python3 -m venv venv +ENV PATH="/opt/grafana-backup-tool/venv/bin:$PATH" + +# Install Python dependencies inside the virtual environment +RUN /opt/grafana-backup-tool/venv/bin/pip3 install --no-cache-dir . RUN chown -R 1337:1337 /opt/grafana-backup-tool USER 1337 diff --git a/DockerfileSlim b/DockerfileSlim index c7b9533..c674f57 100644 --- a/DockerfileSlim +++ b/DockerfileSlim @@ -32,9 +32,12 @@ RUN chmod -R a+r /opt/grafana-backup-tool \ RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories \ && apk add --no-cache --virtual build-deps ${DEV_PACKAGES} \ && apk add --no-cache ${PACKAGES} \ + && python3 -m venv venv \ + && . venv/bin/activate \ && pip3 --no-cache-dir install . \ + && deactivate \ && chown -R ${UID}:${GID} /opt/grafana-backup-tool \ && apk del build-deps USER ${UID} -CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! -z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi' +CMD sh -c '. venv/bin/activate && if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! 
-z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi' diff --git a/README.md b/README.md index 3537705..b702b29 100644 --- a/README.md +++ b/README.md @@ -156,13 +156,15 @@ docker run --user $(id -u):$(id -g) --rm --name grafana-backup-tool \ ysde/docker-grafana-backup-tool ``` -***S3 Example:*** Set S3 configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json)) +***S3 Example:*** Set S3 configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json)). If no credentials are provided, grafana-backup-tool will use the default [credential provider chain](https://docs.aws.amazon.com/sdkref/latest/guide/standardized-credentials.html#credentialProviderChain). ``` -e AWS_S3_BUCKET_NAME="my-backups-bucket" \ -e AWS_S3_BUCKET_KEY="grafana-backup-folder" \ + -e AWS_S3_SSE="(optional) AES256|aws:kms|aws:kms:dsse" \ -e AWS_DEFAULT_REGION="us-east-1" \ -e AWS_ACCESS_KEY_ID="secret" \ -e AWS_SECRET_ACCESS_KEY="secret" \ + -e AWS_SESSION_TOKEN="(optional)" \ ``` ***Azure Example:*** Set Azure configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json)) diff --git a/examples/grafanaSettings.example.json b/examples/grafanaSettings.example.json index 38734d3..a7e2849 100644 --- a/examples/grafanaSettings.example.json +++ b/examples/grafanaSettings.example.json @@ -15,9 +15,11 @@ "aws": { "s3_bucket_name": "bucket_name", "s3_bucket_key": "grafana-backup", + "s3_sse": "(optional) AES256|aws:kms|aws:kms:dsse", "default_region": "us-east-1", "access_key_id": "aws_access_key_id", - "secret_access_key": "aws_secret_access_key" + "secret_access_key": "aws_secret_access_key", + "session_token": "(optional)" }, "azure": { 
"container_name": "container_name", diff --git a/grafana_backup/grafanaSettings.py b/grafana_backup/grafanaSettings.py index a7d97d2..c29bbf8 100755 --- a/grafana_backup/grafanaSettings.py +++ b/grafana_backup/grafanaSettings.py @@ -36,7 +36,9 @@ def main(config_path): aws_default_region = config.get('aws', {}).get('default_region', '') aws_access_key_id = config.get('aws', {}).get('access_key_id', '') aws_secret_access_key = config.get('aws', {}).get('secret_access_key', '') + aws_session_token = config.get('aws', {}).get('session_token', '') aws_endpoint_url = config.get('aws', {}).get('endpoint_url', None) + aws_s3_sse = config.get('aws', {}).get('s3_sse', '') # Cloud storage settings - Azure azure_storage_container_name = config.get('azure', {}).get('container_name', '') azure_storage_connection_string = config.get('azure', {}).get('connection_string', '') @@ -67,7 +69,9 @@ def main(config_path): AWS_DEFAULT_REGION = os.getenv('AWS_DEFAULT_REGION', aws_default_region) AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID', aws_access_key_id) AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY', aws_secret_access_key) + AWS_SESSION_TOKEN = os.getenv('AWS_SESSION_TOKEN', aws_session_token) AWS_ENDPOINT_URL = os.getenv('AWS_ENDPOINT_URL', aws_endpoint_url) + AWS_S3_SSE = os.getenv('AWS_S3_SSE', aws_s3_sse) AZURE_STORAGE_CONTAINER_NAME = os.getenv('AZURE_STORAGE_CONTAINER_NAME', azure_storage_container_name) AZURE_STORAGE_CONNECTION_STRING = os.getenv('AZURE_STORAGE_CONNECTION_STRING', azure_storage_connection_string) @@ -172,9 +176,11 @@ def main(config_path): config_dict['TIMESTAMP'] = TIMESTAMP config_dict['AWS_S3_BUCKET_NAME'] = AWS_S3_BUCKET_NAME config_dict['AWS_S3_BUCKET_KEY'] = AWS_S3_BUCKET_KEY + config_dict['AWS_S3_SSE'] = AWS_S3_SSE config_dict['AWS_DEFAULT_REGION'] = AWS_DEFAULT_REGION config_dict['AWS_ACCESS_KEY_ID'] = AWS_ACCESS_KEY_ID config_dict['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET_ACCESS_KEY + config_dict['AWS_SESSION_TOKEN'] = AWS_SESSION_TOKEN 
config_dict['AWS_ENDPOINT_URL'] = AWS_ENDPOINT_URL config_dict['AZURE_STORAGE_CONTAINER_NAME'] = AZURE_STORAGE_CONTAINER_NAME config_dict['AZURE_STORAGE_CONNECTION_STRING'] = AZURE_STORAGE_CONNECTION_STRING diff --git a/grafana_backup/restore.py b/grafana_backup/restore.py index cc805e4..f89b5f3 100755 --- a/grafana_backup/restore.py +++ b/grafana_backup/restore.py @@ -98,7 +98,7 @@ def open_compressed_backup(compressed_backup): restore_functions['alert_rule'] = create_alert_rule restore_functions['contact_point'] = create_contact_point # There are some issues of notification policy restore api, it will lock the notification policy page and cannot be edited. - # restore_functions['notification_policys'] = update_notification_policy + # restore_functions['notification_policies'] = update_notification_policy if sys.version_info >= (3,): with tempfile.TemporaryDirectory() as tmpdir: diff --git a/grafana_backup/s3_common.py b/grafana_backup/s3_common.py index 8ff5e2a..21de597 100644 --- a/grafana_backup/s3_common.py +++ b/grafana_backup/s3_common.py @@ -4,19 +4,12 @@ def get_boto_session(settings) -> boto3.Session: aws_default_region = settings.get("AWS_DEFAULT_REGION") - aws_access_key_id = settings.get("AWS_ACCESS_KEY_ID") - aws_secret_access_key = settings.get("AWS_SECRET_ACCESS_KEY") # If no credentials are provided, boto3 will use the default credentials provider chain. - if aws_access_key_id is None or aws_secret_access_key is None: - return boto3.Session( - region_name=aws_default_region, - ) - - # Otherwise, use the provided credentials. 
return boto3.Session( - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, + **({'aws_access_key_id': settings.get('AWS_ACCESS_KEY_ID')} if settings.get('AWS_ACCESS_KEY_ID') else {}), + **({'aws_secret_access_key': settings.get('AWS_SECRET_ACCESS_KEY')} if settings.get('AWS_SECRET_ACCESS_KEY') else {}), + **({'aws_session_token': settings.get('AWS_SESSION_TOKEN')} if settings.get('AWS_SESSION_TOKEN') else {}), region_name=aws_default_region, ) diff --git a/grafana_backup/s3_upload.py b/grafana_backup/s3_upload.py index daa61da..e93c3f5 100644 --- a/grafana_backup/s3_upload.py +++ b/grafana_backup/s3_upload.py @@ -14,7 +14,10 @@ def main(args, settings): s3_object = get_s3_object(settings, s3_file_name=s3_file_name) try: - s3_object.put(Body=open(archive_file, 'rb')) + s3_object.put( + Body=open(archive_file, 'rb'), + **({'ServerSideEncryption': settings.get('AWS_S3_SSE')} if settings.get('AWS_S3_SSE') else {}) + ) print("Upload to S3 was successful") except FileNotFoundError: # noqa: F821 print("The file was not found") diff --git a/grafana_backup/save_notification_policies.py b/grafana_backup/save_notification_policies.py index dcf7ab8..0a82a23 100644 --- a/grafana_backup/save_notification_policies.py +++ b/grafana_backup/save_notification_policies.py @@ -34,7 +34,7 @@ def main(args, settings): notification_policies = get_all_notification_policies_in_grafana( grafana_url, http_get_headers, verify_ssl, client_cert, debug) save_notification_policies( - 'notificatioin_policies', notification_policies, folder_path, pretty_print) + 'notification_policies', notification_policies, folder_path, pretty_print) else: print("Unable to save notification policies, requires Grafana version {0} or above. 
Current version is {1}".format( minimum_version, grafana_version)) @@ -58,7 +58,7 @@ def get_all_notification_policies_in_grafana(grafana_url, http_get_headers, veri def save_notification_policies(file_name, notification_policies, folder_path, pretty_print): file_path = save_json(file_name, notification_policies, - folder_path, 'notification_policys', pretty_print) + folder_path, 'notification_policies', pretty_print) print_horizontal_line() print("notification policies are saved to {0}".format(file_path)) print_horizontal_line()