Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add aws s3 sse config and aws_session_token support #262

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,12 @@ ADD . /opt/grafana-backup-tool
RUN chmod -R a+r /opt/grafana-backup-tool \
&& find /opt/grafana-backup-tool -type d -print0 | xargs -0 chmod a+rx

RUN pip3 --no-cache-dir install .
# Create and activate Python virtual environment
RUN python3 -m venv venv
ENV PATH="/opt/grafana-backup-tool/venv/bin:$PATH"

# Install Python dependencies inside the virtual environment
RUN /opt/grafana-backup-tool/venv/bin/pip3 install --no-cache-dir .

RUN chown -R 1337:1337 /opt/grafana-backup-tool
USER 1337
Expand Down
5 changes: 4 additions & 1 deletion DockerfileSlim
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,12 @@ RUN chmod -R a+r /opt/grafana-backup-tool \
RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories \
&& apk add --no-cache --virtual build-deps ${DEV_PACKAGES} \
&& apk add --no-cache ${PACKAGES} \
&& python3 -m venv venv \
&& source venv/bin/activate \
&& pip3 --no-cache-dir install . \
&& deactivate \
&& chown -R ${UID}:${GID} /opt/grafana-backup-tool \
&& apk del build-deps

USER ${UID}
CMD sh -c 'if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! -z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi'
CMD sh -c 'source venv/bin/activate && if [ "$RESTORE" = true ]; then if [ ! -z "$AWS_S3_BUCKET_NAME" ] || [ ! -z "$AZURE_STORAGE_CONTAINER_NAME" ] || [ ! -z "$GCS_BUCKET_NAME" ]; then grafana-backup restore $ARCHIVE_FILE; else grafana-backup restore _OUTPUT_/$ARCHIVE_FILE; fi else grafana-backup save; fi'
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -156,13 +156,15 @@ docker run --user $(id -u):$(id -g) --rm --name grafana-backup-tool \
ysde/docker-grafana-backup-tool
```

***S3 Example:*** Set S3 configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json))
***S3 Example:*** Set S3 configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json)). If no credentials are provided, grafana-backup-tool will use the default [credential provider chain](https://docs.aws.amazon.com/sdkref/latest/guide/standardized-credentials.html#credentialProviderChain).
```
-e AWS_S3_BUCKET_NAME="my-backups-bucket" \
-e AWS_S3_BUCKET_KEY="grafana-backup-folder" \
-e AWS_S3_SSE="(optional) AES256|aws:kms|aws:kms:dsse" \
-e AWS_DEFAULT_REGION="us-east-1" \
-e AWS_ACCESS_KEY_ID="secret" \
-e AWS_SECRET_ACCESS_KEY="secret" \
-e AWS_SESSION_TOKEN="(optional)" \
```

***Azure Example:*** Set Azure configurations in `-e` or `grafanaSettings.json`([example](https://github.com/ysde/grafana-backup-tool/blob/master/examples/grafana-backup.example.json))
Expand Down
4 changes: 3 additions & 1 deletion examples/grafanaSettings.example.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,11 @@
"aws": {
"s3_bucket_name": "bucket_name",
"s3_bucket_key": "grafana-backup",
"s3_sse": "(optional) AES256|aws:kms|aws:kms:dsse",
"default_region": "us-east-1",
"access_key_id": "aws_access_key_id",
"secret_access_key": "aws_secret_access_key"
"secret_access_key": "aws_secret_access_key",
"session_token": "(optional)"
},
"azure": {
"container_name": "container_name",
Expand Down
6 changes: 6 additions & 0 deletions grafana_backup/grafanaSettings.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,9 @@ def main(config_path):
aws_default_region = config.get('aws', {}).get('default_region', '')
aws_access_key_id = config.get('aws', {}).get('access_key_id', '')
aws_secret_access_key = config.get('aws', {}).get('secret_access_key', '')
aws_session_token = config.get('aws', {}).get('session_token', '')
aws_endpoint_url = config.get('aws', {}).get('endpoint_url', None)
aws_s3_sse = config.get('aws', {}).get('s3_sse', '')
# Cloud storage settings - Azure
azure_storage_container_name = config.get('azure', {}).get('container_name', '')
azure_storage_connection_string = config.get('azure', {}).get('connection_string', '')
Expand Down Expand Up @@ -67,7 +69,9 @@ def main(config_path):
AWS_DEFAULT_REGION = os.getenv('AWS_DEFAULT_REGION', aws_default_region)
AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID', aws_access_key_id)
AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY', aws_secret_access_key)
AWS_SESSION_TOKEN = os.getenv('AWS_SESSION_TOKEN', aws_session_token)
AWS_ENDPOINT_URL = os.getenv('AWS_ENDPOINT_URL', aws_endpoint_url)
AWS_S3_SSE = os.getenv('AWS_S3_SSE', aws_s3_sse)

AZURE_STORAGE_CONTAINER_NAME = os.getenv('AZURE_STORAGE_CONTAINER_NAME', azure_storage_container_name)
AZURE_STORAGE_CONNECTION_STRING = os.getenv('AZURE_STORAGE_CONNECTION_STRING', azure_storage_connection_string)
Expand Down Expand Up @@ -172,9 +176,11 @@ def main(config_path):
config_dict['TIMESTAMP'] = TIMESTAMP
config_dict['AWS_S3_BUCKET_NAME'] = AWS_S3_BUCKET_NAME
config_dict['AWS_S3_BUCKET_KEY'] = AWS_S3_BUCKET_KEY
config_dict['AWS_S3_SSE'] = AWS_S3_SSE
config_dict['AWS_DEFAULT_REGION'] = AWS_DEFAULT_REGION
config_dict['AWS_ACCESS_KEY_ID'] = AWS_ACCESS_KEY_ID
config_dict['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET_ACCESS_KEY
config_dict['AWS_SESSION_TOKEN'] = AWS_SESSION_TOKEN
config_dict['AWS_ENDPOINT_URL'] = AWS_ENDPOINT_URL
config_dict['AZURE_STORAGE_CONTAINER_NAME'] = AZURE_STORAGE_CONTAINER_NAME
config_dict['AZURE_STORAGE_CONNECTION_STRING'] = AZURE_STORAGE_CONNECTION_STRING
Expand Down
2 changes: 1 addition & 1 deletion grafana_backup/restore.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def open_compressed_backup(compressed_backup):
restore_functions['alert_rule'] = create_alert_rule
restore_functions['contact_point'] = create_contact_point
# There are some issues of notification policy restore api, it will lock the notification policy page and cannot be edited.
# restore_functions['notification_policys'] = update_notification_policy
# restore_functions['notification_policies'] = update_notification_policy

if sys.version_info >= (3,):
with tempfile.TemporaryDirectory() as tmpdir:
Expand Down
13 changes: 3 additions & 10 deletions grafana_backup/s3_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,12 @@

def get_boto_session(settings) -> boto3.Session:
    """Build a boto3 Session from the backup-tool settings dict.

    Each credential kwarg (access key, secret key, session token) is passed
    to ``boto3.Session`` only when the corresponding setting is truthy; when
    a value is absent or empty the kwarg is omitted entirely, letting boto3
    fall back to its default credential provider chain (environment
    variables, shared config files, instance profile, ...).

    :param settings: mapping with optional keys ``AWS_ACCESS_KEY_ID``,
        ``AWS_SECRET_ACCESS_KEY``, ``AWS_SESSION_TOKEN`` and
        ``AWS_DEFAULT_REGION``.
    :return: a configured :class:`boto3.Session`.
    """
    aws_default_region = settings.get("AWS_DEFAULT_REGION")

    # Conditionally splat each credential so partially-specified settings
    # (e.g. only a region) still defer to boto3's own credential resolution.
    return boto3.Session(
        **({'aws_access_key_id': settings.get('AWS_ACCESS_KEY_ID')} if settings.get('AWS_ACCESS_KEY_ID') else {}),
        **({'aws_secret_access_key': settings.get('AWS_SECRET_ACCESS_KEY')} if settings.get('AWS_SECRET_ACCESS_KEY') else {}),
        **({'aws_session_token': settings.get('AWS_SESSION_TOKEN')} if settings.get('AWS_SESSION_TOKEN') else {}),
        region_name=aws_default_region,
    )

Expand Down
5 changes: 4 additions & 1 deletion grafana_backup/s3_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,10 @@ def main(args, settings):
s3_object = get_s3_object(settings, s3_file_name=s3_file_name)

try:
s3_object.put(Body=open(archive_file, 'rb'))
s3_object.put(
Body=open(archive_file, 'rb'),
**({'ServerSideEncryption': settings.get('AWS_S3_SSE')} if settings.get('AWS_S3_SSE') else {})
)
print("Upload to S3 was successful")
except FileNotFoundError: # noqa: F821
print("The file was not found")
Expand Down
4 changes: 2 additions & 2 deletions grafana_backup/save_notification_policies.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def main(args, settings):
notification_policies = get_all_notification_policies_in_grafana(
grafana_url, http_get_headers, verify_ssl, client_cert, debug)
save_notification_policies(
'notificatioin_policies', notification_policies, folder_path, pretty_print)
'notification_policies', notification_policies, folder_path, pretty_print)
else:
print("Unable to save notification policies, requires Grafana version {0} or above. Current version is {1}".format(
minimum_version, grafana_version))
Expand All @@ -58,7 +58,7 @@ def get_all_notification_policies_in_grafana(grafana_url, http_get_headers, veri

def save_notification_policies(file_name, notification_policies, folder_path, pretty_print):
file_path = save_json(file_name, notification_policies,
folder_path, 'notification_policys', pretty_print)
folder_path, 'notification_policies', pretty_print)
print_horizontal_line()
print("notification policies are saved to {0}".format(file_path))
print_horizontal_line()
Loading