From a8401ae3d2a5cc109cbc82e0c30b0ac8fde42390 Mon Sep 17 00:00:00 2001 From: Matthew Fala Date: Wed, 1 Feb 2023 19:34:43 +0000 Subject: [PATCH] app: s3 log uploader --- apps/magic-mirror/readme.md | 2 +- apps/s3-log-uploader/Dockerfile | 28 +++++++++++++++++ apps/s3-log-uploader/readme.md | 54 +++++++++++++++++++++++++++++++++ apps/s3-log-uploader/s3_sync.sh | 18 +++++++++++ 4 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 apps/s3-log-uploader/Dockerfile create mode 100644 apps/s3-log-uploader/readme.md create mode 100755 apps/s3-log-uploader/s3_sync.sh diff --git a/apps/magic-mirror/readme.md b/apps/magic-mirror/readme.md index a4cd62c..3843e09 100644 --- a/apps/magic-mirror/readme.md +++ b/apps/magic-mirror/readme.md @@ -16,5 +16,5 @@ ln: failed to create hard link '/destination/hello' => '/source/hello': Invalid ``` mkdir ./lib mkdir ./lib/source -sudo docker run -it -v `pwd`/lib:/lib --env SOURCE=/lib/source --env DESTINATION=/lib/destination --env DELAY=5 magicmirror:latest +sudo docker run -it -v ./lib:/lib --env SOURCE=/lib/source --env DESTINATION=/lib/destination --env DELAY=5 magicmirror:latest ``` \ No newline at end of file diff --git a/apps/s3-log-uploader/Dockerfile b/apps/s3-log-uploader/Dockerfile new file mode 100644 index 0000000..63e2262 --- /dev/null +++ b/apps/s3-log-uploader/Dockerfile @@ -0,0 +1,28 @@ +FROM amazonlinux:2 + +# Install aws cli +RUN yum install unzip -y +WORKDIR /var/tmp +RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" +RUN unzip awscliv2.zip +RUN ./aws/install +RUN rm awscliv2.zip + +RUN yum remove unzip -y \ + && yum clean all \ + && rm -rf /var/cache/yum + + +# Add some default directories +COPY s3_sync.sh / +RUN chmod +x /s3_sync.sh + +ENV BUCKET test-s3-instrumentation +ENV WATCHLIST /logs + +WORKDIR / + +RUN mkdir /logs + +# Execute sync loop command +CMD /s3_sync.sh $BUCKET $WATCHLIST diff --git a/apps/s3-log-uploader/readme.md b/apps/s3-log-uploader/readme.md new 
file mode 100644 index 0000000..d8b7f07 --- /dev/null +++ b/apps/s3-log-uploader/readme.md @@ -0,0 +1,54 @@ +# S3 Sync Image + +``` +public.ecr.aws/fala-fluentbit/s3-sync:latest +``` + +The s3 sync image is used to send all file logs to an s3 bucket location from the specified folders. + +It takes two environment variables: +1. BUCKET: This is the s3 bucket name you would like to send your logs to. +2. WATCHLIST: This is a comma separated list of directories that the container will watch and send to s3. Please use the full path name starting with /. example: /logs1,/logs2. By default this is set to: /logs + +To use this script to help the firelens team check for log loss, please: +1. Create an s3 bucket for your logs to be sent to. This can be the same as your coredump bucket +2. Give the task that runs this container the following permissions: +``` +{ + "Version": "2012-10-17", + "Statement": [ + { + "Resource": [ + "arn:aws:s3:::YOUR_BUCKET_NAME", + "arn:aws:s3:::YOUR_BUCKET_NAME/*" + ], + "Effect": "Allow", + "Action": [ + "s3:DeleteObject", + "s3:GetBucketLocation", + "s3:GetObject", + "s3:ListBucket", + "s3:PutObject" + ] + } + ] +} + +``` +3. Mount your log volumes to this container +4. Set the environment variables on your task definition to point to your s3 bucket (should just be the bucket name, not the ARN), and the list of directories you would like this container to monitor. + + +The goal of this container is to help us determine if there is log loss on your fluent bit instance and to help get clarity into the throughput and size of your log files. 
+ +## Test the Image +``` +mkdir ./lib +mkdir ./lib/source +mkdir ./lib2 +touch ./lib2/source2.txt + +# mount 2 volumes to the logs folder which is synced to s3 +docker run -it -v `pwd`/lib:/logs/lib -v `pwd`/lib2:/logs/lib2 --env BUCKET=test-s3-instrumentation public.ecr.aws/fala-fluentbit/s3-sync:latest +touch ./lib/source/mylogs.txt +``` diff --git a/apps/s3-log-uploader/s3_sync.sh b/apps/s3-log-uploader/s3_sync.sh new file mode 100755 index 0000000..705ca67 --- /dev/null +++ b/apps/s3-log-uploader/s3_sync.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +bucket=$1 +watchlist=$2 + +IFS=',' read -ra dirs <<< "$watchlist" + +for i in "${!dirs[@]}"; do + dirs[i]=$(echo "${dirs[i]}" | sed 's|^/||') +done + +while true; do + for dir in "${dirs[@]}"; do + aws s3 sync "$dir" "s3://$bucket/$dir" + done + + sleep 60 +done