mirror of
https://github.com/autistic-symposium/backend-and-orchestration-toolkit.git
synced 2025-06-08 23:13:08 -04:00
merge files from the blockchain infra repo:
This commit is contained in:
parent
23f56ef195
commit
5a9703cfd5
346 changed files with 29097 additions and 132 deletions
|
@ -0,0 +1,4 @@
|
|||
packages
|
||||
lib
|
||||
app
|
||||
Dockerfile.build
|
|
@ -0,0 +1,9 @@
|
|||
# Build image used to install the lambda function's Python dependencies
# on Amazon Linux, so any native wheels match the AWS Lambda runtime.
FROM amazonlinux:1

WORKDIR /opt/app

# Only requirements.txt is needed here; app sources are zipped separately
# by scripts/build_package.sh.
ADD requirements.txt .

# Install pip for Python 2.7 and vendor all dependencies into /opt/app,
# where build_package.sh later copies them out of the container.
RUN \
  yum install -y python27-pip && \
  pip install --target=/opt/app -r requirements.txt
|
46
code/aws/lambda-function/sqs-sns_example/scripts/build_package.sh
Executable file
46
code/aws/lambda-function/sqs-sns_example/scripts/build_package.sh
Executable file
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env bash
# This script adds additional dependencies that are needed for the lambda
# function package: the ffmpeg/ffprobe static binaries, the app's lib/
# sources, and the pip dependencies built inside an Amazon Linux container.

set -x

PACKAGE_NAME=cameras-clip.zip

# If S3_BUCKET env var isn't set, default it
if [ -z "${S3_BUCKET}" ]; then
  S3_BUCKET=s3-test
fi

# Set dist env and create initial zip file.
# BUG FIX: was `ORIGIN=$pwd` — $pwd is an (almost certainly empty)
# variable, not the working directory, so the final `cd $ORIGIN`
# silently went to $HOME instead of back here.
ORIGIN=$(pwd)
rm -rf dist && mkdir dist
lambda build --local-package . && mv dist/*.zip dist/$PACKAGE_NAME
cd dist/

## Fetch & add binary for FFMPEG
aws s3 cp "s3://${S3_BUCKET}/ffmpeg/ffmpeg-release-64bit-static.tar.xz" . && tar xf ffmpeg-release-64bit-static.tar.xz
zip -j -r9 $PACKAGE_NAME ffmpeg-*-64bit-static/ffmpeg
zip -j -r9 $PACKAGE_NAME ffmpeg-*-64bit-static/ffprobe

# Add this App's source code
cp -r ../lib .
zip -r9 $PACKAGE_NAME lib

# Add dependencies from pip (built inside the Dockerfile.build image so
# native wheels match the Lambda runtime).
mkdir packages
cp ../scripts/Dockerfile.build Dockerfile
cp ../scripts/.dockerignore .dockerignore
cp ../requirements.txt .
docker build --tag pillow-build .
CTNHASH="$(docker create pillow-build)"
docker cp "${CTNHASH}":/opt/app/ .
cp -rf app/* packages/

# Package everything
cd packages
zip -ur9 ../$PACKAGE_NAME *
cd ..

# Clean up
#rm -rf ffmpeg-release-64bit-static.tar.xz ffmpeg-*-64bit-static/ packages/ lib/
docker rm ${CTNHASH}
cd "${ORIGIN}"
|
177
code/aws/lambda-function/sqs-sns_example/scripts/create_test_event.py
Executable file
177
code/aws/lambda-function/sqs-sns_example/scripts/create_test_event.py
Executable file
|
@ -0,0 +1,177 @@
|
|||
#!/usr/bin/env python2
|
||||
#
|
||||
# For integration tests, different SQS events are needed.
|
||||
# This script generates events for alternate flows.
|
||||
# Global variables are defined in main().
|
||||
|
||||
import time
|
||||
import json
|
||||
import argparse
|
||||
import datetime
|
||||
import calendar
|
||||
import datetime
|
||||
|
||||
|
||||
def time_to_epoch(timestamp, timestamp_format):
    """Convert a formatted timestamp string to an epoch string in milliseconds.

    The timestamp is interpreted as UTC (calendar.timegm), and the
    result is the integer epoch seconds with '000' appended.
    """
    parsed = time.strptime(str(timestamp), timestamp_format)
    epoch_seconds = calendar.timegm(parsed)
    return '{0}000'.format(epoch_seconds)
|
||||
|
||||
|
||||
def generate_delta_time(delta, timestamp_format, now, days):
    """Return (begin, end) timestamp strings for a clip window.

    The window ends `days` days before `now` and lasts `delta` minutes;
    both endpoints are formatted with `timestamp_format`.
    """
    window_end = now - datetime.timedelta(days=days)
    window_begin = window_end - datetime.timedelta(minutes=delta)
    return window_begin.strftime(timestamp_format), window_end.strftime(timestamp_format)
|
||||
|
||||
|
||||
def get_current_local_time(timestamp):
    """Return the current local time three ways.

    Returns a tuple of (datetime object, string formatted with
    `timestamp`, epoch-milliseconds string from time_to_epoch).
    """
    current = datetime.datetime.now()
    formatted = current.strftime(timestamp)
    return current, formatted, time_to_epoch(formatted, timestamp)
|
||||
|
||||
|
||||
def create_event(begin, end, event_file, cam_id, epoch_now):
    """Write an SQS-style test event and return its message body.

    Builds a single-record event with the given camera id and
    begin/end timestamps, saves it as pretty-printed JSON to
    `event_file`, and returns the record's body string.
    """
    body = ("{'clipId': '1111111111111111',"
            "'retryTimestamps': [],"
            "'cameraId': '" + str(cam_id) + "',"
            "'startTimestampInMs': '" + str(begin) + "',"
            "'endTimestampInMs': '" + str(end) + "'}")

    record = {
        "md5OfBody": "XXXXXXXXXXXXXXXXXXX",
        "receiptHandle": "XXXXXXXXXXXXXXXXXXX",
        "body": body,
        "eventSourceARN": "XXXXXXXXXXXXXXXXXXX",
        "eventSource": "aws:sqs",
        "awsRegion": "us-west-1",
        "messageId": "XXXXXXXXXXXXXXXXXXX",
        "attributes": {
            "ApproximateFirstReceiveTimestamp": "XXXXXXXXXXXXXXXXXXX",
            "SenderId": "XXXXXXXXXXXXXXXXXXX",
            "ApproximateReceiveCount": "1",
            "SentTimestamp": epoch_now
        },
        "messageAttributes": {}
    }

    with open(event_file, 'w') as f:
        json.dump({'Records': [record]}, f,
                  separators=(',', ': '), sort_keys=True, indent=2)

    return body
|
||||
|
||||
|
||||
def main():
    """Parse CLI flags, generate the matching SQS test event, and print it.

    Exactly one event type is produced per run; with no flags the
    default is a CLIP_AVAILABLE event. The event is written to
    EVENT_FILE by create_event().
    """
    # Global variables.
    EVENT_FILE = 'event.json'
    TIMESTAMP_FORMAT = '%d-%m-%Y %H:%M:%S'
    DAYS_BEFORE_PENDING = 0
    DAYS_BEFORE_AVAILABLE = 0
    DAYS_BEFORE_NOT_AVAILABLE = 2
    DAYS_BEFORE_OUT_OF_RANGE = 8

    # Camera IDs used for tests, they should be checked whether
    # they are currently down or not. For instance:
    CAM_DOWN = '1111111111111111'
    CAM_UP = '1111111111111111'

    # This should not be more than 5 minutes (or the rewind clip generator
    # app won't accept the event).
    SESSION_DURATION_OK = 3
    SESSION_DURATION_CLIP_TOO_LONG = 8

    # Get the type of event to be generated.
    parser = argparse.ArgumentParser(
        description='Clip duration you are looking for (in mins):')
    parser.add_argument('-a', '--clip_available',
                        action='store_true', help='Event for <15 min')
    parser.add_argument('-p', '--clip_pending',
                        action='store_true', help='Event cam down <15 min')
    parser.add_argument('-o', '--clip_out_of_range',
                        action='store_true', help='Event for >3 days')
    parser.add_argument('-n', '--clip_not_available',
                        action='store_true', help='Event cam down >3 days')
    parser.add_argument('-t', '--clip_too_long',
                        action='store_true', help='Clips > 5 min')
    args = parser.parse_args()

    # Define what type of event we want.
    if args.clip_pending:
        days_before = DAYS_BEFORE_PENDING
        cam_id = CAM_DOWN
        session_duration = SESSION_DURATION_OK

    elif args.clip_out_of_range:
        days_before = DAYS_BEFORE_OUT_OF_RANGE
        cam_id = CAM_UP
        session_duration = SESSION_DURATION_OK

    elif args.clip_not_available:
        days_before = DAYS_BEFORE_NOT_AVAILABLE
        cam_id = CAM_DOWN
        session_duration = SESSION_DURATION_OK

    elif args.clip_too_long:
        days_before = DAYS_BEFORE_AVAILABLE
        cam_id = CAM_UP
        session_duration = SESSION_DURATION_CLIP_TOO_LONG

    else:
        # Defaults to CLIP_AVAILABLE event.
        days_before = DAYS_BEFORE_AVAILABLE
        cam_id = CAM_UP
        session_duration = SESSION_DURATION_OK

    # Get current time in human string and epoch int.
    now, human_now, epoch_now = get_current_local_time(TIMESTAMP_FORMAT)

    # Generates a begin and end time within the last days.
    begin, end = generate_delta_time(
        session_duration, TIMESTAMP_FORMAT, now, days_before)

    # Convert these times to epoch timestamp and human time.
    end_epoch = time_to_epoch(end, TIMESTAMP_FORMAT)
    begin_epoch = time_to_epoch(begin, TIMESTAMP_FORMAT)

    if begin_epoch and end_epoch:

        # Creates the JSON file for the event.
        body = create_event(begin_epoch, end_epoch,
                            EVENT_FILE, cam_id, epoch_now)

        print('-----------------------------------------------------')
        print('Event test saved at {}'.format(EVENT_FILE))
        print('Camera id is {}'.format(cam_id))
        # BUG FIX: .format() was chained onto print()'s return value
        # (None), which raised AttributeError at runtime.
        print('Timestamp for {0} days ago, delta time is {1} mins'.format(
            days_before, session_duration))
        print('Begin: {0} -> End: {1}'.format(begin_epoch, end_epoch))
        print('Begin: {0} -> End: {1}'.format(begin, end))
        print('Time: {}'.format(human_now))
        print('Body: ')
        print(body)
        print('-----------------------------------------------------')

    else:
        # BUG FIX: this branch referenced an undefined name `duration`,
        # which would have raised NameError instead of printing.
        print('Could not create timestamps for {}'.format(session_duration))


if __name__ == '__main__':
    main()
|
58
code/aws/lambda-function/sqs-sns_example/scripts/deploy_lambda.sh
Executable file
58
code/aws/lambda-function/sqs-sns_example/scripts/deploy_lambda.sh
Executable file
|
@ -0,0 +1,58 @@
|
|||
#!/bin/bash -ex
# Script that deploys this app to the AWS lambda function, similarly to Jenkins.
#
# NOTE(review): relies on BUILD_TAG being set in the environment (e.g. by
# Jenkins); if unset, the tagged bundle key degenerates to "...-.zip" — confirm.

USAGE=$(cat <<-END
Usage:
deploy_lambda.sh <environment>
Examples:
deploy_lambda.sh staging
END
)

# Print usage and exit cleanly on -h.
if [[ "$1" = "-h" ]]; then
    echo "${USAGE}"
    exit
fi

# The target environment argument is mandatory.
if [[ -n "$1" ]]; then
    SERVER_GROUP=$1
else
    echo '[ERROR] You must specify the env: production, sandbox, staging'
    echo
    echo "${USAGE}"
    exit 1
fi

BUILD_ENVIRONMENT=$1
APP_NAME=cameras-service-generate-clip
export AWS_DEFAULT_REGION="us-west-1"
export AWS_REGION="us-west-1"

# Sandbox artifacts live in the dev bucket; every other environment has
# its own "sl-artifacts-<env>" bucket.
if [[ "${BUILD_ENVIRONMENT}" == "sandbox" ]]; then
    S3_BUCKET=sl-artifacts-dev
else
    S3_BUCKET="sl-artifacts-${BUILD_ENVIRONMENT}"
fi

S3_PREFIX="lambda-functions/${APP_NAME}"
S3_BUNDLE_KEY="sl-${APP_NAME}.zip"
S3_TAGGED_BUNDLE_KEY="sl-${APP_NAME}-${BUILD_TAG}.zip"

# Build the deployable bundle from a clean tree (shebang's -e aborts the
# deploy if any of these steps fail).
make clean
make install
make lint
make build

# Upload the freshly built bundle...
aws \
    s3 cp "dist/${S3_BUNDLE_KEY}" "s3://${S3_BUCKET}/${S3_PREFIX}/${S3_BUNDLE_KEY}"

# ...then copy it server-side to a build-tagged key so old builds remain
# addressable for rollback.
aws \
    s3 cp "s3://${S3_BUCKET}/${S3_PREFIX}/${S3_BUNDLE_KEY}" "s3://${S3_BUCKET}/${S3_PREFIX}/${S3_TAGGED_BUNDLE_KEY}"

# Point the lambda function at the tagged bundle just uploaded.
aws \
    lambda update-function-code \
    --function-name "sl-${APP_NAME}-${BUILD_ENVIRONMENT}" \
    --s3-bucket "${S3_BUCKET}" \
    --s3-key "${S3_PREFIX}/${S3_TAGGED_BUNDLE_KEY}"

# Summary line consumed by the CI job's build description.
echo "build description:${APP_NAME}|${BUILD_ENVIRONMENT}|${BUILD_TAG}|"
|
3
code/aws/lambda-function/sqs-sns_example/scripts/get_recording_list.sh
Executable file
3
code/aws/lambda-function/sqs-sns_example/scripts/get_recording_list.sh
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env bash
# Fetch the recording list for the last hour (startDate/endDate are
# epoch-millisecond query parameters).
#
# BUG FIX: the URL was unquoted, so the shell parsed the `&` as a
# background operator — curl ran with only startDate, and
# `endDate=...` became a throwaway variable assignment in a separate
# command. Quoting the whole URL sends both parameters.
#
# NOTE(review): `date -v '-1H'` is BSD/macOS syntax; on GNU date use
# `date -d '1 hour ago' +%s` instead — confirm target platform.
# NOTE(review): `URL` looks like a placeholder — replace with the real endpoint.
curl -i "URL?startDate=$(date -v '-1H' +%s)000&endDate=$(date +%s)000"
|
Loading…
Add table
Add a link
Reference in a new issue