Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added api/tests/__init__.py
Empty file.
31 changes: 31 additions & 0 deletions api/tests/tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
from django.test import TestCase, SimpleTestCase, TransactionTestCase
from api.models import preexisting_models


# Django, Writing and Running Unit Tests: https://docs.djangoproject.com/en/2.0/topics/testing/overview/
# Django, Automated Unit Testing Tutorial: https://docs.djangoproject.com/en/2.0/intro/tutorial05/

class ExampleTestCase(TestCase):
    """Minimal example of a database-backed Django test case.

    Demonstrates the standard ``TestCase`` lifecycle: ``setUp`` runs
    before every test method, and each test runs inside a transaction
    that is rolled back afterwards.
    """

    def setUp(self):
        """No fixtures are needed for this example."""
        pass

    def test_example(self):
        """A trivially passing assertion proving the runner executes."""
        self.assertTrue(True)

class ExampleModelTestCase(TestCase):
    """Placeholder for model-level tests.

    Intended as the home for tests exercising models imported at the top
    of this module; currently contains only a smoke-test assertion.
    """

    def setUp(self):
        """No model fixtures are created yet."""
        pass

    def test_model(self):
        """Smoke test: the suite itself runs and passes."""
        self.assertTrue(True)


# https://docs.djangoproject.com/en/2.0/topics/testing/tools/#django.test.SimpleTestCase
class ExampleSimpleTestCase(SimpleTestCase):
    """Example of a test that needs no database access.

    ``SimpleTestCase`` skips database setup entirely, so tests here run
    faster but must not touch the ORM.
    """

    def test_simple_example(self):
        """Trivially passing assertion."""
        self.assertTrue(True)

# https://docs.djangoproject.com/en/2.0/topics/testing/tools/#transactiontestcase
class ExampleTransactionTestCase(TransactionTestCase):
    """Example of a test that exercises real transaction behavior.

    ``TransactionTestCase`` truncates tables between tests instead of
    rolling back a wrapping transaction, so commit/rollback logic can be
    tested here.
    """

    def test_transaction_example(self):
        """Trivially passing assertion."""
        self.assertTrue(True)
22 changes: 15 additions & 7 deletions bin/deploy.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,31 @@ if [ -z "$TRAVIS_PULL_REQUEST" ] || [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
# Push only if we're testing the master branch
if [ "$TRAVIS_BRANCH" == "master" ]; then

# remove until such time as anybody can remember why it is here
# export PATH=$PATH:$HOME/.local/bin

echo Getting the ECR login...
eval $(aws ecr get-login --no-include-email --region $AWS_DEFAULT_REGION)

REMOTE_DOCKER_PATH="$DOCKER_REPO"/"$DOCKER_REPO_NAMESPACE"/"$DOCKER_IMAGE"

echo Running docker push command...
docker push "$DOCKER_REPO"/"$DEPLOY_TARGET"/"$DOCKER_IMAGE":latest
# tag with branch and travis build number then push
TAG=travis-buildnum-"$TRAVIS_BUILD_NUMBER"
echo Tagging with "$TAG"
docker tag "$DOCKER_IMAGE":latest "$REMOTE_DOCKER_PATH":"$TAG"
docker push "$REMOTE_DOCKER_PATH":"$TAG"

# tag with "latest" then push
TAG=latest
echo Tagging with "$TAG"
docker tag "$DOCKER_IMAGE":latest "$REMOTE_DOCKER_PATH":"$TAG"
docker push "$REMOTE_DOCKER_PATH":"$TAG"

#echo Running ecs-deploy.sh script...
bin/ecs-deploy.sh \
--service-name "$ECS_SERVICE_NAME" \
--cluster "$ECS_CLUSTER" \
--image "$DOCKER_REPO"/"$DEPLOY_TARGET"/"$DOCKER_IMAGE":latest \
--image "$REMOTE_DOCKER_PATH":latest \
--timeout 300
else
echo "Skipping deploy because branch is not master"
echo "Skipping deploy because branch is not master"
fi
else
echo "Skipping deploy because it's a pull request"
Expand Down
32 changes: 32 additions & 0 deletions bin/get-ssm-parameters.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#! /bin/bash

# Pulls in HackO API env var values as parameters from AWS Parameter Store
# Depends on pre-installed awscli
#
# NOTE(review): this script is meant to be *sourced* (see
# production-docker-entrypoint.sh) so the exports land in the caller's
# environment — do not add `set -e` here, it would leak into the caller.
#
# Modelled on https://aws.amazon.com/blogs/compute/managing-secrets-for-amazon-ecs-applications-using-parameter-store-and-iam-roles-for-tasks/

EC2_REGION="us-west-2" # unfortunately cannot rely on dynamic env var values that this script is meant to pull in
NAMESPACE="/production/2018/API" # future-proofing this script for subsequent or past containers
PROJECT_CANONICAL_NAME="backend-exemplar" # must be set to each project's "Final naming convention" from here https://github.com/hackoregon/civic-devops/issues/1

# _get_ssm_param NAME DECRYPTION_FLAG
# Fetch one parameter value under the project namespace. The awk step pulls
# the Value column from the text-format `aws ssm get-parameters` output.
_get_ssm_param() {
    aws ssm get-parameters \
        --names "${NAMESPACE}/${PROJECT_CANONICAL_NAME}/${1}" \
        "${2}" \
        --region "${EC2_REGION}" \
        --output text | awk '{print $4}'
}

# Get (and export) unencrypted values
export POSTGRES_HOST="$(_get_ssm_param POSTGRES_HOST --no-with-decryption)"
export POSTGRES_NAME="$(_get_ssm_param POSTGRES_NAME --no-with-decryption)"
export POSTGRES_PORT="$(_get_ssm_param POSTGRES_PORT --no-with-decryption)"
export POSTGRES_USER="$(_get_ssm_param POSTGRES_USER --no-with-decryption)"

# Note: this env var value is for the WSGI startup - corresponds to the folder name where the base Django project is stored in the repo
export PROJECT_NAME="$(_get_ssm_param PROJECT_NAME --no-with-decryption)"

# Get (and export) encrypted values
export DJANGO_SECRET_KEY="$(_get_ssm_param DJANGO_SECRET_KEY --with-decryption)"
export POSTGRES_PASSWORD="$(_get_ssm_param POSTGRES_PASSWORD --with-decryption)"

# Avoid leaking the helper into the sourcing shell's namespace
unset -f _get_ssm_param
4 changes: 4 additions & 0 deletions bin/production-docker-entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@ set -e

echo Debug: $DEBUG

# Pull in environment variables values from AWS Parameter Store, and preserve the exports
# source usage per https://stackoverflow.com/q/14742358/452120
source /code/bin/get-ssm-parameters.sh

python -Wall manage.py collectstatic --noinput

gunicorn $PROJECT_NAME.wsgi -c gunicorn_config.py
3 changes: 2 additions & 1 deletion bin/test-entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,5 @@ set -e
echo "Collect static files"
python -Wall manage.py collectstatic --noinput

python -Wall manage.py test --nomigrations --noinput --keepdb #--parallel
# have pytest run unit tests
pytest
18 changes: 18 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import os

import pytest

# Import the settings *module* explicitly; the original referenced an
# undefined name (`backend.settings`) while importing `dead_songs`, which
# raised NameError as soon as the fixture ran.
import dead_songs.settings


@pytest.fixture(scope='session')
def django_db_setup():
    """Override pytest-django's session-scoped database setup.

    Points the default database at the connection described by the
    POSTGRES_* environment variables, using the gevent-pooled psycopg2
    backend — presumably to match the gunicorn/gevent production stack
    configured in gunicorn_config.py (confirm with the deploy owners).
    """
    dead_songs.settings.DATABASES['default'] = {
        'ENGINE': 'django_db_geventpool.backends.postgresql_psycopg2',
        'PASSWORD': os.environ.get('POSTGRES_PASSWORD'),
        'NAME': os.environ.get('POSTGRES_NAME'),
        'USER': os.environ.get('POSTGRES_USER'),
        'HOST': os.environ.get('POSTGRES_HOST'),
        'PORT': os.environ.get('POSTGRES_PORT'),
        # NOTE(review): geventpool appears to require CONN_MAX_AGE = 0 so it
        # can manage connection lifetimes itself — verify against its docs.
        'CONN_MAX_AGE': 0,
        'OPTIONS': {
            'MAX_CONNS': 20
        }
    }
2 changes: 1 addition & 1 deletion dead_songs/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
# Debug is enabled only when the DEBUG env var is exactly the string "True";
# anything else (unset, "False", "true", ...) leaves it off. Comparing
# against "False" would invert this: the production compose file sets
# DEBUG=False, which would have switched debug mode ON in production.
DEBUG = os.environ.get('DEBUG') == "True"

ALLOWED_HOSTS = ['*']

Expand Down
29 changes: 21 additions & 8 deletions gunicorn_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,21 @@
# Address to bind: host part omitted, so gunicorn listens on all
# interfaces, port 8000.
bind = ':8000'
# Maximum number of pending connections the listening socket will queue.
backlog = 2048


#
# Server Mechanics
#
# preload_app
#
# Load application code before the worker processes are forked.
#
# By preloading an application you can save some RAM resources as well as
# speed up server boot times. Although, if you defer application loading
# to each worker process, you can reload your application code easily by
# restarting workers.

# Deferred loading: each worker imports the app after forking, so a worker
# restart is enough to pick up new code.
preload_app = False

#
# Worker processes
#
Expand Down Expand Up @@ -70,20 +85,18 @@
from gevent import monkey
from psycogreen.gevent import patch_psycopg

# this ensures forked processes are patched with gevent/gevent-psycopg2
def post_fork(server, worker):
monkey.patch_all()
patch_psycopg()

worker.log.info("Psycogreen patched psycopg2 with green gevents in worker fork.")

# setting this inside the 'try' ensures that we only
# activate the gevent worker pool if we have gevent installed
worker_class = 'gevent'
workers = 4
# this ensures forked processes are patched with gevent/gevent-psycopg2
def do_post_fork(server, worker):
    """Gunicorn ``post_fork`` hook body.

    Runs in every worker process right after fork: monkey-patches the
    stdlib for gevent cooperative scheduling and patches psycopg2 so its
    database waits yield to the gevent hub.
    """
    monkey.patch_all()
    patch_psycopg()

    # you should see this text in your gunicorn logs if it was successful
    worker.log.info("Made Psycopg2 Green")

# Register the hook under the exact name gunicorn looks up in this config.
post_fork = do_post_fork
except ImportError:
pass

Expand Down
16 changes: 8 additions & 8 deletions production-docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
version: '3.4'
services:
api_production:
backend-exemplar-service:
build:
context: .
dockerfile: DOCKERFILE.api.production
image: api_production
image: $DOCKER_IMAGE
command: ./bin/production-docker-entrypoint.sh
volumes:
- .:/code
Expand All @@ -13,9 +13,9 @@ services:
environment:
- PROJECT_NAME
- DEBUG=False
- POSTGRES_USER=${PRODUCTION_POSTGRES_USER}
- POSTGRES_NAME=${PRODUCTION_POSTGRES_NAME}
- POSTGRES_HOST=${PRODUCTION_POSTGRES_HOST}
- POSTGRES_PORT=${PRODUCTION_POSTGRES_PORT}
- POSTGRES_PASSWORD=${PRODUCTION_POSTGRES_PASSWORD}
- DJANGO_SECRET_KEY=${PRODUCTION_DJANGO_SECRET_KEY}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_NAME=${POSTGRES_NAME}
- POSTGRES_HOST=${POSTGRES_HOST}
- POSTGRES_PORT=${POSTGRES_PORT}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- DJANGO_SECRET_KEY=${DJANGO_SECRET_KEY}
4 changes: 4 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[pytest]
DJANGO_SETTINGS_MODULE = dead_songs.settings
python_files = tests.py test_*.py *_tests.py
addopts = --reuse-db
2 changes: 2 additions & 0 deletions requirements/common.txt
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,5 @@ nose==1.3.7
django-test-without-migrations==0.6
# tblib required for viewing tracebacks from the unit test runner
tblib==1.3.2
# NOTE(review): unpinned, unlike the other entries in this file — consider
# pinning versions for reproducible builds
pytest-django
pytest-xdist
3 changes: 3 additions & 0 deletions requirements/production.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,6 @@ gevent==1.3a1
# DB connector for Green Connections
psycogreen==1.0
django-db-geventpool

# install for use by get-ssm-parameters.sh script
awscli