Skip to content

Commit

Permalink
Merge pull request #240 from vfedotovs/dev-1.4.12
Browse files Browse the repository at this point in the history
Test PR to merge dev-1.4.12 branch to main
  • Loading branch information
vfedotovs committed Feb 3, 2024
2 parents 7b57709 + e414859 commit ac24c70
Show file tree
Hide file tree
Showing 65 changed files with 5,883 additions and 589 deletions.
160 changes: 83 additions & 77 deletions .github/workflows/CICD-Release-1.4.9.yml
Original file line number Diff line number Diff line change
@@ -1,95 +1,101 @@
---
name: CICD-Release-1.4.9

on:
push:
branches: [ "release-1.4.9" ]

branches:
- release-1.4.9
- dev-1.4.10
- dev-1.4.11
jobs:

build_containers:
runs-on: ubuntu-latest
env:
S3_BUCKET: ${{ secrets.S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

steps:
- uses: actions/checkout@v2
- name: Setup creating database.ini env file
run: |
echo "[postgresql]" > src/ws/database.ini
echo "host=db" >> src/ws/database.ini
echo "${{ secrets.PG_DB_NAME }}" >> src/ws/database.ini
echo "${{ secrets.PG_DB_USER }}" >> src/ws/database.ini
echo "${{ secrets.PG_DB_PASS }}" >> src/ws/database.ini
ls -la src/ws | grep ini
cat src/ws/database.ini
- name: Before build step 2 create .env.prod file
run: |
echo "# ws_worker container env variables" > .env.prod
echo "${{ secrets.DEST_EMAIL }}" >> .env.prod
echo "${{ secrets.SRC_EMAIL }}" >> .env.prod
echo "${{ secrets.SENDGRID_API_KEY }}" >> .env.prod
echo "${{ secrets.POSTGRES_PASSWORD }}" >> .env.prod
ls -la | grep env
cat .env.prod
- name: Show environment
run: |
cat /etc/os-release
docker -v
docker-compose -v
docker compose version
python3 -V
pip install boto3
- name: Setup step 3 download last DB file from S3 bucket
run: |
python3 ./src/db/get_last_db_backup.py
cp *.sql ./src/db
ls -lh ./src/db | grep sql
- name: Start DB container ONLY
run: |
pwd
docker-compose --env-file .env.prod up -d
- name: List DB table sizes to confirm if DB import was correct
run: |
sleep 15
docker-compose ps
docker exec sslv_web_scraper_db_1 psql -U new_docker_user -d new_docker_db -c '\dt+'
- name: Run docker compose ps
run: |
sleep 10
docker-compose ps
- name: Run compose down
run: docker-compose down


- uses: actions/checkout@v2
- name: Setup creating database.ini env file
run: |
echo "[postgresql]" > src/ws/database.ini
echo "host=db" >> src/ws/database.ini
echo "${{ secrets.PG_DB_NAME }}" >> src/ws/database.ini
echo "${{ secrets.PG_DB_USER }}" >> src/ws/database.ini
echo "${{ secrets.PG_DB_PASS }}" >> src/ws/database.ini
ls -la src/ws | grep ini
cat src/ws/database.ini
- name: Before build step 2 create .env.prod file
run: |
echo "# ws_worker container env variables" > .env.prod
echo "${{ secrets.DEST_EMAIL }}" >> .env.prod
echo "${{ secrets.SRC_EMAIL }}" >> .env.prod
echo "${{ secrets.SENDGRID_API_KEY }}" >> .env.prod
echo "${{ secrets.POSTGRES_PASSWORD }}" >> .env.prod
ls -la | grep env
cat .env.prod
- name: Show environment
run: |
cat /etc/os-release
docker -v
docker-compose -v
docker compose version
python3 -V
pip install boto3
- name: Setup step 3 download last DB file from S3 bucket
run: |
python3 ./src/db/get_last_db_backup.py
cp *.sql ./src/db
ls -lh ./src/db | grep sql
- name: Start DB container ONLY
run: |
pwd
docker-compose --env-file .env.prod up -d
- name: List DB table sizes to confirm if DB import was correct
run: >
sleep 15
docker-compose ps
docker exec sslv_web_scraper_db_1 psql -U new_docker_user -d new_docker_db -c '\dt+'
- name: Run docker compose ps
run: |
sleep 10
docker-compose ps
- name: Run compose down
run: docker-compose down
Deploy_to_AWS_EC2:
needs: build_containers
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- name: Deploy in EC2
- uses: actions/checkout@v2
- name: Deploy to Release EC2
env:
PRIVATE_KEY: ${{ secrets.DEV_EC2_PRIVATE_KEY }}
HOSTNAME : ${{ secrets.DEV_EC2_IP }}
USER_NAME : ${{ secrets.DEV_EC2_USER }}

run: |
PRIVATE_KEY: ${{ secrets.DEV_EC2_PRIVATE_KEY }}
HOSTNAME: ${{ secrets.DEV_EC2_IP }}
USER_NAME: ${{ secrets.DEV_EC2_USER }}
run: >
echo "$PRIVATE_KEY" > private_key && chmod 600 private_key
ssh -o StrictHostKeyChecking=no -i private_key ${USER_NAME}@${HOSTNAME} '
#Now we have got the access of EC2 and we will start the deploy .
cd /home/ec2-user &&
docker rm -f web-v149 || true
docker run --rm -d -p 8501:8501 --name web-v149 vfedotovsdocker/sslv-web-v149:latest
'
ssh -o StrictHostKeyChecking=no -i private_key ${USER_NAME}@${HOSTNAME} '
pwd && ls -lah && \
docker ps && echo "---- before stopping ---" && \
docker ps -aq | xargs docker rm -f || true && \
docker images | xargs docker rmi -f || true && \
docker ps && echo "---- after stopping ---" && \
docker images && \
curr_time=$(date +%Y%m%d-%H%M) && \
folder_name="${curr_time}-deploy" && \
mkdir $folder_name && cd $folder_name && \
git clone https://github.com/vfedotovs/sslv_web_scraper.git . && pwd && \
git switch dev-1.4.11 && ls -la && \
cp ../.env.prod . && cp ../database.ini ./src/ws/ && \
export AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} && \
export AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} && \
export S3_BUCKET=${{ secrets.S3_BUCKET }} && \
./src/db/get_last_db_backup.py && \
cp *.sql ./src/db/ && \
ls -l ./src/db/ && \
docker-compose --env-file .env.prod up -d && sleep 15 && \
docker ps
curl http://0.0.0.0:80/run-task/ogre
'
128 changes: 128 additions & 0 deletions .github/workflows/CICD-dev-1412.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
---
# CI/CD pipeline for the dev-1.4.12 branch:
#   Test -> Build_Docker_Containers -> Deploy_to_AWS_EC2
name: CICD-dev-1412-branch

on:
  push:
    branches:
      - dev-1.4.12

jobs:
  # Run the test suite and publish a coverage report to Codecov.
  Test:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          # Quoted so YAML does not type it as a float (e.g. 3.10 -> 3.1).
          python-version: "3.9"
      - name: Install dependencies
        run: |
          python -m pip install pytest
          python -m pip install coverage
          pip install -r ./src/ws/requirements.txt
          pip install -r ./src/ts/requirements.txt
      - name: Run pytest cov report
        run: |
          coverage run -m pytest
          coverage xml -o coverage.xml
        continue-on-error: true  # Ignore the failure of this step
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: coverage-report
          # codecov-action takes the report via "files", not "path"
          # ("path" is actions/upload-artifact syntax and is ignored here).
          files: coverage.xml
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  # Smoke-test the container stack: restore the latest DB backup from S3,
  # bring the DB container up with docker-compose, and verify the import.
  Build_Docker_Containers:
    needs: Test
    runs-on: ubuntu-latest
    env:
      S3_BUCKET: ${{ secrets.S3_BUCKET }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
    steps:
      - uses: actions/checkout@v3
      - name: Setup creating database.ini env file
        run: |
          echo "[postgresql]" > src/ws/database.ini
          echo "host=db" >> src/ws/database.ini
          echo "${{ secrets.PG_DB_NAME }}" >> src/ws/database.ini
          echo "${{ secrets.PG_DB_USER }}" >> src/ws/database.ini
          echo "${{ secrets.PG_DB_PASS }}" >> src/ws/database.ini
          ls -la src/ws | grep ini
          cat src/ws/database.ini
      - name: Before build step 2 create .env.prod file
        run: |
          echo "# ws_worker container env variables" > .env.prod
          echo "${{ secrets.DEST_EMAIL }}" >> .env.prod
          echo "${{ secrets.SRC_EMAIL }}" >> .env.prod
          echo "${{ secrets.SENDGRID_API_KEY }}" >> .env.prod
          echo "${{ secrets.POSTGRES_PASSWORD }}" >> .env.prod
          ls -la | grep env
          cat .env.prod
      - name: Show environment
        run: |
          cat /etc/os-release
          docker -v
          docker-compose -v
          docker compose version
          python3 -V
          pip install boto3
      - name: Setup step 3 download last DB file from S3 bucket
        run: |
          python3 ./src/db/get_last_db_backup.py
          cp *.sql ./src/db
          ls -lh ./src/db | grep sql
      - name: Start DB container ONLY
        run: |
          pwd
          docker-compose --env-file .env.prod up -d
      - name: List DB table sizes to confirm if DB import was correct
        # Literal block (|), not folded (>): a folded scalar would join these
        # commands into one line ("sleep 15 docker-compose ps docker exec ...")
        # and the step would not run them as separate commands.
        run: |
          sleep 15
          docker-compose ps
          docker exec sslv_web_scraper_db_1 psql -U new_docker_user -d new_docker_db -c '\dt+'
      - name: Run docker compose ps
        run: |
          sleep 10
          docker-compose ps
      - name: Run compose down
        run: docker-compose down

  # Deploy the stack to the dev EC2 host over SSH: wipe the old containers,
  # clone the branch fresh, restore the DB backup, and start docker-compose.
  Deploy_to_AWS_EC2:
    needs: Build_Docker_Containers
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Deploy to Release EC2
        env:
          PRIVATE_KEY: ${{ secrets.DEV_EC2_PRIVATE_KEY }}
          HOSTNAME: ${{ secrets.DEV_EC2_IP }}
          USER_NAME: ${{ secrets.DEV_EC2_USER }}
        # Literal block (|) is required: with folded ">" the ssh invocation is
        # joined onto the chmod line and never runs as its own command.
        # NOTE(review): the remote script assumes .env.prod and database.ini
        # already exist in /home/<user> on the EC2 host (they are copied from
        # "../"), and that the host has docker-compose installed — confirm.
        run: |
          echo "$PRIVATE_KEY" > private_key && chmod 600 private_key
          ssh -o StrictHostKeyChecking=no -i private_key ${USER_NAME}@${HOSTNAME} '
          pwd && ls -lah && \
          docker ps && echo "---- before stopping ---" && \
          docker ps -aq | xargs docker rm -f || true && \
          docker images | xargs docker rmi -f || true && \
          docker ps && echo "---- after stopping ---" && \
          docker images && \
          curr_time=$(date +%Y%m%d-%H%M) && \
          folder_name="${curr_time}-deploy" && \
          mkdir $folder_name && cd $folder_name && \
          git clone https://github.com/vfedotovs/sslv_web_scraper.git . && pwd && \
          git switch dev-1.4.12 && ls -la && \
          cp ../.env.prod . && cp ../database.ini ./src/ws/ && \
          export AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} && \
          export AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} && \
          export S3_BUCKET=${{ secrets.S3_BUCKET }} && \
          ./src/db/get_last_db_backup.py && \
          cp *.sql ./src/db/ && \
          ls -l ./src/db/ && \
          docker-compose --env-file .env.prod up -d && sleep 15 && \
          docker ps
          '
16 changes: 13 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,22 @@
__init__.pyc
.coverage

# postgress data base settings


# But don't ignore these test_setup_files
!scripts/test_setup_files/Ogre-raw-data-report.txt
!scripts/test_setup_files/pandas_df.csv
!scripts/test_setup_files/cleaned-sorted-df.csv
!scripts/test_setup_files/Ogre_city_report.pdf
!scripts/test_setup_files/basic_price_stats.txt
!scripts/test_setup_files/email_body_txt_m4.txt


# Exclude postgress data base settings
database.ini


# docker compose env file
# Exclude docker compose env file
.env.prod


Expand All @@ -31,6 +42,5 @@ src_email.env
dest_email.env

# Do not exclude
!TODO_items.txt
!requirements.txt
!project_data.txt
Loading

0 comments on commit ac24c70

Please sign in to comment.