Airflow #7

Merged · 4 commits · Jun 27, 2024
Changes from all commits
33 changes: 33 additions & 0 deletions Dockerfile.backend
@@ -0,0 +1,33 @@
# Use the official Python base image
FROM python:3.9-slim

# Set the working directory in the container
WORKDIR /app

# Copy the requirements file into the container
COPY requirements.txt .

# Install the required Python packages
RUN pip install --no-cache-dir -r requirements.txt

# Copy the entire backend project into the container
COPY ./backend .

# Expose the port that FastAPI runs on
EXPOSE 8000

# Set environment variables for the FastAPI app
ENV PYTHONUNBUFFERED=1

# NOTE: credentials set via ENV are baked into the image layers; supplying
# them at runtime (e.g. docker run --env-file) keeps them out of the image.
ENV BINANCE_API_KEY="FN6iy8IhLMbDR3kVEYp1ZqsN6lj0fZXFRQaRZeJsWuLz6Is7DkVvyb70fwPGDY3B"
ENV DATABASE_URL="postgresql://trading_db_av2v_user:210M6MA9QKEEgVdiasnUdMQDBNN417oy@dpg-cpqojbqj1k6c73bkqq3g-a.oregon-postgres.render.com/trading_db_av2v"
ENV PYCOPG_DATABASE_URL="postgresql+psycopg2://trading_db_av2v_user:210M6MA9QKEEgVdiasnUdMQDBNN417oy@dpg-cpqojbqj1k6c73bkqq3g-a.oregon-postgres.render.com/trading_db_av2v"
ENV DB_USERNAME='group3'
ENV DB_PASSWORD='group3@week9'
ENV DB_HOST='g3.10academy.org'
ENV DB_PORT=5432
ENV DB_DATABASE='backtest'
ENV AWS_DATABASE_URL="postgresql+psycopg2://group3:group3%[email protected]/backtest"

# Run the FastAPI app with Uvicorn
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
29 changes: 29 additions & 0 deletions Dockerfile.frontend
@@ -0,0 +1,29 @@
# Use the official Node.js image as the base image
FROM node:16-alpine AS build

# Set the working directory inside the container
WORKDIR /app

# Copy package.json and package-lock.json to the working directory
COPY ./frontend/package*.json ./

# Install the dependencies
RUN npm install

# Copy the rest of the application code to the working directory
COPY ./frontend .

# Build the React application
RUN npm run build

# Use the official Nginx image to serve the React application
FROM nginx:alpine

# Copy the built React application from the previous stage
COPY --from=build /app/build /usr/share/nginx/html

# Expose port 80
EXPOSE 80

# Start Nginx when the container starts
CMD ["nginx", "-g", "daemon off;"]
113 changes: 113 additions & 0 deletions airflow/docker-compose.yml
@@ -0,0 +1,113 @@
version: '3'
services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    networks:
      - airflow-network

  redis:
    image: redis:6
    networks:
      - airflow-network

  airflow-webserver:
    image: apache/airflow:2.1.2
    depends_on:
      - postgres
      - redis
    environment:
      AIRFLOW__CORE__EXECUTOR: CeleryExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0
      AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key
      AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false'
      AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
      AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
    ports:
      - "8080:8080"
    command: webserver
    networks:
      - airflow-network

  airflow-scheduler:
    image: apache/airflow:2.1.2
    depends_on:
      - postgres
      - redis
    environment:
      AIRFLOW__CORE__EXECUTOR: CeleryExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0
      AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key
      AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false'
      AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
      AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
    command: scheduler
    networks:
      - airflow-network

  airflow-worker:
    image: apache/airflow:2.1.2
    depends_on:
      - postgres
      - redis
    environment:
      AIRFLOW__CORE__EXECUTOR: CeleryExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0
      AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key
      AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false'
      AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
      AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
    command: celery worker
    networks:
      - airflow-network

  airflow-init:
    image: apache/airflow:2.1.2
    depends_on:
      - postgres
      - redis
    environment:
      AIRFLOW__CORE__EXECUTOR: CeleryExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
      AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0
      AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key
      AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false'
      AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
      AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
    command: ["bash", "-c", "airflow db init && airflow users create --username admin --firstname FIRST_NAME --lastname LAST_NAME --role Admin --email [email protected] --password admin"]
    networks:
      - airflow-network

networks:
  airflow-network:

volumes:
  postgres-db-volume:
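The AIRFLOW__CORE__FERNET_KEY placeholder must be replaced with a real key, and the same key must be used in all four Airflow services, or credentials encrypted by one container cannot be decrypted by another. A one-liner using the cryptography package, which apache-airflow already depends on:

from cryptography.fernet import Fernet

# generate a value for the aFERNET_KEY placeholders above
print(Fernet.generate_key().decode())

The stack is then typically started in two steps: docker compose up airflow-init once, to run airflow db init and create the admin user, then docker compose up -d for the long-running webserver, scheduler, and worker (docker-compose on older installs).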
3 changes: 2 additions & 1 deletion requirements.txt
@@ -12,4 +12,5 @@ mlflow
 passlib
 python-jose
 kafka-python
 tensorflow
+apache-airflow
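Installing apache-airflow unpinned from a requirements file can pull in incompatible transitive dependencies; the Airflow docs recommend pinning the version and installing against a constraints file, e.g. pip install "apache-airflow==2.1.2" --constraint https://raw.githubusercontent.com/apache/airflow/constraints-2.1.2/constraints-3.9.txt (2.1.2 and Python 3.9 assumed here to match the images used elsewhere in this PR).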
2 changes: 1 addition & 1 deletion scripts/backtesting/main.py
@@ -68,7 +68,7 @@ def run_backtest(config):
     mlflow.log_param("indicator", indicator)
 
     final_value = cerebro.broker.getvalue()
-    sharpe_ratio = strat.analyzers.sharperatio.get_analysis()['sharperatio']
+    sharpe_ratio = strat.analyzers.sharperatio.get_analysis()['sharperatio'] or 0
     metrics_analyzer = strat.analyzers.getbyname('MetricsAnalyzer')
     metrics = metrics_analyzer.get_analysis()
     percentage_return = metrics['return']
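The "or 0" added here matters because backtrader's SharpeRatio analyzer returns {'sharperatio': None} when the ratio cannot be computed (for example, too few return samples), and passing None on to metric logging or arithmetic would fail. A standalone sketch of the guard:

# what strat.analyzers.sharperatio.get_analysis() can return when the
# Sharpe ratio is not computable
analysis = {'sharperatio': None}

# None is falsy, so it falls back to 0; real numeric ratios pass through
sharpe_ratio = analysis['sharperatio'] or 0
print(sharpe_ratio)  # 0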