From 1243711fdf3aea59bcd676252c812bd4adfb85fc Mon Sep 17 00:00:00 2001 From: dev-abuke Date: Thu, 27 Jun 2024 12:15:44 +0300 Subject: [PATCH 1/4] feat: docker file backend --- Dockerfile.backend | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 Dockerfile.backend diff --git a/Dockerfile.backend b/Dockerfile.backend new file mode 100644 index 0000000..6986997 --- /dev/null +++ b/Dockerfile.backend @@ -0,0 +1,33 @@ +# Use the official Python base image +FROM python:3.9-slim + +# Set the working directory in the container +WORKDIR /app + +# Copy the requirements file into the container +COPY requirements.txt . + +# Install the required Python packages +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the entire backend project into the container +COPY ./backend . + +# Expose the port that FastAPI runs on +EXPOSE 8000 + +# Set environment variables for the FastAPI app +ENV PYTHONUNBUFFERED=1 + +ENV BINANCE_API_KEY="FN6iy8IhLMbDR3kVEYp1ZqsN6lj0fZXFRQaRZeJsWuLz6Is7DkVvyb70fwPGDY3B" +ENV DATABASE_URL="postgresql://trading_db_av2v_user:210M6MA9QKEEgVdiasnUdMQDBNN417oy@dpg-cpqojbqj1k6c73bkqq3g-a.oregon-postgres.render.com/trading_db_av2v" +ENV PYCOPG_DATABASE_URL = "postgresql+psycopg2://trading_db_av2v_user:210M6MA9QKEEgVdiasnUdMQDBNN417oy@dpg-cpqojbqj1k6c73bkqq3g-a.oregon-postgres.render.com/trading_db_av2v" +ENV DB_USERNAME='group3' +ENV DB_PASSWORD='group3@week9' +ENV DB_HOST='g3.10academy.org' +ENV DB_PORT=5432 +ENV DB_DATABASE='backtest' +ENV AWS_DATABASE_URL="postgresql+psycopg2://group3:group3%40week9@g3.10academy.org/backtest" + +# Run the FastAPI app with Uvicorn +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] From f9a0b8e4f4dfdf4c18155f197eebc682b4fea519 Mon Sep 17 00:00:00 2001 From: dev-abuke Date: Thu, 27 Jun 2024 12:16:08 +0300 Subject: [PATCH 2/4] feat: docker file frontend --- Dockerfile.frontend | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 
Dockerfile.frontend diff --git a/Dockerfile.frontend b/Dockerfile.frontend new file mode 100644 index 0000000..fe1b123 --- /dev/null +++ b/Dockerfile.frontend @@ -0,0 +1,29 @@ +# Use the official Node.js image as the base image +FROM node:16-alpine + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to the working directory +COPY ./frontend/package*.json ./ + +# Install the dependencies +RUN npm install + +# Copy the rest of the application code to the working directory +COPY ./frontend . + +# Build the React application +RUN npm run build + +# Use the official Nginx image to serve the React application +FROM nginx:alpine + +# Copy the built React application from the previous stage +COPY --from=0 /app/build /usr/share/nginx/html + +# Expose port 80 +EXPOSE 80 + +# Start Nginx when the container starts +CMD ["nginx", "-g", "daemon off;"] From d898d8bfbfd861d5906e320f50afdc450e372603 Mon Sep 17 00:00:00 2001 From: dev-abuke Date: Thu, 27 Jun 2024 12:17:53 +0300 Subject: [PATCH 3/4] feat: airflow docker --- airflow/docker-compose.yml | 113 +++++++++++++++++++++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 airflow/docker-compose.yml diff --git a/airflow/docker-compose.yml b/airflow/docker-compose.yml new file mode 100644 index 0000000..f8ee814 --- /dev/null +++ b/airflow/docker-compose.yml @@ -0,0 +1,113 @@ +version: '3' +services: + postgres: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + networks: + - airflow-network + + redis: + image: redis:6 + networks: + - airflow-network + + airflow-webserver: + image: apache/airflow:2.1.2 + depends_on: + - postgres + - redis + environment: + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: 
db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false' + AIRFLOW__CORE__LOAD_EXAMPLES: 'true' + AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth + volumes: + - ./dags:/opt/airflow/dags + - ./logs:/opt/airflow/logs + - ./plugins:/opt/airflow/plugins + ports: + - "8080:8080" + command: webserver + networks: + - airflow-network + + airflow-scheduler: + image: apache/airflow:2.1.2 + depends_on: + - postgres + - redis + environment: + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false' + AIRFLOW__CORE__LOAD_EXAMPLES: 'true' + AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth + volumes: + - ./dags:/opt/airflow/dags + - ./logs:/opt/airflow/logs + - ./plugins:/opt/airflow/plugins + command: scheduler + networks: + - airflow-network + + airflow-worker: + image: apache/airflow:2.1.2 + depends_on: + - postgres + - redis + environment: + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false' + AIRFLOW__CORE__LOAD_EXAMPLES: 'true' + AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth + volumes: + - ./dags:/opt/airflow/dags + - ./logs:/opt/airflow/logs + - ./plugins:/opt/airflow/plugins + command: celery worker 
+ networks: + - airflow-network + + airflow-init: + image: apache/airflow:2.1.2 + depends_on: + - postgres + - redis + environment: + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: aFERNET_KEY # Replace with generated key + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false' + AIRFLOW__CORE__LOAD_EXAMPLES: 'true' + AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth + volumes: + - ./dags:/opt/airflow/dags + - ./logs:/opt/airflow/logs + - ./plugins:/opt/airflow/plugins + command: ["bash", "-c", "airflow db init && airflow users create --username admin --firstname FIRST_NAME --lastname LAST_NAME --role Admin --email admin@example.com --password admin"] + networks: + - airflow-network + +networks: + airflow-network: + +volumes: + postgres-db-volume: From f7829fe1c0886b67076859a1327dde07fa868097 Mon Sep 17 00:00:00 2001 From: dev-abuke Date: Thu, 27 Jun 2024 12:18:20 +0300 Subject: [PATCH 4/4] fix: error in Sharpe ratio --- requirements.txt | 3 ++- scripts/backtesting/main.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3dfaf82..e397162 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,4 +12,5 @@ mlflow passlib python-jose kafka-python -tensorflow \ No newline at end of file +tensorflow +apache-airflow \ No newline at end of file diff --git a/scripts/backtesting/main.py b/scripts/backtesting/main.py index e3be56f..68c8843 100644 --- a/scripts/backtesting/main.py +++ b/scripts/backtesting/main.py @@ -68,7 +68,7 @@ def run_backtest(config): mlflow.log_param("indicator", indicator) final_value = cerebro.broker.getvalue() - sharpe_ratio = strat.analyzers.sharperatio.get_analysis()['sharperatio'] + sharpe_ratio = 
strat.analyzers.sharperatio.get_analysis()['sharperatio'] or 0 metrics_analyzer = strat.analyzers.getbyname('MetricsAnalyzer') metrics = metrics_analyzer.get_analysis() percentage_return = metrics['return']