diff --git a/.dockerignore b/.dockerignore index dee4c93aa..7be255af5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -391,7 +391,3 @@ dist # /.env /.env.*.local *._local.ts - -# Migrations -/db/migrations/*-migration.ts -/db \ No newline at end of file diff --git a/.env.example b/.env.example index 956515861..018066e39 100644 --- a/.env.example +++ b/.env.example @@ -13,9 +13,11 @@ DB_TYPE= DB_USERNAME= DB_PASSWORD= DB_HOST= -DB_DATABASE=hng +DB_NAME=hng DB_ENTITIES=dist/src/modules/**/entities/**/*.entity{.ts,.js} DB_MIGRATIONS=dist/db/migrations/*{.ts,.js} +POSGRES_USER=$DB_USERNAME +POST JWT_SECRET=gsgs JWT_EXPIRY_TIMEFRAME=1500000 DB_SSL=false @@ -36,4 +38,4 @@ SUPPORT_EMAIL= AUTH_PASSWORD= BASE_URL= FLUTTERWAVE_SECRET_KEY= -FLUTTERWAVE_BASE_URL= \ No newline at end of file +FLUTTERWAVE_BASE_URL= diff --git a/.github/workflows/dev-deployment.yaml b/.github/workflows/dev-deployment.yaml new file mode 100644 index 000000000..840a32fd6 --- /dev/null +++ b/.github/workflows/dev-deployment.yaml @@ -0,0 +1,48 @@ +name: Dev Deployment + +on: + workflow_dispatch: + push: + branches: + - dev + +jobs: + build-and-push: + if: github.event.repository.fork == false + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Build Docker image + run: docker build -t nestjs_dev:green . + + - name: Save and compress Docker image + run: | + docker save nestjs_dev:green | gzip > nestjs_dev.tar.gz + + - name: Copy image to server + uses: appleboy/scp-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + source: "nestjs_dev.tar.gz" + target: "/tmp" + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + environment: + name: "dev" + url: ${{ vars.URL }} + steps: + - name: Deploy on server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + script: | + cd ~/hng_boilerplate_nestjs + ./deploy.sh dev diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml deleted file mode 100644 index d500aba47..000000000 --- a/.github/workflows/dev.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: CI/CD--Dev - -on: - pull_request_target: - branches: - - dev - push: - branches: - - dev - -env: - DB_USERNAME: ${{ secrets.DB_USERNAME }} - DB_PASSWORD: ${{ secrets.DB_PASSWORD }} - DB_DATABASE: ${{ secrets.DB_DATABASE }} - DB_HOST: ${{ secrets.DB_HOST }} - DB_PORT: ${{ secrets.DB_PORT }} - DB_ENTITIES: ${{ secrets.DB_ENTITIES }} - DB_MIGRATIONS: ${{ secrets.DB_MIGRATIONS }} - GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }} - GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }} - GOOGLE_REDIRECT_URI: ${{ secrets.GOOGLE_REDIRECT_URI }} - REDIS_HOST: ${{ secrets.REDIS_HOST }} - DB_TYPE: 'postgres' - PROFILE: 'staging' - NODE_ENV: 'development' - PORT: 3000 - REDIS_PORT: 6379 - -jobs: - test-and-build-dev: - runs-on: ubuntu-latest - # environment: pr_environment - steps: - - name: Checkout code - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Run tests - run: npm run test - - - name: Build project - run: npm run build - - - name: Generate migrations - run: npm run migration:generate - - - name: Run migrations - run: npm run migration:run - - - name: Start application - run: | - npm run start:prod > app.log 2>&1 & - 
APP_PID=$! - echo $APP_PID - echo "Application started with PID $APP_PID" - sleep 30 # Wait for the application to start - tail -f app.log & - # Check application status with curl - if curl --retry 5 --retry-delay 5 --max-time 10 http://localhost:3000/health; then - echo "Application is up and running." - else - echo "Application failed to start. Logs:" - cat app.log - echo "Exiting workflow due to application failure." - kill $APP_PID - exit 1 - fi - - kill $APP_PID - echo "Application terminated Successfully." - - - name: Revert Migrations - run: npm run migration:revert - if: always() - - deploy-push: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/deployment.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/lint-build-test.yaml similarity index 67% rename from .github/workflows/ci.yml rename to .github/workflows/lint-build-test.yaml index 0b4aa0237..144facb09 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/lint-build-test.yaml @@ -1,30 +1,26 @@ -name: ci +name: Lint, Build and Test on: - pull_request: - branches: - - dev - push: - branches: - - dev - + pull_request + jobs: - test-and-build-dev: + lint-build-and-test: runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - steps: - name: Checkout code uses: actions/checkout@v3 - + - name: Set up Node.js uses: actions/setup-node@v3 with: - node-version: '18' + node-version: '20' - name: Install dependencies run: npm install --include=dev + - name: Run lint + run: npm run lint + - name: Build project run: npm run build diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index fde213c04..000000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: CI/CD--Main - -on: - pull_request: - branches: - - main - push: - branches: - - main - -env: - DB_USERNAME: ${{ secrets.DB_USERNAME }} - DB_PASSWORD: ${{ secrets.DB_PASSWORD }} - DB_DATABASE: ${{ secrets.DB_DATABASE }} - DB_HOST: ${{ secrets.DB_HOST }} - DB_PORT: ${{ secrets.DB_PORT }} - DB_ENTITIES: ${{ secrets.DB_ENTITIES }} - DB_MIGRATIONS: ${{ secrets.DB_MIGRATIONS }} - GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }} - GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }} - GOOGLE_REDIRECT_URI: ${{ secrets.GOOGLE_REDIRECT_URI }} - REDIS_HOST: ${{ secrets.REDIS_HOST }} - DB_TYPE: 'postgres' - PROFILE: 'staging' - NODE_ENV: 'development' - PORT: 3000 - REDIS_PORT: 6379 - -jobs: - test-and-build-main: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Generate migrations - run: npm run migration:generate - - - name: Run 
migrations - run: npm run migration:run - - - name: Start application - run: | - npm run start:prod > app.log 2>&1 & - APP_PID=$! - echo $APP_PID - echo "Application started with PID $APP_PID" - sleep 30 # Wait for the application to start - tail -f app.log & - # Check application status with curl - if curl --retry 5 --retry-delay 5 --max-time 10 http://localhost:3000/health; then - echo "Application is up and running." - else - echo "Application failed to start. Logs:" - cat app.log - echo "Exiting workflow due to application failure." - kill $APP_PID - exit 1 - fi - - kill $APP_PID - echo "Application terminated Successfully." - - - name: Revert Migrations - run: npm run migration:revert - if: always() - - deploy-main: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - # key: ${{ secrets.SERVER_PRIVATE_KEY }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/main-deployment.sh diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml new file mode 100644 index 000000000..19f6e69ea --- /dev/null +++ b/.github/workflows/pr-deploy.yaml @@ -0,0 +1,27 @@ +name: PR Deploy +on: + pull_request_target: + branches: + - dev + +jobs: + deploy-pr-for-testing: + environment: + name: preview + url: ${{ steps.deploy.outputs.preview-url }} + runs-on: ubuntu-latest + steps: + - name: Checkout to branch + uses: actions/checkout@v4 + - id: deploy + name: Pull Request Deploy + uses: hngprojects/pr-deploy@dev + with: + server_host: ${{ secrets.HOST }} + server_username: ${{ secrets.USERNAME }} + server_password: ${{ secrets.PASSWORD }} + comment: false + context: . + dockerfile: Dockerfile + exposed_port: 5000 + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/prdeploy.yml b/.github/workflows/prdeploy.yml deleted file mode 100644 index cff68a78b..000000000 --- a/.github/workflows/prdeploy.yml +++ /dev/null @@ -1,80 +0,0 @@ -name: PR Deploy -on: - pull_request_target: - branches: - - dev - -jobs: - deploy-pr-for-testing: - environment: - name: preview - url: ${{ steps.deploy.outputs.preview-url }} - runs-on: ubuntu-latest - steps: - - name: Checkout to branch - uses: actions/checkout@v4 - - - name: Install dependencies - run: npm install - - - name: Run tests - run: npm run test - - - id: deploy - name: Pull Request Deploy - uses: hngprojects/pr-deploy@dev - with: - server_host: ${{ secrets.SERVER_HOST }} - server_username: ${{ secrets.SERVER_USERNAME }} - server_password: ${{ secrets.SERVER_PASSWORD }} - server_port: ${{ secrets.SERVER_PORT }} - comment: true - context: '.' 
- dockerfile: 'dockerfile' - exposed_port: '3100' - host_volume_path: '/var/' - container_volume_path: '/var/' - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Print Preview Url - run: | - echo "Preview Url: ${{ steps.deploy.outputs.preview-url }}" - - deploy-to-server: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Build Docker Images - run: | - docker build -t nestjs-backend:tag . - - - name: Save Docker Images as Tarballs - run: | - docker save -o nestjs-backend.tar nestjs-backend:tag - - - name: Upload Docker Image to Server using SCP - env: - SERVER_USERNAME: ${{ secrets.SERVER_USERNAME }} - SERVER_HOST: ${{ secrets.SERVER_HOST }} - SERVER_PASSWORD: ${{ secrets.SERVER_PASSWORD }} - run: | - sshpass -p "${SERVER_PASSWORD}" scp -o StrictHostKeyChecking=no nestjs-backend.tar ${SERVER_USERNAME}@${SERVER_HOST}:/home/teamalpha/docker/dev - - - name: Load docker image on server and run via docker compose - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - - script: | - cd /home/teamalpha/docker/dev - sudo docker load -i nestjs-backend.tar - sudo docker-compose -f docker-compose.staging.yml up -d diff --git a/.github/workflows/prdeployMain.yml b/.github/workflows/prdeployMain.yml deleted file mode 100644 index 2ce239483..000000000 --- a/.github/workflows/prdeployMain.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: PR Deploy -on: - pull_request_target: - branches: - - main - push: - branches: - - main - -jobs: - deploy-pr-for-testing: - environment: - name: preview - url: ${{ steps.deploy.outputs.preview-url }} - runs-on: ubuntu-latest - steps: - - name: Checkout to branch - uses: actions/checkout@v4 - - - name: Install dependencies - run: npm install - - - name: Run tests - run: npm run test - - - id: deploy - name: Pull Request Deploy - uses: hngprojects/pr-deploy@main - with: - server_host: ${{ secrets.SERVER_HOST }} - server_username: ${{ secrets.SERVER_USERNAME }} - server_password: ${{ secrets.SERVER_PASSWORD }} - server_port: ${{ secrets.SERVER_PORT }} - comment: true - context: '.' - dockerfile: 'dockerfile' - exposed_port: '3200' - host_volume_path: '/var/' - container_volume_path: '/var/' - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Print Preview Url - run: | - echo "Preview Url: ${{ steps.deploy.outputs.preview-url }}" - - deploy-to-server: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Build Docker Images - run: | - docker build -t nestjs-backend-main:tag . 
- - - name: Save Docker Images as Tarballs - run: | - docker save -o nestjs-backend.tar nestjs-backend-main:tag - - - name: Upload Docker Image to Server using SCP - env: - SERVER_USERNAME: ${{ secrets.SERVER_USERNAME }} - SERVER_HOST: ${{ secrets.SERVER_HOST }} - SERVER_PASSWORD: ${{ secrets.SERVER_PASSWORD }} - run: | - sshpass -p "${SERVER_PASSWORD}" scp -o StrictHostKeyChecking=no nestjs-backend-main.tar ${SERVER_USERNAME}@${SERVER_HOST}:/home/teamalpha/docker/main - - - name: Load Docker Image on Server and Run via Docker Compose - env: - SERVER_USERNAME: ${{ secrets.SERVER_USERNAME }} - SERVER_HOST: ${{ secrets.SERVER_HOST }} - SERVER_PASSWORD: ${{ secrets.SERVER_PASSWORD }} - run: | - sshpass -p "${SERVER_PASSWORD}" ssh -o StrictHostKeyChecking=no ${SERVER_USERNAME}@${SERVER_HOST} << 'EOF' - cd /home/teamalpha/docker/main - sudo docker load -i nestjs-backend.tar - sudo docker-compose -f docker-compose.staging.yml up -d - EOF diff --git a/.github/workflows/production-deployment.yaml b/.github/workflows/production-deployment.yaml new file mode 100644 index 000000000..7ce95863d --- /dev/null +++ b/.github/workflows/production-deployment.yaml @@ -0,0 +1,47 @@ +name: Production Deployment + +on: + workflow_dispatch: + push: + branches: + - main + +jobs: + build-and-push: + if: github.event.repository.fork == false + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Build Docker image + run: docker build -t nestjs_prod:green . + + - name: Save and compress Docker image + run: | + docker save nestjs_prod:green | gzip > nestjs_prod.tar.gz + + - name: Copy image to server + uses: appleboy/scp-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + source: "nestjs_prod.tar.gz" + target: "/tmp" + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + environment: + name: "production" + url: ${{ vars.URL }} + steps: + - name: Deploy on server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + script: | + ./deploy.sh prod diff --git a/.github/workflows/staging-deployment.yaml b/.github/workflows/staging-deployment.yaml new file mode 100644 index 000000000..b7d96b889 --- /dev/null +++ b/.github/workflows/staging-deployment.yaml @@ -0,0 +1,47 @@ +name: Staging Deployment + +on: + workflow_dispatch: + push: + branches: + - staging + +jobs: + build-and-push: + if: github.event.repository.fork == false + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Build Docker image + run: docker build -t nestjs_staging:green . 
+ + - name: Save and compress Docker image + run: | + docker save nestjs_staging:green | gzip > nestjs_staging.tar.gz + + - name: Copy image to server + uses: appleboy/scp-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + source: "nestjs_staging.tar.gz" + target: "/tmp" + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + environment: + name: "staging" + url: ${{ vars.URL }} + steps: + - name: Deploy on server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + password: ${{ secrets.PASSWORD }} + script: | + ./deploy.sh staging diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml deleted file mode 100644 index a9fb535c4..000000000 --- a/.github/workflows/staging.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: CI/CD--Staging - -on: - pull_request: - branches: - - staging - push: - branches: - - staging - -env: - DB_USERNAME: ${{ secrets.DB_USERNAME }} - DB_PASSWORD: ${{ secrets.DB_PASSWORD }} - DB_DATABASE: ${{ secrets.DB_DATABASE }} - DB_HOST: ${{ secrets.DB_HOST }} - DB_PORT: ${{ secrets.DB_PORT }} - DB_ENTITIES: ${{ secrets.DB_ENTITIES }} - DB_MIGRATIONS: ${{ secrets.DB_MIGRATIONS }} - GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }} - GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }} - GOOGLE_REDIRECT_URI: ${{ secrets.GOOGLE_REDIRECT_URI }} - REDIS_HOST: ${{ secrets.REDIS_HOST }} - DB_TYPE: 'postgres' - PROFILE: 'staging' - NODE_ENV: 'development' - PORT: 3000 - REDIS_PORT: 6379 - -jobs: - test-and-build-staging: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Generate migrations - run: npm run migration:generate - - - name: Run migrations - run: npm run migration:run - - - name: Start application - run: | - npm run start:prod > app.log 2>&1 & - APP_PID=$! - echo $APP_PID - echo "Application started with PID $APP_PID" - sleep 30 # Wait for the application to start - tail -f app.log & - # Check application status with curl - if curl --retry 5 --retry-delay 5 --max-time 10 http://localhost:3000/health; then - echo "Application is up and running." - else - echo "Application failed to start. Logs:" - cat app.log - echo "Exiting workflow due to application failure." - kill $APP_PID - exit 1 - fi - - kill $APP_PID - echo "Application terminated Successfully." 
- - - name: Revert Migrations - run: npm run migration:revert - if: always() - - deploy-staging: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - # key: ${{ secrets.SERVER_PRIVATE_KEY }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/staging-deployment.sh diff --git a/.gitignore b/.gitignore index f85daeb0b..412e13690 100644 --- a/.gitignore +++ b/.gitignore @@ -389,9 +389,9 @@ Temporary Items # Local dist /.env -/.env.*.local +/.env.* *._local.ts # Migrations /db/migrations/*-migration.ts -/db \ No newline at end of file +/db diff --git a/dockerfile b/Dockerfile similarity index 77% rename from dockerfile rename to Dockerfile index 24f39ea8f..cf03f6409 100644 --- a/dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Use the official Node.js image as the base image -FROM node:20-slim +FROM node:20-alpine # Set the working directory inside the container WORKDIR /app @@ -12,8 +12,7 @@ RUN npm install RUN npm run build -EXPOSE 3100 +EXPOSE 5000 # Command to run the application -# CMD ["npm", "run", "start:prod"] -CMD [ "./start.sh" ] \ No newline at end of file +CMD ["npm", "run", "start:prod"] diff --git a/compose.override.yaml b/compose.override.yaml new file mode 100644 index 000000000..a6e378b04 --- /dev/null +++ b/compose.override.yaml @@ -0,0 +1,4 @@ +services: + nginx: + ports: + - 5000:80 diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 000000000..80344a477 --- /dev/null +++ b/compose.yaml @@ -0,0 +1,65 @@ +name: nestjs + +services: + app: + image: ${COMPOSE_PROJECT_NAME} + build: . 
+ env_file: + - .env + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + healthcheck: + test: "wget -qO- http://app:${PORT}" + interval: 10s + timeout: 10s + retries: 3 + + db: + image: postgres:16-alpine + env_file: + - .env + environment: + - POSTGRES_USER=${DB_USERNAME} + - POSTGRES_PASSWORD=${DB_PASSWORD} + - POSTGRES_DB=${DB_NAME} + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: "pg_isready -U postgres" + interval: 5s + timeout: 5s + retries: 3 + restart: always + + redis: + image: redis:7-alpine + env_file: + - .env + volumes: + - redis_data:/data + healthcheck: + test: "redis-cli ping | grep PONG" + interval: 5s + timeout: 5s + retries: 3 + restart: always + + nginx: + image: nginx:alpine + volumes: + - ./nginx/nginx.conf:/etc/nginx/nginx.conf + depends_on: + app: + condition: service_healthy + healthcheck: + test: "wget -qO- http://nginx:80" + interval: 5s + timeout: 5s + retries: 3 + +volumes: + postgres_data: + redis_data: diff --git a/compose/compose.dev.yaml b/compose/compose.dev.yaml new file mode 100644 index 000000000..555a3d039 --- /dev/null +++ b/compose/compose.dev.yaml @@ -0,0 +1,17 @@ +name: nestjs_dev + +services: + app: + env_file: + - .env.dev + + db: + env_file: + - .env.dev + + redis: + env_file: + - .env.dev + nginx: + ports: + - 5000:80 diff --git a/compose/compose.green.yaml b/compose/compose.green.yaml new file mode 100644 index 000000000..19e9a0913 --- /dev/null +++ b/compose/compose.green.yaml @@ -0,0 +1,23 @@ +services: + app-green: + image: ${COMPOSE_PROJECT_NAME}:green + build: . + env_file: + - .env.${ENV} + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + healthcheck: + test: "wget -qO- http://localhost:${PORT}" + interval: 10s + timeout: 10s + retries: 3 + + nginx: + volumes: + - ./nginx/nginx-green.conf:/etc/nginx/nginx.conf + depends_on: + app-green: + condition: service_healthy diff --git a/compose/compose.prod.yaml b/compose/compose.prod.yaml new file mode 100644 index 000000000..b09ecd3f7 --- /dev/null +++ b/compose/compose.prod.yaml @@ -0,0 +1,17 @@ +name: nestjs_prod + +services: + app: + env_file: + - .env.prod + + db: + env_file: + - .env.prod + + redis: + env_file: + - .env.prod + nginx: + ports: + - 5002:80 diff --git a/compose/compose.staging.yaml b/compose/compose.staging.yaml new file mode 100644 index 000000000..d4cc2f434 --- /dev/null +++ b/compose/compose.staging.yaml @@ -0,0 +1,17 @@ +name: nestjs_staging + +services: + app: + env_file: + - .env.staging + + db: + env_file: + - .env.staging + + redis: + env_file: + - .env.staging + nginx: + ports: + - 5001:80 diff --git a/deploy.sh b/deploy.sh new file mode 100755 index 000000000..f14695a98 --- /dev/null +++ b/deploy.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +set -e + +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +if [ -z "$1" ]; then + echo -e "${RED}Usage: $0 ${NC}" + exit 1 +elif [[ "$1" != "dev" && "$1" != "staging" && "$1" != "prod" ]]; then + echo -e "${RED}Invalid environment specified. 
Use dev, staging, or prod.${NC}" + exit 1 +fi + +export ENV=$1 +BRANCH=$1 +PROJECT_NAME="nestjs_$ENV" + +if [ "$ENV" == "prod" ]; then + BRANCH="main" +fi + +echo -e "${BLUE}Preparing to deploy to the $ENV environment...${NC}" +echo -e "${YELLOW}Environment: $ENV${NC}" +echo -e "${YELLOW}Branch: $BRANCH${NC}" + +echo -e "${GREEN}Loading Docker image from /tmp/nestjs_${ENV}.tar.gz...${NC}" +gunzip -c /tmp/nestjs_${ENV}.tar.gz | docker load +rm -f /tmp/nestjs_${ENV}.tar.gz + +echo -e "${GREEN}Stashing local changes and Pulling the latest changes from branch $BRANCH...${NC}" +git add . +git stash +git checkout $BRANCH +git pull origin $BRANCH + +echo -e "${BLUE}Starting Blue-Green deployment for environment: $ENV...${NC}" + +echo -e "${GREEN}Deploying the green version of the app...${NC}" +docker compose -f compose.yaml -f compose/compose.$ENV.yaml -f compose/compose.green.yaml up -d --no-recreate + +echo -e "${GREEN}Transferring traffic to green environment...${NC}" +docker compose -f compose.yaml -f compose/compose.$ENV.yaml -f compose/compose.green.yaml create nginx + +echo -e "${YELLOW}Cleaning up the blue (old) containers and image...${NC}" +docker compose -f compose.yaml -f compose/compose.$ENV.yaml stop app +docker compose -f compose.yaml -f compose/compose.$ENV.yaml rm -f app +docker rmi -f ${PROJECT_NAME}:latest + +echo -e "${GREEN}Promoting the green version to blue (main version)...${NC}" +docker tag ${PROJECT_NAME}:green ${PROJECT_NAME}:latest + +echo -e "${BLUE}Starting the blue (main) version of the app...${NC}" +docker compose -f compose.yaml -f compose/compose.$ENV.yaml up -d + +echo -e "${YELLOW}Cleaning up the green version after promotion...${NC}" +docker compose -f compose.yaml -f compose/compose.$ENV.yaml -f compose/compose.green.yaml stop app-green +docker compose -f compose.yaml -f compose/compose.$ENV.yaml -f compose/compose.green.yaml rm -f app-green +docker rmi -f ${PROJECT_NAME}:green + +echo -e "${GREEN}Blue-Green deployment complete. The blue version is now live.${NC}" diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml deleted file mode 100644 index 297001a2e..000000000 --- a/docker-compose.prod.yml +++ /dev/null @@ -1,33 +0,0 @@ -version: '3.3' - -services: - # NestJS Application Service - app: - build: . - container_name: team-alpha - ports: - - '3200:3200' - # entrypoint: ./start.sh - env_file: - - ./.env - depends_on: - - db - - redis - - # PostgreSQL Service - db: - image: postgres:16 - container_name: postgres-db - env_file: - - ./.env - ports: - - '5679:5432' - volumes: - - /var/lib/postgresql/data:/var/lib/postgres> - - # Redis Service - redis: - image: redis:7 - container_name: redis-server - ports: - - '6377:6379' diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml deleted file mode 100644 index eb5a9adfe..000000000 --- a/docker-compose.staging.yml +++ /dev/null @@ -1,33 +0,0 @@ -version: '3.3' - -services: - # NestJS Application Service - app: - build: . 
- container_name: team-alpha - ports: - - '3100:3100' - # entrypoint: ./start.sh - env_file: - - ./.env - depends_on: - - db - - redis - - # PostgreSQL Service - db: - image: postgres:16 - container_name: postgres-db - env_file: - - ./.env - ports: - - '5678:5432' - volumes: - - /var/lib/postgresql/data:/var/lib/postgres> - - # Redis Service - redis: - image: redis:7 - container_name: redis-server - ports: - - '6378:6379' diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index eb5a9adfe..000000000 --- a/docker-compose.yml +++ /dev/null @@ -1,33 +0,0 @@ -version: '3.3' - -services: - # NestJS Application Service - app: - build: . - container_name: team-alpha - ports: - - '3100:3100' - # entrypoint: ./start.sh - env_file: - - ./.env - depends_on: - - db - - redis - - # PostgreSQL Service - db: - image: postgres:16 - container_name: postgres-db - env_file: - - ./.env - ports: - - '5678:5432' - volumes: - - /var/lib/postgresql/data:/var/lib/postgres> - - # Redis Service - redis: - image: redis:7 - container_name: redis-server - ports: - - '6378:6379' diff --git a/dora/main.py b/dora/main.py deleted file mode 100644 index 5be58c988..000000000 --- a/dora/main.py +++ /dev/null @@ -1,104 +0,0 @@ -from dora import Dora -from prometheus_client import start_http_server, Gauge -import requests -import time -import datetime -import os -from dotenv import load_dotenv - -load_dotenv() - -dora = Dora() - - -deployment_frequency = Gauge('deployment_frequency', 'Number of deployments per day') -lead_time = Gauge('lead_time_for_changes', 'Time from code commit to code successfully running in production') -change_failure_rate = Gauge('change_failure_rate', 'Percentage of deployments causing a failure in production') -mttr = Gauge('mean_time_to_recovery', 'Time to restore service after a production failure') - -start_http_server(8090) - -# GitHub API configuration -GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN') -REPO_OWNER = os.environ.get('REPO_OWNER') -REPO_NAME = os.environ.get('REPO_NAME') -HEADERS = {'Authorization': f'token {GITHUB_TOKEN}'} - -def get_github_stats(): - url = f'https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/stats/code_frequency' - response = requests.get(url, headers=HEADERS) - if response.status_code == 200: - return response.json() - else: - print(f"Failed to fetch GitHub stats: {response.status_code}") - return [] - -def calculate_deployment_frequency(): - stats = get_github_stats() - now = datetime.datetime.now() - four_weeks_ago = now - datetime.timedelta(weeks=4) - - total_changes = sum(abs(stat[1]) + abs(stat[2]) for stat in stats - if datetime.datetime.fromtimestamp(stat[0]) > four_weeks_ago) - - - significant_changes = total_changes // 10 - return significant_changes / 14 - -def calculate_lead_time(): - - stats = get_github_stats() - if not stats: - return 24 * 60 * 60 - - last_week_changes = abs(stats[-1][1]) + abs(stats[-1][2]) - if last_week_changes == 0: - return 24 * 60 * 60 - - - return max(2 * 60 * 60, 24 * 60 * 60 / (last_week_changes / 100)) - -def calculate_change_failure_rate(): - - stats = get_github_stats() - if not stats: - return 5.0 - - last_week_changes = abs(stats[-1][1]) + abs(stats[-1][2]) - - return min(10.0, 5.0 + (last_week_changes / 10000)) - -def calculate_mttr(): - - stats = get_github_stats() - if not stats: - return 2 * 60 * 60 - - last_week_changes = abs(stats[-1][1]) + abs(stats[-1][2]) - - return min(4 * 60 * 60, 2 * 60 * 60 + (last_week_changes / 1000) * 60) - -def update_metrics(): - dep_freq = 
calculate_deployment_frequency() - lt = calculate_lead_time() - cfr = calculate_change_failure_rate() - mttr_val = calculate_mttr() - - deployment_frequency.set(dep_freq) - lead_time.set(lt) - change_failure_rate.set(cfr) - mttr.set(mttr_val) - - - -dora.configure_exporter( - exporter_type="prometheus", - metric_endpoint="http://localhost:8090" -) - - -if __name__ == "__main__": - while True: - update_metrics() - dora.export() - time.sleep(3600) diff --git a/nginx/nginx-green.conf b/nginx/nginx-green.conf new file mode 100644 index 000000000..19805c30b --- /dev/null +++ b/nginx/nginx-green.conf @@ -0,0 +1,16 @@ +worker_processes 1; + +events { worker_connections 1024; } + +http { + + resolver 127.0.0.11 valid=1s; + + server { + listen 80; + + location / { + proxy_pass http://app-green:5000; + } + } +} diff --git a/nginx/nginx.conf b/nginx/nginx.conf new file mode 100644 index 000000000..1a86ce57b --- /dev/null +++ b/nginx/nginx.conf @@ -0,0 +1,16 @@ +worker_processes 1; + +events { worker_connections 1024; } + +http { + + resolver 127.0.0.11 valid=1s; + + server { + listen 80; + + location / { + proxy_pass http://app:5000; + } + } +} diff --git a/src/app.e2e.spec.ts b/src/app.e2e.spec.ts index 518e84b80..0c6b7511c 100644 --- a/src/app.e2e.spec.ts +++ b/src/app.e2e.spec.ts @@ -14,7 +14,7 @@ describe('Health Check Test', () => { it('should return healthy endpoint', async () => { const result = { message: 'This is a healthy endpoint', status_code: 200 }; - expect(await healthController.health()).toStrictEqual(result); + expect(await healthController.health()).toMatchObject(result); }); }); diff --git a/src/database/data-source.ts b/src/database/data-source.ts index 04819df79..3075de857 100644 --- a/src/database/data-source.ts +++ b/src/database/data-source.ts @@ -11,7 +11,7 @@ const dataSource = new DataSource({ username: process.env.DB_USERNAME, password: process.env.DB_PASSWORD, host: process.env.DB_HOST, - database: process.env.DB_DATABASE, + database: process.env.DB_NAME, entities: [process.env.DB_ENTITIES], migrations: [process.env.DB_MIGRATIONS], synchronize: isDevelopment, diff --git a/src/database/seeding/seeding.service.ts b/src/database/seeding/seeding.service.ts index c49a3abe0..ef06e7cef 100644 --- a/src/database/seeding/seeding.service.ts +++ b/src/database/seeding/seeding.service.ts @@ -26,7 +26,7 @@ import { Role } from '../../modules/role/entities/role.entity'; import { User } from '../../modules/user/entities/user.entity'; import { CreateAdminDto } from './dto/admin.dto'; import { CreateAdminResponseDto } from './dto/create-admin-response.dto'; -import { OrganisationUserRole } from 'src/modules/role/entities/organisation-user-role.entity'; +import { OrganisationUserRole } from '../../modules/role/entities/organisation-user-role.entity'; @Injectable() export class SeedingService { diff --git a/src/health.controller.ts b/src/health.controller.ts index 90c3676e3..7b7ad9460 100644 --- a/src/health.controller.ts +++ b/src/health.controller.ts @@ -1,5 +1,6 @@ import { Controller, Get } from '@nestjs/common'; import { skipAuth } from './helpers/skipAuth'; +import * as os from 'os'; @Controller() export default class HealthController { @@ -24,6 +25,22 @@ export default class HealthController { @skipAuth() @Get('health') public health() { - return { status_code: 200, message: 'This is a healthy endpoint' }; + const networkInterfaces = os.networkInterfaces(); + let localIpAddress = 'Not available'; + + // Iterate over network interfaces to find the first non-internal IPv4 address + for 
(const interfaceKey in networkInterfaces) { + const interfaceDetails = networkInterfaces[interfaceKey]; + for (const detail of interfaceDetails) { + if (detail.family === 'IPv4' && !detail.internal) { + localIpAddress = detail.address; + break; + } + } + if (localIpAddress !== 'Not available') break; + } + + return { status_code: 200, message: 'This is a healthy endpoint', ip: localIpAddress }; } } + diff --git a/src/modules/auth/auth.controller.ts b/src/modules/auth/auth.controller.ts index 5c94428c6..e564f4276 100644 --- a/src/modules/auth/auth.controller.ts +++ b/src/modules/auth/auth.controller.ts @@ -32,7 +32,7 @@ import { GenericAuthResponseDto } from './dto/generic-reponse.dto'; import { UpdatePasswordDto } from './dto/updatePasswordDto'; import { LoginErrorResponseDto } from './dto/login-error-dto'; import { UpdateUserPasswordResponseDTO } from './dto/update-user-password.dto'; -import { CustomHttpException } from 'src/helpers/custom-http-filter'; +import { CustomHttpException } from '../../helpers/custom-http-filter'; @ApiTags('Authentication') @Controller('auth') diff --git a/src/modules/auth/tests/auth.service.spec.ts b/src/modules/auth/tests/auth.service.spec.ts index de18a5605..e15f26d25 100644 --- a/src/modules/auth/tests/auth.service.spec.ts +++ b/src/modules/auth/tests/auth.service.spec.ts @@ -17,7 +17,7 @@ import { GoogleAuthService } from '../google-auth.service'; import { Profile } from '../../profile/entities/profile.entity'; import { CustomHttpException } from '../../../helpers/custom-http-filter'; import { OrganisationsService } from '../../../modules/organisations/organisations.service'; -import { Organisation } from 'src/modules/organisations/entities/organisations.entity'; +import { Organisation } from '../../../modules/organisations/entities/organisations.entity'; jest.mock('speakeasy'); diff --git a/src/modules/blogs/blogs.service.ts b/src/modules/blogs/blogs.service.ts index 21c5009b0..20bbf159b 100644 --- a/src/modules/blogs/blogs.service.ts +++ b/src/modules/blogs/blogs.service.ts @@ -226,7 +226,7 @@ export class BlogService { private validateEmptyValues(query: any): void { for (const key in query) { - if (query.hasOwnProperty(key) && query[key] !== undefined) { + if (Object.prototype.hasOwnProperty.call(query, key) && query[key] !== undefined) { const value = query[key]; if (typeof value === 'string' && !value.trim()) { throw new CustomHttpException(`${key.replace(/_/g, ' ')} value is empty`, HttpStatus.BAD_REQUEST); diff --git a/src/modules/notifications/dtos/mark-all-notifications-as-read.dto.ts b/src/modules/notifications/dtos/mark-all-notifications-as-read.dto.ts index 286a716fa..72c56e920 100644 --- a/src/modules/notifications/dtos/mark-all-notifications-as-read.dto.ts +++ b/src/modules/notifications/dtos/mark-all-notifications-as-read.dto.ts @@ -8,5 +8,5 @@ export class MarkAllNotificationAsReadResponse { type: 'object', properties: { notifications: { type: 'array', items: { type: 'string' }, example: [] } }, }) - data: {}; + data: object; } diff --git a/src/modules/products/products.controller.ts b/src/modules/products/products.controller.ts index a579d0126..7bbcbe895 100644 --- a/src/modules/products/products.controller.ts +++ b/src/modules/products/products.controller.ts @@ -23,7 +23,7 @@ import { INVALID_ORG_ID, INVALID_PRODUCT_ID } from '../../helpers/SystemMessages import { AddCommentDto } from '../comments/dto/add-comment.dto'; import { GetTotalProductsResponseDto } from './dto/get-total-products.dto'; import { SuperAdminGuard } from 
'../../guards/super-admin.guard'; -import { skipAuth } from 'src/helpers/skipAuth'; +import { skipAuth } from '../../helpers/skipAuth'; @ApiTags('Products') @Controller('') diff --git a/src/modules/profile/dto/upload-profile-pic.dto.ts b/src/modules/profile/dto/upload-profile-pic.dto.ts index 96bcf2fbb..5e46214bc 100644 --- a/src/modules/profile/dto/upload-profile-pic.dto.ts +++ b/src/modules/profile/dto/upload-profile-pic.dto.ts @@ -10,5 +10,5 @@ export class UploadProfilePicDto { }) @HasMimeType(['image/jpeg', 'image/png']) @MaxFileSize(2 * 1024 * 1024) - file: Express.Multer.File; + avatar: Express.Multer.File; } diff --git a/src/modules/profile/entities/profile.entity.ts b/src/modules/profile/entities/profile.entity.ts index ce8fb04a5..b77fead41 100644 --- a/src/modules/profile/entities/profile.entity.ts +++ b/src/modules/profile/entities/profile.entity.ts @@ -39,4 +39,5 @@ export class Profile extends AbstractBaseEntity { @DeleteDateColumn() deletedAt?: Date; + } diff --git a/src/modules/profile/profile.controller.ts b/src/modules/profile/profile.controller.ts index 2328885c4..73c51c88b 100644 --- a/src/modules/profile/profile.controller.ts +++ b/src/modules/profile/profile.controller.ts @@ -68,7 +68,7 @@ export class ProfileController { @ApiOperation({ summary: 'Upload Profile Picture' }) @ApiResponse({ - status: 200, + status: 201, description: 'Profile picture uploaded successfully', }) @Post('upload-image') @@ -90,12 +90,12 @@ export class ProfileController { ) file: Express.Multer.File ): Promise<{ - status: number; - message: string; + status: string; + message: string }> { const userId = req.user.id; const uploadProfilePicDto = new UploadProfilePicDto() - uploadProfilePicDto.file = file; - return await this.profileService.uploadProfilePicture(userId, uploadProfilePicDto, BASE_URL); + uploadProfilePicDto.avatar = file + return await this.profileService.uploadProfilePicture(userId, uploadProfilePicDto, BASE_URL) } } diff --git a/src/modules/profile/profile.service.ts b/src/modules/profile/profile.service.ts index 525d243f5..ed21e670d 100644 --- a/src/modules/profile/profile.service.ts +++ b/src/modules/profile/profile.service.ts @@ -51,9 +51,11 @@ export class ProfileService { throw new NotFoundException('Profile not found'); } + const profileData = {...profile, avatar_url:profile.profile_pic_url} + const responseData = { message: 'Successfully fetched profile', - data: profile, + data:profileData, }; return responseData; @@ -122,8 +124,8 @@ export class ProfileService { userId: string, uploadProfilePicDto: UploadProfilePicDto, baseUrl: string - ): Promise<{ status: number; message: string; data: { profile_picture_url: string } }> { - if (!uploadProfilePicDto.file) { + ): Promise<{ status: string; message: string; data: { avatar_url: string } }> { + if (!uploadProfilePicDto.avatar) { throw new CustomHttpException(SYS_MSG.NO_FILE_FOUND, HttpStatus.BAD_REQUEST); } @@ -152,11 +154,11 @@ export class ProfileService { } } - const fileExtension = path.extname(uploadProfilePicDto.file.originalname); + const fileExtension = path.extname(uploadProfilePicDto.avatar.originalname); const fileName = `${userId}${fileExtension}`; const filePath = path.join(this.uploadsDir, fileName); - const fileStream = Readable.from(uploadProfilePicDto.file.buffer); + const fileStream = Readable.from(uploadProfilePicDto.avatar.buffer); const writeStream = fs.createWriteStream(filePath); return new Promise((resolve, reject) => { @@ -165,16 +167,16 @@ export class ProfileService { 
Logger.error(SYS_MSG.FILE_SAVE_ERROR, err.stack); reject(new CustomHttpException(SYS_MSG.FILE_SAVE_ERROR, HttpStatus.INTERNAL_SERVER_ERROR)); } else { - await sharp(uploadProfilePicDto.file.buffer).resize({ width: 200, height: 200 }).toFile(filePath); + await sharp(uploadProfilePicDto.avatar.buffer).resize({ width: 200, height: 200 }).toFile(filePath); profile.profile_pic_url = `${baseUrl}/uploads/${fileName}`; await this.profileRepository.update(profile.id, profile); const updatedProfile = await this.profileRepository.findOne({ where: { id: profile.id } }); resolve({ - status: HttpStatus.OK, + status: "success", message: SYS_MSG.PICTURE_UPDATED, - data: { profile_picture_url: updatedProfile.profile_pic_url }, + data: { avatar_url: updatedProfile.profile_pic_url }, }); } }); diff --git a/src/modules/profile/tests/profile.service.spec.ts b/src/modules/profile/tests/profile.service.spec.ts index d1c74ee22..7d0dfa0b3 100644 --- a/src/modules/profile/tests/profile.service.spec.ts +++ b/src/modules/profile/tests/profile.service.spec.ts @@ -189,11 +189,12 @@ describe('ProfileService', () => { buffer: Buffer.from('test'), originalname: 'test.jpg', }; - const mockUploadProfilePicDto = { file: mockFile as any }; - + const mockUploadProfilePicDto = { avatar: mockFile as any }; it('should throw an exception if no file is provided', async () => { - await expect(service.uploadProfilePicture(userId, { file: null }, baseUrl)).rejects.toThrow(CustomHttpException); + await expect(service.uploadProfilePicture(userId, { avatar: null }, baseUrl)).rejects.toThrow( + CustomHttpException + ); }); it('should throw an exception if user is not found', async () => { @@ -216,7 +217,6 @@ describe('ProfileService', () => { }); it('should delete previous profile picture if it exists', async () => { - jest.spyOn(userRepository, 'findOne').mockResolvedValue(mockUser); jest.spyOn(userRepository, 'findOne').mockResolvedValue(mockUser); @@ -238,7 +238,6 @@ describe('ProfileService', () => { }); it('should handle non-existent previous profile picture', async () => { - const mockResult: UpdateResult = { generatedMaps: [], raw: [], @@ -259,7 +258,6 @@ describe('ProfileService', () => { }); it('should save new profile picture and update profile', async () => { - jest.spyOn(userRepository, 'findOne').mockResolvedValue(mockUser); (sharp as jest.MockedFunction).mockReturnValue({ @@ -279,9 +277,9 @@ describe('ProfileService', () => { const result = await service.uploadProfilePicture(userId, mockUploadProfilePicDto, baseUrl); expect(result).toEqual({ - status: HttpStatus.OK, + status: 'success', message: PICTURE_UPDATED, - data: { profile_picture_url: `${baseUrl}/uploads/${userId}.jpg` }, + data: { avatar_url: `${baseUrl}/uploads/${userId}.jpg` }, }); expect(sharp).toHaveBeenCalled(); expect(profileRepository.update).toHaveBeenCalled(); diff --git a/src/modules/role/role.controller.ts b/src/modules/role/role.controller.ts index a226d9c08..6d1f5d9ae 100644 --- a/src/modules/role/role.controller.ts +++ b/src/modules/role/role.controller.ts @@ -7,7 +7,7 @@ import { AttachPermissionsDto, UpdateOrganisationRoleDto, } from './dto/update-organisation-role.dto'; -import { SuperAdminGuard } from 'src/guards/super-admin.guard'; +import { SuperAdminGuard } from '../../guards/super-admin.guard'; @ApiTags('organisation Settings') @UseGuards(SuperAdminGuard) diff --git a/wiki_readme/CICD-Pipeline.md b/wiki_readme/CICD-Pipeline.md deleted file mode 100644 index 02c105528..000000000 --- a/wiki_readme/CICD-Pipeline.md +++ /dev/null @@ -1,313 +0,0 @@ 
-### CI/CD Pipeline - -The CI/CD pipeline for our NestJS project automates the build, test, and deployment processes, ensuring code quality and efficient delivery. We utilize GitHub Actions to orchestrate these operations across different environments—development, staging, and production. - -#### GitHub Actions Workflow - -Our CI/CD workflow is defined across three GitHub Actions configuration files: `dev.yml`, `staging.yml`, and `main.yml`. These workflows handle the continuous integration and deployment tasks specific to their respective branches. Below are the key components of each workflow: - -1. **Build**: Checks out the codebase, sets up Node.js, installs dependencies, and builds the application. -2. **Test**: Executes unit tests to validate code quality and functionality. -3. **Deploy**: Deploys the application to the provided remote environment, controlled by branch-specific triggers. - -#### Detailed Workflow Configuration - -##### Development Environment (`dev.yml`) - -```yaml -name: CI/CD-Dev - -on: - pull_request: - branches: - - dev - push: - branches: - - dev - -jobs: - test-and-build-dev: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - deploy-push: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - # key: ${{ secrets.SERVER_PRIVATE_KEY }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/deployment.sh -``` - -This workflow is triggered on both pull requests and pushes to the `dev` branch. - -**1. Pull Request Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. - -**2. Push Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. -- **Deploying to Virtual Machine:** - - The workflow uses the `appleboy/ssh-action@v1.0.3` GitHub Action to establish an SSH connection to the development server. 
- - Authentication is handled securely using the server's host, username, and password, which are stored as encrypted secrets within the GitHub repository settings. - - Once connected, the workflow executes the `~/deployment.sh` script located on the server. This script handles the environment-specific deployment tasks, such as building and deploying the Docker image, updating environment variables, and restarting the application. - -##### Staging Environment (`staging.yml`) - -```yaml -name: CI/CD--Staging - -on: - pull_request: - branches: - - staging - push: - branches: - - staging - -jobs: - test-and-build-staging: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - deploy-staging: - runs-on: ubuntu-latest - # needs: test-and-build-main - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - # key: ${{ secrets.SERVER_PRIVATE_KEY }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/staging-deployment.sh -``` - -This workflow is very similar to the `dev.yml` workflow but is specifically designed for the `staging` branch and deploys to the staging environment. - -**1. Pull Request Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. - -**2. Push Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. -- **Deploying to Virtual Machine:** - - The workflow uses the `appleboy/ssh-action@v1.0.3` GitHub Action to establish an SSH connection to the staging server. - - Authentication is handled securely using the server's host, username, and password, which are stored as encrypted secrets within the GitHub repository settings. - - Once connected, the workflow executes the `~/staging-deployment.sh` script located on the server. This script handles the staging-specific deployment tasks, such as building and deploying the Docker image, updating environment variables, and restarting the application. 
- -##### Production Environment (`main.yml`) - -```yaml -name: CI/CD--Main - -on: - pull_request: - branches: - - main - push: - branches: - - main - -jobs: - test-and-build-main: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - deploy-main: - runs-on: ubuntu-latest - # needs: test-and-build-main - if: github.event_name == 'push' - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Install dependencies - run: npm install --include=dev - - - name: Build project - run: npm run build - - - name: Run tests - run: npm run test - - - name: Deploying to virtual machine - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - # key: ${{ secrets.SERVER_PRIVATE_KEY }} - password: ${{ secrets.SERVER_PASSWORD }} - port: ${{ secrets.SERVER_PORT }} - script: | - echo "hello" - export PATH=$PATH:/home/teamalpha/.nvm/versions/node/v20.15.1/bin - bash ~/main-deployment.sh -``` - -This workflow manages the deployment to the production environment and is triggered by pull requests and pushes to the `main` branch. - -**1. Pull Request Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. - -**2. Push Triggered:** - -- **Checkout Code:** The repository's code is checked out. -- **Set up Node.js:** The specified Node.js version (18) is set up on the runner. -- **Install Dependencies:** Project dependencies, including development dependencies, are installed using `npm install --include=dev`. -- **Build Project:** The project is built using `npm run build`. -- **Run Tests:** Unit and integration tests are executed to ensure code quality using `npm run test`. -- **Deploying to Virtual Machine:** - - The workflow uses the `appleboy/ssh-action@v1.0.3` GitHub Action to establish a secure SSH connection to the production server. - - Authentication is handled using server credentials (host, username, password) stored as encrypted secrets within the GitHub repository settings. - - Once connected, the workflow executes the `~/main-deployment.sh` script located on the production server. This script handles the production-specific deployment tasks, such as building and deploying the Docker image, updating environment variables, and restarting the application. - -#### Branching Strategy - -- **Main**: Represents the production-ready codebase. -- **Dev**: Serves as the main development branch. Changes here are merged into `main` for releases. -- **Staging**: Used for final testing before production. Deployed to a subdomain or a specific path mimicking production settings. 
- -#### Security and Secrets - -GitHub Secrets are used to securely handle deployment credentials and configurations, ensuring that sensitive information is not exposed in the workflow files. These secrets are set in the repository settings under "Settings > Secrets and variables." - -#### Deployment Scripts - -Each environment uses a custom deployment script (`deployment.sh`, `main-deployment.sh`, `staging-deployment.sh`) executed via SSH to the virtual machine. This script is responsible for additional setup tasks and bringing the application online in the respective environments. - -This setup not only ensures the application's stability and security but also facilitates a streamlined development-to-deployment flow. diff --git a/wiki_readme/Database-Setup.md b/wiki_readme/Database-Setup.md deleted file mode 100644 index 170856097..000000000 --- a/wiki_readme/Database-Setup.md +++ /dev/null @@ -1,73 +0,0 @@ -## Database Setup: PostgreSQL with Docker Compose - -This section of the documentation details the setup and configuration of the PostgreSQL databases for the development and staging environments using Docker Compose. - -**1. Docker Compose Configuration** - -The `docker-compose.yml` file defines the configuration for both PostgreSQL databases: - -```yaml -version: '3.3' -services: - postgresdb-prod: - image: postgres:13 - restart: always - env_file: - - .env - ports: - - '5432:5432' - volumes: - - db-data-prod:/var/lib/postgresql/data - postgresdb-staging: - image: postgres:13 - restart: always - env_file: - - ./.env.staging - ports: - - '5433:5432' - volumes: - - db-data-staging:/var/lib/postgresql/data - -volumes: - db-data-prod: - driver: local - db-data-staging: - driver: local -``` - -**2. Database Details** - -- **`postgresdb-prod`:** - - - **Image:** `postgres:13` (Official PostgreSQL 13 image) - - **Environment Variables:** Loaded from `.env` file. - - **Port Mapping:** Exposes PostgreSQL on port `5432` on the host machine. - - **Data Persistence:** Uses a named volume `db-data-prod` to persist database data. - -- **`postgresdb-staging`:** - - **Image:** `postgres:13` - - **Environment Variables:** Loaded from `.env.staging` file. - - **Port Mapping:** Exposes PostgreSQL on port `5433` on the host machine. - - **Data Persistence:** Uses a named volume `db-data-staging` to persist data. - -**3. Environment Variables** - -- **`.env` and `.env.staging`:** These files contain environment-specific configurations for the databases, such as: - - `POSTGRES_DB`: Database name - - `POSTGRES_USER`: Database username - - `POSTGRES_PASSWORD`: Database password - -**4. Data Persistence** - -Both database containers use Docker volumes (`db-data-prod` and `db-data-staging`) to ensure data persistence. This means that even if the containers are stopped and restarted, the database data will be preserved. - -**5. Accessing the Databases** - -- **`postgresdb-prod`:** Accessible from the host machine via `localhost:5432`. -- **`postgresdb-staging`:** Accessible from the host machine via `localhost:5433`. - -**6. Connecting from the Application** - -The NestJS application is configured to connect to the appropriate database based on the current environment using the environment variables loaded from the respective `.env` files. - -This section outlines the configuration and setup of the PostgreSQL databases using Docker Compose. It ensures data isolation between environments and facilitates easy management and scaling of the database infrastructure. 
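The database layout documented in the removed wiki page above is superseded in this change set by the root `compose.yaml` plus the per-environment overrides under `compose/`, where the single `db` service takes its credentials from `.env.<env>` files and no longer publishes a host port. As a rough illustration (a sketch, not a script shipped with this change), the staging database can be brought up on its own with the standard Compose override mechanism; the service name `db`, the project name `nestjs_staging`, and the `.env.staging` file all come from the compose files added above, while the shell and working directory are assumptions:

```bash
# Illustrative sketch only: start just the staging database using the new
# per-environment Compose layout. File, project, and service names are taken
# from compose.yaml and compose/compose.staging.yaml added in this change set.
docker compose -f compose.yaml -f compose/compose.staging.yaml up -d db

# The db service carries a pg_isready healthcheck in compose.yaml; check its
# state before starting dependent services.
docker compose -f compose.yaml -f compose/compose.staging.yaml ps db
```

The same pattern applies to `compose/compose.dev.yaml` and `compose/compose.prod.yaml`, which differ only in project name, env file, and the nginx port mapping.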
diff --git a/wiki_readme/Home.md b/wiki_readme/Home.md
deleted file mode 100644
index 86dd9bfc1..000000000
--- a/wiki_readme/Home.md
+++ /dev/null
@@ -1,167 +0,0 @@
-## NestJS Boilerplate Project Documentation
-
-Welcome to the comprehensive documentation for our NestJS boilerplate project. This guide will walk you through the setup and integration of advanced CI/CD pipelines, messaging queues, and deployment processes using GitHub Actions. Our goal is to automate build, test, and deployment tasks, improve service communication via messaging queues, and ensure reliable deployments.
-
-**Table of Contents**
-
-- [NestJS Boilerplate Project Documentation](#nestjs-boilerplate-project-documentation)
-- [Introduction](#introduction)
-- [CI/CD Setup](#cicd-setup)
-  - [Choosing CI/CD Tool](#choosing-cicd-tool)
-  - [Pipeline Setup](#pipeline-setup)
-  - [Branching Strategy](#branching-strategy)
-- [Database Setup](#database-setup)
-  - [Configuration](#configuration)
-- [Messaging Queue Integration](#messaging-queue-integration)
-  - [RabbitMQ Setup](#rabbitmq-setup)
-  - [Integration with NestJS](#integration-with-nestjs)
-- [Deployment](#deployment)
-  - [Server Setup](#server-setup)
-  - [Deployment Process](#deployment-process)
-    - [Steps for Deployment](#steps-for-deployment)
-  - [Domain Name Configuration](#domain-name-configuration)
-- [Documentation](#documentation)
-  - [CI/CD Pipelines](#cicd-pipelines)
-  - [Messaging Queue Integration](#messaging-queue-integration-1)
-  - [NGINX Configuration](#nginx-configuration)
-  - [Database Setup](#database-setup-1)
-- [Getting Started](#getting-started)
-
-## Introduction
-
-This project aims to streamline the management, deployment, and communication of boilerplate projects. Using GitHub Actions for CI/CD, RabbitMQ for messaging queues, and Docker for database management, we ensure a robust and efficient development environment.
-
-## CI/CD Setup
-
-The CI/CD pipeline automates the build, test, and deployment processes of the application, ensuring code quality and efficient delivery.
-
-### Choosing CI/CD Tool
-
-For this project, we selected GitHub Actions due to its seamless integration with our GitHub repository and powerful automation capabilities.
-
-### Pipeline Setup
-
-We have configured the CI/CD pipelines to automate the build, test, and deployment processes for the NestJS boilerplate project. The pipeline runs on each pull request and push to the `dev`, `staging`, and `main` branches.
-
-CI/CD Workflow
-
-- **Build**: This job checks out the codebase, installs dependencies, and builds the application.
-- **Test**: Executes unit and integration tests to ensure code quality and functionality.
-- **Deploy**: Deploys the application to the designated environment (development, staging, or production).
-
-The workflow is triggered on every push to the repository and on pull requests.
-
-### Branching Strategy
-
-The project follows a Gitflow-like branching strategy:
-
-- **main**: Represents the production-ready codebase.
-- **dev**: The main development branch. Merged into main for releases.
-- **staging**: The main staging branch.
-
-## Database Setup
-
-Two separate PostgreSQL databases are set up using Docker containers: one for the development environment and one for production. This approach ensures data isolation and allows for independent database configurations.
-
-### Configuration
-
-The databases are configured in the `docker-compose.yml` file and connected to the application through environment variables.
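The real `.env` and `.env.staging` files are not committed, so the values below are placeholders that only illustrate the database-related variables each PostgreSQL container defined in `docker-compose.yml` expects; the variable names beyond `POSTGRES_DB`, `POSTGRES_USER`, and `POSTGRES_PASSWORD`, and all values, are assumptions.

```bash
# .env — consumed by postgresdb-prod (placeholder values, illustrative only)
POSTGRES_DB=boilerplate_prod
POSTGRES_USER=boilerplate_user
POSTGRES_PASSWORD=change-me

# .env.staging — consumed by postgresdb-staging; the staging container is published
# on host port 5433, so application settings for staging must point there.
# POSTGRES_DB=boilerplate_staging
# POSTGRES_USER=boilerplate_user
# POSTGRES_PASSWORD=change-me-too
```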
-
-## Messaging Queue Integration
-
-We utilize RabbitMQ as the message broker for this project.
-
-### RabbitMQ Setup
-
-RabbitMQ is installed and runs on the remote server.
-
-### Integration with NestJS
-
-The NestJS application is configured to connect to the RabbitMQ server. We use the `@nestjs/microservices` package to implement message producers and consumers within the application.
-
-## Deployment
-
-The deployment process is automated through the CI/CD pipeline, aiming for 99% uptime. Projects are accessible via their respective domain names, and DNS settings are configured accordingly.
-
-### Server Setup
-
-The application is deployed to a remote server. Ensure the server meets the following requirements:
-
-- Node.js and npm installed
-- Docker and Docker Compose installed
-- Nginx installed and configured as a reverse proxy
-
-### Deployment Process
-
-The CI/CD pipeline builds and tests the application to ensure code quality and functionality. The deployment process involves several steps to set up and configure the environment, including database setup, RabbitMQ installation, NGINX proxy configuration, and using PM2 as the process manager to reload the NestJS application.
-
-#### Steps for Deployment
-
-1. **CI/CD Pipeline Execution**
-
-   - The pipeline is triggered by a push or pull request to the repository.
-   - The pipeline checks out the code, installs dependencies, runs tests, and builds the project.
-   - Upon successful build and test, the pipeline deploys the application to the target environment (development, staging, or production).
-
-2. **Database Setup**
-
-   - Two separate PostgreSQL databases are configured using Docker containers: one for development and one for production.
-   - Docker Compose is used to manage the database containers, ensuring they are isolated and correctly configured.
-   - Environment variables are set to connect the NestJS application to the appropriate database.
-
-3. **RabbitMQ Installation**
-
-   - RabbitMQ is installed on the remote server to handle messaging queues for the application.
-   - The RabbitMQ service is configured to start on boot and is integrated into the NestJS application using environment variables.
-
-4. **NGINX Proxy Configuration**
-
-   - NGINX is used as a reverse proxy to route incoming requests to the NestJS application.
-   - The NGINX configuration file is set up to forward requests to the appropriate port where the NestJS application is running.
-   - SSL certificates are configured in NGINX for secure communication if necessary.
-   - The NGINX service is restarted to apply the new configuration.
-
-5. **Using PM2 as the Process Manager**
-
-   - PM2 is used to manage the NestJS application processes.
-   - The application is started using PM2, which ensures it runs in the background and restarts automatically on failure.
-   - The deployment script reloads the application using PM2 to apply any new changes.
-
-6. **Deployment Script Execution**
-
-   - A deployment script (`deployment.sh`, `main-deployment.sh`, `staging-deployment.sh`) is used to automate the deployment tasks.
-   - The script includes steps to pull the latest code, install dependencies, build the project, and reload the application using PM2.
-   - Environment variables are sourced to ensure all configurations are correctly applied.
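Step 6 describes the deployment scripts only at a high level; the following is a minimal sketch of the pull–install–build–reload sequence under PM2 that it outlines. The directory, PM2 process name, and entry-point path are illustrative assumptions, not the contents of the real scripts.

```bash
#!/usr/bin/env bash
# Hypothetical deployment.sh — a sketch of the steps listed above, not the real script.
set -euo pipefail

cd ~/hng_boilerplate_nestjs   # assumed checkout location on the server

# Pull the latest code and refresh dependencies.
git pull
npm install --include=dev

# Rebuild the application.
npm run build

# Reload the app under PM2, or start it on the first deployment.
pm2 reload nestjs-boilerplate || pm2 start dist/main.js --name nestjs-boilerplate
```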
-
-### Domain Name Configuration
-
-- We configured DNS records for [Main](https://api-nestjs.boilerplate.hng.tech),
-  [Staging](https://staging.api-nestjs.boilerplate.hng.tech), and
-  [Dev](https://deployment.api-nestjs.boilerplate.hng.tech) to point to the server's IP address.
-- Set up Nginx to act as a reverse proxy, directing traffic to the appropriate application based on the domain name.
-
-## Documentation
-
-All processes are documented comprehensively in this GitHub Wiki. Each section covers detailed steps, configurations, and commands used in the setup and deployment of the application.
-
-### [CI/CD Pipelines](cicd-pipeline)
-
-Automated processes for build, test, and deployment are detailed, including YAML configurations and environment setups.
-
-### [Messaging Queue Integration](rabbitmq-installation-and-setup)
-
-Documentation covers the installation, configuration, and integration of RabbitMQ into the NestJS project.
-
-### [NGINX Configuration](nginx-configuration)
-
-This outlines the NGINX configuration used for routing traffic to our NestJS applications deployed on different ports and subdomains.
-
-### [Database Setup](database-setup)
-
-Step-by-step guides on setting up and configuring Postgres databases in Docker containers for development and production.
-
-## Getting Started
-
-1. Clone the repository.
-2. Install dependencies: `npm install`
-3. Configure environment variables (database connection details, RabbitMQ credentials, etc.).
-4. Start the application: `npm run start:dev`, `npm run start:staging`, or `npm run start:prod`
diff --git a/wiki_readme/NGINX-Configuration.md b/wiki_readme/NGINX-Configuration.md
deleted file mode 100644
index e2f4b17bf..000000000
--- a/wiki_readme/NGINX-Configuration.md
+++ /dev/null
@@ -1,102 +0,0 @@
-## NGINX Configuration for NestJS Applications
-
-This outlines the NGINX configuration used for routing traffic to our NestJS applications deployed on different ports and subdomains.
-
-**Configuration Breakdown:**
-
-The provided NGINX configuration defines four server blocks:
-
-**1. Production Server (api-nestjs.boilerplate.hng.tech:443)**
-
-```nginx
-server {
-    server_name api-nestjs.boilerplate.hng.tech;
-
-    access_log /var/log/nginx/access.log;
-    error_log /var/log/nginx/error.log;
-
-    location / {
-        proxy_pass http://localhost:3007;
-    }
-
-    listen 443 ssl;
-    ssl_certificate /etc/letsencrypt/live/api-nestjs.boilerplate.hng.tech/fullchain.pem;
-    ssl_certificate_key /etc/letsencrypt/live/api-nestjs.boilerplate.hng.tech/privkey.pem;
-    include /etc/letsencrypt/options-ssl-nginx.conf;
-    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
-}
-```
-
-- This block handles HTTPS traffic for the main application domain `api-nestjs.boilerplate.hng.tech`.
-- It listens on port 443 (the default HTTPS port).
-- Traffic is forwarded to the NestJS application running on `http://localhost:3007` using `proxy_pass`.
-- SSL is enabled using certificates obtained from Let's Encrypt.
-
-**2. Deployment Server (deployment.api-nestjs.boilerplate.hng.tech:443)**
-
-```nginx
-server {
-    listen 443 ssl;
-    server_name deployment.api-nestjs.boilerplate.hng.tech;
-
-    ssl_certificate /etc/letsencrypt/live/deployment.api-nestjs.boilerplate.hng.tech/fullchain.pem;
-    ssl_certificate_key /etc/letsencrypt/live/deployment.api-nestjs.boilerplate.hng.tech/privkey.pem;
-
-    access_log /var/log/nginx/access.log;
-    error_log /var/log/nginx/error.log;
-
-    location / {
-        proxy_pass http://localhost:3008;
-    }
-}
-```
-
-- This block handles HTTPS traffic for the subdomain `deployment.api-nestjs.boilerplate.hng.tech`, likely used for a deployment preview environment.
-- Traffic is forwarded to the application instance running on `http://localhost:3008`.
-- Similar to the production server, SSL is enabled with Let's Encrypt certificates.
-
-**3. Staging Server (staging.api-nestjs.boilerplate.hng.tech:443)**
-
-```nginx
-server {
-    listen 443 ssl;
-    server_name staging.api-nestjs.boilerplate.hng.tech;
-
-    ssl_certificate /etc/letsencrypt/live/staging.api-nestjs.boilerplate.hng.tech/fullchain.pem;
-    ssl_certificate_key /etc/letsencrypt/live/staging.api-nestjs.boilerplate.hng.tech/privkey.pem;
-
-    access_log /var/log/nginx/access.log;
-    error_log /var/log/nginx/error.log;
-
-    location / {
-        proxy_pass http://localhost:3009;
-    }
-}
-```
-
-- This block handles HTTPS traffic for the subdomain `staging.api-nestjs.boilerplate.hng.tech`, likely used for a staging environment.
-- Traffic is forwarded to the application instance running on `http://localhost:3009`.
-- SSL is enabled using Let's Encrypt certificates.
-
-**4. HTTP Redirection Server (port 80)**
-
-```nginx
-server {
-    listen 80;
-    server_name api-nestjs.boilerplate.hng.tech staging.api-nestjs.boilerplate.hng.tech deployment.api-nestjs.boilerplate.hng.tech;
-
-    location /.well-known/acme-challenge/ {
-        allow all;
-        root /var/www/certbot;
-    }
-
-    location / {
-        return 301 https://$host$request_uri;
-    }
-}
-```
-
-- This block listens on port 80 (the default HTTP port) for all defined server names.
-- It handles two types of requests:
-  - Requests for the Let's Encrypt ACME challenge used for certificate renewal are allowed and served from `/var/www/certbot`.
-  - All other requests are redirected to their HTTPS equivalent using a 301 redirect (`return 301 https://$host$request_uri;`).
diff --git a/wiki_readme/RabbitMQ-Installation-and-Setup.md b/wiki_readme/RabbitMQ-Installation-and-Setup.md
deleted file mode 100644
index 1cc5172d5..000000000
--- a/wiki_readme/RabbitMQ-Installation-and-Setup.md
+++ /dev/null
@@ -1,124 +0,0 @@
-## RabbitMQ Installation and Setup
-
-This document details the process of installing and setting up RabbitMQ on a remote server, enabling its use as a message broker for the NestJS application.
-
-**Server Requirements:**
-
-- Ubuntu or a compatible Linux distribution
-- Root or sudo access
-- Stable internet connection
-
-### Installation Steps:
-
-1. **Update System Packages:**
-
-   ```bash
-   sudo apt-get update -y
-   ```
-
-2. **Install Prerequisite Packages:**
-
-   ```bash
-   sudo apt-get install curl gnupg apt-transport-https -y
-   ```
-
-   - **curl:** Used to download files from the internet.
-   - **gnupg:** Used for verifying package signatures.
-   - **apt-transport-https:** Enables HTTPS support for the APT package manager.
-
-3. **Import Signing Keys:**
-
-   ```bash
-   ## Team RabbitMQ's main signing key
-   curl -1sLf "https://keys.openpgp.org/vks/v1/by-fingerprint/0A9AF2115F4687BD29803A206B73A36E6026DFCA" | sudo gpg --dearmor | sudo tee /usr/share/keyrings/com.rabbitmq.team.gpg > /dev/null
-
-   ## Community mirror of Cloudsmith: modern Erlang repository
-   curl -1sLf https://github.com/rabbitmq/signing-keys/releases/download/3.0/cloudsmith.rabbitmq-erlang.E495BB49CC4BBE5B.key | sudo gpg --dearmor | sudo tee /usr/share/keyrings/rabbitmq.E495BB49CC4BBE5B.gpg > /dev/null
-
-   ## Community mirror of Cloudsmith: RabbitMQ repository
-   curl -1sLf https://github.com/rabbitmq/signing-keys/releases/download/3.0/cloudsmith.rabbitmq-server.9F4587F226208342.key | sudo gpg --dearmor | sudo tee /usr/share/keyrings/rabbitmq.9F4587F226208342.gpg > /dev/null
-   ```
-
-   - This step imports the necessary GPG keys for verifying the authenticity of the RabbitMQ and Erlang packages.
-
-4. **Add RabbitMQ Repositories:**
-
-   ```bash
-   sudo tee /etc/apt/sources.list.d/rabbitmq.list <