---
# CI/CD pipeline:
#   test   -> run Django test suite against Postgres 15 + Redis 7 service containers
#   build  -> build & push app and paperless images to GHCR (push events only)
#   deploy -> SSH to the production host and roll out (main branch only)
name: CI/CD Pipeline

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  test:
    runs-on: ubuntu-latest

    # Shared Django settings for every step in this job. These eight values
    # were previously duplicated verbatim in the env block of each of the
    # migrate/test/check/collectstatic steps; hoisting them here is equivalent
    # (same values reach the same commands) and removes the duplication.
    env:
      DJANGO_DEBUG: "1"
      DJANGO_SECRET_KEY: "test-secret-key-for-ci"
      POSTGRES_DB: "test_stiftung"
      POSTGRES_USER: "postgres"
      POSTGRES_PASSWORD: "postgres"
      DB_HOST: "localhost"
      DB_PORT: "5432"

    services:
      postgres:
        image: postgres:15
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: test_stiftung
        ports:
          # Quoted: unquoted digit:digit values are a YAML typing trap.
          - "5432:5432"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      redis:
        image: redis:7-alpine
        ports:
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted so 3.10 is not parsed as the float 3.1.
          python-version: '3.10'

      - name: Cache pip dependencies
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('app/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client
          python -m pip install --upgrade pip
          pip install -r app/requirements.txt

      - name: Set up environment
        run: |
          cp env-template.txt app/.env
          cd app
          echo "DJANGO_DEBUG=1" >> .env
          echo "DJANGO_SECRET_KEY=test-secret-key-for-ci" >> .env
          echo "POSTGRES_DB=test_stiftung" >> .env
          echo "POSTGRES_USER=postgres" >> .env
          echo "POSTGRES_PASSWORD=postgres" >> .env
          echo "DB_HOST=localhost" >> .env
          echo "DB_PORT=5432" >> .env
          echo "REDIS_URL=redis://localhost:6379/0" >> .env

      - name: Wait for PostgreSQL
        run: |
          while ! pg_isready -h localhost -p 5432 -U postgres; do
            echo "Waiting for PostgreSQL..."
            sleep 2
          done
          echo "PostgreSQL is ready!"

      - name: Run migrations
        working-directory: ./app
        run: |
          python manage.py migrate

      - name: Run tests
        working-directory: ./app
        run: |
          python manage.py test

      - name: Check Django configuration
        working-directory: ./app
        run: |
          python manage.py check --deploy

      - name: Collect static files
        working-directory: ./app
        run: |
          python manage.py collectstatic --noinput

  build:
    needs: test
    runs-on: ubuntu-latest
    # Build/push images only for pushes (not for pull requests).
    if: github.event_name == 'push'
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=sha,prefix={{branch}}-

      - name: Build and push Docker images
        uses: docker/build-push-action@v5
        with:
          context: ./app
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      - name: Build and push Paperless image
        uses: docker/build-push-action@v5
        with:
          context: ./paperless
          push: true
          tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-paperless:latest
          labels: ${{ steps.meta.outputs.labels }}

  deploy:
    needs: build
    runs-on: ubuntu-latest
    # Auto-deploy when pushing to main branch
    if: github.ref == 'refs/heads/main'
    environment: production

    steps:
      - name: Deploy to production
        uses: appleboy/ssh-action@v1.0.3
        env:
          DEPLOY_TOKEN: ${{ secrets.DEPLOY_TOKEN }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_ACTOR: ${{ github.actor }}
        with:
          host: ${{ secrets.PROD_HOST }}
          username: ${{ secrets.PROD_USERNAME }}
          key: ${{ secrets.PROD_SSH_KEY }}
          envs: DEPLOY_TOKEN,GITHUB_TOKEN,GITHUB_ACTOR
          script: |
            cd /opt/stiftung

            # Check if production .env exists in root directory
            if [ ! -f .env ]; then
              echo "ERROR: No production .env file found at /opt/stiftung/.env"
              echo "Please create it manually using the env-production.template as reference"
              echo "Steps:"
              echo "1. cp env-production.template .env"
              echo "2. nano .env # Edit with real production values"
              echo "3. chmod 600 .env # Secure permissions"
              exit 1
            fi

            echo "Production .env file found - proceeding with deployment"

            # Configure git pull strategy and force pull latest code changes
            git config pull.rebase false
            git fetch https://$DEPLOY_TOKEN@github.com/remmerinio/stiftung-management-system.git main
            git reset --hard FETCH_HEAD

            # The main compose.yml is already the correct production configuration
            # No need to copy from deploy-production since we use compose.yml directly
            echo "Using main compose.yml for production deployment"

            # Try to login to GitHub Container Registry and pull images
            echo "Attempting to pull images from GitHub Container Registry..."
            # $DEPLOY_TOKEN is quoted to prevent word splitting/globbing of the secret.
            if echo "$DEPLOY_TOKEN" | docker login ghcr.io -u remmerinio --password-stdin; then
              echo "✅ Successfully logged into GHCR"
              if docker-compose -f compose.yml pull web worker beat paperless; then
                echo "✅ Successfully pulled web and paperless images from GHCR"
                USE_REMOTE_IMAGES=true
              else
                echo "⚠️ Failed to pull images from GHCR, will build locally"
                USE_REMOTE_IMAGES=false
              fi
            else
              echo "⚠️ Failed to login to GHCR, will build locally"
              USE_REMOTE_IMAGES=false
            fi

            # Pull other standard images (redis, postgres, grampsweb)
            echo "Pulling standard Docker images..."
            docker-compose -f compose.yml pull db redis grampsweb || echo "Some standard images failed to pull, will use cached versions"

            # If we couldn't pull from GHCR, build locally
            if [ "$USE_REMOTE_IMAGES" = "false" ]; then
              echo "🔨 Building images locally from source code..."
              docker build -t ghcr.io/remmerinio/stiftung-management-system:latest ./app
              docker build -t ghcr.io/remmerinio/stiftung-management-system-paperless:latest ./paperless
            fi

            # Stop containers and clean up
            docker-compose -f compose.yml down
            docker system prune -f

            # Start containers with latest images
            docker-compose -f compose.yml up -d --no-build

            # Wait for containers to be ready
            echo "Waiting for containers to start..."
            sleep 30

            # Run migrations
            docker-compose -f compose.yml exec -T web python manage.py migrate

            # Collect static files
            docker-compose -f compose.yml exec -T web python manage.py collectstatic --noinput

            # Show container status
            docker-compose -f compose.yml ps