CI/CD Integration
Integrate CAPYSQUASH into your CI/CD pipelines
CI/CD INTEGRATION
Automate migration analysis and consolidation in your continuous integration and deployment pipelines.
💡 Using CAPYSQUASH Platform? GitHub integration is built-in with automatic PR analysis. See GitHub Integration guide. This page covers manual CI/CD setup with capysquash-cli.
GITHUB ACTIONS
Basic Analysis
name: Migration Analysis
on:
pull_request:
paths:
- 'migrations/**'
- 'supabase/migrations/**'
jobs:
analyze:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install pgsquash
run: go install github.com/CAPYSQUASH/pgsquash-engine/cmd/pgsquash@latest
- name: Analyze migrations
run: pgsquash analyze migrations/*.sql
- name: Check quality
run: |
pgsquash analyze migrations/*.sql --report-format json > analysis.json
CONSOLIDATION=$(jq '.consolidation_percentage' analysis.json)
echo "Consolidation opportunity: $CONSOLIDATION%"Full Validation Pipeline
name: Migration Quality
on:
pull_request:
paths:
- 'migrations/**'
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Full history for comparison
- name: Install pgsquash
run: go install github.com/CAPYSQUASH/pgsquash-engine/cmd/pgsquash@latest
- name: Analyze migrations
run: |
pgsquash analyze migrations/*.sql --report-format html --export analysis.html
pgsquash analyze migrations/*.sql --report-format json > analysis.json
- name: Upload analysis report
uses: actions/upload-artifact@v4
with:
name: migration-analysis
path: analysis.html
- name: Squash migrations
run: |
pgsquash squash migrations/*.sql --output clean/ --safety conservative
- name: Validate with Docker
run: |
pgsquash validate migrations/ clean/
- name: Comment on PR
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const analysis = JSON.parse(fs.readFileSync('analysis.json'));
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `## 🦫 pgsquash Analysis\n\n**Consolidation opportunity:** ${analysis.consolidation_percentage}%\n\n[View full report](${process.env.GITHUB_SERVER_URL}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})`
})

Scheduled Consolidation
name: Weekly Migration Consolidation
on:
schedule:
- cron: '0 0 * * 0' # Weekly on Sunday
jobs:
consolidate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install pgsquash
run: go install github.com/CAPYSQUASH/pgsquash-engine/cmd/pgsquash@latest
- name: Consolidate migrations
run: |
pgsquash squash migrations/*.sql --output clean/ --safety conservative
- name: Validate
run: pgsquash validate migrations/ clean/
- name: Create PR
uses: peter-evans/create-pull-request@v5
with:
commit-message: 'chore: consolidate migrations'
title: 'Weekly Migration Consolidation'
body: 'Automated migration consolidation via pgsquash'
branch: chore/consolidate-migrations

GITLAB CI
Basic Pipeline
stages:
- analyze
- validate
analyze_migrations:
stage: analyze
image: CAPYSQUASH/pgsquash:latest
script:
- pgsquash analyze migrations/*.sql --report-format html --export analysis.html
artifacts:
paths:
- analysis.html
expire_in: 30 days
rules:
- changes:
- migrations/**
validate_migrations:
stage: validate
image: CAPYSQUASH/pgsquash:latest
services:
- docker:dind
script:
- pgsquash squash migrations/*.sql --output clean/
- pgsquash validate migrations/ clean/
rules:
- changes:
- migrations/**

Advanced GitLab Pipeline
.migration_template:
image: CAPYSQUASH/pgsquash:latest
only:
changes:
- migrations/**
migration:analyze:
extends: .migration_template
stage: analyze
script:
- pgsquash analyze migrations/*.sql --report-format json > analysis.json
- |
CONSOLIDATION=$(jq '.consolidation_percentage' analysis.json)
if [ $(echo "$CONSOLIDATION > 70" | bc) -eq 1 ]; then
echo "WARNING: High consolidation opportunity: $CONSOLIDATION%"
fi
artifacts:
reports:
dotenv: analysis.env
paths:
- analysis.json
migration:consolidate:
extends: .migration_template
stage: validate
script:
- pgsquash squash migrations/*.sql --output clean/ --safety conservative
- pgsquash validate migrations/ clean/
artifacts:
paths:
- clean/

CIRCLE CI
version: 2.1
jobs:
analyze-migrations:
docker:
- image: CAPYSQUASH/pgsquash:latest
steps:
- checkout
- run:
name: Analyze migrations
command: |
pgsquash analyze migrations/*.sql --report-format html --export analysis.html
- store_artifacts:
path: analysis.html
validate-migrations:
docker:
- image: CAPYSQUASH/pgsquash:latest
steps:
- checkout
- setup_remote_docker
- run:
name: Consolidate and validate
command: |
pgsquash squash migrations/*.sql --output clean/
pgsquash validate migrations/ clean/
workflows:
migration-quality:
jobs:
- analyze-migrations:
filters:
branches:
only:
- main
- develop
- validate-migrations:
requires:
- analyze-migrations

JENKINS
pipeline {
agent any
stages {
stage('Analyze Migrations') {
steps {
script {
docker.image('CAPYSQUASH/pgsquash:latest').inside {
sh 'pgsquash analyze migrations/*.sql --report-format html --export analysis.html'
}
}
publishHTML([
reportDir: '.',
reportFiles: 'analysis.html',
reportName: 'Migration Analysis'
])
}
}
stage('Validate') {
steps {
script {
docker.image('CAPYSQUASH/pgsquash:latest').inside {
sh '''
pgsquash squash migrations/*.sql --output clean/
pgsquash validate migrations/ clean/
'''
}
}
}
}
}
post {
always {
archiveArtifacts artifacts: 'analysis.html', allowEmptyArchive: true
}
}
}

PRE-COMMIT HOOKS
Using pre-commit framework
.pre-commit-config.yaml:
repos:
- repo: local
hooks:
- id: pgsquash-analyze
name: Analyze migrations with pgsquash
entry: pgsquash analyze
language: system
files: ^migrations/.*\.sql$
pass_filenames: true

Custom Git Hook
.git/hooks/pre-commit:
#!/bin/bash
# Check if migrations changed
if git diff --cached --name-only | grep -q "migrations/"; then
echo "đĻĢ Analyzing migrations with pgsquash..."
pgsquash analyze migrations/*.sql --report-format json > /tmp/analysis.json
CONSOLIDATION=$(jq '.consolidation_percentage' /tmp/analysis.json)
if (( $(echo "$CONSOLIDATION > 70" | bc -l) )); then
echo "â ī¸ WARNING: Consolidation opportunity: $CONSOLIDATION%"
echo "Consider running: pgsquash squash migrations/*.sql --output clean/"
echo ""
echo "Continue anyway? (y/N)"
read -r response
if [[ ! "$response" =~ ^[Yy]$ ]]; then
exit 1
fi
fi
fi

DOCKER COMPOSE FOR LOCAL CI
version: '3.8'
services:
pgsquash:
image: CAPYSQUASH/pgsquash:latest
volumes:
- ./migrations:/workspace/migrations
- ./clean:/workspace/clean
command: >
sh -c "
pgsquash analyze /workspace/migrations/*.sql &&
pgsquash squash /workspace/migrations/*.sql --output /workspace/clean/ &&
pgsquash validate /workspace/migrations/ /workspace/clean/
"Usage:
docker-compose up pgsquashDEPLOYMENT PIPELINES
Before Deploy Hook
# .github/workflows/deploy.yml
name: Deploy
on:
push:
branches: [main]
jobs:
pre-deploy-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install pgsquash
run: go install github.com/CAPYSQUASH/pgsquash-engine/cmd/pgsquash@latest
- name: Verify migrations quality
run: |
pgsquash analyze migrations/*.sql --report-format json > analysis.json
WARNINGS=$(jq '.warnings_count' analysis.json)
if [ "$WARNINGS" -gt 0 ]; then
echo "ERROR: Migrations have warnings. Review before deploying."
exit 1
fi
deploy:
needs: pre-deploy-check
runs-on: ubuntu-latest
steps:
# Deployment steps...

MONITORING INTEGRATION
Send Metrics to Datadog
- name: Send migration metrics
run: |
ANALYSIS=$(pgsquash analyze migrations/*.sql --report-format json)
FILE_COUNT=$(echo $ANALYSIS | jq '.file_count')
CONSOLIDATION=$(echo $ANALYSIS | jq '.consolidation_percentage')
curl -X POST "https://api.datadoghq.com/api/v1/series" \
-H "DD-API-KEY: ${{ secrets.DD_API_KEY }}" \
-d '{
"series": [
{
"metric": "migrations.file_count",
"points": [['$(date +%s)', '$FILE_COUNT']],
"type": "gauge"
},
{
"metric": "migrations.consolidation_opportunity",
"points": [['$(date +%s)', '$CONSOLIDATION']],
"type": "gauge"
}
]
}'

Slack Notifications
- name: Notify Slack
if: always()
run: |
ANALYSIS=$(pgsquash analyze migrations/*.sql --report-format json)
CONSOLIDATION=$(echo $ANALYSIS | jq '.consolidation_percentage')
curl -X POST ${{ secrets.SLACK_WEBHOOK }} \
-H 'Content-Type: application/json' \
-d '{
"text": "Migration Analysis Complete",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*Consolidation Opportunity:* '${CONSOLIDATION}'%"
}
}
]
}'

BEST PRACTICES
- Run on every PR - Catch migration issues early
- Automate validation - Don't rely on manual checks
- Archive reports - Keep historical analysis
- Set quality gates - Block deploys if warnings exceed threshold
- Monitor trends - Track consolidation opportunity over time
NEXT STEPS
How is this guide?