Mirror of https://github.com/btouchard/ackify.git — synced 2026-01-06 04:49:52 -06:00
feat: comprehensive CI/CD refactoring with unified code coverage
Reorganize GitHub Actions workflows into reusable components and implement complete code coverage tracking across backend, frontend, and E2E tests.

**CI/CD Improvements:**
- Split monolithic ci.yml into 6 specialized reusable workflows
- New workflows: test-backend, test-frontend, test-e2e, build-docker, security, coverage-report
- Orchestrated execution with proper dependencies and parallel jobs
- Codecov integration with multi-flag coverage (backend/frontend/e2e)

**Frontend Testing:**
- Add Vitest for unit testing with coverage-v8 provider
- Create test setup with window mocks for Ackify globals
- Add 34 unit tests for titleExtractor, referenceDetector, and http utils
- Configure Istanbul instrumentation for E2E coverage collection
- Integrate @cypress/code-coverage for E2E test coverage

**Test Infrastructure:**
- Create run-tests-suite.sh for local comprehensive test execution
- Proper Docker Compose orchestration for integration and E2E tests
- Automatic cleanup handlers with trap for test environments
- Coverage summary aggregation across all test types

**Bug Fixes:**
- Fix backend config tests after OAuth/MagicLink validation changes
- Update tests from panic expectations to error checking
- Ensure OAUTH_COOKIE_SECRET is properly configured in tests

**Configuration:**
- Add .codecov.yml for coverage reporting with flags
- Add .nycrc.json for E2E LCOV generation
- Update .gitignore for test artifacts and coverage reports
- Configure Vite for test environment and code instrumentation
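For orientation: the refactored ci.yml (full diff below) wires the new reusable workflows together through workflow_call. A minimal sketch of that orchestration pattern, condensed from the ci.yml changes in this commit — the job and workflow file names are the real ones, everything else is omitted:

    # Sketch only; see the full ci.yml diff below for the actual file
    jobs:
      backend-tests:
        uses: ./.github/workflows/test-backend.yml
        secrets: inherit
      frontend-tests:
        uses: ./.github/workflows/test-frontend.yml
        secrets: inherit
      e2e-tests:
        uses: ./.github/workflows/test-e2e.yml
        needs: [backend-tests, frontend-tests]
        secrets: inherit
      coverage-report:
        uses: ./.github/workflows/coverage-report.yml
        needs: [backend-tests, frontend-tests, e2e-tests]
        if: always()
        secrets: inherit

Each test workflow uploads its coverage file as an artifact (backend-coverage, frontend-coverage, e2e-coverage), and coverage-report downloads the three artifacts and uploads them to Codecov under the backend, frontend, and e2e flags.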
58  .codecov.yml  Normal file
@@ -0,0 +1,58 @@
# Codecov configuration for Ackify CE
# Documentation: https://docs.codecov.com/docs/codecov-yaml

coverage:
  status:
    project:
      default:
        target: auto
        threshold: 2%
        informational: false
    patch:
      default:
        target: auto
        threshold: 5%
        informational: true

# Separate flags for different test types
flags:
  backend:
    paths:
      - backend/
    carryforward: true
  frontend:
    paths:
      - webapp/src/
    carryforward: true
  e2e:
    paths:
      - webapp/src/
    carryforward: true

# Files to ignore
ignore:
  - "**/*.test.ts"
  - "**/*.spec.ts"
  - "**/*.cy.ts"
  - "webapp/cypress/**"
  - "webapp/tests/**"
  - "backend/cmd/*/main.go"
  - "backend/migrations/**"
  - "backend/locales/**"
  - "backend/templates/**"
  - "**/node_modules/**"
  - "**/dist/**"
  - "**/coverage/**"
  - "**/coverage-e2e/**"

# Comment behavior on pull requests
comment:
  layout: "reach,diff,flags,tree"
  behavior: default
  require_changes: false
  require_base: false
  require_head: true

# GitHub checks
github_checks:
  annotations: true
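Usage note (not part of the commit): the backend, frontend, and e2e flags declared above are the same flag names the coverage-report workflow passes to codecov-action when uploading. A hypothetical per-flag upload step would look roughly like this — the actual upload in this commit sends all three files in one step (see coverage-report.yml below):

    - name: Upload backend coverage
      uses: codecov/codecov-action@v4
      with:
        files: coverage/backend/coverage.out
        flags: backend
        token: ${{ secrets.CODECOV_TOKEN }}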
90  .github/workflows/build-docker.yml  vendored  Normal file
@@ -0,0 +1,90 @@
name: Build and Push Docker

on:
  workflow_call:
    inputs:
      push:
        description: 'Push image to registry'
        required: false
        type: boolean
        default: true

env:
  REGISTRY: docker.io
  IMAGE_NAME: btouchard/ackify-ce

jobs:
  build:
    name: Build Docker Image
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'
          cache-dependency-path: webapp/package-lock.json

      - name: Build frontend
        run: |
          cd webapp
          npm ci --no-audit --no-fund --no-progress
          npm run build

      - name: Copy frontend to backend embed directory
        run: |
          mkdir -p backend/cmd/community/web/dist
          cp -r webapp/dist/* backend/cmd/community/web/dist/

      - name: Compute IMAGE_TAG
        run: |
          # Strip leading 'v' from tag refs; leave branches unchanged
          echo "IMAGE_TAG=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        if: inputs.push
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=sha,prefix={{branch}}-,enable={{is_default_branch}}
            type=sha,enable=false
            type=raw,value=latest,enable=${{ github.event_name == 'release' }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ inputs.push }}
          tags: |
            ${{ steps.meta.outputs.tags }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}
            BUILD_DATE=${{ github.event.head_commit.timestamp }}
242  .github/workflows/ci.yml  vendored
@@ -13,226 +13,68 @@ env:
  IMAGE_NAME: btouchard/ackify-ce

jobs:
  test:
    name: Run Tests
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:15-alpine
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: testpassword
          POSTGRES_DB: ackify_test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
  # Phase 1: Parallel tests
  backend-tests:
    name: Backend Tests
    uses: ./.github/workflows/test-backend.yml
    secrets: inherit

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
  frontend-tests:
    name: Frontend Tests
    uses: ./.github/workflows/test-frontend.yml
    secrets: inherit

      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.24.5'
          cache: true
  e2e-tests:
    name: E2E Tests
    uses: ./.github/workflows/test-e2e.yml
    secrets: inherit
    needs: [backend-tests, frontend-tests]

      - name: Download dependencies
        run: |
          cd backend
          go mod download
  # Phase 2: Global coverage
  coverage-report:
    name: Coverage Report
    uses: ./.github/workflows/coverage-report.yml
    needs: [backend-tests, frontend-tests, e2e-tests]
    secrets: inherit
    if: always()

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'
          cache-dependency-path: webapp/package-lock.json

      - name: Build frontend
        run: |
          cd webapp
          npm ci --no-audit --no-fund --no-progress
          npm run build

      - name: Copy frontend to backend embed directory
        run: |
          mkdir -p backend/cmd/community/web/dist
          cp -r webapp/dist/* backend/cmd/community/web/dist/

      - name: Run go fmt check
        run: |
          cd backend
          if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
            echo "The following files need to be formatted:"
            gofmt -s -l .
            exit 1
          fi

      - name: Run go vet
        run: |
          cd backend
          go vet ./...

      - name: Run unit tests
        env:
          ACKIFY_BASE_URL: "http://localhost:8080"
          ACKIFY_ORGANISATION: "Test Org"
          ACKIFY_OAUTH_CLIENT_ID: "test-client-id"
          ACKIFY_OAUTH_CLIENT_SECRET: "test-client-secret"
          ACKIFY_OAUTH_COOKIE_SECRET: "dGVzdC1jb29raWUtc2VjcmV0LXRlc3QtY29va2llLXNlY3JldA=="
        run: |
          cd backend
          go test -v -race -short ./...

      - name: Run integration tests
        env:
          ACKIFY_DB_DSN: "postgres://postgres:testpassword@localhost:5432/ackify_test?sslmode=disable"
          INTEGRATION_TESTS: "1"
        run: |
          cd backend
          # Run DB + admin integration tests, serialized to avoid CI flakiness
          go test -v -race -tags=integration -p 1 -count=1 ./internal/infrastructure/database/... ./internal/presentation/api/admin

      - name: Generate coverage report (unit+integration)
        env:
          ACKIFY_DB_DSN: "postgres://postgres:testpassword@localhost:5432/ackify_test?sslmode=disable"
          INTEGRATION_TESTS: "1"
          ACKIFY_BASE_URL: "http://localhost:8080"
          ACKIFY_ORGANISATION: "Test Org"
          ACKIFY_OAUTH_CLIENT_ID: "test-client-id"
          ACKIFY_OAUTH_CLIENT_SECRET: "test-client-secret"
          ACKIFY_OAUTH_COOKIE_SECRET: "dGVzdC1jb29raWUtc2VjcmV0LXRlc3QtY29va2llLXNlY3JldA=="
        run: |
          cd backend
          # 1) Unit coverage (no integration tag)
          go test -v -race -short -covermode=atomic -coverprofile=coverage-unit.out ./...
          # 2) Integration coverage only for integration packages, serialized
          go test -v -race -tags=integration -p 1 -count=1 -covermode=atomic -coverprofile=coverage-integration.out \
            ./internal/infrastructure/database/... \
            ./internal/presentation/api/admin
          # 3) Merge coverage
          echo "mode: atomic" > coverage.out
          tail -n +2 coverage-unit.out >> coverage.out
          tail -n +2 coverage-integration.out >> coverage.out
          go tool cover -func=coverage.out | tail -1

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          files: ./backend/coverage.out
          flags: unittests,integration
          name: codecov-ackify-ce
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}

  build:
    name: Build and Push Docker Image
    runs-on: ubuntu-latest
    needs: test
  # Phase 3: Build & Deploy (only if tests pass and not a PR)
  build-and-push:
    name: Build and Push Docker
    if: github.event_name != 'pull_request'
    uses: ./.github/workflows/build-docker.yml
    needs: [backend-tests, frontend-tests, e2e-tests]
    secrets: inherit
    with:
      push: true

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Compute IMAGE_TAG
        run: |
          # Strip leading 'v' from tag refs; leave branches unchanged
          echo "IMAGE_TAG=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=sha,prefix={{branch}}-,enable={{is_default_branch}}
            type=sha,enable=false
            type=raw,value=latest,enable=${{ github.event_name == 'release' }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: |
            ${{ steps.meta.outputs.tags }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}
            BUILD_DATE=${{ github.event.head_commit.timestamp }}

  security:
  # Phase 4: Security scan
  security-scan:
    name: Security Scan
    runs-on: ubuntu-latest
    needs: build
    if: github.event_name != 'pull_request'
    uses: ./.github/workflows/security.yml
    needs: build-and-push
    secrets: inherit

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Compute IMAGE_TAG
        run: |
          # Strip leading 'v' from tag refs; leave branches unchanged
          echo "IMAGE_TAG=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV"

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        with:
          image-ref: '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}'
          format: 'sarif'
          output: 'trivy-results.sarif'

      # - name: Upload Trivy scan results to GitHub Security tab
      #   uses: github/codeql-action/upload-sarif@v2
      #   if: always()
      #   with:
      #     sarif_file: 'trivy-results.sarif'

  # Final notification
  notify:
    name: Notify
    runs-on: ubuntu-latest
    needs: [test, build, security]
    needs: [backend-tests, frontend-tests, e2e-tests, build-and-push, security-scan]
    if: always() && github.event_name != 'pull_request'

    steps:
      - name: Compute IMAGE_TAG
        run: echo "IMAGE_TAG=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV"

      - name: Notify success
        if: needs.test.result == 'success' && needs.build.result == 'success'
        if: needs.backend-tests.result == 'success' && needs.frontend-tests.result == 'success' && needs.e2e-tests.result == 'success' && needs.build-and-push.result == 'success'
        run: |
          echo "✅ CI/CD Pipeline completed successfully!"
          echo "🚀 Image pushed: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}"

      - name: Notify failure
        if: needs.test.result == 'failure' || needs.build.result == 'failure'
        if: needs.backend-tests.result == 'failure' || needs.frontend-tests.result == 'failure' || needs.e2e-tests.result == 'failure' || needs.build-and-push.result == 'failure'
        run: |
          echo "❌ CI/CD Pipeline failed!"
          echo "Please check the logs above for details."
57  .github/workflows/coverage-report.yml  vendored  Normal file
@@ -0,0 +1,57 @@
name: Coverage Report

on:
  workflow_call:

jobs:
  merge-and-upload:
    name: Merge Coverage & Upload to Codecov
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Download backend coverage
        uses: actions/download-artifact@v4
        with:
          name: backend-coverage
          path: coverage/backend

      - name: Download frontend coverage
        uses: actions/download-artifact@v4
        with:
          name: frontend-coverage
          path: coverage/frontend
        continue-on-error: true

      - name: Download E2E coverage
        uses: actions/download-artifact@v4
        with:
          name: e2e-coverage
          path: coverage/e2e
        continue-on-error: true

      - name: Upload to Codecov (multi-format)
        uses: codecov/codecov-action@v4
        with:
          files: |
            coverage/backend/coverage.out
            coverage/frontend/lcov.info
            coverage/e2e/lcov.info
          flags: backend,frontend,e2e
          name: codecov-ackify-ce
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Generate coverage summary
        run: |
          echo "## 📊 Code Coverage Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Coverage reports uploaded to Codecov:" >> $GITHUB_STEP_SUMMARY
          echo "- ✅ Backend (Go): coverage/backend/coverage.out" >> $GITHUB_STEP_SUMMARY
          echo "- ✅ Frontend (Vue/TS): coverage/frontend/lcov.info" >> $GITHUB_STEP_SUMMARY
          echo "- ✅ E2E (Cypress): coverage/e2e/lcov.info" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "View detailed report: https://codecov.io/gh/${{ github.repository }}" >> $GITHUB_STEP_SUMMARY
35  .github/workflows/security.yml  vendored  Normal file
@@ -0,0 +1,35 @@
name: Security Scan

on:
  workflow_call:

env:
  REGISTRY: docker.io
  IMAGE_NAME: btouchard/ackify-ce

jobs:
  trivy:
    name: Trivy Vulnerability Scan
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Compute IMAGE_TAG
        run: |
          # Strip leading 'v' from tag refs; leave branches unchanged
          echo "IMAGE_TAG=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV"

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        with:
          image-ref: '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}'
          format: 'sarif'
          output: 'trivy-results.sarif'

      # - name: Upload Trivy scan results to GitHub Security tab
      #   uses: github/codeql-action/upload-sarif@v3
      #   if: always()
      #   with:
      #     sarif_file: 'trivy-results.sarif'
113  .github/workflows/test-backend.yml  vendored  Normal file
@@ -0,0 +1,113 @@
name: Backend Tests

on:
  workflow_call:
    outputs:
      coverage-artifact:
        description: "Backend coverage artifact name"
        value: ${{ jobs.test.outputs.coverage-artifact }}

jobs:
  test:
    name: Backend Unit + Integration Tests
    runs-on: ubuntu-latest
    outputs:
      coverage-artifact: backend-coverage

    services:
      postgres:
        image: postgres:15-alpine
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: testpassword
          POSTGRES_DB: ackify_test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.24.5'
          cache: true

      - name: Download dependencies
        run: |
          cd backend
          go mod download

      - name: Run go fmt check
        run: |
          cd backend
          if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
            echo "The following files need to be formatted:"
            gofmt -s -l .
            exit 1
          fi

      - name: Run go vet
        run: |
          cd backend
          go vet ./...

      - name: Run unit tests
        env:
          ACKIFY_BASE_URL: "http://localhost:8080"
          ACKIFY_ORGANISATION: "Test Org"
          ACKIFY_OAUTH_CLIENT_ID: "test-client-id"
          ACKIFY_OAUTH_CLIENT_SECRET: "test-client-secret"
          ACKIFY_OAUTH_COOKIE_SECRET: "dGVzdC1jb29raWUtc2VjcmV0LXRlc3QtY29va2llLXNlY3JldA=="
        run: |
          cd backend
          go test -v -race -short ./...

      - name: Run integration tests
        env:
          ACKIFY_DB_DSN: "postgres://postgres:testpassword@localhost:5432/ackify_test?sslmode=disable"
          INTEGRATION_TESTS: "1"
        run: |
          cd backend
          go test -v -race -tags=integration -p 1 -count=1 \
            ./internal/infrastructure/database/... \
            ./internal/presentation/api/admin

      - name: Generate coverage report
        env:
          ACKIFY_DB_DSN: "postgres://postgres:testpassword@localhost:5432/ackify_test?sslmode=disable"
          INTEGRATION_TESTS: "1"
          ACKIFY_BASE_URL: "http://localhost:8080"
          ACKIFY_ORGANISATION: "Test Org"
          ACKIFY_OAUTH_CLIENT_ID: "test-client-id"
          ACKIFY_OAUTH_CLIENT_SECRET: "test-client-secret"
          ACKIFY_OAUTH_COOKIE_SECRET: "dGVzdC1jb29raWUtc2VjcmV0LXRlc3QtY29va2llLXNlY3JldA=="
        run: |
          cd backend
          # Unit coverage
          go test -v -race -short -covermode=atomic -coverprofile=coverage-unit.out ./...
          # Integration coverage
          go test -v -race -tags=integration -p 1 -count=1 \
            -covermode=atomic -coverprofile=coverage-integration.out \
            ./internal/infrastructure/database/... \
            ./internal/presentation/api/admin
          # Merge coverage
          echo "mode: atomic" > coverage.out
          tail -n +2 coverage-unit.out >> coverage.out
          tail -n +2 coverage-integration.out >> coverage.out

          # Display coverage summary
          go tool cover -func=coverage.out | tail -1

      - name: Upload backend coverage artifact
        uses: actions/upload-artifact@v4
        with:
          name: backend-coverage
          path: backend/coverage.out
          retention-days: 1

.github/workflows/test-e2e.yml
@@ -1,15 +1,18 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
name: E2E Tests

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]
  workflow_call:
    outputs:
      coverage-artifact:
        description: "E2E coverage artifact name"
        value: ${{ jobs.cypress.outputs.coverage-artifact }}

jobs:
  cypress-run:
  cypress:
    name: Cypress E2E Tests
    runs-on: ubuntu-latest
    outputs:
      coverage-artifact: e2e-coverage

    services:
      postgres:
@@ -53,8 +56,10 @@ jobs:
        working-directory: webapp
        run: npm ci

      - name: Build frontend
      - name: Build frontend (with instrumentation for coverage)
        working-directory: webapp
        env:
          CYPRESS_COVERAGE: "true"
        run: npm run build

      - name: Install backend dependencies
@@ -66,20 +71,6 @@
        run: |
          go run ./backend/cmd/migrate/main.go -migrations-path file://backend/migrations up

      - name: Generate Ed25519 keys
        run: |
          go run ./backend/cmd/community/keygen.go > /tmp/ed25519.key || true
          if [ ! -f /tmp/ed25519.key ]; then
            echo "Generating Ed25519 key for testing"
            # Generate a test key if keygen doesn't exist
            echo "test_private_key_base64_encoded_here" > /tmp/ed25519.key
          fi

      - name: Copy locales and templates
        run: |
          cp -r backend/locales .
          cp -r backend/templates .

      - name: Copy frontend dist for embed
        run: |
          mkdir -p backend/cmd/community/web/dist
@@ -104,6 +95,7 @@
          ACKIFY_MAIL_FROM: "noreply@ackify.test"
          ACKIFY_MAIL_FROM_NAME: "Ackify Test"
          ACKIFY_LOG_LEVEL: "debug"
          ACKIFY_AUTH_MAGICLINK_ENABLED: "true"
        run: |
          go build -o ackify ./backend/cmd/community
          ./ackify &
@@ -141,6 +133,14 @@
          path: webapp/cypress/videos
          retention-days: 7

      - name: Upload E2E coverage artifact
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-coverage
          path: webapp/coverage-e2e/lcov.info
          retention-days: 1

      - name: Stop Ackify server
        if: always()
        run: |
58  .github/workflows/test-frontend.yml  vendored  Normal file
@@ -0,0 +1,58 @@
name: Frontend Tests

on:
  workflow_call:
    outputs:
      coverage-artifact:
        description: "Frontend coverage artifact name"
        value: ${{ jobs.test.outputs.coverage-artifact }}

jobs:
  test:
    name: Frontend Build + Lint
    runs-on: ubuntu-latest
    outputs:
      coverage-artifact: frontend-coverage

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'
          cache-dependency-path: webapp/package-lock.json

      - name: Install dependencies
        run: |
          cd webapp
          npm ci --no-audit --no-fund --no-progress

      - name: Run TypeScript check
        run: |
          cd webapp
          npx vue-tsc -b

      - name: Run i18n check
        run: |
          cd webapp
          npm run lint:i18n

      - name: Build frontend
        run: |
          cd webapp
          npm run build

      - name: Run unit tests with coverage
        run: |
          cd webapp
          npm run test:coverage

      - name: Upload frontend coverage artifact
        uses: actions/upload-artifact@v4
        with:
          name: frontend-coverage
          path: webapp/coverage/lcov.info
          retention-days: 1
3  .gitignore  vendored
@@ -20,4 +20,5 @@ client_secret*.json

# Tailwind CSS
/bin/tailwindcss
/static/output.css
/.coverage-report/
@@ -343,6 +343,7 @@ func TestLoad_MissingRequiredEnvironmentVariables(t *testing.T) {
      "ACKIFY_OAUTH_CLIENT_ID":     "test-client-id",
      "ACKIFY_OAUTH_CLIENT_SECRET": "test-client-secret",
      "ACKIFY_OAUTH_PROVIDER":      "google",
      "ACKIFY_OAUTH_COOKIE_SECRET": base64.StdEncoding.EncodeToString(make([]byte, 32)),
    }

    delete(envVars, missingVar)
@@ -358,13 +359,10 @@

    _ = os.Unsetenv(missingVar)

    defer func() {
      if r := recover(); r == nil {
        t.Errorf("Load() should panic when %s is missing", missingVar)
      }
    }()

    _, _ = Load()
    _, err := Load()
    if err == nil {
      t.Errorf("Load() should return error when %s is missing", missingVar)
    }
    })
  }
}
@@ -384,6 +382,7 @@ func TestLoad_CustomProviderMissingRequiredVars(t *testing.T) {
      "ACKIFY_DB_DSN":              "postgres://user:pass@localhost/test",
      "ACKIFY_OAUTH_CLIENT_ID":     "test-client-id",
      "ACKIFY_OAUTH_CLIENT_SECRET": "test-client-secret",
      "ACKIFY_OAUTH_COOKIE_SECRET": base64.StdEncoding.EncodeToString(make([]byte, 32)),
      "ACKIFY_OAUTH_AUTH_URL":      "https://auth.custom.com/oauth/authorize",
      "ACKIFY_OAUTH_TOKEN_URL":     "https://auth.custom.com/oauth/token",
      "ACKIFY_OAUTH_USERINFO_URL":  "https://api.custom.com/user",
@@ -402,13 +401,10 @@

    _ = os.Unsetenv(missingVar)

    defer func() {
      if r := recover(); r == nil {
        t.Errorf("Load() should panic when %s is missing for custom provider", missingVar)
      }
    }()

    _, _ = Load()
    _, err := Load()
    if err == nil {
      t.Errorf("Load() should return error when %s is missing for custom provider", missingVar)
    }
    })
  }
}
409  run-tests-suite.sh  Executable file
@@ -0,0 +1,409 @@
#!/bin/bash
# SPDX-License-Identifier: AGPL-3.0-or-later
# Complete test suite runner with coverage reporting for Ackify CE

set -e  # Exit on error

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Banner
echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
echo -e "${BLUE}║ Ackify CE - Complete Test Suite Runner ║${NC}"
echo -e "${BLUE}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""

# Check if we're in the right directory
if [ ! -f "go.mod" ] || [ ! -d "backend" ] || [ ! -d "webapp" ]; then
    echo -e "${RED}❌ Error: Please run this script from the project root directory${NC}"
    exit 1
fi

# Variables
PROJECT_ROOT=$(pwd)
BACKEND_DIR="$PROJECT_ROOT/backend"
WEBAPP_DIR="$PROJECT_ROOT/webapp"
COVERAGE_DIR="$PROJECT_ROOT/.coverage-report"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)

# Track failures
FAILED=0
INTEGRATION_SKIPPED=0
E2E_SKIPPED=0

# Cleanup function
cleanup_integration() {
    if [ "$INTEGRATION_STARTED" = "1" ]; then
        echo ""
        echo -e "${YELLOW}🧹 Cleaning up integration test environment...${NC}"
        docker compose -f compose.test.yml down -v > /dev/null 2>&1 || true
        echo -e "${GREEN}✓ Integration test environment cleaned up${NC}"
    fi
}

cleanup_e2e() {
    if [ "$E2E_STARTED" = "1" ]; then
        echo ""
        echo -e "${YELLOW}🧹 Cleaning up E2E test environment...${NC}"
        docker compose -f compose.e2e.yml down -v > /dev/null 2>&1 || true
        echo -e "${GREEN}✓ E2E test environment cleaned up${NC}"
    fi
}

# Trap to ensure cleanup on exit
trap cleanup_integration EXIT

# Create coverage directory
mkdir -p "$COVERAGE_DIR"

# ==============================================================================
# Phase 1: Backend Tests
# ==============================================================================
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${CYAN} Phase 1/3: Backend Tests (Go)${NC}"
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""

cd "$BACKEND_DIR"

echo -e "${YELLOW}📦 Running go fmt check...${NC}"
if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
    echo -e "${RED}❌ Code formatting issues found:${NC}"
    gofmt -s -l .
    FAILED=$((FAILED + 1))
else
    echo -e "${GREEN}✓ Code formatting OK${NC}"
fi
echo ""

echo -e "${YELLOW}🔍 Running go vet...${NC}"
if go vet ./...; then
    echo -e "${GREEN}✓ go vet passed${NC}"
else
    echo -e "${RED}❌ go vet failed${NC}"
    FAILED=$((FAILED + 1))
fi
echo ""

echo -e "${YELLOW}🧪 Running unit tests...${NC}"
if go test -v -short ./...; then
    echo -e "${GREEN}✓ Unit tests passed${NC}"
else
    echo -e "${RED}❌ Unit tests failed${NC}"
    FAILED=$((FAILED + 1))
fi
echo ""

# Generate unit coverage
echo -e "${YELLOW}📊 Generating unit test coverage...${NC}"
go test -short -covermode=atomic -coverprofile="$COVERAGE_DIR/backend-unit.out" ./... 2>&1 | grep -v "no test files" || true
echo -e "${GREEN}✓ Unit coverage generated${NC}"
echo ""

# Integration tests with Docker Compose
echo -e "${YELLOW}🔗 Running integration tests...${NC}"
if ! command -v docker &> /dev/null; then
    echo -e "${YELLOW}⚠️ Docker not available, skipping integration tests${NC}"
    INTEGRATION_SKIPPED=1
else
    echo -e "${YELLOW}🐳 Starting PostgreSQL + MailHog (compose.test.yml)...${NC}"

    # Clean up previous containers
    docker compose -f "$PROJECT_ROOT/compose.test.yml" down -v > /dev/null 2>&1 || true

    # Start services
    if docker compose -f "$PROJECT_ROOT/compose.test.yml" up -d; then
        INTEGRATION_STARTED=1
        echo -e "${GREEN}✓ Services started${NC}"

        # Wait for PostgreSQL to be ready
        echo -e "${YELLOW}⏳ Waiting for PostgreSQL to be ready...${NC}"
        sleep 5

        MAX_RETRIES=30
        RETRY_COUNT=0
        while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
            if docker exec ackify-db-test pg_isready -U postgres -d ackify_test > /dev/null 2>&1; then
                echo -e "${GREEN}✓ PostgreSQL is ready${NC}"
                break
            fi
            RETRY_COUNT=$((RETRY_COUNT + 1))
            echo " Retry $RETRY_COUNT/$MAX_RETRIES..."
            sleep 2
        done

        if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
            echo -e "${RED}❌ PostgreSQL failed to start${NC}"
            FAILED=$((FAILED + 1))
            INTEGRATION_SKIPPED=1
        else
            # Run migrations
            echo -e "${YELLOW}📝 Running database migrations...${NC}"
            export ACKIFY_DB_DSN="postgres://postgres:testpassword@localhost:5432/ackify_test?sslmode=disable"
            cd "$PROJECT_ROOT"
            if go run ./backend/cmd/migrate/main.go -migrations-path file://backend/migrations up; then
                echo -e "${GREEN}✓ Migrations applied${NC}"

                # Run integration tests
                cd "$BACKEND_DIR"
                export INTEGRATION_TESTS=1
                if go test -v -race -tags=integration -p 1 -count=1 ./internal/infrastructure/database/... ./internal/presentation/api/admin; then
                    echo -e "${GREEN}✓ Integration tests passed${NC}"

                    # Generate integration coverage
                    echo -e "${YELLOW}📊 Generating integration test coverage...${NC}"
                    go test -race -tags=integration -p 1 -count=1 \
                        -covermode=atomic -coverprofile="$COVERAGE_DIR/backend-integration.out" \
                        ./internal/infrastructure/database/... ./internal/presentation/api/admin 2>&1 | grep -v "no test files" || true
                    echo -e "${GREEN}✓ Integration coverage generated${NC}"
                else
                    echo -e "${RED}❌ Integration tests failed${NC}"
                    FAILED=$((FAILED + 1))
                fi
            else
                echo -e "${RED}❌ Migrations failed${NC}"
                FAILED=$((FAILED + 1))
                INTEGRATION_SKIPPED=1
            fi
        fi
    else
        echo -e "${RED}❌ Failed to start Docker services${NC}"
        FAILED=$((FAILED + 1))
        INTEGRATION_SKIPPED=1
    fi
fi
echo ""

# Merge backend coverage
echo -e "${YELLOW}📊 Merging backend coverage reports...${NC}"
echo "mode: atomic" > "$COVERAGE_DIR/backend-coverage.out"
tail -n +2 "$COVERAGE_DIR/backend-unit.out" >> "$COVERAGE_DIR/backend-coverage.out" 2>/dev/null || true
if [ "$INTEGRATION_SKIPPED" = "0" ] && [ -f "$COVERAGE_DIR/backend-integration.out" ]; then
    tail -n +2 "$COVERAGE_DIR/backend-integration.out" >> "$COVERAGE_DIR/backend-coverage.out" 2>/dev/null || true
fi

# Extract backend coverage percentage
BACKEND_COV=$(go tool cover -func="$COVERAGE_DIR/backend-coverage.out" 2>/dev/null | tail -1 | awk '{print $3}' || echo "N/A")
echo -e "${GREEN}✓ Backend coverage: $BACKEND_COV${NC}"
echo ""

# Cleanup integration environment
cleanup_integration
INTEGRATION_STARTED=0

# ==============================================================================
# Phase 2: Frontend Unit Tests
# ==============================================================================
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${CYAN} Phase 2/3: Frontend Unit Tests (Vitest)${NC}"
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""

cd "$WEBAPP_DIR"

echo -e "${YELLOW}📦 Installing frontend dependencies...${NC}"
if npm ci --no-audit --no-fund --prefer-offline > /dev/null 2>&1; then
    echo -e "${GREEN}✓ Dependencies installed${NC}"
else
    echo -e "${RED}❌ Failed to install dependencies${NC}"
    FAILED=$((FAILED + 1))
fi
echo ""

echo -e "${YELLOW}🧪 Running frontend unit tests with coverage...${NC}"
if npm run test:coverage; then
    echo -e "${GREEN}✓ Frontend unit tests passed${NC}"

    # Extract frontend coverage percentage from lcov.info
    if [ -f "coverage/lcov.info" ]; then
        cp coverage/lcov.info "$COVERAGE_DIR/frontend-unit.lcov"

        # Calculate coverage from lcov.info
        FRONTEND_LINES_FOUND=$(grep -c "^DA:" coverage/lcov.info 2>/dev/null || echo "0")
        FRONTEND_LINES_HIT=$(grep "^DA:" coverage/lcov.info 2>/dev/null | grep -c ",0$" || echo "0")
        FRONTEND_LINES_HIT=$((FRONTEND_LINES_FOUND - FRONTEND_LINES_HIT))
        if [ "$FRONTEND_LINES_FOUND" -gt 0 ]; then
            FRONTEND_COV=$(awk "BEGIN {printf \"%.1f%%\", ($FRONTEND_LINES_HIT/$FRONTEND_LINES_FOUND)*100}")
        else
            FRONTEND_COV="0.0%"
        fi
        echo -e "${GREEN}✓ Frontend unit coverage: $FRONTEND_COV${NC}"
    else
        echo -e "${YELLOW}⚠️ Coverage file not found${NC}"
        FRONTEND_COV="N/A"
    fi
else
    echo -e "${RED}❌ Frontend unit tests failed${NC}"
    FAILED=$((FAILED + 1))
    FRONTEND_COV="N/A"
fi
echo ""

# ==============================================================================
# Phase 3: E2E Tests (Cypress)
# ==============================================================================
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${CYAN} Phase 3/3: E2E Tests (Cypress)${NC}"
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""

cd "$PROJECT_ROOT"

# Update trap to cleanup E2E instead
trap cleanup_e2e EXIT

if ! command -v docker &> /dev/null; then
    echo -e "${YELLOW}⚠️ Docker not available, skipping E2E tests${NC}"
    E2E_SKIPPED=1
    E2E_COV="N/A"
else
    echo -e "${YELLOW}🧹 Cleaning up previous E2E environment...${NC}"
    docker compose -f compose.e2e.yml down -v > /dev/null 2>&1 || true
    echo -e "${GREEN}✓ Cleanup complete${NC}"
    echo ""

    echo -e "${YELLOW}🏗️ Building frontend with coverage instrumentation...${NC}"
    cd "$WEBAPP_DIR"
    if CYPRESS_COVERAGE=true npm run build > /dev/null 2>&1; then
        echo -e "${GREEN}✓ Frontend built with instrumentation${NC}"
    else
        echo -e "${RED}❌ Failed to build frontend${NC}"
        FAILED=$((FAILED + 1))
        E2E_SKIPPED=1
    fi
    echo ""

    if [ "$E2E_SKIPPED" = "0" ]; then
        cd "$PROJECT_ROOT"
        echo -e "${YELLOW}🐳 Starting E2E stack (compose.e2e.yml --build)...${NC}"
        if docker compose -f compose.e2e.yml up -d --force-recreate --build; then
            E2E_STARTED=1
            echo -e "${GREEN}✓ E2E stack started${NC}"
            echo ""

            # Wait for services
            echo -e "${YELLOW}⏳ Waiting for services to be ready...${NC}"
            echo " - Waiting for database..."
            sleep 5

            echo " - Waiting for migrations..."
            docker compose -f compose.e2e.yml logs ackify-migrate 2>&1 | tail -5

            echo " - Waiting for backend..."
            sleep 10

            # Health check
            MAX_RETRIES=30
            RETRY_COUNT=0
            while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
                if curl -s http://localhost:8080/api/v1/health > /dev/null 2>&1; then
                    echo -e "${GREEN}✓ Backend is ready!${NC}"
                    break
                fi
                RETRY_COUNT=$((RETRY_COUNT + 1))
                echo " Retry $RETRY_COUNT/$MAX_RETRIES..."
                sleep 2
            done

            if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
                echo -e "${RED}❌ Backend failed to start. Check logs:${NC}"
                docker compose -f compose.e2e.yml logs ackify-ce | tail -50
                FAILED=$((FAILED + 1))
                E2E_SKIPPED=1
            else
                echo -e "${GREEN}✓ All services are ready${NC}"
                echo ""

                # Run Cypress tests
                cd "$WEBAPP_DIR"
                echo -e "${YELLOW}🧪 Running Cypress E2E tests...${NC}"
                if npm run test:e2e; then
                    echo -e "${GREEN}✓ E2E tests passed${NC}"

                    # Extract E2E coverage
                    if [ -f "coverage-e2e/lcov.info" ]; then
                        cp coverage-e2e/lcov.info "$COVERAGE_DIR/e2e.lcov"

                        # Calculate coverage from lcov.info
                        E2E_LINES_FOUND=$(grep -c "^DA:" coverage-e2e/lcov.info 2>/dev/null || echo "0")
                        E2E_LINES_HIT=$(grep "^DA:" coverage-e2e/lcov.info 2>/dev/null | grep -c ",0$" || echo "0")
                        E2E_LINES_HIT=$((E2E_LINES_FOUND - E2E_LINES_HIT))
                        if [ "$E2E_LINES_FOUND" -gt 0 ]; then
                            E2E_COV=$(awk "BEGIN {printf \"%.1f%%\", ($E2E_LINES_HIT/$E2E_LINES_FOUND)*100}")
                        else
                            E2E_COV="0.0%"
                        fi
                        echo -e "${GREEN}✓ E2E coverage: $E2E_COV${NC}"
                    else
                        echo -e "${YELLOW}⚠️ E2E coverage file not found${NC}"
                        E2E_COV="N/A"
                    fi
                else
                    echo -e "${RED}❌ E2E tests failed${NC}"
                    FAILED=$((FAILED + 1))
                    E2E_COV="N/A"
                fi
            fi
        else
            echo -e "${RED}❌ Failed to start E2E stack${NC}"
            FAILED=$((FAILED + 1))
            E2E_SKIPPED=1
            E2E_COV="N/A"
        fi
    fi
fi
echo ""

# Cleanup E2E environment
cleanup_e2e
E2E_STARTED=0

# ==============================================================================
# Summary
# ==============================================================================
cd "$PROJECT_ROOT"

echo ""
echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
echo -e "${BLUE}║ Coverage Summary ║${NC}"
echo -e "${BLUE}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""
echo -e " ${CYAN}Backend (Go):${NC} $BACKEND_COV"
echo -e " ${CYAN}Frontend (Vitest):${NC} $FRONTEND_COV"
echo -e " ${CYAN}E2E (Cypress):${NC} $E2E_COV"
echo ""

if [ "$INTEGRATION_SKIPPED" = "1" ]; then
    echo -e "${YELLOW}⚠️ Integration tests were skipped${NC}"
fi
if [ "$E2E_SKIPPED" = "1" ]; then
    echo -e "${YELLOW}⚠️ E2E tests were skipped${NC}"
fi
echo ""

echo -e "${BLUE}Coverage reports saved to:${NC} $COVERAGE_DIR"
echo ""

# Display coverage files
if [ -d "$COVERAGE_DIR" ]; then
    echo -e "${BLUE}Generated files:${NC}"
    ls -lh "$COVERAGE_DIR" 2>/dev/null | tail -n +2 | awk '{print " - " $9 " (" $5 ")"}'
    echo ""
fi

# Final result
echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
if [ $FAILED -eq 0 ]; then
    echo -e "${GREEN}║ ✓ All test suites passed successfully! ║${NC}"
    echo -e "${BLUE}╚════════════════════════════════════════════════════════════╝${NC}"
    exit 0
else
    echo -e "${RED}║ ✗ $FAILED test suite(s) failed ║${NC}"
    echo -e "${BLUE}╚════════════════════════════════════════════════════════════╝${NC}"
    exit 1
fi
8  webapp/.gitignore  vendored
@@ -22,3 +22,11 @@ dist-ssr
*.njsproj
*.sln
*.sw?

# Coverage
coverage/
*.lcov

# E2E Coverage
coverage-e2e/
.nyc_output/
26  webapp/.nycrc.json  Normal file
@@ -0,0 +1,26 @@
{
  "report-dir": "coverage-e2e",
  "temp-dir": ".nyc_output",
  "reporter": ["text", "lcov", "html", "json"],
  "exclude": [
    "cypress/**/*",
    "tests/**/*",
    "node_modules/**/*",
    "dist/**/*",
    "**/*.spec.ts",
    "**/*.test.ts",
    "**/*.cy.ts",
    "vite.config.ts",
    "cypress.config.ts"
  ],
  "extension": [
    ".js",
    ".ts",
    ".vue"
  ],
  "all": true,
  "check-coverage": false,
  "include": [
    "src/**/*"
  ]
}

webapp/cypress.config.ts
@@ -1,5 +1,7 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
import { defineConfig } from 'cypress'
// @ts-ignore - no types available for @cypress/code-coverage
import codeCoverageTask from '@cypress/code-coverage/task'

export default defineConfig({
  e2e: {
@@ -13,9 +15,14 @@ export default defineConfig({
    requestTimeout: 10000,
    env: {
      mailhogUrl: 'http://localhost:8025',
      // Enable coverage collection
      codeCoverage: {
        exclude: ['cypress/**/*.*', 'tests/**/*.*']
      }
    },
    setupNodeEvents(on, config) {
      // implement node event listeners here
      // Register code coverage plugin
      codeCoverageTask(on, config)
      return config
    },
  },
@@ -18,5 +18,8 @@
import './commands'
import './mailhog'

// Import Cypress code coverage support
import '@cypress/code-coverage/support'

// Alternatively you can use CommonJS syntax:
// require('./commands')
6113  webapp/package-lock.json  generated
File diff suppressed because it is too large

webapp/package.json
@@ -8,6 +8,10 @@
    "build": "vue-tsc -b && vite build",
    "preview": "vite preview",
    "lint:i18n": "node scripts/check-i18n.js",
    "test": "vitest",
    "test:ui": "vitest --ui",
    "test:run": "vitest run",
    "test:coverage": "vitest run --coverage",
    "cypress:open": "cypress open",
    "cypress:run": "cypress run",
    "cypress:headless": "cypress run --headless",
@@ -24,21 +28,31 @@
    "vue-router": "^4.6.2"
  },
  "devDependencies": {
    "@cypress/code-coverage": "^3.14.7",
    "@tailwindcss/forms": "^0.5.10",
    "@tailwindcss/postcss": "^4.1.14",
    "@types/node": "^24.6.0",
    "@vitejs/plugin-vue": "^6.0.1",
    "@vitest/coverage-v8": "^4.0.13",
    "@vitest/ui": "^4.0.13",
    "@vue/test-utils": "^2.4.6",
    "@vue/tsconfig": "^0.8.1",
    "autoprefixer": "^10.4.21",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "cypress": "^15.5.0",
    "happy-dom": "^20.0.10",
    "istanbul-lib-coverage": "^3.2.2",
    "jsdom": "^27.2.0",
    "nyc": "^17.1.0",
    "postcss": "^8.5.6",
    "tailwind-merge": "^3.3.1",
    "tailwindcss": "^4.1.14",
    "tailwindcss-animate": "^1.0.7",
    "typescript": "~5.9.3",
    "vite": "^7.1.7",
    "vite-plugin-istanbul": "^7.2.1",
    "vitest": "^4.0.13",
    "vue-tsc": "^3.1.0"
  }
}
92  webapp/tests/services/http.test.ts  Normal file
@@ -0,0 +1,92 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
import { describe, it, expect } from 'vitest'
import { extractError } from '@/services/http'
import { AxiosError } from 'axios'

describe('http service', () => {
  describe('extractError', () => {
    it('should extract error message from API error response', () => {
      const axiosError = new AxiosError('Request failed')
      axiosError.response = {
        data: {
          error: {
            code: 'VALIDATION_ERROR',
            message: 'Invalid input data'
          }
        },
        status: 400,
        statusText: 'Bad Request',
        headers: {},
        config: {} as any
      }

      const result = extractError(axiosError)
      expect(result).toBe('Invalid input data')
    })

    it('should fallback to axios error message when no API error message', () => {
      const axiosError = new AxiosError('Network Error')

      const result = extractError(axiosError)
      expect(result).toBe('Network Error')
    })

    it('should return generic message for non-axios errors', () => {
      const genericError = new Error('Something went wrong')

      const result = extractError(genericError)
      expect(result).toBe('An unexpected error occurred')
    })

    it('should return generic message for null/undefined errors', () => {
      expect(extractError(null)).toBe('An unexpected error occurred')
      expect(extractError(undefined)).toBe('An unexpected error occurred')
    })

    it('should handle axios error without response', () => {
      const axiosError = new AxiosError('Request timeout')
      axiosError.code = 'ECONNABORTED'

      const result = extractError(axiosError)
      expect(result).toBe('Request timeout')
    })

    it('should handle API error with nested details', () => {
      const axiosError = new AxiosError('Request failed')
      axiosError.response = {
        data: {
          error: {
            code: 'SIGNATURE_EXISTS',
            message: 'You have already signed this document',
            details: {
              signedAt: '2024-01-01T10:00:00Z'
            }
          }
        },
        status: 409,
        statusText: 'Conflict',
        headers: {},
        config: {} as any
      }

      const result = extractError(axiosError)
      expect(result).toBe('You have already signed this document')
    })

    it('should handle malformed API error response', () => {
      const axiosError = new AxiosError('Request failed')
      axiosError.response = {
        data: {
          // Missing error object
        },
        status: 500,
        statusText: 'Internal Server Error',
        headers: {},
        config: {} as any
      }

      const result = extractError(axiosError)
      expect(result).toBe('Request failed')
    })
  })
})
126  webapp/tests/services/referenceDetector.test.ts  Normal file
@@ -0,0 +1,126 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
import { describe, it, expect } from 'vitest'
import { detectReference, type ReferenceInfo } from '@/services/referenceDetector'

describe('referenceDetector', () => {
  describe('detectReference', () => {
    it('should detect HTTP URL', () => {
      const result = detectReference('http://example.com/document.pdf')

      expect(result.type).toBe('url')
      expect(result.value).toBe('http://example.com/document.pdf')
      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('pdf')
    })

    it('should detect HTTPS URL', () => {
      const result = detectReference('https://example.com/file.docx')

      expect(result.type).toBe('url')
      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('docx')
    })

    it('should detect downloadable PDF', () => {
      const result = detectReference('https://example.com/report.pdf')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('pdf')
    })

    it('should detect downloadable HTML', () => {
      const result = detectReference('https://example.com/page.html')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('html')
    })

    it('should detect downloadable Markdown', () => {
      const result = detectReference('https://example.com/README.md')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('md')
    })

    it('should detect downloadable text file', () => {
      const result = detectReference('https://example.com/notes.txt')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('txt')
    })

    it('should detect non-downloadable URL without extension', () => {
      const result = detectReference('https://example.com/api/endpoint')

      expect(result.type).toBe('url')
      expect(result.isDownloadable).toBe(false)
      expect(result.fileExtension).toBeUndefined()
    })

    it('should detect non-downloadable URL with non-document extension', () => {
      const result = detectReference('https://example.com/image.jpg')

      expect(result.type).toBe('url')
      expect(result.isDownloadable).toBe(false)
      expect(result.fileExtension).toBe('jpg')
    })

    it('should detect Unix file path', () => {
      const result = detectReference('/home/user/documents/file.pdf')

      expect(result.type).toBe('path')
      expect(result.value).toBe('/home/user/documents/file.pdf')
      expect(result.isDownloadable).toBe(false)
    })

    it('should detect Windows file path', () => {
      const result = detectReference('C:\\Users\\John\\file.docx')

      expect(result.type).toBe('path')
      expect(result.isDownloadable).toBe(false)
    })

    it('should detect relative path', () => {
      const result = detectReference('./documents/file.pdf')

      expect(result.type).toBe('path')
      expect(result.isDownloadable).toBe(false)
    })

    it('should detect simple reference without path or URL', () => {
      const result = detectReference('CONTRACT-2024-001')

      expect(result.type).toBe('reference')
      expect(result.value).toBe('CONTRACT-2024-001')
      expect(result.isDownloadable).toBe(false)
    })

    it('should detect alphanumeric reference', () => {
      const result = detectReference('DOC123ABC')

      expect(result.type).toBe('reference')
      expect(result.isDownloadable).toBe(false)
    })

    it('should handle case-insensitive file extensions', () => {
      const result = detectReference('https://example.com/file.PDF')

      expect(result.fileExtension).toBe('pdf')
      expect(result.isDownloadable).toBe(true)
    })

    it('should detect ODT files as downloadable', () => {
      const result = detectReference('https://example.com/document.odt')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('odt')
    })

    it('should detect RTF files as downloadable', () => {
      const result = detectReference('https://example.com/document.rtf')

      expect(result.isDownloadable).toBe(true)
      expect(result.fileExtension).toBe('rtf')
    })
  })
})
66  webapp/tests/services/titleExtractor.test.ts  Normal file
@@ -0,0 +1,66 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
import { describe, it, expect } from 'vitest'
import { extractTitleFromPath } from '@/services/titleExtractor'

describe('titleExtractor', () => {
  describe('extractTitleFromPath', () => {
    it('should extract title from URL path', () => {
      const result = extractTitleFromPath('https://example.com/my-document.pdf')
      expect(result).toBe('My Document')
    })

    it('should extract title from nested URL path', () => {
      const result = extractTitleFromPath('https://example.com/docs/user-guide.html')
      expect(result).toBe('User Guide')
    })

    it('should use hostname when path is empty', () => {
      const result = extractTitleFromPath('https://example.com/')
      // The code only capitalizes the first letter of each word
      expect(result).toBe('Example')
    })

    it('should handle underscore separators', () => {
      const result = extractTitleFromPath('https://example.com/product_spec_v2.pdf')
      expect(result).toBe('Product Spec V2')
    })

    it('should handle mixed separators', () => {
      const result = extractTitleFromPath('https://example.com/user-guide_final.docx')
      expect(result).toBe('User Guide Final')
    })

    it('should remove file extension', () => {
      const result = extractTitleFromPath('https://example.com/report.2024.pdf')
      expect(result).toBe('Report.2024')
    })

    it('should capitalize first letter of each word', () => {
      const result = extractTitleFromPath('https://example.com/annual-financial-report.pdf')
      expect(result).toBe('Annual Financial Report')
    })

    it('should handle local file paths', () => {
      const result = extractTitleFromPath('/home/user/documents/my-file.txt')
      expect(result).toBe('My File')
    })

    it('should handle Windows file paths', () => {
      // Note: in a JS environment, backslashes can be interpreted differently.
      // The code uses split(/[/\\]/), which should handle both kinds of separator.
      const result = extractTitleFromPath('C:/Users/John/Documents/contract.pdf')
      expect(result).toBe('Contract')
    })

    it('should handle simple filenames without path', () => {
      const result = extractTitleFromPath('invoice-2024.pdf')
      expect(result).toBe('Invoice 2024')
    })

    it('should handle empty segments gracefully', () => {
      const result = extractTitleFromPath('https://example.com///')
      // Empty segments are filtered out, so the hostname is used
      expect(result).toBe('Example')
    })
  })
})
48  webapp/tests/setup.ts  Normal file
@@ -0,0 +1,48 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
import { config } from '@vue/test-utils'
import { vi } from 'vitest'

// Mock the window globals injected by the backend
Object.defineProperty(window, 'ACKIFY_BASE_URL', {
  value: 'http://localhost:8080',
  writable: true,
  configurable: true
})

Object.defineProperty(window, 'ACKIFY_VERSION', {
  value: 'v0.0.0-test',
  writable: true,
  configurable: true
})

Object.defineProperty(window, 'ACKIFY_OAUTH_ENABLED', {
  value: true,
  writable: true,
  configurable: true
})

Object.defineProperty(window, 'ACKIFY_MAGICLINK_ENABLED', {
  value: true,
  writable: true,
  configurable: true
})

Object.defineProperty(window, 'ACKIFY_SMTP_ENABLED', {
  value: true,
  writable: true,
  configurable: true
})

// Mock navigator.clipboard for tests
Object.defineProperty(navigator, 'clipboard', {
  value: {
    writeText: vi.fn(() => Promise.resolve())
  },
  writable: true,
  configurable: true
})

// Global @vue/test-utils configuration
config.global.mocks = {
  $t: (key: string) => key // simple mock for vue-i18n
}

webapp/vite.config.ts
@@ -1,9 +1,20 @@
import { defineConfig } from 'vite'
import { defineConfig } from 'vitest/config'
import vue from '@vitejs/plugin-vue'
import { fileURLToPath, URL } from 'node:url'
import istanbul from 'vite-plugin-istanbul'

export default defineConfig({
  plugins: [vue()],
  plugins: [
    vue(),
    // Instrument code for E2E coverage (only in test mode)
    istanbul({
      include: 'src/*',
      exclude: ['node_modules', 'tests/', 'cypress/', 'dist/'],
      extension: ['.js', '.ts', '.vue'],
      requireEnv: false,
      forceBuildInstrument: process.env.CYPRESS_COVERAGE === 'true'
    })
  ],
  resolve: {
    alias: {
      '@': fileURLToPath(new URL('./src', import.meta.url))
@@ -22,5 +33,35 @@ export default defineConfig({
        secure: false
      }
    }
  },
  // Vitest configuration for unit tests
  test: {
    globals: true,
    environment: 'happy-dom',
    setupFiles: './tests/setup.ts',
    coverage: {
      provider: 'v8',
      reporter: ['text', 'json', 'html', 'lcov'],
      reportsDirectory: './coverage',
      exclude: [
        'node_modules/',
        'tests/',
        'cypress/',
        '**/*.spec.ts',
        '**/*.test.ts',
        'dist/',
        '.eslintrc.cjs',
        'vite.config.ts',
        'cypress.config.ts',
        'src/main.ts'
      ],
      include: ['src/**/*.{js,ts,vue}'],
      thresholds: {
        lines: 60,
        functions: 60,
        branches: 60,
        statements: 60
      }
    }
  }
})