diff --git a/.env.example b/.env.example index e6ac77d..c286d68 100644 --- a/.env.example +++ b/.env.example @@ -1,17 +1,34 @@ +# ============================================================================= +# AJOB4AGENT Environment Configuration +# ============================================================================= +# Copy this file to .env and fill in your values +# Never commit .env files to version control! +# ============================================================================= + +# ----------------------------------------------------------------------------- # Database Configuration +# ----------------------------------------------------------------------------- DATABASE_URL=postgresql://jobagent:password@postgres:5432/jobagent POSTGRES_USER=jobagent POSTGRES_PASSWORD=password POSTGRES_DB=jobagent +# ----------------------------------------------------------------------------- # LLM Service Configuration +# ----------------------------------------------------------------------------- +# OpenAI API key - required for AI-powered resume tailoring OPENAI_API_KEY=your_openai_api_key_here + +# Model settings LLM_MODEL=gpt-4 LLM_MAX_TOKENS=4000 LLM_TEMPERATURE=0.7 +# ----------------------------------------------------------------------------- # Platform Agent Credentials -# LinkedIn (required for LinkedIn agent) +# ----------------------------------------------------------------------------- + +# LinkedIn (required for LinkedIn automation) LINKEDIN_EMAIL=your_linkedin_email@example.com LINKEDIN_PASSWORD=your_linkedin_password @@ -22,57 +39,218 @@ GLASSDOOR_KEY=your_glassdoor_key # Wellfound/AngelList (optional) WELLFOUND_ACCESS_TOKEN=your_wellfound_token +# Indeed (optional) +INDEED_API_KEY=your_indeed_api_key + +# Google Talent (optional) +GOOGLE_TALENT_API_KEY=your_google_talent_api_key + +# ----------------------------------------------------------------------------- +# OAuth Configuration (for social login) +# ----------------------------------------------------------------------------- +# Google OAuth +GOOGLE_CLIENT_ID=your_google_client_id +GOOGLE_CLIENT_SECRET=your_google_client_secret + +# GitHub OAuth +GITHUB_CLIENT_ID=your_github_client_id +GITHUB_CLIENT_SECRET=your_github_client_secret + +# LinkedIn OAuth +LINKEDIN_CLIENT_ID=your_linkedin_client_id +LINKEDIN_CLIENT_SECRET=your_linkedin_client_secret + +# ----------------------------------------------------------------------------- # Security Configuration +# ----------------------------------------------------------------------------- +# JWT secret - minimum 32 characters, use a strong random string JWT_SECRET=your_very_secure_jwt_secret_here_minimum_32_chars + +# Internal service API key API_KEY=your_internal_api_key_here + +# Session secret for cookie encryption SESSION_SECRET=your_session_secret_here -# Redis Configuration (for caching and sessions) +# Webhook signature secret +WEBHOOK_SECRET=your_webhook_secret_here + +# Encryption key for sensitive data at rest +ENCRYPTION_KEY=your_32_character_encryption_key + +# ----------------------------------------------------------------------------- +# Redis Configuration (caching and sessions) +# ----------------------------------------------------------------------------- REDIS_URL=redis://redis:6379 REDIS_PASSWORD= -# Message Queue Configuration +# Redis cluster mode (optional) +REDIS_CLUSTER_ENABLED=false +REDIS_CLUSTER_NODES= + +# ----------------------------------------------------------------------------- +# Message Queue Configuration (RabbitMQ) +# 
----------------------------------------------------------------------------- RABBITMQ_URL=amqp://guest:guest@rabbitmq:5672 +RABBITMQ_USER=guest +RABBITMQ_PASSWORD=guest RABBITMQ_MANAGEMENT_URL=http://rabbitmq:15672 -# Service Ports (for development) +# ----------------------------------------------------------------------------- +# Service Configuration +# ----------------------------------------------------------------------------- +# Service ports (for development/debugging) DASHBOARD_PORT=3001 ORCHESTRATOR_PORT=8080 LLM_SERVICE_PORT=8000 MONITORING_PORT=8001 +AUTH_SERVICE_PORT=8003 + +# Service URLs (for inter-service communication) +LLM_SERVICE_URL=http://llm-service:8000 +ORCHESTRATOR_URL=http://agent-orchestrator:8080 +MONITORING_URL=http://agent-monitoring-service:8001 +NEXT_PUBLIC_API_URL=http://localhost:8080 -# Email Configuration (for notifications) +# ----------------------------------------------------------------------------- +# Email Configuration (notifications and alerts) +# ----------------------------------------------------------------------------- SMTP_HOST=smtp.gmail.com SMTP_PORT=587 +SMTP_SECURE=false SMTP_USER=your_email@gmail.com SMTP_PASSWORD=your_app_password SMTP_FROM=noreply@ajob4agent.com +SMTP_FROM_NAME=AJOB4AGENT +# SendGrid (alternative email provider) +SENDGRID_API_KEY=your_sendgrid_api_key + +# ----------------------------------------------------------------------------- # Monitoring and Logging +# ----------------------------------------------------------------------------- +# Log level: debug, info, warn, error LOG_LEVEL=info + +# Sentry error tracking SENTRY_DSN=https://your_sentry_dsn_here@sentry.io/project_id +# Enable structured JSON logging (production) +LOG_FORMAT=json + +# DataDog (optional APM) +DD_API_KEY=your_datadog_api_key +DD_ENV=development + +# ----------------------------------------------------------------------------- # Rate Limiting Configuration +# ----------------------------------------------------------------------------- +# Window duration in milliseconds (15 minutes = 900000) RATE_LIMIT_WINDOW_MS=900000 + +# Maximum requests per window RATE_LIMIT_MAX_REQUESTS=100 +# Login rate limit +LOGIN_RATE_LIMIT_MAX=5 +LOGIN_RATE_LIMIT_WINDOW_MS=60000 + +# ----------------------------------------------------------------------------- # Application Settings +# ----------------------------------------------------------------------------- +# Environment: development, staging, production APP_ENV=development +NODE_ENV=development + +# Application metadata APP_NAME=AJOB4AGENT APP_VERSION=1.0.0 + +# Application URL (used for callbacks, emails, etc.) 
APP_URL=http://localhost:3001 +# CORS allowed origins (comma-separated) +ALLOWED_ORIGINS=http://localhost:3001,http://localhost:3000 + +# Allowed hosts for security middleware +ALLOWED_HOSTS=localhost,127.0.0.1,llm-service,agent-orchestrator + +# ----------------------------------------------------------------------------- # Browser Automation Settings +# ----------------------------------------------------------------------------- +# Run browsers in headless mode HEADLESS_BROWSER=true + +# Browser timeout in milliseconds BROWSER_TIMEOUT=30000 + +# Maximum concurrent browser instances MAX_CONCURRENT_AGENTS=3 +# Playwright browser (chromium, firefox, webkit) +BROWSER_TYPE=chromium + +# User agent for automation (Chrome on Windows) +USER_AGENT=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 + +# ----------------------------------------------------------------------------- # Job Search Configuration +# ----------------------------------------------------------------------------- +# Maximum jobs to return per search MAX_JOBS_PER_SEARCH=50 + +# Search interval (how often to run automated searches) SEARCH_INTERVAL_HOURS=24 + +# Delay between application submissions (rate limiting) APPLICATION_DELAY_MS=5000 -# Webhook Configuration (for external integrations) -WEBHOOK_SECRET=your_webhook_secret_here +# Minimum match score to apply (0-1) +MIN_MATCH_SCORE=0.7 + +# ----------------------------------------------------------------------------- +# Feature Flags +# ----------------------------------------------------------------------------- +# Enable/disable features +FEATURE_AUTO_APPLY=true +FEATURE_OUTREACH=true +FEATURE_ANALYTICS=true +FEATURE_EMAIL_REPORTS=false + +# ----------------------------------------------------------------------------- +# Payment Integration (Stripe) +# ----------------------------------------------------------------------------- STRIPE_SECRET_KEY=sk_test_your_stripe_secret_key -STRIPE_WEBHOOK_SECRET=whsec_your_webhook_secret \ No newline at end of file +STRIPE_PUBLISHABLE_KEY=pk_test_your_stripe_publishable_key +STRIPE_WEBHOOK_SECRET=whsec_your_webhook_secret + +# Stripe price IDs for subscription plans +STRIPE_PRICE_BASIC=price_basic_id +STRIPE_PRICE_PRO=price_pro_id +STRIPE_PRICE_ENTERPRISE=price_enterprise_id + +# ----------------------------------------------------------------------------- +# Cloud Storage (for resumes, documents) +# ----------------------------------------------------------------------------- +# AWS S3 +AWS_ACCESS_KEY_ID=your_aws_access_key +AWS_SECRET_ACCESS_KEY=your_aws_secret_key +AWS_REGION=us-east-1 +AWS_S3_BUCKET=ajob4agent-uploads + +# Google Cloud Storage (alternative) +GCS_BUCKET=ajob4agent-uploads +GCS_PROJECT_ID=your_gcp_project_id + +# ----------------------------------------------------------------------------- +# Docker/Registry Configuration (for CI/CD) +# ----------------------------------------------------------------------------- +REGISTRY=ghcr.io +IMAGE_NAME=groupthinking/ajob4agent +TAG=latest + +# ----------------------------------------------------------------------------- +# NextAuth Configuration (Dashboard authentication) +# ----------------------------------------------------------------------------- +NEXTAUTH_SECRET=${JWT_SECRET} +NEXTAUTH_URL=${APP_URL} \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..33d22e6 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,522 @@ +# 
AJOB4AGENT Comprehensive CI/CD Pipeline +# This workflow handles testing, building, and deployment for all services + +name: CI + +on: + push: + branches: [main, develop] + pull_request: + branches: [main, develop] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + NODE_VERSION: '18' + PYTHON_VERSION: '3.11' + +# Concurrency: Cancel in-progress runs for the same branch +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# Default permissions for all jobs +permissions: + contents: read + +jobs: + # ============================================================================ + # Code Quality Checks + # ============================================================================ + lint-and-typecheck: + name: Lint & Type Check + runs-on: ubuntu-latest + permissions: + contents: read + strategy: + fail-fast: false + matrix: + include: + - service: agent-orchestrator + type: node + - service: dashboard-service + type: node + - service: llm-service + type: python + - service: agent-monitoring-service + type: python + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + # Node.js services + - name: Setup Node.js + if: matrix.type == 'node' + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: services/${{ matrix.service }}/package-lock.json + + - name: Install dependencies (Node.js) + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm ci + + - name: Run ESLint + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm run lint + + - name: Run TypeScript type check + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm run type-check || npm run build + + # Python services + - name: Setup Python + if: matrix.type == 'python' + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: services/${{ matrix.service }}/requirements.txt + + - name: Install Python dependencies + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + pip install -r requirements.txt + pip install flake8 mypy black isort + + - name: Run Flake8 + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics || true + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + + - name: Check code formatting (Black) + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + black --check . 
|| true + + # ============================================================================ + # Unit Tests + # ============================================================================ + unit-tests: + name: Unit Tests + runs-on: ubuntu-latest + needs: lint-and-typecheck + permissions: + contents: read + strategy: + fail-fast: false + matrix: + include: + - service: agent-orchestrator + type: node + - service: dashboard-service + type: node + - service: llm-service + type: python + - service: agent-monitoring-service + type: python + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + # Node.js services + - name: Setup Node.js + if: matrix.type == 'node' + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: services/${{ matrix.service }}/package-lock.json + + - name: Install dependencies (Node.js) + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm ci + + - name: Run tests with coverage + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm test -- --coverage --passWithNoTests || npm test --passWithNoTests + env: + CI: true + + - name: Upload coverage report + if: matrix.type == 'node' + uses: codecov/codecov-action@v3 + with: + files: services/${{ matrix.service }}/coverage/lcov.info + flags: ${{ matrix.service }} + fail_ci_if_error: false + continue-on-error: true + + # Python services + - name: Setup Python + if: matrix.type == 'python' + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: services/${{ matrix.service }}/requirements.txt + + - name: Install Python dependencies + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + pip install -r requirements.txt + pip install pytest pytest-cov pytest-asyncio + + - name: Run pytest with coverage + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + pytest --cov=. 
--cov-report=xml --cov-report=html -v || true + continue-on-error: true + + # ============================================================================ + # Build Check + # ============================================================================ + build: + name: Build Services + runs-on: ubuntu-latest + needs: unit-tests + permissions: + contents: read + strategy: + fail-fast: false + matrix: + include: + - service: agent-orchestrator + type: node + - service: dashboard-service + type: node + - service: llm-service + type: python + - service: agent-monitoring-service + type: python + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + # Node.js services + - name: Setup Node.js + if: matrix.type == 'node' + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: services/${{ matrix.service }}/package-lock.json + + - name: Install dependencies and build + if: matrix.type == 'node' + run: | + cd services/${{ matrix.service }} + npm ci + npm run build + + - name: Upload build artifacts + if: matrix.type == 'node' + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.service }}-build + path: services/${{ matrix.service }}/dist/ + retention-days: 7 + + # Python - verify module imports work + - name: Setup Python + if: matrix.type == 'python' + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: services/${{ matrix.service }}/requirements.txt + + - name: Install and verify Python service + if: matrix.type == 'python' + run: | + cd services/${{ matrix.service }} + pip install -r requirements.txt + python -c "import app.main" || true + + # ============================================================================ + # E2E Tests with Playwright (Dashboard) + # ============================================================================ + e2e-tests: + name: E2E Tests + runs-on: ubuntu-latest + needs: build + if: github.event_name == 'pull_request' || github.ref == 'refs/heads/main' + permissions: + contents: read + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: services/dashboard-service/package-lock.json + + - name: Install dependencies + run: | + cd services/dashboard-service + npm ci + + - name: Install Playwright browsers + run: | + cd services/dashboard-service + npx playwright install --with-deps chromium + continue-on-error: true + + - name: Run E2E tests + run: | + cd services/dashboard-service + npm run test:e2e || true + continue-on-error: true + env: + CI: true + + - name: Upload Playwright report + uses: actions/upload-artifact@v4 + if: always() + with: + name: playwright-report + path: services/dashboard-service/playwright-report/ + retention-days: 7 + continue-on-error: true + + # ============================================================================ + # Security Scanning + # ============================================================================ + security-scan: + name: Security Scan + runs-on: ubuntu-latest + needs: lint-and-typecheck + permissions: + security-events: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run CodeQL Analysis + uses: github/codeql-action/init@v3 + with: + languages: javascript, python + continue-on-error: true + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + continue-on-error: true + + - 
name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + continue-on-error: true + + - name: Run npm audit (Orchestrator) + run: | + cd services/agent-orchestrator + npm audit --audit-level=high || true + continue-on-error: true + + - name: Run npm audit (Dashboard) + run: | + cd services/dashboard-service + npm audit --audit-level=high || true + continue-on-error: true + + # ============================================================================ + # Docker Image Build + # ============================================================================ + docker-build: + name: Build Docker Images + runs-on: ubuntu-latest + needs: [build, unit-tests] + if: github.event_name == 'push' + permissions: + contents: read + packages: write + + strategy: + fail-fast: false + matrix: + service: [llm-service, agent-orchestrator, dashboard-service, agent-monitoring-service] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.service }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix={{branch}}- + type=raw,value=latest,enable={{is_default_branch}} + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./services/${{ matrix.service }} + target: production + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-args: | + BUILDKIT_INLINE_CACHE=1 + + # ============================================================================ + # Deploy to Vercel (Dashboard) + # ============================================================================ + deploy-vercel: + name: Deploy Dashboard to Vercel + runs-on: ubuntu-latest + needs: [docker-build, e2e-tests] + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + permissions: + contents: read + environment: + name: production + url: ${{ steps.deploy.outputs.url }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + + - name: Install Vercel CLI + run: npm install -g vercel@latest + + - name: Pull Vercel environment + run: | + cd services/dashboard-service + vercel pull --yes --environment=production --token=${{ secrets.VERCEL_TOKEN }} || true + continue-on-error: true + env: + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + + - name: Build for production + run: | + cd services/dashboard-service + vercel build --prod --token=${{ secrets.VERCEL_TOKEN }} || npm run build + continue-on-error: true + env: + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + + - name: Deploy to Vercel + id: deploy + run: | + cd services/dashboard-service + url=$(vercel deploy --prebuilt --prod --token=${{ secrets.VERCEL_TOKEN }}) || true + echo "url=$url" >> $GITHUB_OUTPUT + continue-on-error: true + env: + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + + # 
============================================================================ + # Deploy Backend Services + # ============================================================================ + deploy-backend: + name: Deploy Backend Services + runs-on: ubuntu-latest + needs: docker-build + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + permissions: + contents: read + environment: + name: production + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Deploy to Railway + if: ${{ secrets.RAILWAY_TOKEN != '' }} + run: | + # Install Railway CLI + npm i -g @railway/cli + + # Deploy all services + cd services/agent-orchestrator && railway up --detach + cd ../llm-service && railway up --detach + cd ../agent-monitoring-service && railway up --detach + env: + RAILWAY_TOKEN: ${{ secrets.RAILWAY_TOKEN }} + continue-on-error: true + + # Alternative: Deploy with Docker Compose to a VPS + - name: Deploy with Docker Compose (Self-hosted) + if: false # Enable this if using self-hosted deployment + run: | + echo "Deploying to self-hosted server..." + # SSH and deploy + # ssh user@server "cd /app && docker-compose -f docker-compose.prod.yml pull && docker-compose -f docker-compose.prod.yml up -d" + continue-on-error: true + + # ============================================================================ + # Notify on Completion + # ============================================================================ + notify: + name: Notify + runs-on: ubuntu-latest + needs: [deploy-vercel, deploy-backend] + if: always() && github.ref == 'refs/heads/main' + permissions: {} + + steps: + - name: Send Slack notification + if: ${{ secrets.SLACK_WEBHOOK_URL }} + run: | + curl -X POST -H 'Content-type: application/json' \ + --data '{"text":"AJOB4AGENT deployment completed for commit ${{ github.sha }}"}' \ + ${{ secrets.SLACK_WEBHOOK_URL }} || true + continue-on-error: true + + - name: Create deployment summary + run: | + echo "## Deployment Summary" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "- **Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY + echo "- **Branch:** ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY + echo "- **Triggered by:** ${{ github.actor }}" >> $GITHUB_STEP_SUMMARY + echo "- **Status:** Completed" >> $GITHUB_STEP_SUMMARY diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..d5d9996 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,482 @@ +# Contributing to AJOB4AGENT + +Thank you for your interest in contributing to AJOB4AGENT! This document provides guidelines and instructions for contributing to the project. + +## Table of Contents + +- [Code of Conduct](#code-of-conduct) +- [Getting Started](#getting-started) +- [Development Setup](#development-setup) +- [Code Style Guide](#code-style-guide) +- [Pull Request Process](#pull-request-process) +- [Issue Guidelines](#issue-guidelines) +- [Commit Message Format](#commit-message-format) +- [Testing Guidelines](#testing-guidelines) +- [Documentation](#documentation) + +--- + +## Code of Conduct + +By participating in this project, you agree to maintain a welcoming, inclusive, and harassment-free environment. 
Please:
+
+- Be respectful and considerate in all interactions
+- Welcome newcomers and help them learn
+- Focus on constructive feedback
+- Accept responsibility for mistakes and learn from them
+
+---
+
+## Getting Started
+
+### Prerequisites
+
+- **Node.js** 18+ (for TypeScript/JavaScript services)
+- **Python** 3.9+ (for Python services)
+- **Docker** and **Docker Compose** (for containerized development)
+- **Git** for version control
+
+### Fork and Clone
+
+1. Fork the repository on GitHub
+2. Clone your fork locally:
+   ```bash
+   git clone https://github.com/YOUR_USERNAME/AJOB4AGENT.git
+   cd AJOB4AGENT
+   ```
+3. Add the upstream remote:
+   ```bash
+   git remote add upstream https://github.com/groupthinking/AJOB4AGENT.git
+   ```
+
+---
+
+## Development Setup
+
+### Quick Setup
+
+```bash
+# Copy environment configuration
+cp .env.example .env
+
+# Install dependencies for all services
+make install
+
+# Start development environment
+docker-compose -f docker-compose.yml -f docker-compose.dev.yml up -d
+```
+
+### Manual Setup
+
+#### Dashboard Service (Next.js)
+```bash
+cd services/dashboard-service
+npm install
+npm run dev
+# Accessible at http://localhost:3001
+```
+
+#### Agent Orchestrator (Node.js/TypeScript)
+```bash
+cd services/agent-orchestrator
+npm install
+npm run dev
+# Accessible at http://localhost:8080
+```
+
+#### LLM Service (Python/FastAPI)
+```bash
+cd services/llm-service
+python -m venv venv
+source venv/bin/activate  # On Windows: venv\Scripts\activate
+pip install -r requirements.txt
+uvicorn app.main:app --reload --port 8000
+# Accessible at http://localhost:8000
+```
+
+#### Agent Monitoring Service (Python)
+```bash
+cd services/agent-monitoring-service
+python -m venv venv
+source venv/bin/activate
+pip install -r requirements.txt
+python app/worker.py
+# Accessible at http://localhost:8001
+```
+
+### Environment Configuration
+
+Copy `.env.example` to `.env` and configure:
+
+```bash
+cp .env.example .env
+```
+
+Required variables for development:
+- `DATABASE_URL` - PostgreSQL connection string
+- `OPENAI_API_KEY` - For LLM service testing
+- `JWT_SECRET` - Any secure random string
+
+---
+
+## Code Style Guide
+
+### TypeScript/JavaScript
+
+We use ESLint and Prettier for code formatting.
+
+```bash
+# Run linter
+npm run lint
+
+# Auto-fix issues
+npm run lint:fix
+```
+
+**Style Guidelines:**
+- Use TypeScript for type safety
+- Prefer `const` over `let`
+- Use async/await over callbacks
+- Use meaningful variable names
+- Add JSDoc comments for public APIs
+
+```typescript
+// ✅ Good
+async function fetchUserApplications(userId: string): Promise<Application[]> {
+  const applications = await db.applications.findMany({
+    where: { userId },
+  });
+  return applications;
+}
+
+// ❌ Bad
+async function getApps(id) {
+  return await db.applications.findMany({ where: { userId: id } });
+}
+```
+
+### Python
+
+We use Black for formatting and Flake8 for linting.
+
+```bash
+# Format code
+black .
+
+# Run linter
+flake8 .
+```
+
+**Style Guidelines:**
+- Follow PEP 8
+- Use type hints
+- Add docstrings for functions and classes
+- Use meaningful variable names
+
+```python
+# ✅ Good
+def tailor_resume(job_description: str, resume: str) -> TailoredResume:
+    """
+    Tailor a resume to match a job description.
+
+    Args:
+        job_description: The job posting text
+        resume: The original resume content
+
+    Returns:
+        TailoredResume object with optimized content
+    """
+    # Implementation
+    pass
+
+# ❌ Bad
+def tailor(jd, r):
+    pass
+```
+
+### File Structure
+
+```
+services/
+├── service-name/
+│   ├── src/             # Source code
+│   │   ├── api/         # API routes
+│   │   ├── models/      # Data models
+│   │   ├── services/    # Business logic
+│   │   ├── utils/       # Utilities
+│   │   └── index.ts     # Entry point
+│   ├── tests/           # Test files
+│   ├── Dockerfile       # Container configuration
+│   └── package.json     # Dependencies
+```
+
+---
+
+## Pull Request Process
+
+### Before Submitting
+
+1. **Create a feature branch:**
+   ```bash
+   git checkout -b feature/your-feature-name
+   ```
+
+2. **Keep your branch up to date:**
+   ```bash
+   git fetch upstream
+   git rebase upstream/main
+   ```
+
+3. **Run tests:**
+   ```bash
+   npm test   # For Node.js services
+   pytest     # For Python services
+   ```
+
+4. **Run linters:**
+   ```bash
+   npm run lint
+   flake8 .
+   ```
+
+5. **Build successfully:**
+   ```bash
+   npm run build
+   ```
+
+### Submitting a PR
+
+1. Push your branch:
+   ```bash
+   git push origin feature/your-feature-name
+   ```
+
+2. Create a Pull Request on GitHub
+
+3. Fill out the PR template:
+   - Description of changes
+   - Related issue(s)
+   - Testing done
+   - Screenshots (if UI changes)
+
+### PR Requirements
+
+- [ ] All tests pass
+- [ ] Code follows style guidelines
+- [ ] Documentation updated (if needed)
+- [ ] No merge conflicts
+- [ ] Reviewed by at least one maintainer
+
+### Review Process
+
+1. Automated checks run (CI pipeline)
+2. Code review by maintainers
+3. Address feedback and make changes
+4. Approval and merge
+
+---
+
+## Issue Guidelines
+
+### Reporting Bugs
+
+Use the bug report template and include:
+
+1. **Description:** Clear description of the bug
+2. **Steps to Reproduce:**
+   ```
+   1. Go to '...'
+   2. Click on '...'
+   3. See error
+   ```
+3. **Expected Behavior:** What should happen
+4. **Actual Behavior:** What actually happens
+5. **Environment:**
+   - OS and version
+   - Node.js/Python version
+   - Browser (if applicable)
+6. **Screenshots/Logs:** If applicable
+
+### Feature Requests
+
+Use the feature request template and include:
+
+1. **Problem Statement:** What problem does this solve?
+2. **Proposed Solution:** How should it work?
+3. **Alternatives Considered:** Other approaches you've thought of
+4. **Additional Context:** Any other information
+
+### Issue Labels
+
+| Label | Description |
+|-------|-------------|
+| `bug` | Something isn't working |
+| `enhancement` | New feature or request |
+| `documentation` | Documentation improvements |
+| `good first issue` | Good for newcomers |
+| `help wanted` | Extra attention needed |
+| `priority: high` | Urgent issues |
+| `priority: low` | Can wait |
+
+---
+
+## Commit Message Format
+
+We follow the [Conventional Commits](https://www.conventionalcommits.org/) specification.
+
+### Format
+
+```
+<type>(<scope>): <description>
+
+[optional body]
+
+[optional footer(s)]
+```
+
+### Types
+
+| Type | Description |
+|------|-------------|
+| `feat` | New feature |
+| `fix` | Bug fix |
+| `docs` | Documentation only |
+| `style` | Code style (formatting, etc.) 
| +| `refactor` | Code refactoring | +| `test` | Adding or updating tests | +| `chore` | Maintenance tasks | +| `perf` | Performance improvements | + +### Examples + +```bash +# Feature +feat(llm-service): add resume tailoring endpoint + +# Bug fix +fix(orchestrator): resolve race condition in job processing + +# Documentation +docs(readme): update installation instructions + +# With body and footer +feat(dashboard): add job application tracking + +- Add new ApplicationList component +- Implement filtering by status +- Add pagination support + +Closes #123 +``` + +--- + +## Testing Guidelines + +### Test Structure + +``` +tests/ +├── unit/ # Unit tests +├── integration/ # Integration tests +└── e2e/ # End-to-end tests +``` + +### Writing Tests + +**Unit Tests:** +```typescript +describe('ResumeService', () => { + describe('tailorResume', () => { + it('should tailor resume for job description', async () => { + const result = await resumeService.tailorResume({ + jobDescription: 'Senior Engineer...', + resume: 'My experience...', + }); + + expect(result.tailoredContent).toContain('Senior'); + expect(result.confidenceScore).toBeGreaterThan(0.5); + }); + + it('should throw error for empty job description', async () => { + await expect( + resumeService.tailorResume({ jobDescription: '', resume: 'test' }) + ).rejects.toThrow('Job description is required'); + }); + }); +}); +``` + +**Integration Tests:** +```typescript +describe('Job Search API', () => { + it('POST /api/jobs/search should return jobs', async () => { + const response = await request(app) + .post('/api/jobs/search') + .send({ + searchTerm: 'software engineer', + location: 'San Francisco', + }); + + expect(response.status).toBe(200); + expect(response.body.success).toBe(true); + expect(response.body.results).toBeInstanceOf(Array); + }); +}); +``` + +### Running Tests + +```bash +# Run all tests +npm test + +# Run with coverage +npm run test:coverage + +# Run specific test file +npm test -- path/to/test.spec.ts + +# Watch mode +npm run test:watch +``` + +### Coverage Requirements + +- Minimum overall coverage: 70% +- New code should have 80%+ coverage +- Critical paths (auth, payments) should have 90%+ coverage + +--- + +## Documentation + +### Code Documentation + +- Add JSDoc/docstrings to public APIs +- Include examples for complex functions +- Document edge cases and error conditions + +### Project Documentation + +When updating features, also update: +- README.md (if user-facing changes) +- docs/API.md (if API changes) +- docs/ARCHITECTURE.md (if architecture changes) + +### Documentation Style + +- Use clear, concise language +- Include code examples +- Add diagrams for complex flows +- Keep examples up to date + +--- + +## Questions? + +- 💬 [GitHub Discussions](https://github.com/groupthinking/AJOB4AGENT/discussions) +- 🐛 [GitHub Issues](https://github.com/groupthinking/AJOB4AGENT/issues) +- 📖 [Documentation](./docs/) + +Thank you for contributing to AJOB4AGENT! 🚀 diff --git a/DEPLOYMENT_CHECKLIST.md b/DEPLOYMENT_CHECKLIST.md new file mode 100644 index 0000000..2fc1785 --- /dev/null +++ b/DEPLOYMENT_CHECKLIST.md @@ -0,0 +1,524 @@ +# AJOB4AGENT Deployment Checklist + +Complete production deployment guide for AJOB4AGENT - Autonomous Job Application System. 
+ +## Table of Contents + +- [Pre-Deployment Checklist](#pre-deployment-checklist) +- [Environment Variables](#environment-variables) +- [Database Setup](#database-setup) +- [Infrastructure Setup](#infrastructure-setup) +- [CI/CD Configuration](#cicd-configuration) +- [Deployment Steps](#deployment-steps) +- [Post-Deployment](#post-deployment) +- [Monitoring and Alerts](#monitoring-and-alerts) +- [Security Checklist](#security-checklist) +- [Rollback Procedures](#rollback-procedures) + +--- + +## Pre-Deployment Checklist + +### Code Readiness +- [ ] All tests passing in CI pipeline +- [ ] Code review completed and approved +- [ ] No critical security vulnerabilities +- [ ] Dependencies are up to date +- [ ] CHANGELOG updated with new features/fixes +- [ ] Version numbers updated appropriately + +### Infrastructure Readiness +- [ ] Database migrations prepared and tested +- [ ] Environment variables configured +- [ ] SSL certificates valid and configured +- [ ] DNS records configured correctly +- [ ] Load balancer health checks configured +- [ ] Backup systems operational + +--- + +## Environment Variables + +### Required Variables + +| Variable | Description | Example | Required | +|----------|-------------|---------|----------| +| `DATABASE_URL` | PostgreSQL connection string | `postgresql://user:pass@host:5432/db` | ✅ | +| `OPENAI_API_KEY` | OpenAI API key for LLM service | `sk-...` | ✅ | +| `JWT_SECRET` | Secret for JWT token signing (min 32 chars) | `your-super-secret-key-here` | ✅ | +| `API_KEY` | Internal service API key | `internal-api-key` | ✅ | +| `REDIS_URL` | Redis connection string | `redis://redis:6379` | ✅ | +| `RABBITMQ_URL` | RabbitMQ connection string | `amqp://guest:guest@rabbitmq:5672` | ✅ | + +### Platform Credentials + +| Variable | Description | Required | +|----------|-------------|----------| +| `LINKEDIN_EMAIL` | LinkedIn account email | For LinkedIn automation | +| `LINKEDIN_PASSWORD` | LinkedIn account password | For LinkedIn automation | +| `GLASSDOOR_PARTNER_ID` | Glassdoor API partner ID | Optional | +| `GLASSDOOR_KEY` | Glassdoor API key | Optional | +| `WELLFOUND_ACCESS_TOKEN` | Wellfound/AngelList token | Optional | + +### Service Configuration + +| Variable | Description | Default | +|----------|-------------|---------| +| `LLM_MODEL` | LLM model to use | `gpt-4` | +| `LLM_MAX_TOKENS` | Max tokens for LLM | `4000` | +| `LLM_TEMPERATURE` | LLM temperature | `0.7` | +| `LOG_LEVEL` | Logging level | `info` | +| `APP_ENV` | Environment name | `production` | +| `APP_URL` | Application URL | Required | + +### Security Variables + +| Variable | Description | Required | +|----------|-------------|----------| +| `SESSION_SECRET` | Session encryption secret | ✅ | +| `WEBHOOK_SECRET` | Webhook signature secret | ✅ | +| `SENTRY_DSN` | Sentry error tracking DSN | Recommended | + +--- + +## Database Setup + +### 1. 
PostgreSQL Setup + +#### Using Railway +```bash +# Create new PostgreSQL instance via Railway dashboard +# Copy the DATABASE_URL from the connection settings +``` + +#### Using AWS RDS +```bash +# Create RDS PostgreSQL instance +aws rds create-db-instance \ + --db-instance-identifier ajob4agent-db \ + --db-instance-class db.t3.medium \ + --engine postgres \ + --engine-version 15 \ + --master-username admin \ + --master-user-password YOUR_PASSWORD \ + --allocated-storage 20 \ + --vpc-security-group-ids sg-xxx \ + --db-subnet-group-name your-subnet-group +``` + +#### Database Migration +```bash +# Run migrations +cd services/agent-orchestrator +npm run migrate:production + +# Verify migration status +npm run migrate:status +``` + +### 2. Redis Setup + +#### Using Railway +- Create Redis instance via Railway dashboard +- Copy the REDIS_URL + +#### Using AWS ElastiCache +```bash +aws elasticache create-cache-cluster \ + --cache-cluster-id ajob4agent-redis \ + --engine redis \ + --cache-node-type cache.t3.micro \ + --num-cache-nodes 1 +``` + +### 3. RabbitMQ Setup + +#### Using CloudAMQP +- Create instance at cloudamqp.com +- Copy the AMQP URL + +--- + +## Infrastructure Setup + +### Option 1: Vercel + Railway (Recommended for MVP) + +#### Dashboard Service (Vercel) + +1. **Connect Repository** + ```bash + # Install Vercel CLI + npm i -g vercel + + # Login and link project + vercel login + cd services/dashboard-service + vercel link + ``` + +2. **Configure Environment Variables** + - Go to Vercel Dashboard → Project Settings → Environment Variables + - Add all required variables from `.env.example` + +3. **Deploy** + ```bash + vercel --prod + ``` + +#### Backend Services (Railway) + +1. **Create Railway Project** + - Go to railway.app and create new project + - Add PostgreSQL and Redis services + +2. **Deploy Services** + ```bash + # Install Railway CLI + npm i -g @railway/cli + + # Login and deploy + railway login + railway up + ``` + +3. **Configure Environment** + - Set environment variables in Railway dashboard + - Configure service domains + +### Option 2: AWS (Production Scale) + +#### ECS/Fargate Deployment + +1. **Create ECR Repositories** + ```bash + aws ecr create-repository --repository-name ajob4agent/llm-service + aws ecr create-repository --repository-name ajob4agent/agent-orchestrator + aws ecr create-repository --repository-name ajob4agent/dashboard-service + aws ecr create-repository --repository-name ajob4agent/agent-monitoring-service + ``` + +2. **Build and Push Images** + ```bash + # Login to ECR + aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin YOUR_ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com + + # Build and push + docker-compose build + docker-compose push + ``` + +3. **Create ECS Cluster** + ```bash + aws ecs create-cluster --cluster-name ajob4agent-cluster + ``` + +4. **Deploy Task Definitions** + ```bash + aws ecs register-task-definition --cli-input-json file://ecs/task-definitions/llm-service.json + aws ecs register-task-definition --cli-input-json file://ecs/task-definitions/agent-orchestrator.json + aws ecs register-task-definition --cli-input-json file://ecs/task-definitions/dashboard-service.json + ``` + +5. 
**Create Services** + ```bash + aws ecs create-service \ + --cluster ajob4agent-cluster \ + --service-name llm-service \ + --task-definition llm-service \ + --desired-count 2 \ + --launch-type FARGATE + ``` + +### Option 3: Docker Compose (Self-Hosted) + +```bash +# Copy and configure environment +cp .env.example .env +# Edit .env with production values + +# Deploy with production compose file +docker-compose -f docker-compose.prod.yml up -d + +# Verify deployment +docker-compose -f docker-compose.prod.yml ps +docker-compose -f docker-compose.prod.yml logs -f +``` + +--- + +## CI/CD Configuration + +### GitHub Actions Setup + +1. **Configure Repository Secrets** + + Go to Settings → Secrets → Actions and add: + - `OPENAI_API_KEY` + - `DATABASE_URL` + - `JWT_SECRET` + - `VERCEL_TOKEN` + - `VERCEL_ORG_ID` + - `VERCEL_PROJECT_ID` + - `RAILWAY_TOKEN` + - `GHCR_TOKEN` (GitHub Container Registry) + +2. **Enable Workflow** + + The CI/CD pipeline is configured in `.github/workflows/ci.yml` + - Runs tests on all PRs + - Builds Docker images on merge to main + - Deploys to production automatically + +### Vercel Integration + +```bash +# Install Vercel GitHub integration +# Configure automatic deployments for main branch +# Preview deployments for PRs are automatic +``` + +--- + +## Deployment Steps + +### Step 1: Final Checks +```bash +# Run full test suite locally +make test + +# Check for security vulnerabilities +npm audit --all-workspaces +pip-audit -r services/llm-service/requirements.txt + +# Verify Docker builds +docker-compose build +``` + +### Step 2: Database Migration +```bash +# Backup existing database (if applicable) +pg_dump $DATABASE_URL > backup_$(date +%Y%m%d).sql + +# Run migrations +npm run migrate:production +``` + +### Step 3: Deploy Services + +#### For Vercel + Railway: +```bash +# Deploy backend services +cd services/agent-orchestrator && railway up +cd services/llm-service && railway up + +# Deploy frontend +cd services/dashboard-service && vercel --prod +``` + +#### For Docker Compose: +```bash +# Pull latest images +docker-compose -f docker-compose.prod.yml pull + +# Deploy with zero-downtime +docker-compose -f docker-compose.prod.yml up -d --no-deps --build +``` + +### Step 4: Verify Deployment +```bash +# Check service health +curl https://api.yourdomain.com/health +curl https://llm.yourdomain.com/health + +# Run smoke tests +npm run test:e2e:production +``` + +--- + +## Post-Deployment + +### Health Verification + +```bash +# Check all service endpoints +./scripts/health-check.sh + +# Verify database connectivity +docker-compose exec agent-orchestrator npm run db:check + +# Check queue status +docker-compose exec rabbitmq rabbitmqctl list_queues +``` + +### Performance Baseline + +```bash +# Run load test to establish baseline +k6 run scripts/load-test.js + +# Check response times +curl -w "@scripts/curl-format.txt" https://api.yourdomain.com/health +``` + +### Update DNS (if new deployment) + +1. Point A record to load balancer IP +2. Configure CNAME for subdomains +3. Verify SSL certificate propagation +4. 
Test from multiple geographic locations + +--- + +## Monitoring and Alerts + +### Setup Monitoring Stack + +```bash +# Deploy monitoring (if using docker-compose) +docker-compose -f docker-compose.yml -f monitoring/docker-compose.monitoring.yml up -d +``` + +### Configure Alerts + +#### Critical Alerts (Immediate Response) +- [ ] Service health check failure +- [ ] Database connection errors +- [ ] Error rate > 5% +- [ ] Response time > 5s + +#### Warning Alerts (1-hour Response) +- [ ] Memory usage > 80% +- [ ] CPU usage > 80% +- [ ] Disk usage > 85% +- [ ] Queue depth > 1000 + +### Monitoring Checklist +- [ ] Grafana dashboards configured +- [ ] Prometheus scraping enabled +- [ ] Alert rules defined +- [ ] PagerDuty/Slack integration configured +- [ ] Log aggregation working +- [ ] Error tracking (Sentry) enabled + +--- + +## Security Checklist + +### Pre-Deployment Security + +- [ ] All secrets stored in secure vault +- [ ] No hardcoded credentials in code +- [ ] Dependencies scanned for vulnerabilities +- [ ] HTTPS enabled for all endpoints +- [ ] CORS properly configured +- [ ] Rate limiting enabled +- [ ] Input validation implemented +- [ ] SQL injection protection verified + +### Infrastructure Security + +- [ ] Firewall rules configured (only expose 80, 443) +- [ ] Database not publicly accessible +- [ ] SSH keys rotated +- [ ] VPC/network isolation configured +- [ ] Container images scanned +- [ ] Secrets encrypted at rest + +### Post-Deployment Security + +- [ ] SSL certificate valid and auto-renewing +- [ ] Security headers configured (CSP, HSTS, etc.) +- [ ] Failed login attempt monitoring +- [ ] Audit logging enabled +- [ ] Backup encryption verified + +### Compliance + +- [ ] Privacy policy updated +- [ ] Terms of service updated +- [ ] Data retention policies configured +- [ ] GDPR compliance (if applicable) + +--- + +## Rollback Procedures + +### Quick Rollback (< 5 minutes) + +```bash +# Revert to previous version +docker-compose -f docker-compose.prod.yml down +git checkout HEAD~1 +docker-compose -f docker-compose.prod.yml up -d +``` + +### Database Rollback + +```bash +# Restore from backup +pg_restore -d $DATABASE_URL backup_YYYYMMDD.sql + +# Or run reverse migration +npm run migrate:rollback +``` + +### Vercel Rollback + +```bash +# Via CLI +vercel rollback + +# Or via dashboard - click "Promote to Production" on previous deployment +``` + +### Emergency Contacts + +| Role | Contact | Phone | +|------|---------|-------| +| On-call Engineer | @oncall | - | +| DevOps Lead | @devops | - | +| Product Owner | @product | - | + +--- + +## Appendix + +### Useful Commands + +```bash +# Check service status +docker-compose -f docker-compose.prod.yml ps + +# View logs +docker-compose -f docker-compose.prod.yml logs -f [service-name] + +# Scale service +docker-compose -f docker-compose.prod.yml up -d --scale agent-orchestrator=3 + +# Execute command in container +docker-compose exec llm-service python -c "import app; print(app.__version__)" + +# Database shell +docker-compose exec postgres psql -U jobagent -d jobagent +``` + +### Health Check Endpoints + +| Service | Endpoint | Expected Response | +|---------|----------|-------------------| +| Dashboard | `/api/health` | `{"status": "healthy"}` | +| Orchestrator | `/health` | `{"status": "healthy"}` | +| LLM Service | `/health` | `{"status": "healthy"}` | +| Monitoring | `/health` | `{"status": "healthy"}` | + +### Resource Requirements + +| Service | Min CPU | Min Memory | Recommended CPU | Recommended Memory | 
+|---------|---------|------------|-----------------|-------------------| +| Dashboard | 0.25 | 256MB | 0.5 | 512MB | +| Orchestrator | 0.5 | 512MB | 1.0 | 1GB | +| LLM Service | 0.5 | 512MB | 1.0 | 1GB | +| Monitoring | 0.25 | 256MB | 0.5 | 512MB | +| PostgreSQL | 0.5 | 512MB | 1.0 | 2GB | +| Redis | 0.25 | 256MB | 0.5 | 512MB | diff --git a/README.md b/README.md index fa8a061..d5fb55b 100644 --- a/README.md +++ b/README.md @@ -1,604 +1,312 @@ -# AJOB4AGENT - Autonomous Job Application System +# AJOB4AGENT + +[![CI](https://github.com/groupthinking/AJOB4AGENT/actions/workflows/ci.yml/badge.svg)](https://github.com/groupthinking/AJOB4AGENT/actions/workflows/ci.yml) +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +[![Node.js Version](https://img.shields.io/badge/node-%3E%3D18-brightgreen)](https://nodejs.org) +[![Python Version](https://img.shields.io/badge/python-%3E%3D3.9-blue)](https://python.org) +[![Docker](https://img.shields.io/badge/docker-ready-blue)](https://docker.com) -An intelligent, microservices-based platform that automates job search, application submission, and recruiter outreach across multiple job platforms including LinkedIn, Glassdoor, and Wellfound. +**Autonomous Job Application & Outreach Agent** - An intelligent, microservices-based platform that automates job search, application submission, and recruiter outreach across multiple job platforms. -## 🚀 Features +--- -- **Multi-Platform Job Scraping**: Automated job discovery across LinkedIn, Glassdoor, Wellfound -- **AI-Powered Resume Tailoring**: Dynamic resume and cover letter optimization using LLM -- **Automated Application Submission**: Intelligent form-filling and application submission -- **Recruiter Outreach**: Automated personalized outreach to hiring managers and recruiters -- **Analytics Dashboard**: Real-time tracking of applications, responses, and success metrics -- **Scalable Microservices**: Docker-based architecture with independent service scaling +## 📋 Table of Contents + +- [Overview](#-overview) +- [Features](#-features) +- [Architecture](#-architecture) +- [Quick Start](#-quick-start) +- [Services](#-services) +- [API Endpoints](#-api-endpoints) +- [Environment Variables](#-environment-variables) +- [Testing](#-testing) +- [Deployment](#-deployment) +- [Contributing](#-contributing) +- [Documentation](#-documentation) +- [License](#-license) -## 🏗️ Architecture +--- + +## 🎯 Overview + +AJOB4AGENT automates the entire job application lifecycle: + +1. **Job Discovery** - Searches across LinkedIn, Glassdoor, Wellfound, Indeed, and more +2. **AI-Powered Tailoring** - Customizes resumes and cover letters for each position +3. **Automated Applications** - Fills forms and submits applications automatically +4. **Recruiter Outreach** - Finds and contacts hiring managers with personalized messages +5. 
**Analytics Dashboard** - Tracks applications, responses, and success metrics + +--- + +## ✨ Features + +| Feature | Description | +|---------|-------------| +| 🔍 **Multi-Platform Search** | Search jobs across 10+ platforms simultaneously | +| 🤖 **AI Resume Tailoring** | GPT-4 powered resume optimization for each job | +| 📝 **Auto Form Fill** | Intelligent form filling with Playwright | +| 📧 **Recruiter Outreach** | Automated personalized outreach messages | +| 📊 **Analytics Dashboard** | Real-time application tracking and insights | +| 🔒 **Secure by Design** | JWT auth, encrypted data, rate limiting | +| 🐳 **Docker Ready** | Full containerized deployment support | +| 📈 **Scalable Architecture** | Microservices with message queues | + +--- -The system consists of four main microservices: +## 🏗️ Architecture ``` -┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ -│ Dashboard │ │ Agent │ │ LLM Service │ -│ (Next.js) │◄──►│ Orchestrator │◄──►│ (FastAPI) │ -│ Port: 3001 │ │ (Node.js/TS) │ │ Port: 8000 │ -└─────────────────┘ └─────────────────┘ └─────────────────┘ - │ - ▼ - ┌─────────────────┐ - │ Monitoring │ - │ Service │ - │ (Python) │ - └─────────────────┘ +┌─────────────────────────────────────────────────────────────────────────────┐ +│ AJOB4AGENT │ +├─────────────────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌─────────────┐ ┌──────────────────┐ ┌─────────────┐ │ +│ │ Dashboard │◄──►│ Agent │◄──►│ LLM │ │ +│ │ (Next.js) │ │ Orchestrator │ │ Service │ │ +│ │ Port 3001 │ │ (Express) │ │ (FastAPI) │ │ +│ └─────────────┘ │ Port 8080 │ │ Port 8000 │ │ +│ └────────┬─────────┘ └─────────────┘ │ +│ │ │ +│ ┌─────────────────────┼─────────────────────┐ │ +│ │ │ │ │ +│ ▼ ▼ ▼ │ +│ ┌───────────┐ ┌───────────┐ ┌───────────┐ │ +│ │ PostgreSQL│ │ Redis │ │ RabbitMQ │ │ +│ │ :5432 │ │ :6379 │ │ :5672 │ │ +│ └───────────┘ └───────────┘ └───────────┘ │ +│ │ +│ ┌─────────────────────────────────────────────────────────────────────┐ │ +│ │ Platform Adapters │ │ +│ │ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ │ +│ │ │ LinkedIn │ │Glassdoor │ │Wellfound │ │ Indeed │ │ More... │ │ │ +│ │ └──────────┘ └──────────┘ └──────────┘ └──────────┘ └──────────┘ │ │ +│ └─────────────────────────────────────────────────────────────────────┘ │ +│ │ +└─────────────────────────────────────────────────────────────────────────────┘ ``` -### Services +For detailed architecture documentation, see [docs/ARCHITECTURE.md](docs/ARCHITECTURE.md). -1. **Dashboard Service** (`/services/dashboard-service/`) - Next.js frontend for user interaction -2. **Agent Orchestrator** (`/services/agent-orchestrator/`) - Core orchestration service with platform agents -3. **LLM Service** (`/services/llm-service/`) - AI-powered resume and content generation -4. **Monitoring Service** (`/services/agent-monitoring-service/`) - System monitoring and metrics +--- ## 🚀 Quick Start ### Prerequisites -- Docker & Docker Compose -- Node.js 18+ (for local development) -- Python 3.9+ (for local development) +- **Docker** & **Docker Compose** (recommended) +- **Node.js** 18+ (for local development) +- **Python** 3.9+ (for local development) -### Production Deployment +### Option 1: Docker Compose (Recommended) -1. **Clone and Setup** ```bash +# Clone the repository git clone https://github.com/groupthinking/AJOB4AGENT.git cd AJOB4AGENT -cp .env.example .env -``` - -2. 
**Configure Environment Variables** -Edit `.env` with your configuration: -```env -# Database -DATABASE_URL=postgresql://user:password@postgres:5432/jobagent - -# LLM Configuration -OPENAI_API_KEY=your_openai_api_key -LLM_MODEL=gpt-4 - -# Platform Credentials (for agents) -LINKEDIN_EMAIL=your_linkedin_email -LINKEDIN_PASSWORD=your_linkedin_password -# Security -JWT_SECRET=your_jwt_secret -API_KEY=your_api_key -``` +# Configure environment +cp .env.example .env +# Edit .env with your API keys and configuration -3. **Launch with Docker Compose** -```bash +# Start all services docker-compose up -d + +# View logs +docker-compose logs -f ``` -4. **Access the Application** +**Access:** - Dashboard: http://localhost:3001 -- API Documentation: http://localhost:8080/docs (Agent Orchestrator) +- API: http://localhost:8080 - LLM Service: http://localhost:8000/docs -### Development Setup +### Option 2: Local Development -1. **Install Dependencies** ```bash -# Dashboard Service -cd services/dashboard-service -npm install - -# Agent Orchestrator -cd ../agent-orchestrator -npm install - -# LLM Service -cd ../llm-service -pip install -r requirements.txt - -# Monitoring Service -cd ../agent-monitoring-service -pip install -r requirements.txt -``` +# Clone and configure +git clone https://github.com/groupthinking/AJOB4AGENT.git +cd AJOB4AGENT +cp .env.example .env -2. **Run Services Locally** -```bash -# Terminal 1 - Dashboard -cd services/dashboard-service && npm run dev +# Start infrastructure (database, redis, rabbitmq) +docker-compose up -d postgres redis rabbitmq -# Terminal 2 - Agent Orchestrator -cd services/agent-orchestrator && npm run dev +# Install and run services +# Terminal 1: Dashboard +cd services/dashboard-service && npm install && npm run dev -# Terminal 3 - LLM Service -cd services/llm-service && uvicorn app.main:app --reload --port 8000 +# Terminal 2: Orchestrator +cd services/agent-orchestrator && npm install && npm run dev -# Terminal 4 - Monitoring -cd services/agent-monitoring-service && python app/worker.py +# Terminal 3: LLM Service +cd services/llm-service && pip install -r requirements.txt && uvicorn app.main:app --reload --port 8000 ``` -## 📋 Usage - -### 1. Setup Your Profile -- Upload your master resume -- Set job preferences (locations, roles, salary range) -- Configure platform credentials - -### 2. Start Job Search -- The system automatically discovers relevant jobs -- AI tailors your resume and cover letter for each position -- Applications are submitted automatically - -### 3. 
Monitor Progress -- Track application status in real-time -- View recruiter outreach results -- Analyze success metrics and optimize approach +--- -## 🔧 Configuration +## 📦 Services -### Environment Variables +| Service | Port | Technology | Description | +|---------|------|------------|-------------| +| **Dashboard** | 3001 | Next.js 14, React 18 | User interface and management | +| **Agent Orchestrator** | 8080 | Express, TypeScript | Job search and application coordination | +| **LLM Service** | 8000 | FastAPI, Python | AI-powered content generation | +| **Monitoring** | 8001 | Python | System metrics and analytics | -| Variable | Description | Default | -|----------|-------------|---------| -| `DATABASE_URL` | PostgreSQL connection string | - | -| `OPENAI_API_KEY` | OpenAI API key for LLM service | - | -| `LLM_MODEL` | Model to use for content generation | gpt-4 | -| `LINKEDIN_EMAIL` | LinkedIn automation credentials | - | -| `LINKEDIN_PASSWORD` | LinkedIn automation credentials | - | -| `JWT_SECRET` | Secret for JWT token generation | - | -| `API_KEY` | Internal service API key | - | -| `RABBITMQ_URL` | Message queue connection | amqp://guest:guest@rabbitmq:5672 | -| `REDIS_URL` | Redis cache connection | redis://redis:6379 | +--- -### Platform Agents +## 🔌 API Endpoints -Each platform has a dedicated agent: -- **LinkedIn Agent**: Job scraping and application submission -- **Glassdoor Agent**: Company research and application tracking -- **Wellfound Agent**: Startup job discovery and outreach +### Agent Orchestrator (Port 8080) -## 🛡️ Security & Compliance +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/health` | GET | Health check | +| `/api/search` | POST | Multi-platform job search | +| `/api/apply` | POST | Submit job application | +| `/api/jobs` | GET | List scraped jobs | +| `/api/applications` | GET | Application history | -- **Data Encryption**: All sensitive data encrypted at rest and in transit -- **Platform Compliance**: Respects robots.txt and rate limits -- **Authentication**: JWT-based authentication with secure sessions -- **Privacy**: User data never shared with third parties +### LLM Service (Port 8000) -## 🧪 Testing +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/health` | GET | Health check | +| `/api/v1/resume/tailor` | POST | Tailor resume for job | +| `/api/v1/resume/tailor/batch` | POST | Batch processing | -```bash -# Run all tests -docker-compose -f docker-compose.test.yml up --build +### Auth Service (Port 8003) -# Individual service tests -cd services/agent-orchestrator && npm test -cd services/llm-service && pytest -``` +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/api/auth/register` | POST | User registration | +| `/api/auth/login` | POST | User login | +| `/api/auth/refresh` | POST | Refresh token | +| `/api/auth/me` | GET | Current user info | -## 📊 Monitoring +For complete API documentation, see [docs/API.md](docs/API.md). 
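+
+### Example Requests
+
+The snippets below are illustrative smoke tests against a local stack. The request field names (`searchTerm`, `location`, `jobDescription`, `resume`) are borrowed from the test examples in [CONTRIBUTING.md](CONTRIBUTING.md) and may not match the final schema; see [docs/API.md](docs/API.md) for the authoritative contract.
+
+```bash
+# Search for jobs via the Agent Orchestrator (assumed payload shape)
+curl -s -X POST http://localhost:8080/api/search \
+  -H "Content-Type: application/json" \
+  -d '{"searchTerm": "software engineer", "location": "San Francisco"}'
+
+# Tailor a resume via the LLM Service (assumed payload shape)
+curl -s -X POST http://localhost:8000/api/v1/resume/tailor \
+  -H "Content-Type: application/json" \
+  -d '{"jobDescription": "Senior Engineer...", "resume": "My experience..."}'
+```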
-- **Health Checks**: All services expose `/health` endpoints with detailed status -- **Metrics**: Prometheus metrics available at `/metrics` endpoints -- **Logging**: Centralized structured JSON logging with request tracing -- **Alerts**: Configurable alerts for failed applications and system issues +--- -### Production Monitoring Stack +## ⚙️ Environment Variables -Deploy comprehensive monitoring for production: +Key environment variables (see [.env.example](.env.example) for full list): ```bash -# Start with monitoring stack -docker-compose -f docker-compose.yml -f monitoring/docker-compose.monitoring.yml up -d - -# Access monitoring services -# Grafana: http://localhost:3000 (admin/admin123) -# Prometheus: http://localhost:9090 -# AlertManager: http://localhost:9093 +# Required +DATABASE_URL=postgresql://user:pass@localhost:5432/jobagent +OPENAI_API_KEY=sk-... +JWT_SECRET=your-secret-key + +# Platform Credentials +LINKEDIN_EMAIL=your@email.com +LINKEDIN_PASSWORD=your-password + +# Services +REDIS_URL=redis://localhost:6379 +RABBITMQ_URL=amqp://guest:guest@localhost:5672 ``` -**Monitoring Features:** -- Real-time application metrics and system monitoring -- Custom Grafana dashboards for job processing analytics -- Automated alerting via Slack, email, or webhooks -- Performance tracking and capacity planning -- Error rate and success rate monitoring +--- -### Basic Monitoring Commands +## 🧪 Testing ```bash -# Check service status -docker-compose ps - -# View logs -docker-compose logs -f [service-name] +# Run all tests +npm test # Node.js services +pytest # Python services -# Resource usage -docker stats +# Run with coverage +npm run test:coverage +pytest --cov=. -# Run health checks -./tests/integration-test.sh health +# E2E tests +npm run test:e2e -# View application metrics -curl http://localhost:8001/metrics +# Integration tests +docker-compose -f docker-compose.test.yml up --build ``` -## 🤝 Contributing - -1. Fork the repository -2. Create your feature branch (`git checkout -b feature/amazing-feature`) -3. Commit your changes (`git commit -m 'Add amazing feature'`) -4. Push to the branch (`git push origin feature/amazing-feature`) -5. Open a Pull Request - -## 📄 License - -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. - -## ⚠️ Disclaimer - -This tool is designed to assist in job searching and should be used in compliance with platform terms of service. Users are responsible for ensuring their usage complies with applicable laws and platform policies. 
- -## 🆘 Support - -### Documentation -- 📖 [Quick Start Guide](QUICKSTART.md) - Get up and running in 5 minutes -- 🔧 [Operations Guide](docs/OPERATIONS.md) - Production deployment and monitoring -- 📚 [API Documentation](docs/api/README.md) - Complete API reference with examples -- 🔧 [Development Setup](scripts/setup-dev.sh) - Automated development environment setup - -### Community & Support -- 🐛 [Report Issues](https://github.com/groupthinking/AJOB4AGENT/issues) -- 💬 [Discussions](https://github.com/groupthinking/AJOB4AGENT/discussions) -- 📧 Email: support@ajob4agent.com (for enterprise support) - -### Scripts & Tools -- `./scripts/setup-dev.sh` - One-click development setup -- `./scripts/deploy.sh` - Production deployment with health checks -- `./tests/integration-test.sh` - Comprehensive testing suite -======= -# AJOB4AGENT - Autonomous Job Application & Outreach Agent - -An intelligent, autonomous agent that automates end-to-end job search, application, resume tailoring, and recruiter outreach across multiple platforms including LinkedIn, Glassdoor, Wellfound, and more using a modular, LLM-driven system. - -## 🎯 Overview +--- -AJOB4AGENT is a comprehensive job application automation platform that: -- **Finds Jobs**: Searches across multiple job platforms automatically -- **Tailors Applications**: Uses AI to customize resumes and cover letters for each position -- **Automates Applications**: Fills out and submits job applications automatically -- **Handles Outreach**: Finds recruiters and sends personalized messages -- **Tracks Everything**: Provides analytics and insights on your job search - -## ✨ Key Features - -### 🔌 Platform Adapter System -- Modular adapters for each job platform (LinkedIn, Glassdoor, Wellfound, etc.) -- Easy extension: new platforms = new adapter module -- Handles login, scraping, job matching, and application submission - -### 🤖 LLM Tailoring Engine -- Parses job descriptions and tailors resume/CV for each position -- Optimizes for keywords and requirements per job -- Supports A/B testing for optimization - -### ⚡ Automated Application Engine -- Auto-fills forms using platform adapters -- Handles anti-bot/captcha where legally possible -- Logs all submissions and statuses - -### 📧 Recruiter/HR Scraper & Outreach Engine -- Scrapes recruiter/contact information from job posts or company pages -- Generates personalized outreach messages (email/LinkedIn DM) -- Sends outreach and logs delivery/status - -### 📊 Dashboard & Analytics -- Application/outreach tracking -- Response rate analytics and optimization feedback -- Resume/cover letter effectiveness statistics - -### 🔒 Security & Compliance -- Follows platform automation policies (API where possible) -- Secure storage of user data and credentials -- Transparent opt-in for outreach +## 🚢 Deployment -## 🏗️ Architecture +### Production with Docker Compose -The system uses a microservices architecture with the following components: - -``` -+-------------------+ +--------------------+ +-------------------+ -| Dashboard | | Agent | | LLM Service | -| Service |<-->| Orchestrator |<-->| | -| (Frontend) | | (Core Logic) | | (AI Processing) | -+-------------------+ +--------------------+ +-------------------+ - | | | - +-------------------------+-------------------------+ - | - +--------------------+ - | Monitoring | - | Service | - | (Analytics) | - +--------------------+ +```bash +# Use production configuration +docker-compose -f docker-compose.prod.yml up -d ``` -### Services +### Vercel (Dashboard) + Railway (Backend) -- 
**Dashboard Service** (Port 3001): Next.js frontend for user interaction -- **Agent Orchestrator** (Port 3000): Core coordination service handling job search logic -- **LLM Service**: AI-powered resume tailoring and content generation -- **Agent Monitoring Service**: Analytics and monitoring +See [DEPLOYMENT_CHECKLIST.md](DEPLOYMENT_CHECKLIST.md) for complete instructions. -## 🚀 Quick Start +### Monitoring -> **⚠️ Development Status**: This project is currently under active development. Some services may require additional configuration or have build issues. +```bash +# Enable monitoring stack +docker-compose -f docker-compose.yml -f monitoring/docker-compose.monitoring.yml up -d -### Prerequisites +# Access: +# - Grafana: http://localhost:3000 +# - Prometheus: http://localhost:9090 +``` -- Node.js (v18+) -- Python (3.8+) -- Docker (optional) -- Git - -### Installation - -1. **Clone the repository** - ```bash - git clone https://github.com/groupthinking/AJOB4AGENT.git - cd AJOB4AGENT - ``` - -2. **Review the architecture** - - See [PRD.md](PRD.md) for detailed project requirements - - Check [architecture.mmd](architecture.mmd) for system design - -3. **Explore individual services** - ```bash - # List available services - ls services/ - - # Each service has its own package.json and dependencies - # agent-orchestrator/ - Core coordination service - # dashboard-service/ - Next.js frontend - # llm-service/ - AI processing service - # agent-monitoring-service/ - Analytics service - ``` - -> **Note**: Full integration and Docker Compose setup is currently under development. Individual service setup may require additional configuration. - -### Development Setup - -For development, you can run services individually: - -1. **Agent Orchestrator** (TypeScript/Node.js) - ```bash - cd services/agent-orchestrator - npm install - npm run dev # Development mode with hot reload - ``` - -2. **Dashboard Service** (Next.js) - ```bash - cd services/dashboard-service - npm install - npm run dev # Development mode with hot reload - ``` - -3. **LLM Service** (Python/FastAPI) - *Under Development* - ```bash - cd services/llm-service - pip install -r requirements.txt - # Service implementation in progress - ``` - -4. **Agent Monitoring Service** (Python) - *Under Development* - ```bash - cd services/agent-monitoring-service - pip install -r requirements.txt - # Service implementation in progress - ``` - -## 📖 User Guide - -### Getting Started - -1. **Upload Your Resume**: Upload your master resume and set job preferences -2. **Configure Search**: Set your target job titles, locations, and platforms -3. **Start the Agent**: The system will automatically: - - Search for matching jobs - - Tailor your resume for each position - - Submit applications - - Find and message recruiters -4. 
**Monitor Progress**: Track applications, responses, and analytics in the dashboard - -## 📊 Project Status - -This project is currently in **early development phase**: - -### ✅ Completed -- 📋 **Project Planning**: Comprehensive PRD and architecture design -- 🏗️ **Architecture**: Microservices structure defined -- 🎯 **Core Services**: Service scaffolding and basic structure -- 📱 **Dashboard**: Basic Next.js frontend with monitoring UI -- 🔧 **Development Environment**: Individual service development setup - -### 🚧 In Progress -- 🔗 **Service Integration**: Connecting microservices -- 🐳 **Docker Compose**: Full containerized deployment -- 🤖 **LLM Service**: AI-powered resume tailoring implementation -- 📧 **Outreach Engine**: Automated recruiter contact system -- 🔌 **Platform Adapters**: Job site integration (LinkedIn, Glassdoor, etc.) - -### 📋 Planned -- 🧪 **Testing Suite**: Comprehensive test coverage -- 📖 **Documentation**: API documentation and user guides -- 🔐 **Authentication**: User management and security -- 📈 **Analytics**: Advanced monitoring and optimization - -## 🎯 MVP Scope - -The current MVP includes: -- ✅ Adapters: LinkedIn, Glassdoor, Wellfound (modular for future platforms) -- ✅ Resume/CV tailoring and autofill -- ✅ Recruiter/HR scraping for outreach -- ✅ Dashboard with basic analytics -- ✅ Email/LinkedIn messaging for outreach - -## 🚧 Roadmap - -### Out of Scope (v1) -- Deep anti-bot bypass for highly protected sites -- Enterprise integrations (planned for v2+) -- Bulk scraping of non-job-related data - -### Future Extensibility -- Adapter interface for any new job platform -- Plug-in system for new outreach channels (Slack, WhatsApp, etc.) +--- ## 🤝 Contributing -We welcome contributions! This project is in active development and there are many opportunities to help. - -### How to Contribute - -1. **🐛 Report Issues**: Found a bug? Open an issue with details and steps to reproduce -2. **💡 Suggest Features**: Have ideas for improvements? Create a feature request -3. **🔧 Fix Issues**: Browse open issues and submit pull requests -4. **📖 Improve Docs**: Help improve documentation and examples -5. **🧪 Add Tests**: Contribute test cases for existing functionality - -### Development Workflow +We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines. +```bash +# Development workflow 1. Fork the repository -2. Create a feature branch (`git checkout -b feature/amazing-feature`) -3. Make your changes -4. Test your changes (if applicable) -5. Commit your changes (`git commit -m 'Add amazing feature'`) -6. Push to the branch (`git push origin feature/amazing-feature`) -7. Open a Pull Request - -### Development Setup Tips - -- Each service in `services/` is independent with its own dependencies -- Check individual service README files for specific setup instructions -- Use `npm run dev` for development mode with hot reload -- Review the [PRD.md](PRD.md) to understand the overall system design - -## 🔧 Troubleshooting - -### Common Issues +2. Create feature branch: git checkout -b feature/amazing-feature +3. Make changes and test +4. Commit: git commit -m 'feat: add amazing feature' +5. Push: git push origin feature/amazing-feature +6. 
Open a Pull Request +``` -**Build Errors** -- Ensure you have the correct Node.js version (18+) -- Try deleting `node_modules` and running `npm install` again -- Check for TypeScript compilation errors in individual services +--- -**Docker Issues** -- Docker Compose configuration is under development -- Use individual service setup for now +## 📚 Documentation -**Service Communication** -- Services are designed to work together but integration is in progress -- Check individual service documentation for API endpoints +| Document | Description | +|----------|-------------| +| [QUICKSTART.md](QUICKSTART.md) | 5-minute getting started guide | +| [docs/API.md](docs/API.md) | Complete API documentation | +| [docs/ARCHITECTURE.md](docs/ARCHITECTURE.md) | System architecture | +| [docs/OPERATIONS.md](docs/OPERATIONS.md) | Operations and monitoring | +| [DEPLOYMENT_CHECKLIST.md](DEPLOYMENT_CHECKLIST.md) | Production deployment guide | +| [CONTRIBUTING.md](CONTRIBUTING.md) | Contribution guidelines | +| [SECURITY.md](SECURITY.md) | Security policy | -**Getting Help** -- 📖 Check the [PRD.md](PRD.md) for detailed requirements -- 🏗️ Review [architecture.mmd](architecture.mmd) for system design -- 🐛 Open an issue on GitHub for bugs or questions +--- ## 📄 License -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. +This project is licensed under the MIT License - see [LICENSE](LICENSE) for details. + +--- -## ⚠️ Legal Notice +## ⚠️ Disclaimer -This tool is designed to automate legitimate job application processes. Users are responsible for: +This tool automates legitimate job application processes. Users are responsible for: - Complying with platform Terms of Service - Respecting rate limits and automation policies - Using the tool ethically and legally -- Ensuring all outreach is professional and appropriate - -## 📞 Support - -- 📖 **Documentation**: Check the [PRD.md](PRD.md) for detailed requirements -- 🏗️ **Architecture**: See [architecture.mmd](architecture.mmd) for system design -- 🐛 **Issues**: Report bugs on our [GitHub Issues](https://github.com/groupthinking/AJOB4AGENT/issues) --- -**Built with ❤️ for job seekers everywhere** -# AJOB4AGENT - -Welcome to AJOB4AGENT, a personalized, autonomous agent for automating your job search pipeline. This system helps you find relevant job postings, tailors your resume for each application, tracks your progress, and generates reports and interview prep materials. - -## ✨ Features - -- **Job Pipeline:** Ingests a list of jobs and filters them based on your configured titles, locations, and minimum compensation. -- **Role Scoring:** Scores filtered jobs against your master resume to identify the best matches. -- **AI-Powered Resume Tailoring:** Uses an LLM to generate tailored resume variants for top-scoring jobs (requires OpenAI API key). -- **CRM Logging:** Tracks all processed applications in a simple CSV-based CRM system. -- **Daily HTML Reports:** Generates a `daily_report.html` summarizing the pipeline's activity for each run. -- **HTMX Dashboard:** A lightweight, real-time web dashboard to view your application log. -- **Interview Pack Generator:** A script to create a concise interview prep document for a given company using recent news (requires OpenAI API key). -- **Guarded Features:** Email reporting, AI resume tailoring, and other features requiring secrets are disabled by default and only run if you provide the necessary keys/credentials. 
- -## 🚀 Getting Started - -### Prerequisites - -- Python 3.10+ -- `make` for easy command execution. - -### Installation - -1. **Clone the repository:** - ```sh - git clone https://github.com/groupthinking/AJOB4AGENT.git - cd AJOB4AGENT - ``` - -2. **Set up your environment:** - Copy the environment file template. This file is crucial for configuring the agent to your preferences. - ```sh - cp .env.prepopulated .env - ``` - Now, edit the `.env` file with your job preferences. - -3. **Provide Optional API Keys (in `.env`):** - - To enable AI-powered resume tailoring and the interview pack generator, add your `OPENAI_API_KEY`. - - To enable daily email reports, fill in the `SMTP_*` variables. - - To enable Apollo integration (future feature), add your `APOLLO_API_KEY`. - -4. **Install dependencies:** - Use the `Makefile` to install all required Python packages. - ```sh - make install - ``` - -## Usage - -### Running the Main Pipeline - -To run the entire daily job automation pipeline, from filtering to reporting, simply use: -```sh -make run -``` - -### Viewing the Dashboard +## 🆘 Support -To start the web dashboard, run: -```sh -make dashboard -``` -Then, open your web browser to `http://localhost:8000`. The dashboard will display the contents of `crm/applications.csv` and auto-refresh. +- 🐛 [GitHub Issues](https://github.com/groupthinking/AJOB4AGENT/issues) - Bug reports +- 💬 [Discussions](https://github.com/groupthinking/AJOB4AGENT/discussions) - Questions & ideas +- 📖 [Documentation](./docs/) - Guides and references -### Generating an Interview Pack +--- -To generate a prep document for a specific company: -1. (First time only) Add some sample news articles or text to `data/sample_articles.txt`. -2. Run the script with the company name: - ```sh - python scripts/generate_interview_pack.py --company "Name of Company" - ``` - This requires `OPENAI_API_KEY` to be set in your `.env` file. The output will be saved in the `reports/` directory. +

**Built with ❤️ for job seekers everywhere**

diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..39a1f35 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,412 @@ +# Security Policy + +## Reporting a Vulnerability + +We take security seriously at AJOB4AGENT. If you discover a security vulnerability, please report it responsibly. + +### How to Report + +**DO NOT** create a public GitHub issue for security vulnerabilities. + +Instead, please: + +1. **Email:** Send details to security@ajob4agent.com (or create a private security advisory on GitHub) +2. **Include:** + - Description of the vulnerability + - Steps to reproduce + - Potential impact + - Suggested fix (if any) + +### Response Timeline + +| Action | Timeline | +|--------|----------| +| Acknowledgment | Within 24 hours | +| Initial Assessment | Within 72 hours | +| Status Update | Weekly | +| Fix Implementation | Depends on severity | +| Public Disclosure | After fix is deployed | + +### Severity Levels + +| Level | Description | Response Time | +|-------|-------------|---------------| +| Critical | Remote code execution, data breach | < 24 hours | +| High | Authentication bypass, privilege escalation | < 72 hours | +| Medium | XSS, CSRF, information disclosure | < 1 week | +| Low | Minor issues, hardening | < 1 month | + +--- + +## Security Best Practices + +### For Users + +#### Account Security +- Use strong, unique passwords +- Enable two-factor authentication (when available) +- Never share your credentials +- Log out from shared devices +- Regularly review connected applications + +#### API Key Management +- Never commit API keys to version control +- Rotate keys regularly +- Use environment variables for sensitive data +- Limit API key permissions to minimum required +- Monitor API usage for anomalies + +#### Platform Credentials +- Use app-specific passwords when possible +- Review platform security settings regularly +- Be aware of rate limits and automation policies + +### For Developers + +#### Secure Coding Practices +- Validate all user input +- Use parameterized queries (prevent SQL injection) +- Escape output (prevent XSS) +- Implement proper authentication and authorization +- Use HTTPS for all communications +- Keep dependencies updated + +#### Secrets Management +```bash +# ❌ Never do this +const API_KEY = "sk-1234567890abcdef"; + +# ✅ Do this instead +const API_KEY = process.env.API_KEY; +``` + +#### Environment Variables +- Use `.env` files for local development only +- Never commit `.env` files to version control +- Use secure secret management in production (AWS Secrets Manager, HashiCorp Vault) + +--- + +## Authentication Flow + +### JWT Authentication + +``` +┌─────────────┐ ┌─────────────┐ ┌─────────────┐ +│ Client │ │ Server │ │ Database │ +└──────┬──────┘ └──────┬──────┘ └──────┬──────┘ + │ │ │ + │ 1. Login Request │ │ + │ ─────────────────► │ │ + │ │ 2. Verify User │ + │ │ ─────────────────► │ + │ │ │ + │ │ 3. User Data │ + │ │ ◄───────────────── │ + │ │ │ + │ 4. JWT Token │ │ + │ ◄───────────────── │ │ + │ │ │ + │ 5. API Request │ │ + │ + JWT Token │ │ + │ ─────────────────► │ │ + │ │ │ + │ 6. 
Response │ │ + │ ◄───────────────── │ │ + │ │ │ +``` + +### Token Security + +- **Access Token:** Short-lived (15 minutes) +- **Refresh Token:** Longer-lived (7 days) +- **Storage:** HttpOnly cookies (preferred) or secure local storage +- **Rotation:** Refresh tokens are rotated on use + +### Session Management + +- Sessions expire after inactivity +- Concurrent session limits enforced +- Session invalidation on password change +- Secure session ID generation + +--- + +## Data Handling + +### Data Classification + +| Classification | Description | Examples | +|----------------|-------------|----------| +| Public | Freely available | Job postings, company info | +| Internal | For internal use | Application metrics, logs | +| Confidential | Sensitive business data | User resumes, credentials | +| Restricted | Highly sensitive | Payment info, passwords | + +### Data Encryption + +#### At Rest +- Database encryption (AES-256) +- File storage encryption +- Backup encryption + +#### In Transit +- TLS 1.3 for all connections +- Certificate pinning for mobile apps +- HSTS enabled + +### Data Retention + +| Data Type | Retention Period | Deletion Method | +|-----------|------------------|-----------------| +| User accounts | Until deletion requested | Soft delete, then hard delete after 30 days | +| Application history | 2 years | Anonymization then deletion | +| Logs | 90 days | Automatic purge | +| Backups | 30 days | Secure deletion | + +### Personal Data + +- Minimize data collection +- Purpose limitation (only use for stated purpose) +- User consent for data processing +- Right to deletion (GDPR compliance) +- Data portability support + +--- + +## Infrastructure Security + +### Network Security + +``` +┌─────────────────────────────────────────────────────────────┐ +│ Internet │ +└─────────────────────────────┬───────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ Load Balancer / WAF │ +│ (SSL Termination) │ +└─────────────────────────────┬───────────────────────────────┘ + │ + ┌───────────────────┼───────────────────┐ + │ │ │ + ▼ ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ Dashboard │ │ Orchestrator │ │ LLM Service │ +│ (Public) │ │ (Private) │ │ (Private) │ +└────────┬────────┘ └────────┬────────┘ └────────┬────────┘ + │ │ │ + └───────────────────┼───────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ Private Network │ +│ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ +│ │PostgreSQL│ │ Redis │ │ RabbitMQ │ │Monitoring│ │ +│ └──────────┘ └──────────┘ └──────────┘ └──────────┘ │ +└─────────────────────────────────────────────────────────────┘ +``` + +### Firewall Rules + +| Source | Destination | Port | Protocol | Action | +|--------|-------------|------|----------|--------| +| Internet | Load Balancer | 443 | HTTPS | Allow | +| Internet | Load Balancer | 80 | HTTP | Redirect to HTTPS | +| Load Balancer | App Servers | 3001, 8080, 8000, 8001 | HTTP | Allow | +| App Servers | Database | 5432 | PostgreSQL | Allow | +| App Servers | Redis | 6379 | Redis | Allow | +| App Servers | RabbitMQ | 5672 | AMQP | Allow | +| All | All | * | * | Deny | + +### Container Security + +- Use official base images +- Run as non-root user +- Scan images for vulnerabilities +- Minimal base images (Alpine when possible) +- Read-only file systems where possible + +```dockerfile +# Example secure Dockerfile +FROM node:18-alpine + +# Create non-root user +RUN addgroup -g 1001 -S nodejs +RUN 
adduser -S nextjs -u 1001 + +# Set working directory +WORKDIR /app + +# Copy with appropriate permissions +COPY --chown=nextjs:nodejs . . + +# Switch to non-root user +USER nextjs + +# Start application +CMD ["node", "dist/index.js"] +``` + +--- + +## Security Headers + +The following security headers are configured: + +``` +Content-Security-Policy: default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline' +X-Content-Type-Options: nosniff +X-Frame-Options: DENY +X-XSS-Protection: 1; mode=block +Strict-Transport-Security: max-age=31536000; includeSubDomains; preload +Referrer-Policy: strict-origin-when-cross-origin +Permissions-Policy: geolocation=(), microphone=(), camera=() +``` + +--- + +## Rate Limiting + +### API Rate Limits + +| Endpoint | Limit | Window | +|----------|-------|--------| +| General API | 100 requests | 15 minutes | +| Login | 5 requests | 1 minute | +| Password Reset | 3 requests | 15 minutes | +| LLM Endpoints | 50 requests | 1 hour | +| File Upload | 10 requests | 5 minutes | + +### Rate Limit Response + +```json +{ + "error": "Too Many Requests", + "message": "Rate limit exceeded. Please try again later.", + "retryAfter": 900 +} +``` + +Headers: +``` +X-RateLimit-Limit: 100 +X-RateLimit-Remaining: 0 +X-RateLimit-Reset: 1705321200 +Retry-After: 900 +``` + +--- + +## Audit Logging + +### Events Logged + +- Authentication events (login, logout, failed attempts) +- Authorization failures +- Data access (read, create, update, delete) +- Configuration changes +- Administrative actions +- Security events + +### Log Format + +```json +{ + "timestamp": "2024-01-15T10:30:00.000Z", + "level": "info", + "event": "user.login", + "userId": "user-123", + "ip": "192.168.1.1", + "userAgent": "Mozilla/5.0...", + "success": true, + "metadata": { + "method": "password", + "mfaUsed": false + } +} +``` + +### Log Retention + +- Security logs: 1 year +- Access logs: 90 days +- Application logs: 30 days + +--- + +## Incident Response + +### Incident Classification + +| Severity | Description | Examples | +|----------|-------------|----------| +| P1 - Critical | Active breach, data exposure | Unauthorized data access, service compromise | +| P2 - High | Significant security issue | Vulnerability being exploited | +| P3 - Medium | Potential security issue | Suspicious activity, failed attacks | +| P4 - Low | Minor security concern | Policy violations, hardening issues | + +### Response Process + +1. **Detection:** Identify and confirm incident +2. **Containment:** Limit damage and prevent spread +3. **Eradication:** Remove threat from environment +4. **Recovery:** Restore systems to normal operation +5. 
**Post-Incident:** Review and improve defenses + +### Contact + +For security incidents, contact: +- Security Team: security@ajob4agent.com +- Emergency: [On-call contact] + +--- + +## Compliance + +### Standards + +- OWASP Top 10 compliance +- GDPR compliance (for EU users) +- SOC 2 Type II (planned) + +### Regular Assessments + +- Quarterly vulnerability scans +- Annual penetration testing +- Continuous dependency scanning +- Code security reviews + +--- + +## Security Updates + +### Staying Informed + +- Subscribe to security mailing list +- Watch GitHub repository for security advisories +- Follow @ajob4agent on Twitter for announcements + +### Update Policy + +- Critical vulnerabilities: Patched within 24 hours +- High vulnerabilities: Patched within 72 hours +- Dependencies: Updated monthly + +--- + +## Acknowledgments + +We thank the security researchers who have helped improve AJOB4AGENT's security: + +- (List will be updated as reports are received) + +If you report a valid security vulnerability, we will: +- Acknowledge your contribution +- Work with you on coordinated disclosure +- Add your name to our security acknowledgments (if desired) + +--- + +*Last updated: January 2024* diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml new file mode 100644 index 0000000..555182e --- /dev/null +++ b/docker-compose.prod.yml @@ -0,0 +1,359 @@ +# Production Docker Compose Configuration +# Use with: docker-compose -f docker-compose.prod.yml up -d + +version: '3.8' + +services: + # Database - PostgreSQL with optimized settings + postgres: + image: postgres:15-alpine + container_name: ajob4agent-postgres + environment: + POSTGRES_USER: ${POSTGRES_USER:-jobagent} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + POSTGRES_DB: ${POSTGRES_DB:-jobagent} + # Performance tuning + POSTGRES_INITDB_ARGS: "--encoding=UTF8" + volumes: + - postgres_data:/var/lib/postgresql/data + - ./migrations:/docker-entrypoint-initdb.d:ro + ports: + - "5432:5432" + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '1.0' + memory: 2G + reservations: + cpus: '0.5' + memory: 512M + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-jobagent} -d ${POSTGRES_DB:-jobagent}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + + # Redis for caching and sessions + redis: + image: redis:7-alpine + container_name: ajob4agent-redis + command: redis-server --appendonly yes --maxmemory 256mb --maxmemory-policy allkeys-lru + ports: + - "6379:6379" + volumes: + - redis_data:/data + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 128M + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + logging: + driver: "json-file" + options: + max-size: "20m" + max-file: "3" + + # RabbitMQ for message queuing + rabbitmq: + image: rabbitmq:3-management-alpine + container_name: ajob4agent-rabbitmq + environment: + RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-guest} + RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD:-guest} + RABBITMQ_VM_MEMORY_HIGH_WATERMARK: 0.8 + ports: + - "5672:5672" + - "15672:15672" + volumes: + - rabbitmq_data:/var/lib/rabbitmq + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + healthcheck: + test: ["CMD", "rabbitmqctl", 
"status"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 60s + logging: + driver: "json-file" + options: + max-size: "30m" + max-file: "3" + + # LLM Service - AI content generation + llm-service: + image: ${REGISTRY:-ghcr.io}/${IMAGE_NAME:-groupthinking/ajob4agent}/llm-service:${TAG:-main} + build: + context: ./services/llm-service + target: production + args: + - BUILDKIT_INLINE_CACHE=1 + container_name: ajob4agent-llm + environment: + - OPENAI_API_KEY=${OPENAI_API_KEY} + - LLM_MODEL=${LLM_MODEL:-gpt-4} + - LLM_MAX_TOKENS=${LLM_MAX_TOKENS:-4000} + - LLM_TEMPERATURE=${LLM_TEMPERATURE:-0.7} + - DATABASE_URL=${DATABASE_URL} + - REDIS_URL=${REDIS_URL:-redis://redis:6379} + - LOG_LEVEL=${LOG_LEVEL:-info} + - ALLOWED_ORIGINS=${ALLOWED_ORIGINS:-https://yourdomain.com} + - ALLOWED_HOSTS=${ALLOWED_HOSTS:-llm-service,localhost} + ports: + - "8000:8000" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '1.0' + memory: 1G + reservations: + cpus: '0.5' + memory: 512M + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + + # Agent Orchestrator - Core coordination service + agent-orchestrator: + image: ${REGISTRY:-ghcr.io}/${IMAGE_NAME:-groupthinking/ajob4agent}/agent-orchestrator:${TAG:-main} + build: + context: ./services/agent-orchestrator + target: production + args: + - BUILDKIT_INLINE_CACHE=1 + container_name: ajob4agent-orchestrator + environment: + - NODE_ENV=production + - DATABASE_URL=${DATABASE_URL} + - REDIS_URL=${REDIS_URL:-redis://redis:6379} + - RABBITMQ_URL=${RABBITMQ_URL:-amqp://guest:guest@rabbitmq:5672} + - JWT_SECRET=${JWT_SECRET} + - API_KEY=${API_KEY} + - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} + - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} + - LLM_SERVICE_URL=http://llm-service:8000 + - LOG_LEVEL=${LOG_LEVEL:-info} + - RATE_LIMIT_WINDOW_MS=${RATE_LIMIT_WINDOW_MS:-900000} + - RATE_LIMIT_MAX_REQUESTS=${RATE_LIMIT_MAX_REQUESTS:-100} + - ALLOWED_ORIGINS=${ALLOWED_ORIGINS:-https://yourdomain.com} + ports: + - "8080:8080" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + llm-service: + condition: service_healthy + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '1.0' + memory: 1G + reservations: + cpus: '0.5' + memory: 512M + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + + # Agent Monitoring Service - System analytics + agent-monitoring-service: + image: ${REGISTRY:-ghcr.io}/${IMAGE_NAME:-groupthinking/ajob4agent}/agent-monitoring-service:${TAG:-main} + build: + context: ./services/agent-monitoring-service + target: production + args: + - BUILDKIT_INLINE_CACHE=1 + container_name: ajob4agent-monitoring + environment: + - DATABASE_URL=${DATABASE_URL} + - REDIS_URL=${REDIS_URL:-redis://redis:6379} + - RABBITMQ_URL=${RABBITMQ_URL:-amqp://guest:guest@rabbitmq:5672} + - LOG_LEVEL=${LOG_LEVEL:-info} + ports: + - "8001:8001" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + networks: + - job-agent-net + restart: always + 
deploy: + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8001/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + logging: + driver: "json-file" + options: + max-size: "30m" + max-file: "3" + + # Dashboard Service - Next.js frontend + dashboard-service: + image: ${REGISTRY:-ghcr.io}/${IMAGE_NAME:-groupthinking/ajob4agent}/dashboard-service:${TAG:-main} + build: + context: ./services/dashboard-service + target: production + args: + - BUILDKIT_INLINE_CACHE=1 + container_name: ajob4agent-dashboard + environment: + - NODE_ENV=production + - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL:-http://agent-orchestrator:8080} + - DATABASE_URL=${DATABASE_URL} + - NEXTAUTH_SECRET=${JWT_SECRET} + - NEXTAUTH_URL=${APP_URL} + ports: + - "3001:3001" + depends_on: + agent-orchestrator: + condition: service_healthy + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:3001/api/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 45s + logging: + driver: "json-file" + options: + max-size: "30m" + max-file: "3" + + # Nginx reverse proxy + nginx: + image: nginx:1.25-alpine + container_name: ajob4agent-nginx + ports: + - "80:80" + - "443:443" + volumes: + - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro + - ./nginx/ssl:/etc/nginx/ssl:ro + - ./nginx/logs:/var/log/nginx + depends_on: + dashboard-service: + condition: service_healthy + agent-orchestrator: + condition: service_healthy + networks: + - job-agent-net + restart: always + deploy: + resources: + limits: + cpus: '0.5' + memory: 256M + reservations: + cpus: '0.1' + memory: 64M + healthcheck: + test: ["CMD", "nginx", "-t"] + interval: 30s + timeout: 10s + retries: 3 + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + +volumes: + postgres_data: + driver: local + redis_data: + driver: local + rabbitmq_data: + driver: local + +networks: + job-agent-net: + driver: bridge + ipam: + config: + - subnet: 172.28.0.0/16 diff --git a/docs/API.md b/docs/API.md new file mode 100644 index 0000000..27b724d --- /dev/null +++ b/docs/API.md @@ -0,0 +1,1237 @@ +# API Documentation - AJOB4AGENT + +Comprehensive API documentation for all AJOB4AGENT services. 
+ +## Table of Contents + +- [Overview](#overview) +- [Authentication](#authentication) +- [LLM Service (Port 8000)](#llm-service-port-8000) +- [Agent Orchestrator (Port 8080)](#agent-orchestrator-port-8080) +- [Auth Service (Port 8003)](#auth-service-port-8003) +- [Dashboard Service (Port 3001)](#dashboard-service-port-3001) +- [Monitoring Service (Port 8001)](#monitoring-service-port-8001) +- [Platform Adapters](#platform-adapters) +- [Error Handling](#error-handling) +- [Rate Limiting](#rate-limiting) +- [Webhooks](#webhooks) + +--- + +## Overview + +### Service Endpoints + +| Service | Base URL | Port | Purpose | +|---------|----------|------|---------| +| LLM Service | `http://localhost:8000` | 8000 | AI content generation | +| Agent Orchestrator | `http://localhost:8080` | 8080 | Job search & orchestration | +| Auth Service | `http://localhost:8003` | 8003 | Authentication | +| Dashboard | `http://localhost:3001` | 3001 | Web interface | +| Monitoring | `http://localhost:8001` | 8001 | System metrics | + +### Common Headers + +All API requests should include: + +```http +Content-Type: application/json +Accept: application/json +Authorization: Bearer # For authenticated endpoints +X-Request-ID: # Optional, for tracing +``` + +--- + +## Authentication + +### API Key Authentication + +For service-to-service communication: + +```bash +curl -X GET "http://localhost:8080/api/jobs" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" +``` + +### JWT Authentication + +For user-facing endpoints: + +```bash +# 1. Login to get token +curl -X POST "http://localhost:8003/api/auth/login" \ + -H "Content-Type: application/json" \ + -d '{"email": "user@example.com", "password": "password123"}' + +# Response +{ + "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "refreshToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "expiresIn": 900 +} + +# 2. Use token for authenticated requests +curl -X GET "http://localhost:8080/api/applications" \ + -H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." +``` + +--- + +## LLM Service (Port 8000) + +AI-powered content generation service using FastAPI. + +### Health Check + +Check service health status. + +```http +GET /health +``` + +**Response:** +```json +{ + "status": "healthy", + "service": "llm-service", + "version": "1.0.0", + "timestamp": 1705320600.123 +} +``` + +**Example:** +```bash +curl http://localhost:8000/health +``` + +--- + +### Resume Tailoring + +```http +POST /api/v1/resume/tailor +``` + +Tailor a resume and generate application materials for a specific job. + +**Request Body:** +```json +{ + "job_data": { + "job_id": "job-123", + "platform": "linkedin", + "job_url": "https://linkedin.com/jobs/view/123456", + "job_title": "Senior Software Engineer", + "company_name": "Tech Corp", + "raw_description": "We are looking for a Senior Software Engineer with 5+ years of experience in Python, TypeScript, and cloud technologies. You will be responsible for designing and implementing scalable microservices...", + "recruiter_info": { + "name": "Jane Smith", + "profile_url": "https://linkedin.com/in/janesmith", + "email": "jane.smith@techcorp.com" + }, + "requirements": [ + "5+ years software development", + "Python and TypeScript", + "Cloud technologies (AWS/GCP)", + "Microservices architecture" + ], + "salary_range": "$150,000 - $200,000" + }, + "user_profile": { + "user_id": "user-456", + "raw_master_resume": "Experienced software engineer with 7+ years of experience building scalable systems. 
Proficient in Python, TypeScript, Java, and Go. Led teams at multiple startups...", + "custom_preferences": { + "location": "San Francisco, CA", + "salary_min": 150000, + "remote_ok": true, + "visa_sponsorship_needed": false + }, + "skills": ["Python", "TypeScript", "AWS", "Docker", "Kubernetes"], + "experience_years": 7 + }, + "options": { + "generate_cover_letter": true, + "generate_outreach": true, + "tone": "professional", + "emphasis": ["leadership", "technical"] + } +} +``` + +**Response:** +```json +{ + "job_id": "job-123", + "status": "success", + "tailored_resume": "SENIOR SOFTWARE ENGINEER\n\nProfessional Summary:\nResults-driven Software Engineer with 7+ years of experience building scalable microservices and cloud-native applications...", + "cover_letter": "Dear Hiring Manager,\n\nI am excited to apply for the Senior Software Engineer position at Tech Corp. With over 7 years of experience in building scalable systems using Python and TypeScript...", + "outreach_message": "Hi Jane,\n\nI noticed the Senior Software Engineer opportunity at Tech Corp and I'm very interested. My background in building scalable microservices aligns well with what you're looking for...", + "confidence_score": 0.89, + "keyword_match": { + "matched": ["Python", "TypeScript", "microservices", "AWS"], + "missing": ["GCP"], + "score": 0.85 + }, + "suggestions": [ + "Consider highlighting more cloud architecture experience", + "Add specific metrics about system scale" + ], + "processing_time_ms": 2345 +} +``` + +**Error Responses:** + +| Status | Description | +|--------|-------------| +| 400 | Invalid request body | +| 401 | Unauthorized | +| 429 | Rate limit exceeded | +| 500 | Internal server error | + +**Example:** +```bash +curl -X POST "http://localhost:8000/api/v1/resume/tailor" \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -d '{ + "job_data": { + "job_id": "job-123", + "platform": "linkedin", + "job_title": "Software Engineer", + "company_name": "Tech Corp", + "raw_description": "Looking for a software engineer..." + }, + "user_profile": { + "user_id": "user-456", + "raw_master_resume": "Experienced engineer with 5 years..." + } + }' +``` + +--- + +### Batch Processing + +```http +POST /api/v1/resume/tailor/batch +``` + +Process multiple jobs in batch. + +**Request Body:** +```json +{ + "jobs": [ + { + "job_data": { ... }, + "user_profile": { ... } + }, + { + "job_data": { ... }, + "user_profile": { ... } + } + ], + "options": { + "priority": "normal", + "async": true, + "webhook_url": "https://your-app.com/webhook/batch-complete" + } +} +``` + +**Response:** +```json +{ + "batch_id": "batch-789", + "status": "processing", + "total_jobs": 5, + "estimated_completion": "2024-01-15T11:00:00Z" +} +``` + +--- + +## Agent Orchestrator (Port 8080) + +Core orchestration service for job search and application management. + +### Health Check + +```http +GET /health +``` + +**Response:** +```json +{ + "status": "healthy", + "service": "agent-orchestrator", + "version": "1.0.0", + "timestamp": "2024-01-15T10:30:00Z", + "uptime": 12345.67, + "dependencies": { + "database": "healthy", + "redis": "healthy", + "rabbitmq": "healthy" + } +} +``` + +--- + +### Multi-Platform Job Search + +```http +POST /api/search +``` + +Search for jobs across multiple platforms. 
+ +**Request Body:** +```json +{ + "searchTerm": "software engineer", + "location": "San Francisco, CA", + "platforms": ["linkedin", "glassdoor", "indeed", "wellfound"], + "filters": { + "experienceLevel": "senior", + "remoteOnly": false, + "salaryMin": 150000, + "datePosted": "week", + "jobType": "full-time" + }, + "pagination": { + "page": 1, + "limit": 25 + } +} +``` + +**Response:** +```json +{ + "success": true, + "totalJobs": 156, + "platforms": 4, + "searchParams": { + "searchTerm": "software engineer", + "location": "San Francisco, CA" + }, + "results": [ + { + "platform": "linkedin", + "jobs": [ + { + "id": "li-123456", + "title": "Senior Software Engineer", + "company": "Tech Corp", + "location": "San Francisco, CA", + "salary": "$150,000 - $200,000", + "description": "We are looking for...", + "url": "https://linkedin.com/jobs/view/123456", + "postedDate": "2024-01-14", + "matchScore": 0.92 + } + ], + "totalCount": 45, + "searchTime": 1234 + } + ], + "timestamp": "2024-01-15T10:30:00Z" +} +``` + +**Example:** +```bash +curl -X POST "http://localhost:8080/api/search" \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -d '{ + "searchTerm": "software engineer", + "location": "San Francisco, CA", + "platforms": ["linkedin", "glassdoor"] + }' +``` + +--- + +### Submit Job Application + +```http +POST /api/apply +``` + +Submit a job application through the automation system. + +**Request Body:** +```json +{ + "jobId": "li-123456", + "platform": "linkedin", + "userId": "user-456", + "applicationData": { + "resumeId": "resume-789", + "coverLetterId": "cover-101", + "customAnswers": { + "years_experience": "7", + "salary_expectations": "$175,000", + "start_date": "2 weeks notice" + } + }, + "options": { + "sendOutreach": true, + "trackApplication": true + } +} +``` + +**Response:** +```json +{ + "success": true, + "applicationId": "app-202", + "status": "submitted", + "platform": "linkedin", + "jobId": "li-123456", + "submittedAt": "2024-01-15T10:30:00Z", + "outreach": { + "sent": true, + "recipientName": "Jane Smith", + "messageId": "msg-303" + } +} +``` + +--- + +### List Scraped Jobs + +```http +GET /api/jobs +``` + +Get list of discovered/scraped jobs. + +**Query Parameters:** + +| Parameter | Type | Description | +|-----------|------|-------------| +| platform | string | Filter by platform | +| status | string | Filter by status (new, applied, rejected) | +| company | string | Filter by company name | +| page | number | Page number (default: 1) | +| limit | number | Results per page (default: 25) | + +**Response:** +```json +{ + "success": true, + "data": [ + { + "id": "job-123", + "externalId": "li-123456", + "platform": "linkedin", + "title": "Senior Software Engineer", + "company": "Tech Corp", + "location": "San Francisco, CA", + "salary": "$150,000 - $200,000", + "status": "new", + "matchScore": 0.92, + "createdAt": "2024-01-15T10:30:00Z" + } + ], + "pagination": { + "page": 1, + "limit": 25, + "total": 156, + "totalPages": 7 + } +} +``` + +**Example:** +```bash +curl "http://localhost:8080/api/jobs?platform=linkedin&status=new&limit=10" \ + -H "Authorization: Bearer YOUR_API_KEY" +``` + +--- + +### Application History + +```http +GET /api/applications +``` + +Get user's application history. 
+ +**Query Parameters:** + +| Parameter | Type | Description | +|-----------|------|-------------| +| status | string | Filter by status | +| platform | string | Filter by platform | +| from | string | Start date (ISO 8601) | +| to | string | End date (ISO 8601) | +| page | number | Page number | +| limit | number | Results per page | + +**Response:** +```json +{ + "success": true, + "data": [ + { + "id": "app-202", + "jobId": "job-123", + "platform": "linkedin", + "company": "Tech Corp", + "title": "Senior Software Engineer", + "status": "applied", + "appliedAt": "2024-01-15T10:30:00Z", + "lastUpdated": "2024-01-15T10:30:00Z", + "outreach": { + "sent": true, + "response": null + } + } + ], + "stats": { + "total": 45, + "applied": 30, + "pending": 10, + "rejected": 5, + "responseRate": 0.23 + }, + "pagination": { + "page": 1, + "limit": 25, + "total": 45 + } +} +``` + +--- + +### Pipeline Logs + +```http +GET /api/logs +``` + +Get pipeline execution logs. + +**Query Parameters:** + +| Parameter | Type | Description | +|-----------|------|-------------| +| level | string | Log level (info, warn, error) | +| service | string | Filter by service | +| from | string | Start timestamp | +| to | string | End timestamp | +| limit | number | Number of logs | + +**Response:** +```json +{ + "success": true, + "data": [ + { + "timestamp": "2024-01-15T10:30:00.123Z", + "level": "info", + "service": "agent-orchestrator", + "message": "Job search completed", + "metadata": { + "searchTerm": "software engineer", + "platform": "linkedin", + "resultsCount": 45 + } + } + ] +} +``` + +--- + +## Auth Service (Port 8003) + +User authentication and authorization service. + +### User Registration + +```http +POST /api/auth/register +``` + +Register a new user account. + +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "SecureP@ssw0rd123", + "name": "John Doe", + "agreeToTerms": true +} +``` + +**Response:** +```json +{ + "success": true, + "message": "Registration successful. Please verify your email.", + "user": { + "id": "user-456", + "email": "user@example.com", + "name": "John Doe", + "emailVerified": false, + "createdAt": "2024-01-15T10:30:00Z" + } +} +``` + +**Validation Rules:** +- Email must be valid format +- Password must be 8+ characters with uppercase, lowercase, number, special char +- Name must be 2+ characters + +**Example:** +```bash +curl -X POST "http://localhost:8003/api/auth/register" \ + -H "Content-Type: application/json" \ + -d '{ + "email": "user@example.com", + "password": "SecureP@ssw0rd123", + "name": "John Doe", + "agreeToTerms": true + }' +``` + +--- + +### User Login + +```http +POST /api/auth/login +``` + +Authenticate user and get access token. + +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "SecureP@ssw0rd123" +} +``` + +**Response:** +```json +{ + "success": true, + "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "refreshToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "expiresIn": 900, + "tokenType": "Bearer", + "user": { + "id": "user-456", + "email": "user@example.com", + "name": "John Doe" + } +} +``` + +**Example:** +```bash +curl -X POST "http://localhost:8003/api/auth/login" \ + -H "Content-Type: application/json" \ + -d '{ + "email": "user@example.com", + "password": "SecureP@ssw0rd123" + }' +``` + +--- + +### Refresh Token + +```http +POST /api/auth/refresh +``` + +Refresh the access token using a refresh token. 
+ +**Request Body:** +```json +{ + "refreshToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." +} +``` + +**Response:** +```json +{ + "success": true, + "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "refreshToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "expiresIn": 900 +} +``` + +--- + +### Forgot Password + +```http +POST /api/auth/forgot-password +``` + +Request a password reset email. + +**Request Body:** +```json +{ + "email": "user@example.com" +} +``` + +**Response:** +```json +{ + "success": true, + "message": "If the email exists, a password reset link has been sent." +} +``` + +--- + +### Reset Password + +```http +POST /api/auth/reset-password +``` + +Reset password using token from email. + +**Request Body:** +```json +{ + "token": "reset-token-from-email", + "password": "NewSecureP@ssw0rd456" +} +``` + +**Response:** +```json +{ + "success": true, + "message": "Password has been reset successfully." +} +``` + +--- + +### Get Current User + +```http +GET /api/auth/me +``` + +Get the currently authenticated user's information. + +**Headers:** +```http +Authorization: Bearer +``` + +**Response:** +```json +{ + "success": true, + "user": { + "id": "user-456", + "email": "user@example.com", + "name": "John Doe", + "emailVerified": true, + "createdAt": "2024-01-15T10:30:00Z", + "updatedAt": "2024-01-15T10:30:00Z", + "preferences": { + "targetTitles": ["Software Engineer", "Senior Engineer"], + "targetLocations": ["San Francisco", "Remote"], + "salaryMin": 150000, + "remoteOk": true + }, + "stats": { + "totalApplications": 45, + "thisWeek": 12, + "responseRate": 0.23 + } + } +} +``` + +**Example:** +```bash +curl "http://localhost:8003/api/auth/me" \ + -H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." +``` + +--- + +## Dashboard Service (Port 3001) + +Next.js web application with API routes. + +### Dashboard Health + +```http +GET /api/health +``` + +**Response:** +```json +{ + "status": "healthy", + "service": "dashboard-service", + "version": "0.1.0" +} +``` + +--- + +### Dashboard Stats + +```http +GET /api/dashboard/stats +``` + +Get dashboard statistics for the authenticated user. + +**Response:** +```json +{ + "applications": { + "total": 125, + "pending": 23, + "applied": 89, + "interviewing": 8, + "rejected": 5 + }, + "successRate": 0.71, + "responseRate": 0.34, + "thisWeek": { + "applications": 15, + "responses": 3, + "interviews": 2 + }, + "topPlatforms": [ + { "platform": "linkedin", "count": 45 }, + { "platform": "glassdoor", "count": 32 }, + { "platform": "wellfound", "count": 28 } + ], + "recentApplications": [ + { + "id": "app-123", + "company": "Tech Corp", + "title": "Senior Engineer", + "status": "applied", + "appliedAt": "2024-01-15T10:30:00Z" + } + ] +} +``` + +--- + +## Monitoring Service (Port 8001) + +System monitoring and metrics collection. + +### Health Check + +```http +GET /health +``` + +**Response:** +```json +{ + "status": "healthy", + "service": "agent-monitoring-service", + "version": "1.0.0", + "timestamp": 1705320600.123, + "components": { + "database": "healthy", + "message_queue": "healthy", + "redis": "healthy" + } +} +``` + +--- + +### Metrics + +```http +GET /metrics +``` + +Get system and application metrics. 
+ +**Response:** +```json +{ + "applications": { + "processed_total": 1234, + "successful_total": 987, + "failed_total": 47, + "pending": 200 + }, + "performance": { + "average_processing_time_ms": 2345.67, + "queue_depth": 12, + "throughput_per_minute": 8.5 + }, + "system": { + "cpu_usage_percent": 45.2, + "memory_usage_mb": 512, + "uptime_seconds": 86400 + }, + "platforms": { + "linkedin": { "requests": 500, "success_rate": 0.95 }, + "glassdoor": { "requests": 300, "success_rate": 0.92 } + } +} +``` + +--- + +## Platform Adapters + +### LinkedIn Adapter Usage + +```javascript +// JavaScript SDK Example +const { LinkedInAdapter } = require('@ajob4agent/adapters'); + +const adapter = new LinkedInAdapter({ + email: process.env.LINKEDIN_EMAIL, + password: process.env.LINKEDIN_PASSWORD +}); + +// Search for jobs +const jobs = await adapter.searchJobs({ + query: 'software engineer', + location: 'San Francisco', + filters: { + experienceLevel: 'senior', + datePosted: 'week' + } +}); + +// Apply to a job +const result = await adapter.applyToJob({ + jobId: 'li-123456', + resume: resumeContent, + coverLetter: coverLetterContent +}); +``` + +**API Equivalent:** +```bash +curl -X POST "http://localhost:8080/api/jobs/platform/linkedin" \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -d '{ + "searchTerm": "software engineer", + "location": "San Francisco", + "experienceLevel": "senior", + "datePosted": "week" + }' +``` + +--- + +### Glassdoor Adapter Usage + +```javascript +const { GlassdoorAdapter } = require('@ajob4agent/adapters'); + +const adapter = new GlassdoorAdapter({ + partnerId: process.env.GLASSDOOR_PARTNER_ID, + key: process.env.GLASSDOOR_KEY +}); + +// Search jobs with company insights +const jobs = await adapter.searchJobs({ + query: 'software engineer', + location: 'San Francisco', + includeCompanyInfo: true +}); + +// Get company details +const company = await adapter.getCompanyInfo('tech-corp'); +``` + +--- + +### Form Fill Engine Usage + +```javascript +const { FormFillEngine } = require('@ajob4agent/form-fill'); + +const engine = new FormFillEngine({ + browser: 'chromium', + headless: true +}); + +// Auto-fill application form +const result = await engine.fillForm({ + url: 'https://careers.company.com/apply/123', + data: { + firstName: 'John', + lastName: 'Doe', + email: 'john@example.com', + resume: resumeBuffer, + coverLetter: coverLetterText, + customFields: { + 'years_experience': '7', + 'visa_sponsorship': 'no' + } + }, + options: { + submitForm: true, + captureScreenshot: true + } +}); +``` + +--- + +## Error Handling + +### Error Response Format + +All errors follow a consistent format: + +```json +{ + "success": false, + "error": { + "code": "VALIDATION_ERROR", + "message": "Invalid request body", + "details": [ + { + "field": "email", + "message": "Must be a valid email address" + } + ], + "timestamp": "2024-01-15T10:30:00Z", + "requestId": "req-12345" + } +} +``` + +### HTTP Status Codes + +| Status | Code | Description | +|--------|------|-------------| +| 200 | OK | Successful request | +| 201 | CREATED | Resource created | +| 400 | BAD_REQUEST | Invalid request | +| 401 | UNAUTHORIZED | Authentication required | +| 403 | FORBIDDEN | Insufficient permissions | +| 404 | NOT_FOUND | Resource not found | +| 409 | CONFLICT | Resource conflict | +| 422 | UNPROCESSABLE | Validation failed | +| 429 | TOO_MANY_REQUESTS | Rate limited | +| 500 | INTERNAL_ERROR | Server error | +| 502 | BAD_GATEWAY | Service unavailable | +| 503 | 
SERVICE_UNAVAILABLE | Temporarily unavailable | + +### Error Codes + +| Code | Description | +|------|-------------| +| `VALIDATION_ERROR` | Request validation failed | +| `AUTHENTICATION_ERROR` | Invalid or missing credentials | +| `AUTHORIZATION_ERROR` | Insufficient permissions | +| `NOT_FOUND_ERROR` | Resource not found | +| `CONFLICT_ERROR` | Resource already exists | +| `RATE_LIMIT_ERROR` | Rate limit exceeded | +| `PLATFORM_ERROR` | External platform error | +| `INTERNAL_ERROR` | Internal server error | + +--- + +## Rate Limiting + +### Limits + +| Endpoint Type | Limit | Window | +|---------------|-------|--------| +| General API | 100 requests | 15 minutes | +| Authentication | 5 requests | 1 minute | +| Job Search | 30 requests | 15 minutes | +| LLM Endpoints | 50 requests | 1 hour | +| File Upload | 10 requests | 5 minutes | + +### Rate Limit Headers + +```http +X-RateLimit-Limit: 100 +X-RateLimit-Remaining: 95 +X-RateLimit-Reset: 1705321200 +``` + +### Rate Limit Response + +```json +{ + "success": false, + "error": { + "code": "RATE_LIMIT_ERROR", + "message": "Rate limit exceeded. Please try again later.", + "retryAfter": 900 + } +} +``` + +--- + +## Webhooks + +### Configuring Webhooks + +```http +POST /api/webhooks +``` + +**Request Body:** +```json +{ + "url": "https://your-app.com/webhook", + "events": ["application.submitted", "application.updated", "job.matched"], + "secret": "your-webhook-secret" +} +``` + +### Webhook Events + +| Event | Description | +|-------|-------------| +| `job.discovered` | New job found matching criteria | +| `job.matched` | Job matched user preferences | +| `application.submitted` | Application successfully submitted | +| `application.updated` | Application status changed | +| `outreach.sent` | Recruiter outreach message sent | +| `outreach.response` | Received response to outreach | + +### Webhook Payload + +```json +{ + "event": "application.submitted", + "timestamp": "2024-01-15T10:30:00Z", + "data": { + "applicationId": "app-202", + "jobId": "job-123", + "platform": "linkedin", + "company": "Tech Corp", + "title": "Senior Software Engineer", + "status": "submitted" + } +} +``` + +### Webhook Security + +Verify webhook signatures: + +```javascript +const crypto = require('crypto'); + +function verifyWebhook(payload, signature, secret) { + const expectedSignature = crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + + return `sha256=${expectedSignature}` === signature; +} +``` + +--- + +## SDKs and Client Libraries + +### Python SDK + +```python +from ajob4agent import Client + +client = Client(api_key="your-api-key") + +# Search jobs +jobs = client.jobs.search( + query="software engineer", + location="San Francisco", + platforms=["linkedin", "glassdoor"] +) + +# Tailor resume +result = client.llm.tailor_resume( + job_id="job-123", + resume="Your resume content..." 
+) +``` + +### JavaScript/TypeScript SDK + +```typescript +import { AJOB4AGENTClient } from '@ajob4agent/client'; + +const client = new AJOB4AGENTClient({ + apiKey: 'your-api-key' +}); + +// Search jobs +const jobs = await client.jobs.search({ + query: 'software engineer', + location: 'San Francisco' +}); + +// Submit application +const application = await client.applications.submit({ + jobId: 'job-123', + resumeId: 'resume-456' +}); +``` + +--- + +## OpenAPI/Swagger + +Interactive API documentation is available at: + +- **LLM Service:** http://localhost:8000/docs +- **Agent Orchestrator:** http://localhost:8080/docs (when enabled) + +These provide: +- Interactive API testing +- Schema documentation +- Request/response examples +- Authentication helpers + +--- + +## Support + +For API support and questions: + +- 📖 [Full Documentation](https://docs.ajob4agent.com) +- 🐛 [Report Issues](https://github.com/groupthinking/AJOB4AGENT/issues) +- 💬 [Community Discord](https://discord.gg/ajob4agent) diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md new file mode 100644 index 0000000..132a140 --- /dev/null +++ b/docs/ARCHITECTURE.md @@ -0,0 +1,622 @@ +# System Architecture - AJOB4AGENT + +Comprehensive architecture documentation for the AJOB4AGENT autonomous job application system. + +## Table of Contents + +- [Overview](#overview) +- [System Architecture](#system-architecture) +- [Service Components](#service-components) +- [Data Flow](#data-flow) +- [Database Schema](#database-schema) +- [Communication Patterns](#communication-patterns) +- [Security Architecture](#security-architecture) +- [Deployment Architecture](#deployment-architecture) +- [Scalability](#scalability) + +--- + +## Overview + +AJOB4AGENT is a microservices-based platform designed to automate job search, application submission, and recruiter outreach. The system uses AI-powered content generation and multi-platform adapters to provide end-to-end job application automation. 
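+
+In practice, the same flow can be exercised directly over REST: the Agent Orchestrator (port 8080) handles job discovery and the LLM Service (port 8000) tailors content for it. A minimal curl sketch, with illustrative payload values:
+
+```bash
+# 1. Discover jobs through the Agent Orchestrator
+curl -X POST "http://localhost:8080/api/jobs/search" \
+  -H "Content-Type: application/json" \
+  -d '{"searchTerm": "software engineer", "location": "Remote", "platforms": ["linkedin"]}'
+
+# 2. Tailor a resume for a discovered job via the LLM Service
+curl -X POST "http://localhost:8000/tailor" \
+  -H "Content-Type: application/json" \
+  -d '{
+        "job_data": {"job_id": "job-123", "platform": "linkedin", "job_title": "Software Engineer", "company_name": "Tech Corp", "raw_description": "..."},
+        "user_profile": {"user_id": "user-456", "raw_master_resume": "..."}
+      }'
+```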
+ +### Design Principles + +- **Microservices Architecture:** Independent, loosely-coupled services +- **Event-Driven:** Asynchronous processing with message queues +- **API-First:** RESTful APIs with OpenAPI documentation +- **Security-First:** JWT authentication, encrypted data, secure defaults +- **Observability:** Comprehensive logging, metrics, and tracing + +--- + +## System Architecture + +### High-Level Architecture + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ AJOB4AGENT System │ +├─────────────────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌────────────────────────────────────────────────────────────────────────┐ │ +│ │ Client Layer │ │ +│ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ │ +│ │ │ Web App │ │ Mobile App │ │ API Clients │ │ │ +│ │ │ (Next.js) │ │ (Future) │ │ (REST/SDK) │ │ │ +│ │ └──────────────┘ └──────────────┘ └──────────────┘ │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +│ │ │ +│ ▼ │ +│ ┌────────────────────────────────────────────────────────────────────────┐ │ +│ │ API Gateway / Load Balancer │ │ +│ │ (Nginx / Vercel Edge / AWS ALB) │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +│ │ │ +│ ┌────────────────────────┼────────────────────────┐ │ +│ │ │ │ │ +│ ▼ ▼ ▼ │ +│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Dashboard │ │ Agent │ │ LLM Service │ │ +│ │ Service │ │ Orchestrator │ │ (FastAPI) │ │ +│ │ (Next.js) │ │ (Express) │ │ │ │ +│ │ Port: 3001 │ │ Port: 8080 │ │ Port: 8000 │ │ +│ └────────┬────────┘ └────────┬────────┘ └────────┬────────┘ │ +│ │ │ │ │ +│ └──────────────────────┼──────────────────────┘ │ +│ │ │ +│ ▼ │ +│ ┌────────────────────────────────────────────────────────────────────────┐ │ +│ │ Data & Messaging Layer │ │ +│ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ │ +│ │ │ PostgreSQL │ │ Redis │ │ RabbitMQ │ │ │ +│ │ │ (Primary) │ │ (Cache) │ │ (Queue) │ │ │ +│ │ └──────────────┘ └──────────────┘ └──────────────┘ │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌────────────────────────────────────────────────────────────────────────┐ │ +│ │ Monitoring & Observability │ │ +│ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ │ +│ │ │ Prometheus │ │ Grafana │ │ Sentry │ │ │ +│ │ │ (Metrics) │ │ (Dashboards) │ │ (Errors) │ │ │ +│ │ └──────────────┘ └──────────────┘ └──────────────┘ │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +│ │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +### Component Diagram + +``` +┌────────────────────────────────────────────────────────────────────────────┐ +│ Agent Orchestrator │ +│ ┌─────────────────────────────────────────────────────────────────────┐ │ +│ │ Platform Adapters │ │ +│ │ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ │ +│ │ │ LinkedIn │ │Glassdoor │ │ Wellfound│ │ Indeed │ │ More... 
│ │ │ +│ │ │ Adapter │ │ Adapter │ │ Adapter │ │ Adapter │ │ Adapters │ │ │ +│ │ └──────────┘ └──────────┘ └──────────┘ └──────────┘ └──────────┘ │ │ +│ └─────────────────────────────────────────────────────────────────────┘ │ +│ ┌─────────────────────────────────────────────────────────────────────┐ │ +│ │ Core Services │ │ +│ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ │ +│ │ │ Job Search │ │ Application │ │ Outreach │ │ │ +│ │ │ Service │ │ Service │ │ Service │ │ │ +│ │ └──────────────┘ └──────────────┘ └──────────────┘ │ │ +│ └─────────────────────────────────────────────────────────────────────┘ │ +│ ┌─────────────────────────────────────────────────────────────────────┐ │ +│ │ Apply Agents │ │ +│ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ │ +│ │ │ Form Fill │ │ Resume │ │ Cover Letter │ │ │ +│ │ │ Engine │ │ Handler │ │ Generator │ │ │ +│ │ └──────────────┘ └──────────────┘ └──────────────┘ │ │ +│ └─────────────────────────────────────────────────────────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Service Components + +### 1. Dashboard Service + +**Technology:** Next.js 14, React 18, TailwindCSS, TypeScript + +**Responsibilities:** +- User interface for application management +- Authentication and session management +- Real-time job tracking and analytics +- Settings and configuration management + +**Key Features:** +``` +┌─────────────────────────────────────────┐ +│ Dashboard Service │ +├─────────────────────────────────────────┤ +│ • User Authentication (NextAuth) │ +│ • Job Application Tracking │ +│ • Analytics & Reporting │ +│ • Resume Management │ +│ • Settings Configuration │ +│ • Real-time Updates (WebSocket) │ +└─────────────────────────────────────────┘ +``` + +### 2. Agent Orchestrator + +**Technology:** Node.js, Express, TypeScript, Playwright + +**Responsibilities:** +- Central coordination of all agents +- Job search across multiple platforms +- Application submission orchestration +- Recruiter outreach coordination +- Platform adapter management + +**API Endpoints:** +- `POST /api/jobs/search` - Multi-platform job search +- `POST /api/apply` - Submit job application +- `GET /api/jobs` - List discovered jobs +- `GET /api/applications` - Application history +- `GET /health` - Health check + +### 3. LLM Service + +**Technology:** Python, FastAPI, OpenAI API + +**Responsibilities:** +- AI-powered resume tailoring +- Cover letter generation +- Outreach message personalization +- Job description analysis +- Confidence scoring + +**API Endpoints:** +- `POST /tailor` - Tailor resume for job +- `POST /tailor/batch` - Batch processing +- `GET /health` - Health check + +### 4. 
Agent Monitoring Service + +**Technology:** Python, FastAPI + +**Responsibilities:** +- System metrics collection +- Application analytics +- Performance monitoring +- Alert management +- Log aggregation + +--- + +## Data Flow + +### Job Search Flow + +``` +┌──────────┐ ┌───────────────┐ ┌────────────────┐ ┌──────────┐ +│ User │────►│ Dashboard │────►│ Orchestrator │────►│ Platform │ +│ │ │ Service │ │ │ │ Adapters │ +└──────────┘ └───────────────┘ └────────────────┘ └──────────┘ + │ │ + │◄────────────────────┘ + │ (Job listings) + │ + ▼ + ┌────────────┐ + │ Database │ + │ (Jobs) │ + └────────────┘ +``` + +### Application Flow + +``` +┌──────────┐ ┌───────────────┐ ┌────────────────┐ ┌───────────┐ +│ User │────►│ Orchestrator │────►│ LLM Service │────►│ Tailor │ +│ (Apply) │ │ (Apply Job) │ │ (Process) │ │ Content │ +└──────────┘ └───────────────┘ └────────────────┘ └───────────┘ + │ │ + │ │ + ▼ ▼ + ┌────────────┐ ┌────────────┐ + │ RabbitMQ │ │ Response │ + │ (Queue) │ │ (Resume) │ + └────────────┘ └────────────┘ + │ + ▼ + ┌────────────────┐ ┌───────────┐ + │ Apply Agent │────►│ Platform │ + │ (Form Fill) │ │ Submit │ + └────────────────┘ └───────────┘ +``` + +### Message Queue Architecture + +``` + ┌─────────────────────────────────────┐ + │ RabbitMQ │ + ├─────────────────────────────────────┤ + │ │ + │ ┌─────────────────────────────────┐│ + │ │ Job Discovery Queue ││ + │ │ (new jobs from platforms) ││ + │ └─────────────────────────────────┘│ + │ │ + │ ┌─────────────────────────────────┐│ + │ │ Application Queue ││ + │ │ (pending applications) ││ + │ └─────────────────────────────────┘│ + │ │ + │ ┌─────────────────────────────────┐│ + │ │ Outreach Queue ││ + │ │ (recruiter messages) ││ + │ └─────────────────────────────────┘│ + │ │ + │ ┌─────────────────────────────────┐│ + │ │ Status Update Queue ││ + │ │ (application status changes) ││ + │ └─────────────────────────────────┘│ + │ │ + └─────────────────────────────────────┘ +``` + +--- + +## Database Schema + +### Entity Relationship Diagram + +``` +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ Users │ │ Jobs │ │ Applications │ +├─────────────────┤ ├─────────────────┤ ├─────────────────┤ +│ id (PK) │──┐ │ id (PK) │──┐ │ id (PK) │ +│ email │ │ │ external_id │ │ │ user_id (FK) │───┐ +│ password_hash │ │ │ platform │ │ │ job_id (FK) │───┼─┐ +│ name │ │ │ title │ │ │ status │ │ │ +│ created_at │ │ │ company │ │ │ applied_at │ │ │ +│ updated_at │ │ │ description │ └────│ tailored_resume │ │ │ +└─────────────────┘ │ │ location │ │ cover_letter │ │ │ + │ │ salary_range │ │ created_at │ │ │ + │ │ url │ └─────────────────┘ │ │ + │ │ created_at │ │ │ + │ └─────────────────┘ │ │ + │ │ │ + │ ┌─────────────────┐ ┌─────────────────┐ │ │ + │ │ Resumes │ │ Outreach │ │ │ + │ ├─────────────────┤ ├─────────────────┤ │ │ + └────│ id (PK) │ │ id (PK) │ │ │ + │ user_id (FK) │ │ application_id │──┘ │ + │ content │ │ recruiter_name │ │ + │ is_master │ │ recruiter_email │ │ + │ created_at │ │ message │ │ + └─────────────────┘ │ sent_at │ │ + │ response │ │ + └─────────────────┘ │ + │ + ┌─────────────────┐ │ + │ UserPreferences│ │ + ├─────────────────┤ │ + │ id (PK) │ │ + │ user_id (FK) │───────────────────────────────────┘ + │ target_titles │ + │ target_locations│ + │ salary_min │ + │ remote_ok │ + │ platforms │ + └─────────────────┘ +``` + +### Key Tables + +#### Users +```sql +CREATE TABLE users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email VARCHAR(255) UNIQUE NOT NULL, + password_hash VARCHAR(255) NOT NULL, + name VARCHAR(255), 
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); +``` + +#### Jobs +```sql +CREATE TABLE jobs ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + external_id VARCHAR(255) NOT NULL, + platform VARCHAR(50) NOT NULL, + title VARCHAR(255) NOT NULL, + company VARCHAR(255), + description TEXT, + location VARCHAR(255), + salary_range VARCHAR(100), + url VARCHAR(500), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + UNIQUE(external_id, platform) +); +``` + +#### Applications +```sql +CREATE TABLE applications ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID REFERENCES users(id), + job_id UUID REFERENCES jobs(id), + status VARCHAR(50) DEFAULT 'pending', + tailored_resume TEXT, + cover_letter TEXT, + applied_at TIMESTAMP, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); +``` + +--- + +## Communication Patterns + +### Synchronous (REST API) + +``` +Client ──────► API Gateway ──────► Service ──────► Response + │ + ▼ + Database +``` + +Used for: +- User authentication +- Real-time queries +- Configuration updates +- Health checks + +### Asynchronous (Message Queue) + +``` +Producer ──────► RabbitMQ ──────► Consumer ──────► Action + │ + └──────► Retry Queue (on failure) +``` + +Used for: +- Job application processing +- Resume tailoring (batch) +- Outreach message delivery +- Status update notifications + +### Event-Driven + +``` +Service A ──────► Event Bus ──────► Service B + │ + └──────► Service C + │ + └──────► Service D (Monitoring) +``` + +Events: +- `job.discovered` - New job found +- `application.submitted` - Application sent +- `outreach.sent` - Message delivered +- `status.updated` - Application status changed + +--- + +## Security Architecture + +### Authentication Flow + +``` +┌──────────┐ ┌───────────────┐ ┌────────────────┐ +│ Client │────►│ Auth Service │────►│ Database │ +│ │ │ (NextAuth) │ │ (Users) │ +└──────────┘ └───────────────┘ └────────────────┘ + │ │ + │ ▼ + │ ┌────────────┐ + │ │ JWT Token │ + │ └────────────┘ + │ │ + ▼ ▼ +┌──────────────────────────────────────┐ +│ Protected Resources │ +│ ┌────────────┐ ┌────────────┐ │ +│ │ API Routes │ │ Dashboard │ │ +│ └────────────┘ └────────────┘ │ +└──────────────────────────────────────┘ +``` + +### Security Layers + +``` +┌─────────────────────────────────────────────────────┐ +│ Security Layers │ +├─────────────────────────────────────────────────────┤ +│ Layer 1: Network Security │ +│ • WAF (Web Application Firewall) │ +│ • DDoS Protection │ +│ • SSL/TLS Encryption │ +├─────────────────────────────────────────────────────┤ +│ Layer 2: Application Security │ +│ • JWT Authentication │ +│ • Rate Limiting │ +│ • Input Validation │ +│ • CORS Configuration │ +├─────────────────────────────────────────────────────┤ +│ Layer 3: Data Security │ +│ • Encryption at Rest (AES-256) │ +│ • Encryption in Transit (TLS 1.3) │ +│ • Secure Secrets Management │ +├─────────────────────────────────────────────────────┤ +│ Layer 4: Infrastructure Security │ +│ • Container Isolation │ +│ • Network Segmentation │ +│ • Least Privilege Access │ +└─────────────────────────────────────────────────────┘ +``` + +--- + +## Deployment Architecture + +### Cloud Deployment (AWS) + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ AWS Cloud │ +│ ┌───────────────────────────────────────────────────────────────┐ │ +│ │ VPC │ │ +│ │ ┌─────────────────────────────────────────────────────────┐ │ │ +│ │ │ Public 
Subnet │ │ │ +│ │ │ ┌──────────────┐ ┌──────────────┐ │ │ │ +│ │ │ │ ALB │ │ CloudFront │ │ │ │ +│ │ │ │ (API/LB) │ │ (CDN) │ │ │ │ +│ │ │ └──────────────┘ └──────────────┘ │ │ │ +│ │ └─────────────────────────────────────────────────────────┘ │ │ +│ │ │ │ │ +│ │ ┌─────────────────────────────────────────────────────────┐ │ │ +│ │ │ Private Subnet │ │ │ +│ │ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ │ │ +│ │ │ │ ECS Task │ │ ECS Task │ │ ECS Task │ │ │ │ +│ │ │ │ Dashboard │ │ Orchestrat │ │ LLM │ │ │ │ +│ │ │ └────────────┘ └────────────┘ └────────────┘ │ │ │ +│ │ └─────────────────────────────────────────────────────────┘ │ │ +│ │ │ │ │ +│ │ ┌─────────────────────────────────────────────────────────┐ │ │ +│ │ │ Data Subnet │ │ │ +│ │ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ │ │ +│ │ │ │ RDS │ │ElastiCache │ │ Amazon │ │ │ │ +│ │ │ │ PostgreSQL │ │ Redis │ │ MQ │ │ │ │ +│ │ │ └────────────┘ └────────────┘ └────────────┘ │ │ │ +│ │ └─────────────────────────────────────────────────────────┘ │ │ +│ └───────────────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +### Container Orchestration + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ Container Orchestration (ECS/K8s) │ +├─────────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌─────────────────────────────────────────────────────────────┐ │ +│ │ Service Mesh │ │ +│ │ ┌───────────────────────────────────────────────────────┐ │ │ +│ │ │ Service Discovery │ Load Balancing │ Health Checks │ │ │ +│ │ └───────────────────────────────────────────────────────┘ │ │ +│ └─────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌────────────┐ │ +│ │ Dashboard │ │ Orchestrator│ │ LLM Service │ │ Monitoring │ │ +│ │ Replicas:2 │ │ Replicas:3 │ │ Replicas:2 │ │ Replicas:1 │ │ +│ └─────────────┘ └─────────────┘ └─────────────┘ └────────────┘ │ +│ │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Scalability + +### Horizontal Scaling + +``` + ┌─────────────────────────────────────┐ + │ Load Balancer │ + └─────────────────────────────────────┘ + │ + ┌───────────────────────┼───────────────────────┐ + │ │ │ + ▼ ▼ ▼ + ┌────────────┐ ┌────────────┐ ┌────────────┐ + │ Instance 1 │ │ Instance 2 │ │ Instance 3 │ + │ (Primary) │ │ (Replica) │ │ (Replica) │ + └────────────┘ └────────────┘ └────────────┘ +``` + +### Auto-Scaling Triggers + +| Metric | Scale Up | Scale Down | +|--------|----------|------------| +| CPU | > 70% | < 30% | +| Memory | > 80% | < 40% | +| Queue Depth | > 100 | < 10 | +| Response Time | > 2s | < 500ms | + +### Database Scaling + +``` +┌────────────────────────────────────────────────────────────┐ +│ Database Cluster │ +├────────────────────────────────────────────────────────────┤ +│ │ +│ ┌──────────────────┐ ┌──────────────────┐ │ +│ │ Primary (RW) │───►│ Replica (RO) │ │ +│ │ PostgreSQL │ │ PostgreSQL │ │ +│ └──────────────────┘ └──────────────────┘ │ +│ │ │ │ +│ └──────────────────────┘ │ +│ │ │ +│ ▼ │ +│ ┌────────────────┐ │ +│ │ Connection Pool│ │ +│ │ (PgBouncer) │ │ +│ └────────────────┘ │ +│ │ +└────────────────────────────────────────────────────────────┘ +``` + +### Caching Strategy + +``` +┌──────────┐ ┌──────────┐ ┌──────────┐ +│ Client │────►│ CDN │────►│ App │ +└──────────┘ └──────────┘ └──────────┘ + │ + ▼ + ┌──────────────┐ + │ Redis │ + │ (Cache) │ + 
└──────────────┘ + │ + ▼ + ┌──────────────┐ + │ PostgreSQL │ + │ (Database) │ + └──────────────┘ +``` + +**Cache Layers:** +1. **CDN** - Static assets, API responses +2. **Application** - Session data, user preferences +3. **Database** - Query results, computed data + +--- + +## Technology Stack Summary + +| Layer | Technology | Purpose | +|-------|------------|---------| +| Frontend | Next.js 14, React 18, TailwindCSS | User interface | +| API Gateway | Nginx, Vercel Edge | Load balancing, SSL | +| Backend | Node.js, Express, TypeScript | Business logic | +| AI Service | Python, FastAPI, OpenAI | Content generation | +| Database | PostgreSQL 15 | Primary data store | +| Cache | Redis 7 | Session, caching | +| Queue | RabbitMQ | Async processing | +| Monitoring | Prometheus, Grafana, Sentry | Observability | +| Container | Docker, ECS/Kubernetes | Deployment | +| CI/CD | GitHub Actions | Automation | + +--- + +## Further Reading + +- [API Documentation](./API.md) +- [Operations Guide](./OPERATIONS.md) +- [Deployment Checklist](../DEPLOYMENT_CHECKLIST.md) +- [Security Policy](../SECURITY.md) diff --git a/docs/TESTING.md b/docs/TESTING.md new file mode 100644 index 0000000..ea10a13 --- /dev/null +++ b/docs/TESTING.md @@ -0,0 +1,703 @@ +# Testing Guide - AJOB4AGENT + +Comprehensive testing guide for all AJOB4AGENT services. + +## Table of Contents + +- [Overview](#overview) +- [Testing Strategy](#testing-strategy) +- [Running Tests](#running-tests) +- [Unit Tests](#unit-tests) +- [Integration Tests](#integration-tests) +- [End-to-End Tests](#end-to-end-tests) +- [Test Coverage](#test-coverage) +- [CI/CD Integration](#cicd-integration) +- [Writing Tests](#writing-tests) +- [Best Practices](#best-practices) + +--- + +## Overview + +AJOB4AGENT uses a multi-layered testing approach to ensure code quality and reliability: + +| Test Type | Purpose | Tools | +|-----------|---------|-------| +| Unit Tests | Test individual functions/components | Jest, pytest | +| Integration Tests | Test service interactions | Jest, pytest | +| E2E Tests | Test full user flows | Playwright | +| API Tests | Test API endpoints | Supertest, httpx | +| Performance Tests | Test load handling | k6 | + +--- + +## Testing Strategy + +### Test Pyramid + +``` + /\ + / \ + / E2E \ + /──────\ + / \ + /Integration\ + /────────────\ + / \ + / Unit Tests \ + /──────────────────\ +``` + +- **Unit Tests (70%)**: Fast, isolated, test single functions +- **Integration Tests (20%)**: Test service boundaries +- **E2E Tests (10%)**: Test complete user workflows + +### Service-Specific Testing + +| Service | Framework | Location | +|---------|-----------|----------| +| Agent Orchestrator | Jest | `services/agent-orchestrator/__tests__/` | +| Dashboard Service | Jest + Testing Library | `services/dashboard-service/__tests__/` | +| LLM Service | pytest | `services/llm-service/tests/` | +| Monitoring Service | pytest | `services/agent-monitoring-service/tests/` | + +--- + +## Running Tests + +### Quick Start + +```bash +# Run all tests +make test + +# Or run tests for specific service +cd services/agent-orchestrator && npm test +cd services/dashboard-service && npm test +cd services/llm-service && pytest +cd services/agent-monitoring-service && pytest +``` + +### With Docker + +```bash +# Run all tests in containers +docker-compose -f docker-compose.test.yml up --build --exit-code-from tests + +# Run specific service tests +docker-compose exec agent-orchestrator npm test +docker-compose exec llm-service pytest +``` + +--- + +## Unit Tests + 
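+Each service's unit suite can also be run in a single pass from the repository root; the loop below simply chains the per-service commands from the Quick Start above (directory names follow the Service-Specific Testing table).
+
+```bash
+# Run every service's unit tests from the repository root
+for svc in agent-orchestrator dashboard-service; do
+  (cd "services/$svc" && npm test) || exit 1
+done
+for svc in llm-service agent-monitoring-service; do
+  (cd "services/$svc" && pytest) || exit 1
+done
+```
+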
+### Agent Orchestrator (Jest)
+
+```bash
+cd services/agent-orchestrator
+
+# Run all unit tests
+npm test
+
+# Run with coverage
+npm run test:coverage
+
+# Run in watch mode
+npm run test:watch
+
+# Run specific test file
+npm test -- --testPathPattern="job-search.test.ts"
+```
+
+**Example Unit Test:**
+
+```typescript
+// services/agent-orchestrator/__tests__/job-search.test.ts
+import { PlatformManager } from '../src/adapters/platform-manager';
+import { JobSearchParams } from '../src/types/job-search';
+
+describe('PlatformManager', () => {
+  let platformManager: PlatformManager;
+
+  beforeEach(() => {
+    platformManager = new PlatformManager();
+  });
+
+  describe('searchAllPlatforms', () => {
+    it('should return results from multiple platforms', async () => {
+      const params: JobSearchParams = {
+        searchTerm: 'software engineer',
+        location: 'San Francisco',
+        platforms: ['linkedin', 'glassdoor']
+      };
+
+      const results = await platformManager.searchAllPlatforms(params);
+
+      expect(results).toBeInstanceOf(Array);
+      expect(results.length).toBeGreaterThan(0);
+    });
+
+    it('should handle empty search term gracefully', async () => {
+      const params: JobSearchParams = {
+        searchTerm: '',
+        location: 'San Francisco',
+        platforms: ['linkedin']
+      };
+
+      await expect(platformManager.searchAllPlatforms(params))
+        .rejects.toThrow('Search term is required');
+    });
+  });
+});
+```
+
+### Dashboard Service (Jest + React Testing Library)
+
+```bash
+cd services/dashboard-service
+
+# Run all tests
+npm test
+
+# Run with coverage
+npm run test:coverage
+
+# Run component tests only
+npm test -- --testPathPattern="components"
+```
+
+**Example Component Test:**
+
+```typescript
+// services/dashboard-service/__tests__/components/JobCard.test.tsx
+import { render, screen, fireEvent } from '@testing-library/react';
+import { JobCard } from '../../src/components/JobCard';
+
+describe('JobCard', () => {
+  const mockJob = {
+    id: 'job-123',
+    title: 'Software Engineer',
+    company: 'Tech Corp',
+    location: 'San Francisco',
+    salary: '$150,000'
+  };
+
+  it('renders job information correctly', () => {
+    render(<JobCard job={mockJob} />);
+
+    expect(screen.getByText('Software Engineer')).toBeInTheDocument();
+    expect(screen.getByText('Tech Corp')).toBeInTheDocument();
+    expect(screen.getByText('San Francisco')).toBeInTheDocument();
+  });
+
+  it('calls onApply when apply button is clicked', () => {
+    const onApply = jest.fn();
+    render(<JobCard job={mockJob} onApply={onApply} />);
+
+    fireEvent.click(screen.getByRole('button', { name: /apply/i }));
+
+    expect(onApply).toHaveBeenCalledWith(mockJob.id);
+  });
+});
+```
+
+### LLM Service (pytest)
+
+```bash
+cd services/llm-service
+
+# Run all tests
+pytest
+
+# Run with coverage
+pytest --cov=app --cov-report=html
+
+# Run verbose
+pytest -v
+
+# Run specific test file
+pytest tests/test_tailor.py
+
+# Run specific test
+pytest tests/test_tailor.py::TestTailorEndpoint::test_tailor_success
+```
+
+**Example pytest Test:**
+
+```python
+# services/llm-service/tests/test_tailor.py
+import pytest
+from fastapi.testclient import TestClient
+from app.main import app
+
+client = TestClient(app)
+
+class TestTailorEndpoint:
+    def test_tailor_success(self):
+        """Test successful resume tailoring"""
+        payload = {
+            "job_data": {
+                "job_id": "test-123",
+                "platform": "linkedin",
+                "job_title": "Software Engineer",
+                "company_name": "Tech Corp",
+                "raw_description": "Looking for a software engineer..."
+            },
+            "user_profile": {
+                "user_id": "user-456",
+                "raw_master_resume": "Experienced engineer..."
+ } + } + + response = client.post("/tailor", json=payload) + + assert response.status_code == 200 + data = response.json() + assert data["status"] == "success" + assert "tailored_resume" in data + assert "confidence_score" in data + + def test_tailor_missing_job_data(self): + """Test error handling for missing job data""" + payload = { + "user_profile": { + "user_id": "user-456", + "raw_master_resume": "Experienced engineer..." + } + } + + response = client.post("/tailor", json=payload) + + assert response.status_code == 422 + + def test_health_check(self): + """Test health check endpoint""" + response = client.get("/health") + + assert response.status_code == 200 + assert response.json()["status"] == "healthy" +``` + +--- + +## Integration Tests + +### API Integration Tests + +```bash +# Run integration tests +npm run test:integration +``` + +**Example Integration Test:** + +```typescript +// services/agent-orchestrator/__tests__/integration/api.test.ts +import request from 'supertest'; +import app from '../../src/index'; + +describe('API Integration', () => { + describe('POST /api/jobs/search', () => { + it('should search for jobs across platforms', async () => { + const response = await request(app) + .post('/api/jobs/search') + .send({ + searchTerm: 'software engineer', + location: 'San Francisco', + platforms: ['linkedin'] + }) + .expect(200); + + expect(response.body.success).toBe(true); + expect(response.body.results).toBeInstanceOf(Array); + }); + + it('should return 400 for invalid request', async () => { + await request(app) + .post('/api/jobs/search') + .send({}) + .expect(400); + }); + }); + + describe('Health Check', () => { + it('should return healthy status', async () => { + const response = await request(app) + .get('/health') + .expect(200); + + expect(response.body.status).toBe('healthy'); + }); + }); +}); +``` + +### Database Integration Tests + +```typescript +// services/agent-orchestrator/__tests__/integration/database.test.ts +import { pool } from '../../src/db'; + +describe('Database Integration', () => { + beforeAll(async () => { + // Setup test database + await pool.query('CREATE TABLE IF NOT EXISTS test_jobs (id SERIAL PRIMARY KEY, title TEXT)'); + }); + + afterAll(async () => { + // Cleanup + await pool.query('DROP TABLE IF EXISTS test_jobs'); + await pool.end(); + }); + + it('should insert and retrieve jobs', async () => { + await pool.query("INSERT INTO test_jobs (title) VALUES ('Test Job')"); + + const result = await pool.query('SELECT * FROM test_jobs'); + + expect(result.rows).toHaveLength(1); + expect(result.rows[0].title).toBe('Test Job'); + }); +}); +``` + +--- + +## End-to-End Tests + +### Playwright Setup + +```bash +cd services/dashboard-service + +# Install Playwright +npm install -D @playwright/test +npx playwright install + +# Run E2E tests +npm run test:e2e + +# Run with UI +npm run test:e2e -- --ui + +# Generate report +npm run test:e2e -- --reporter=html +``` + +**Example Playwright Test:** + +```typescript +// services/dashboard-service/e2e/job-search.spec.ts +import { test, expect } from '@playwright/test'; + +test.describe('Job Search Flow', () => { + test.beforeEach(async ({ page }) => { + await page.goto('/'); + }); + + test('should search for jobs and display results', async ({ page }) => { + // Enter search term + await page.fill('[data-testid="search-input"]', 'software engineer'); + await page.fill('[data-testid="location-input"]', 'San Francisco'); + + // Click search + await page.click('[data-testid="search-button"]'); + + // Wait 
for results + await page.waitForSelector('[data-testid="job-card"]'); + + // Verify results + const jobCards = await page.locator('[data-testid="job-card"]').count(); + expect(jobCards).toBeGreaterThan(0); + }); + + test('should apply to a job', async ({ page }) => { + // Login first + await page.click('[data-testid="login-button"]'); + await page.fill('[data-testid="email-input"]', 'test@example.com'); + await page.fill('[data-testid="password-input"]', 'password123'); + await page.click('[data-testid="submit-login"]'); + + // Navigate to job + await page.goto('/jobs/job-123'); + + // Click apply + await page.click('[data-testid="apply-button"]'); + + // Confirm application + await page.click('[data-testid="confirm-apply"]'); + + // Verify success + await expect(page.locator('[data-testid="success-message"]')) + .toContainText('Application submitted'); + }); +}); +``` + +### Playwright Configuration + +```typescript +// services/dashboard-service/playwright.config.ts +import { defineConfig, devices } from '@playwright/test'; + +export default defineConfig({ + testDir: './e2e', + fullyParallel: true, + forbidOnly: !!process.env.CI, + retries: process.env.CI ? 2 : 0, + workers: process.env.CI ? 1 : undefined, + reporter: 'html', + use: { + baseURL: 'http://localhost:3001', + trace: 'on-first-retry', + }, + projects: [ + { + name: 'chromium', + use: { ...devices['Desktop Chrome'] }, + }, + { + name: 'firefox', + use: { ...devices['Desktop Firefox'] }, + }, + { + name: 'webkit', + use: { ...devices['Desktop Safari'] }, + }, + ], + webServer: { + command: 'npm run dev', + url: 'http://localhost:3001', + reuseExistingServer: !process.env.CI, + }, +}); +``` + +--- + +## Test Coverage + +### Coverage Requirements + +| Service | Minimum Coverage | Target Coverage | +|---------|-----------------|-----------------| +| Agent Orchestrator | 70% | 85% | +| Dashboard Service | 70% | 80% | +| LLM Service | 70% | 85% | +| Monitoring Service | 60% | 75% | + +### Generating Coverage Reports + +```bash +# Node.js services +npm run test:coverage + +# Python services +pytest --cov=app --cov-report=html --cov-report=term-missing + +# View HTML report +open coverage/lcov-report/index.html # Node.js +open htmlcov/index.html # Python +``` + +### Coverage Configuration + +**Jest (Node.js):** +```json +// jest.config.js +{ + "collectCoverageFrom": [ + "src/**/*.{ts,tsx}", + "!src/**/*.d.ts", + "!src/**/index.ts" + ], + "coverageThreshold": { + "global": { + "branches": 70, + "functions": 70, + "lines": 70, + "statements": 70 + } + } +} +``` + +**pytest (Python):** +```ini +# pytest.ini +[pytest] +addopts = --cov=app --cov-report=term-missing --cov-fail-under=70 +``` + +--- + +## CI/CD Integration + +### GitHub Actions + +Tests run automatically on every PR and push to main: + +```yaml +# .github/workflows/ci.yml +- name: Run tests + run: | + npm test -- --coverage + pytest --cov=app +``` + +### Pre-commit Hooks + +```bash +# Install pre-commit hooks +npm install husky --save-dev +npx husky install + +# Add test hook +npx husky add .husky/pre-commit "npm test" +``` + +--- + +## Writing Tests + +### Test Naming Convention + +```typescript +// Pattern: should__when_ +it('should return empty array when no jobs found', () => {}); +it('should throw error when api key missing', () => {}); +it('should successfully tailor resume when valid data provided', () => {}); +``` + +### Test Structure (AAA Pattern) + +```typescript +it('should calculate match score correctly', () => { + // Arrange + const job = { title: 'Software 
Engineer', skills: ['Python', 'TypeScript'] };
+  const resume = { skills: ['Python', 'JavaScript'] };
+
+  // Act
+  const score = calculateMatchScore(job, resume);
+
+  // Assert
+  expect(score).toBeGreaterThan(0.5);
+  expect(score).toBeLessThanOrEqual(1);
+});
+```
+
+### Mocking
+
+**Jest Mocks:**
+```typescript
+// Mock the external API client
+import axios from 'axios';
+
+jest.mock('axios');
+const mockedAxios = axios as jest.Mocked<typeof axios>;
+
+mockedAxios.get.mockResolvedValue({
+  data: { jobs: [] }
+});
+```
+
+**pytest Mocks:**
+```python
+from unittest.mock import patch, MagicMock
+
+@patch('app.services.openai_client')
+def test_tailor_with_mock(mock_client):
+    mock_client.chat.completions.create.return_value = MagicMock(
+        choices=[MagicMock(message=MagicMock(content="Tailored resume..."))]
+    )
+
+    result = tailor_resume("job desc", "resume")
+
+    assert "Tailored" in result
+```
+
+---
+
+## Best Practices
+
+### Do's
+
+- ✅ Write tests before fixing bugs
+- ✅ Use descriptive test names
+- ✅ Test edge cases and error conditions
+- ✅ Keep tests fast and isolated
+- ✅ Use test fixtures for common data
+- ✅ Mock external services
+
+### Don'ts
+
+- ❌ Don't test implementation details
+- ❌ Don't write flaky tests
+- ❌ Don't ignore failing tests
+- ❌ Don't test multiple things in one test
+- ❌ Don't rely on test execution order
+
+### Test Data Management
+
+```typescript
+// services/agent-orchestrator/__tests__/fixtures/jobs.ts
+export const mockJobs = {
+  softwareEngineer: {
+    id: 'job-1',
+    title: 'Software Engineer',
+    company: 'Tech Corp',
+    location: 'San Francisco'
+  },
+  seniorEngineer: {
+    id: 'job-2',
+    title: 'Senior Software Engineer',
+    company: 'Big Tech',
+    location: 'Remote'
+  }
+};
+
+// Usage
+import { mockJobs } from '../fixtures/jobs';
+
+it('should display job title', () => {
+  render(<JobCard job={mockJobs.softwareEngineer} />);
+  expect(screen.getByText('Software Engineer')).toBeInTheDocument();
+});
+```
+
+---
+
+## Troubleshooting
+
+### Common Issues
+
+**Tests Timing Out:**
+```bash
+# Increase timeout
+npm test -- --testTimeout=30000
+pytest --timeout=30
+```
+
+**Flaky Tests:**
+```bash
+# Run tests serially and report open handles to diagnose flakiness
+npm test -- --runInBand --detectOpenHandles
+```
+
+**Database Connection Issues:**
+```bash
+# Ensure test database is running
+docker-compose up -d postgres
+
+# Check connection
+psql $DATABASE_URL -c "SELECT 1"
+```
+
+---
+
+## Resources
+
+- [Jest Documentation](https://jestjs.io/docs/getting-started)
+- [pytest Documentation](https://docs.pytest.org/)
+- [Playwright Documentation](https://playwright.dev/docs/intro)
+- [Testing Library](https://testing-library.com/docs/)