Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
71 changes: 71 additions & 0 deletions .env.full
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
# SRE Agent Full Configuration
# This file contains all possible configuration options for production use
# All features enabled - requires credentials for all services
#
# NOTE: all comments are kept on their own lines. Some env-file consumers
# (e.g. `docker run --env-file`) do not strip inline `# ...` text after a
# value and would include it in the value itself.

# ===== ESSENTIAL CREDENTIALS =====
# Basic authentication token for API access
DEV_BEARER_TOKEN=your_dev_token_here

# Hugging Face token for Llama Firewall (REQUIRED)
# Get from: https://huggingface.co/settings/tokens
# Needs read access to meta-llama/Llama-Prompt-Guard-2-86M
HF_TOKEN=your_hugging_face_token_here

# LLM Provider Configuration
# PROVIDER options: anthropic, gemini, mock
PROVIDER=anthropic
# MODEL: your preferred model
MODEL=claude-3-5-sonnet-20241022

# LLM API Keys (provide the one matching your PROVIDER)
# ANTHROPIC_API_KEY is required if PROVIDER=anthropic
ANTHROPIC_API_KEY=your_anthropic_api_key_here
# GEMINI_API_KEY is required if PROVIDER=gemini
GEMINI_API_KEY=your_gemini_api_key_here

# ===== SLACK INTEGRATION =====
# For sending notifications and alerts
SLACK_BOT_TOKEN=your_slack_bot_token_here
SLACK_TEAM_ID=your_slack_team_id_here
SLACK_SIGNING_SECRET=your_slack_signing_secret_here
SLACK_CHANNEL_ID=your_slack_channel_id_here

# ===== GITHUB INTEGRATION =====
# For repository access and code analysis
GITHUB_PERSONAL_ACCESS_TOKEN=your_github_token_here
GITHUB_ORGANISATION=your_org_name
GITHUB_REPO_NAME=your_repo_name
# Root directory of your project
PROJECT_ROOT=src

# ===== KUBERNETES INTEGRATION =====
# Choose AWS (EKS) OR GCP (GKE) - not both

# AWS EKS Configuration
AWS_REGION=us-east-1
AWS_ACCOUNT_ID=your_aws_account_id
TARGET_EKS_CLUSTER_NAME=your_eks_cluster_name

# GCP GKE Configuration (alternative to AWS)
# CLOUDSDK_CORE_PROJECT=your_gcp_project_id
# CLOUDSDK_COMPUTE_REGION=us-central1
# TARGET_GKE_CLUSTER_NAME=your_gke_cluster_name

# ===== SERVICE CONFIGURATION =====
# Services running on your cluster (customize for your setup)
SERVICES=["cartservice", "adservice", "emailservice", "frontend", "checkoutservice"]

# Tools available to the agent
TOOLS=["list_pods", "get_logs", "get_file_contents", "slack_post_message", "create_github_issue"]

# ===== PERFORMANCE SETTINGS =====
# Maximum tokens the LLM can generate
MAX_TOKENS=10000

# Query timeout in seconds
QUERY_TIMEOUT=300

# ===== USAGE =====
# 1. Replace all placeholder values with your actual credentials
# 2. Generate this file with: uv run python setup_credentials.py --mode full
# 3. Choose either AWS or GCP configuration (comment out the unused one)
# 4. Customize SERVICES and TOOLS for your environment
# 5. Start with:
#    - AWS: docker compose -f compose.aws.yaml up
#    - GCP: docker compose -f compose.gcp.yaml up
# 6. Access the API at http://localhost:8003
59 changes: 59 additions & 0 deletions .env.minimal
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# SRE Agent Minimal Configuration
# This file contains only the essential credentials needed for basic functionality
# Requires real API keys for LLM provider
#
# NOTE: all comments are kept on their own lines. Some env-file consumers
# (e.g. `docker run --env-file`) do not strip inline `# ...` text after a
# value and would include it in the value itself.

# ===== ESSENTIAL CREDENTIALS =====
# Basic authentication token for API access
DEV_BEARER_TOKEN=your_dev_token_here

# Hugging Face token for Llama Firewall (REQUIRED)
# Get from: https://huggingface.co/settings/tokens
# Needs read access to meta-llama/Llama-Prompt-Guard-2-86M
HF_TOKEN=your_hugging_face_token_here

# LLM Provider Configuration
# PROVIDER options: anthropic or gemini
PROVIDER=anthropic
# MODEL: your preferred model
MODEL=claude-3-5-sonnet-20241022

# LLM API Keys (provide the one matching your PROVIDER)
# ANTHROPIC_API_KEY is required if PROVIDER=anthropic
ANTHROPIC_API_KEY=your_anthropic_api_key_here
# GEMINI_API_KEY is required if PROVIDER=gemini
GEMINI_API_KEY=your_gemini_api_key_here

# ===== DEFAULT CONFIGURATION =====
# Service configuration (minimal defaults)
SERVICES=["cartservice", "adservice", "emailservice"]
TOOLS=["list_pods", "get_logs", "get_file_contents", "slack_post_message"]

# GitHub repository defaults (for prompt server)
GITHUB_ORGANISATION=fuzzylabs
GITHUB_REPO_NAME=microservices-demo
PROJECT_ROOT=src

# Timeouts and limits
MAX_TOKENS=10000
QUERY_TIMEOUT=300

# ===== DISABLED FEATURES =====
# These features are not configured in minimal mode
# Slack notifications - disabled
SLACK_BOT_TOKEN=null
SLACK_TEAM_ID=null
SLACK_SIGNING_SECRET=null
SLACK_CHANNEL_ID=null

# GitHub integration - disabled
GITHUB_PERSONAL_ACCESS_TOKEN=null

# Kubernetes integration - not configured
# You'll need to add these manually if you want K8s integration:
# AWS_REGION=us-east-1
# TARGET_EKS_CLUSTER_NAME=your_cluster_name
# or
# CLOUDSDK_CORE_PROJECT=your_project_id
# TARGET_GKE_CLUSTER_NAME=your_cluster_name

# ===== USAGE =====
# 1. Replace placeholder values with your actual credentials
# 2. Generate this file with: uv run python setup_credentials.py --mode minimal
# 3. Start with: docker compose -f compose.aws.yaml up (or compose.gcp.yaml)
# 4. Some features (Slack, GitHub, K8s) will be disabled but core functionality works
48 changes: 48 additions & 0 deletions .env.testing
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# SRE Agent Testing Configuration
# This file contains the minimal configuration needed for testing with mock services
# No real API keys required - everything uses mock implementations

# ===== ESSENTIAL CREDENTIALS =====
# Basic authentication token (can be any value for testing)
DEV_BEARER_TOKEN=dev-token-123

# Hugging Face token for Llama Firewall (REQUIRED - get from https://huggingface.co/settings/tokens)
# This is the only real API key needed for testing
HF_TOKEN=your_hugging_face_token_here

# LLM Provider Configuration (mock = no real LLM calls)
PROVIDER=mock
MODEL=mock-model

# Mock LLM credentials (not used but may be required by validation)
# NOTE(review): env files have no real null — "null" is the literal string
# value, not an unset variable. Confirm the app treats the string "null"
# as "feature disabled".
ANTHROPIC_API_KEY=null
GEMINI_API_KEY=null

# ===== DEFAULT CONFIGURATION =====
# GitHub repository defaults (for prompt server)
GITHUB_ORGANISATION=fuzzylabs
GITHUB_REPO_NAME=microservices-demo
PROJECT_ROOT=src

# Service configuration defaults
SERVICES=["cartservice", "adservice", "emailservice"]
TOOLS=["list_pods", "get_logs", "get_file_contents", "slack_post_message"]

# Timeouts and limits
MAX_TOKENS=10000
QUERY_TIMEOUT=300

# Slack configuration (disabled for testing)
SLACK_BOT_TOKEN=null
SLACK_TEAM_ID=null
SLACK_SIGNING_SECRET=null
SLACK_CHANNEL_ID=null

# GitHub integration (disabled for testing)
GITHUB_PERSONAL_ACCESS_TOKEN=null

# ===== USAGE =====
# 1. Set your HF_TOKEN above (the only real credential needed)
# 2. Generate this file with: uv run python setup_credentials.py --mode testing
# 3. Start the testing environment with: docker compose -f compose.tests.yaml up --build
#    (Note: First build takes ~10-15 minutes, subsequent starts are much faster)
27 changes: 26 additions & 1 deletion .github/workflows/build-push-images.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,13 @@ on:
push:
branches:
- main
release:
types: [published]

permissions:
id-token: write
contents: read
packages: write

jobs:
build-and-push:
Expand Down Expand Up @@ -57,10 +60,32 @@ jobs:
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2

- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ steps.login-ecr.outputs.registry }}/mcp/${{ matrix.name }}
ghcr.io/fuzzylabs/sre-agent-${{ matrix.name }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=latest,enable={{is_default_branch}}

- name: Build and Push ${{ matrix.name }}
uses: docker/build-push-action@v6
with:
context: ${{ matrix.context }}
file: ${{ matrix.dockerfile }}
push: true
tags: ${{ steps.login-ecr.outputs.registry }}/mcp/${{ matrix.name }}:latest
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
Loading