Refactor configuration and assessment modules; add S3 integration and update LLM client usage

Author: Santiago Martinez-Avial
Date: 2025-12-21 03:38:50 +01:00
parent 5ef0fc0ccc
commit 4a340a9661
14 changed files with 587 additions and 240 deletions


@@ -1,16 +1,17 @@
-# LLM Configuration
-# Defaults to OpenRouter if not specified
 # Database Configuration
 HELIA_MONGO_URI=mongodb://localhost:27017
 HELIA_DATABASE_NAME=helia
-# Base URL for the LLM provider (default: https://openrouter.ai/api/v1)
-HELIA_LLM_BASE_URL=https://openrouter.ai/api/v1
+# S3 Configuration (MinIO or AWS)
+# Required for finding and downloading transcripts
+HELIA_S3_ENDPOINT=https://s3.amazonaws.com
+HELIA_S3_ACCESS_KEY=your_access_key
+HELIA_S3_SECRET_KEY=your_secret_key
+HELIA_S3_BUCKET=your-bucket-name
+HELIA_S3_REGION=us-east-1
-# API Key. Checked in order: HELIA_LLM_API_KEY, OPENROUTER_API_KEY, OPENAI_API_KEY
-HELIA_LLM_API_KEY=sk-or-your-api-key-here
-# Model identifier (default: google/gemini-3.0-pro-preview)
-HELIA_LLM_MODEL=google/gemini-3.0-pro-preview
 # Neo4j Configuration
 NEO4J_URI=bolt://localhost:7687
 NEO4J_USER=neo4j
 NEO4J_PASSWORD=password
+# LLM API Keys
+# These are used by the run configuration YAML via ${VAR} substitution
+OPENAI_API_KEY=sk-...
+ANTHROPIC_API_KEY=sk-ant-...
+OPENROUTER_API_KEY=sk-or-...
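
The new HELIA_S3_* variables are described as required for finding and downloading transcripts. The sketch below shows one way they could be wired into an S3 client; the use of boto3 and the `transcripts/` key prefix are illustrative assumptions, not details taken from this commit.

```python
import os
import boto3  # assumption: boto3 talks to both MinIO and AWS via endpoint_url

def make_s3_client():
    """Build an S3 client from the HELIA_S3_* environment variables."""
    return boto3.client(
        "s3",
        endpoint_url=os.environ["HELIA_S3_ENDPOINT"],
        aws_access_key_id=os.environ["HELIA_S3_ACCESS_KEY"],
        aws_secret_access_key=os.environ["HELIA_S3_SECRET_KEY"],
        region_name=os.environ.get("HELIA_S3_REGION", "us-east-1"),
    )

def list_transcripts(prefix="transcripts/"):
    """List transcript keys under a hypothetical prefix in the configured bucket."""
    s3 = make_s3_client()
    resp = s3.list_objects_v2(Bucket=os.environ["HELIA_S3_BUCKET"], Prefix=prefix)
    return [obj["Key"] for obj in resp.get("Contents", [])]

def download_transcript(key, dest_path):
    """Download one transcript object to a local file."""
    s3 = make_s3_client()
    s3.download_file(os.environ["HELIA_S3_BUCKET"], key, dest_path)
```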
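
The LLM comments in the example file document a key-resolution order of HELIA_LLM_API_KEY, then OPENROUTER_API_KEY, then OPENAI_API_KEY, with an OpenRouter-compatible base URL and a default model. A minimal sketch of that lookup follows, assuming an OpenAI-compatible client (the `openai` SDK here is an assumption, not confirmed by the diff).

```python
import os
from openai import OpenAI  # assumption: an OpenAI-compatible client is used

def resolve_api_key():
    """Return the first key found, in the order documented in the example file."""
    for var in ("HELIA_LLM_API_KEY", "OPENROUTER_API_KEY", "OPENAI_API_KEY"):
        value = os.environ.get(var)
        if value:
            return value
    raise RuntimeError("no LLM API key set in the environment")

client = OpenAI(
    base_url=os.environ.get("HELIA_LLM_BASE_URL", "https://openrouter.ai/api/v1"),
    api_key=resolve_api_key(),
)

response = client.chat.completions.create(
    model=os.environ.get("HELIA_LLM_MODEL", "google/gemini-3.0-pro-preview"),
    messages=[{"role": "user", "content": "ping"}],
)
print(response.choices[0].message.content)
```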
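
The added provider keys are noted as being consumed by the run configuration YAML via ${VAR} substitution. A minimal sketch of that substitution step, assuming PyYAML and a hypothetical `run.yaml` path:

```python
import os
import re
import yaml  # assumption: PyYAML parses the run configuration

_VAR_PATTERN = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")

def load_run_config(path="run.yaml"):
    """Read the YAML text, expand ${VAR} from the environment, then parse it."""
    with open(path) as fh:
        raw = fh.read()
    expanded = _VAR_PATTERN.sub(lambda m: os.environ.get(m.group(1), ""), raw)
    return yaml.safe_load(expanded)

# e.g. a line such as `api_key: ${OPENROUTER_API_KEY}` picks up the value
# exported in the environment file above.
config = load_run_config()
```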