Refactor configuration management, update S3 integration, add migration scripts, and implement pre-flight checks
@@ -1,50 +1,51 @@
-# System Configuration
-mongo_uri: "mongodb://localhost:27017"
-database_name: "helia"
+# Helia Application Configuration
+# Copy this file to config.yaml and adjust values as needed.
 
-# S3 Configuration (MinIO or AWS)
-s3_endpoint: "https://s3.amazonaws.com"
-s3_access_key: "your_access_key"
-s3_secret_key: "your_secret_key"
-s3_bucket: "your-bucket-name"
-s3_prefix: ""
-s3_region: "us-east-1"
-patient_limit: 5
 
+# Run Configuration
+limit: 5
+
+mongo:
+  uri: "mongodb://localhost:27017"
+  db_name: "helia"
+
+s3:
+  endpoint: "http://localhost:9000"
+  access_key_id: ""
+  secret_access_key: ""
+  bucket: "helia"
+  prefix: "daic-woz"
+  region: "eu-west-1"
 
 providers:
   openai:
     # Set api_key here or export OPENAI_API_KEY
     api_base: "https://api.openai.com/v1"
-    api_format: "openai"
     api_key: "sk-xxx"
+    api_spec: "openai"
 
   anthropic:
     # Set api_key here or export ANTHROPIC_API_KEY
     api_base: "https://api.anthropic.com/v1"
-    api_format: "anthropic"
     api_key: "sk-xxx"
+    api_spec: "anthropic"
 
   openrouter:
     # Set api_key here or export OPENROUTER_API_KEY
     api_base: "https://openrouter.ai/api/v1"
-    api_format: "openai"
     api_key: "sk-xxx"
+    api_spec: "openai"
 
   local_ollama:
     # API key optional for local_* providers
     api_base: "http://localhost:11434/v1"
-    api_format: "ollama"
+    api_spec: "ollama"
 
 runs:
-  - run_name: "baseline_gpt4"
+  baseline_gpt4:
     model:
-      provider: openai
+      provider: "openai"
       model_name: "gpt-4o"
-      temperature: 0.0
+      temperature: 1.0
     prompt_id: "default"
 
-  - run_name: "test_llama3"
+  test_llama3:
     model:
-      provider: local_ollama
+      provider: "local_ollama"
       model_name: "llama3"
       temperature: 0.7
     prompt_id: "default"
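The new layout nests the database settings under mongo: and renames database_name to db_name, so a loader now reads nested keys instead of the old flat mongo_uri/database_name pair. A minimal loading sketch is below; it assumes the project parses the file with PyYAML, and load_config plus the config.yaml path are placeholder names, not code from this commit.

import yaml

def load_config(path="config.yaml"):
    # Parse the YAML example shown in the diff into a plain dict.
    with open(path, "r", encoding="utf-8") as fh:
        return yaml.safe_load(fh)

cfg = load_config()
mongo_uri = cfg["mongo"]["uri"]     # was the flat key mongo_uri
db_name = cfg["mongo"]["db_name"]   # was database_name
limit = cfg.get("limit")            # was patient_limit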
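The s3: block now carries an explicit endpoint (MinIO at http://localhost:9000 by default) alongside access_key_id, secret_access_key, bucket, prefix, and region. A client built from it might look like the sketch below; boto3 is an assumption about the stack, and make_s3_client is a placeholder name.

import boto3

def make_s3_client(s3_cfg):
    # endpoint_url lets the same code talk to either MinIO or AWS S3.
    return boto3.client(
        "s3",
        endpoint_url=s3_cfg["endpoint"],
        aws_access_key_id=s3_cfg["access_key_id"],
        aws_secret_access_key=s3_cfg["secret_access_key"],
        region_name=s3_cfg["region"],
    )

s3 = make_s3_client(cfg["s3"])  # cfg from the loader sketch above
resp = s3.list_objects_v2(Bucket=cfg["s3"]["bucket"], Prefix=cfg["s3"]["prefix"])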
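Each providers: entry carries api_base, an api_spec (renamed from api_format in this diff), and an optional api_key; the comments say the key can also come from OPENAI_API_KEY, ANTHROPIC_API_KEY, or OPENROUTER_API_KEY, and that local_* providers need no key. A resolution helper consistent with those comments could look like this; resolve_api_key is a placeholder, not code from the commit.

import os

def resolve_api_key(name, provider_cfg):
    # Prefer the value in config.yaml, then fall back to <NAME>_API_KEY.
    key = provider_cfg.get("api_key")
    if not key:
        key = os.environ.get(f"{name.upper()}_API_KEY")
    if not key and not name.startswith("local_"):
        raise RuntimeError(f"no API key configured for provider '{name}'")
    return key

openai_key = resolve_api_key("openai", cfg["providers"]["openai"])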
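runs: changes from a list of run_name entries to a mapping keyed by run name (baseline_gpt4, test_llama3), each holding a model: block (provider, model_name, temperature) and a prompt_id. Iterating the new structure might look like the sketch below; the flattened job dict is purely illustrative.

def iter_runs(cfg):
    # Yield one resolved job per entry under runs:.
    for run_name, run_cfg in cfg["runs"].items():
        model = run_cfg["model"]
        provider_cfg = cfg["providers"][model["provider"]]
        yield {
            "run_name": run_name,
            "api_base": provider_cfg["api_base"],
            "api_spec": provider_cfg["api_spec"],
            "model_name": model["model_name"],
            "temperature": model["temperature"],
            "prompt_id": run_cfg["prompt_id"],
        }

for job in iter_runs(cfg):
    print(job["run_name"], job["model_name"])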
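The commit message also mentions pre-flight checks. Against this config, such checks would plausibly verify that MongoDB and the configured S3 bucket are reachable before any run starts; the sketch below uses pymongo and boto3, both assumptions about the stack, and preflight is a placeholder name.

from pymongo import MongoClient
from botocore.exceptions import ClientError

def preflight(cfg, s3_client):
    # Fail fast if MongoDB is unreachable.
    client = MongoClient(cfg["mongo"]["uri"], serverSelectionTimeoutMS=3000)
    client.admin.command("ping")
    # Fail fast if the configured bucket is missing or inaccessible.
    try:
        s3_client.head_bucket(Bucket=cfg["s3"]["bucket"])
    except ClientError as exc:
        raise RuntimeError(f"S3 bucket check failed: {exc}") from exc

preflight(cfg, s3)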