# =============================================================================
# Datacenter Documentation System - Configuration Template
# Copy this file to .env and fill in your actual values
# =============================================================================

# =============================================================================
# MongoDB Configuration
# =============================================================================
MONGO_ROOT_USER=admin
MONGO_ROOT_PASSWORD=changeme_secure_mongo_password
MONGODB_URL=mongodb://admin:changeme_secure_mongo_password@mongodb:27017
MONGODB_DATABASE=datacenter_docs

# =============================================================================
# Redis Configuration
# =============================================================================
REDIS_PASSWORD=changeme_redis_password
REDIS_URL=redis://redis:6379/0

# =============================================================================
# MCP Server Configuration
# =============================================================================
MCP_SERVER_URL=https://mcp.company.local
MCP_API_KEY=your_mcp_api_key_here

# =============================================================================
# LLM Configuration (OpenAI-compatible API)
# Choose one of the configurations below and uncomment it
# =============================================================================

# --- OpenAI (Default) ---
LLM_BASE_URL=https://api.openai.com/v1
LLM_API_KEY=sk-your-openai-api-key-here
LLM_MODEL=gpt-4-turbo-preview
# Alternative models: gpt-4, gpt-3.5-turbo

# --- Anthropic Claude (OpenAI-compatible) ---
# LLM_BASE_URL=https://api.anthropic.com/v1
# LLM_API_KEY=sk-ant-your-anthropic-key-here
# LLM_MODEL=claude-sonnet-4-20250514
# Alternative models: claude-3-opus-20240229, claude-3-sonnet-20240229

# --- LLMStudio (Local) ---
# LLM_BASE_URL=http://localhost:1234/v1
# LLM_API_KEY=not-needed
# LLM_MODEL=your-local-model-name

# --- Open-WebUI (Local) ---
# LLM_BASE_URL=http://localhost:8080/v1
# LLM_API_KEY=your-open-webui-key
# LLM_MODEL=llama3
# Alternative models: mistral, mixtral, codellama

# --- Ollama (Local) ---
# LLM_BASE_URL=http://localhost:11434/v1
# LLM_API_KEY=ollama
# LLM_MODEL=llama3
# Alternative models: mistral, mixtral, codellama, phi3

# LLM Generation Settings
LLM_TEMPERATURE=0.3
LLM_MAX_TOKENS=4096

# =============================================================================
# API Configuration
# =============================================================================
API_HOST=0.0.0.0
API_PORT=8000
WORKERS=4

# =============================================================================
# CORS Configuration
# =============================================================================
CORS_ORIGINS=http://localhost:3000,https://docs.company.local

# =============================================================================
# Application Settings
# =============================================================================
LOG_LEVEL=INFO
DEBUG=false

# =============================================================================
# Celery Configuration
# =============================================================================
CELERY_BROKER_URL=redis://redis:6379/0
CELERY_RESULT_BACKEND=redis://redis:6379/0

# =============================================================================
# Vector Store Configuration
# =============================================================================
VECTOR_STORE_PATH=./data/chroma_db
EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2