- Added OpenAI-compatible LLM endpoints to the API backend (see the sketch below)
- Introduced a web frontend with Jinja2 templates and static assets
- Implemented API proxy routes in the web service
- Added sample db.json data for items, users, orders, reviews, categories, and llm_requests
- Updated ADC and Helm configs for separate AI and standard rate limiting
- Upgraded FastAPI and Uvicorn; added httpx, Jinja2, and python-multipart dependencies
- Added an API configuration modal and client-side JS for the web app
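The LLM endpoints in the first item are what the AI rate limiting in the gateway config below is meant to protect. A minimal sketch of what an OpenAI-compatible chat completions route in the FastAPI backend could look like; the `/api/llm/v1/chat/completions` path, the request/response models, and the stubbed usage counters are illustrative assumptions, not the actual implementation:

```python
# Hypothetical sketch of an OpenAI-compatible chat completions endpoint.
# Route path, model names, and usage accounting are assumptions for illustration.
import time
import uuid

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class ChatMessage(BaseModel):
    role: str
    content: str


class ChatCompletionRequest(BaseModel):
    model: str
    messages: list[ChatMessage]
    temperature: float = 1.0


@app.post("/api/llm/v1/chat/completions")
async def chat_completions(req: ChatCompletionRequest) -> dict:
    # Echo-style stub: a real handler would dispatch to the configured model here.
    reply = f"Received {len(req.messages)} message(s) for model {req.model}."
    return {
        "id": f"chatcmpl-{uuid.uuid4().hex}",
        "object": "chat.completion",
        "created": int(time.time()),
        "model": req.model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": reply},
                "finish_reason": "stop",
            }
        ],
        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
    }
```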
75 lines · 1.6 KiB · YAML
```yaml
services:
  - name: apache-service
    hosts:
      - commandware.it
    upstream:
      name: apache-upstream
      scheme: http
      type: roundrobin
      nodes:
        - host: apache-service.api7ee.svc.cluster.local
          port: 80
          weight: 100
    routes:
      - name: apache-route
        uris:
          - /*
        vars:
          - - uri
            - "~~"
            - "^(?!/api|/docs)"
        priority: 1
        plugins:
          redirect:
            http_to_https: true

  - name: nginx-api-service
    hosts:
      - commandware.it
    upstream:
      name: nginx-upstream
      scheme: http
      type: roundrobin
      nodes:
        - host: nginx-service.api7ee.svc.cluster.local
          port: 80
          weight: 100
    routes:
      - name: nginx-api-docs-route
        uris:
          - /docs
          - /docs/*
        priority: 30
        plugins:
          redirect:
            http_to_https: true

      - name: nginx-api-llm-route
        uris:
          - /api/llm
          - /api/llm/*
        priority: 20
        plugins:
          redirect:
            http_to_https: true
          ai-rate-limiting:
            limit: 100
            time_window: 60
            rejected_code: 429
            limit_strategy: "total_tokens"

      - name: nginx-api-route
        uris:
          - /api
          - /api/*
        priority: 10
        plugins:
          redirect:
            http_to_https: true
          limit-count:
            count: 100
            time_window: 60
            rejected_code: 429
            key_type: "var"
            key: "remote_addr"
```
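The two limiters are intentionally different: `ai-rate-limiting` on the LLM route budgets `total_tokens` per 60-second window, while `limit-count` on the general API route counts requests per client `remote_addr`. A quick way to sanity-check the request-based limit is a probe like the sketch below; the `/api/items` path and the request count are illustrative assumptions:

```python
# Hypothetical smoke test for the limit-count plugin on /api: after 100 requests
# within one 60-second window, the gateway should start returning HTTP 429.
import httpx

BASE_URL = "https://commandware.it"


def probe_rate_limit(max_requests: int = 105) -> None:
    with httpx.Client(base_url=BASE_URL, timeout=10.0) as client:
        for i in range(1, max_requests + 1):
            resp = client.get("/api/items")  # assumed endpoint, for illustration only
            if resp.status_code == 429:
                print(f"Rate limited after {i} requests (expected around 100).")
                return
            resp.raise_for_status()
    print("Never hit the 429 limit; check the limit-count configuration.")


if __name__ == "__main__":
    probe_rate_limit()
```

If the limit is working, the probe should report a 429 at roughly the 101st request inside a single 60-second window.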