"4000:4000" # Map the container port to the host, change the host port if necessary
environment:
DATABASE_URL: "postgresql://llmproxy:dbpassword9090@db:5432/litellm"
STORE_MODEL_IN_DB: "True" # allows adding models to proxy via UI
env_file:
general_settings:
master_key: sk-1234 # [OPTIONAL] Use to enforce auth on proxy. See - https://docs.litellm.ai/docs/proxy/virtual_keys
store_model_in_db: True
proxy_budget_rescheduler_min_time: 60
proxy_budget_rescheduler_max_time: 64
proxy_batch_write_at: 1
database_connection_pool_limit: 10
database_url: "postgresql://<user>:<password>@<host>:<port>/<dbname>" # [OPTIONAL] use for token-based auth to proxy
pass_through_endpoints:
path: "/v1/rerank" # route you want to add to LiteLLM Proxy Server
target: "https://api.cohere.com/v1/rerank" # URL this route should forward requests to
headers: # headers to forward to this URL
content-type: application/json # (Optional) Extra Headers to pass to this endpoint
accept: application/json
forward_headers: True
What happened?
A bug happened! as the title
it's my docker-compose.yml; the part relevant to config.yaml is set up as suggested by the instructions
version: "3.11"
services:
  litellm:
    build:
      context: .
      args:
        target: runtime
    image: ghcr.io/berriai/litellm:main-stable
    volumes:
my .env file is:
LITELLM_MASTER_KEY="^^^^^" LITELLM_SALT_KEY="*****"
MISTRAL_API_KEY="^^^^^^^^^I"
my config.yaml is:
model_list:
litellm_settings:
set_verbose: True # Uncomment this if you want to see verbose logs; not recommended in production
drop_params: True
max_budget: 100
budget_duration: 30d
num_retries: 5
request_timeout: 600
telemetry: False
context_window_fallbacks: [{"gpt-3.5-turbo": ["gpt-3.5-turbo-large"]}]
default_team_settings:
# For /fine_tuning/jobs endpoints
finetune_settings:
# For /files endpoints
files_settings:
router_settings:
  routing_strategy: usage-based-routing-v2
  redis_host: os.environ/REDIS_HOST
  redis_password: os.environ/REDIS_PASSWORD
  redis_port: os.environ/REDIS_PORT
  enable_pre_call_checks: true
  model_group_alias: {"my-special-fake-model-alias-name": "fake-openai-endpoint-3"}
general_settings:
  master_key: sk-1234 # [OPTIONAL] Use to enforce auth on proxy. See - https://docs.litellm.ai/docs/proxy/virtual_keys
  store_model_in_db: True
  proxy_budget_rescheduler_min_time: 60
  proxy_budget_rescheduler_max_time: 64
  proxy_batch_write_at: 1
  database_connection_pool_limit: 10
database_url: "postgresql://<user>:<password>@<host>:<port>/<dbname>" # [OPTIONAL] use for token-based auth to proxy
pass_through_endpoints:
environment_variables:
# Settings for using redis caching
REDIS_HOST: redis-16337.c322.us-east-1-2.ec2.cloud.redislabs.com
REDIS_PORT: "16337"
REDIS_PASSWORD:
Relevant log output
Twitter / LinkedIn details
No response