set port to 4000

This commit is contained in:
2026-04-15 23:14:38 +02:00
parent 28eb419afe
commit 3a731c6374
2 changed files with 17 additions and 16 deletions
+16 -16
View File
@@ -2,22 +2,22 @@ litellm_settings:
   drop_params: true # This strips 'thinking', etc. before sending to Copilot
 model_list:
-  # - model_name: github_copilot/gpt-4
-  #   litellm_params:
-  #     model: github_copilot/gpt-4
-  # - model_name: github_copilot/claude-sonnet-4.6
-  #   litellm_params:
-  #     model: github_copilot/claude-sonnet-4.6
-  # - model_name: github_copilot/gpt-5.1-codex
-  #   model_info:
-  #     mode: responses
-  #   litellm_params:
-  #     model: github_copilot/gpt-5.1-codex
-  # - model_name: github_copilot/text-embedding-ada-002
-  #   model_info:
-  #     mode: embedding
-  #   litellm_params:
-  #     model: github_copilot/text-embedding-ada-002
+  - model_name: gpt-4.1
+    litellm_params:
+      model: github_copilot/gpt-4.1
+  - model_name: claude-sonnet-4.6
+    litellm_params:
+      model: github_copilot/claude-sonnet-4.6
+  - model_name: gpt-5.1-codex
+    model_info:
+      mode: responses
+    litellm_params:
+      model: github_copilot/gpt-5.1-codex
+  - model_name: github_copilot/text-embedding-ada-002
+    model_info:
+      mode: embedding
+    litellm_params:
+      model: github_copilot/text-embedding-ada-002
   - model_name: glm5
     litellm_params:
       model: nvidia_nim/z-ai/glm5 # add nvidia_nim/ prefix to route as Nvidia NIM provider
+1
View File
@@ -1 +1,2 @@
+export PORT=4000
 litellm --config config.yaml