GET /v1/language
curl --request GET \
  --url http://localhost:9099/v1/language/
{
  "routers": [
    {
      "enabled": true,
      "models": [
        {
          "anthropic": {
            "baseUrl": "<string>",
            "chatEndpoint": "<string>",
            "defaultParams": {
              "max_tokens": 123,
              "metadata": "<string>",
              "stop": [
                "<string>"
              ],
              "system": "<string>",
              "temperature": 123,
              "top_k": 123,
              "top_p": 123
            },
            "model": "<string>"
          },
          "azureopenai": {
            "apiVersion": "<string>",
            "baseUrl": "<string>",
            "chatEndpoint": "<string>",
            "defaultParams": {
              "frequency_penalty": 123,
              "logit_bias": {},
              "max_tokens": 123,
              "n": 123,
              "presence_penalty": 123,
              "response_format": {},
              "seed": 123,
              "stop": [
                "<string>"
              ],
              "temperature": 123,
              "tool_choice": {},
              "tools": [
                "<string>"
              ],
              "top_p": 123,
              "user": "<string>"
            },
            "model": "<string>"
          },
          "client": {
            "timeout": "<string>"
          },
          "cohere": {
            "baseUrl": "<string>",
            "chatEndpoint": "<string>",
            "defaultParams": {
              "chat_history": [
                {
                  "message": "<string>",
                  "role": "<string>",
                  "user": "<string>"
                }
              ],
              "citation_quality": "<string>",
              "connectors": [
                "<string>"
              ],
              "conversation_id": "<string>",
              "preamble_override": "<string>",
              "prompt_truncation": "<string>",
              "search_queries_only": true,
              "stream": true,
              "temperature": 123
            },
            "model": "<string>"
          },
          "enabled": true,
          "error_budget": "<string>",
          "id": "<string>",
          "latency": {
            "decay": 123,
            "update_interval": "<string>",
            "warmup_samples": 123
          },
          "octoml": {
            "baseUrl": "<string>",
            "chatEndpoint": "<string>",
            "defaultParams": {
              "frequency_penalty": 123,
              "max_tokens": 123,
              "presence_penalty": 123,
              "stop": [
                "<string>"
              ],
              "temperature": 123,
              "top_p": 123
            },
            "model": "<string>"
          },
          "openai": {
            "baseUrl": "<string>",
            "chatEndpoint": "<string>",
            "defaultParams": {
              "frequency_penalty": 123,
              "logit_bias": {},
              "max_tokens": 123,
              "n": 123,
              "presence_penalty": 123,
              "response_format": {},
              "seed": 123,
              "stop": [
                "<string>"
              ],
              "temperature": 123,
              "tool_choice": {},
              "tools": [
                "<string>"
              ],
              "top_p": 123,
              "user": "<string>"
            },
            "model": "<string>"
          },
          "weight": 123
        }
      ],
      "retry": {
        "base_multiplier": 123,
        "max_delay": 123,
        "max_retries": 123,
        "min_delay": 123
      },
      "routers": "<string>",
      "strategy": "<string>"
    }
  ]
}

Response

200 - application/json
OK
routers
object[]