From 78291f553ee5e58327253ea25537fd4e5f8b3b50 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rafa=C5=82=20Gajdulewicz?=
Date: Tue, 5 Mar 2024 16:39:28 +0100
Subject: [PATCH] Log if one of main LLM providers is not enabled (#60857)

Log an error if one of the big 3 providers (Anthropic, OpenAI, Fireworks) is
not enabled.
---
 cmd/cody-gateway/internal/httpapi/handler.go | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/cmd/cody-gateway/internal/httpapi/handler.go b/cmd/cody-gateway/internal/httpapi/handler.go
index 3be1495fabe..b3c3631c08c 100644
--- a/cmd/cody-gateway/internal/httpapi/handler.go
+++ b/cmd/cody-gateway/internal/httpapi/handler.go
@@ -106,6 +106,8 @@ func NewHandler(
 				otelhttp.WithPublicEndpoint(),
 			),
 		))
+	} else {
+		logger.Error("Anthropic access token not set")
 	}
 	if config.OpenAI.AccessToken != "" {
 		v1router.Path("/completions/openai").Methods(http.MethodPost).Handler(
@@ -174,6 +176,8 @@ func NewHandler(
 				otelhttp.WithPublicEndpoint(),
 			),
 		))
+	} else {
+		logger.Error("OpenAI access token not set")
 	}
 	if config.Fireworks.AccessToken != "" {
 		v1router.Path("/completions/fireworks").Methods(http.MethodPost).Handler(
@@ -200,6 +204,8 @@ func NewHandler(
 				otelhttp.WithPublicEndpoint(),
 			),
 		))
+	} else {
+		logger.Error("Fireworks access token not set")
 	}
 	// Register a route where actors can retrieve their current rate limit state.
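
Note: below is a minimal, self-contained Go sketch of the pattern this patch
establishes in NewHandler: register a provider's completions route only when
its access token is configured, and log an error otherwise so a disabled
provider is visible at startup. It is not the gateway's actual wiring; it uses
the standard library's net/http mux and log package rather than v1router and
the gateway's logger, and providerConfig / registerProviders are hypothetical
names used only for illustration.

package main

import (
	"log"
	"net/http"
)

// providerConfig is a hypothetical stand-in for the per-provider settings
// (config.Anthropic, config.OpenAI, config.Fireworks) read in NewHandler.
type providerConfig struct {
	Name        string
	AccessToken string
	Path        string
	Handler     http.Handler
}

// registerProviders applies the same rule as the patch: a completions route
// is registered only when the provider's access token is non-empty; otherwise
// an error is logged so the missing configuration shows up in the logs.
func registerProviders(mux *http.ServeMux, providers []providerConfig) {
	for _, p := range providers {
		if p.AccessToken != "" {
			mux.Handle(p.Path, p.Handler)
			continue
		}
		// Analogous to logger.Error("Anthropic access token not set") in the
		// patch, but using the standard library logger in this sketch.
		log.Printf("error: %s access token not set", p.Name)
	}
}

func main() {
	mux := http.NewServeMux()
	registerProviders(mux, []providerConfig{
		// Empty AccessToken triggers the error log instead of registration.
		{Name: "Anthropic", AccessToken: "", Path: "/v1/completions/anthropic",
			Handler: http.NotFoundHandler()},
	})
}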