Add a better Cody client server-sent configuration mechanism (#63591)
Signed-off-by: Stephen Gutekanst <stephen@sourcegraph.com>
Parent: 08252b1625 · Commit: 239f42947b
@@ -151,6 +151,7 @@ export interface CodyLLMSiteConfiguration {
    completionModelMaxTokens?: number
    provider?: string
    smartContextWindow?: boolean
    disableClientConfigAPI?: boolean
}

interface IsContextRequiredForChatQueryResponse {
@@ -7496,6 +7496,11 @@ type CodyLLMConfiguration {
    """
    smartContextWindow: String!
    """
    Disable Cody clients from using the new server-side config API. This is an escape-hatch for any issues
    that may arise. This field will be removed in the future.
    """
    disableClientConfigAPI: Boolean!
    """
    Name of the model being used for fast chat.
    """
    fastChatModel: String!
@@ -666,6 +666,9 @@ func (c *codyLLMConfigurationResolver) SmartContextWindow() string {
	}
	return "enabled"
}
func (c *codyLLMConfigurationResolver) DisableClientConfigAPI() bool {
	return c.config.DisableClientConfigAPI
}

func (c *codyLLMConfigurationResolver) FastChatModel() string { return c.config.FastChatModel }
func (c *codyLLMConfigurationResolver) FastChatModelMaxTokens() *int32 {
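For context, a client that needs the escape hatch can read the new field over the GraphQL API before deciding whether to call the new client config endpoint. A minimal sketch in Go — the site.codyLLMConfiguration query path, the /.api/graphql endpoint, the example instance URL, and the access token placeholder are assumptions for illustration and are not part of this change:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Ask the instance whether clients should avoid the new client config API.
	query := `{ site { codyLLMConfiguration { smartContextWindow disableClientConfigAPI } } }`
	body, _ := json.Marshal(map[string]string{"query": query})

	req, _ := http.NewRequest("POST", "https://sourcegraph.example.com/.api/graphql", bytes.NewReader(body))
	req.Header.Set("Authorization", "token "+"SG_ACCESS_TOKEN") // placeholder token
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Data struct {
			Site struct {
				CodyLLMConfiguration struct {
					SmartContextWindow     string `json:"smartContextWindow"`
					DisableClientConfigAPI bool   `json:"disableClientConfigAPI"`
				} `json:"codyLLMConfiguration"`
			} `json:"site"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("disableClientConfigAPI=%v\n", out.Data.Site.CodyLLMConfiguration.DisableClientConfigAPI)
}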
cmd/frontend/internal/clientconfig/BUILD.bazel (new file, 38 lines)
@@ -0,0 +1,38 @@
load("//dev:go_defs.bzl", "go_test")
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "clientconfig",
    srcs = [
        "clientconfig.go",
        "httpapi.go",
    ],
    importpath = "github.com/sourcegraph/sourcegraph/cmd/frontend/internal/clientconfig",
    visibility = ["//cmd/frontend:__subpackages__"],
    deps = [
        "//cmd/frontend/internal/cody",
        "//internal/actor",
        "//internal/clientconfig",
        "//internal/conf",
        "//internal/database",
        "@com_github_sourcegraph_log//:log",
    ],
)

go_test(
    name = "clientconfig_test",
    srcs = ["httpapi_test.go"],
    embed = [":clientconfig"],
    tags = ["requires-network"],
    deps = [
        "//internal/actor",
        "//internal/conf",
        "//internal/database",
        "//internal/database/dbtest",
        "//internal/license",
        "//internal/licensing",
        "//schema",
        "@com_github_hexops_autogold_v2//:autogold",
        "@com_github_sourcegraph_log//logtest",
    ],
)
cmd/frontend/internal/clientconfig/clientconfig.go (new file, 47 lines)
@@ -0,0 +1,47 @@
package clientconfig

import (
	"context"

	"github.com/sourcegraph/log"

	"github.com/sourcegraph/sourcegraph/cmd/frontend/internal/cody"
	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/clientconfig"
	"github.com/sourcegraph/sourcegraph/internal/conf"
	"github.com/sourcegraph/sourcegraph/internal/database"
)

func GetForActor(ctx context.Context, logger log.Logger, db database.DB, actor *actor.Actor) (*clientconfig.ClientConfig, error) {
	c := clientconfig.ClientConfig{
		// TODO(chrsmith): TODO(slimsag): Set this to `true` when and only when clients should use
		// the new LLM models httpapi endpoint being added in e.g. https://github.com/sourcegraph/sourcegraph/pull/63507
		ModelsAPIEnabled: false,
	}

	// 🚨 SECURITY: This code lets site admins restrict who has access to Cody at all via RBAC.
	// https://sourcegraph.com/docs/cody/clients/enable-cody-enterprise#enable-cody-only-for-some-users
	c.CodyEnabled, _ = cody.IsCodyEnabled(ctx, db)

	// 🚨 SECURITY: This code enforces that users do not have access to Cody features which
	// site admins do not want them to have access to.
	//
	// Legacy admin-control configuration which should be moved to RBAC, not globally in site
	// config. e.g. we should do it like https://github.com/sourcegraph/sourcegraph/pull/58831
	features := conf.GetConfigFeatures(conf.Get().SiteConfig())
	if features != nil { // nil -> Cody not enabled
		c.ChatEnabled = features.Chat
		c.AutoCompleteEnabled = features.AutoComplete
		c.CustomCommandsEnabled = features.Commands
		c.AttributionEnabled = features.Attribution
	}

	// Legacy feature-enablement configuration which should be moved to featureflag or RBAC,
	// not exist in site config.
	completionConfig := conf.GetCompletionsConfig(conf.Get().SiteConfig())
	if completionConfig != nil { // nil -> Cody not enabled
		c.SmartContextWindowEnabled = completionConfig.SmartContextWindow != "disabled"
	}

	return &c, nil
}
cmd/frontend/internal/clientconfig/httpapi.go (new file, 56 lines)
@@ -0,0 +1,56 @@
package clientconfig

import (
	"encoding/json"
	"net/http"

	"github.com/sourcegraph/log"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/database"
)

// HTTP handlers for interacting with this Sourcegraph instance's
// Cody client configuration. These handlers perform auth checks.
type HTTPHandlers struct {
	db     database.DB
	logger log.Logger
}

func NewHandlers(db database.DB, logger log.Logger) *HTTPHandlers {
	return &HTTPHandlers{
		db:     db,
		logger: logger,
	}
}

// GetClientConfigHandler returns the current Sourcegraph instance's Cody client configuration
// data as JSON. Requires that the calling user is authenticated.
func (h *HTTPHandlers) GetClientConfigHandler(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	h.logger.Info("fetching client config")

	// Auth check.
	callingActor := actor.FromContext(ctx)
	if callingActor == nil || !callingActor.IsAuthenticated() {
		h.logger.Warn("unauthenticated user requesting cody client config")
		http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
		return
	}

	currentConfig, err := GetForActor(r.Context(), h.logger, h.db, callingActor)
	if err != nil {
		h.logger.Error("fetching current cody client configuration", log.Error(err))
		http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
		return
	}

	rawJSON, err := json.MarshalIndent(currentConfig, "", " ")
	if err != nil {
		h.logger.Error("marshalling configuration", log.Error(err))
		http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
		return
	}
	http.Error(w, string(rawJSON), http.StatusOK)
}
cmd/frontend/internal/clientconfig/httpapi_test.go (new file, 87 lines)
@@ -0,0 +1,87 @@
package clientconfig

import (
	"context"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/hexops/autogold/v2"
	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/conf"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/license"
	"github.com/sourcegraph/sourcegraph/internal/licensing"
	"github.com/sourcegraph/sourcegraph/schema"
)

func TestAPI(t *testing.T) {
	logger := logtest.Scoped(t)
	db := database.NewDB(logger, dbtest.NewDB(t))
	ctx := context.Background()

	// Enable Cody (and all other license features)
	oldLicensingMock := licensing.MockCheckFeature
	licensing.MockCheckFeature = func(feature licensing.Feature) error {
		return nil
	}
	t.Cleanup(func() { licensing.MockCheckFeature = oldLicensingMock })

	// Mock the site configuration
	truePtr := true
	falsePtr := false
	licenseKey := "theasdfkey"
	licenseAccessToken := license.GenerateLicenseKeyBasedAccessToken(licenseKey)
	conf.Mock(&conf.Unified{
		SiteConfiguration: schema.SiteConfiguration{
			CodyEnabled:     &truePtr,
			CodyPermissions: &falsePtr, // disable RBAC Cody permissions
			Completions: &schema.Completions{
				AccessToken: licenseAccessToken,
			},
		},
	})
	t.Cleanup(func() { conf.Mock(nil) })

	// Grab HTTP handlers
	handlers := NewHandlers(db, logger)

	// Note: all the mechanics of conf.GetConfigFeatures, RBAC cody access via cody.IsCodyEnabled,
	// and conf.GetCompletionsConfig are tested independently at their implementations. We really
	// only test that those properties are relayed correctly by the HTTP API here.

	t.Run("unauthenticated", func(t *testing.T) {
		req, _ := http.NewRequest("GET", "", nil)
		recorder := httptest.NewRecorder()
		handlers.GetClientConfigHandler(recorder, req)

		autogold.Expect(int(401)).Equal(t, recorder.Code)
		autogold.Expect("Unauthorized\n").Equal(t, recorder.Body.String())
	})

	t.Run("authenticated_defaults", func(t *testing.T) {
		req, _ := http.NewRequest("GET", "", nil)
		req = req.WithContext(
			actor.WithActor(ctx, &actor.Actor{
				UID: 99,
			}),
		)
		recorder := httptest.NewRecorder()
		handlers.GetClientConfigHandler(recorder, req)

		autogold.Expect(int(200)).Equal(t, recorder.Code)
		autogold.Expect(`{
 "codyEnabled": true,
 "chatEnabled": true,
 "autoCompleteEnabled": true,
 "customCommandsEnabled": true,
 "attributionEnabled": false,
 "smartContextWindowEnabled": true,
 "modelsAPIEnabled": false
}
`).Equal(t, recorder.Body.String())
	})
}
@@ -26,6 +26,7 @@ go_library(
        "//cmd/frontend/backend",
        "//cmd/frontend/enterprise",
        "//cmd/frontend/graphqlbackend",
        "//cmd/frontend/internal/clientconfig",
        "//cmd/frontend/internal/cody",
        "//cmd/frontend/internal/handlerutil",
        "//cmd/frontend/internal/httpapi/releasecache",
@@ -22,6 +22,7 @@ import (
	"github.com/sourcegraph/sourcegraph/cmd/frontend/backend"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/enterprise"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/internal/clientconfig"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/internal/handlerutil"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/internal/httpapi/releasecache"
	"github.com/sourcegraph/sourcegraph/cmd/frontend/internal/httpapi/webhookhandlers"
@@ -174,6 +175,10 @@ func NewHandler(
	m.Path("/completions/stream").Methods("POST").Handler(handlers.NewChatCompletionsStreamHandler())
	m.Path("/completions/code").Methods("POST").Handler(handlers.NewCodeCompletionsHandler())

	// HTTP endpoints related to Cody client configuration.
	clientConfigHandlers := clientconfig.NewHandlers(db, logger)
	m.Path("/client-config").Methods("GET").HandlerFunc(clientConfigHandlers.GetClientConfigHandler)

	// HTTP endpoints related to LLM model configuration.
	modelConfigHandlers := modelconfig.NewHandlers(db, logger)
	m.Path("/modelconfig/supported-models.json").Methods("GET").HandlerFunc(modelConfigHandlers.GetSupportedModelsHandler)
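With the route registered above, a Cody client can fetch its effective configuration with a single authenticated GET and decode the ClientConfig JSON defined in internal/clientconfig/types.go (below). A rough sketch — the /.api/ URL prefix, the example instance URL, the token header, and the local clientConfig mirror type are assumptions for illustration, not something this diff specifies:

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// clientConfig mirrors internal/clientconfig.ClientConfig on the client side.
type clientConfig struct {
	CodyEnabled               bool `json:"codyEnabled"`
	ChatEnabled               bool `json:"chatEnabled"`
	AutoCompleteEnabled       bool `json:"autoCompleteEnabled"`
	CustomCommandsEnabled     bool `json:"customCommandsEnabled"`
	AttributionEnabled        bool `json:"attributionEnabled"`
	SmartContextWindowEnabled bool `json:"smartContextWindowEnabled"`
	ModelsAPIEnabled          bool `json:"modelsAPIEnabled"`
}

func main() {
	req, _ := http.NewRequest("GET", "https://sourcegraph.example.com/.api/client-config", nil)
	req.Header.Set("Authorization", "token "+"SG_ACCESS_TOKEN") // placeholder token

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		panic(fmt.Sprintf("unexpected status %d (401 means the token is missing or invalid)", resp.StatusCode))
	}

	var cfg clientConfig
	if err := json.NewDecoder(resp.Body).Decode(&cfg); err != nil {
		panic(err)
	}
	if !cfg.CodyEnabled {
		fmt.Println("Cody is disabled for this user; do not enable Cody features")
		return
	}
	fmt.Printf("chat=%v autocomplete=%v modelsAPI=%v\n", cfg.ChatEnabled, cfg.AutoCompleteEnabled, cfg.ModelsAPIEnabled)
}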
internal/clientconfig/BUILD.bazel (new file, 8 lines)
@@ -0,0 +1,8 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "clientconfig",
    srcs = ["types.go"],
    importpath = "github.com/sourcegraph/sourcegraph/internal/clientconfig",
    visibility = ["//:__subpackages__"],
)
internal/clientconfig/types.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package clientconfig

// This is the JSON object which all clients request after authentication to determine how
// they should behave, e.g. if a site admin has restricted chat/autocomplete/other functionality,
// if experimental features are available, etc.
//
// The configuration is always specific to a single authenticated user.
//
// Adding new fields here is fine, but you cannot make backwards-incompatible changes (removing
// fields or changing the meaning of fields in backwards-incompatible ways). If you need to do that,
// then read up on https://github.com/sourcegraph/sourcegraph/pull/63591#discussion_r1663211601
//
// After adding a field here, you can implement it in the GetForActor method in
// cmd/frontend/internal/clientconfig/clientconfig.go.
type ClientConfig struct {
	// Whether the site admin allows this user to make use of Cody at all.
	CodyEnabled bool `json:"codyEnabled"`

	// Whether the site admin allows this user to make use of the Cody chat feature.
	ChatEnabled bool `json:"chatEnabled"`

	// Whether the site admin allows this user to make use of the Cody autocomplete feature.
	AutoCompleteEnabled bool `json:"autoCompleteEnabled"`

	// Whether the site admin allows the user to make use of the **custom** Cody commands feature.
	CustomCommandsEnabled bool `json:"customCommandsEnabled"`

	// Whether the site admin allows this user to make use of the Cody attribution feature.
	AttributionEnabled bool `json:"attributionEnabled"`

	// Whether the 'smart context window' feature should be enabled, and whether the Sourcegraph
	// instance supports various new GraphQL APIs needed to make it work.
	SmartContextWindowEnabled bool `json:"smartContextWindowEnabled"`

	// Whether the new Sourcegraph backend LLM models API endpoint should be used to query which
	// models are available.
	ModelsAPIEnabled bool `json:"modelsAPIEnabled"`
}
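The backwards-compatibility rule in the comment above holds because encoding/json simply ignores keys that the receiving struct does not declare, so older clients keep working when the server starts sending new fields. A small self-contained sketch; the someFutureFlag key and the trimmed-down struct are hypothetical:

package main

import (
	"encoding/json"
	"fmt"
)

// oldClientConfig is an "old" client's view of the payload, missing newer fields.
type oldClientConfig struct {
	CodyEnabled bool `json:"codyEnabled"`
	ChatEnabled bool `json:"chatEnabled"`
}

func main() {
	// A newer server release sends an extra (hypothetical) field.
	payload := []byte(`{"codyEnabled": true, "chatEnabled": true, "someFutureFlag": false}`)

	var cfg oldClientConfig
	if err := json.Unmarshal(payload, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // prints {CodyEnabled:true ChatEnabled:true}; the unknown key is ignored
}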
@@ -931,12 +931,15 @@ func GetCompletionsConfig(siteConfig schema.SiteConfiguration) (c *conftypes.Com
		completionsConfig.SmartContextWindow = "enabled"
	}

	disableClientConfigAPI := completionsConfig.DisableClientConfigAPI != nil && *completionsConfig.DisableClientConfigAPI

	computedConfig := &conftypes.CompletionsConfig{
		Provider:               conftypes.CompletionsProviderName(completionsConfig.Provider),
		AccessToken:            completionsConfig.AccessToken,
		ChatModel:              completionsConfig.ChatModel,
		ChatModelMaxTokens:     completionsConfig.ChatModelMaxTokens,
		SmartContextWindow:     completionsConfig.SmartContextWindow,
		DisableClientConfigAPI: disableClientConfigAPI,
		FastChatModel:          completionsConfig.FastChatModel,
		FastChatModelMaxTokens: completionsConfig.FastChatModelMaxTokens,
		AzureUseDeprecatedCompletionsAPIForOldModels: completionsConfig.AzureUseDeprecatedCompletionsAPIForOldModels,
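For illustration, the escape hatch corresponds to a site admin setting "disableClientConfigAPI": true under "completions" in site configuration; GetCompletionsConfig then surfaces it on the computed config, and the codyLLMConfiguration resolver above exposes it to clients. A sketch using the same conf.Mock pattern as the tests below — the example package, the local ptr helper, the license key, and the surrounding values are illustrative only:

package example

import (
	"github.com/sourcegraph/sourcegraph/internal/conf"
	"github.com/sourcegraph/sourcegraph/schema"
)

// ptr is a small local stand-in for the pointers.Ptr helper used in the tests below.
func ptr[T any](v T) *T { return &v }

// Sketch only: as in the tests below, a valid (or mocked) Cody license is needed
// for GetCompletionsConfig to return a non-nil config.
func exampleDisableClientConfigAPI() {
	conf.Mock(&conf.Unified{
		SiteConfiguration: schema.SiteConfiguration{
			CodyEnabled: ptr(true),
			LicenseKey:  "license-key", // illustrative
			Completions: &schema.Completions{
				Provider:               "anthropic",
				AccessToken:            "asdf",
				DisableClientConfigAPI: ptr(true), // the escape hatch
			},
		},
	})
	defer conf.Mock(nil)

	got := conf.GetCompletionsConfig(conf.Get().SiteConfig())
	// got.DisableClientConfigAPI is now true, so the codyLLMConfiguration GraphQL
	// resolver reports disableClientConfigAPI: true and clients fall back to their
	// pre-existing configuration path.
	_ = got
}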
@@ -321,6 +321,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "anthropic/claude-3-sonnet-20240229",
				ChatModelMaxTokens:     12000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "anthropic/claude-3-haiku-20240307",
				FastChatModelMaxTokens: 12000,
				CompletionModel:        "fireworks/starcoder",
@@ -411,6 +412,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "claude-3-sonnet-20240229",
				ChatModelMaxTokens:     12000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "claude-3-haiku-20240307",
				FastChatModelMaxTokens: 12000,
				CompletionModel:        "claude-3-haiku-20240307",
@@ -426,18 +428,20 @@ func TestGetCompletionsConfig(t *testing.T) {
				CodyEnabled: pointers.Ptr(true),
				LicenseKey:  licenseKey,
				Completions: &schema.Completions{
					Enabled:            pointers.Ptr(true),
					Provider:           "anthropic",
					AccessToken:        "asdf",
					ChatModel:          "claude-3-opus-20240229",
					SmartContextWindow: "disabled",
					CompletionModel:    "claude-instant-1.2",
					Enabled:                pointers.Ptr(true),
					Provider:               "anthropic",
					AccessToken:            "asdf",
					ChatModel:              "claude-3-opus-20240229",
					SmartContextWindow:     "disabled",
					DisableClientConfigAPI: pointers.Ptr(false),
					CompletionModel:        "claude-instant-1.2",
				},
			},
			wantConfig: &conftypes.CompletionsConfig{
				ChatModel:              "claude-3-opus-20240229",
				ChatModelMaxTokens:     12000,
				SmartContextWindow:     "disabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "claude-3-haiku-20240307",
				FastChatModelMaxTokens: 12000,
				CompletionModel:        "claude-instant-1.2",
@@ -472,6 +476,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "gpt-4",
				ChatModelMaxTokens:     7000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "gpt-3.5-turbo",
				FastChatModelMaxTokens: 16000,
				CompletionModel:        "gpt-3.5-turbo-instruct",
@@ -487,19 +492,21 @@ func TestGetCompletionsConfig(t *testing.T) {
				CodyEnabled: pointers.Ptr(true),
				LicenseKey:  licenseKey,
				Completions: &schema.Completions{
					Provider:           "azure-openai",
					AccessToken:        "asdf",
					Endpoint:           "https://acmecorp.openai.azure.com",
					ChatModel:          "gpt4-deployment",
					SmartContextWindow: "disabled",
					FastChatModel:      "gpt35-turbo-deployment",
					CompletionModel:    "gpt35-turbo-deployment",
					Provider:               "azure-openai",
					AccessToken:            "asdf",
					Endpoint:               "https://acmecorp.openai.azure.com",
					ChatModel:              "gpt4-deployment",
					SmartContextWindow:     "disabled",
					DisableClientConfigAPI: pointers.Ptr(false),
					FastChatModel:          "gpt35-turbo-deployment",
					CompletionModel:        "gpt35-turbo-deployment",
				},
			},
			wantConfig: &conftypes.CompletionsConfig{
				ChatModel:              "gpt4-deployment",
				ChatModelMaxTokens:     7000,
				SmartContextWindow:     "disabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "gpt35-turbo-deployment",
				FastChatModelMaxTokens: 7000,
				CompletionModel:        "gpt35-turbo-deployment",
@@ -523,6 +530,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "accounts/fireworks/models/llama-v2-7b",
				ChatModelMaxTokens:     3000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "accounts/fireworks/models/llama-v2-7b",
				FastChatModelMaxTokens: 3000,
				CompletionModel:        "starcoder",
@@ -546,6 +554,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "anthropic.claude-v2",
				ChatModelMaxTokens:     12000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "anthropic.claude-instant-v1",
				FastChatModelMaxTokens: 9000,
				CompletionModel:        "anthropic.claude-instant-v1",
@@ -571,6 +580,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "anthropic.claude-3-haiku-20240307-v1:0-100k/arn:aws:bedrock:us-west-2:012345678901:provisioned-model/abcdefghijkl",
				ChatModelMaxTokens:     100_000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "anthropic.claude-v2",
				FastChatModelMaxTokens: 12000,
				CompletionModel:        "anthropic.claude-instant-v1",
@@ -611,6 +621,7 @@ func TestGetCompletionsConfig(t *testing.T) {
				ChatModel:              "anthropic/claude-v1.3",
				ChatModelMaxTokens:     9000,
				SmartContextWindow:     "enabled",
				DisableClientConfigAPI: false,
				FastChatModel:          "anthropic/claude-instant-1.3",
				FastChatModelMaxTokens: 9000,
				CompletionModel:        "anthropic/claude-instant-1.3",
@@ -6,7 +6,8 @@ type CompletionsConfig struct {
	ChatModel          string
	ChatModelMaxTokens int

	SmartContextWindow string
	SmartContextWindow     string
	DisableClientConfigAPI bool

	FastChatModel          string
	FastChatModelMaxTokens int
@@ -680,6 +680,8 @@ type Completions struct {
	CompletionModel string `json:"completionModel,omitempty"`
	// CompletionModelMaxTokens description: The maximum number of tokens to use as client when talking to completionModel. If not set, clients need to set their own limit.
	CompletionModelMaxTokens int `json:"completionModelMaxTokens,omitempty"`
	// DisableClientConfigAPI description: Should not be set. If set to true, disables the use of the new client config API. This new API has no user-facing effect, this opt-out is provided only as an escape hatch in case of issues.
	DisableClientConfigAPI *bool `json:"disableClientConfigAPI,omitempty"`
	// Enabled description: DEPRECATED. Use cody.enabled instead to turn Cody on/off.
	Enabled *bool `json:"enabled,omitempty"`
	// Endpoint description: The endpoint under which to reach the provider. Currently only used for provider types "sourcegraph", "openai" and "anthropic". The default values are "https://cody-gateway.sourcegraph.com", "https://api.openai.com/v1/chat/completions", and "https://api.anthropic.com/v1/messages" for Sourcegraph, OpenAI, and Anthropic, respectively.
@@ -2926,6 +2926,14 @@
          "default": "enabled",
          "enum": ["enabled", "disabled"]
        },
        "disableClientConfigAPI": {
          "description": "Should not be set. If set to true, disables the use of the new client config API. This new API has no user-facing effect, this opt-out is provided only as an escape hatch in case of issues.",
          "type": "boolean",
          "!go": {
            "pointer": true
          },
          "deprecationMessage": "This opt-out feature flag will be removed soon."
        },
        "completionModel": {
          "description": "The model used for code completion. If using the default provider 'sourcegraph', a reasonable default model will be set.\n NOTE: The Anthropic messages API does not support model names like claude-2 or claude-instant-1 where only the major version is specified as they are retired. We recommend using a specific model identifier as specified here https://docs.anthropic.com/claude/docs/models-overview#model-comparison ",
          "type": "string",