Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit abf18a6

Browse files
committed
Merge branch 'main' into lilac/persist-terraform-modules
2 parents 1998624 + 4369765 commit abf18a6

File tree

74 files changed

+5211
-184
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

74 files changed

+5211
-184
lines changed

.github/actions/setup-go/action.yaml

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,42 @@ inputs:
55
version:
66
description: "The Go version to use."
77
default: "1.24.2"
8+
use-preinstalled-go:
9+
description: "Whether to use preinstalled Go."
10+
default: "false"
11+
use-temp-cache-dirs:
12+
description: "Whether to use temporary GOCACHE and GOMODCACHE directories."
13+
default: "false"
814
runs:
915
using: "composite"
1016
steps:
17+
- name: Override GOCACHE and GOMODCACHE
18+
shell: bash
19+
if: inputs.use-temp-cache-dirs == 'true'
20+
run: |
21+
# cd to another directory to ensure we're not inside a Go project.
22+
# That'd trigger Go to download the toolchain for that project.
23+
cd "$RUNNER_TEMP"
24+
# RUNNER_TEMP should be backed by a RAM disk on Windows if
25+
# coder/setup-ramdisk-action was used
26+
export GOCACHE_DIR="$RUNNER_TEMP""\go-cache"
27+
export GOMODCACHE_DIR="$RUNNER_TEMP""\go-mod-cache"
28+
export GOPATH_DIR="$RUNNER_TEMP""\go-path"
29+
mkdir -p "$GOCACHE_DIR"
30+
mkdir -p "$GOMODCACHE_DIR"
31+
mkdir -p "$GOPATH_DIR"
32+
go env -w GOCACHE="$GOCACHE_DIR"
33+
go env -w GOMODCACHE="$GOMODCACHE_DIR"
34+
go env -w GOPATH="$GOPATH_DIR"
35+
1136
- name: Setup Go
1237
uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2
1338
with:
14-
go-version: ${{ inputs.version }}
39+
go-version: ${{ inputs.use-preinstalled-go == 'false' && inputs.version || '' }}
1540

1641
- name: Install gotestsum
1742
shell: bash
18-
run: go install gotest.tools/gotestsum@latest
43+
run: go install gotest.tools/gotestsum@3f7ff0ec4aeb6f95f5d67c998b71f272aa8a8b41 # v1.12.1
1944

2045
# It isn't necessary that we ever do this, but it helps
2146
# separate the "setup" from the "run" times.

.github/workflows/ci.yaml

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -313,7 +313,7 @@ jobs:
313313
run: ./scripts/check_unstaged.sh
314314

315315
test-go:
316-
runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'depot-macos-latest' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'windows-latest-16-cores' || matrix.os }}
316+
runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'depot-macos-latest' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'depot-windows-2022-16' || matrix.os }}
317317
needs: changes
318318
if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main'
319319
timeout-minutes: 20
@@ -326,17 +326,31 @@ jobs:
326326
- windows-2022
327327
steps:
328328
- name: Harden Runner
329+
# Harden Runner is only supported on Ubuntu runners.
330+
if: runner.os == 'Linux'
329331
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
330332
with:
331333
egress-policy: audit
332334

335+
# Set up RAM disks to speed up the rest of the job. This action is in
336+
# a separate repository to allow its use before actions/checkout.
337+
- name: Setup RAM Disks
338+
if: runner.os == 'Windows'
339+
uses: coder/setup-ramdisk-action@79dacfe70c47ad6d6c0dd7f45412368802641439
340+
333341
- name: Checkout
334342
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
335343
with:
336344
fetch-depth: 1
337345

338346
- name: Setup Go
339347
uses: ./.github/actions/setup-go
348+
with:
349+
# Runners have Go baked-in and Go will automatically
350+
# download the toolchain configured in go.mod, so we don't
351+
# need to reinstall it. It's faster on Windows runners.
352+
use-preinstalled-go: ${{ runner.os == 'Windows' }}
353+
use-temp-cache-dirs: ${{ runner.os == 'Windows' }}
340354

341355
- name: Setup Terraform
342356
uses: ./.github/actions/setup-tf

cli/server.go

Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@ import (
6161
"github.com/coder/serpent"
6262
"github.com/coder/wgtunnel/tunnelsdk"
6363

64+
"github.com/coder/coder/v2/coderd/ai"
6465
"github.com/coder/coder/v2/coderd/entitlements"
6566
"github.com/coder/coder/v2/coderd/notifications/reports"
6667
"github.com/coder/coder/v2/coderd/runtimeconfig"
@@ -610,6 +611,22 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
610611
)
611612
}
612613

614+
aiProviders, err := ReadAIProvidersFromEnv(os.Environ())
615+
if err != nil {
616+
return xerrors.Errorf("read ai providers from env: %w", err)
617+
}
618+
vals.AI.Value.Providers = append(vals.AI.Value.Providers, aiProviders...)
619+
for _, provider := range aiProviders {
620+
logger.Debug(
621+
ctx, "loaded ai provider",
622+
slog.F("type", provider.Type),
623+
)
624+
}
625+
languageModels, err := ai.ModelsFromConfig(ctx, vals.AI.Value.Providers)
626+
if err != nil {
627+
return xerrors.Errorf("create language models: %w", err)
628+
}
629+
613630
realIPConfig, err := httpmw.ParseRealIPConfig(vals.ProxyTrustedHeaders, vals.ProxyTrustedOrigins)
614631
if err != nil {
615632
return xerrors.Errorf("parse real ip config: %w", err)
@@ -640,6 +657,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
640657
CacheDir: cacheDir,
641658
GoogleTokenValidator: googleTokenValidator,
642659
ExternalAuthConfigs: externalAuthConfigs,
660+
LanguageModels: languageModels,
643661
RealIPConfig: realIPConfig,
644662
SSHKeygenAlgorithm: sshKeygenAlgorithm,
645663
TracerProvider: tracerProvider,
@@ -739,6 +757,15 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
739757
_ = sqlDB.Close()
740758
}()
741759

760+
if options.DeploymentValues.Prometheus.Enable {
761+
// At this stage we don't think the database name serves much purpose in these metrics.
762+
// It requires parsing the DSN to determine it, which requires pulling in another dependency
763+
// (i.e. https://github.com/jackc/pgx), but it's rather heavy.
764+
// The conn string (https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING) can
765+
// take different forms, which make parsing non-trivial.
766+
options.PrometheusRegistry.MustRegister(collectors.NewDBStatsCollector(sqlDB, ""))
767+
}
768+
742769
options.Database = database.New(sqlDB)
743770
ps, err := pubsub.New(ctx, logger.Named("pubsub"), sqlDB, dbURL)
744771
if err != nil {
@@ -2612,6 +2639,77 @@ func redirectHTTPToHTTPSDeprecation(ctx context.Context, logger slog.Logger, inv
26122639
}
26132640
}
26142641

2642+
func ReadAIProvidersFromEnv(environ []string) ([]codersdk.AIProviderConfig, error) {
2643+
// The index numbers must be in-order.
2644+
sort.Strings(environ)
2645+
2646+
var providers []codersdk.AIProviderConfig
2647+
for _, v := range serpent.ParseEnviron(environ, "CODER_AI_PROVIDER_") {
2648+
tokens := strings.SplitN(v.Name, "_", 2)
2649+
if len(tokens) != 2 {
2650+
return nil, xerrors.Errorf("invalid env var: %s", v.Name)
2651+
}
2652+
2653+
providerNum, err := strconv.Atoi(tokens[0])
2654+
if err != nil {
2655+
return nil, xerrors.Errorf("parse number: %s", v.Name)
2656+
}
2657+
2658+
var provider codersdk.AIProviderConfig
2659+
switch {
2660+
case len(providers) < providerNum:
2661+
return nil, xerrors.Errorf(
2662+
"provider num %v skipped: %s",
2663+
len(providers),
2664+
v.Name,
2665+
)
2666+
case len(providers) == providerNum:
2667+
// At the next next provider.
2668+
providers = append(providers, provider)
2669+
case len(providers) == providerNum+1:
2670+
// At the current provider.
2671+
provider = providers[providerNum]
2672+
}
2673+
2674+
key := tokens[1]
2675+
switch key {
2676+
case "TYPE":
2677+
provider.Type = v.Value
2678+
case "API_KEY":
2679+
provider.APIKey = v.Value
2680+
case "BASE_URL":
2681+
provider.BaseURL = v.Value
2682+
case "MODELS":
2683+
provider.Models = strings.Split(v.Value, ",")
2684+
}
2685+
providers[providerNum] = provider
2686+
}
2687+
for _, envVar := range environ {
2688+
tokens := strings.SplitN(envVar, "=", 2)
2689+
if len(tokens) != 2 {
2690+
continue
2691+
}
2692+
switch tokens[0] {
2693+
case "OPENAI_API_KEY":
2694+
providers = append(providers, codersdk.AIProviderConfig{
2695+
Type: "openai",
2696+
APIKey: tokens[1],
2697+
})
2698+
case "ANTHROPIC_API_KEY":
2699+
providers = append(providers, codersdk.AIProviderConfig{
2700+
Type: "anthropic",
2701+
APIKey: tokens[1],
2702+
})
2703+
case "GOOGLE_API_KEY":
2704+
providers = append(providers, codersdk.AIProviderConfig{
2705+
Type: "google",
2706+
APIKey: tokens[1],
2707+
})
2708+
}
2709+
}
2710+
return providers, nil
2711+
}
2712+
26152713
// ReadExternalAuthProvidersFromEnv is provided for compatibility purposes with
26162714
// the viper CLI.
26172715
func ReadExternalAuthProvidersFromEnv(environ []string) ([]codersdk.ExternalAuthConfig, error) {

cli/testdata/server-config.yaml.golden

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -519,6 +519,9 @@ client:
519519
# Support links to display in the top right drop down menu.
520520
# (default: <unset>, type: struct[[]codersdk.LinkConfig])
521521
supportLinks: []
522+
# Configure AI providers.
523+
# (default: <unset>, type: struct[codersdk.AIConfig])
524+
ai: {}
522525
# External Authentication providers.
523526
# (default: <unset>, type: struct[[]codersdk.ExternalAuthConfig])
524527
externalAuthProviders: []

coderd/ai/ai.go

Lines changed: 167 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,167 @@
1+
package ai
2+
3+
import (
4+
"context"
5+
6+
"github.com/anthropics/anthropic-sdk-go"
7+
anthropicoption "github.com/anthropics/anthropic-sdk-go/option"
8+
"github.com/kylecarbs/aisdk-go"
9+
"github.com/openai/openai-go"
10+
openaioption "github.com/openai/openai-go/option"
11+
"golang.org/x/xerrors"
12+
"google.golang.org/genai"
13+
14+
"github.com/coder/coder/v2/codersdk"
15+
)
16+
17+
// LanguageModel couples the codersdk model metadata with the
// provider-specific function that opens a completion stream for it.
type LanguageModel struct {
	codersdk.LanguageModel
	// StreamFunc starts a streaming completion against this model's provider.
	StreamFunc StreamFunc
}

// StreamOptions are the per-request parameters for a streaming completion.
type StreamOptions struct {
	// SystemPrompt, when non-empty, is injected as the system/instruction
	// message in a provider-specific way (see ModelsFromConfig).
	SystemPrompt string
	// Model is the provider-specific model identifier.
	Model    string
	Messages []aisdk.Message
	// Thinking toggles extended reasoning. NOTE(review): not read by the
	// stream functions in this file — confirm it is consumed elsewhere.
	Thinking bool
	Tools    []aisdk.Tool
}

// StreamFunc opens a completion stream for the given options and returns the
// provider's response as an aisdk.DataStream.
type StreamFunc func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error)

// LanguageModels is a map of language model ID to language model.
type LanguageModels map[string]LanguageModel
34+
35+
// ModelsFromConfig builds the set of usable language models from the given
// provider configurations. For each provider it constructs an API client and
// a StreamFunc closure over that client; when a config lists no models, the
// provider's List API is called to auto-discover them. Returns an error for
// an unsupported provider type or a failed model listing.
//
// If two providers expose the same model ID, the later config overwrites the
// earlier entry in the returned map.
func ModelsFromConfig(ctx context.Context, configs []codersdk.AIProviderConfig) (LanguageModels, error) {
	models := make(LanguageModels)

	for _, config := range configs {
		var streamFunc StreamFunc

		switch config.Type {
		case "openai":
			opts := []openaioption.RequestOption{
				openaioption.WithAPIKey(config.APIKey),
			}
			// An empty BaseURL keeps the SDK's default endpoint.
			if config.BaseURL != "" {
				opts = append(opts, openaioption.WithBaseURL(config.BaseURL))
			}
			client := openai.NewClient(opts...)
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				openaiMessages, err := aisdk.MessagesToOpenAI(options.Messages)
				if err != nil {
					return nil, err
				}
				tools := aisdk.ToolsToOpenAI(options.Tools)
				// OpenAI takes the system prompt as a leading chat message.
				if options.SystemPrompt != "" {
					openaiMessages = append([]openai.ChatCompletionMessageParamUnion{
						openai.SystemMessage(options.SystemPrompt),
					}, openaiMessages...)
				}

				return aisdk.OpenAIToDataStream(client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
					Messages:  openaiMessages,
					Model:     options.Model,
					Tools:     tools,
					MaxTokens: openai.Int(8192),
				})), nil
			}
			// Auto-discover models when none were configured. Note: the inner
			// `models` here shadows the outer LanguageModels map.
			if config.Models == nil {
				models, err := client.Models.List(ctx)
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Data))
				for i, model := range models.Data {
					config.Models[i] = model.ID
				}
			}
		case "anthropic":
			client := anthropic.NewClient(anthropicoption.WithAPIKey(config.APIKey))
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				anthropicMessages, systemMessage, err := aisdk.MessagesToAnthropic(options.Messages)
				if err != nil {
					return nil, err
				}
				// An explicit SystemPrompt replaces any system message derived
				// from the conversation.
				if options.SystemPrompt != "" {
					systemMessage = []anthropic.TextBlockParam{
						*anthropic.NewTextBlock(options.SystemPrompt).OfRequestTextBlock,
					}
				}
				return aisdk.AnthropicToDataStream(client.Messages.NewStreaming(ctx, anthropic.MessageNewParams{
					Messages:  anthropicMessages,
					Model:     options.Model,
					System:    systemMessage,
					Tools:     aisdk.ToolsToAnthropic(options.Tools),
					MaxTokens: 8192,
				})), nil
			}
			// Auto-discover models when none were configured (inner `models`
			// shadows the outer map, as above).
			if config.Models == nil {
				models, err := client.Models.List(ctx, anthropic.ModelListParams{})
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Data))
				for i, model := range models.Data {
					config.Models[i] = model.ID
				}
			}
		case "google":
			// NOTE(review): config.BaseURL is ignored for the Google backend —
			// confirm whether that is intentional.
			client, err := genai.NewClient(ctx, &genai.ClientConfig{
				APIKey:  config.APIKey,
				Backend: genai.BackendGeminiAPI,
			})
			if err != nil {
				return nil, err
			}
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				googleMessages, err := aisdk.MessagesToGoogle(options.Messages)
				if err != nil {
					return nil, err
				}
				tools, err := aisdk.ToolsToGoogle(options.Tools)
				if err != nil {
					return nil, err
				}
				// Google takes the system prompt as a separate
				// SystemInstruction content block rather than a chat message.
				var systemInstruction *genai.Content
				if options.SystemPrompt != "" {
					systemInstruction = &genai.Content{
						Parts: []*genai.Part{
							genai.NewPartFromText(options.SystemPrompt),
						},
						Role: "model",
					}
				}
				return aisdk.GoogleToDataStream(client.Models.GenerateContentStream(ctx, options.Model, googleMessages, &genai.GenerateContentConfig{
					SystemInstruction: systemInstruction,
					Tools:             tools,
				})), nil
			}
			// Auto-discover models when none were configured.
			if config.Models == nil {
				models, err := client.Models.List(ctx, &genai.ListModelsConfig{})
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Items))
				for i, model := range models.Items {
					config.Models[i] = model.Name
				}
			}
		default:
			return nil, xerrors.Errorf("unsupported model type: %s", config.Type)
		}

		// Register every model of this provider under its ID; all of a
		// provider's models share the same streamFunc (and thus client).
		for _, model := range config.Models {
			models[model] = LanguageModel{
				LanguageModel: codersdk.LanguageModel{
					ID:          model,
					DisplayName: model,
					Provider:    config.Type,
				},
				StreamFunc: streamFunc,
			}
		}
	}

	return models, nil
}

0 commit comments

Comments
 (0)