6 changes: 5 additions & 1 deletion .editorconfig
@@ -81,4 +81,8 @@ dotnet_style_coalesce_expression = true:suggestion
 dotnet_style_null_propagation = true:suggestion
 
 [*.{cs,vb}]
-dotnet_sort_system_directives_first = true
+dotnet_sort_system_directives_first = true
+
+[*.cs]
+# Some ExcelDna calls are not supported on all platforms: "This call site is reachable on all platforms. '...' is only supported on: 'Windows' 7.0 and later."
+dotnet_diagnostic.CA1416.severity = none
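
For context, CA1416 is Roslyn's platform-compatibility analyzer: it flags calls to APIs annotated as Windows-only from code whose target framework does not guarantee Windows. A minimal sketch of the pattern that triggers the warning quoted above (the `ClipboardApi` type is illustrative, not from ExcelDna or Cellm):

```csharp
using System.Runtime.Versioning;

// Hypothetical Windows-only API, standing in for the ExcelDna calls mentioned above.
[SupportedOSPlatform("windows7.0")]
static class ClipboardApi
{
    public static void Copy(string text) { /* e.g. P/Invoke into Win32 */ }
}

static class Demo
{
    public static void Run()
    {
        // Without a Windows-specific TFM or an OperatingSystem.IsWindows() guard,
        // the analyzer reports: CA1416: This call site is reachable on all platforms.
        // 'ClipboardApi.Copy(string)' is only supported on: 'windows' 7.0 and later.
        ClipboardApi.Copy("hello");
    }
}
```

Suppressing the rule for all `.cs` files trades that safety check for a quieter build, which is defensible here because the add-in only runs inside Excel on Windows.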
4 changes: 4 additions & 0 deletions .github/workflows/ci.yml
@@ -4,9 +4,11 @@ on:
   push:
     branches:
       - main
+      - develop
   pull_request:
     branches:
       - main
+      - develop
   workflow_dispatch:
 
 jobs:
@@ -16,6 +18,8 @@ jobs:
       NUGET_PACKAGES: ${{ github.workspace }}\.nuget\packages
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
       - name: Setup .NET
         uses: actions/setup-dotnet@v4
         with:
2 changes: 1 addition & 1 deletion src/Cellm.Models/Behaviors/ToolBehavior.cs
@@ -13,7 +13,7 @@ public async Task<TResponse> Handle(TRequest request, RequestHandlerDelegate<TRe
         if (providerConfiguration.CurrentValue.EnableTools.Any(t => t.Value))
         {
             request.Prompt.Options.Tools = functions
-                .Where(f => providerConfiguration.CurrentValue.EnableTools[f.Metadata.Name])
+                .Where(f => providerConfiguration.CurrentValue.EnableTools[f.Name])
                 .ToList<AITool>();
         }
 
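
This change tracks a Microsoft.Extensions.AI rename: `AIFunction` now exposes its name directly via `Name` rather than through `Metadata.Name`. A minimal sketch of the new shape (the `GetWeather` function is illustrative):

```csharp
using Microsoft.Extensions.AI;

// AIFunctionFactory builds an AIFunction from any delegate.
AIFunction getWeather = AIFunctionFactory.Create(
    (string city) => $"Sunny in {city}",
    name: "GetWeather");

// Previously the name hung off a metadata object (getWeather.Metadata.Name);
// now it is a first-class property, which is what the lookup above keys on.
Console.WriteLine(getWeather.Name); // "GetWeather"
```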
1 change: 0 additions & 1 deletion src/Cellm.Models/Cellm.Models.projitems
@@ -20,7 +20,6 @@
     <Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicRequest.cs" />
     <Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicRequestHandler.cs" />
     <Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicResponse.cs" />
-    <Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\Models.cs" />
     <Compile Include="$(MSBuildThisFileDirectory)Providers\DeepSeek\DeepSeekConfiguration.cs" />
     <Compile Include="$(MSBuildThisFileDirectory)Providers\IModelRequest.cs" />
     <Compile Include="$(MSBuildThisFileDirectory)Providers\IModelRequestHandler.cs" />
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/Anthropic/AnthropicRequest.cs
@@ -2,4 +2,4 @@
 
 namespace Cellm.Models.Providers.Anthropic;
 
-internal record AnthropicRequest(Prompt Prompt, Uri? BaseAddress = null) : IModelRequest<AnthropicResponse>;
+internal record AnthropicRequest(Prompt Prompt) : IModelRequest<AnthropicResponse>;
85 changes: 11 additions & 74 deletions src/Cellm.Models/Providers/Anthropic/AnthropicRequestHandler.cs
@@ -1,89 +1,26 @@
-using System.Text;
-using System.Text.Encodings.Web;
-using System.Text.Json;
-using System.Text.Json.Serialization;
-using Cellm.Models.Exceptions;
-using Cellm.Models.Prompts;
+using Cellm.Models.Prompts;
 using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Options;
 
 namespace Cellm.Models.Providers.Anthropic;
 
-internal class AnthropicRequestHandler : IModelRequestHandler<AnthropicRequest, AnthropicResponse>
+internal class AnthropicRequestHandler(
+    [FromKeyedServices(Provider.Anthropic)] IChatClient chatClient,
+    IOptionsMonitor<ProviderConfiguration> providerConfiguration)
+    : IModelRequestHandler<AnthropicRequest, AnthropicResponse>
 {
-    private readonly AnthropicConfiguration _anthropicConfiguration;
-    private readonly ProviderConfiguration _providerConfiguration;
-    private readonly HttpClient _httpClient;
-    private readonly Serde _serde;
-
-    public AnthropicRequestHandler(
-        IOptionsMonitor<AnthropicConfiguration> anthropicConfiguration,
-        IOptionsMonitor<ProviderConfiguration> providerConfiguration,
-        HttpClient httpClient,
-        Serde serde)
-    {
-        _anthropicConfiguration = anthropicConfiguration.CurrentValue;
-        _providerConfiguration = providerConfiguration.CurrentValue;
-        _httpClient = httpClient;
-        _serde = serde;
-    }
-
     public async Task<AnthropicResponse> Handle(AnthropicRequest request, CancellationToken cancellationToken)
     {
-        const string path = "/v1/messages";
-        var address = request.BaseAddress is null ? new Uri(path, UriKind.Relative) : new Uri(request.BaseAddress, path);
-
-        var json = Serialize(request);
-        var jsonAsStringContent = new StringContent(json, Encoding.UTF8, "application/json");
-        jsonAsStringContent.Headers.Add("x-api-key", _anthropicConfiguration.ApiKey);
-
-        var response = await _httpClient.PostAsync(address, jsonAsStringContent, cancellationToken);
-        var responseBodyAsString = await response.Content.ReadAsStringAsync(cancellationToken);
-
-        if (!response.IsSuccessStatusCode)
-        {
-            throw new HttpRequestException($"{nameof(AnthropicRequest)} failed: {responseBodyAsString}", null, response.StatusCode);
-        }
-
-        return Deserialize(request, responseBodyAsString);
-    }
-
-    public string Serialize(AnthropicRequest request)
-    {
-        var requestBody = new AnthropicRequestBody
-        {
-            System = request.Prompt.Messages.Where(x => x.Role == ChatRole.System).First().Text,
-            Messages = request.Prompt.Messages.Where(x => x.Role != ChatRole.System).Select(x => new AnthropicMessage { Content = x.Text, Role = x.Role.ToString().ToLower() }).ToList(),
-            Model = request.Prompt.Options.ModelId ?? _anthropicConfiguration.DefaultModel,
-            Temperature = request.Prompt.Options.Temperature ?? _providerConfiguration.DefaultTemperature,
-            MaxTokens = 2048
-        };
-
-        return _serde.Serialize(requestBody, new JsonSerializerOptions
-        {
-            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
-            Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
-            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
-        });
-    }
-
-    public AnthropicResponse Deserialize(AnthropicRequest request, string response)
-    {
-        var responseBody = _serde.Deserialize<AnthropicResponseBody>(response, new JsonSerializerOptions
-        {
-            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
-            Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
-            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
-        });
+        // Required by Anthropic API
+        request.Prompt.Options.MaxOutputTokens ??= providerConfiguration.CurrentValue.MaxOutputTokens;
 
-        var assistantMessage = responseBody?.Content?.Last()?.Text ?? throw new CellmModelException("#EMPTY_RESPONSE?");
+        var chatResponse = await chatClient.GetResponseAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
 
         var prompt = new PromptBuilder(request.Prompt)
-            .AddAssistantMessage(assistantMessage)
+            .AddMessage(chatResponse.Messages.First())
             .Build();
 
-        var chatCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, assistantMessage));
-
-        return new AnthropicResponse(prompt, chatCompletion);
+        return new AnthropicResponse(prompt, chatResponse);
     }
 }
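
The rewritten handler delegates transport, authentication, and serialization to the keyed `IChatClient` registered for `Provider.Anthropic` (see `ServiceCollectionExtensions` below), so the hand-rolled HTTP and JSON code could be deleted wholesale. A minimal sketch of the same call pattern in isolation, assuming an already-constructed `IChatClient` (the prompt text and token limit are illustrative):

```csharp
using Microsoft.Extensions.AI;

static async Task<string> AskAsync(IChatClient chatClient, CancellationToken ct)
{
    List<ChatMessage> messages =
    [
        new(ChatRole.System, "You are a helpful assistant."),
        new(ChatRole.User, "Summarize the value in cell A1."),
    ];

    // Anthropic requires an explicit output limit; mirror the handler's fallback.
    var options = new ChatOptions { MaxOutputTokens = 8192 };

    ChatResponse response = await chatClient.GetResponseAsync(messages, options, ct);

    // The assistant's reply is the first message on the response.
    return response.Messages.First().Text;
}
```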
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/Anthropic/AnthropicResponse.cs
@@ -3,4 +3,4 @@
 
 namespace Cellm.Models.Providers.Anthropic;
 
-internal record AnthropicResponse(Prompt Prompt, ChatCompletion ChatCompletion) : IModelResponse;
+internal record AnthropicResponse(Prompt Prompt, ChatResponse ChatResponse) : IModelResponse;
59 changes: 0 additions & 59 deletions src/Cellm.Models/Providers/Anthropic/Models.cs

This file was deleted.

2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/IModelResponse.cs
@@ -7,5 +7,5 @@ public interface IModelResponse
 {
     Prompt Prompt { get; }
 
-    ChatCompletion ChatCompletion { get; }
+    ChatResponse ChatResponse { get; }
 }
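
Every provider response implements this interface, so the property rename here is what forces the mechanical `ChatCompletion` → `ChatResponse` updates in the provider records below. A sketch of a provider-agnostic consumer, assuming Cellm's `IModelResponse` as defined above (the helper itself is illustrative, not from the codebase):

```csharp
using Cellm.Models.Providers;
using Microsoft.Extensions.AI;

// Illustrative helper: works for any provider because it only depends
// on the shared IModelResponse contract, not on a concrete record.
static string GetAssistantText(IModelResponse response) =>
    response.ChatResponse.Messages.First().Text;
```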
6 changes: 3 additions & 3 deletions src/Cellm.Models/Providers/Ollama/OllamaRequestHandler.cs
@@ -28,15 +28,15 @@ public async Task<OllamaResponse> Handle(OllamaRequest request, CancellationToke
             response.EnsureSuccessStatusCode();
         }
 
-        var chatCompletion = await chatClient.CompleteAsync(
+        var chatResponse = await chatClient.GetResponseAsync(
             request.Prompt.Messages,
             request.Prompt.Options,
             cancellationToken);
 
         var prompt = new PromptBuilder(request.Prompt)
-            .AddMessage(chatCompletion.Message)
+            .AddMessage(chatResponse.Messages.First())
             .Build();
 
-        return new OllamaResponse(prompt, chatCompletion);
+        return new OllamaResponse(prompt, chatResponse);
     }
 }
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/Ollama/OllamaResponse.cs
@@ -3,4 +3,4 @@
 
 namespace Cellm.Models.Providers.Ollama;
 
-internal record OllamaResponse(Prompt Prompt, ChatCompletion ChatCompletion) : IModelResponse;
+internal record OllamaResponse(Prompt Prompt, ChatResponse ChatResponse) : IModelResponse;
6 changes: 3 additions & 3 deletions src/Cellm.Models/Providers/OpenAi/OpenAiRequestHandler.cs
@@ -10,12 +10,12 @@ internal class OpenAiRequestHandler([FromKeyedServices(Provider.OpenAi)] IChatCl
 
     public async Task<OpenAiResponse> Handle(OpenAiRequest request, CancellationToken cancellationToken)
     {
-        var chatCompletion = await chatClient.CompleteAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
+        var chatResponse = await chatClient.GetResponseAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
 
         var prompt = new PromptBuilder(request.Prompt)
-            .AddMessage(chatCompletion.Message)
+            .AddMessage(chatResponse.Messages.First())
             .Build();
 
-        return new OpenAiResponse(prompt, chatCompletion);
+        return new OpenAiResponse(prompt, chatResponse);
     }
 }
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/OpenAi/OpenAiResponse.cs
@@ -3,4 +3,4 @@
 
 namespace Cellm.Models.Providers.OpenAi;
 
-internal record OpenAiResponse(Prompt Prompt, ChatCompletion ChatCompletion) : IModelResponse;
+internal record OpenAiResponse(Prompt Prompt, ChatResponse ChatResponse) : IModelResponse;
6 changes: 3 additions & 3 deletions src/Cellm.Models/Providers/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs
@@ -13,12 +13,12 @@ public async Task<OpenAiCompatibleResponse> Handle(OpenAiCompatibleRequest reque
     {
         var chatClient = ServiceLocator.ServiceProvider.GetRequiredKeyedService<IChatClient>(request.Provider);
 
-        var chatCompletion = await chatClient.CompleteAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
+        var chatResponse = await chatClient.GetResponseAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
 
         var prompt = new PromptBuilder(request.Prompt)
-            .AddMessage(chatCompletion.Message)
+            .AddMessage(chatResponse.Messages.First())
             .Build();
 
-        return new OpenAiCompatibleResponse(prompt, chatCompletion);
+        return new OpenAiCompatibleResponse(prompt, chatResponse);
     }
 }
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/OpenAiCompatible/OpenAiCompatibleResponse.cs
@@ -3,4 +3,4 @@
 
 namespace Cellm.Models.Providers.OpenAiCompatible;
 
-internal record OpenAiCompatibleResponse(Prompt Prompt, ChatCompletion ChatCompletion) : IModelResponse;
+internal record OpenAiCompatibleResponse(Prompt Prompt, ChatResponse ChatResponse) : IModelResponse;
2 changes: 2 additions & 0 deletions src/Cellm.Models/Providers/ProviderConfiguration.cs
@@ -8,6 +8,8 @@ public class ProviderConfiguration : IProviderConfiguration
 
     public double DefaultTemperature { get; init; }
 
+    public int MaxOutputTokens { get; init; } = 8192;
+
    public Dictionary<string, bool> EnableTools { get; init; } = [];
 
     public bool EnableCache { get; init; } = true;
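
`MaxOutputTokens` is the fallback the Anthropic handler applies when a prompt sets no limit of its own. A minimal sketch of how such a property is bound and overridden through `Microsoft.Extensions.Options` (the in-memory override is illustrative; Cellm's actual appsettings are not shown in this diff):

```csharp
using Cellm.Models.Providers;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        // Omit this key and the property keeps its 8192 default.
        ["ProviderConfiguration:MaxOutputTokens"] = "4096",
    })
    .Build();

var services = new ServiceCollection();
services.Configure<ProviderConfiguration>(configuration.GetSection(nameof(ProviderConfiguration)));

using var serviceProvider = services.BuildServiceProvider();
var options = serviceProvider.GetRequiredService<IOptionsMonitor<ProviderConfiguration>>();
Console.WriteLine(options.CurrentValue.MaxOutputTokens); // 4096
```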
49 changes: 10 additions & 39 deletions src/Cellm.Models/ServiceCollectionExtensions.cs
@@ -1,6 +1,7 @@
 using System.ClientModel;
 using System.ClientModel.Primitives;
 using System.Threading.RateLimiting;
+using Anthropic.SDK;
 using Cellm.Models.Providers;
 using Cellm.Models.Providers.Anthropic;
 using Cellm.Models.Providers.DeepSeek;
@@ -10,7 +11,6 @@
 using Cellm.Models.Providers.OpenAi;
 using Cellm.Models.Providers.OpenAiCompatible;
 using Cellm.Models.Resilience;
-using MediatR;
 using Microsoft.Extensions.AI;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.DependencyInjection;
@@ -118,47 +118,18 @@ public static IServiceCollection AddRetryHttpClient(this IServiceCollection serv
 
     public static IServiceCollection AddAnthropicChatClient(this IServiceCollection services, IConfiguration configuration)
     {
-        var anthropicConfiguration = configuration
-            .GetSection(nameof(AnthropicConfiguration))
-            .Get<AnthropicConfiguration>()
-            ?? throw new ArgumentException(nameof(AnthropicConfiguration));
-
-        var resilienceConfiguration = configuration
-            .GetSection(nameof(ResilienceConfiguration))
-            .Get<ResilienceConfiguration>()
-            ?? throw new ArgumentException(nameof(ResilienceConfiguration));
-
         services
-            .AddHttpClient<IRequestHandler<AnthropicRequest, AnthropicResponse>, AnthropicRequestHandler>(anthropicHttpClient =>
-            {
-                anthropicHttpClient.BaseAddress = anthropicConfiguration.BaseAddress;
-                anthropicHttpClient.DefaultRequestHeaders.Add("anthropic-version", anthropicConfiguration.Version);
-                anthropicHttpClient.Timeout = TimeSpan.FromSeconds(resilienceConfiguration.RetryConfiguration.HttpTimeoutInSeconds);
-            })
-            .AddResilienceHandler($"{nameof(AnthropicRequestHandler)}RetryHttpClientHandler", builder =>
+            .AddTransientKeyedChatClient(Provider.Anthropic, serviceProvider =>
             {
-                _ = builder
-                    .AddRetry(new RetryStrategyOptions<HttpResponseMessage>
-                    {
-                        ShouldHandle = args => ValueTask.FromResult(RetryHttpClientHelpers.ShouldRetry(args.Outcome)),
-                        BackoffType = DelayBackoffType.Exponential,
-                        UseJitter = true,
-                        MaxRetryAttempts = resilienceConfiguration.RetryConfiguration.MaxRetryAttempts,
-                        Delay = TimeSpan.FromSeconds(resilienceConfiguration.RetryConfiguration.DelayInSeconds),
-                    })
-                    .AddCircuitBreaker(new CircuitBreakerStrategyOptions<HttpResponseMessage>
-                    {
-                        ShouldHandle = args => ValueTask.FromResult(RetryHttpClientHelpers.ShouldBreakCircuit(args.Outcome)),
-                        FailureRatio = resilienceConfiguration.CircuitBreakerConfiguration.FailureRatio,
-                        SamplingDuration = TimeSpan.FromSeconds(resilienceConfiguration.CircuitBreakerConfiguration.SamplingDurationInSeconds),
-                        MinimumThroughput = resilienceConfiguration.CircuitBreakerConfiguration.MinimumThroughput,
-                        BreakDuration = TimeSpan.FromSeconds(resilienceConfiguration.CircuitBreakerConfiguration.BreakDurationInSeconds),
-                    })
-                    .AddTimeout(TimeSpan.FromSeconds(resilienceConfiguration.RetryConfiguration.HttpTimeoutInSeconds))
-                    .Build();
-            });
+                var anthropicConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<AnthropicConfiguration>>();
+                var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");
+
+                // TODO: Add IChatClient-compatible Anthropic client
+                return new AnthropicClient(anthropicConfiguration.CurrentValue.ApiKey, resilientHttpClient)
+                    .Messages
+                    .AsBuilder()
+                    .Build();
+            })
+            .UseFunctionInvocation();
 
         return services;
     }
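
With the bespoke HTTP pipeline gone, Anthropic is registered the same way as the other providers: a keyed `IChatClient` built from Anthropic.SDK's `Messages` endpoint, with `UseFunctionInvocation()` layering automatic tool-call handling on top. A minimal sketch of resolving and calling that keyed client, assuming the registrations above have run (the prompt is illustrative):

```csharp
using Cellm.Models.Providers;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

static async Task<string> AskAnthropicAsync(IServiceProvider serviceProvider, CancellationToken ct)
{
    // Resolve the pipeline registered under the Provider.Anthropic key.
    var chatClient = serviceProvider.GetRequiredKeyedService<IChatClient>(Provider.Anthropic);

    var response = await chatClient.GetResponseAsync(
        [new ChatMessage(ChatRole.User, "What is 2 + 2?")],
        new ChatOptions { MaxOutputTokens = 8192 },
        ct);

    return response.Messages.First().Text;
}
```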