Thanks for visiting codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions docs/content/reference/observer.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
---
title: "Observer"
description:
linkTitle: "Observer"
menu: { main: { parent: 'reference', weight: -92 } }
---

## Observer

The `Observer` interface helps to observe, debug, and analyze LLM applications. This component tracks metrics (e.g. LLM cost, latency, quality) and provides insights through external dashboards and data exports. To enable tracing of an LLM application, create an observer and pass it to the LLM instance.

### Supported platform

* [Langfuse](https://langfuse.com/)

### Usage

```go

o := langfuse.New(context.Background())
trace, err := o.Trace(&observer.Trace{Name: "Who are you"})
if err != nil {
panic(err)
}

openaillm := openai.New().WithObserver(o, trace.ID)

t := thread.New().AddMessage(
thread.NewUserMessage().AddContent(
thread.NewTextContent("Hello, who are you?"),
),
)

err = openaillm.Generate(context.Background(), t)
if err != nil {
panic(err)
}

o.Flush(context.Background())
```
119 changes: 119 additions & 0 deletions examples/observer/langfuse/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
package main

import (
"context"

"github.com/henomis/lingoose/observer"
"github.com/henomis/lingoose/observer/langfuse"
"github.com/henomis/lingoose/thread"
"github.com/henomis/lingoose/types"
)

func main() {
	ctx := context.Background()

	// The Langfuse client buffers observation events and ships them on Flush.
	obs := langfuse.New(ctx)

	// Root trace: every span, generation, event and score below hangs off it.
	trace, err := obs.Trace(&observer.Trace{Name: "trace"})
	if err != nil {
		panic(err)
	}

	// A span groups the work performed for this trace; closed via SpanEnd below.
	span, err := obs.Span(&observer.Span{
		Name:    "span",
		TraceID: trace.ID,
	})
	if err != nil {
		panic(err)
	}

	// Record an LLM call: model, sampling parameters and the prompt messages.
	generation, err := obs.Generation(&observer.Generation{
		ParentID: span.ID,
		TraceID:  trace.ID,
		Name:     "generation",
		Model:    "gpt-3.5-turbo",
		ModelParameters: types.M{
			"maxTokens":   "1000",
			"temperature": "0.9",
		},
		Input: []*thread.Message{
			{
				Role: thread.RoleSystem,
				Contents: []*thread.Content{
					{
						Type: thread.ContentTypeText,
						Data: "You are a helpful assistant.",
					},
				},
			},
			{
				Role: thread.RoleUser,
				Contents: []*thread.Content{
					{
						Type: thread.ContentTypeText,
						Data: "Please generate a summary of the following documents \nThe engineering department defined the following OKR goals...\nThe marketing department defined the following OKR goals...",
					},
				},
			},
		},
		Metadata: types.M{
			"key": "value",
		},
	})
	if err != nil {
		panic(err)
	}

	// Attach a point-in-time event underneath the generation.
	_, err = obs.Event(&observer.Event{
		ParentID: generation.ID,
		TraceID:  trace.ID,
		Name:     "event",
		Metadata: types.M{
			"key": "value",
		},
	})
	if err != nil {
		panic(err)
	}

	// Close the generation with the (simulated) assistant answer as output.
	generation.Output = &thread.Message{
		Role: thread.RoleAssistant,
		Contents: []*thread.Content{
			{
				Type: thread.ContentTypeText,
				Data: "The Q3 OKRs contain goals for multiple teams...",
			},
		},
	}
	_, err = obs.GenerationEnd(generation)
	if err != nil {
		panic(err)
	}

	// Score the trace so quality can be tracked in the dashboard.
	_, err = obs.Score(&observer.Score{
		TraceID: trace.ID,
		Name:    "score",
		Value:   0.9,
	})
	if err != nil {
		panic(err)
	}

	_, err = obs.SpanEnd(span)
	if err != nil {
		panic(err)
	}

	// Deliver all buffered events to the Langfuse backend before exiting.
	obs.Flush(ctx)
}
33 changes: 33 additions & 0 deletions examples/observer/langfuse/openai/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package main

import (
"context"

"github.com/henomis/lingoose/llm/openai"
"github.com/henomis/lingoose/observer"
"github.com/henomis/lingoose/observer/langfuse"
"github.com/henomis/lingoose/thread"
)

func main() {
	ctx := context.Background()

	// Create the Langfuse observer and open a root trace for this run.
	obs := langfuse.New(ctx)
	trace, err := obs.Trace(&observer.Trace{Name: "Who are you"})
	if err != nil {
		panic(err)
	}

	// Wire the observer into the LLM so each Generate call is traced
	// under the trace created above.
	llm := openai.New().WithObserver(obs, trace.ID)

	conversation := thread.New().AddMessage(
		thread.NewUserMessage().AddContent(
			thread.NewTextContent("Hello, who are you?"),
		),
	)

	if err = llm.Generate(ctx, conversation); err != nil {
		panic(err)
	}

	// Ship buffered observation events to Langfuse before exiting.
	obs.Flush(ctx)
}
7 changes: 3 additions & 4 deletions go.mod
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
module github.com/henomis/lingoose

go 1.21

toolchain go1.21.1
go 1.21.1

require github.com/mitchellh/mapstructure v1.5.0

require (
github.com/RediSearch/redisearch-go/v2 v2.1.1
github.com/google/uuid v1.3.0
github.com/google/uuid v1.6.0
github.com/henomis/cohere-go v1.1.2
github.com/henomis/langfuse-go v0.0.3
github.com/henomis/milvus-go v0.0.4
github.com/henomis/pinecone-go/v2 v2.0.0
github.com/henomis/qdrant-go v1.1.0
Expand Down
6 changes: 4 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws=
github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/henomis/cohere-go v1.1.2 h1:rzEA1JRm26RnaQValoVeVQ1y1nTofuhr/z/fPyhUW/4=
github.com/henomis/cohere-go v1.1.2/go.mod h1:z+UIgBbNCnLH5M47FYqg4h3CDWxjUv2VDfEwdTb95MU=
github.com/henomis/langfuse-go v0.0.3 h1:Z5Mqlnj1fsok7eAT7jt+N4tegjbhY5HnZQ7wu1iVKss=
github.com/henomis/langfuse-go v0.0.3/go.mod h1:gSRuO3nvjAvk/mgmb7b+9BcoN9s64GvXeaSN7PfVEKQ=
github.com/henomis/milvus-go v0.0.4 h1:ArddXRJx/EGdQ75gB7TyEzD4Z4BqXzW16p8jRcjJOhs=
github.com/henomis/milvus-go v0.0.4/go.mod h1:nZ/NvDOLoGl7FQrYSm0JfeefPBVXph9PpE4y3lPpbj4=
github.com/henomis/pinecone-go/v2 v2.0.0 h1:HdAX0nGzBagL6ubn17utIz3FGwXfignBovZesOjXiEI=
Expand Down
79 changes: 78 additions & 1 deletion llm/openai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"strings"

"github.com/henomis/lingoose/llm/cache"
"github.com/henomis/lingoose/observer"
"github.com/henomis/lingoose/thread"
"github.com/henomis/lingoose/types"
"github.com/mitchellh/mapstructure"
Expand All @@ -26,6 +27,13 @@ var threadRoleToOpenAIRole = map[thread.Role]string{
thread.RoleTool: "tool",
}

// Observer is the subset of the observability API this package needs to
// trace LLM calls: it opens/closes spans and records generations with
// their eventual output (e.g. satisfied by the langfuse observer).
type Observer interface {
	// Span opens a span; SpanEnd closes a previously opened span.
	Span(*observer.Span) (*observer.Span, error)
	SpanEnd(*observer.Span) (*observer.Span, error)
	// Generation records an LLM call; GenerationEnd finalizes it once the
	// output has been attached.
	Generation(*observer.Generation) (*observer.Generation, error)
	GenerationEnd(*observer.Generation) (*observer.Generation, error)
}

type OpenAI struct {
openAIClient *openai.Client
model Model
Expand All @@ -37,6 +45,8 @@ type OpenAI struct {
streamCallbackFn StreamCallback
toolChoice *string
cache *cache.Cache
observer Observer
observerTraceID string
}

// WithModel sets the model to use for the OpenAI instance.
Expand Down Expand Up @@ -95,6 +105,12 @@ func (o *OpenAI) WithCache(cache *cache.Cache) *OpenAI {
return o
}

// WithObserver sets the observer used to trace calls made through this
// instance, attaching them to the trace identified by traceID.
// It returns the receiver to allow fluent configuration, matching the
// other With* options.
func (o *OpenAI) WithObserver(obs Observer, traceID string) *OpenAI {
	// Parameter renamed from "observer" to avoid shadowing the imported
	// observer package within this method.
	o.observer = obs
	o.observerTraceID = traceID
	return o
}

// SetStop sets the stop sequences for the completion.
func (o *OpenAI) SetStop(stop []string) {
o.stop = stop
Expand Down Expand Up @@ -186,16 +202,32 @@ func (o *OpenAI) Generate(ctx context.Context, t *thread.Thread) error {
chatCompletionRequest.ToolChoice = o.getChatCompletionRequestToolChoice()
}

var span *observer.Span
var generation *observer.Generation

if o.observer != nil {
span, generation, err = o.startObserveGeneration(t)
if err != nil {
return fmt.Errorf("%w: %w", ErrOpenAIChat, err)
}
}

if o.streamCallbackFn != nil {
err = o.stream(ctx, t, chatCompletionRequest)
} else {
err = o.generate(ctx, t, chatCompletionRequest)
}

if err != nil {
return err
}

if o.observer != nil {
err = o.stopObserveGeneration(span, generation, t)
if err != nil {
return fmt.Errorf("%w: %w", ErrOpenAIChat, err)
}
}

if o.cache != nil {
err = o.setCache(ctx, t, cacheResult)
if err != nil {
Expand Down Expand Up @@ -409,3 +441,48 @@ func (o *OpenAI) callTools(toolCalls []openai.ToolCall) []*thread.Message {

return messages
}

// startObserveGeneration opens an "openai" span on the configured observer
// and, nested under it, a generation recording the model, its sampling
// parameters and the thread messages used as input. It returns both so the
// caller can close them once the completion finishes.
func (o *OpenAI) startObserveGeneration(t *thread.Thread) (*observer.Span, *observer.Generation, error) {
	span, err := o.observer.Span(&observer.Span{
		TraceID: o.observerTraceID,
		Name:    "openai",
	})
	if err != nil {
		return nil, nil, err
	}

	// Describe the LLM call before it runs; the output is attached later
	// by stopObserveGeneration.
	pending := &observer.Generation{
		TraceID:  o.observerTraceID,
		ParentID: span.ID,
		Name:     fmt.Sprintf("openai-%s", o.model),
		Model:    string(o.model),
		ModelParameters: types.M{
			"maxTokens":   o.maxTokens,
			"temperature": o.temperature,
		},
		Input: t.Messages,
	}

	generation, err := o.observer.Generation(pending)
	if err != nil {
		return nil, nil, err
	}

	return span, generation, nil
}

// stopObserveGeneration closes the span opened by startObserveGeneration,
// then records the thread's last message as the generation output and
// finalizes the generation on the observer.
func (o *OpenAI) stopObserveGeneration(
	span *observer.Span,
	generation *observer.Generation,
	t *thread.Thread,
) error {
	if _, err := o.observer.SpanEnd(span); err != nil {
		return err
	}

	// The completion appended the assistant reply to the thread; use it
	// as the generation's output.
	generation.Output = t.LastMessage()

	_, err := o.observer.GenerationEnd(generation)
	return err
}
Loading