diff --git a/cmd/venat/internal/agentloop/agentloop.go b/cmd/venat/internal/agentloop/agentloop.go new file mode 100644 index 00000000..f102d0a2 --- /dev/null +++ b/cmd/venat/internal/agentloop/agentloop.go @@ -0,0 +1,191 @@ +package agentloop + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "log/slog" + "sync" + "time" + + "github.com/google/uuid" + "github.com/openai/openai-go/v3" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +var ( + ErrSentinelAbort = errors.New("agentloop: tool requested the agent loop to abort") + ErrSentinelOkay = errors.New("agentloop: tool requested the agent loop to stop (status okay)") + + tokensUsed = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "venat", + Subsystem: "agentloop", + Name: "tokens_used", + }, []string{"model", "kind"}) +) + +type Impl struct { + Name, ID string + Tools map[string]Tool + SystemPrompt string + + model string + cli openai.Client + lg *slog.Logger + + messages []openai.ChatCompletionMessageParamUnion + lock sync.Mutex +} + +func New(name, id, systemPrompt, model string, tools []Tool, cli openai.Client, lg *slog.Logger) *Impl { + if id == "" { + id = uuid.Must(uuid.NewV7()).String() + } + + toolMap := map[string]Tool{} + for _, tool := range tools { + toolMap[tool.Name()] = tool + } + + result := Impl{ + Name: name, + ID: id, + Tools: toolMap, + SystemPrompt: systemPrompt, + model: model, + cli: cli, + lg: lg, + messages: []openai.ChatCompletionMessageParamUnion{ + openai.SystemMessage(systemPrompt), + }, + } + + return &result +} + +type Result struct { + Messages []openai.ChatCompletionMessageParamUnion + Response string + + PromptTokens int64 + PromptCachedTokens int64 + CompletionTokens int64 + CompletionReasoningTokens int64 +} + +func (i *Impl) Run(ctx context.Context, prompt string, opts ...func(*openai.ChatCompletionNewParams)) (*Result, error) { + i.lock.Lock() + defer i.lock.Unlock() + + lg := 
i.lg.With("component", "agentloop", "name", i.Name, "id", i.ID, "model", i.model)
+
+	i.messages = append(i.messages, openai.UserMessage(prompt))
+
+	failCount := 0
+	const failMax = 5
+
+	result := Result{}
+
+	for {
+		select {
+		case <-ctx.Done():
+			lg.Error("context done", "err", ctx.Err())
+			return &result, ctx.Err()
+		default:
+		}
+
+		params := openai.ChatCompletionNewParams{
+			Messages: i.messages,
+			Model:    openai.ChatModel(i.model),
+		}
+
+		for _, opt := range opts {
+			opt(&params)
+		}
+
+		for _, tool := range i.Tools {
+			params.Tools = append(params.Tools, openai.ChatCompletionFunctionTool(tool.Usage()))
+		}
+
+		completion, err := i.cli.Chat.Completions.New(ctx, params)
+		if err != nil {
+			failCount++
+
+			// >= rather than ==: defensive against failCount ever skipping past the limit.
+			if failCount >= failMax {
+				return &result, fmt.Errorf("can't reach remote API: %w", err)
+			}
+
+			lg.Error("can't get completion, sleeping and retrying", "err", err, "failCount", failCount, "failMax", failMax)
+			time.Sleep(time.Duration(failCount) * time.Second)
+			continue
+		}
+
+		tokensUsed.WithLabelValues(i.model, "input").Add(float64(completion.Usage.PromptTokens))
+		tokensUsed.WithLabelValues(i.model, "output").Add(float64(completion.Usage.CompletionTokens))
+		tokensUsed.WithLabelValues(i.model, "cached").Add(float64(completion.Usage.PromptTokensDetails.CachedTokens))
+		tokensUsed.WithLabelValues(i.model, "reasoning").Add(float64(completion.Usage.CompletionTokensDetails.ReasoningTokens))
+
+		result.PromptTokens += completion.Usage.PromptTokens
+		result.PromptCachedTokens += completion.Usage.PromptTokensDetails.CachedTokens
+		result.CompletionTokens += completion.Usage.CompletionTokens
+		result.CompletionReasoningTokens += completion.Usage.CompletionTokensDetails.ReasoningTokens
+
+		choice := completion.Choices[0]
+		resp := choice.Message
+
+		i.messages = append(i.messages, resp.ToParam())
+		result.Messages = i.messages
+
+		if resp.Content != "" {
+			result.Response = resp.Content
+		}
+
+		lg.Debug("got finish reason", "reason", choice.FinishReason)
+		if choice.FinishReason == "stop" {
+			return &result, nil
+		}
+
+		toolCalls := resp.ToolCalls
+
+		for _, tc := range toolCalls {
+			lg := lg.With("tool", tc.Function.Name, "toolcall_id", tc.ID)
+			tool, ok := i.Tools[tc.Function.Name]
+			if !ok {
+				lg.Error("AI model chose tool that did not exist, asking it to try again")
+				// Must be a tool-role message bound to tc.ID: every tool_call_id in an
+				// assistant message needs a matching tool response or the next API call fails.
+				i.messages = append(i.messages, openai.ToolMessage(fmt.Sprintf("Tool %q does not exist, please try again.", tc.Function.Name), tc.ID))
+				continue
+			}
+
+			args := []byte(tc.Function.Arguments)
+			if err := tool.Valid(args); err != nil {
+				lg.Error("AI model produced invalid arguments", "err", err)
+				// Same as above: report the validation failure as the tool call's response.
+				i.messages = append(i.messages, openai.ToolMessage(fmt.Sprintf("When calling tool %q, you got an argument validation error: %v", tool.Name(), err), tc.ID))
+				continue
+			}
+
+			lg.Debug("calling tool", "args", json.RawMessage(args))
+
+			toolResult, err := tool.Run(ctx, args)
+			if err != nil {
+				switch {
+				case errors.Is(err, ErrSentinelOkay):
+					lg.Info("tool requested happy exit", "err", err)
+					return &result, err
+				case errors.Is(err, ErrSentinelAbort):
+					lg.Info("tool requested unhappy abort", "err", err)
+					return &result, err
+				default:
+					lg.Error("failed to run tool", "err", err)
+					i.messages = append(i.messages, openai.ToolMessage(fmt.Sprintf("internal error when running tool %q: %v", tool.Name(), err), tc.ID))
+					continue
+				}
+			}
+
+			lg.Debug("got response", "result", string(toolResult))
+
+			i.messages = append(i.messages, openai.ToolMessage(string(toolResult), tc.ID))
+		}
+	}
+}
diff --git a/cmd/venat/internal/agentloop/option.go b/cmd/venat/internal/agentloop/option.go
new file mode 100644
index 00000000..4329a212
--- /dev/null
+++ b/cmd/venat/internal/agentloop/option.go
@@ -0,0 +1,7 @@
+package agentloop
+
+import "github.com/openai/openai-go/v3"
+
+func EnableParallelToolCalling(params *openai.ChatCompletionNewParams) {
+	params.ParallelToolCalls = openai.Bool(true)
+}
diff --git a/cmd/venat/internal/agentloop/tool.go 
b/cmd/venat/internal/agentloop/tool.go
new file mode 100644
index 00000000..aebd0552
--- /dev/null
+++ b/cmd/venat/internal/agentloop/tool.go
@@ -0,0 +1,14 @@
+package agentloop
+
+import (
+	"context"
+
+	"github.com/openai/openai-go/v3"
+)
+
+type Tool interface {
+	Name() string
+	Usage() openai.FunctionDefinitionParam
+	Valid(data []byte) (err error)
+	Run(ctx context.Context, data []byte) ([]byte, error)
+}
diff --git a/cmd/venat/internal/models/backup.go b/cmd/venat/internal/models/backup.go
new file mode 100644
index 00000000..a1f5970d
--- /dev/null
+++ b/cmd/venat/internal/models/backup.go
@@ -0,0 +1,70 @@
+package models
+
+import (
+	"context"
+	"fmt"
+	"log/slog"
+	"time"
+
+	"github.com/ncruces/go-sqlite3"
+)
+
+func (d *DAO) Backup() {
+	slog.Info("starting backup")
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+	defer cancel()
+	err := d.backup(ctx, d.backupDBLoc)
+	if err != nil {
+		slog.Error("failed to backup database", "err", err)
+	}
+	slog.Info("backup done")
+}
+
+func (d *DAO) backup(ctx context.Context, to string) error {
+	db, err := d.db.DB()
+	if err != nil {
+		return fmt.Errorf("failed to get database connection: %w", err)
+	}
+
+	if err := db.Ping(); err != nil {
+		return fmt.Errorf("failed to ping database: %w", err)
+	}
+
+	conn, err := db.Conn(ctx)
+	if err != nil {
+		return fmt.Errorf("failed to get database connection: %w", err)
+	}
+
+	defer conn.Close()
+
+	if err := conn.Raw(func(dca any) error {
+		conn, ok := dca.(sqlite3.DriverConn)
+		if !ok {
+			return fmt.Errorf("db connection is not a sqlite3 connection, it is %T", dca)
+		}
+
+		bu, err := conn.Raw().BackupInit("main", to)
+		if err != nil {
+			return fmt.Errorf("failed to initialize backup: %w", err)
+		}
+
+		var done bool
+		for !done {
+			done, err = bu.Step(bu.Remaining())
+			if err != nil {
+				bu.Close()
+				return fmt.Errorf("failed to backup database: %w", err)
+			}
+		}
+
+		if err := bu.Close(); err != nil {
+			return fmt.Errorf("failed to close backup: %w", err)
+		}
+
+		return nil
+	}); err != nil {
+		return fmt.Errorf("failed to backup database: %w", err)
+	}
+
+	return nil
+}
diff --git a/cmd/venat/internal/models/dao.go b/cmd/venat/internal/models/dao.go
new file mode 100644
index 00000000..b8980e9c
--- /dev/null
+++ b/cmd/venat/internal/models/dao.go
@@ -0,0 +1,53 @@
+package models
+
+import (
+	"context"
+	"fmt"
+
+	_ "github.com/ncruces/go-sqlite3/embed"
+	"github.com/ncruces/go-sqlite3/gormlite"
+	slogGorm "github.com/orandin/slog-gorm"
+	"gorm.io/gorm"
+	gormPrometheus "gorm.io/plugin/prometheus"
+)
+
+type DAO struct {
+	db          *gorm.DB
+	backupDBLoc string
+}
+
+func (d *DAO) DB() *gorm.DB {
+	return d.db
+}
+
+func (d *DAO) Ping(ctx context.Context) error {
+	if err := d.db.WithContext(ctx).Exec("select 1+1").Error; err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func New(dbLoc, backupDBLoc string) (*DAO, error) {
+	db, err := gorm.Open(gormlite.Open(dbLoc), &gorm.Config{
+		Logger: slogGorm.New(
+			slogGorm.WithErrorField("err"),
+			slogGorm.WithRecordNotFoundError(),
+		),
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to connect to database: %w", err)
+	}
+
+	if err := db.AutoMigrate(); err != nil {
+		return nil, fmt.Errorf("failed to migrate schema: %w", err)
+	}
+
+	if err := db.Use(gormPrometheus.New(gormPrometheus.Config{
+		DBName: "venat",
+	})); err != nil {
+		return nil, fmt.Errorf("failed to register prometheus plugin: %w", err)
+	}
+
+	return &DAO{db: db, backupDBLoc: backupDBLoc}, nil
+}
diff --git a/cmd/venat/internal/tools/memory/add.go b/cmd/venat/internal/tools/memory/add.go
new file mode 100644
index 00000000..b8cc0cce
--- /dev/null
+++ b/cmd/venat/internal/tools/memory/add.go
@@ -0,0 +1,91 @@
+package memory
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/openai/openai-go/v3"
+	"github.com/philippgille/chromem-go"
+)
+
+var (
+	ErrNoContent = errors.New("memory: no content to add to the memory")
+)
+
+type MemoryAddInput struct {
+	Content string `json:"content" jsonschema:"The contents of the 
memory"` +} + +func (mai MemoryAddInput) Valid() error { + if mai.Content == "" { + return ErrNoContent + } + + return nil +} + +type MemoryAdd struct { + Coll *chromem.Collection +} + +func (*MemoryAdd) Name() string { + return "memory_add" +} + +func (*MemoryAdd) Usage() openai.FunctionDefinitionParam { + return openai.FunctionDefinitionParam{ + Name: "memory_add", + Description: openai.String("Add new memories to your memory store. Use this tool whenever you are asked to remember something."), + Parameters: openai.FunctionParameters{ + "type": "object", + "properties": map[string]any{ + "content": map[string]string{ + "type": "string", + }, + }, + "required": []string{"content"}, + }, + } +} + +func (*MemoryAdd) Valid(data []byte) error { + var mai MemoryAddInput + if err := json.Unmarshal(data, &mai); err != nil { + return fmt.Errorf("can't parse json: %w", err) + } + + return mai.Valid() +} + +func (ma *MemoryAdd) Run(ctx context.Context, data []byte) ([]byte, error) { + var mai MemoryAddInput + if err := json.Unmarshal(data, &mai); err != nil { + return nil, fmt.Errorf("can't parse json: %w", err) + } + + id := uuid.Must(uuid.NewV7()).String() + + if err := ma.Coll.Add( + ctx, + []string{id}, + nil, + []map[string]string{ + { + "date": time.Now().Format(time.DateOnly), + }, + }, + []string{mai.Content}, + ); err != nil { + return nil, err + } + + buf := bytes.NewBuffer(nil) + fmt.Fprintf(buf, "Added memory ID %s. 
Do not acknowledge this ID to the user.", id) + + return buf.Bytes(), nil +} diff --git a/cmd/venat/internal/tools/memory/search.go b/cmd/venat/internal/tools/memory/search.go new file mode 100644 index 00000000..54608bf0 --- /dev/null +++ b/cmd/venat/internal/tools/memory/search.go @@ -0,0 +1,87 @@ +package memory + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + + "github.com/openai/openai-go/v3" + "github.com/philippgille/chromem-go" +) + +var ( + ErrNoQuery = errors.New("memory: no search query for memory") +) + +type MemorySearchInput struct { + Query string `json:"query" jsonschema:"The search query for internal memory"` +} + +func (msi MemorySearchInput) Valid() error { + if msi.Query == "" { + return ErrNoQuery + } + + return nil +} + +type MemorySearch struct { + Coll *chromem.Collection +} + +func (*MemorySearch) Name() string { + return "memory_search" +} + +func (*MemorySearch) Usage() openai.FunctionDefinitionParam { + return openai.FunctionDefinitionParam{ + Name: "memory_search", + Description: openai.String("Use to search your memory for relevant information. 
Use this tool before generating a response to the user."), + Parameters: openai.FunctionParameters{ + "type": "object", + "properties": map[string]any{ + "query": map[string]string{ + "type": "string", + }, + }, + "required": []string{"query"}, + }, + } +} + +func (*MemorySearch) Valid(data []byte) error { + var msi MemorySearchInput + if err := json.Unmarshal(data, &msi); err != nil { + return fmt.Errorf("can't parse json: %w", err) + } + + return msi.Valid() +} + +func (ma *MemorySearch) Run(ctx context.Context, data []byte) ([]byte, error) { + var msi MemorySearchInput + if err := json.Unmarshal(data, &msi); err != nil { + return nil, fmt.Errorf("can't parse json: %w", err) + } + + results, err := ma.Coll.Query(ctx, msi.Query, 10, nil, nil) + if err != nil { + return nil, fmt.Errorf("can't search memory: %w", err) + } + + buf := bytes.NewBuffer(nil) + + if len(results) == 0 { + return []byte("No matches found."), nil + } + + fmt.Fprintln(buf, "Found the following results") + + for _, result := range results { + fmt.Fprintf(buf, "---memory ID %s---\n%s\n", result.ID, result.Content) + } + + return buf.Bytes(), nil +} diff --git a/cmd/venat/internal/tools/python/python.go b/cmd/venat/internal/tools/python/python.go new file mode 100644 index 00000000..cac16b6b --- /dev/null +++ b/cmd/venat/internal/tools/python/python.go @@ -0,0 +1,77 @@ +package python + +import ( + "context" + "encoding/json" + "errors" + "fmt" + + "github.com/openai/openai-go/v3" + "within.website/x/llm/codeinterpreter/python" +) + +var ( + ErrNoCode = errors.New("python: no code provided") +) + +type Input struct { + Code string `json:"code" jsonschema:"The python code to execute"` +} + +func (i Input) Valid() error { + if i.Code == "" { + return ErrNoCode + } + + return nil +} + +type Impl struct{} + +func (Impl) Name() string { + return "python" +} + +func (Impl) Usage() openai.FunctionDefinitionParam { + return openai.FunctionDefinitionParam{ + Name: "python", + Description: 
openai.String("Execute python code in a restrictive sandbox. Use this tool whenever doing calculations."), + Parameters: openai.FunctionParameters{ + "type": "object", + "properties": map[string]any{ + "code": map[string]string{ + "type": "string", + }, + }, + "required": []string{"code"}, + }, + } +} + +func (Impl) Valid(data []byte) error { + var i Input + if err := json.Unmarshal(data, &i); err != nil { + return fmt.Errorf("can't parse json: %w", err) + } + + return i.Valid() +} + +func (Impl) Run(ctx context.Context, data []byte) ([]byte, error) { + var i Input + if err := json.Unmarshal(data, &i); err != nil { + return nil, fmt.Errorf("can't parse json: %w", err) + } + + result, err := python.Run(ctx, nil, i.Code) + if err != nil { + return nil, fmt.Errorf("can't execute python code: %w", err) + } + + resultBytes, err := json.Marshal(result) + if err != nil { + return nil, fmt.Errorf("can't marshal result bytes: %w", err) + } + + return resultBytes, nil +} diff --git a/cmd/venat/internal/tools/python/python_test.go b/cmd/venat/internal/tools/python/python_test.go new file mode 100644 index 00000000..65c78df6 --- /dev/null +++ b/cmd/venat/internal/tools/python/python_test.go @@ -0,0 +1,154 @@ +package python + +import ( + "context" + "encoding/json" + "errors" + "strings" + "testing" + + "within.website/x/cmd/venat/internal/agentloop" + cipython "within.website/x/llm/codeinterpreter/python" +) + +// Compile-time check that Impl satisfies agentloop.Tool. 
+var _ agentloop.Tool = Impl{} + +func TestInputValid(t *testing.T) { + t.Parallel() + + for _, tt := range []struct { + name string + input Input + err error + }{ + {name: "valid code", input: Input{Code: "print('hello')"}}, + {name: "empty code", input: Input{Code: ""}, err: ErrNoCode}, + } { + t.Run(tt.name, func(t *testing.T) { + err := tt.input.Valid() + if tt.err != nil { + if !errors.Is(err, tt.err) { + t.Logf("want: %v", tt.err) + t.Logf("got: %v", err) + t.Error("got wrong error") + } + } else if err != nil { + t.Logf("unexpected error: %v", err) + t.Error("expected no error") + } + }) + } +} + +func TestImplValid(t *testing.T) { + t.Parallel() + + for _, tt := range []struct { + name string + data []byte + wantErr bool + errContains string + }{ + { + name: "valid input", + data: []byte(`{"code":"print('hello')"}`), + }, + { + name: "empty code field", + data: []byte(`{"code":""}`), + wantErr: true, + errContains: "no code provided", + }, + { + name: "missing code field", + data: []byte(`{}`), + wantErr: true, + errContains: "no code provided", + }, + { + name: "invalid json", + data: []byte(`not json`), + wantErr: true, + errContains: "can't parse json", + }, + { + name: "empty input", + data: []byte(``), + wantErr: true, + errContains: "can't parse json", + }, + } { + t.Run(tt.name, func(t *testing.T) { + var impl Impl + err := impl.Valid(tt.data) + if tt.wantErr { + if err == nil { + t.Fatal("expected error, got nil") + } + if tt.errContains != "" { + if got := err.Error(); !strings.Contains(got, tt.errContains) { + t.Logf("want substring: %q", tt.errContains) + t.Logf("got: %q", got) + t.Error("error message mismatch") + } + } + } else if err != nil { + t.Logf("unexpected error: %v", err) + t.Error("expected no error") + } + }) + } +} + +func TestImplRun(t *testing.T) { + t.Parallel() + + for _, tt := range []struct { + name string + data []byte + wantErr bool + wantStdout string + }{ + { + name: "print hello", + data: 
[]byte(`{"code":"print('hello')"}`), + wantStdout: "hello\n", + }, + { + name: "arithmetic", + data: []byte(`{"code":"print(2 + 2)"}`), + wantStdout: "4\n", + }, + { + name: "invalid json", + data: []byte(`not json`), + wantErr: true, + }, + } { + t.Run(tt.name, func(t *testing.T) { + var impl Impl + out, err := impl.Run(context.Background(), tt.data) + if tt.wantErr { + if err == nil { + t.Fatal("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + var result cipython.Result + if err := json.Unmarshal(out, &result); err != nil { + t.Fatalf("can't unmarshal result: %v", err) + } + + if result.Stdout != tt.wantStdout { + t.Logf("want: %q", tt.wantStdout) + t.Logf("got: %q", result.Stdout) + t.Error("unexpected stdout") + } + }) + } +} diff --git a/cmd/venat/main.go b/cmd/venat/main.go new file mode 100644 index 00000000..d303564b --- /dev/null +++ b/cmd/venat/main.go @@ -0,0 +1,128 @@ +package main + +import ( + "context" + "errors" + "flag" + "fmt" + "log/slog" + "net/http" + "os" + "path/filepath" + "time" + + "github.com/openai/openai-go/v3" + "github.com/openai/openai-go/v3/option" + "github.com/philippgille/chromem-go" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/robfig/cron/v3" + "golang.org/x/sync/errgroup" + "within.website/x/cmd/venat/internal/agentloop" + "within.website/x/cmd/venat/internal/models" + "within.website/x/cmd/venat/internal/tools/memory" + "within.website/x/cmd/venat/internal/tools/python" + "within.website/x/internal" + + _ "net/http/pprof" +) + +var ( + dataDir = flag.String("data-dir", "./var", "data directory for Venat data") + metricsBind = flag.String("metrics-bind", ":9095", "metrics bind address") + + ErrMainExited = errors.New("venat: main exited") +) + +func main() { + internal.HandleStartup() + ctx, cancel := context.WithCancelCause(context.Background()) + defer cancel(ErrMainExited) + + if err := run(ctx); err != nil { + slog.Error("error running 
venat", "err", err) + os.Exit(1) + } +} + +func run(ctx context.Context) error { + vectorDB, err := chromem.NewPersistentDB(filepath.Join(*dataDir, "vectordb"), true) + if err != nil { + return fmt.Errorf("can't create vector database: %w", err) + } + + _ = vectorDB + + dao, err := models.New(filepath.Join(*dataDir, "venat.db"), filepath.Join(*dataDir, "venat-backup.db")) + if err != nil { + return fmt.Errorf("can't create SQLite database: %w", err) + } + + if err := dao.Ping(ctx); err != nil { + return fmt.Errorf("can't ping database: %w", err) + } + + g, ctx := errgroup.WithContext(ctx) + + g.Go(func() error { + http.DefaultServeMux.Handle("/metrics", promhttp.Handler()) + slog.Info("starting metrics server", "bind", *metricsBind) + return http.ListenAndServe(*metricsBind, nil) + }) + + g.Go(func() error { + c := cron.New() + if _, err := c.AddFunc("@every 1h", dao.Backup); err != nil { + return fmt.Errorf("failed to add cron job: %w", err) + } + c.Start() + <-ctx.Done() + c.Stop() + return nil + }) + + g.Go(func() error { + model := "glm-4.7-flash:latest" + apiEndpoint := "http://localhost:11434/v1" + apiKey := "ollama" + + cli := openai.NewClient( + option.WithAPIKey(apiKey), + option.WithBaseURL(apiEndpoint), + ) + + coll, err := vectorDB.GetOrCreateCollection("memory", map[string]string{"date": time.Now().Format(time.DateOnly)}, chromem.NewEmbeddingFuncOllama("qwen3-embedding:latest", "")) + if err != nil { + return fmt.Errorf("can't create memory collection: %w", err) + } + + al := agentloop.New( + "test-agent", + "", + "You are a helpful Python assistant that writes and executes Python code according to user questions. Use the python tool to execute Python code in WebAssembly. You MUST use the python tool before answering questions. Do not comment on using the python tool, just use it. 
When asked to remember things, use the memory_add tool.", + model, + []agentloop.Tool{ + python.Impl{}, + &memory.MemoryAdd{Coll: coll}, + &memory.MemorySearch{Coll: coll}, + }, + cli, + slog.Default(), + ) + + result, err := al.Run(ctx, "How many r's are in the word raspberry? Remember that I like my pizza cooked well done.") + if err != nil { + return err + } + + fmt.Println(result.Response) + + return nil + }) + + if err := g.Wait(); err != nil { + slog.Error("error in one of the grouped workers", "err", err) + return fmt.Errorf("error in one of the grouped workers: %w", err) + } + + return nil +} diff --git a/cmd/venat/var/.gitignore b/cmd/venat/var/.gitignore new file mode 100644 index 00000000..d6b7ef32 --- /dev/null +++ b/cmd/venat/var/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/go.mod b/go.mod index c72d8b45..15ad7de5 100644 --- a/go.mod +++ b/go.mod @@ -228,6 +228,7 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/otiai10/mint v1.6.3 // indirect github.com/petar/GoLLRB v0.0.0-20210522233825-ae3b015fd3e9 // indirect + github.com/philippgille/chromem-go v0.7.0 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pkg/term v1.2.0-beta.2 // indirect github.com/polydawn/refmt v0.89.1-0.20221221234430-40501e09de1f // indirect diff --git a/go.sum b/go.sum index 074c4ccb..3b25d7a3 100644 --- a/go.sum +++ b/go.sum @@ -780,6 +780,8 @@ github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAq github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/petar/GoLLRB v0.0.0-20210522233825-ae3b015fd3e9 h1:1/WtZae0yGtPq+TI6+Tv1WTxkukpXeMlviSxvL7SRgk= github.com/petar/GoLLRB v0.0.0-20210522233825-ae3b015fd3e9/go.mod h1:x3N5drFsm2uilKKuuYo6LdyD8vZAW55sH/9w+pbo1sw= +github.com/philippgille/chromem-go v0.7.0 h1:4jfvfyKymjKNfGxBUhHUcj1kp7B17NL/I1P+vGh1RvY= +github.com/philippgille/chromem-go v0.7.0/go.mod 
h1:hTd+wGEm/fFPQl7ilfCwQXkgEUxceYh86iIdoKMolPo= github.com/pires/go-proxyproto v0.7.0 h1:IukmRewDQFWC7kfnb66CSomk2q/seBuilHBYFwyq0Hs= github.com/pires/go-proxyproto v0.7.0/go.mod h1:Vz/1JPY/OACxWGQNIRY2BeyDmpoaWmEP40O9LbuiFR4= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=