Skip to main content
Agents can be served as HTTP endpoints, allowing them to be invoked through standard HTTP requests. The SDK client implements http.Handler, making it compatible with Go’s standard net/http package.

Creating an HTTP Server

Pass the SDK client directly to http.ListenAndServe:
// Build the SDK client with a single OpenAI provider whose key comes
// from the environment.
client, err := sdk.New(&sdk.ClientOptions{
	LLMConfigs: sdk.NewInMemoryConfigStore([]*gateway.ProviderConfig{
		{
			ProviderName: llm.ProviderNameOpenAI,
			ApiKeys: []*gateway.APIKeyConfig{
				{
					Name:   "Key 1",
					APIKey: os.Getenv("OPENAI_API_KEY"),
				},
			},
		},
	}),
})
if err != nil {
	log.Fatal(err)
}

// Register an agent on the client; it becomes addressable over HTTP by name.
client.NewAgent(&sdk.AgentOptions{
	Name:        "SampleAgent",
	Instruction: client.Prompt("You are helpful assistant."),
	LLM: client.NewLLM(sdk.LLMOptions{
		Provider: llm.ProviderNameOpenAI,
		Model:    "gpt-4o-mini",
	}),
})

// Start the HTTP server. ListenAndServe blocks until the server fails,
// so surface its error instead of discarding it.
log.Fatal(http.ListenAndServe(":8070", client))

Invoking an Agent

Send a POST request to http://localhost:8070/?agent=<agent-name> with a JSON request body matching the agents.AgentInput structure:
curl -X POST "http://localhost:8070/?agent=SampleAgent" \
  -H "Content-Type: application/json" \
  -d '{
    "messages": [
      {
        "role": "user",
        "content": "Hello!"
      }
    ]
  }'

Request Structure

The request body follows the agents.AgentInput structure:
// AgentInput is the JSON request body accepted by the agent HTTP endpoint.
type AgentInput struct {
    Namespace         string                               // optional: namespace for conversation isolation
    PreviousMessageID string                               // optional: previous message ID for conversation history
    Messages          []responses.InputMessageUnion        // required: input messages for the agent
    RunContext        map[string]any                       // optional: additional context for the execution
    Callback          func(chunk *responses.ResponseChunk) // not settable via JSON (functions cannot be decoded); presumably for in-process callers — verify against SDK docs
}
  • Namespace - Optional. Namespace for conversation isolation
  • PreviousMessageID - Optional. Previous message ID for conversation history
  • Messages - Required. Array of input messages
  • RunContext - Optional. Additional context for the execution

Complete Example

package main

import (
	"log"
	"net/http"
	"os"

	"github.com/curaious/uno/pkg/gateway"
	"github.com/curaious/uno/pkg/llm"
	"github.com/curaious/uno/pkg/sdk"
)

func main() {
	// Build the SDK client with a single OpenAI provider; the API key is
	// read from the environment so it never lives in source.
	client, err := sdk.New(&sdk.ClientOptions{
		LLMConfigs: sdk.NewInMemoryConfigStore([]*gateway.ProviderConfig{
			{
				ProviderName:  llm.ProviderNameOpenAI,
				BaseURL:       "", // empty: use the provider's default endpoint
				CustomHeaders: nil,
				ApiKeys: []*gateway.APIKeyConfig{
					{
						Name:   "Key 1",
						APIKey: os.Getenv("OPENAI_API_KEY"),
					},
				},
			},
		}),
	})
	if err != nil {
		log.Fatal(err)
	}

	// LLM configuration shared by the agent below.
	model := client.NewLLM(sdk.LLMOptions{
		Provider: llm.ProviderNameOpenAI,
		Model:    "gpt-4.1-mini",
	})

	// Conversation history store so repeated calls can resume a thread.
	history := client.NewConversationManager()
	agentName := "SampleAgent"
	_ = client.NewAgent(&sdk.AgentOptions{
		Name:        agentName,
		Instruction: client.Prompt("You are helpful assistant."),
		LLM:         model,
		History:     history,
	})

	// Invoke the agent with POST http://localhost:8070/?agent=SampleAgent
	// and an `agents.AgentInput` JSON payload, e.g.:
	/*
		  curl -X POST "http://localhost:8070/?agent=SampleAgent" \
		  -H "Content-Type: application/json" \
		  -d '{
			"messages": [
			  {
				"role": "user",
				"content": "Hello!"
			  }
			]
		  }'
	*/

	// ListenAndServe blocks until the server fails; report its error
	// rather than silently dropping it. Nothing after this line runs.
	log.Fatal(http.ListenAndServe(":8070", client))
}