waveterm/pkg/waveai/waveai.go
Mike Sawka 6e3554407b
AI SDK Backend (#2336)
Working on AI SDK compatible backends for OpenAI and Anthropic. Thinking + ToolUse etc.  For use with AI SDK useChat on frontend.  Still needs more testing, WIP, but this is a good start.  Want to get this committed so I can work on more integrations.
2025-09-12 12:56:24 -07:00

118 lines
3.6 KiB
Go

// Copyright 2025, Command Line Inc.
// SPDX-License-Identifier: Apache-2.0
package waveai
import (
	"context"
	"log"
	"net"
	"net/url"
	"strings"

	"github.com/wavetermdev/waveterm/pkg/telemetry"
	"github.com/wavetermdev/waveterm/pkg/telemetry/telemetrydata"
	"github.com/wavetermdev/waveterm/pkg/wshrpc"
)
// WaveAIPacketstr is the packet type string used for waveai RPC packets.
const WaveAIPacketstr = "waveai"

// Supported backend API type identifiers (matched against WaveAIOptsType.APIType).
const (
	APIType_Anthropic  = "anthropic"
	APIType_Perplexity = "perplexity"
	APIType_Google     = "google"
	APIType_OpenAI     = "openai"
)
// WaveAICmdInfoPacketOutputType carries the output portion of an AI
// cmdinfo chat message: the model/creation metadata reported for the
// response plus either message text or an error string.
type WaveAICmdInfoPacketOutputType struct {
	Model        string `json:"model,omitempty"`         // model identifier (presumably as reported by the backend — confirm at producer)
	Created      int64  `json:"created,omitempty"`       // creation timestamp; units (sec vs ms) not visible here — confirm at producer
	FinishReason string `json:"finish_reason,omitempty"` // why generation stopped
	Message      string `json:"message,omitempty"`       // response text
	Error        string `json:"error,omitempty"`         // error description, set instead of Message on failure
}
// MakeWaveAIPacket constructs an empty WaveAIPacketType with its Type
// field preset to the waveai packet identifier.
func MakeWaveAIPacket() *wshrpc.WaveAIPacketType {
	pk := new(wshrpc.WaveAIPacketType)
	pk.Type = WaveAIPacketstr
	return pk
}
// WaveAICmdInfoChatMessage is a single entry in a cmdinfo AI chat
// transcript. A message is either a user query (UserQuery /
// UserEngineeredQuery set) or an assistant response (IsAssistantResponse
// true with AssistantResponse populated).
type WaveAICmdInfoChatMessage struct {
	MessageID           int                            `json:"messageid"`                     // ordinal/message identifier within the chat
	IsAssistantResponse bool                           `json:"isassistantresponse,omitempty"` // true when this entry is an assistant reply
	AssistantResponse   *WaveAICmdInfoPacketOutputType `json:"assistantresponse,omitempty"`   // response payload for assistant entries
	UserQuery           string                         `json:"userquery,omitempty"`           // raw query as typed by the user
	UserEngineeredQuery string                         `json:"userengineeredquery,omitempty"` // presumably the prompt-engineered form actually sent — confirm at producer
}
// AIBackend is the interface implemented by each AI provider integration
// (OpenAI, Anthropic, Google, Perplexity, Wave cloud).
type AIBackend interface {
	// StreamCompletion starts a streaming completion for the request and
	// returns a channel that yields response packets or errors.
	StreamCompletion(
		ctx context.Context,
		request wshrpc.WaveAIStreamRequest,
	) chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]
}
// IsCloudAIRequest reports whether the request should be routed to the
// Wave cloud backend: either no options were provided at all, or neither
// a custom base URL nor an API token is configured.
func IsCloudAIRequest(opts *wshrpc.WaveAIOptsType) bool {
	if opts != nil {
		return opts.BaseURL == "" && opts.APIToken == ""
	}
	// nil options means nothing user-configured: use the cloud default.
	return true
}
// isLocalURL reports whether baseURL points at a local or private-network
// endpoint (loopback, unspecified 0.0.0.0/::, or an RFC 1918 / RFC 4193
// private address), used to flag local AI endpoints in telemetry.
//
// Fixes over the previous prefix-matching version: "172." previously
// matched ALL 172.x.x.x addresses even though only 172.16.0.0/12 is
// private, "10." also matched hostnames like "10.example.com", and IPv6
// loopback (::1) was not recognized. Parsing the host as an IP and using
// net.IP classification handles all of these correctly.
func isLocalURL(baseURL string) bool {
	if baseURL == "" {
		return false
	}
	u, err := url.Parse(baseURL)
	if err != nil {
		// Unparseable URL: treat as non-local rather than guessing.
		return false
	}
	host := strings.ToLower(u.Hostname())
	if host == "localhost" {
		return true
	}
	if ip := net.ParseIP(host); ip != nil {
		return ip.IsLoopback() || ip.IsPrivate() || ip.IsUnspecified()
	}
	// Non-IP hostname other than "localhost": not considered local.
	return false
}
// makeAIError wraps err in a response union so it can be sent on the
// backend's streaming response channel.
func makeAIError(err error) wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType] {
	var resp wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]
	resp.Error = err
	return resp
}
// RunAICommand records request telemetry, selects the backend matching
// request.Opts.APIType (falling back to the Wave cloud backend for
// cloud-eligible requests and the OpenAI-compatible backend otherwise),
// and starts the streaming completion. Returns the backend's response
// channel, or nil if no backend could be resolved.
func RunAICommand(ctx context.Context, request wshrpc.WaveAIStreamRequest) chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType] {
	telemetry.GoUpdateActivityWrap(wshrpc.ActivityUpdate{NumAIReqs: 1}, "RunAICommand")
	// NOTE(review): request.Opts is dereferenced below without a nil check;
	// only IsCloudAIRequest tolerates nil Opts — confirm callers always set it.
	endpoint := request.Opts.BaseURL
	if endpoint == "" {
		endpoint = "default"
	}
	var backend AIBackend
	var backendType string
	switch {
	case request.Opts.APIType == APIType_Anthropic:
		backend = AnthropicBackend{}
		backendType = APIType_Anthropic
	case request.Opts.APIType == APIType_Perplexity:
		backend = PerplexityBackend{}
		backendType = APIType_Perplexity
	case request.Opts.APIType == APIType_Google:
		backend = GoogleBackend{}
		backendType = APIType_Google
	case IsCloudAIRequest(request.Opts):
		// Cloud requests are normalized to the OpenAI-compatible default model.
		endpoint = "waveterm cloud"
		request.Opts.APIType = APIType_OpenAI
		request.Opts.Model = "default"
		backend = WaveAICloudBackend{}
		backendType = "wave"
	default:
		backend = OpenAIBackend{}
		backendType = APIType_OpenAI
	}
	if backend == nil {
		// Defensive: every case above assigns a backend, so this should be unreachable.
		log.Printf("no backend found for %s\n", request.Opts.APIType)
		return nil
	}
	// Flag non-cloud requests aimed at local/private endpoints for telemetry.
	aiLocal := backendType != "wave" && isLocalURL(request.Opts.BaseURL)
	telemetry.GoRecordTEventWrap(&telemetrydata.TEvent{
		Event: "action:runaicmd",
		Props: telemetrydata.TEventProps{
			AiBackendType: backendType,
			AiLocal:       aiLocal,
		},
	})
	log.Printf("sending ai chat message to %s endpoint %q using model %s\n", request.Opts.APIType, endpoint, request.Opts.Model)
	return backend.StreamCompletion(ctx, request)
}