ai-css/library/modelprovider/client.go
2026-02-12 08:50:11 +00:00

59 lines
1.3 KiB
Go
Executable File

package modelprovider
import (
"context"
"errors"
"time"
"github.com/openai/openai-go/v3/shared"
)
// Client is a thin convenience wrapper around a Provider that applies a
// default model to outgoing requests and backfills response metadata.
type Client struct {
	provider     Provider              // backend that actually executes completions
	defaultModel shared.ResponsesModel // used when a request does not specify a model
}
// NewClient returns a Client that routes requests to p, falling back to
// defaultModel whenever a request leaves its model unset.
func NewClient(p Provider, defaultModel shared.ResponsesModel) *Client {
	c := &Client{
		provider:     p,
		defaultModel: defaultModel,
	}
	return c
}
// Chat performs a non-streaming completion call against the underlying
// provider.
//
// It rejects requests with no messages, substitutes the client's default
// model when req.Model is empty, and backfills response metadata the
// provider left unset (vendor, creation time, echoed model). It returns
// a non-nil response iff the error is nil.
func (c *Client) Chat(ctx context.Context, req ChatRequest) (*ChatResponse, error) {
	if len(req.Messages) == 0 {
		return nil, errors.New("empty messages")
	}
	if req.Model == "" {
		req.Model = c.defaultModel
	}
	resp, err := c.provider.InvokeCompletion(ctx, &req)
	if err != nil {
		return nil, err
	}
	// Guard against a misbehaving provider returning (nil, nil); callers
	// must never see a nil response without an error.
	if resp == nil {
		return nil, errors.New("provider returned nil response")
	}
	if resp.Meta.Vendor == "" {
		resp.Meta.Vendor = c.provider.Capabilities().Vendor
	}
	if resp.Meta.CreatedAt.IsZero() {
		resp.Meta.CreatedAt = time.Now()
	}
	if resp.Model == "" {
		resp.Model = req.Model
	}
	return resp, nil
}
// StreamChat performs a streaming completion call, delivering incremental
// results through handler.
//
// It fails fast if the provider does not support streaming, rejects
// requests with no messages, and substitutes the client's default model
// when req.Model is empty.
func (c *Client) StreamChat(ctx context.Context, req ChatRequest, handler StreamChatCallback) error {
	// Check the capability before validating or mutating the request:
	// an unsupported provider should be reported regardless of payload.
	if !c.provider.Capabilities().SupportsStreaming {
		return errors.New("provider does not support streaming")
	}
	if len(req.Messages) == 0 {
		return errors.New("empty messages")
	}
	if req.Model == "" {
		req.Model = c.defaultModel
	}
	return c.provider.StreamCompletion(ctx, &req, handler)
}