deepseek

package
v0.2.0

Warning: This package is not in the latest version of its module.

Published: Feb 8, 2026 License: Apache-2.0 Imports: 17 Imported by: 0

Documentation

Overview

Package deepseek implements a client for the DeepSeek API.

It is described at https://api-docs.deepseek.com/

Constants

This section is empty.

Variables

This section is empty.

Functions

func ProcessStream

func ProcessStream(chunks iter.Seq[ChatStreamChunkResponse]) (iter.Seq[genai.Reply], func() (genai.Usage, [][]genai.Logprob, error))

ProcessStream converts the raw packets from the streaming API into Reply fragments.
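
A minimal, hypothetical sketch of pairing ProcessStream with Client.GenStreamRaw (documented below). The model ID "deepseek-chat" is an assumption and the request's Messages are left empty; a real call needs at least one message.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/maruel/genai/providers/deepseek"
)

func main() {
	ctx := context.Background()
	// New loads the API key from DEEPSEEK_API_KEY; see New below.
	c, err := deepseek.New(ctx)
	if err != nil {
		log.Fatal(err)
	}
	// "deepseek-chat" is an assumed model ID and Messages is left unpopulated;
	// fill both for a real call.
	in := deepseek.ChatRequest{Model: "deepseek-chat", Stream: true}
	chunks, finishRaw := c.GenStreamRaw(ctx, &in)
	replies, finish := deepseek.ProcessStream(chunks)
	for reply := range replies {
		fmt.Printf("%v", reply) // handle each fragment as it arrives
	}
	usage, _, err := finish()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", usage)
	if err := finishRaw(); err != nil {
		log.Fatal(err)
	}
}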

func Scoreboard

func Scoreboard() scoreboard.Score

Scoreboard returns the scoreboard for DeepSeek.

Types

type ChatRequest

type ChatRequest struct {
	Model            string    `json:"model"`
	Messages         []Message `json:"messages"`
	Stream           bool      `json:"stream"`
	Temperature      float64   `json:"temperature,omitzero"`       // [0, 2]
	FrequencyPenalty float64   `json:"frequency_penalty,omitzero"` // [-2, 2]
	MaxToks          int64     `json:"max_tokens,omitzero"`        // [1, 8192]
	PresencePenalty  float64   `json:"presence_penalty,omitzero"`  // [-2, 2]
	ResponseFormat   struct {
		Type string `json:"type,omitzero"` // "text", "json_object"
	} `json:"response_format,omitzero"`
	Stop          []string `json:"stop,omitzero"`
	StreamOptions struct {
		IncludeUsage bool `json:"include_usage,omitzero"`
	} `json:"stream_options,omitzero"`
	TopP float64 `json:"top_p,omitzero"` // [0, 1]
	// Alternative when forcing a specific function. This can probably be achieved
	// by providing a single tool and ToolChoice == "required".
	// ToolChoice struct {
	// 	Type     string `json:"type,omitzero"` // "function"
	// 	Function struct {
	// 		Name string `json:"name,omitzero"`
	// 	} `json:"function,omitzero"`
	// } `json:"tool_choice,omitzero"`
	ToolChoice string `json:"tool_choice,omitzero"` // "none", "auto", "required"
	Tools      []Tool `json:"tools,omitzero"`
	Logprobs   bool   `json:"logprobs,omitzero"`
	TopLogprob int64  `json:"top_logprobs,omitzero"`
}

ChatRequest is documented at https://api-docs.deepseek.com/api/create-chat-completion

func (*ChatRequest) Init

func (c *ChatRequest) Init(msgs genai.Messages, model string, opts ...genai.GenOption) error

Init initializes the provider-specific completion request with the generic completion request.
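
A hedged sketch of preparing a raw request with Init and SetStream. The model ID "deepseek-chat" is an assumption, and the generic messages are assumed to be built elsewhere with the genai package.

package main

import (
	"fmt"
	"log"

	"github.com/maruel/genai"
	"github.com/maruel/genai/providers/deepseek"
)

func main() {
	// Build the generic conversation with the genai package (construction elided).
	var msgs genai.Messages
	var req deepseek.ChatRequest
	// "deepseek-chat" is an assumed model ID.
	if err := req.Init(msgs, "deepseek-chat"); err != nil {
		log.Fatal(err)
	}
	req.SetStream(false) // see SetStream below
	fmt.Println(req.Model)
	// The populated request can then be sent with Client.GenSyncRaw or
	// Client.GenStreamRaw, documented below.
}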

func (*ChatRequest) SetStream

func (c *ChatRequest) SetStream(stream bool)

SetStream sets the streaming mode.

type ChatResponse

type ChatResponse struct {
	ID      string `json:"id"`
	Choices []struct {
		FinishReason FinishReason `json:"finish_reason"`
		Index        int64        `json:"index"`
		Message      Message      `json:"message"`
		Logprobs     Logprobs     `json:"logprobs"`
	} `json:"choices"`
	Created           int64  `json:"created"` // Unix timestamp
	Model             string `json:"model"`
	SystemFingerPrint string `json:"system_fingerprint"`
	Object            string `json:"object"` // chat.completion
	Usage             Usage  `json:"usage"`
}

ChatResponse is the provider-specific chat completion response.

func (*ChatResponse) ToResult

func (c *ChatResponse) ToResult() (genai.Result, error)

ToResult converts the response to a genai.Result.

type ChatStreamChunkResponse

type ChatStreamChunkResponse struct {
	ID                string `json:"id"`
	Object            string `json:"object"`  // chat.completion.chunk
	Created           int64  `json:"created"` // Unix timestamp
	Model             string `json:"model"`
	SystemFingerprint string `json:"system_fingerprint"`
	Choices           []struct {
		Index        int64        `json:"index"`
		Delta        Message      `json:"delta"`
		Logprobs     Logprobs     `json:"logprobs"`
		FinishReason FinishReason `json:"finish_reason"`
	} `json:"choices"`
	Usage Usage `json:"usage"`
}

ChatStreamChunkResponse is the provider-specific streaming chat chunk.

type Client

type Client struct {
	base.NotImplemented
	// contains filtered or unexported fields
}

Client implements genai.Provider.

func New

func New(ctx context.Context, opts ...genai.ProviderOption) (*Client, error)

New creates a new client to talk to the DeepSeek platform API in China.

If ProviderOptionAPIKey is not provided, it tries to load it from the DEEPSEEK_API_KEY environment variable. If none is found, it will still return a client coupled with a base.ErrAPIKeyRequired error. Get your API key at https://platform.deepseek.com/api_keys

To use multiple models, create multiple clients. Use one of the models listed at https://api-docs.deepseek.com/quick_start/pricing

Example (HTTP_record)
package main

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"os"

	"github.com/maruel/genai"
	"github.com/maruel/genai/httprecord"
	"github.com/maruel/genai/providers/deepseek"
	"gopkg.in/dnaeon/go-vcr.v4/pkg/recorder"
)

func main() {
	// Example to do HTTP recording and playback for smoke testing.
	// The example recording is in testdata/example.yaml.
	var rr *recorder.Recorder
	defer func() {
		// In a smoke test, use t.Cleanup().
		if rr != nil {
			if err := rr.Stop(); err != nil {
				log.Printf("Failed saving recordings: %v", err)
			}
		}
	}()

	// Simple trick to force recording via an environment variable.
	mode := recorder.ModeRecordOnce
	if os.Getenv("RECORD") == "1" {
		mode = recorder.ModeRecordOnly
	}
	wrapper := func(h http.RoundTripper) http.RoundTripper {
		var err error
		rr, err = httprecord.New("testdata/example", h, recorder.WithMode(mode))
		if err != nil {
			log.Fatal(err)
		}
		return rr
	}
	// When playing back the smoke test, no API key is needed. Insert a fake API key.
	var opts []genai.ProviderOption
	if os.Getenv("DEEPSEEK_API_KEY") == "" {
		opts = append(opts, genai.ProviderOptionAPIKey("<insert_api_key_here>"))
	}
	ctx := context.Background()
	c, err := deepseek.New(ctx, append([]genai.ProviderOption{genai.ProviderOptionTransportWrapper(wrapper)}, opts...)...)
	if err != nil {
		log.Fatal(err)
	}
	models, err := c.ListModels(ctx)
	if err != nil {
		log.Fatal(err)
	}
	if len(models) > 1 {
		fmt.Println("Found multiple models")
	}
}
Output:

Found multiple models

func (*Client) GenStream

func (c *Client) GenStream(ctx context.Context, msgs genai.Messages, opts ...genai.GenOption) (iter.Seq[genai.Reply], func() (genai.Result, error))

GenStream implements genai.Provider.
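
A hypothetical sketch of the high-level streaming path. Model selection through provider options and the construction of msgs are elided; both come from the generic genai package rather than from this one.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/maruel/genai"
	"github.com/maruel/genai/providers/deepseek"
)

func main() {
	ctx := context.Background()
	c, err := deepseek.New(ctx) // model selection via provider options is elided
	if err != nil {
		log.Fatal(err)
	}
	// Build the generic conversation with the genai package (construction elided).
	var msgs genai.Messages
	replies, finish := c.GenStream(ctx, msgs)
	for reply := range replies {
		fmt.Printf("%v", reply) // print each fragment as it arrives
	}
	result, err := finish()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", result)
}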

func (*Client) GenStreamRaw

func (c *Client) GenStreamRaw(ctx context.Context, in *ChatRequest) (iter.Seq[ChatStreamChunkResponse], func() error)

GenStreamRaw provides access to the raw API.

func (*Client) GenSync

func (c *Client) GenSync(ctx context.Context, msgs genai.Messages, opts ...genai.GenOption) (genai.Result, error)

GenSync implements genai.Provider.

func (*Client) GenSyncRaw

func (c *Client) GenSyncRaw(ctx context.Context, in *ChatRequest, out *ChatResponse) error

GenSyncRaw provides access to the raw API.
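
A hedged sketch of the raw synchronous path combined with ChatResponse.ToResult. The model ID and the prompt are placeholders, not values taken from this documentation.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/maruel/genai/providers/deepseek"
)

func main() {
	ctx := context.Background()
	c, err := deepseek.New(ctx)
	if err != nil {
		log.Fatal(err)
	}
	// "deepseek-chat" and the prompt are placeholders.
	in := deepseek.ChatRequest{
		Model: "deepseek-chat",
		Messages: []deepseek.Message{
			{Role: "user", Content: "Reply with a single word."},
		},
	}
	var out deepseek.ChatResponse
	if err := c.GenSyncRaw(ctx, &in, &out); err != nil {
		log.Fatal(err)
	}
	// Convert the provider-specific response into the generic form.
	res, err := out.ToResult()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", res)
}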

func (*Client) HTTPClient

func (c *Client) HTTPClient() *http.Client

HTTPClient returns the HTTP client to fetch results (e.g. videos) generated by the provider.

func (*Client) ListModels

func (c *Client) ListModels(ctx context.Context) ([]genai.Model, error)

ListModels implements genai.Provider.

func (*Client) ModelID

func (c *Client) ModelID() string

ModelID implements genai.Provider.

It returns the selected model ID.

func (*Client) Name

func (c *Client) Name() string

Name implements genai.Provider.

It returns the name of the provider.

func (*Client) OutputModalities

func (c *Client) OutputModalities() genai.Modalities

OutputModalities implements genai.Provider.

It returns the output modalities, i.e. what kind of output the model will generate (text, audio, image, video, etc).

func (*Client) Scoreboard

func (c *Client) Scoreboard() scoreboard.Score

Scoreboard implements genai.Provider.

type ErrorResponse

type ErrorResponse struct {
	// Type  string `json:"type"`
	ErrorVal struct {
		Message string `json:"message"`
		Type    string `json:"type"`
		Param   string `json:"param"`
		Code    string `json:"code"`
	} `json:"error"`
}

ErrorResponse is the provider-specific error response.

func (*ErrorResponse) Error

func (er *ErrorResponse) Error() string

func (*ErrorResponse) IsAPIError

func (er *ErrorResponse) IsAPIError() bool

IsAPIError implements base.ErrorResponseI.

type FinishReason

type FinishReason string

FinishReason is a provider-specific finish reason.

const (
	FinishStop          FinishReason = "stop"
	FinishToolCalls     FinishReason = "tool_calls"
	FinishLength        FinishReason = "length"
	FinishContentFilter FinishReason = "content_filter"
	FinishInsufficient  FinishReason = "insufficient_system_resource"
)

Finish reason values.

func (FinishReason) ToFinishReason

func (f FinishReason) ToFinishReason() genai.FinishReason

ToFinishReason converts to a genai.FinishReason.

type Logprobs

type Logprobs struct {
	Content []struct {
		Token       string  `json:"token"`
		Bytes       []byte  `json:"bytes"`
		Logprob     float64 `json:"logprob"`
		TopLogprobs []struct {
			Token   string  `json:"token"`
			Bytes   []byte  `json:"bytes"`
			Logprob float64 `json:"logprob"`
		} `json:"top_logprobs"`
	} `json:"content"`
}

Logprobs is the provider-specific log probabilities.

func (*Logprobs) To

func (l *Logprobs) To() [][]genai.Logprob

To converts to the genai equivalent.

type Message

type Message struct {
	Role             string     `json:"role,omitzero"` // "system", "assistant", "user"
	Name             string     `json:"name,omitzero"` // An optional name for the participant. Provides the model information to differentiate between participants of the same role.
	Content          string     `json:"content,omitzero"`
	Prefix           bool       `json:"prefix,omitzero"` // Force the model to start its answer with the content of the supplied prefix in this assistant message.
	ReasoningContent string     `json:"reasoning_content,omitzero"`
	ToolCalls        []ToolCall `json:"tool_calls,omitzero"`
	ToolCallID       string     `json:"tool_call_id,omitzero"` // Tool call that this message is responding to, with response in Content field.
}

Message is documented at https://api-docs.deepseek.com/api/create-chat-completion

func (*Message) From

func (m *Message) From(in *genai.Message) error

From must be called with at most one Request or one ToolCallResults.

func (*Message) To

func (m *Message) To(out *genai.Message)

To converts to the genai equivalent.

type Model

type Model struct {
	ID      string `json:"id"`
	Object  string `json:"object"` // model
	OwnedBy string `json:"owned_by"`
}

Model is the provider-specific model metadata.

func (*Model) Context

func (m *Model) Context() int64

Context implements genai.Model.

func (*Model) GetID

func (m *Model) GetID() string

GetID implements genai.Model.

func (*Model) String

func (m *Model) String() string

type ModelsResponse

type ModelsResponse struct {
	Object string  `json:"object"` // list
	Data   []Model `json:"data"`
}

ModelsResponse represents the response structure for listing DeepSeek models.

func (*ModelsResponse) ToModels

func (r *ModelsResponse) ToModels() []genai.Model

ToModels converts DeepSeek models to genai.Model interfaces.

type Tool

type Tool struct {
	Type     string `json:"type"` // "function"
	Function struct {
		Name        string             `json:"name,omitzero"`
		Description string             `json:"description,omitzero"`
		Parameters  *jsonschema.Schema `json:"parameters,omitzero"`
	} `json:"function"`
}

Tool is a provider-specific tool definition.

type ToolCall

type ToolCall struct {
	Index    int64  `json:"index,omitzero"`
	ID       string `json:"id,omitzero"`
	Type     string `json:"type,omitzero"` // "function"
	Function struct {
		Name      string `json:"name,omitzero"`
		Arguments string `json:"arguments,omitzero"`
	} `json:"function,omitzero"`
}

ToolCall is a provider-specific tool call.

func (*ToolCall) From

func (t *ToolCall) From(in *genai.ToolCall) error

From converts from the genai equivalent.

func (*ToolCall) To

func (t *ToolCall) To(out *genai.ToolCall)

To converts to the genai equivalent.

type Usage

type Usage struct {
	CompletionTokens      int64 `json:"completion_tokens"`
	PromptTokens          int64 `json:"prompt_tokens"`
	PromptCacheHitTokens  int64 `json:"prompt_cache_hit_tokens"`
	PromptCacheMissTokens int64 `json:"prompt_cache_miss_tokens"`
	TotalTokens           int64 `json:"total_tokens"`
	PromptTokensDetails   struct {
		CachedTokens int64 `json:"cached_tokens"`
	} `json:"prompt_tokens_details"`
	ChatTokensDetails struct {
		ReasoningTokens int64 `json:"reasoning_tokens"`
	} `json:"completion_tokens_details"`
}

Usage is the provider-specific token usage.
