lmstudio

package
v1.0.4 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Sep 8, 2025 License: Apache-2.0 Imports: 17 Imported by: 0

Documentation

Index

Constants

View Source
const (
	LMStudioWsAPITimeoutSec     = 30
	SystemAPINamespace          = "system"
	LLMNamespace                = "llm"                  // LLM namespace for loaded models
	EmbeddingNamespace          = "embedding"            // Embedding namespace for embedding models
	ModelListLoadedEndpoint     = "listLoaded"           // Endpoint for listing loaded models
	ModelLoadEndpoint           = "loadModel"            // Endpoint for loading a model
	ModelUnloadEndpoint         = "unloadModel"          // Endpoint for unloading a model
	ModelListDownloadedEndpoint = "listDownloadedModels" // Endpoint for listing downloaded models
	ModelChatEndpoint           = "predict"              // Endpoint for chat/prediction interactions
	MaxConnectionRetries        = 3
	ConnectionRetryDelaySec     = 2
	LMStudioAPIVersion          = 1
)
View Source
const (
	LMStudioGoVersion = "1.0.1"
)

Variables

View Source
var (
	LMStudioAPIHosts = []string{"localhost", "127.0.0.1", "0.0.0.0"}
	LMStudioAPIPorts = []int{1234, 12345}
)

Functions

func DiscoverLMStudioServer added in v1.0.1

func DiscoverLMStudioServer(host string, port int, logger Logger) (discoveredUrl string, err error)

DiscoverLMStudioServer attempts to discover an LM Studio server running on the local network. It first checks whether the server is running on localhost, and if not, it tries to find it on the local network interfaces. Returns the discovered server URL if found, or an error if no server could be found.

func NewLogger

func NewLogger(level LogLevel) *loggerStruct

NewLogger creates a new logger with the specified log level

func NewMockLMStudioService

func NewMockLMStudioService(t *testing.T, logger Logger) *httptest.Server

NewMockLMStudioService creates a test WebSocket server for unit testing. Handles LM Studio API endpoints as used in the test suite.

Types

type ChannelHandler

type ChannelHandler interface {
	// contains filtered or unexported methods
}

ChannelHandler is an interface for different types of channel handlers

type ChatMessage

type ChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

ChatMessage represents a single chat message, carrying the speaker role and the message content.

type LMStudioClient

type LMStudioClient struct {
	// contains filtered or unexported fields
}

LMStudioClient represents a client for LM Studio service

func NewLMStudioClient

func NewLMStudioClient(apiHost string, logger Logger) *LMStudioClient

NewLMStudioClient creates a new LM Studio client

func (*LMStudioClient) CheckStatus

func (c *LMStudioClient) CheckStatus() (bool, error)

CheckStatus checks if the LM Studio service is running and accessible

func (*LMStudioClient) Close

func (c *LMStudioClient) Close() error

Close closes all namespace connections

func (*LMStudioClient) ListAllLoadedModels

func (c *LMStudioClient) ListAllLoadedModels() ([]Model, error)

ListAllLoadedModels lists all loaded models (both LLM and embedding) available in LM Studio

func (*LMStudioClient) ListDownloadedModels

func (c *LMStudioClient) ListDownloadedModels() ([]Model, error)

ListDownloadedModels lists all downloaded models available in LM Studio

func (*LMStudioClient) ListLoadedEmbeddingModels

func (c *LMStudioClient) ListLoadedEmbeddingModels() ([]Model, error)

ListLoadedEmbeddingModels lists all loaded embedding models available in LM Studio

func (*LMStudioClient) ListLoadedLLMs

func (c *LMStudioClient) ListLoadedLLMs() ([]Model, error)

ListLoadedLLMs lists all loaded LLM models available in LM Studio

func (*LMStudioClient) LoadModel

func (c *LMStudioClient) LoadModel(modelIdentifier string) error

LoadModel loads a specified model in LM Studio

func (*LMStudioClient) LoadModelWithProgress added in v1.0.2

func (c *LMStudioClient) LoadModelWithProgress(
	loadTimeout time.Duration,
	modelIdentifier string,
	progressCallback func(progress float64, modelInfo *Model),
) error

LoadModelWithProgress loads a specified model in LM Studio with progress reporting and cancellation support

func (*LMStudioClient) LoadModelWithProgressContext added in v1.0.2

func (c *LMStudioClient) LoadModelWithProgressContext(
	ctx context.Context,
	loadTimeout time.Duration,
	modelIdentifier string,
	progressCallback func(progress float64, modelInfo *Model),
) error

LoadModelWithProgressContext loads a specified model in LM Studio with progress reporting and cancellation support via context

func (*LMStudioClient) NewModelLoadingChannel

func (c *LMStudioClient) NewModelLoadingChannel(namespace string, progressFn func(float64)) (*ModelLoadingChannel, error)

NewModelLoadingChannel creates a new channel for loading a model

func (*LMStudioClient) SendPrompt

func (c *LMStudioClient) SendPrompt(modelIdentifier string, prompt string, temperature float64, callback func(token string)) error

SendPrompt sends a prompt to the model and streams back the response

func (*LMStudioClient) UnloadAllModels

func (c *LMStudioClient) UnloadAllModels() error

UnloadAllModels unloads all currently loaded models in LM Studio

func (*LMStudioClient) UnloadModel

func (c *LMStudioClient) UnloadModel(modelIdentifier string) error

UnloadModel unloads a specified model in LM Studio

type LogLevel

type LogLevel int

LogLevel defines the level of logging

const (
	// LogLevelError only shows error messages
	LogLevelError LogLevel = iota
	// LogLevelWarn shows warning and error messages
	LogLevelWarn
	// LogLevelInfo shows info, warning, and error messages
	LogLevelInfo
	// LogLevelDebug shows debug, info, warning, and error messages
	LogLevelDebug
	// LogLevelTrace shows all messages including trace
	LogLevelTrace
)

type Logger

type Logger interface {
	SetLevel(level LogLevel)
	Error(format string, v ...interface{})
	Warn(format string, v ...interface{})
	Info(format string, v ...interface{})
	Debug(format string, v ...interface{})
	Trace(format string, v ...interface{})
}

Logger is the interface for logging, it can be overridden by the client code

type Model

type Model struct {
	// Common fields
	ModelKey          string `json:"modelKey"`
	Path              string `json:"path"`
	Type              string `json:"type"`
	Format            string `json:"format,omitempty"`
	Size              int64  `json:"sizeBytes,omitempty"`
	MaxContextLength  int    `json:"maxContextLength,omitempty"`
	DisplayName       string `json:"displayName,omitempty"`
	Architecture      string `json:"architecture,omitempty"`
	Vision            bool   `json:"vision,omitempty"`
	TrainedForToolUse bool   `json:"trainedForToolUse,omitempty"`

	// Fields specific to loaded models
	Identifier        string `json:"identifier,omitempty"`
	InstanceReference string `json:"instanceReference,omitempty"`
	ContextLength     int    `json:"contextLength,omitempty"`

	// Legacy fields kept for compatibility
	ModelType string `json:"modelType,omitempty"`
	Family    string `json:"family,omitempty"`
	ModelName string `json:"modelName,omitempty"`

	// Internal tracking - not from JSON
	IsLoaded bool `json:"-"`
}

Model represents a unified model structure for both downloaded and loaded models

type ModelLoadingChannel

type ModelLoadingChannel struct {
	// contains filtered or unexported fields
}

ModelLoadingChannel handles the channel-based loading of models

func (*ModelLoadingChannel) Close

func (ch *ModelLoadingChannel) Close() error

Close closes the model loading channel

func (*ModelLoadingChannel) CreateChannel

func (ch *ModelLoadingChannel) CreateChannel(modelKey string) error

CreateChannel sends a channelCreate message to create a model loading channel

func (*ModelLoadingChannel) WaitForResult

func (ch *ModelLoadingChannel) WaitForResult(timeout time.Duration) (*ModelLoadingResult, error)

WaitForResult waits for the model loading to complete

func (*ModelLoadingChannel) WaitForResultWithContext added in v1.0.2

func (ch *ModelLoadingChannel) WaitForResultWithContext(ctx context.Context, timeout time.Duration) (*ModelLoadingResult, error)

WaitForResultWithContext waits for the model loading to complete with cancellation support

type ModelLoadingResult

type ModelLoadingResult struct {
	Identifier string `json:"identifier"`
	Success    bool   `json:"success"`
}

type ModelStreamingChannel

type ModelStreamingChannel struct {
	// contains filtered or unexported fields
}

ModelStreamingChannel handles streaming messages from models

func NewModelStreamingChannel

func NewModelStreamingChannel(channelID int, conn *namespaceConnection, streamCh chan string, errCh chan error, doneCh chan struct{}) *ModelStreamingChannel

NewModelStreamingChannel creates a new channel for streaming responses

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL