Documentation ¶
Overview ¶
Package flow implements conversation flows for persistent conversational interactions. It provides:
- a stateful ConversationFlow with conversation history, GenAI-backed responses, and recovery support
- a common Coordinator interface with both an LLM-driven CoordinatorModule and a static, rule-based implementation, plus a small selector utility for choosing between them
- intake and feedback modules for building structured user profiles and tracking user feedback
- shared tools for saving profiles, generating personalized habit prompts, scheduling prompts, and transitioning conversation state
- state-management interfaces with concrete, store-backed implementations
- timer implementations for scheduled actions
Index ¶
- func Generate(ctx context.Context, p models.Prompt) (string, error)
- func GetDebugModeFromContext(ctx context.Context) bool
- func GetPhoneNumberContextKey() contextKey
- func GetPhoneNumberFromContext(ctx context.Context) (string, bool)
- func Register(pt models.PromptType, gen Generator)
- func RegisterWithDependencies(pt models.PromptType, gen StatefulGenerator, deps Dependencies)
- func SendDebugMessageIfEnabled(ctx context.Context, participantID string, msgService MessagingService, ...)
- func SetDebugModeInContext(ctx context.Context, debugMode bool) context.Context
- type BranchGenerator
- type ConversationFlow
- func NewConversationFlow(stateManager StateManager, genaiClient genai.ClientInterface, ...) *ConversationFlow
- func NewConversationFlowWithAllTools(stateManager StateManager, genaiClient genai.ClientInterface, ...) *ConversationFlow
- func NewConversationFlowWithAllToolsAndTimeouts(stateManager StateManager, genaiClient genai.ClientInterface, ...) *ConversationFlow
- func NewConversationFlowWithScheduler(stateManager StateManager, genaiClient genai.ClientInterface, ...) *ConversationFlow
- func (f *ConversationFlow) Generate(ctx context.Context, p models.Prompt) (string, error)
- func (f *ConversationFlow) LoadSystemPrompt() error
- func (f *ConversationFlow) LoadToolSystemPrompts() error
- func (f *ConversationFlow) ProcessResponse(ctx context.Context, participantID, response string) (string, error)
- func (f *ConversationFlow) SetChatHistoryLimit(limit int)
- func (f *ConversationFlow) SetDebugMode(enabled bool)
- func (f *ConversationFlow) SetDependencies(deps Dependencies)
- type ConversationFlowRecovery
- func (r *ConversationFlowRecovery) GetFlowType() models.FlowType
- func (r *ConversationFlowRecovery) RecoverParticipant(ctx context.Context, participantID string, participant interface{}, ...) error
- func (r *ConversationFlowRecovery) RecoverState(ctx context.Context, registry *recovery.RecoveryRegistry) error
- type ConversationHistory
- type ConversationMessage
- type Coordinator
- type CoordinatorChoice
- type CoordinatorModule
- type Dependencies
- type FeedbackModule
- func (fm *FeedbackModule) CancelPendingFeedback(ctx context.Context, participantID string)
- func (fm *FeedbackModule) ExecuteFeedbackTrackerWithHistoryAndConversation(ctx context.Context, participantID string, args map[string]interface{}, ...) (string, error)
- func (fm *FeedbackModule) GetToolDefinition() openai.ChatCompletionToolParam
- func (fm *FeedbackModule) IsSystemPromptLoaded() bool
- func (fm *FeedbackModule) LoadSystemPrompt() error
- func (fm *FeedbackModule) ScheduleFeedbackCollection(ctx context.Context, participantID string) error
- type GenAIGenerator
- type Generator
- type IntakeModule
- func (im *IntakeModule) ExecuteIntakeBotWithHistory(ctx context.Context, participantID string, args map[string]interface{}, ...) (string, error)
- func (im *IntakeModule) ExecuteIntakeBotWithHistoryAndConversation(ctx context.Context, participantID string, args map[string]interface{}, ...) (string, error)
- func (im *IntakeModule) LoadSystemPrompt() error
- type MessagingService
- type ProfileSaveTool
- func (pst *ProfileSaveTool) ExecuteProfileSave(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
- func (pst *ProfileSaveTool) GetOrCreateUserProfile(ctx context.Context, participantID string) (*UserProfile, error)
- func (pst *ProfileSaveTool) GetToolDefinition() openai.ChatCompletionToolParam
- type PromptGeneratorService
- type PromptGeneratorTool
- func (pgt *PromptGeneratorTool) ExecutePromptGenerator(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
- func (pgt *PromptGeneratorTool) ExecutePromptGeneratorWithHistory(ctx context.Context, participantID string, args map[string]interface{}, ...) (string, error)
- func (pgt *PromptGeneratorTool) GetToolDefinition() openai.ChatCompletionToolParam
- func (pgt *PromptGeneratorTool) LoadSystemPrompt() error
- type Registry
- func (r *Registry) Generate(ctx context.Context, p models.Prompt) (string, error)
- func (r *Registry) Get(pt models.PromptType) (Generator, bool)
- func (r *Registry) Register(pt models.PromptType, gen Generator)
- func (r *Registry) RegisterWithDependencies(pt models.PromptType, gen StatefulGenerator, deps Dependencies)
- type SchedulerTool
- func NewSchedulerTool(timer models.Timer, msgService MessagingService) *SchedulerTool
- func NewSchedulerToolComplete(timer models.Timer, msgService MessagingService, ...) *SchedulerTool
- func NewSchedulerToolWithGenAI(timer models.Timer, msgService MessagingService, ...) *SchedulerTool
- func NewSchedulerToolWithPrepTime(timer models.Timer, msgService MessagingService, ...) *SchedulerTool
- func NewSchedulerToolWithPrepTimeAndAutoFeedback(timer models.Timer, msgService MessagingService, ...) *SchedulerTool
- func NewSchedulerToolWithStateManager(timer models.Timer, msgService MessagingService, ...) *SchedulerTool
- type SimpleTimer
- func (t *SimpleTimer) Cancel(id string) error
- func (t *SimpleTimer) GetTimer(id string) (*models.TimerInfo, error)
- func (t *SimpleTimer) ListActive() []models.TimerInfo
- func (t *SimpleTimer) ScheduleAfter(delay time.Duration, fn func()) (string, error)
- func (t *SimpleTimer) ScheduleAt(when time.Time, fn func()) (string, error)
- func (t *SimpleTimer) ScheduleWithSchedule(schedule *models.Schedule, fn func()) (string, error)
- func (t *SimpleTimer) Stop()
- type StateManager
- type StateTransitionTool
- func (stt *StateTransitionTool) CancelPendingTransition(ctx context.Context, participantID string) error
- func (stt *StateTransitionTool) ExecuteStateTransition(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
- func (stt *StateTransitionTool) GetToolDefinition() openai.ChatCompletionToolParam
- type StatefulGenerator
- type StaticCoordinatorModule
- type StaticGenerator
- type StoreBasedStateManager
- func (sm *StoreBasedStateManager) GetCurrentState(ctx context.Context, participantID string, flowType models.FlowType) (models.StateType, error)
- func (sm *StoreBasedStateManager) GetStateData(ctx context.Context, participantID string, flowType models.FlowType, ...) (string, error)
- func (sm *StoreBasedStateManager) ResetState(ctx context.Context, participantID string, flowType models.FlowType) error
- func (sm *StoreBasedStateManager) SetCurrentState(ctx context.Context, participantID string, flowType models.FlowType, ...) error
- func (sm *StoreBasedStateManager) SetStateData(ctx context.Context, participantID string, flowType models.FlowType, ...) error
- func (sm *StoreBasedStateManager) TransitionState(ctx context.Context, participantID string, flowType models.FlowType, ...) error
- type UserProfile
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func Generate ¶
func Generate(ctx context.Context, p models.Prompt) (string, error)
Generate finds and runs the Generator for the prompt's type using the default registry.
func GetDebugModeFromContext ¶
func GetDebugModeFromContext(ctx context.Context) bool
GetDebugModeFromContext retrieves debug mode from the context.
func GetPhoneNumberContextKey ¶
func GetPhoneNumberContextKey() contextKey
GetPhoneNumberContextKey returns the context key used for storing phone numbers
func GetPhoneNumberFromContext ¶
func GetPhoneNumberFromContext(ctx context.Context) (string, bool)
GetPhoneNumberFromContext retrieves the phone number from the context.
func Register ¶
func Register(pt models.PromptType, gen Generator)
Register associates a PromptType with a Generator implementation in the default registry.
func RegisterWithDependencies ¶
func RegisterWithDependencies(pt models.PromptType, gen StatefulGenerator, deps Dependencies)
RegisterWithDependencies registers a stateful generator with dependencies in the default registry.
func SendDebugMessageIfEnabled ¶
func SendDebugMessageIfEnabled(ctx context.Context, participantID string, msgService MessagingService, message string)
SendDebugMessageIfEnabled sends a debug message if debug mode is enabled in context
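Taken together, these helpers let callers thread a debug flag through the context. A minimal sketch, assuming a placeholder module path for the import and an existing MessagingService implementation named msgService (both illustrative):

package example

import (
	"context"
	"log"

	"example.com/project/internal/flow" // placeholder module path
)

func notifyWithDebug(msgService flow.MessagingService) {
	// Turn debug mode on for everything derived from this context.
	ctx := flow.SetDebugModeInContext(context.Background(), true)

	// Downstream code can inspect the flag...
	if flow.GetDebugModeFromContext(ctx) {
		log.Println("debug mode enabled for this request")
	}

	// ...or emit a user-facing debug message only when the flag is set.
	flow.SendDebugMessageIfEnabled(ctx, "participant-123", msgService, "coordinator: routing to intake")
}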
Types ¶
type BranchGenerator ¶
type BranchGenerator struct{}
BranchGenerator formats branch-type prompts into a selectable list.
type ConversationFlow ¶
type ConversationFlow struct {
	// contains filtered or unexported fields
}
ConversationFlow implements a stateful conversation flow that maintains history and uses GenAI.
func NewConversationFlow ¶
func NewConversationFlow(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string) *ConversationFlow
NewConversationFlow creates a new conversation flow with dependencies.
func NewConversationFlowWithAllTools ¶
func NewConversationFlowWithAllTools(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string, msgService MessagingService, intakeBotPromptFile, promptGeneratorPromptFile, feedbackTrackerPromptFile string) *ConversationFlow
NewConversationFlowWithAllTools creates a new conversation flow with all tools for the 3-bot architecture.
func NewConversationFlowWithAllToolsAndTimeouts ¶
func NewConversationFlowWithAllToolsAndTimeouts(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string, msgService MessagingService, intakeBotPromptFile, promptGeneratorPromptFile, feedbackTrackerPromptFile, feedbackInitialTimeout, feedbackFollowupDelay string, schedulerPrepTimeMinutes int, autoFeedbackAfterPromptEnabled bool) *ConversationFlow
NewConversationFlowWithAllToolsAndTimeouts creates a new conversation flow with all tools and configurable feedback timeouts for the 3-bot architecture.
func NewConversationFlowWithScheduler ¶
func NewConversationFlowWithScheduler(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string, schedulerTool *SchedulerTool) *ConversationFlow
NewConversationFlowWithScheduler creates a new conversation flow with scheduler tool support.
func (*ConversationFlow) Generate ¶
func (f *ConversationFlow) Generate(ctx context.Context, p models.Prompt) (string, error)
Generate generates conversation responses based on user input and history.
func (*ConversationFlow) LoadSystemPrompt ¶
func (f *ConversationFlow) LoadSystemPrompt() error
LoadSystemPrompt loads the system prompt from the configured file.
func (*ConversationFlow) LoadToolSystemPrompts ¶
func (f *ConversationFlow) LoadToolSystemPrompts() error
LoadToolSystemPrompts loads system prompts for all modules.
func (*ConversationFlow) ProcessResponse ¶
func (f *ConversationFlow) ProcessResponse(ctx context.Context, participantID, response string) (string, error)
ProcessResponse handles participant responses and maintains conversation state. Returns the AI response that should be sent back to the user.
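A typical lifecycle is: construct the flow, load its prompt, then hand each inbound message to ProcessResponse. A minimal sketch, assuming a store.Store and a genai.ClientInterface are wired up elsewhere; the prompt path and import paths are placeholders:

package example

import (
	"context"

	"example.com/project/internal/flow"  // placeholder module paths
	"example.com/project/internal/genai"
	"example.com/project/internal/store"
)

func handleInbound(ctx context.Context, st store.Store, client genai.ClientInterface, participantID, msg string) (string, error) {
	stateManager := flow.NewStoreBasedStateManager(st)

	f := flow.NewConversationFlow(stateManager, client, "prompts/conversation_system.md") // hypothetical path
	if err := f.LoadSystemPrompt(); err != nil {
		return "", err
	}

	// Optional tuning: -1 = unlimited history, 0 = none, N = last N messages.
	f.SetChatHistoryLimit(20)

	// Returns the AI reply that should be sent back to the participant.
	return f.ProcessResponse(ctx, participantID, msg)
}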
func (*ConversationFlow) SetChatHistoryLimit ¶
func (f *ConversationFlow) SetChatHistoryLimit(limit int)
SetChatHistoryLimit sets the limit on the number of history messages sent to bot tools: -1 means no limit, 0 means no history, and a positive value limits the history to the last N messages.
func (*ConversationFlow) SetDebugMode ¶
func (f *ConversationFlow) SetDebugMode(enabled bool)
SetDebugMode enables or disables debug mode for user-facing debug messages.
func (*ConversationFlow) SetDependencies ¶
func (f *ConversationFlow) SetDependencies(deps Dependencies)
SetDependencies injects dependencies into the flow.
type ConversationFlowRecovery ¶
type ConversationFlowRecovery struct{}
ConversationFlowRecovery implements recovery for conversation flows
func NewConversationFlowRecovery ¶
func NewConversationFlowRecovery() *ConversationFlowRecovery
NewConversationFlowRecovery creates a new recovery handler for conversation flows
func (*ConversationFlowRecovery) GetFlowType ¶
func (r *ConversationFlowRecovery) GetFlowType() models.FlowType
GetFlowType returns the flow type this recoverable handles
func (*ConversationFlowRecovery) RecoverParticipant ¶
func (r *ConversationFlowRecovery) RecoverParticipant(ctx context.Context, participantID string, participant interface{}, registry *recovery.RecoveryRegistry) error
RecoverParticipant recovers state for a single conversation participant
func (*ConversationFlowRecovery) RecoverState ¶
func (r *ConversationFlowRecovery) RecoverState(ctx context.Context, registry *recovery.RecoveryRegistry) error
RecoverState performs recovery for all conversation participants
type ConversationHistory ¶
type ConversationHistory struct {
Messages []ConversationMessage `json:"messages"`
}
ConversationHistory represents the full conversation history for a participant.
type ConversationMessage ¶
type ConversationMessage struct {
	Role      string    `json:"role"`      // "user", "assistant", or "system"
	Content   string    `json:"content"`   // message content
	Timestamp time.Time `json:"timestamp"` // when the message was sent
}
ConversationMessage represents a single message in the conversation history.
type Coordinator ¶
type Coordinator interface {
	// LoadSystemPrompt loads any system prompt or configuration needed.
	LoadSystemPrompt() error

	// ProcessMessageWithHistory handles a user message and may update the provided
	// conversation history in-place. Returns the assistant's reply to send.
	ProcessMessageWithHistory(ctx context.Context, participantID, userMessage string, chatHistory []openai.ChatCompletionMessageParamUnion, conversationHistory *ConversationHistory) (string, error)
}
Coordinator defines the minimal behavior needed from a coordinator module. Both the LLM-driven CoordinatorModule and the static, rule-based coordinator should implement this interface to be swappable.
func NewCoordinator ¶
func NewCoordinator(choice CoordinatorChoice, stateManager StateManager, genaiClient any, msgService MessagingService, systemPromptFile string, schedulerTool *SchedulerTool, promptGeneratorTool *PromptGeneratorTool, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool) Coordinator
NewCoordinator selects and constructs a coordinator implementation without changing existing call sites. The LLM coordinator is used by default when the choice is unrecognized or required dependencies are nil.
type CoordinatorChoice ¶
type CoordinatorChoice string
CoordinatorChoice determines which coordinator to use.
const (
	CoordinatorChoiceLLM    CoordinatorChoice = "llm"
	CoordinatorChoiceStatic CoordinatorChoice = "static"
)
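A sketch of wiring the selector with the constructors documented on this page; timer, msgService, and client are assumed to exist already, prompt paths are hypothetical, and import paths are placeholders as in the earlier sketches:

package example

import (
	"example.com/project/internal/flow"   // placeholder module paths
	"example.com/project/internal/genai"
	"example.com/project/internal/models"
	"example.com/project/internal/store"
)

func buildCoordinator(st store.Store, timer models.Timer, msgService flow.MessagingService, client genai.ClientInterface, choice flow.CoordinatorChoice) flow.Coordinator {
	stateManager := flow.NewStoreBasedStateManager(st)

	schedulerTool := flow.NewSchedulerToolWithStateManager(timer, msgService, client, stateManager)
	promptTool := flow.NewPromptGeneratorTool(stateManager, client, msgService, "prompts/prompt_generator.md") // hypothetical path
	transitionTool := flow.NewStateTransitionTool(stateManager, timer)
	profileTool := flow.NewProfileSaveTool(stateManager)

	// CoordinatorChoiceStatic selects the rule-based coordinator; anything
	// unrecognized (or nil dependencies) falls back to the LLM coordinator.
	return flow.NewCoordinator(choice, stateManager, client, msgService,
		"prompts/coordinator_system.md", // hypothetical path
		schedulerTool, promptTool, transitionTool, profileTool)
}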
type CoordinatorModule ¶
type CoordinatorModule struct {
// contains filtered or unexported fields
}
CoordinatorModule provides functionality for the coordinator conversation state. The coordinator is responsible for routing conversations, using tools, and deciding when to transition to other states (intake, feedback).
func NewCoordinatorModule ¶
func NewCoordinatorModule(stateManager StateManager, genaiClient genai.ClientInterface, msgService MessagingService, systemPromptFile string, schedulerTool *SchedulerTool, promptGeneratorTool *PromptGeneratorTool, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool) *CoordinatorModule
NewCoordinatorModule creates a new coordinator module instance.
func (*CoordinatorModule) LoadSystemPrompt ¶
func (cm *CoordinatorModule) LoadSystemPrompt() error
LoadSystemPrompt loads the system prompt from the configured file.
func (*CoordinatorModule) ProcessMessageWithHistory ¶
func (cm *CoordinatorModule) ProcessMessageWithHistory(ctx context.Context, participantID, userMessage string, chatHistory []openai.ChatCompletionMessageParamUnion, conversationHistory *ConversationHistory) (string, error)
ProcessMessageWithHistory handles a user message and can modify the conversation history directly.
type Dependencies ¶
type Dependencies struct {
	StateManager StateManager
	Timer        models.Timer
}
Dependencies holds all dependencies that can be injected into flow generators.
type FeedbackModule ¶
type FeedbackModule struct {
// contains filtered or unexported fields
}
FeedbackModule provides LLM module functionality for tracking user feedback and updating profiles. This module handles the feedback conversation state and has access to shared tools.
func NewFeedbackModule ¶
func NewFeedbackModule(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool, schedulerTool *SchedulerTool) *FeedbackModule
NewFeedbackModule creates a new feedback module instance.
func NewFeedbackModuleWithTimeouts ¶
func NewFeedbackModuleWithTimeouts(stateManager StateManager, genaiClient genai.ClientInterface, systemPromptFile string, timer models.Timer, msgService MessagingService, feedbackInitialTimeout, feedbackFollowupDelay string, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool, schedulerTool *SchedulerTool) *FeedbackModule
NewFeedbackModuleWithTimeouts creates a new feedback module instance with timeout configuration.
func (*FeedbackModule) CancelPendingFeedback ¶
func (fm *FeedbackModule) CancelPendingFeedback(ctx context.Context, participantID string)
CancelPendingFeedback cancels any pending feedback timers for a participant
func (*FeedbackModule) ExecuteFeedbackTrackerWithHistoryAndConversation ¶
func (fm *FeedbackModule) ExecuteFeedbackTrackerWithHistoryAndConversation(ctx context.Context, participantID string, args map[string]interface{}, chatHistory []openai.ChatCompletionMessageParamUnion, conversationHistory *ConversationHistory) (string, error)
ExecuteFeedbackTrackerWithHistoryAndConversation executes the feedback tracking tool and can modify the conversation history directly.
func (*FeedbackModule) GetToolDefinition ¶
func (fm *FeedbackModule) GetToolDefinition() openai.ChatCompletionToolParam
GetToolDefinition returns the OpenAI tool definition for tracking feedback.
func (*FeedbackModule) IsSystemPromptLoaded ¶
func (fm *FeedbackModule) IsSystemPromptLoaded() bool
IsSystemPromptLoaded checks if the system prompt is loaded and not empty
func (*FeedbackModule) LoadSystemPrompt ¶
func (fm *FeedbackModule) LoadSystemPrompt() error
LoadSystemPrompt loads the system prompt from the configured file.
func (*FeedbackModule) ScheduleFeedbackCollection ¶
func (fm *FeedbackModule) ScheduleFeedbackCollection(ctx context.Context, participantID string) error
ScheduleFeedbackCollection schedules automatic feedback collection after a habit prompt. This should be called after a prompt generator session completes.
type GenAIGenerator ¶
GenAIGenerator uses a GenAI client to generate prompt bodies.
type IntakeModule ¶
type IntakeModule struct {
// contains filtered or unexported fields
}
IntakeModule provides LLM module functionality for conducting intake conversations and building user profiles. This module handles the intake conversation state and has access to shared tools.
func NewIntakeModule ¶
func NewIntakeModule(stateManager StateManager, genaiClient genai.ClientInterface, msgService MessagingService, systemPromptFile string, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool, schedulerTool *SchedulerTool, promptGeneratorTool *PromptGeneratorTool) *IntakeModule
NewIntakeModule creates a new intake module instance.
func (*IntakeModule) ExecuteIntakeBotWithHistory ¶
func (im *IntakeModule) ExecuteIntakeBotWithHistory(ctx context.Context, participantID string, args map[string]interface{}, chatHistory []openai.ChatCompletionMessageParamUnion) (string, error)
ExecuteIntakeBotWithHistory executes the intake bot tool with conversation history context.
func (*IntakeModule) ExecuteIntakeBotWithHistoryAndConversation ¶
func (im *IntakeModule) ExecuteIntakeBotWithHistoryAndConversation(ctx context.Context, participantID string, args map[string]interface{}, chatHistory []openai.ChatCompletionMessageParamUnion, conversationHistory *ConversationHistory) (string, error)
ExecuteIntakeBotWithHistoryAndConversation executes the intake bot tool and can modify the conversation history directly.
func (*IntakeModule) LoadSystemPrompt ¶
func (im *IntakeModule) LoadSystemPrompt() error
LoadSystemPrompt loads the system prompt from the configured file.
type MessagingService ¶
type MessagingService interface {
	ValidateAndCanonicalizeRecipient(recipient string) (string, error)
	SendMessage(ctx context.Context, to, message string) error
}
MessagingService defines the interface for messaging operations needed by the scheduler.
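Any transport satisfying these two methods can be plugged into the scheduler and the modules. A minimal, illustrative implementation that only logs outgoing messages (useful for local testing; the type name and import path are made up):

package example

import (
	"context"
	"errors"
	"log"
	"strings"

	"example.com/project/internal/flow" // placeholder module path
)

// logMessenger is a stand-in MessagingService that logs instead of sending.
type logMessenger struct{}

func (logMessenger) ValidateAndCanonicalizeRecipient(recipient string) (string, error) {
	r := strings.TrimSpace(recipient)
	if r == "" {
		return "", errors.New("empty recipient")
	}
	return r, nil
}

func (logMessenger) SendMessage(ctx context.Context, to, message string) error {
	log.Printf("send to=%s message=%q", to, message)
	return nil
}

// Compile-time check that logMessenger satisfies the interface.
var _ flow.MessagingService = logMessenger{}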
type ProfileSaveTool ¶
type ProfileSaveTool struct {
// contains filtered or unexported fields
}
ProfileSaveTool provides functionality for saving and updating user profiles. This tool is shared across all conversation modules (coordinator, intake, feedback).
func NewProfileSaveTool ¶
func NewProfileSaveTool(stateManager StateManager) *ProfileSaveTool
NewProfileSaveTool creates a new profile save tool instance.
func (*ProfileSaveTool) ExecuteProfileSave ¶
func (pst *ProfileSaveTool) ExecuteProfileSave(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
ExecuteProfileSave executes the profile save tool call.
func (*ProfileSaveTool) GetOrCreateUserProfile ¶
func (pst *ProfileSaveTool) GetOrCreateUserProfile(ctx context.Context, participantID string) (*UserProfile, error)
GetOrCreateUserProfile retrieves an existing user profile or creates a new one.
func (*ProfileSaveTool) GetToolDefinition ¶
func (pst *ProfileSaveTool) GetToolDefinition() openai.ChatCompletionToolParam
GetToolDefinition returns the OpenAI tool definition for saving user profiles.
type PromptGeneratorService ¶
type PromptGeneratorService interface {
ExecutePromptGenerator(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
}
PromptGeneratorService defines the interface for prompt generation operations.
type PromptGeneratorTool ¶
type PromptGeneratorTool struct {
// contains filtered or unexported fields
}
PromptGeneratorTool provides LLM tool functionality for generating personalized habit prompts based on user profiles.
func NewPromptGeneratorTool ¶
func NewPromptGeneratorTool(stateManager StateManager, genaiClient genai.ClientInterface, msgService MessagingService, systemPromptFile string) *PromptGeneratorTool
NewPromptGeneratorTool creates a new prompt generator tool instance.
func (*PromptGeneratorTool) ExecutePromptGenerator ¶
func (pgt *PromptGeneratorTool) ExecutePromptGenerator(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
ExecutePromptGenerator executes the prompt generator tool call.
func (*PromptGeneratorTool) ExecutePromptGeneratorWithHistory ¶
func (pgt *PromptGeneratorTool) ExecutePromptGeneratorWithHistory(ctx context.Context, participantID string, args map[string]interface{}, chatHistory []openai.ChatCompletionMessageParamUnion) (string, error)
ExecutePromptGeneratorWithHistory executes the prompt generator tool call with conversation history context.
func (*PromptGeneratorTool) GetToolDefinition ¶
func (pgt *PromptGeneratorTool) GetToolDefinition() openai.ChatCompletionToolParam
GetToolDefinition returns the OpenAI tool definition for generating habit prompts.
func (*PromptGeneratorTool) LoadSystemPrompt ¶
func (pgt *PromptGeneratorTool) LoadSystemPrompt() error
LoadSystemPrompt loads the system prompt from the configured file.
type Registry ¶
type Registry struct {
// contains filtered or unexported fields
}
Registry manages Generator implementations for different prompt types.
func (*Registry) Generate ¶
func (r *Registry) Generate(ctx context.Context, p models.Prompt) (string, error)
Generate finds and runs the Generator for the prompt's type.
func (*Registry) Get ¶
func (r *Registry) Get(pt models.PromptType) (Generator, bool)
Get retrieves the Generator for a given PromptType.
func (*Registry) Register ¶
func (r *Registry) Register(pt models.PromptType, gen Generator)
Register associates a PromptType with a Generator implementation.
func (*Registry) RegisterWithDependencies ¶
func (r *Registry) RegisterWithDependencies(pt models.PromptType, gen StatefulGenerator, deps Dependencies)
RegisterWithDependencies registers a stateful generator with its dependencies.
type SchedulerTool ¶
type SchedulerTool struct {
// contains filtered or unexported fields
}
SchedulerTool provides LLM tool functionality for scheduling daily habit prompts. This unified implementation uses a preparation time approach for both fixed and random scheduling.
func NewSchedulerTool ¶
func NewSchedulerTool(timer models.Timer, msgService MessagingService) *SchedulerTool
NewSchedulerTool creates a new scheduler tool instance with default 10-minute preparation time.
func NewSchedulerToolComplete ¶
func NewSchedulerToolComplete(timer models.Timer, msgService MessagingService, genaiClient genai.ClientInterface, stateManager StateManager, promptGenerator PromptGeneratorService) *SchedulerTool
NewSchedulerToolComplete creates a new scheduler tool instance with all dependencies.
func NewSchedulerToolWithGenAI ¶
func NewSchedulerToolWithGenAI(timer models.Timer, msgService MessagingService, genaiClient genai.ClientInterface) *SchedulerTool
NewSchedulerToolWithGenAI creates a new scheduler tool instance with GenAI support.
func NewSchedulerToolWithPrepTime ¶
func NewSchedulerToolWithPrepTime(timer models.Timer, msgService MessagingService, genaiClient genai.ClientInterface, stateManager StateManager, promptGenerator PromptGeneratorService, prepTimeMinutes int) *SchedulerTool
NewSchedulerToolWithPrepTime creates a new scheduler tool instance with custom preparation time.
func NewSchedulerToolWithPrepTimeAndAutoFeedback ¶
func NewSchedulerToolWithPrepTimeAndAutoFeedback(timer models.Timer, msgService MessagingService, genaiClient genai.ClientInterface, stateManager StateManager, promptGenerator PromptGeneratorService, prepTimeMinutes int, autoFeedbackEnabled bool) *SchedulerTool
NewSchedulerToolWithPrepTimeAndAutoFeedback creates a scheduler with explicit auto-feedback flag.
func NewSchedulerToolWithStateManager ¶
func NewSchedulerToolWithStateManager(timer models.Timer, msgService MessagingService, genaiClient genai.ClientInterface, stateManager StateManager) *SchedulerTool
NewSchedulerToolWithStateManager creates a new scheduler tool instance with state management.
func (*SchedulerTool) ExecuteScheduler ¶
func (st *SchedulerTool) ExecuteScheduler(ctx context.Context, participantID string, params models.SchedulerToolParams) (*models.ToolResult, error)
ExecuteScheduler executes the scheduler tool call.
func (*SchedulerTool) GetToolDefinition ¶
func (st *SchedulerTool) GetToolDefinition() openai.ChatCompletionToolParam
GetToolDefinition returns the OpenAI tool definition for the scheduler.
type SimpleTimer ¶
type SimpleTimer struct {
// contains filtered or unexported fields
}
SimpleTimer implements the Timer interface using Go's standard time package.
func (*SimpleTimer) Cancel ¶
func (t *SimpleTimer) Cancel(id string) error
Cancel cancels a scheduled function by ID.
func (*SimpleTimer) GetTimer ¶
func (t *SimpleTimer) GetTimer(id string) (*models.TimerInfo, error)
GetTimer returns information about a specific timer by ID.
func (*SimpleTimer) ListActive ¶
func (t *SimpleTimer) ListActive() []models.TimerInfo
ListActive returns information about all active timers.
func (*SimpleTimer) ScheduleAfter ¶
func (t *SimpleTimer) ScheduleAfter(delay time.Duration, fn func()) (string, error)
ScheduleAfter schedules a function to run after a delay.
func (*SimpleTimer) ScheduleAt ¶
func (t *SimpleTimer) ScheduleAt(when time.Time, fn func()) (string, error)
ScheduleAt schedules a function to run at a specific time.
func (*SimpleTimer) ScheduleWithSchedule ¶
func (t *SimpleTimer) ScheduleWithSchedule(schedule *models.Schedule, fn func()) (string, error)
ScheduleWithSchedule schedules a function to run according to a Schedule.
func (*SimpleTimer) Stop ¶
func (t *SimpleTimer) Stop()
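A short sketch of the scheduling calls against an existing *SimpleTimer (this page does not show a constructor, so the timer is taken as a parameter; the import path is a placeholder):

package example

import (
	"log"
	"time"

	"example.com/project/internal/flow" // placeholder module path
)

func demoScheduling(t *flow.SimpleTimer) error {
	// Run once after a delay; the returned ID can later be used to cancel.
	id, err := t.ScheduleAfter(30*time.Minute, func() {
		log.Println("nudge: time for your habit")
	})
	if err != nil {
		return err
	}

	// Inspect all active timers.
	for _, info := range t.ListActive() {
		log.Printf("active timer: %+v", info)
	}

	// Cancel the pending function if it is no longer needed.
	return t.Cancel(id)
}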
type StateManager ¶
type StateManager interface {
	// GetCurrentState retrieves the current state for a participant in a flow
	GetCurrentState(ctx context.Context, participantID string, flowType models.FlowType) (models.StateType, error)

	// SetCurrentState updates the current state for a participant in a flow
	SetCurrentState(ctx context.Context, participantID string, flowType models.FlowType, state models.StateType) error

	// GetStateData retrieves additional data associated with the participant's state
	GetStateData(ctx context.Context, participantID string, flowType models.FlowType, key models.DataKey) (string, error)

	// SetStateData stores additional data associated with the participant's state
	SetStateData(ctx context.Context, participantID string, flowType models.FlowType, key models.DataKey, value string) error

	// TransitionState transitions from one state to another
	TransitionState(ctx context.Context, participantID string, flowType models.FlowType, fromState, toState models.StateType) error

	// ResetState removes all state data for a participant in a flow
	ResetState(ctx context.Context, participantID string, flowType models.FlowType) error
}
StateManager defines the interface for managing flow state.
func NewMockStateManager ¶
func NewMockStateManager() StateManager
NewMockStateManager creates a mock state manager for testing
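A sketch of exercising the interface in a test via the mock. The concrete models.FlowType and models.StateType values live in the models package and are not shown on this page, so the conversions below assume string-backed types and are purely illustrative:

package example

import (
	"context"
	"testing"

	"example.com/project/internal/flow"   // placeholder module paths
	"example.com/project/internal/models"
)

func TestStateRoundTrip(t *testing.T) {
	ctx := context.Background()
	sm := flow.NewMockStateManager()

	// Illustrative values; real code would use the constants defined in models.
	flowType := models.FlowType("conversation")
	state := models.StateType("COORDINATOR")

	if err := sm.SetCurrentState(ctx, "participant-123", flowType, state); err != nil {
		t.Fatal(err)
	}

	got, err := sm.GetCurrentState(ctx, "participant-123", flowType)
	if err != nil || got != state {
		t.Fatalf("got state %v, err %v", got, err)
	}
}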
type StateTransitionTool ¶
type StateTransitionTool struct {
// contains filtered or unexported fields
}
StateTransitionTool provides functionality for transitioning between conversation states.
func NewStateTransitionTool ¶
func NewStateTransitionTool(stateManager StateManager, timer models.Timer) *StateTransitionTool
NewStateTransitionTool creates a new state transition tool instance.
func (*StateTransitionTool) CancelPendingTransition ¶
func (stt *StateTransitionTool) CancelPendingTransition(ctx context.Context, participantID string) error
CancelPendingTransition cancels any pending delayed state transition for a participant.
func (*StateTransitionTool) ExecuteStateTransition ¶
func (stt *StateTransitionTool) ExecuteStateTransition(ctx context.Context, participantID string, args map[string]interface{}) (string, error)
ExecuteStateTransition executes a state transition, either immediately or after a delay.
func (*StateTransitionTool) GetToolDefinition ¶
func (stt *StateTransitionTool) GetToolDefinition() openai.ChatCompletionToolParam
GetToolDefinition returns the OpenAI tool definition for state transitions.
type StatefulGenerator ¶
type StatefulGenerator interface {
	Generator

	// SetDependencies injects dependencies into the generator
	SetDependencies(deps Dependencies)
}
StatefulGenerator extends the Generator interface for flows that need state management.
type StaticCoordinatorModule ¶
type StaticCoordinatorModule struct {
// contains filtered or unexported fields
}
StaticCoordinatorModule is a rule-based, non-LLM coordinator that deterministically routes the conversation between COORDINATOR, INTAKE, and FEEDBACK, and calls tools directly without relying on model reasoning.
func NewStaticCoordinatorModule ¶
func NewStaticCoordinatorModule(stateManager StateManager, msgService MessagingService, schedulerTool *SchedulerTool, promptGeneratorTool *PromptGeneratorTool, stateTransitionTool *StateTransitionTool, profileSaveTool *ProfileSaveTool) *StaticCoordinatorModule
NewStaticCoordinatorModule creates a new static coordinator instance.
func (*StaticCoordinatorModule) LoadSystemPrompt ¶
func (sc *StaticCoordinatorModule) LoadSystemPrompt() error
LoadSystemPrompt is a no-op for the static coordinator (kept for interface parity).
func (*StaticCoordinatorModule) ProcessMessageWithHistory ¶
func (sc *StaticCoordinatorModule) ProcessMessageWithHistory(ctx context.Context, participantID, userMessage string, chatHistory []openai.ChatCompletionMessageParamUnion, conversationHistory *ConversationHistory) (string, error)
ProcessMessageWithHistory implements a deterministic state machine:
- If the profile is incomplete, transition to INTAKE and prompt for the missing fields.
- If the profile is complete, generate a prompt and transition to FEEDBACK.
type StoreBasedStateManager ¶
type StoreBasedStateManager struct {
// contains filtered or unexported fields
}
StoreBasedStateManager implements StateManager using a Store backend.
func NewStoreBasedStateManager ¶
func NewStoreBasedStateManager(st store.Store) *StoreBasedStateManager
NewStoreBasedStateManager creates a new StateManager backed by a Store.
func (*StoreBasedStateManager) GetCurrentState ¶
func (sm *StoreBasedStateManager) GetCurrentState(ctx context.Context, participantID string, flowType models.FlowType) (models.StateType, error)
GetCurrentState retrieves the current state for a participant in a flow.
func (*StoreBasedStateManager) GetStateData ¶
func (sm *StoreBasedStateManager) GetStateData(ctx context.Context, participantID string, flowType models.FlowType, key models.DataKey) (string, error)
GetStateData retrieves additional data associated with the participant's state.
func (*StoreBasedStateManager) ResetState ¶
func (sm *StoreBasedStateManager) ResetState(ctx context.Context, participantID string, flowType models.FlowType) error
ResetState removes all state data for a participant in a flow.
func (*StoreBasedStateManager) SetCurrentState ¶
func (sm *StoreBasedStateManager) SetCurrentState(ctx context.Context, participantID string, flowType models.FlowType, state models.StateType) error
SetCurrentState updates the current state for a participant in a flow.
func (*StoreBasedStateManager) SetStateData ¶
func (sm *StoreBasedStateManager) SetStateData(ctx context.Context, participantID string, flowType models.FlowType, key models.DataKey, value string) error
SetStateData stores additional data associated with the participant's state.
func (*StoreBasedStateManager) TransitionState ¶
func (sm *StoreBasedStateManager) TransitionState(ctx context.Context, participantID string, flowType models.FlowType, fromState, toState models.StateType) error
TransitionState transitions from one state to another.
type UserProfile ¶
type UserProfile struct {
	HabitDomain       string    `json:"habit_domain"`       // e.g., "healthy eating", "physical activity"
	MotivationalFrame string    `json:"motivational_frame"` // User's personal motivation
	PreferredTime     string    `json:"preferred_time"`     // Time window for nudging
	PromptAnchor      string    `json:"prompt_anchor"`      // When habit fits naturally
	AdditionalInfo    string    `json:"additional_info"`    // Any extra personalization info
	CreatedAt         time.Time `json:"created_at"`         // When profile was created
	UpdatedAt         time.Time `json:"updated_at"`         // Last profile update

	// Feedback tracking fields
	LastSuccessfulPrompt string `json:"last_successful_prompt,omitempty"` // Last prompt that worked
	LastBarrier          string `json:"last_barrier,omitempty"`           // Last reported barrier
	LastTweak            string `json:"last_tweak,omitempty"`             // Last requested modification
	SuccessCount         int    `json:"success_count"`                    // Number of successful completions
	TotalPrompts         int    `json:"total_prompts"`                    // Total prompts sent
}
UserProfile represents the structured user profile built by the intake bot
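The profile is a plain struct with JSON tags, so it round-trips through encoding/json. A small, illustrative construction (field values and the import path are made up):

package example

import (
	"encoding/json"
	"fmt"
	"log"
	"time"

	"example.com/project/internal/flow" // placeholder module path
)

func showProfile() {
	p := flow.UserProfile{
		HabitDomain:       "physical activity",
		MotivationalFrame: "wants more energy during the workday",
		PreferredTime:     "around 12:30",
		PromptAnchor:      "right after lunch",
		CreatedAt:         time.Now(),
		UpdatedAt:         time.Now(),
	}

	b, err := json.Marshal(p)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // {"habit_domain":"physical activity",...}
}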
Source Files ¶
- branch_gen.go
- conversation_flow.go
- conversation_flow_recovery.go
- coordinator_interface.go
- coordinator_module.go
- coordinator_module_static.go
- coordinator_selector.go
- feedback_module.go
- flow.go
- genai_gen.go
- intake_module.go
- profile_save_tool.go
- prompt_generator_tool.go
- scheduler_tool.go
- state.go
- state_manager.go
- state_transition_tool.go
- static_gen.go
- test_helpers.go
- timer.go