Documentation
Index ¶
- func Accuracy(y, t matrix.Matrix) float64
- func Perplexity(loss float64, count int) float64
- func Random(trainSize, batchSize int, s ...randv2.Source) []int
- func Range(i, batchSize int) (int, int)
- func Time(xs matrix.Matrix) []matrix.Matrix
- type Input
- type Model
- type Optimizer
- type RNNLM
- type RNNLMInput
- type RNNLMTrainer
- type Seq2Seq
- type Seq2SeqInput
- type Seq2SeqTrainer
- type Trainer
Examples ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func Accuracy ¶
func Accuracy(y, t matrix.Matrix) float64
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/math/matrix"
	"github.com/itsubaki/neu/trainer"
)

func main() {
	// data
	y0 := matrix.New([]float64{0, 1}, []float64{1, 0}, []float64{1, 0})
	y1 := matrix.New([]float64{0, 1}, []float64{1, 0}, []float64{0, 1})
	y2 := matrix.New([]float64{0, 1}, []float64{0, 1}, []float64{0, 1})
	y3 := matrix.New([]float64{1, 0}, []float64{0, 1}, []float64{0, 1})
	t := matrix.New([]float64{1, 0}, []float64{0, 1}, []float64{0, 1})

	fmt.Println(trainer.Accuracy(y0, t))
	fmt.Println(trainer.Accuracy(y1, t))
	fmt.Println(trainer.Accuracy(y2, t))
	fmt.Println(trainer.Accuracy(y3, t))
}
Output:
0
0.3333333333333333
0.6666666666666666
1
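The outputs above are consistent with accuracy computed as the fraction of rows whose argmax matches the label's argmax. A minimal sketch of that interpretation over plain slices (an illustration, not the package's implementation; argmaxRow and accuracy are hypothetical helpers):

package main

import "fmt"

// argmaxRow returns the index of the largest value in a row (hypothetical helper).
func argmaxRow(row []float64) int {
	max, arg := row[0], 0
	for i, v := range row {
		if v > max {
			max, arg = v, i
		}
	}
	return arg
}

// accuracy counts rows whose argmax matches the label's argmax.
func accuracy(y, t [][]float64) float64 {
	var hit int
	for i := range y {
		if argmaxRow(y[i]) == argmaxRow(t[i]) {
			hit++
		}
	}
	return float64(hit) / float64(len(y))
}

func main() {
	t := [][]float64{{1, 0}, {0, 1}, {0, 1}}
	y1 := [][]float64{{0, 1}, {1, 0}, {0, 1}}

	fmt.Println(accuracy(y1, t)) // 0.3333333333333333, matching trainer.Accuracy(y1, t) above
}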
func Perplexity ¶
func Perplexity(loss float64, count int) float64
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/trainer"
)

func main() {
	fmt.Println(trainer.Perplexity(1.0, 2))
	fmt.Println(trainer.Perplexity(1.0, 1))
}
Output:
1.6487212707001282
2.718281828459045
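These values match exp(loss / count): exp(0.5) ≈ 1.6487 and exp(1) ≈ 2.7183. A minimal sketch of that formula (an illustration consistent with the example output, not necessarily the package's exact implementation):

package main

import (
	"fmt"
	"math"
)

// perplexity computes exp(loss / count), the usual per-token perplexity.
func perplexity(loss float64, count int) float64 {
	return math.Exp(loss / float64(count))
}

func main() {
	fmt.Println(perplexity(1.0, 2)) // 1.6487212707001282
	fmt.Println(perplexity(1.0, 1)) // 2.718281828459045
}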
func Random ¶
func Random(trainSize, batchSize int, s ...randv2.Source) []int
Random returns batchSize random indices in [0, trainSize).
Example ¶
package main

import (
	"fmt"
	"sort"

	"github.com/itsubaki/neu/math/matrix"
	"github.com/itsubaki/neu/math/rand"
	"github.com/itsubaki/neu/trainer"
)

func main() {
	x := matrix.New([]float64{0, 1}, []float64{0, 2}, []float64{0, 3}, []float64{0, 4})

	s := rand.Const(1)
	r1 := trainer.Random(len(x), 1, s)
	r2 := trainer.Random(len(x), 2, s)
	r3 := trainer.Random(len(x), 3, s)
	r4 := trainer.Random(len(x), 4, s)
	r5 := trainer.Random(1, 1)

	sort.Ints(r1)
	sort.Ints(r2)
	sort.Ints(r3)
	sort.Ints(r4)

	fmt.Println(r1)
	fmt.Println(r2)
	fmt.Println(r3)
	fmt.Println(r4)
	fmt.Println(r5)
}
Output:
[3]
[0 2]
[0 1 2]
[0 1 2 3]
[0]
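In the example, each call returns batchSize indices in [0, trainSize), and the indices within a call are distinct. A minimal sketch consistent with that behavior, using math/rand/v2 directly (the sampling strategy is an assumption drawn from the output, not the package's actual implementation):

package main

import (
	"fmt"
	randv2 "math/rand/v2"
)

// random draws batchSize distinct indices in [0, trainSize).
func random(trainSize, batchSize int, s randv2.Source) []int {
	g := randv2.New(s)
	seen := make(map[int]bool)
	out := make([]int, 0, batchSize)
	for len(out) < batchSize {
		n := g.IntN(trainSize)
		if seen[n] {
			continue
		}
		seen[n] = true
		out = append(out, n)
	}
	return out
}

func main() {
	s := randv2.NewPCG(1, 1)
	fmt.Println(random(4, 2, s)) // two distinct indices in [0, 4)
}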
func Time ¶
func Time(xs matrix.Matrix) []matrix.Matrix
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/math/matrix"
	"github.com/itsubaki/neu/trainer"
)

func main() {
	xs := matrix.New(
		// (N, T) (2, 3)
		[]float64{1, 2, 3},
		[]float64{4, 5, 6},
	)

	// (T, N, 1) (3, 2, 1)
	txs := trainer.Time(xs)
	for _, tx := range txs {
		fmt.Println(tx)
	}
	fmt.Println()
}
Output:
[[1] [4]]
[[2] [5]]
[[3] [6]]
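As the comments in the example indicate, Time reshapes an (N, T) input into T matrices of shape (N, 1), one per time step. A minimal sketch of that reshaping over plain slices (an illustration, not the package's implementation; timeSplit is a hypothetical helper):

package main

import "fmt"

// timeSplit turns an (N, T) matrix into T column matrices of shape (N, 1).
func timeSplit(xs [][]float64) [][][]float64 {
	N, T := len(xs), len(xs[0])
	out := make([][][]float64, T)
	for t := 0; t < T; t++ {
		col := make([][]float64, N)
		for n := 0; n < N; n++ {
			col[n] = []float64{xs[n][t]}
		}
		out[t] = col
	}
	return out
}

func main() {
	xs := [][]float64{{1, 2, 3}, {4, 5, 6}} // (N, T) = (2, 3)
	for _, tx := range timeSplit(xs) {
		fmt.Println(tx) // [[1] [4]], then [[2] [5]], then [[3] [6]]
	}
}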
Types ¶
type RNNLMInput ¶
type RNNLMTrainer ¶
type RNNLMTrainer struct {
	Model     RNNLM
	Optimizer Optimizer
	// contains filtered or unexported fields
}
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/layer"
	"github.com/itsubaki/neu/math/matrix"
	"github.com/itsubaki/neu/model"
	"github.com/itsubaki/neu/optimizer"
	"github.com/itsubaki/neu/trainer"
)

type TestRNNLM struct{}

func (m *TestRNNLM) Predict(xs []matrix.Matrix, opts ...layer.Opts) []matrix.Matrix {
	return nil
}

func (m *TestRNNLM) Forward(xs, ts []matrix.Matrix) []matrix.Matrix {
	return []matrix.Matrix{{{1}}}
}

func (m *TestRNNLM) Backward() []matrix.Matrix { return nil }
func (m *TestRNNLM) Layers() []model.TimeLayer { return nil }
func (m *TestRNNLM) Params() [][]matrix.Matrix { return nil }
func (m *TestRNNLM) Grads() [][]matrix.Matrix  { return nil }

func (m *TestRNNLM) SetParams(p [][]matrix.Matrix) {
	for i, l := range m.Layers() {
		l.SetParams(p[i]...)
	}
}

func main() {
	tr := trainer.NewRNNLM(&TestRNNLM{}, &optimizer.SGD{
		LearningRate: 0.1,
	})

	tr.Fit(&trainer.RNNLMInput{
		Train:      []int{0, 1, 2, 3, 4, 5},
		TrainLabel: []int{1, 2, 3, 4, 5, 6},
		Epochs:     3,
		BatchSize:  1,
		TimeSize:   2,
		Verbose: func(epoch, j int, perplexity float64, m trainer.RNNLM) {
			fmt.Printf("%d, %d: %T\n", epoch, j, m)
		},
	})
}
Output:
0, 0: *trainer_test.TestRNNLM
0, 1: *trainer_test.TestRNNLM
0, 2: *trainer_test.TestRNNLM
1, 0: *trainer_test.TestRNNLM
1, 1: *trainer_test.TestRNNLM
1, 2: *trainer_test.TestRNNLM
2, 0: *trainer_test.TestRNNLM
2, 1: *trainer_test.TestRNNLM
2, 2: *trainer_test.TestRNNLM
func NewRNNLM ¶
func NewRNNLM(m RNNLM, o Optimizer) *RNNLMTrainer
func (*RNNLMTrainer) Fit ¶
func (tr *RNNLMTrainer) Fit(in *RNNLMInput)
type Seq2SeqInput ¶
type Seq2SeqTrainer ¶
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/model"
	"github.com/itsubaki/neu/optimizer"
	"github.com/itsubaki/neu/trainer"
	"github.com/itsubaki/neu/weight"
)

func main() {
	tr := trainer.NewSeq2Seq(
		model.NewSeq2Seq(&model.RNNLMConfig{
			VocabSize:   13,
			WordVecSize: 16,
			HiddenSize:  128,
			WeightInit:  weight.Xavier,
		}),
		&optimizer.Adam{
			Alpha: 0.001,
			Beta1: 0.9,
			Beta2: 0.999,
		})

	tr.Fit(&trainer.Seq2SeqInput{
		Train:      [][]int{{0, 1, 2, 3, 4, 5}},
		TrainLabel: [][]int{{1, 2, 3, 4, 5, 6}},
		Epochs:     3,
		BatchSize:  1,
		Verbose: func(epoch, j int, loss float64, m trainer.Seq2Seq) {
			fmt.Printf("%d: %T\n", epoch, m)
		},
	})
}
Output:
0: *model.Seq2Seq
1: *model.Seq2Seq
2: *model.Seq2Seq
Example (Rand) ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/math/rand"
	"github.com/itsubaki/neu/model"
	"github.com/itsubaki/neu/optimizer"
	"github.com/itsubaki/neu/trainer"
	"github.com/itsubaki/neu/weight"
)

func main() {
	tr := trainer.NewSeq2Seq(
		model.NewSeq2Seq(&model.RNNLMConfig{
			VocabSize:   13,
			WordVecSize: 16,
			HiddenSize:  128,
			WeightInit:  weight.Xavier,
		}),
		&optimizer.Adam{
			Alpha: 0.001,
			Beta1: 0.9,
			Beta2: 0.999,
		})

	s := rand.Const(1)
	tr.Fit(&trainer.Seq2SeqInput{
		Train:      [][]int{{0, 1, 2, 3, 4, 5}},
		TrainLabel: [][]int{{1, 2, 3, 4, 5, 6}},
		Epochs:     3,
		BatchSize:  1,
		Verbose: func(epoch, j int, loss float64, m trainer.Seq2Seq) {
			fmt.Printf("%d: %T\n", epoch, m)
		},
	}, s)
}
Output:
0: *model.Seq2Seq
1: *model.Seq2Seq
2: *model.Seq2Seq
func NewSeq2Seq ¶
func NewSeq2Seq(m Seq2Seq, o Optimizer) *Seq2SeqTrainer
func (*Seq2SeqTrainer) Fit ¶
func (tr *Seq2SeqTrainer) Fit(in *Seq2SeqInput, s ...randv2.Source)
type Trainer ¶
func (*Trainer) Fit ¶
Fit trains the model using the provided optimizer.
Example ¶
package main

import (
	"fmt"

	"github.com/itsubaki/neu/layer"
	"github.com/itsubaki/neu/math/matrix"
	"github.com/itsubaki/neu/model"
	"github.com/itsubaki/neu/optimizer"
	"github.com/itsubaki/neu/trainer"
)

type TestModel struct{}

func (m *TestModel) Predict(x matrix.Matrix, opts ...layer.Opts) matrix.Matrix {
	return nil
}

func (m *TestModel) Forward(x, t matrix.Matrix) matrix.Matrix {
	return matrix.New([]float64{1})
}

func (m *TestModel) Backward() matrix.Matrix   { return nil }
func (m *TestModel) Layers() []model.Layer     { return nil }
func (m *TestModel) Params() [][]matrix.Matrix { return nil }
func (m *TestModel) Grads() [][]matrix.Matrix  { return nil }

func (m *TestModel) SetParams(p [][]matrix.Matrix) {
	for i, l := range m.Layers() {
		l.SetParams(p[i]...)
	}
}

func main() {
	tr := trainer.New(&TestModel{}, &optimizer.SGD{
		LearningRate: 0.1,
	})

	tr.Fit(&trainer.Input{
		Train:      matrix.New([]float64{0.5, 0.5}, []float64{1, 0}, []float64{0, 1}),
		TrainLabel: matrix.New([]float64{1, 0}, []float64{0, 1}, []float64{0, 1}),
		Epochs:     3,
		BatchSize:  1,
		Verbose: func(epoch, j int, loss float64, m trainer.Model) {
			fmt.Printf("%v,%v: %T\n", epoch, j, m)
		},
	})
}
Output:
0,0: *trainer_test.TestModel
0,1: *trainer_test.TestModel
0,2: *trainer_test.TestModel
1,0: *trainer_test.TestModel
1,1: *trainer_test.TestModel
1,2: *trainer_test.TestModel
2,0: *trainer_test.TestModel
2,1: *trainer_test.TestModel
2,2: *trainer_test.TestModel