Fix go vet complaints

commit 481da97eca
parent 3fbf507799
@@ -10,7 +10,7 @@ import (
 // - useful for representing classes.
 type CategoricalAttribute struct {
     Name   string
-    values []string `json:"values"`
+    values []string
 }

 // MarshalJSON returns a JSON version of this Attribute.
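The hunk above drops a json tag from an unexported field. In current go vet, the structtag check warns when an unexported field carries a json tag, because encoding/json can never serialise that field anyway; whether that exact diagnostic prompted this change is an assumption, but the pattern matches. A minimal sketch, not golearn code, with hypothetical type names:

package main

import (
    "encoding/json"
    "fmt"
)

// before mirrors the removed pattern: an unexported field with a json tag.
// encoding/json never emits unexported fields, so the tag is dead weight, and
// newer vet versions report "struct field values has json tag but is not exported".
type before struct {
    Name   string
    values []string `json:"values"`
}

// after matches the hunk: same fields, tag dropped.
type after struct {
    Name   string
    values []string
}

func main() {
    b, _ := json.Marshal(before{Name: "colour", values: []string{"red", "green"}})
    a, _ := json.Marshal(after{Name: "colour", values: []string{"red", "green"}})
    fmt.Println(string(b), string(a)) // {"Name":"colour"} {"Name":"colour"} - identical either way
}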
@@ -2,6 +2,7 @@ package base

 import (
     "fmt"
+
     "github.com/gonum/matrix/mat64"
 )

@@ -9,7 +10,7 @@ func checkAllAttributesAreFloat(attrs []Attribute) error {
     // Check that all the attributes are float
     for _, a := range attrs {
         if _, ok := a.(*FloatAttribute); !ok {
-            fmt.Errorf("All []Attributes to this method must be FloatAttributes")
+            return fmt.Errorf("All []Attributes to this method must be FloatAttributes")
         }
     }
     return nil
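This hunk fixes what go vet's unusedresult check reports as "result of fmt.Errorf call not used": the error was constructed and then thrown away, so callers never saw the failure. A minimal sketch of the before and after, with a hypothetical checkPositive helper:

package main

import "fmt"

// checkPositive sketches the broken pattern: fmt.Errorf only builds an error
// value, so calling it without using the result silently swallows the failure.
func checkPositive(xs []float64) error {
    for _, x := range xs {
        if x <= 0 {
            // go vet: result of fmt.Errorf call not used
            // fmt.Errorf("value %f is not positive", x)
            return fmt.Errorf("value %f is not positive", x) // returning it is the actual fix
        }
    }
    return nil
}

func main() {
    if err := checkPositive([]float64{1.5, -2.0}); err != nil {
        fmt.Println(err) // value -2.000000 is not positive
    }
}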
@@ -157,7 +157,7 @@ func (l *LazilyFilteredInstances) transformNewToOldAttribute(as AttributeSpec) (
 func (l *LazilyFilteredInstances) Get(as AttributeSpec, row int) []byte {
     asOld, err := l.transformNewToOldAttribute(as)
     if err != nil {
-        panic(fmt.Sprintf("Attribute %s could not be resolved. (Error: %s)", as, err))
+        panic(fmt.Sprintf("Attribute %s could not be resolved. (Error: %s)", as.String(), err.Error()))
     }
     byteSeq := l.src.Get(asOld, row)
     if l.unfilteredMap[as.attr] {
@@ -177,7 +177,7 @@ func (l *LazilyFilteredInstances) MapOverRows(asv []AttributeSpec, mapFunc func(
     for i, a := range asv {
         old, err := l.transformNewToOldAttribute(a)
         if err != nil {
-            return fmt.Errorf("Couldn't fetch old Attribute: '%s'", a)
+            return fmt.Errorf("Couldn't fetch old Attribute: '%s'", a.String())
         }
         oldAsv[i] = old
     }
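The two hunks above replace %s arguments with explicit .String() and .Error() calls. A plausible reading is that vet's printf check could not see a usable String method at those call sites, as happens when String is declared on the pointer receiver and a value is passed; calling the method explicitly is always well-typed. A sketch under that assumption, with a hypothetical spec type standing in for AttributeSpec:

package main

import "fmt"

// spec is a stand-in whose String method is declared on the pointer receiver,
// so a plain spec value does not satisfy fmt.Stringer on its own.
type spec struct {
    name string
    pos  int
}

func (s *spec) String() string {
    return fmt.Sprintf("%s@%d", s.name, s.pos)
}

func main() {
    as := spec{name: "Sepal length", pos: 0}

    // go vet (printf check) would flag this: %s has arg as of wrong type main.spec
    // msg := fmt.Sprintf("could not resolve %s", as)

    // Calling the method explicitly, as the hunks above do, is always well-typed.
    msg := fmt.Sprintf("could not resolve %s", as.String())
    fmt.Println(msg) // could not resolve Sepal length@0
}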
@@ -69,7 +69,7 @@ func (f *FixedAttributeGroup) set(col int, row int, val []byte) {
     // Copy the value in
     copied := copy(f.alloc[offset:], val)
     if copied != f.size {
-        panic(fmt.Sprintf("set() terminated by only copying %d bytes", copied, f.size))
+        panic(fmt.Sprintf("set() terminated by only copying %d bytes, should be %d", copied, f.size))
     }

     row++
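Here the printf check complains about the argument count: two values but only one %d verb, so f.size was silently dropped from the message. A standalone sketch of the same fix, with made-up byte counts:

package main

import "fmt"

func main() {
    copied, want := 3, 8 // made-up byte counts

    // go vet: Sprintf call needs 1 arg but has 2 args
    // msg := fmt.Sprintf("set() terminated by only copying %d bytes", copied, want)

    // One verb per argument, matching the hunk above.
    msg := fmt.Sprintf("set() terminated by only copying %d bytes, should be %d", copied, want)
    fmt.Println(msg) // set() terminated by only copying 3 bytes, should be 8
}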
@@ -132,9 +132,9 @@ func deserializeAttributes(data []byte) []Attribute {

     // Define a JSON shim Attribute
     type JSONAttribute struct {
-        Type string          `json:type`
-        Name string          `json:name`
-        Attr json.RawMessage `json:attr`
+        Type string          `json:"type"`
+        Name string          `json:"name"`
+        Attr json.RawMessage `json:"attr"`
     }

     var ret []Attribute
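These three tags are malformed rather than merely unused: the structtag check requires the quoted key:"value" form that reflect.StructTag.Get can parse, and encoding/json ignores an unquoted value entirely, falling back to the field name. A small sketch showing the observable difference:

package main

import (
    "encoding/json"
    "fmt"
)

type badTag struct {
    // go vet (structtag): bad syntax for struct tag value
    Type string `json:type`
}

type goodTag struct {
    Type string `json:"type"` // the quoted form the hunk switches to
}

func main() {
    b, _ := json.Marshal(badTag{Type: "categorical"})
    g, _ := json.Marshal(goodTag{Type: "categorical"})
    fmt.Println(string(b)) // {"Type":"categorical"} - malformed tag is ignored, field name leaks through
    fmt.Println(string(g)) // {"type":"categorical"}
}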
@@ -3,6 +3,7 @@ package clustering

 import (
     "fmt"
+
     "github.com/sjwhitworth/golearn/base"
     "github.com/sjwhitworth/golearn/metrics/pairwise"
 )
@@ -66,7 +67,7 @@ func (ref ClusterMap) Equals(other ClusterMap) (bool, error) {
         if c3, ok := clusterIdMap[c2]; ok { // what's our correspondance with c2?
             if c1 != c3 {
                 // if c1 is not what we've currently got, error out
-                return false, fmt.Errorf("ref point %d (cluster %d) is assigned to a different cluster (%d) in ref %s", p, c2, c1, clusterIdMap)
+                return false, fmt.Errorf("ref point %d (cluster %d) is assigned to a different cluster (%d) in ref %+v", p, c2, c1, clusterIdMap)
             }
         } else {
             clusterIdMap[c2] = c1
@@ -97,7 +98,7 @@ func (ref ClusterMap) Equals(other ClusterMap) (bool, error) {
     for cOld := range other {
         cNew := clusterIdMap[cOld]
         if !arraysEqual(ref[cNew], other[cOld]) {
-            return false, fmt.Errorf("Re-labelled cluster %d => %d doesn't contain the same points (%s, %s)", cOld, cNew, ref[cNew], other[cOld])
+            return false, fmt.Errorf("Re-labelled cluster %d => %d doesn't contain the same points (%d, %d)", cOld, cNew, ref[cNew], other[cOld])
        }
         newMap[cNew] = other[cOld]
     }
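Both ClusterMap hunks swap %s for verbs that match the argument types: %s on a map or an []int is flagged by the printf check, while %+v accepts any value and %d formats each element of an integer slice. A minimal sketch with made-up data:

package main

import "fmt"

func main() {
    clusterIdMap := map[int]int{1: 2, 2: 1} // made-up cluster relabelling
    points := []int{4, 7, 9}                // made-up point indices

    // go vet (printf check): %s has args of wrong type map[int]int and []int
    // fmt.Printf("%s %s\n", clusterIdMap, points)

    // %+v prints any value; %d applies element-wise to an integer slice.
    fmt.Printf("%+v %d\n", clusterIdMap, points) // map[1:2 2:1] [4 7 9]
}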
@@ -2,8 +2,9 @@ package filters

 import (
     "fmt"
-    "github.com/sjwhitworth/golearn/base"
     "math"
+
+    "github.com/sjwhitworth/golearn/base"
 )

 // BinningFilter does equal-width binning for numeric
@@ -31,7 +32,7 @@ func NewBinningFilter(d base.FixedDataGrid, bins int) *BinningFilter {
 }

 func (b *BinningFilter) String() string {
-    return fmt.Sprintf("BinningFilter(%d Attribute(s), %d bin(s)", b.attrs, b.bins)
+    return fmt.Sprintf("BinningFilter(%d Attribute(s), %d bin(s)", len(b.attrs), b.bins)
 }

 // Train computes and stores the bin values
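The BinningFilter change is the same diagnostic from another angle: assuming b.attrs is a map (as it appears to be in golearn), %d cannot format it, and len() yields the count the message was meant to report. A sketch under that assumption, with a hypothetical describe helper:

package main

import "fmt"

// describe is a hypothetical helper mimicking the String method above,
// assuming the attribute set is a map: %d cannot format the map itself,
// but len(attrs) gives the intended count.
func describe(attrs map[string]bool, bins int) string {
    // go vet: %d has arg attrs of wrong type map[string]bool
    // return fmt.Sprintf("BinningFilter(%d Attribute(s), %d bin(s))", attrs, bins)
    return fmt.Sprintf("BinningFilter(%d Attribute(s), %d bin(s))", len(attrs), bins)
}

func main() {
    attrs := map[string]bool{"Sepal length": true, "Sepal width": true}
    fmt.Println(describe(attrs, 5)) // BinningFilter(2 Attribute(s), 5 bin(s))
}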
@@ -4,11 +4,12 @@ import (
     "fmt"
 )

+// FrequencyTableEntry is a struct holding a value and a map of frequency
 type FrequencyTableEntry struct {
     Value     float64
     Frequency map[string]int
 }

 func (t *FrequencyTableEntry) String() string {
-    return fmt.Sprintf("%.2f %s", t.Value, t.Frequency)
+    return fmt.Sprintf("%.2f %+v", t.Value, t.Frequency)
 }
@@ -1,20 +1,21 @@
 package meta

 import (
+    "math/rand"
+    "testing"
+    "time"
+
     "github.com/sjwhitworth/golearn/base"
     "github.com/sjwhitworth/golearn/evaluation"
     "github.com/sjwhitworth/golearn/filters"
     "github.com/sjwhitworth/golearn/trees"
     . "github.com/smartystreets/goconvey/convey"
-    "math/rand"
-    "testing"
-    "time"
 )

 func BenchmarkBaggingRandomForestFit(t *testing.B) {
     inst, err := base.ParseCSVToInstances("../examples/datasets/iris_headers.csv", true)
     if err != nil {
-        t.Fatal("Unable to parse CSV to instances: %s", err.Error())
+        t.Fatalf("Unable to parse CSV to instances: %s", err.Error())
     }

     rand.Seed(time.Now().UnixNano())
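The benchmark fixes are the printf check applied to the testing package: t.Fatal is Println-like and never interprets format verbs, so vet reports a possible formatting directive in its argument; t.Fatalf is the formatting variant. A sketch of the pattern as a stand-alone test file, not the golearn benchmark itself:

package meta_test

import "testing"

// TestFatalfSketch shows the class of complaint fixed above: t.Fatal never
// interprets format verbs, so vet flags the stray %s; t.Fatalf formats as
// expected. err is nil here so the sketch passes; imagine it came from
// base.ParseCSVToInstances.
func TestFatalfSketch(t *testing.T) {
    var err error
    if err != nil {
        // go vet: Fatal call has possible formatting directive %s
        // t.Fatal("Unable to parse CSV to instances: %s", err.Error())
        t.Fatalf("Unable to parse CSV to instances: %s", err.Error())
    }
}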
@@ -39,7 +40,7 @@ func BenchmarkBaggingRandomForestFit(t *testing.B) {
 func BenchmarkBaggingRandomForestPredict(t *testing.B) {
     inst, err := base.ParseCSVToInstances("../examples/datasets/iris_headers.csv", true)
     if err != nil {
-        t.Fatal("Unable to parse CSV to instances: %s", err.Error())
+        t.Fatalf("Unable to parse CSV to instances: %s", err.Error())
     }

     rand.Seed(time.Now().UnixNano())