1
0
mirror of https://github.com/sjwhitworth/golearn.git synced 2025-04-30 13:48:57 +08:00
golearn/knn/knn.go

160 lines
4.1 KiB
Go
Raw Normal View History

// Package knn implements a K Nearest Neighbors object, capable of both classification
// and regression. It accepts data in the form of a slice of float64s, which are then reshaped
// into an X by Y matrix.
2014-01-04 19:31:33 +00:00
package knn
2013-12-28 18:41:13 +00:00
import (
2014-05-03 23:08:43 +01:00
"github.com/gonum/matrix/mat64"
2014-04-30 22:13:07 +08:00
base "github.com/sjwhitworth/golearn/base"
pairwiseMetrics "github.com/sjwhitworth/golearn/metrics/pairwise"
2014-04-30 22:13:07 +08:00
util "github.com/sjwhitworth/golearn/utilities"
2014-04-30 08:57:13 +01:00
)
2013-12-28 18:41:13 +00:00
2014-05-03 23:08:43 +01:00
// A KNN Classifier. Consists of a data matrix, associated labels in the same order as the matrix, and a distance function.
// The accepted distance functions at this time are 'euclidean' and 'manhattan'.
type KNNClassifier struct {
	base.BaseEstimator
	// Labels holds one class label per row of the embedded data matrix.
	Labels []string
	// DistanceFunc selects the metric used by Predict ("euclidean" or "manhattan").
	DistanceFunc string
}
2014-05-03 23:08:43 +01:00
// NewKnnClassifier returns a new classifier configured with the given
// distance function name ("euclidean" or "manhattan").
func NewKnnClassifier(distfunc string) *KNNClassifier {
	return &KNNClassifier{DistanceFunc: distfunc}
}
func (KNN *KNNClassifier) Fit(labels []string, numbers []float64, rows int, cols int) {
2014-05-03 23:08:43 +01:00
if rows != len(labels) {
panic(mat64.ErrShape)
2014-05-03 23:08:43 +01:00
}
2013-12-28 18:41:13 +00:00
2014-05-03 23:08:43 +01:00
KNN.Data = mat64.NewDense(rows, cols, numbers)
2013-12-28 18:41:13 +00:00
KNN.Labels = labels
}
2014-05-01 19:56:30 +01:00
// Returns a classification for the vector, based on a vector input, using the KNN algorithm.
2014-05-03 23:08:43 +01:00
// See http://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
func (KNN *KNNClassifier) Predict(vector []float64, K int) string {
2013-12-28 18:41:13 +00:00
2014-05-03 23:08:43 +01:00
convertedVector := util.FloatsToMatrix(vector)
// Get the number of rows
rows, _ := KNN.Data.Dims()
2013-12-28 18:41:13 +00:00
rownumbers := make(map[int]float64)
2014-01-05 00:23:31 +00:00
labels := make([]string, 0)
maxmap := make(map[string]int)
2013-12-28 18:41:13 +00:00
2014-05-03 23:08:43 +01:00
// Check what distance function we are using
switch KNN.DistanceFunc {
case "euclidean":
{
euclidean := pairwiseMetrics.NewEuclidean()
for i := 0; i < rows; i++ {
row := KNN.Data.RowView(i)
rowMat := util.FloatsToMatrix(row)
distance := euclidean.Distance(rowMat, convertedVector)
rownumbers[i] = distance
}
}
case "manhattan":
{
manhattan := pairwiseMetrics.NewEuclidean()
for i := 0; i < rows; i++ {
row := KNN.Data.RowView(i)
rowMat := util.FloatsToMatrix(row)
distance := manhattan.Distance(rowMat, convertedVector)
rownumbers[i] = distance
}
}
2013-12-28 18:41:13 +00:00
}
sorted := util.SortIntMap(rownumbers)
2013-12-28 18:41:13 +00:00
values := sorted[:K]
for _, elem := range values {
2014-05-03 23:08:43 +01:00
// It's when we access this map
2013-12-28 18:41:13 +00:00
labels = append(labels, KNN.Labels[elem])
if _, ok := maxmap[KNN.Labels[elem]]; ok {
maxmap[KNN.Labels[elem]] += 1
} else {
maxmap[KNN.Labels[elem]] = 1
}
2013-12-28 18:41:13 +00:00
}
sortedlabels := util.SortStringMap(maxmap)
label := sortedlabels[0]
2014-05-03 23:08:43 +01:00
return label
2014-04-30 08:57:13 +01:00
}
2014-05-04 09:52:13 +01:00
// A KNN Regressor. Consists of a data matrix, associated result variables in the same order as the matrix, and a name.
type KNNRegressor struct {
	base.BaseEstimator
	// Values holds one target value per row of the embedded data matrix.
	Values []float64
	// DistanceFunc selects the metric used by Predict ("euclidean" or "manhattan").
	DistanceFunc string
}
// Mints a new classifier.
func NewKnnRegressor(distfunc string) *KNNRegressor {
2014-05-04 09:52:13 +01:00
KNN := KNNRegressor{}
KNN.DistanceFunc = distfunc
return &KNN
}
// Fit stores the training data and its target values. numbers is reshaped
// into a rows x cols matrix; values must contain one entry per row.
// Panics with mat64.ErrShape when the value count and row count disagree.
func (KNN *KNNRegressor) Fit(values []float64, numbers []float64, rows int, cols int) {
	if len(values) != rows {
		panic(mat64.ErrShape)
	}
	KNN.Values = values
	KNN.Data = mat64.NewDense(rows, cols, numbers)
}
2014-05-04 09:52:13 +01:00
// Predict returns the average of the values attached to the K training rows
// nearest to the input vector, under the configured distance function.
func (KNN *KNNRegressor) Predict(vector *mat64.Dense, K int) float64 {
	// Get the number of rows
	rows, _ := KNN.Data.Dims()
	rownumbers := make(map[int]float64)

	// Compute the distance from every training row to the input vector.
	switch KNN.DistanceFunc {
	case "euclidean":
		euclidean := pairwiseMetrics.NewEuclidean()
		for i := 0; i < rows; i++ {
			rowMat := util.FloatsToMatrix(KNN.Data.RowView(i))
			rownumbers[i] = euclidean.Distance(rowMat, vector)
		}
	case "manhattan":
		// BUG FIX: this branch previously constructed a Euclidean
		// metric, so "manhattan" silently behaved as "euclidean".
		manhattan := pairwiseMetrics.NewManhattan()
		for i := 0; i < rows; i++ {
			rowMat := util.FloatsToMatrix(KNN.Data.RowView(i))
			rownumbers[i] = manhattan.Distance(rowMat, vector)
		}
	}

	// Sum the target values of the K nearest rows and return their mean.
	// (The original also accumulated them into an unused slice; removed.)
	sorted := util.SortIntMap(rownumbers)
	var sum float64
	for _, rowIndex := range sorted[:K] {
		sum += KNN.Values[rowIndex]
	}
	return sum / float64(K)
}