1
0
mirror of https://github.com/sjwhitworth/golearn.git synced 2025-04-26 13:49:14 +08:00

Merge b7178c36848df4f256b406586e2978afe7de102b into 74ae077eafb245fa3bdca0288854b6d51f97fe60

This commit is contained in:
coding567 2023-01-11 15:06:17 +09:00 committed by GitHub
commit 37114206ee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 94 additions and 5 deletions

View File

@ -47,12 +47,16 @@ func NewProblem(X [][]float64, y []float64, bias float64) *Problem {
prob.c_prob.l = C.int(len(X))
prob.c_prob.n = C.int(len(X[0]) + 1)
prob.c_prob.x = convert_features(X, bias)
c_y := make([]C.double, len(y))
prob.c_prob.x = convert_features_optim(X, bias)
var c_y *C.double = C.mallocDouble(C.int(len(y)))
for i := 0; i < len(y); i++ {
c_y[i] = C.double(y[i])
tmp_next := uintptr(unsafe.Pointer(c_y)) + unsafe.Sizeof(*c_y) * uintptr(i)
tmp_next_p := (*C.double)(unsafe.Pointer(tmp_next))
*tmp_next_p = C.double(y[i])
}
prob.c_prob.y = &c_y[0]
prob.c_prob.y = c_y
prob.c_prob.bias = C.double(-1)
return &prob
@ -150,6 +154,35 @@ func convert_features(X [][]float64, bias float64) **C.struct_feature_node {
x_space[cursor].index = C.int(-1)
cursor++
}
c_x = &x[0]
return c_x
}
// convert_features_optim converts the Go feature matrix X into liblinear's
// sparse **C.struct_feature_node representation by flattening X into a
// C-allocated row-major double buffer and delegating node construction to
// the C helper convert_features_helper. The bias value is forwarded to the
// helper, which appends a bias feature node when bias > 0.
//
// NOTE(review): n_elements is incremented once per (i, j) cell "for bias"
// (inside the inner loop), i.e. it reserves samples*features bias slots
// rather than one per sample. This matches the current allocation behavior
// of convert_features_helper, which also emits the bias node inside its
// feature loop — confirm against the intended liblinear layout (one bias
// node per sample) before changing either side: the two counts must stay
// in sync or the C side will overrun its buffer.
//
// NOTE(review): the flat index uses len(X[i]) while the allocation uses
// n_features = len(X[0]) — presumably X is rectangular; verify against
// callers, since a ragged matrix would write out of bounds.
func convert_features_optim(X [][]float64, bias float64) **C.struct_feature_node {
n_samples := len(X)
n_elements := 0
n_features := 0
if n_samples > 0 {
n_features = len(X[0])
}
// C-owned flat buffer (samples x features, row-major); ownership passes
// to the C side via convert_features_helper.
var X_data *C.double = C.mallocDouble(C.int(n_samples * n_features))
for i := 0; i < n_samples; i++ {
for j := 0; j < len(X[i]); j++ {
if X[i][j] != 0.0 {
n_elements++
}
// Manual pointer arithmetic: equivalent to X_data[i*len(X[i])+j] = X[i][j];
// cgo provides no direct C-array indexing for *C.double.
tmp := uintptr(unsafe.Pointer(X_data)) + unsafe.Sizeof(*X_data) * uintptr(i * len(X[i]) + j)
tmp_p := (*C.double)(unsafe.Pointer(tmp))
*tmp_p = C.double(X[i][j])
n_elements++ //for bias
}
}
var x **C.struct_feature_node = C.convert_features_helper(X_data,
C.int(n_samples), C.int(n_features), C.int(n_elements), C.double(bias))
return x
}

View File

@ -2291,7 +2291,7 @@ static void train_one(const problem *prob, const parameter *param, double *w, do
// Interface functions
//
model* train(const problem *prob, const parameter *param)
{
{
int i,j;
int l = prob->l;
int n = prob->n;
@ -2835,3 +2835,54 @@ void set_print_string_function(void (*print_func)(const char*))
liblinear_print_string = print_func;
}
/* mallocDouble allocates an uninitialized array of `size` doubles on the
 * C heap and returns it; the caller takes ownership. Exposed so the Go
 * bindings can obtain C-owned storage that outlives cgo call boundaries. */
double* mallocDouble(int size) {
	double* buf = Malloc(double, (size_t)size);
	return buf;
}
/* mallocSingleDimensionFeatureNode allocates a zero-initialized array of
 * `size` feature_node structs on the C heap; the caller takes ownership.
 *
 * Uses calloc instead of the previous Malloc + memset pair: a single call
 * zeroes the memory, and calloc checks the count * sizeof multiplication
 * for overflow, which the explicit `sizeof(feature_node) * (size_t)size`
 * did not. Returns NULL on allocation failure, as before. */
feature_node* mallocSingleDimensionFeatureNode(int size) {
	return (feature_node*)calloc((size_t)size, sizeof(feature_node));
}
/* mallocDoubleDimensionFeatureNodePointer allocates an uninitialized array
 * of `size` feature_node pointers (the per-row index of a 2-D node table);
 * the caller takes ownership. */
feature_node** mallocDoubleDimensionFeatureNodePointer(int size) {
	return (feature_node**)Malloc(feature_node*, (size_t)size);
}
/* convert_features_helper packs the flattened row-major matrix `data`
 * (samples x features) into liblinear's sparse feature_node layout:
 * for each sample, one node per nonzero feature (1-based index), an
 * optional bias node when bias > 0, and an index = -1 terminator node.
 * Returns the per-row pointer table; both it and the shared node buffer
 * are heap-allocated and owned by the caller.
 *
 * Fixes two defects in the previous version:
 *   1. The bias node was emitted INSIDE the feature loop, producing one
 *      bias node per cell instead of one per sample.
 *   2. The bias node used index 0, but liblinear feature indices are
 *      1-based and the Go caller sets prob->n = features + 1, reserving
 *      the last index (features + 1) for the bias term.
 *
 * `elements` is the slot count supplied by the caller; it counts one slot
 * per nonzero cell plus one per cell for bias, so for features >= 1 it is
 * a strict upper bound on the nodes written here. +samples reserves the
 * per-row terminators. */
feature_node** convert_features_helper(double* data, int samples, int features, int elements, double bias) {
	feature_node* x_space = mallocSingleDimensionFeatureNode(elements + samples);
	feature_node** x = mallocDoubleDimensionFeatureNodePointer(samples);
	int cursor = 0;
	for (int i = 0; i < samples; i++) {
		x[i] = &x_space[cursor];
		for (int j = 0; j < features; j++) {
			if (data[i * features + j] != 0.0) {
				x_space[cursor].index = j + 1; /* liblinear indices are 1-based */
				x_space[cursor].value = data[i * features + j];
				cursor++;
			}
		}
		if (bias > 0) {
			/* exactly one bias node per sample, at the reserved last index */
			x_space[cursor].index = features + 1;
			x_space[cursor].value = bias;
			cursor++;
		}
		x_space[cursor].index = -1; /* row terminator */
		x_space[cursor].value = 0.0;
		cursor++;
	}
	return x;
}

View File

@ -66,6 +66,11 @@ const char *check_parameter(const struct problem *prob, const struct parameter *
int check_probability_model(const struct model *model);
void set_print_string_function(void (*print_func) (const char*));
/* Allocates an uninitialized array of `size` doubles; caller owns it. */
double* mallocDouble(int size);
/* Allocates a zero-initialized array of `size` feature_node structs. */
struct feature_node* mallocSingleDimensionFeatureNode(int size);
/* Allocates an uninitialized array of `size` feature_node pointers. */
struct feature_node** mallocDoubleDimensionFeatureNodePointer(int size);
/* Builds liblinear's sparse feature_node table from a flattened row-major
 * (samples x features) matrix; `elements` bounds the node count and `bias`,
 * when > 0, appends a bias node per sample. */
struct feature_node** convert_features_helper(double* data, int samples, int features, int elements, double bias);
#ifdef __cplusplus
}
#endif