Algorithms_in_C  1.0.0
Set of algorithms implemented in C.
adaline_learning.c File Reference

Adaptive Linear Neuron (ADALINE) implementation More...

#include <assert.h>
#include <limits.h>
#include <math.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
Include dependency graph for adaline_learning.c:

Data Structures

struct  adaline
 

Macros

#define MAX_ITER   500
 
#define ACCURACY   1e-5
 convergence accuracy \(=1\times10^{-5}\)
 

Functions

struct adaline new_adaline (const int num_features, const double eta)
 
void delete_adaline (struct adaline *ada)
 
int activation (double x)
 
char * get_weights_str (struct adaline *ada)
 
int predict (struct adaline *ada, const double *x, double *out)
 
double fit_sample (struct adaline *ada, const double *x, const int y)
 
void fit (struct adaline *ada, double **X, const int *y, const int N)
 
void test1 (double eta)
 
void test2 (double eta)
 
void test3 (double eta)
 
int main (int argc, char **argv)
 

Detailed Description

Adaptive Linear Neuron (ADALINE) implementation

Author
Krishna Vedala

ADALINE is one of the earliest and simplest single-layer artificial neural networks. The algorithm essentially implements a linear function

\[ f\left(x_0,x_1,x_2,\ldots\right) = \sum_j x_jw_j+\theta \]

where \(x_j\) are the input features of a sample, \(w_j\) are the coefficients of the linear function and \(\theta\) is a constant. If we know the \(w_j\), then for any given set of features, \(y\) can be computed. Computing the \(w_j\) is a supervised learning algorithm wherein a set of features and their corresponding outputs are given and weights are computed using stochastic gradient descent method.

Function Documentation

◆ activation()

/**
 * Heaviside-style activation (quantizer).
 * \param[in] x raw neuron output
 * \returns +1 for positive input, -1 otherwise
 */
int activation(double x)
{
    if (x > 0)
        return 1;
    return -1;
}

◆ delete_adaline()

void delete_adaline ( struct adaline ada)

delete dynamically allocated memory

Parameters
[in]adamodel from which the memory is to be freeed.
84 {
85  if (ada == NULL)
86  return;
87 
88  free(ada->weights);
89 };

◆ fit()

void fit ( struct adaline ada,
double **  X,
const int *  y,
const int  N 
)

Update the weights of the model using supervised learning for an array of vectors.

Parameters
[in]adaadaline model to train
[in]Xarray of feature vector
[in]yknown output value for each feature vector
[in]Nnumber of training samples
174 {
175  double avg_pred_error = 1.f;
176 
177  int iter;
178  for (iter = 0; (iter < MAX_ITER) && (avg_pred_error > ACCURACY); iter++)
179  {
180  avg_pred_error = 0.f;
181 
182  // perform fit for each sample
183  for (int i = 0; i < N; i++)
184  {
185  double err = fit_sample(ada, X[i], y[i]);
186  avg_pred_error += fabs(err);
187  }
188  avg_pred_error /= N;
189 
190  // Print updates every 200th iteration
191  // if (iter % 100 == 0)
192  printf("\tIter %3d: Training weights: %s\tAvg error: %.4f\n", iter,
193  get_weights_str(ada), avg_pred_error);
194  }
195 
196  if (iter < MAX_ITER)
197  printf("Converged after %d iterations.\n", iter);
198  else
199  printf("Did not converged after %d iterations.\n", iter);
200 }

◆ fit_sample()

double fit_sample ( struct adaline ada,
const double *  x,
const int  y 
)

Update the weights of the model using supervised learning for one feature vector

Parameters
[in]adaadaline model to fit
[in]xfeature vector
[in]yknown output value
Returns
correction factor
148 {
149  /* output of the model with current weights */
150  int p = predict(ada, x, NULL);
151  int prediction_error = y - p; // error in estimation
152  double correction_factor = ada->eta * prediction_error;
153 
154  /* update each weight, the last weight is the bias term */
155  for (int i = 0; i < ada->num_weights - 1; i++)
156  {
157  ada->weights[i] += correction_factor * x[i];
158  }
159  ada->weights[ada->num_weights - 1] += correction_factor; // update bias
160 
161  return correction_factor;
162 }
Here is the call graph for this function:

◆ get_weights_str()

char* get_weights_str ( struct adaline ada)

Operator to print the weights of the model

102 {
103  static char out[100]; // static so the value is persistent
104 
105  sprintf(out, "<");
106  for (int i = 0; i < ada->num_weights; i++)
107  {
108  sprintf(out, "%s%.4g", out, ada->weights[i]);
109  if (i < ada->num_weights - 1)
110  sprintf(out, "%s, ", out);
111  }
112  sprintf(out, "%s>", out);
113  return out;
114 }

◆ main()

/**
 * Main function: runs the three demonstration tests, optionally reading the
 * learning rate from the first command-line argument.
 * \param[in] argc argument count
 * \param[in] argv argument vector; argv[1] (optional) is the learning rate
 * \returns 0 on success
 */
int main(int argc, char **argv)
{
    srand(time(NULL)); // seed PRNG used by the random test generators

    double eta = 0.1; // default learning rate
    if (argc == 2)    // read eta from the command line if present
        eta = strtod(argv[1], NULL); // strtod, not strtof: eta is a double

    test1(eta);

    printf("Press ENTER to continue...\n");
    getchar();

    test2(eta);

    printf("Press ENTER to continue...\n");
    getchar();

    test3(eta);

    return 0;
}
Here is the call graph for this function:

◆ new_adaline()

struct adaline new_adaline ( const int  num_features,
const double  eta 
)

Default constructor

Parameters
[in]num_featuresnumber of features present
[in]etalearning rate (optional, default=0.1)
Returns
new adaline model
53 {
54  if (eta <= 0.f || eta >= 1.f)
55  {
56  fprintf(stderr, "learning rate should be > 0 and < 1\n");
57  exit(EXIT_FAILURE);
58  }
59 
60  // additional weight is for the constant bias term
61  int num_weights = num_features + 1;
62  struct adaline ada;
63  ada.eta = eta;
64  ada.num_weights = num_weights;
65  ada.weights = (double *)malloc(num_weights * sizeof(double));
66  if (!ada.weights)
67  {
68  perror("Unable to allocate error for weights!");
69  return ada;
70  }
71 
72  // initialize with random weights in the range [-50, 49]
73  for (int i = 0; i < num_weights; i++)
74  ada.weights[i] = 1.f;
75  // ada.weights[i] = (double)(rand() % 100) - 50);
76 
77  return ada;
78 }

◆ predict()

int predict ( struct adaline ada,
const double *  x,
double *  out 
)

predict the output of the model for given set of features

Parameters
[in]adaadaline model to predict
[in]xinput vector
[out]outoptional argument to return neuron output before applying activation function (NULL to ignore)
Returns
model prediction output
126 {
127  double y = ada->weights[ada->num_weights - 1]; // assign bias value
128 
129  for (int i = 0; i < ada->num_weights - 1; i++)
130  y += x[i] * ada->weights[i];
131 
132  if (out) // if out variable is not NULL
133  *out = y;
134 
135  return activation(y); // quantizer: apply ADALINE threshold function
136 }
Here is the call graph for this function:

◆ test1()

void test1 ( double  eta)

test function to predict points in a 2D coordinate system above the line \(x=y\) as +1 and others as -1. Note that each point is defined by 2 values or 2 features.

Parameters
[in]etalearning rate (optional, default=0.01)
209 {
210  struct adaline ada = new_adaline(2, eta); // 2 features
211 
212  const int N = 10; // number of sample points
213  const double saved_X[10][2] = {{0, 1}, {1, -2}, {2, 3}, {3, -1},
214  {4, 1}, {6, -5}, {-7, -3}, {-8, 5},
215  {-9, 2}, {-10, -15}};
216 
217  double **X = (double **)malloc(N * sizeof(double *));
218  const int Y[10] = {1, -1, 1, -1, -1,
219  -1, 1, 1, 1, -1}; // corresponding y-values
220  for (int i = 0; i < N; i++)
221  {
222  X[i] = (double *)saved_X[i];
223  }
224 
225  printf("------- Test 1 -------\n");
226  printf("Model before fit: %s", get_weights_str(&ada));
227 
228  fit(&ada, X, Y, N);
229  printf("Model after fit: %s\n", get_weights_str(&ada));
230 
231  double test_x[] = {5, -3};
232  int pred = predict(&ada, test_x, NULL);
233  printf("Predict for x=(5,-3): % d", pred);
234  assert(pred == -1);
235  printf(" ...passed\n");
236 
237  double test_x2[] = {5, 8};
238  pred = predict(&ada, test_x2, NULL);
239  printf("Predict for x=(5, 8): % d", pred);
240  assert(pred == 1);
241  printf(" ...passed\n");
242 
243  // for (int i = 0; i < N; i++)
244  // free(X[i]);
245  free(X);
246  delete_adaline(&ada);
247 }
Here is the call graph for this function:

◆ test2()

void test2 ( double  eta)

test function to predict points in a 2D coordinate system above the line \(x+3y=-1\) as +1 and others as -1. Note that each point is defined by 2 values or 2 features. The function will create random sample points for training and test purposes.

Parameters
[in]etalearning rate (optional, default=0.01)
257 {
258  struct adaline ada = new_adaline(2, eta); // 2 features
259 
260  const int N = 50; // number of sample points
261 
262  double **X = (double **)malloc(N * sizeof(double *));
263  int *Y = (int *)malloc(N * sizeof(int)); // corresponding y-values
264  for (int i = 0; i < N; i++)
265  X[i] = (double *)malloc(2 * sizeof(double));
266 
267  // generate sample points in the interval
268  // [-range2/100 , (range2-1)/100]
269  int range = 500; // sample points full-range
270  int range2 = range >> 1; // sample points half-range
271  for (int i = 0; i < N; i++)
272  {
273  double x0 = ((rand() % range) - range2) / 100.f;
274  double x1 = ((rand() % range) - range2) / 100.f;
275  X[i][0] = x0;
276  X[i][1] = x1;
277  Y[i] = (x0 + 3. * x1) > -1 ? 1 : -1;
278  }
279 
280  printf("------- Test 2 -------\n");
281  printf("Model before fit: %s", get_weights_str(&ada));
282 
283  fit(&ada, X, Y, N);
284  printf("Model after fit: %s\n", get_weights_str(&ada));
285 
286  int N_test_cases = 5;
287  double test_x[2];
288  for (int i = 0; i < N_test_cases; i++)
289  {
290  double x0 = ((rand() % range) - range2) / 100.f;
291  double x1 = ((rand() % range) - range2) / 100.f;
292 
293  test_x[0] = x0;
294  test_x[1] = x1;
295  int pred = predict(&ada, test_x, NULL);
296  printf("Predict for x=(% 3.2f,% 3.2f): % d", x0, x1, pred);
297 
298  int expected_val = (x0 + 3. * x1) > -1 ? 1 : -1;
299  assert(pred == expected_val);
300  printf(" ...passed\n");
301  }
302 
303  for (int i = 0; i < N; i++)
304  free(X[i]);
305  free(X);
306  free(Y);
307  delete_adaline(&ada);
308 }
Here is the call graph for this function:

◆ test3()

void test3 ( double  eta)

test function to predict points in a 3D coordinate system lying within the sphere of radius 1 and centre at origin as +1 and others as -1. Note that each point is defined by 3 values but we use 6 features. The function will create random sample points for training and test purposes. The sphere centred at origin and radius 1 is defined as: \(x^2+y^2+z^2=r^2=1\) and if the \(r^2<1\), point lies within the sphere else, outside.

Parameters
[in]etalearning rate (optional, default=0.01)
322 {
323  struct adaline ada = new_adaline(6, eta); // 2 features
324 
325  const int N = 50; // number of sample points
326 
327  double **X = (double **)malloc(N * sizeof(double *));
328  int *Y = (int *)malloc(N * sizeof(int)); // corresponding y-values
329  for (int i = 0; i < N; i++)
330  X[i] = (double *)malloc(6 * sizeof(double));
331 
332  // generate sample points in the interval
333  // [-range2/100 , (range2-1)/100]
334  int range = 200; // sample points full-range
335  int range2 = range >> 1; // sample points half-range
336  for (int i = 0; i < N; i++)
337  {
338  double x0 = ((rand() % range) - range2) / 100.f;
339  double x1 = ((rand() % range) - range2) / 100.f;
340  double x2 = ((rand() % range) - range2) / 100.f;
341  X[i][0] = x0;
342  X[i][1] = x1;
343  X[i][2] = x2;
344  X[i][3] = x0 * x0;
345  X[i][4] = x1 * x1;
346  X[i][5] = x2 * x2;
347  Y[i] = (x0 * x0 + x1 * x1 + x2 * x2) <= 1 ? 1 : -1;
348  }
349 
350  printf("------- Test 3 -------\n");
351  printf("Model before fit: %s", get_weights_str(&ada));
352 
353  fit(&ada, X, Y, N);
354  printf("Model after fit: %s\n", get_weights_str(&ada));
355 
356  int N_test_cases = 5;
357  double test_x[6];
358  for (int i = 0; i < N_test_cases; i++)
359  {
360  double x0 = ((rand() % range) - range2) / 100.f;
361  double x1 = ((rand() % range) - range2) / 100.f;
362  double x2 = ((rand() % range) - range2) / 100.f;
363  test_x[0] = x0;
364  test_x[1] = x1;
365  test_x[2] = x2;
366  test_x[3] = x0 * x0;
367  test_x[4] = x1 * x1;
368  test_x[5] = x2 * x2;
369  int pred = predict(&ada, test_x, NULL);
370  printf("Predict for x=(% 3.2f,% 3.2f): % d", x0, x1, pred);
371 
372  int expected_val = (x0 * x0 + x1 * x1 + x2 * x2) <= 1 ? 1 : -1;
373  assert(pred == expected_val);
374  printf(" ...passed\n");
375  }
376 
377  for (int i = 0; i < N; i++)
378  free(X[i]);
379  free(X);
380  free(Y);
381  delete_adaline(&ada);
382 }
Here is the call graph for this function:
get_weights_str
char * get_weights_str(struct adaline *ada)
Definition: adaline_learning.c:101
adaline::weights
double * weights
weights of the neural network
Definition: adaline_learning.c:40
delete_adaline
void delete_adaline(struct adaline *ada)
Definition: adaline_learning.c:83
data
Definition: prime_factoriziation.c:25
N
#define N
Definition: sol1.c:111
adaline::eta
double eta
learning rate of the algorithm
Definition: adaline_learning.c:39
predict
int predict(struct adaline *ada, const double *x, double *out)
Definition: adaline_learning.c:125
new_adaline
struct adaline new_adaline(const int num_features, const double eta)
Definition: adaline_learning.c:52
test3
void test3(double eta)
Definition: adaline_learning.c:321
fit_sample
double fit_sample(struct adaline *ada, const double *x, const int y)
Definition: adaline_learning.c:147
activation
int activation(double x)
Definition: adaline_learning.c:96
test1
void test1(double eta)
Definition: adaline_learning.c:208
test2
void test2(double eta)
Definition: adaline_learning.c:256
fit
void fit(struct adaline *ada, double **X, const int *y, const int N)
Definition: adaline_learning.c:173
adaline
Definition: adaline_learning.c:37
ACCURACY
#define ACCURACY
convergence accuracy
Definition: adaline_learning.c:44
adaline::num_weights
int num_weights
number of weights of the neural network
Definition: adaline_learning.c:41