Skip to content
Snippets Groups Projects
Commit 0fa5860d authored by David Lanzendörfer
Browse files

Train on a complete time series

parent 56a3b9f9
No related branches found
No related tags found
No related merge requests found
Pipeline #92 canceled with stage
in 55 minutes and 56 seconds
...@@ -88,15 +88,15 @@ void main() ...@@ -88,15 +88,15 @@ void main()
uint32_t current_num_neurons; uint32_t current_num_neurons;
// Write params // Write params
uint32_t value_write_counter; uint32_t value_write_counter = 0;
int new_value; int new_value;
// Training // Training
uint32_t new_token; uint32_t new_token;
uint32_t token_series[MAX_NUM_TOKENS]; uint32_t token_series[MAX_NUM_TOKENS];
int token_counter; int token_counter = 0;
uint32_t learning_rate; uint32_t learning_rate = 0;
uint32_t decay_rate; uint32_t decay_rate = 0;
while(true) { while(true) {
...@@ -304,7 +304,7 @@ void main() ...@@ -304,7 +304,7 @@ void main()
case TRAIN_RUN_EPOCHS: case TRAIN_RUN_EPOCHS:
uint32_t num_epochs = atoi(numstr); uint32_t num_epochs = atoi(numstr);
response = run_training(response, num_epochs, learning_rate, decay_rate, token_series[0], token_series[1]); //response = run_training(response, num_epochs, learning_rate, decay_rate, token_series, token_counter);
command_mode = START; command_mode = START;
break; break;
......
...@@ -141,14 +141,14 @@ void reset_network(); ...@@ -141,14 +141,14 @@ void reset_network();
* num_epochs: Amount of epochs * num_epochs: Amount of epochs
* learning_rate_zero: initial learning rate * learning_rate_zero: initial learning rate
* decay_rate: the decay rate for gradient decay * decay_rate: the decay rate for gradient decay
* x: input token * xp: Pointer to array of values
* y: output token * xn: Amount of values in array
*/ */
char* run_training( char* run_training(
char *msgbuf, char *msgbuf,
int num_epochs, int num_epochs,
uint32_t learning_rate_zero, uint32_t learning_rate_zero,
uint32_t decay_rate, uint32_t decay_rate,
uint32_t x, uint32_t *xp,
uint32_t y uint32_t xn
); );
...@@ -393,32 +393,40 @@ void set_bias_values(int bias) ...@@ -393,32 +393,40 @@ void set_bias_values(int bias)
* num_epochs: Amount of epochs * num_epochs: Amount of epochs
* learning_rate_zero: initial learning rate * learning_rate_zero: initial learning rate
* decay_rate: the decay rate for gradient decay * decay_rate: the decay rate for gradient decay
* x: input token * xp: Pointer to array of values
* y: output token * xn: Amount of values in array
*/ */
char* run_training( char* run_training(
char *msgbuf, char *msgbuf,
int num_epochs, int num_epochs,
uint32_t learning_rate_zero, uint32_t learning_rate_zero,
uint32_t decay_rate, uint32_t decay_rate,
uint32_t x, uint32_t *xp,
uint32_t y uint32_t xn
) )
{ {
int last_val; int last_val;
uint32_t train_mask; uint32_t train_mask;
uint32_t y;
for(int epoch=0; epoch<num_epochs;epoch++) {
reset_network(); for(uint32_t xni=1; xni<xn; xni++) {
last_val = predict_next_token(x); msgbuf = "FAIL";
if(last_val==y) { y = xp[xni];
return "SUCCESS"; for(int epoch=0; epoch<num_epochs;epoch++) {
break; reset_network();
for(int xpi=0; xpi<xni; xpi++) {
last_val = predict_next_token(xp[xpi]);
}
if(last_val==y) {
msgbuf = "SUCCESS";
break;
}
train_mask = last_val ^ y;
set_alpha(learning_rate_zero/(1+(decay_rate*epoch)));
mask_back_propgatation(train_mask);
} }
train_mask = last_val ^ y;
set_alpha(learning_rate_zero/(1+(decay_rate*epoch)));
mask_back_propgatation(train_mask);
} }
return "FAIL";
return msgbuf;
} }
...@@ -54,7 +54,6 @@ run_command(server,"DECAY_RATE") ...@@ -54,7 +54,6 @@ run_command(server,"DECAY_RATE")
# α=(1/(1+decayRate×epochNumber))*​α0 # α=(1/(1+decayRate×epochNumber))*​α0
run_command(server,str(decay_rate)) run_command(server,str(decay_rate))
run_command(server,"TRAIN") run_command(server,"TRAIN")
run_command(server,"RUN_EPOCHS") run_command(server,"RUN_EPOCHS")
run_command(server,str(20000)) run_command(server,str(20000))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment