@@ -181,9 +181,21 @@ OkNNr_learn(OkNNrdata *data, double *features, double target, double rfactor)
 	 */
 	if (data->rows > 0 && distances[mid] < object_selection_threshold)
 	{
+		double lr = learning_rate * rfactor / data->rfactors[mid];
+
+		if (lr > 1.)
+		{
+			elog(WARNING, "[AQO] Something goes wrong in the ML core: learning rate = %lf", lr);
+			lr = 1.;
+		}
+
+		Assert(lr > 0.);
+		Assert(data->rfactors[mid] > 0. && data->rfactors[mid] <= 1.);
+
 		for (j = 0; j < data->cols; ++j)
-			data->matrix[mid][j] += learning_rate * (features[j] - data->matrix[mid][j]);
-		data->targets[mid] += learning_rate * (target - data->targets[mid]);
+			data->matrix[mid][j] += lr * (features[j] - data->matrix[mid][j]);
+		data->targets[mid] += lr * (target - data->targets[mid]);
+		data->rfactors[mid] += lr * (rfactor - data->rfactors[mid]);
 
 		return data->rows;
 	}
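
To make the new update rule easier to review in isolation, here is a minimal standalone sketch of what the added lines do. It is not AQO code: smooth_toward, base_lr and the toy constants are illustrative stand-ins for learning_rate, the incoming rfactor and the stored rfactors[mid].

    #include <stdio.h>

    /* Linear smoothing: move a fraction lr of the gap toward the observation. */
    static double
    smooth_toward(double stored, double observed, double lr)
    {
        return stored + lr * (observed - stored);
    }

    int
    main(void)
    {
        double  base_lr = 0.1;          /* stand-in for AQO's learning_rate */
        double  stored_rfactor = 0.2;   /* low reliability of the stored row */
        double  new_rfactor = 0.9;      /* high reliability of the new result */
        double  stored_target = 5.0;
        double  new_target = 8.0;
        double  lr = base_lr * new_rfactor / stored_rfactor;

        /* Same clamp as in the patch: even a very reliable new observation
         * may not overshoot the stored state. */
        if (lr > 1.)
            lr = 1.;

        stored_target = smooth_toward(stored_target, new_target, lr);
        stored_rfactor = smooth_toward(stored_rfactor, new_rfactor, lr);

        printf("lr = %.3f, target -> %.3f, rfactor -> %.3f\n",
               lr, stored_target, stored_rfactor);
        return 0;
    }

The effect of dividing by the stored rfactor is that a row learned from less reliable data is overwritten faster by a more reliable observation, while the clamp and the Asserts in the patch guard the resulting rate against degenerate rfactor values.
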
@@ -229,7 +241,7 @@ OkNNr_learn(OkNNrdata *data, double *features, double target, double rfactor)
 		 * Compute average value for target by nearest neighbors. We need to
 		 * check idx[i] != -1 because we may have smaller value of nearest
 		 * neighbors than aqo_k.
-		 * Semantics of coef1: it is defined distance between new object and
+		 * Semantics of tc_coef: it is defined distance between new object and
 		 * this superposition value (with linear smoothing).
 		 * fc_coef - feature changing rate.
 		 */
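
Spelled out as formulas, the semantics described by this comment look roughly as follows. The first line is an assumption about the surrounding code (the assignment of tc_coef sits above this hunk and is not part of the diff); the other two are taken from the loop in the next hunk, with lr being the clamped, reliability-scaled rate introduced by this patch.

    tc_coef = learning_rate * (avg_target - target)        /* assumed, defined above this hunk */
    data->targets[idx[i]] -= tc_coef * lr * w[i] / w_sum
    fc_coef = tc_coef * lr * (data->targets[idx[i]] - avg_target) *
              w[i] * w[i] / sqrt(data->cols) / w_sum

Read this way, tc_coef measures how far the weighted average of the neighbors' targets lies from the newly observed value, each neighbor's target is pulled toward that value in proportion to its weight w[i], and fc_coef sets the per-neighbor feature changing rate, growing with how far that neighbor's own target sits from the average.
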
@@ -240,10 +252,21 @@ OkNNr_learn(OkNNrdata *data, double *features, double target, double rfactor)
 		/* Modify targets and features of each nearest neighbor row. */
 		for (i = 0; i < aqo_k && idx[i] != -1; ++i)
 		{
-			fc_coef = tc_coef * (data->targets[idx[i]] - avg_target) * w[i] * w[i] /
-				sqrt(data->cols) / w_sum;
+			double lr = learning_rate * rfactor / data->rfactors[mid];
+
+			if (lr > 1.)
+			{
+				elog(WARNING, "[AQO] Something goes wrong in the ML core: learning rate = %lf", lr);
+				lr = 1.;
+			}
+
+			Assert(lr > 0.);
+			Assert(data->rfactors[mid] > 0. && data->rfactors[mid] <= 1.);
+
+			fc_coef = tc_coef * lr * (data->targets[idx[i]] - avg_target) *
+				w[i] * w[i] / sqrt(data->cols) / w_sum;
 
-			data->targets[idx[i]] -= tc_coef * w[i] / w_sum;
+			data->targets[idx[i]] -= tc_coef * lr * w[i] / w_sum;
 			for (j = 0; j < data->cols; ++j)
 			{
 				feature = data->matrix[idx[i]];
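
A compact toy run of the patched loop may also help. This is not AQO code: the weight formula 1/(1 + distance) and every constant are placeholders (AQO computes w[i], w_sum and tc_coef earlier in OkNNr_learn), lr is assumed to be the already-clamped reliability-scaled rate, and only the target half of the update is reproduced, since the feature correction via fc_coef continues past the end of this hunk.

    #include <stdio.h>

    #define K 3

    int
    main(void)
    {
        double  targets[K] = {4.0, 6.0, 9.0};   /* stored neighbor targets */
        double  dist[K] = {0.5, 1.0, 2.0};      /* distances to the new object */
        double  w[K], w_sum = 0., avg_target = 0.;
        double  target = 5.0;                   /* newly observed value */
        double  lr = 0.4;                       /* clamped reliability-scaled rate */
        double  tc_coef;
        int     i;

        /* Placeholder weighting: closer neighbors get larger weights. */
        for (i = 0; i < K; ++i)
        {
            w[i] = 1. / (1. + dist[i]);
            w_sum += w[i];
        }
        for (i = 0; i < K; ++i)
            avg_target += targets[i] * w[i] / w_sum;

        tc_coef = 0.1 * (avg_target - target);  /* 0.1 stands in for learning_rate */

        /* The target part of the patched update: closer neighbors, and more
         * reliable new observations (larger lr), are corrected more strongly. */
        for (i = 0; i < K; ++i)
            targets[i] -= tc_coef * lr * w[i] / w_sum;

        for (i = 0; i < K; ++i)
            printf("target[%d] = %.4f\n", i, targets[i]);
        return 0;
    }
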