mapNeural function

Replaces the neural-network template placeholders embedded in the R script code with the current values of the corresponding Riverpod providers.

Implementation
/// Substitute the neural-network template placeholders in [code] with the
/// current settings read from the Riverpod providers, and return the
/// resulting R script text.
///
/// All replacements are plain textual substitutions via [String.replaceAll].
String mapNeural(WidgetRef ref, String code) {
  // Obtain the current values of the global variables.
  int hiddenLayerSizes = ref.read(hiddenLayerNeuralProvider);
  int neuralStepMax = ref.read(stepMaxNeuralProvider);
  int nnetMaxit = ref.read(maxitNeuralProvider);
  bool neuralIgnoreCategoric = ref.read(ignoreCategoricNeuralProvider);
  double neuralThreshold = ref.read(thresholdNeuralProvider);
  String hiddenNeurons = ref.read(hiddenLayersNeuralProvider);
  String neuralActivationFct = ref.read(activationFctNeuralProvider);
  String neuralErrorFct = ref.read(errorFctNeuralProvider);

  // Perform the mapping.
  code = code.replaceAll('<NNET_HIDDEN_LAYERS>', hiddenLayerSizes.toString());
  // The hidden layer sizes are passed to R as a vector, e.g. c(10,5).
  code = code.replaceAll('<NEURAL_HIDDEN_LAYERS>', 'c($hiddenNeurons)');
  code = code.replaceAll('<NEURAL_MAXIT>', nnetMaxit.toString());
  code = code.replaceAll(
    '<NEURAL_MAX_NWTS>',
    ref.read(neuralMaxWeightsProvider).toString(),
  );
  code = code.replaceAll('<NEURAL_ERROR_FCT>', '"$neuralErrorFct"');

  // The activation function is normally passed to R as a quoted string, but
  // 'relu' refers to the ReLU function object from the sigmoid package in R
  // and must be emitted unquoted. BUG FIX: the placeholder was previously
  // replaced unconditionally with the quoted form before this check ran,
  // which made the relu branch unreachable and always quoted the value.
  if (neuralActivationFct == 'relu') {
    code = code.replaceAll('<NEURAL_ACT_FCT>', 'relu');
  } else {
    code = code.replaceAll('<NEURAL_ACT_FCT>', '"$neuralActivationFct"');
  }

  code = code.replaceAll('<NEURAL_THRESHOLD>', neuralThreshold.toString());
  code = code.replaceAll('<NEURAL_STEP_MAX>', neuralStepMax.toString());
  // R booleans are upper-case (TRUE/FALSE), Dart's are lower-case.
  code = code.replaceAll(
    '<NEURAL_IGNORE_CATEGORIC>',
    neuralIgnoreCategoric.toString().toUpperCase(),
  );

  return code;
}