
[Artificial Intelligence] Neural network basic algorithm source code

Posted on 2006-10-12 09:59

Source code for basic neural network algorithms, containing four algorithms: BP (backpropagation), ART I, RBF, and the Kohonen self-organizing algorithm.

Pittnet currently has four popular neural network paradigms:
(1) backpropagation (BP)
(2) Kohonen self-organizing
(3) adaptive resonance theory I (ART I)
(4) radial basis function (RBF).
Pittnet takes ASCII text files as inputs, and the output files are also ASCII text.
Pittnet is designed to run on any hardware, including low-end PCs. The source may
be modified by students to customize the neural networks or to include the neural
networks as components of larger software systems. The cascade correlation network
paradigm will be added to Pittnet in the near future. The development of Pittnet was
funded by National Science Foundation (NSF) CAREER grant DMI 9502134.
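
For reference, the data files are plain whitespace-separated numbers. Judging from load_data_into_array() below, each sample is simply signal_dimensions input values followed by the target output values, so a hypothetical training file for a 3-input, 1-output network could look like:

  0.12  0.40  0.77   1
  0.90  0.05  0.33   0
  0.25  0.61  0.48   1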

// pittnet.CPP  // Backpropagation / ART1 / Kohonen / Radial Basis

// The purpose of this prototype is to allow the user to construct and
// initialize a series of neural nets. Using the concept of inheritance and
// derived classes from C++ object oriented programming, the necessity to
// declare multiple large structures that duplicate attributes is eliminated.
// Utilizing pointers and the "new" operator, dynamic arrays are established.
// The user can then specify the storage array size for the number of hidden
// units and output units for the neural network while the program is running.
// This strategy eliminates the need to establish extremely large arrays
// while still maintaining the flexibility required to design nets of various
// shapes and sizes.  The "Neural" classes allow the attributes of the newly
// constructed networks to be stored for further processing.

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <iostream.h>
#include <string.h>
#include <conio.h>
#include <float.h>
#include <fstream.h>
#include <ctype.h>

#define IA   16807
#define IM   2147483647
#define AM   (1.0 / IM)
#define IQ   127773
#define IR   2836
#define NTAB 32
#define NDIV (1+(IM-1) / NTAB)
#define EPS  1.2e-7
#define RNMX (1.0 - EPS)

// The following function is a random number generator
float bedlam(long *idum);
long gaset = -2500; // seed; must start negative to trigger initialization
                    // (declared long so &gaset can be passed directly; the
                    //  original cast the int VALUE to a pointer, a crash bug)
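// (bedlam() is the "minimal standard" Park-Miller generator with a
//  Bays-Durham shuffle, i.e. the ran1 routine from Numerical Recipes; the
//  constants IA, IM, IQ, IR above are exactly its values.  It returns
//  uniform deviates in (0,1), and is used below both to initialize weights
//  in [-1,1] and to shuffle signal presentation order.)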

float bedlam(long *idum)
{
  int xj;
  long xk;
  static long iy=0;
  static long iv[NTAB];
  float temp;

  if(*idum <= 0 || !iy)
  {
    if(-(*idum) < 1)
    {
      *idum = 1 + *idum;
    }
    else
    {
      *idum = -(*idum);
    }
    for(xj = NTAB+7; xj >= 0; xj--)
    {
      xk = (*idum) / IQ;
      *idum = IA * (*idum - xk * IQ) - IR * xk;
      if(*idum < 0)
      {
        *idum += IM;
      }
      if(xj < NTAB)
      {
        iv[xj] = *idum;
      }
    }
    iy = iv[0];
  }

  xk = (*idum) / IQ;
  *idum = IA * (*idum - xk * IQ) - IR * xk;
  if(*idum < 0)
  {
    *idum += IM;
  }
  xj = iy / NDIV;
  iy = iv[xj];
  iv[xj] = *idum;

  if((temp=AM*iy) > RNMX)
  {
    return(RNMX);
  }
  else
  {
    return(temp);
  }
} // end of bedlam function

//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

// (Fully connected network using backpropagation)

// In this base class, all nodes in the network have the following attributes

class Processing_units
{
  public:
  float *processing_unit_input;
  int number_of_input_units;
  void establish_array_of_processing_unit_inputs(void);
  float *weight_of_inputs;
  void establish_weight_vector_for_processing_units(void);
  float bias;
  float output_signal;
  void calculate_output_signal(int activation_function);
  float calculate_output_signal_derivative(int afun);
  float error_information_term;
  void calculate_weight_and_bias_correction_terms(float learning_rate);
  float *weight_correction_term;
  float bias_correction_term;
  float sum_of_weighted_inputs;
  void update_weights_and_biases(void);
  Processing_units();
  ~Processing_units();
};

Processing_units::Processing_units()
{
  bias = 0.0;
  output_signal = 0.0;
  error_information_term = 0.0;
  bias_correction_term = 0.0;
  sum_of_weighted_inputs = 0.0;
  // null the arrays so the destructor is safe even if they were never allocated
  processing_unit_input = 0;
  weight_of_inputs = 0;
  weight_correction_term = 0;
}

Processing_units::~Processing_units()
{
  delete [] processing_unit_input;
  delete [] weight_of_inputs;
  delete [] weight_correction_term;
}

// Define base class member functions

void Processing_units::establish_array_of_processing_unit_inputs(void)
{
  processing_unit_input = new float[number_of_input_units];
  weight_of_inputs = new float[number_of_input_units];
  weight_correction_term = new float[number_of_input_units];
}

void Processing_units::establish_weight_vector_for_processing_units(void)
{
  for(int i = 0; i < number_of_input_units; i++)
  {
    // weights range from -1 to 1
    weight_of_inputs[i] = 1.0 - (2.0 * bedlam(&gaset));
  }
}

void Processing_units::calculate_output_signal(int activation_function)
{
  // weighted sum of the inputs, plus the bias
  sum_of_weighted_inputs = bias;
  for(int i = 0; i < number_of_input_units; i++)
  {
    sum_of_weighted_inputs += processing_unit_input[i] * weight_of_inputs[i];
  }

  switch(activation_function)
  {
    case 1: // binary sigmoid function
    output_signal = 1.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs));
    break;

    case 2: // bipolar sigmoid function
    output_signal = (2.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs))) - 1;
    break;
  }
}
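
// The derivatives used below follow from the activation functions above:
//   binary:  f(x) = 1/(1+exp(-x))      ==>  f'(x) = f(x)(1 - f(x))
//   bipolar: f(x) = 2/(1+exp(-x)) - 1  ==>  f'(x) = 0.5(1 + f(x))(1 - f(x))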

float Processing_units::calculate_output_signal_derivative(int afun)
{
  float derivative;
  switch(afun) // derivative used is based on the activation function selected
  {
    case 1: // binary sigmoid function
    derivative = output_signal * (1.0 - output_signal);
    break;

    case 2:  // bipolar sigmoid function
    derivative = 0.5 * (1.0 + output_signal) * (1.0 - output_signal);
    break;
  }
  return derivative;
}
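
// The next two routines implement the standard delta rule:
//   weight correction  dw_i = (learning rate) * (error term) * input_i
//   bias correction    db   = (learning rate) * (error term)
// after which w_i += dw_i and bias += db.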

void Processing_units::calculate_weight_and_bias_correction_terms(float learning_rate)
{
  for(int i = 0; i < number_of_input_units; i++)
  {weight_correction_term[i] = learning_rate * error_information_term * processing_unit_input[i];}
  bias_correction_term = learning_rate * error_information_term;
  error_information_term = 0.0;
  update_weights_and_biases();
}

void Processing_units::update_weights_and_biases(void)
{
  for(int i = 0; i < number_of_input_units; i++)
  {weight_of_inputs[i] = weight_of_inputs[i] + weight_correction_term[i];}
  bias = bias + bias_correction_term;
}

// Declare a derived class "Hidden_units" for hidden layer of network
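// (Its error term is delta_j = f'(z_in_j) * sum_k delta_k * w_jk; the weighted
//  sum of downstream deltas is accumulated into error_information_term by the
//  training loop, and this class multiplies in the derivative factor.)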
class Hidden_units : public Processing_units
{
  public:
  void calculate_hidden_error_information_term(int afun);
};

// Define member functions for derived class "Hidden_units"
void Hidden_units::calculate_hidden_error_information_term(int afun)
{
  float af = afun;
  float output_signal_derivative = calculate_output_signal_derivative(af);
  error_information_term = error_information_term * output_signal_derivative;
}

// Declare a derived class "Output_units" for output layer of network
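// (Each output node computes delta_k = (t_k - y_k) * f'(y_in_k) and also keeps
//  the absolute and squared error differences for the error reports.)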
class Output_units : public Processing_units
{
  public:
  void calculate_output_error_information_term(float target_value, int af);
  float absolute_error_difference;
  float error_difference_squared;
};

// Define member functions for derived class "Output_units"
void Output_units::calculate_output_error_information_term(float target_value, int af)
{
  float afun = af;
  float output_signal_derivative = calculate_output_signal_derivative(afun);
  absolute_error_difference = fabs(target_value - output_signal);
  error_information_term = (target_value - output_signal) * output_signal_derivative;
  error_difference_squared = pow((target_value - output_signal), 2.0);
}

// Create classes to contain neural net specifications
class Hidden_layer
{
  public:
  Hidden_units  *node_in_hidden_layer;
  int nodes_in_hidden_layer;
  ~Hidden_layer();
};

Hidden_layer::~Hidden_layer()
{delete [] node_in_hidden_layer;}

// The following class represents an artificial neural network containing
// the topology, weights, training performance and testing performance
class Back_Topology
{
  public:
  Hidden_layer *hidden_layer_number;
  Output_units *node_in_output_layer;
  int number_of_hidden_layers;
  int activation_function_for_hidden_layer;
  int nodes_in_output_layer;
  int activation_function_for_output_layer;
  int signal_dimensions;
  int number_of_tests;
  void establish_activation_functions(void);
  void construct_and_initialize_backprop_network(void);
  void upload_network(void);
  void savenet(void);
  ~Back_Topology();
};


void Back_Topology::construct_and_initialize_backprop_network(void)
{
  int nodes, inputs_to_output_node;
  char netcreate;
  int looploc = 0;

  do
  {
    cout << "\n";
    cout << "Do you wish to" << "\n\n";
    cout << "C.  Create your own Backprop Network " << "\n";
    cout << "U.  Upload an existing Backprop Network " << "\n\n";
    cout << "Your choice?:  "; cin >> netcreate;
    netcreate = toupper(netcreate);
    cout << "\n";
    if((netcreate == 'C') || (netcreate == 'U')) {looploc = 1;}
  } while(looploc <= 0);

  if(netcreate == 'U')
  {upload_network();}
  else
  {
    cout << "Please enter the dimensions of the input vector: ";
    cin >> signal_dimensions;
    cout << "\n\n";
    do
    {
      cout << "please enter the number of hidden layers (0 - 2):  ";
      cin >> number_of_hidden_layers;
      cout << "\n\n";
    } while(number_of_hidden_layers > 2);

    if(number_of_hidden_layers > 0)
    {
      hidden_layer_number = new Hidden_layer[number_of_hidden_layers];
      for(int layer = 0; layer < number_of_hidden_layers; layer++)
      {
        cout << "please enter the number of nodes in hidden layer " << layer + 1 << ": ";
        cin >> hidden_layer_number[layer].nodes_in_hidden_layer;
        cout << "\n\n";
      }
    }
    cout << "\n";
    cout << "please enter the number of nodes in the output layer: ";
    cin >> nodes_in_output_layer;
    cout << "\n\n";

    // establish dynamic arrays for the nodes in the hidden and output layers

    if(number_of_hidden_layers > 0)
    {
      for(int layer = 0; layer < number_of_hidden_layers; layer++)
      {
        nodes = hidden_layer_number[layer].nodes_in_hidden_layer;
        hidden_layer_number[layer].node_in_hidden_layer = new Hidden_units[nodes];
      }
    }
    node_in_output_layer = new Output_units[nodes_in_output_layer];

    if(number_of_hidden_layers > 0)
    {
      // establish input connection between signal and hidden layer
      for(nodes = 0; nodes < hidden_layer_number[0].nodes_in_hidden_layer; nodes++)
      {
        hidden_layer_number[0].node_in_hidden_layer[nodes].number_of_input_units = signal_dimensions;
        hidden_layer_number[0].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
        hidden_layer_number[0].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
        hidden_layer_number[0].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
      }
      if(number_of_hidden_layers > 1)
      {
        // establish connection between first and second hidden layers
        for(nodes = 0; nodes < hidden_layer_number[1].nodes_in_hidden_layer; nodes++)
        {
          hidden_layer_number[1].node_in_hidden_layer[nodes].number_of_input_units = hidden_layer_number[0].nodes_in_hidden_layer;
          hidden_layer_number[1].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
          hidden_layer_number[1].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
          hidden_layer_number[1].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
        }
      }
    }

    // determine number of inputs to the output layer
    if(number_of_hidden_layers > 0)
    {inputs_to_output_node = hidden_layer_number[number_of_hidden_layers - 1].nodes_in_hidden_layer;}
    else
    {inputs_to_output_node = signal_dimensions;}

    // establish input connections to output layer
    for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
    {
      node_in_output_layer[nodes].number_of_input_units = inputs_to_output_node;
      node_in_output_layer[nodes].establish_array_of_processing_unit_inputs();
      node_in_output_layer[nodes].establish_weight_vector_for_processing_units();
      node_in_output_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
    }
    establish_activation_functions(); // for hidden and output nodes
  }
} // end construct and initialize neural network function

void Back_Topology::upload_network(void)
{
  char getname[13];
  ifstream get_ptr;
  int netid, nodes, dim, inputs_to_output_node, hid, inputs;
  int dolock = 0;

  do
  {
    cout << "\n\n";
    cout << "Please enter the name of the file which holds the Backpropagation network" << "\n";
    cin >> getname; cout << "\n";
    get_ptr.open(getname, ios::in);
    get_ptr >> netid;
    if(netid == 1) {dolock = 1;}
    else
    {
      cout << "Error** file contents do not match Backprop specifications" << "\n";
      cout << "try again" << "\n";
      get_ptr.close();
    }
  } while(dolock <= 0);

  get_ptr >> signal_dimensions;
  get_ptr >> activation_function_for_output_layer;
  get_ptr >> nodes_in_output_layer;
  get_ptr >> inputs_to_output_node;

  // establish output layer
  node_in_output_layer = new Output_units[nodes_in_output_layer];
  for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
  {
    node_in_output_layer[nodes].number_of_input_units = inputs_to_output_node;
    node_in_output_layer[nodes].establish_array_of_processing_unit_inputs();
    node_in_output_layer[nodes].establish_weight_vector_for_processing_units();
    get_ptr >> node_in_output_layer[nodes].bias;
  }
  for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
  {
    for(dim = 0; dim < inputs_to_output_node; dim++)
    {get_ptr >> node_in_output_layer[nodes].weight_of_inputs[dim];}
  }

  // establish hidden layer(s)
  get_ptr >> number_of_hidden_layers;
  if(number_of_hidden_layers > 0)
  {
    hidden_layer_number = new Hidden_layer[number_of_hidden_layers];
    get_ptr >> activation_function_for_hidden_layer;
    for(hid = 0; hid < number_of_hidden_layers; hid++)
    {
      get_ptr >> hidden_layer_number[hid].nodes_in_hidden_layer;
      nodes = hidden_layer_number[hid].nodes_in_hidden_layer;
      hidden_layer_number[hid].node_in_hidden_layer = new Hidden_units[nodes];

      if(hid == 0) {inputs = signal_dimensions;}
      else
      {inputs = hidden_layer_number[0].nodes_in_hidden_layer;}

      for(nodes = 0; nodes < hidden_layer_number[hid].nodes_in_hidden_layer; nodes++)
      {
        hidden_layer_number[hid].node_in_hidden_layer[nodes].number_of_input_units = inputs;
        hidden_layer_number[hid].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
        get_ptr >> hidden_layer_number[hid].node_in_hidden_layer[nodes].bias;
      }
      for(nodes = 0; nodes < hidden_layer_number[hid].nodes_in_hidden_layer; nodes++)
      {
        for(dim = 0; dim < inputs; dim++)
        {get_ptr >> hidden_layer_number[hid].node_in_hidden_layer[nodes].weight_of_inputs[dim];}
      }
    }
  }
  get_ptr.close();
}
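
// File layout written by savenet() and read back by upload_network():
//   1                                      network identifier
//   signal_dimensions
//   activation_function_for_output_layer
//   nodes_in_output_layer
//   inputs_to_output_node
//   output-layer biases, then one weight row per output node
//   number_of_hidden_layers
//   [if hidden layers exist: activation_function_for_hidden_layer, then for
//    each layer its node count, biases, and one weight row per node]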

void Back_Topology::savenet(void)
{
  char savename[13];
  ofstream save_ptr;
  int nodes, dim, inputs, hid;

  cout << "\n\n";
  cout << "Please enter the name of the file that will hold" << "\n";
  cout << "the Backpropagation network:  "; cin >> savename;

  save_ptr.open(savename, ios::out);
  save_ptr << 1 << "\n";   // network identifier number
  save_ptr << signal_dimensions << "\n";
  save_ptr << activation_function_for_output_layer << "\n";
  save_ptr << nodes_in_output_layer << "\n";

  if(number_of_hidden_layers > 0)
  {inputs = hidden_layer_number[number_of_hidden_layers - 1].nodes_in_hidden_layer;}
  else
  {inputs = signal_dimensions;}
  save_ptr << inputs << "\n";
  for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
  {save_ptr << node_in_output_layer[nodes].bias << " ";}
  save_ptr << "\n";

  for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
  {
    for(dim = 0; dim < inputs; dim++)
    {save_ptr << node_in_output_layer[nodes].weight_of_inputs[dim] << " ";}
    save_ptr << "\n";
  }

  save_ptr << number_of_hidden_layers << "\n";

  if(number_of_hidden_layers > 0)
  {
    save_ptr << activation_function_for_hidden_layer << "\n";

    for(hid = 0; hid < number_of_hidden_layers; hid++)
    {
      save_ptr << hidden_layer_number[hid].nodes_in_hidden_layer << "\n";
      if(hid == 0) {inputs = signal_dimensions;}
      else {inputs = hidden_layer_number[0].nodes_in_hidden_layer;}

      for(nodes = 0; nodes < hidden_layer_number[hid].nodes_in_hidden_layer; nodes++)
      {save_ptr << hidden_layer_number[hid].node_in_hidden_layer[nodes].bias << " ";}
      save_ptr << "\n";

      for(nodes = 0; nodes < hidden_layer_number[hid].nodes_in_hidden_layer; nodes++)
      {
        for(dim = 0; dim < inputs; dim++)
        {save_ptr << hidden_layer_number[hid].node_in_hidden_layer[nodes].weight_of_inputs[dim] << " ";}
        save_ptr << "\n";
      }
    }
  }
  save_ptr.close();
}

Back_Topology::~Back_Topology()
{
  delete [] hidden_layer_number;
  delete [] node_in_output_layer;
}

void Back_Topology::establish_activation_functions(void)
{
  int bchoice, count;
  int dolock;

  for(count = 0; count < 2; count++)
  {
    dolock = 1; // reset for each pass; the original set this only once,
                // so the second prompt accepted any input unchecked
    cout << "\n";
    if((count == 0) && (number_of_hidden_layers > 0))
    {cout << "For the nodes in the hidden layer(s):" << "\n";}
    else
    {cout << "For the output layer:" << "\n";}
    do
    {
      cout << "please select the type of activation function you wish the nodes to use" << "\n\n";
      cout << "1.  Binary Sigmoid Function " << "\n";
      cout << "2.  Bipolar Sigmoid Function " << "\n\n";
      cout << "Your Selection "; cin >> bchoice;
      cout << "\n\n";
      if((bchoice == 1) || (bchoice == 2)) {dolock = 0;}
    } while(dolock >= 1);

    if((count == 0) && (number_of_hidden_layers > 0))
    {activation_function_for_hidden_layer = bchoice;}
    else
    {activation_function_for_output_layer = bchoice;}
  }
}

// Declare classes that will establish training and testing data arrays
class sample_data
{
  public:
  float *data_in_sample; // pointer to the dimensions of a single signal
  ~sample_data();
};

sample_data::~sample_data()
{delete [] data_in_sample;}

class Data_type
{
  public:
  char filename[13];                       // file containing data for network training or testing
  char resultsname[13];                    // file containing the results of training or testing
  int signal_dimensions;                   // number of dimensions contained in a signal
  int sample_number;                       // number of signals in the training set
  int nodes_in_output_layer;               // dimensions of test data output
  sample_data *number_of_samples;          // pointer to the array containing signals
  float *max_output_value;
  float *min_output_value;
  virtual void determine_sample_number(void);
  void specify_signal_sample_size(void);
  virtual void load_data_into_array(void); // function to place data into the array
  void acquire_net_info(int signal, int no_output_nodes);
  void delete_signal_array(void);          // function to free memory allocated to hold signals
  virtual void normalize_data_in_array(void);
  Data_type();    // class constructor
  ~Data_type();   // class destructor
};

Data_type::Data_type()
{
  // null the pointers so the destructor is safe even if no data was loaded
  number_of_samples = 0;
  max_output_value = 0;
  min_output_value = 0;
}

Data_type::~Data_type()
{
  delete [] max_output_value;
  delete [] min_output_value;
}

// define functions of the Data_type class
void Data_type::determine_sample_number(void)
{
  ifstream dfile_ptr; // pointer to a file
  dfile_ptr.open(filename, ios::in);

  float hold;
  int lock = 1;
  sample_number = 0;

  do
  {
    if(dfile_ptr.eof()) {lock = 0;}
    else
    {dfile_ptr >> hold;  sample_number += 1;}
  } while(lock > 0);

  dfile_ptr.close();
  sample_number = int(sample_number / (signal_dimensions + nodes_in_output_layer));
}

void Data_type::specify_signal_sample_size(void)
{
  char tchoice;
  int dolock = 1;
  do
  {
    cout << "\n";
    cout << "Please select the number of samples you wish to use" << "\n\n";
    cout << "        A.  All samples in the file" << "\n";
    cout << "        S.  Specific number of samples" << "\n\n";
    cout << "        Your Selection: "; cin >> tchoice;
    cout << "\n\n";
    tchoice = toupper(tchoice);
    if((tchoice == 'A') || (tchoice == 'S')) {dolock = 0;}
  } while(dolock >= 1);
  cout << "\n";
  if(tchoice == 'A') {determine_sample_number();}
  else
  {
    cout << "\n";
    cout << "please enter the number of testing samples you wish to use: ";
    cin >> sample_number;
    cout << "\n";
  }
  load_data_into_array();
}
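
// normalize_data_in_array() rescales every column (inputs and targets) to
// [0,1] via x' = (x - min) / (max - min).  Columns that are already binary
// 0/1 are detected by the trigger test and left alone, and the per-node
// output min/max are stored so results can be mapped back to real units.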

void Data_type::normalize_data_in_array(void)
{
  int imax, imin, trigger;
  float min, max, rmax, rmin;
  int total_dimension = signal_dimensions + nodes_in_output_layer;
  int i, j;
  max_output_value = new float[nodes_in_output_layer];
  min_output_value = new float[nodes_in_output_layer];

  for(j = 0; j < total_dimension; j++)
  {
    trigger = 1;
    // identify the minimum and maximum values for each dimension
    for(i = 0; i < sample_number; i++)
    {
      if(i == 0)
      {
        max = number_of_samples[i].data_in_sample[j];
        min = number_of_samples[i].data_in_sample[j];
        if(j >= (total_dimension - nodes_in_output_layer))
        {
          min_output_value[j - (total_dimension - nodes_in_output_layer)] = min;
          max_output_value[j - (total_dimension - nodes_in_output_layer)] = max;
        }
      }
      else
      {
        if(number_of_samples[i].data_in_sample[j] < min)
        {
          min = number_of_samples[i].data_in_sample[j];
          if(j >= (total_dimension - nodes_in_output_layer))
          {min_output_value[j - (total_dimension - nodes_in_output_layer)] = min;}
        }

        if(number_of_samples[i].data_in_sample[j] > max)
        {
          max = number_of_samples[i].data_in_sample[j];
          if(j >= (total_dimension - nodes_in_output_layer))
          {max_output_value[j - (total_dimension - nodes_in_output_layer)] = max;}
        }
      }
    }

    // use the column's own min/max here; the original indexed the output
    // min/max arrays for every column, reading out of bounds for input columns
    imax = int(max);
    imin = int(min);
    rmax = max;
    rmin = min;

    // a column that is already binary (0/1) is left unnormalized
    if((imax == 1) && (imin == 0) && (rmax <= 1.0) && (rmin <= 0.0))
    {trigger = 0;}

    if((imax == 1) && (imin == 1) && (rmax <= 1.0) && (rmin <= 1.0))
    {trigger = 0;}

    if((imax == 0) && (imin == 0) && (rmax <= 0.0) && (rmin <= 0.0))
    {trigger = 0;}

    // normalize the values in each dimension of the signal
    if(trigger != 0)
    {
      for(i = 0; i < sample_number; i++)
      {number_of_samples[i].data_in_sample[j] = (number_of_samples[i].data_in_sample[j] - min) / (max - min);}
    }
  }
}

void Data_type::acquire_net_info(int signal, int no_output_nodes)
{
  signal_dimensions = signal;
  nodes_in_output_layer = no_output_nodes;
}

void Data_type::load_data_into_array(void)
{
  // open the file containing the data
  ifstream file_ptr;  // pointer to a file
  int i;
  file_ptr.open(filename, ios::in);

  // create dynamic array to hold the specified number of samples
  number_of_samples = new sample_data[sample_number];

  for(i = 0; i < sample_number; i++)
  // create a dynamic array to hold the dimensions of each signal
  {number_of_samples[i].data_in_sample = new float[signal_dimensions + nodes_in_output_layer];}

  int dimensions = signal_dimensions + nodes_in_output_layer;

  // read in data from file and place in array
  for(i = 0; i < sample_number; i++)
  {
    for(int j = 0; j < dimensions; j++)
    {file_ptr >> number_of_samples[i].data_in_sample[j];}
  }
  file_ptr.close();
  cout << "\n";
}

void Data_type::delete_signal_array(void)
{delete [] number_of_samples; number_of_samples = 0;}

class signal_data // class for randomizing the input signals
{
  public:
  int signal_value;
  float signal_rank;
};

class Training : public Data_type   // derived class for training data
{
  public:
  void request_training_data(int net_no); // function to request data for training
  int number_of_epochs;
  signal_data *signalpoint;
  float rate_of_learning;  // learning rate constant used by the net
  char presentation_order; // determines fixed or random signal presentation
  void scramble_data_in_array(void);
  float minimum_average_squared_error;
  void delete_signal_data_array(void);
  Training();
  ~Training();
};

Training::Training()
{signalpoint = 0;} // null so the destructor is safe if training never ran

Training::~Training()
{
  // max_output_value and min_output_value are released by the Data_type
  // destructor; deleting them here too (as the original did) double-freed them
  delete [] signalpoint;
}


void Training::request_training_data(int net_no)
{
  cout << "Please enter the file name containing the training data for neural net no. " << net_no << "\n";
  cin >> filename;
  specify_signal_sample_size();
  signalpoint = new signal_data[sample_number];
  for(int i = 0; i < sample_number; i++) {signalpoint[i].signal_value = i;}
  normalize_data_in_array();
}
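
// scramble_data_in_array() randomizes presentation order by assigning each
// signal a random rank from bedlam() and exchange-sorting on that rank;
// with n samples this is O(n^2), acceptable for small training sets.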

void Training::scramble_data_in_array(void)
{
  int swap1, swap2, hold_sample;
  float hold_rank;

  // randomly assign rank to all signals
  for(int sig = 0; sig < sample_number; sig++)
  {signalpoint[sig].signal_rank = bedlam(&gaset);}

  // reorder signals according to rank
  for(swap1 = 0; swap1 < sample_number - 1; swap1++)
  {
    for(swap2 = swap1 + 1; swap2 < sample_number; swap2++)
    {
      if(signalpoint[swap1].signal_rank > signalpoint[swap2].signal_rank)
      {
        hold_sample = signalpoint[swap2].signal_value;
        hold_rank = signalpoint[swap2].signal_rank;
        signalpoint[swap2].signal_value = signalpoint[swap1].signal_value;
        signalpoint[swap2].signal_rank = signalpoint[swap1].signal_rank;
        signalpoint[swap1].signal_value = hold_sample;
        signalpoint[swap1].signal_rank = hold_rank;
      }
    }
  }
}

void Training::delete_signal_data_array(void)
{
  delete [] signalpoint;
  signalpoint = 0; // the destructor deletes signalpoint as well; nulling avoids a double free
  delete_signal_array();
}

class Testing : public Training   // derived class for testing data
{
  public:
  void request_testing_data(int net_no, int test); // function to request data for testing
  float average_squared_error;
};

void Testing::request_testing_data(int net_no, int test)
{
  cout << "Please enter the file name containing the testing data for neural net no. " << net_no << "\n\n";
  cin >> filename;
  cout << "\n\n";
  cout << "For test #" << test + 1 << ":";
  cout << "\n\n";
  specify_signal_sample_size();
  normalize_data_in_array();
}


//************************************************************************//
class NeuralB    // class containing neural net structure for backpropagation
{                // along with training and testing data
  private:
  Training Training_Data;       // file name and dynamic array for training
  Testing *Test_Data;           // files containing data to test the network
  void initialize_training_storage_array(int N);
  void establish_test_battery_size(void);
  void train_net_with_backpropagation(void);
  void test_neural_network(int BNET);
  public:
  Back_Topology Net_Design;     // specifications for backpropagating network
  int number_of_tests;
  void establish_backprop_network(void);
  void network_training_testing(int TT);
  NeuralB();
  ~NeuralB();
};
//************************************************************************//

// These NeuralB member functions transmit data from the topology
// to the data storage arrays

NeuralB::NeuralB()
{Test_Data = 0; number_of_tests = 0;} // null so the destructor is safe if no tests were run

NeuralB::~NeuralB()
{delete [] Test_Data;}

void NeuralB::initialize_training_storage_array(int N)
{
  Training_Data.acquire_net_info(Net_Design.signal_dimensions, Net_Design.nodes_in_output_layer);
  Training_Data.request_training_data(N);
}

void NeuralB::establish_test_battery_size(void)
{
  clrscr();
  cout << "Please enter the number of tests you wish to run on the BP neural net: ";
  cin >> number_of_tests;  cout << "\n";
  if(number_of_tests > 0)
  {
    Test_Data = new Testing[number_of_tests];
    for(int i = 0; i < number_of_tests; i++)
    {Test_Data[i].acquire_net_info(Net_Design.signal_dimensions, Net_Design.nodes_in_output_layer);}
  }
}



// define the establish_backprop_network function
void NeuralB::establish_backprop_network(void)
{
  clrscr();
  cout << " **** Feedforward network using backpropagation **** " << "\n\n\n";
  Net_Design.construct_and_initialize_backprop_network();
  // the activation functions of the nodes are set inside the call above
} // end establish_backprop_network function

// define train_net_with_backpropagation function
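// Per epoch, for each signal: (1) feed the input forward through up to two
// hidden layers to the output layer, (2) compute output deltas against the
// target columns of the sample, (3) backpropagate the deltas layer by layer,
// and (4) apply the delta-rule updates.  After each epoch the routine
// optionally logs errors, tracks the best average squared error, and
// reshuffles the presentation order; training stops at the epoch limit or
// when the target error is reached.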
void NeuralB::train_net_with_backpropagation(void)
{
  char savefile;
  float output_error, sum_of_error, real_error_difference, target_minimum_average_squared_error;
  int sig, layers, sigdim, epoch, hidnode, hidnode2, outnode;
  int loopexit = 0;
  float *maxdifference;
  float *meandifference;

  ofstream savefile_ptr;

  clrscr();
  cout << "please enter the number of epochs you wish to use for training: ";
  cin >> Training_Data.number_of_epochs; cout << "\n";
  cout << "please enter the learning rate constant for backpropagation (0-1): ";
  cin >> Training_Data.rate_of_learning; cout << "\n";
  cout << "please enter the minimum average squared error you wish to target" << "\n";
  cin >> target_minimum_average_squared_error; cout << "\n";
  do
  {
    cout << "do you wish to save the mean error, maximum error" << "\n";
    cout << "and average squared error for each epoch to a file? (Y or N): "; cin >> savefile;
    savefile = toupper(savefile);
    if((savefile == 'Y') || (savefile == 'N')) {loopexit = 2;}
    cout << "\n";
  } while(loopexit <= 1);

  if(savefile == 'Y')
  {
    cout << "please enter the name of the file which will hold the results of training:" << "\n";
    cin >> Training_Data.resultsname; cout << "\n";
    savefile_ptr.open(Training_Data.resultsname, ios::out);
  }

  cout << "Do you want signal presentation in random or fixed order (R or F): ";
  cin >> Training_Data.presentation_order;  cout << "\n";
  Training_Data.presentation_order = toupper(Training_Data.presentation_order); cout << "\n";

  maxdifference = new float[Net_Design.nodes_in_output_layer];
  meandifference = new float[Net_Design.nodes_in_output_layer];
  for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
  { // start from zero; the original accumulated into uninitialized memory on the first epoch
    maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0;
  }

  // initiate backpropagation for the appropriate number of epochs
  epoch = 0;
  do
  {
    sum_of_error = 0;

    for(sig = 0; sig < Training_Data.sample_number; sig++)
    {
      output_error = 0;
      for(sigdim = 0; sigdim < Training_Data.signal_dimensions; sigdim++)
      {
        if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
        {
          for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
          {Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
        }
        else // 1 or 2 hidden layers present
        {
          for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
          {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
        }
      }

      if(Net_Design.number_of_hidden_layers == 2) // two layers are present
      {
        for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
        {
          Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
          for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
          {Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
        }
      }

      if(Net_Design.number_of_hidden_layers > 0)
      {
        for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
        {
          Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
          for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
          {Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].output_signal;}
        }
      }
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
        Net_Design.node_in_output_layer[outnode].calculate_output_signal(Net_Design.activation_function_for_output_layer);
        Net_Design.node_in_output_layer[outnode].calculate_output_error_information_term(Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[Training_Data.signal_dimensions + outnode], Net_Design.activation_function_for_output_layer);
        // calculate the instantaneous sum of squared errors (Haykin, 1994)
        real_error_difference = (pow(Net_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
        output_error += 0.5 * pow(real_error_difference, 2.0);

        // calculate maximum and mean absolute error difference for each node
        real_error_difference = Net_Design.node_in_output_layer[outnode].absolute_error_difference * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
        meandifference[outnode] += real_error_difference / float(Training_Data.sample_number);
        if(sig == 0) {maxdifference[outnode] = real_error_difference;}
        else
        {
          if(real_error_difference > maxdifference[outnode])
          {maxdifference[outnode] = real_error_difference;}
        }
      }

      // average squared error for each signal is saved
      sum_of_error += output_error / float(Training_Data.sample_number);

      // backpropagation of error will depend on the number of hidden layers
      if(Net_Design.number_of_hidden_layers > 0)
      { // backpropagate from output node to adjacent hidden layer
        for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
        {
          for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
          {Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].error_information_term += Net_Design.node_in_output_layer[outnode].error_information_term * Net_Design.node_in_output_layer[outnode].weight_of_inputs[hidnode];}
        }
        // calculate error information term for each node in the hidden layer
        for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
        {Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}

        if(Net_Design.number_of_hidden_layers > 1)
        { // backpropagate error from hidden layer 2 to hidden layer 1
          for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
          {
            for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
            {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].error_information_term += Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].error_information_term * Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].weight_of_inputs[hidnode];}
          }
          for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
          {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}
        }
      }

      // update the network's output nodes
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {Net_Design.node_in_output_layer[outnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}

      // update the network's hidden nodes (if they exist)
      if(Net_Design.number_of_hidden_layers > 0)
      {
        for(layers = 0; layers < Net_Design.number_of_hidden_layers; layers++)
        {
          for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[layers].nodes_in_hidden_layer; hidnode++)
          {Net_Design.hidden_layer_number[layers].node_in_hidden_layer[hidnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}
        }
      }
    } // end sig loop

    // save error information (if required)
    if(savefile == 'Y')
    {
      savefile_ptr << epoch + 1 << " ";
      savefile_ptr << sum_of_error << "  ";
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {savefile_ptr << maxdifference[outnode] << " " << meandifference[outnode] << "    ";}
      savefile_ptr << endl;
      cout.width(6);
      clrscr();
      cout << "Epoch #" << epoch + 1 << " is completed " << endl;
    }

    if(epoch == 0)
    {Training_Data.minimum_average_squared_error = sum_of_error;}
    else
    {
      if(sum_of_error < Training_Data.minimum_average_squared_error)
      {Training_Data.minimum_average_squared_error = sum_of_error;}
    }

    // scramble the order of signal presentation (if required)
    if(Training_Data.presentation_order == 'R')
    {Training_Data.scramble_data_in_array();}

    for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
    { maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0; }

    if(Training_Data.minimum_average_squared_error <= target_minimum_average_squared_error)
    {break;}

    epoch = epoch + 1;

  } while(epoch < Training_Data.number_of_epochs);

  savefile_ptr.close();

  // delete the arrays holding the training data
  Training_Data.delete_signal_data_array();
  delete [] maxdifference;
  delete [] meandifference;
} // end of backpropagation function


// define the function that tests the neural network
void NeuralB::test_neural_network(int BNET)
{
  float output_error, sum_of_error, real_output;
  int sig, sigdim, hidnode, hidnode2, outnode;

  int bnet = BNET;
  for(int t = 0; t < number_of_tests; t++)
  {
    Test_Data[t].request_testing_data(bnet, t);

    sum_of_error = 0;

    cout << "please enter the name of the file which will hold the results of test " << t + 1 << ":" << "\n";
    cin >> Test_Data[t].resultsname; cout << "\n";
    ofstream savefile_ptr(Test_Data[t].resultsname);

    for(sig = 0; sig < Test_Data[t].sample_number; sig++)
    {
      output_error = 0;
      savefile_ptr << sig + 1 << " ";

      for(sigdim = 0; sigdim < Test_Data[t].signal_dimensions; sigdim++)
      {
        if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
        {
          for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
          {Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
        }
        else // 1 or 2 hidden layers present
        {
          for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
          {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
        }
      }

      if(Net_Design.number_of_hidden_layers == 2) // two layers are present
      {
        for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
        {
          Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
          for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
          {Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
        }
      }

      if(Net_Design.number_of_hidden_layers > 0)
      {
        for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
        {
          Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
          for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
          {Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].output_signal;}
        }
      }
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
        Net_Design.node_in_output_layer[outnode].calculate_output_signal(Net_Design.activation_function_for_output_layer);
        Net_Design.node_in_output_layer[outnode].calculate_output_error_information_term(Test_Data[t].number_of_samples[sig].data_in_sample[Test_Data[t].signal_dimensions + outnode], Net_Design.activation_function_for_output_layer);
      }

      // convert normalized target output data and send to file
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
        real_output = Test_Data[t].min_output_value[outnode] + (Test_Data[t].number_of_samples[sig].data_in_sample[outnode + Test_Data[t].signal_dimensions] * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]));
        savefile_ptr << real_output << " ";
      }

      savefile_ptr << " ";

      // convert normalized output data and send to file
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
        real_output = Test_Data[t].min_output_value[outnode] + (Net_Design.node_in_output_layer[outnode].output_signal * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]));
        savefile_ptr << real_output << " ";
      }

      // send absolute differences between each node and its output to a file
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
        real_output = (pow(Net_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]);
        savefile_ptr << real_output << " ";
        real_output = pow(real_output, 2.0);
        output_error += 0.5 * real_output;
      }
      // sum square of error
      savefile_ptr << output_error << "\n";
      if(sig == Test_Data[t].sample_number - 1)
      {savefile_ptr.close();}

      sum_of_error += output_error;
    }
    Test_Data[t].average_squared_error = sum_of_error / Test_Data[t].sample_number;
    Test_Data[t].delete_signal_array();
  }
} // end test neural network function

void NeuralB::network_training_testing(int TT)
{
  int tt = TT;
  int menu_choice;

  clrscr();
  cout << "\n\n\n\n";
  cout << "**************** Operations Menu ****************" << "\n\n";
  cout << "  Please select one of the following options:" << "\n\n";
  cout << "      1. Train Backprop network only " << "\n\n";
  cout << "      2. Test Backprop network only " << "\n\n";
  cout << "      3. Train and Test Backprop network" << "\n\n";
  cout << "*************************************************" << "\n\n";
  cout << "         Your choice?: "; cin >> menu_choice;
  cout << "\n\n";
  switch(menu_choice)
  {
    case 1:
    initialize_training_storage_array(tt);
    train_net_with_backpropagation();
    break;

    case 2:
    establish_test_battery_size();
    if(number_of_tests > 0)
    {test_neural_network(tt);}
    break;

    case 3:
    initialize_training_storage_array(tt);
    train_net_with_backpropagation();
    establish_test_battery_size();
    if(number_of_tests > 0)
    {test_neural_network(tt);}
    break;

    default: network_training_testing(tt); // invalid entry: show the menu again
  }
}
// This concludes the backpropagation section of the program

//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

// (ART 1)  Define base class for Interface and Cluster units of the
//          Adaptive Resonance Theory Neural Network 1

class ART_units
{
  public:
  float *input_value;
  float *output_value;
  float *input_weight_vector;
  int number_of_inputs;
  int number_of_outputs;
  float activation;
  void establish_input_output_arrays(void);
  virtual void establish_input_weight_vector_array(void);
  virtual void initialize_inputs_and_weights(void);
  ~ART_units();
};

ART_units::~ART_units()
{
  delete [] input_value;
  delete [] output_value;
  delete [] input_weight_vector;
}

void ART_units::establish_input_output_arrays(void)
{
  input_value = new float[number_of_inputs];
  output_value = new float[number_of_outputs];
}

void ART_units::establish_input_weight_vector_array(void)
{input_weight_vector = new float[number_of_inputs - 1];}

void ART_units::initialize_inputs_and_weights(void)
{
  for(int w = 0; w < number_of_inputs - 1; w++)
  {input_weight_vector[w] = 1.0;}

  for(int c = 1; c < number_of_inputs; c++)
  {input_value[c] = 0.0;}
  activation = 0.0;
}

// establish Interface node attributes
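// (An interface (F1) node obeys the ART1 two-thirds rule: it fires only when
//  at least two of its three sources, namely the bottom-up input, the top-down
//  feedback from the cluster layer, and the gain signal G1, are active.)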
class Interface_units: public ART_units
{
  public:
  void recompute_activation(int winning_cluster);
  void calculate_output_value(int G1);
};

void Interface_units::recompute_activation(int winning_cluster)
{activation = input_value[0] * input_weight_vector[winning_cluster];}

void Interface_units::calculate_output_value(int G1)
{
  float feedback_signal, node_output, two_thirds_rule;
  feedback_signal = 0.0;
  // calculate feedback signal through use of weighted sum
  for(int f = 0; f < number_of_inputs - 1; f++)
  {feedback_signal += input_weight_vector[f] * input_value[f+1];}

  two_thirds_rule = feedback_signal + input_value[0] + float(G1);

  // use Two Thirds Rule to determine node output
  if(two_thirds_rule >= 2.0) {node_output = 1.0;} else {node_output = 0.0;}

  // establish output vector to cluster units
  for(int p = 0; p < number_of_outputs; p++)
  {output_value[p] = node_output;}
}

// establish Cluster node attributes
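// (A cluster (F2) node's bottom-up weights start at 1/(1+n) for an
//  n-dimensional input, the customary ART1 initialization; its net input is
//  the dot product of the interface outputs with those weights.)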
class Cluster_units: public ART_units
{
  public:
  int cluster_tag;
  float net_input;
  void establish_input_weight_vector_array(void);
  void initialize_inputs_and_weights(void);
  void calculate_net_input(void);
  void establish_node_output(void);
  Cluster_units();  // default constructor
};

Cluster_units::Cluster_units()
{cluster_tag = 0;}

void Cluster_units::establish_input_weight_vector_array(void)
{input_weight_vector = new float[number_of_inputs];}

void Cluster_units::initialize_inputs_and_weights(void)
{
  for(int c = 0; c < number_of_inputs; c++)
  {input_weight_vector[c] = 1.0 / (1.0 + number_of_inputs);}
}

void Cluster_units::calculate_net_input(void)
{
  net_input = 0.0;
  for(int n = 0; n < number_of_inputs; n++)
  {net_input += input_value[n] * input_weight_vector[n];}
}

void Cluster_units::establish_node_output(void)
{
  for(int oput = 0; oput < number_of_outputs - 1; oput++)
  {
    if(activation >= 0.0)
    {output_value[oput] = activation;}
    else
    {output_value[oput] = 0.0;}
  }
}

// establish Input unit attributes
class Input_units {public: float signal_value;};

// establish ART1 neural network attributes
class ART_Topology
{
  public:
  char netcreate;
  int clustercount;
  int dimensions_of_signal;
  int number_of_cluster_units;
  int reset_value;
  int resetcount;
  float vigilance_parameter;
  float norm_of_activation_vector;
  float norm_of_input_vector;
  float weight_update_parameter;
  int cluster_champ;
  int clusterange;
  Input_units     *node_in_input_layer;
  Interface_units *node_in_interface_layer;
  Cluster_units   *node_in_cluster_layer;
  void establish_net_topology(void);
  void upload_network(void);
  void transmit_pattern_to_interface(void);
  void transmit_pattern_to_cluster(void);
  void broadcast_output_to_cluster_layer(void);
  void cluster_nodes_compete_for_activation(int train_or_test);
  void compute_norm_of_activation_vector(void);
  void compute_norm_of_input_vector(void);
  void recompute_activation_vector_of_interface_layer(void);
  void update_the_network(void);
  void set_cluster_activation_to_zero(void);
  void savenet(void);
  ART_Topology();
  ~ART_Topology(); // class destructor
};

ART_Topology::ART_Topology()
{
  clustercount = 0;
  clusterange = 0;
  resetcount = 0;
}

  1195. ART_Topology::~ART_Topology()
  1196. {
  1197.   delete [] node_in_input_layer;
  1198.   delete [] node_in_interface_layer;
  1199.   delete [] node_in_cluster_layer;
  1200. }

  1201. void ART_Topology::establish_net_topology(void)
  1202. {
  1203.   weight_update_parameter = 2.0;
  1204.   node_in_input_layer = new Input_units[dimensions_of_signal];
  1205.   node_in_interface_layer = new Interface_units[dimensions_of_signal];
  1206.   node_in_cluster_layer = new Cluster_units[number_of_cluster_units];

  1207.   // Establish interface layer of ART1 network
  1208.   for(int I = 0; I < dimensions_of_signal; I++)
  1209.   {
  1210.     node_in_interface_layer[I].number_of_inputs = number_of_cluster_units + 1;
  1211.     node_in_interface_layer[I].number_of_outputs = number_of_cluster_units;
  1212.     node_in_interface_layer[I].establish_input_output_arrays();
  1213.     node_in_interface_layer[I].establish_input_weight_vector_array();
  1214.     node_in_interface_layer[I].initialize_inputs_and_weights();
  1215.   }

  1216.   // Establish cluster layer of ART1 network
  1217.   for(int C = 0; C < number_of_cluster_units; C++)
  1218.   {
  1219.     node_in_cluster_layer[C].number_of_inputs = dimensions_of_signal;
  1220.     node_in_cluster_layer[C].number_of_outputs = dimensions_of_signal + 1;
  1221.     node_in_cluster_layer[C].establish_input_output_arrays();
  1222.     node_in_cluster_layer[C].establish_input_weight_vector_array();
  1223.     node_in_cluster_layer[C].initialize_inputs_and_weights();
  1224.   }

  1225. }

  1226. void ART_Topology::upload_network(void)
  1227. {
  1228.   char getname[13];
  1229.   ifstream get_ptr;
  1230.   int netid, node, dim;
  1231.   int dolock = 0;

  1232.   do
  1233.   {
  1234.     cout << "\n\n";
  1235.     cout << "Please enter the name of the file which holds the ART1 Network" << "\n";
  1236.     cin >> getname; cout << "\n";
  1237.     get_ptr.open(getname, ios::in);
  1238.     get_ptr >> netid;
  1239.     if(netid == 2) {dolock = 1;}
  1240.     else
  1241.     {
  1242.       cout << "Error** file contents do not match ART1 specifications" << "\n";
  1243.       cout << "try again" << "\n";
  1244.       get_ptr.close();
  1245.     }
  1246.   } while(dolock <= 0);

  1247.   get_ptr >> dimensions_of_signal;
  1248.   get_ptr >> weight_update_parameter;
  1249.   get_ptr >> vigilance_parameter;
  1250.   get_ptr >> clusterange;
  1251.   get_ptr >> clustercount;
  1252.   get_ptr >> number_of_cluster_units;

  1253.   node_in_input_layer = new Input_units[dimensions_of_signal];
  1254.   node_in_interface_layer = new Interface_units[dimensions_of_signal];
  1255.   node_in_cluster_layer = new Cluster_units[number_of_cluster_units];

  1256.   for(node = 0; node < dimensions_of_signal; node++)
  1257.   {
  1258.     node_in_interface_layer[node].number_of_inputs = number_of_cluster_units + 1;
  1259.     node_in_interface_layer[node].number_of_outputs = number_of_cluster_units;
  1260.     node_in_interface_layer[node].establish_input_output_arrays();
  1261.     node_in_interface_layer[node].establish_input_weight_vector_array();
  1262.     node_in_interface_layer[node].initialize_inputs_and_weights();
  1263.     for(dim = 0; dim < number_of_cluster_units; dim++)  // top-down weights are indexed 0..N-1 during training
  1264.     {get_ptr >> node_in_interface_layer[node].input_weight_vector[dim];}
  1265.   }

  1266.   for(node = 0; node < number_of_cluster_units; node++)
  1267.   {
  1268.     node_in_cluster_layer[node].number_of_inputs = dimensions_of_signal;
  1269.     node_in_cluster_layer[node].number_of_outputs = dimensions_of_signal + 1;
  1270.     node_in_cluster_layer[node].establish_input_output_arrays();
  1271.     node_in_cluster_layer[node].establish_input_weight_vector_array();
  1272.     node_in_cluster_layer[node].initialize_inputs_and_weights();
  1273.     get_ptr >> node_in_cluster_layer[node].cluster_tag;
  1274.     for(dim = 0; dim < dimensions_of_signal; dim++)
  1275.     {get_ptr >> node_in_cluster_layer[node].input_weight_vector[dim];}
  1276.   }
  1277.   get_ptr.close();
  1278. }

  1279. void ART_Topology::transmit_pattern_to_interface(void)
  1280. {
  1281.   for(int d = 0; d < dimensions_of_signal; d++)
  1282.   {
  1283.     node_in_interface_layer[d].input_value[0] = node_in_input_layer[d].signal_value;
  1284.     node_in_interface_layer[d].activation = node_in_input_layer[d].signal_value;
  1285.   }
  1286. }

  1287. void ART_Topology::transmit_pattern_to_cluster(void)
  1288. {
  1289.    int c;
  1290.    for(int d = 0; d < dimensions_of_signal; d++)
  1291.    {
  1292.      for(c = 0; c < number_of_cluster_units; c++)
  1293.      {node_in_cluster_layer[c].input_value[d] = node_in_input_layer[d].signal_value;}
  1294.    }
  1295. }

  1296. void ART_Topology::broadcast_output_to_cluster_layer(void)
  1297. {
  1298.   int Gain_one;
  1299.   int cluster_active = 0;
  1300.   int d, c;
  1301.   for(c = 0; c < number_of_cluster_units; c++)
  1302.   {if(node_in_cluster_layer[c].activation == 1.0) {cluster_active = 1;} }
  1303.   compute_norm_of_input_vector();

  1304.   if((cluster_active != 1) && (norm_of_input_vector > 0.0))
  1305.   {Gain_one = 1;} else {Gain_one = 0;}

  1306.   // establish interface output vector
  1307.   for(d = 0; d < dimensions_of_signal; d++)
  1308.   {node_in_interface_layer[d].calculate_output_value(Gain_one);}

  1309.   //transmit interface output to units in cluster layer
  1310.   for(d = 0; d < dimensions_of_signal; d++)
  1311.   {
  1312.     for(c = 0; c < number_of_cluster_units; c++)
  1313.     {node_in_cluster_layer[c].input_value[d] = node_in_interface_layer[d].output_value[c];}
  1314.   }
  1315. }

  1316. void ART_Topology::cluster_nodes_compete_for_activation(int train_or_test)
  1317. {
  1318.   int d, cluster;
  1319.   float champion = -1.0;  // start below zero so a unit with zero net input can still win

  1320.   for(cluster = 0; cluster < clusterange + 1; cluster++)
  1321.   {
  1322.     if(node_in_cluster_layer[cluster].activation != -1.0)
  1323.     {
  1324.       node_in_cluster_layer[cluster].calculate_net_input();
  1325.       if(node_in_cluster_layer[cluster].net_input > champion)
  1326.       {
  1327.         champion = node_in_cluster_layer[cluster].net_input;
  1328.         cluster_champ = cluster;
  1329.       }
  1330.     }
  1331.   }
  1332.   if((node_in_cluster_layer[cluster_champ].cluster_tag == 0) && (train_or_test < 2))
  1333.   {
  1334.     node_in_cluster_layer[cluster_champ].cluster_tag = clustercount + 1;
  1335.     clustercount = clustercount + 1;
  1336.   }

  1337.   if(train_or_test < 2)
  1338.   {

  1339.     for(cluster = 0; cluster < clusterange + 1; cluster++)
  1340.     {
  1341.       if(cluster == cluster_champ)
  1342.       {node_in_cluster_layer[cluster].activation = 1.0;}
  1343.       else
  1344.       {
  1345.         if(node_in_cluster_layer[cluster].activation != -1.0)
  1346.         {node_in_cluster_layer[cluster].activation = 0.0;}
  1347.       }
  1348.       node_in_cluster_layer[cluster].establish_node_output();

  1349.       // send output signals to Interface layer
  1350.       for(d = 0; d < dimensions_of_signal; d++)
  1351.       {node_in_interface_layer[d].input_value[cluster + 1] = node_in_cluster_layer[cluster].output_value[d];}
  1352.     }
  1353.   }
  1354. }

  1355. void ART_Topology::compute_norm_of_activation_vector(void)
  1356. {
  1357.   norm_of_activation_vector = 0.0;
  1358.   for(int d = 0; d < dimensions_of_signal; d++)
  1359.   {norm_of_activation_vector += node_in_interface_layer[d].activation;}
  1360.   compute_norm_of_input_vector();
  1361. }

  1362. void ART_Topology::compute_norm_of_input_vector(void)
  1363. {
  1364.   norm_of_input_vector = 0.0;
  1365.   for(int d = 0; d < dimensions_of_signal; d++)
  1366.   {norm_of_input_vector += node_in_input_layer[d].signal_value;}
  1367. }

  1368. void ART_Topology::recompute_activation_vector_of_interface_layer(void)
  1369. {
  1370.   for(int d = 0; d < dimensions_of_signal; d++)
  1371.   {node_in_interface_layer[d].recompute_activation(cluster_champ);}
  1372. }

  1373. void ART_Topology::update_the_network(void)
  1374. {
  1375.   recompute_activation_vector_of_interface_layer();
  1376.   compute_norm_of_activation_vector();
  1377.   float ratio_test = norm_of_activation_vector / norm_of_input_vector;

  1378.   if(ratio_test < vigilance_parameter)
  1379.   {
  1380.    node_in_cluster_layer[cluster_champ].activation = -1.0;
  1381.    reset_value = 1;
  1382.    resetcount += reset_value;
  1383.    if(resetcount == number_of_cluster_units - 1)
  1384.    {
  1385.      clusterange = clusterange + 1;
  1386.      if(clusterange > number_of_cluster_units)
  1387.      {clusterange = number_of_cluster_units;}
  1388.    }
  1389.   }
  1390.   else
  1391.   {
  1392.     // update the weights of the champion cluster unit
  1393.     for(int u = 0; u < node_in_cluster_layer[cluster_champ].number_of_inputs; u++)
  1394.     {node_in_cluster_layer[cluster_champ].input_weight_vector[u] = (weight_update_parameter * node_in_interface_layer[u].activation * node_in_cluster_layer[cluster_champ].input_weight_vector[u]) / ((weight_update_parameter - 1.0) + norm_of_activation_vector);}
  1395.     for(int n = 0; n < dimensions_of_signal; n++)
  1396.     {node_in_interface_layer[n].input_weight_vector[cluster_champ] = node_in_interface_layer[n].input_weight_vector[cluster_champ] * node_in_interface_layer[n].activation;}

  1397.     reset_value = 0;
  1398.     resetcount = 0;
  1399.   }
  1400. }
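// --- Added note: the ART1 learning rule implemented above ---
// With L = weight_update_parameter, a = the interface activation vector and
// x = the input vector (for binary vectors the "norms" computed above are
// simply counts of active components), update_the_network() performs:
//   vigilance test:  ||a|| / ||x|| >= vigilance_parameter
//   bottom-up:   b_i <- L * a_i * b_i / ((L - 1) + ||a||)    (champion only)
//   top-down:    t_i <- t_i * a_i                            (champion only)
// A hypothetical helper applying the bottom-up update to a single weight:
static float art1_bottom_up_update(float L, float a_i, float b_old, float norm_a)
{ return (L * a_i * b_old) / ((L - 1.0) + norm_a); }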

  1401. void ART_Topology::set_cluster_activation_to_zero(void)
  1402. {
  1403.    for(int cnode = 0; cnode < clusterange + 1; cnode++)
  1404.    {node_in_cluster_layer[cnode].activation = 0.0;}
  1405. }

  1406. void ART_Topology::savenet(void)
  1407. {
  1408.   char savename[13];
  1409.   ofstream save_ptr;
  1410.   int node, dim;

  1411.   cout << "\n\n";
  1412.   cout << "Please enter the name of the file which will hold the ART network"<<"\n";
  1413.   cin >> savename; cout <<"\n";
  1414.   save_ptr.open(savename, ios::out);

  1415.   save_ptr << 2 << "\n";  //network identifier number
  1416.   save_ptr << dimensions_of_signal << "\n";
  1417.   save_ptr << weight_update_parameter << "\n";
  1418.   save_ptr << vigilance_parameter << "\n";
  1419.   save_ptr << clusterange << "\n";
  1420.   save_ptr << clustercount << "\n";
  1421.   save_ptr << number_of_cluster_units << "\n";

  1422.   for(node = 0; node < dimensions_of_signal; node++)
  1423.   {
  1424.     for(dim = 0; dim < number_of_cluster_units; dim++)  // match the 0..N-1 indexing used during training and in upload_network()
  1425.     {save_ptr << node_in_interface_layer[node].input_weight_vector[dim] << " ";}
  1426.     save_ptr << "\n";
  1427.   }

  1428.   for(node = 0; node < number_of_cluster_units; node++)
  1429.   {
  1430.     save_ptr << node_in_cluster_layer[node].cluster_tag << "\n";
  1431.     for(dim = 0; dim < dimensions_of_signal; dim++)
  1432.     {save_ptr << node_in_cluster_layer[node].input_weight_vector[dim] << " ";}
  1433.     save_ptr << "\n";
  1434.   }
  1435.   save_ptr.close();
  1436. }
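// --- Added note: layout of the network file written by savenet() and read
// back by upload_network(), as derived from the two routines:
//   2                          network identifier for ART1
//   dimensions_of_signal
//   weight_update_parameter
//   vigilance_parameter
//   clusterange
//   clustercount
//   number_of_cluster_units
//   top-down weight vector, one line per interface node
//   then, per cluster node: its cluster_tag followed by its bottom-up weights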

  1437. // Classes which specify the containers of ART training and test data
  1438. class ART_Training_Data : public Data_type
  1439. {
  1440.   public:
  1441.   void determine_sample_number(void);
  1442.   void load_data_into_array(void);
  1443.   virtual void request_ART_data(int net_no);
  1444. };


  1445. void ART_Training_Data::load_data_into_array(void)
  1446. {
  1447.    int d, i;
  1448.    float dimensions;

  1449.    // open the file containing the data
  1450.    ifstream Afile_ptr; // pointer to a file
  1451.    Afile_ptr.open(filename, ios::in);
  1452.    //create a dynamic array to hold the specified number of samples
  1453.    number_of_samples = new sample_data[sample_number];

  1454.    for(i = 0; i < sample_number; i++)
  1455.    {number_of_samples[i].data_in_sample = new float[signal_dimensions];}

  1456.    // read in data from file and place in array
  1457.    for(i = 0; i < sample_number; i++)
  1458.    {
  1459.      for(d = 0; d < signal_dimensions; d++)
  1460.      {
  1461.         Afile_ptr >> dimensions;
  1462.         number_of_samples[i].data_in_sample[d] = dimensions;
  1463.      }
  1464.    }
  1465.     Afile_ptr.close();
  1466. }

  1467. void ART_Training_Data::determine_sample_number(void)
  1468. {
  1469.   ifstream dfile_ptr; // pointer to a file
  1470.   dfile_ptr.open(filename, ios::in);

  1471.   float hold;
  1472.   int lock = 1;
  1473.   sample_number = 0;

  1474.   do
  1475.   {
  1476.     // count only successful reads; the failed read at end-of-file must not be counted
  1477.     if(dfile_ptr >> hold) {sample_number += 1;}
  1478.     else {lock = 0;}
  1479.   }while(lock > 0);

  1480.   dfile_ptr.close();
  1481.   sample_number = int(sample_number / signal_dimensions);
  1482. }
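// --- Added note: determine_sample_number() counts the whitespace-separated
// values in the file and divides by the signal dimensionality, so the data
// file must contain complete samples only (no header line, no extra tokens).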

  1483. void ART_Training_Data::request_ART_data(int net_no)
  1484. {
  1485.   cout << "Please enter the file name containing the training data for ART network no. "<< net_no << "\n";
  1486.   cin >> filename; cout << "\n";
  1487.   specify_signal_sample_size();
  1488. }

  1489. class ART_Test_Data : public ART_Training_Data
  1490. {public: void request_ART_data(int net_no);};

  1491. void ART_Test_Data::request_ART_data(int net_no)
  1492. {
  1493.   cout << "Please enter the file name containing the test data for ART network no. " << net_no << "\n";
  1494.   cin >> filename; cout << "\n";
  1495.   specify_signal_sample_size();
  1496. }

  1497. //************************************************************************//
  1498. class NeuralA    // class containing the ART1 neural net structure
  1499. {                // along with training and testing data
  1500.   private:
  1501.   ART_Training_Data ART_Train;
  1502.   ART_Test_Data * ART_Test;      // the number of tests is variable
  1503.   int number_of_ART_tests;
  1504.   void initialize_ART_training_storage_array(int AN);
  1505.   void establish_ART_test_battery_size(void);
  1506.   void train_ART_network(int ARTN);
  1507.   void test_ART_network(int ANET);
  1508.   public:
  1509.   ART_Topology ART_Design;
  1510.   void construct_ART_network(void);
  1511.   void network_training_testing(int TT);
  1512.   ~NeuralA();
  1513. };
  1514. //****************************************************************************//

  1515. NeuralA::~NeuralA()
  1516. { delete [] ART_Test; }

  1517. void NeuralA::construct_ART_network(void)
  1518. {
  1519. int looploc = 0;
  1520.   clrscr();
  1521.   cout << " **** Adaptive Resonance Theory network for binary signals **** " <<"\n\n\n";
  1522. do
  1523. {
  1524.    cout <<"\n";
  1525.    cout << "Do you wish to" << "\n\n";
  1526.    cout << "C.  Create your own ART1 Network " << "\n";
  1527.    cout << "U.  Upload an existing ART1 Network " << "\n\n";
  1528.    cout << "Your choice?:  "; cin >> ART_Design.netcreate;
  1529.    cout << "\n\n";
  1530.    ART_Design.netcreate = toupper(ART_Design.netcreate);
  1531.    if((ART_Design.netcreate == 'C') || (ART_Design.netcreate == 'U')) {looploc = 1;}
  1532. } while(looploc <= 0);
  1533. if(ART_Design.netcreate == 'U')
  1534. {ART_Design.upload_network();}
  1535. else
  1536. {
  1537.   cout << "\n";
  1538.   cout << "Please enter the dimensions of the ART network's input signal vector: ";
  1539.   cin >> ART_Design.dimensions_of_signal; cout << "\n";
  1540.   cout << "Please enter the vigilance parameter of the ART network: ";
  1541.   cin >> ART_Design.vigilance_parameter; cout << "\n";
  1542. }
  1543. }

  1544. void NeuralA::initialize_ART_training_storage_array(int AN)
  1545. {
  1546.   int AT = AN;
  1547.   ART_Train.acquire_net_info(ART_Design.dimensions_of_signal, ART_Design.number_of_cluster_units);
  1548.   ART_Train.request_ART_data(AT);
  1549.   if(ART_Design.netcreate == 'C')  // constructing new network
  1550.   {
  1551.     ART_Design.number_of_cluster_units = ART_Train.sample_number;
  1552.     ART_Design.establish_net_topology();
  1553.   }
  1554. }


  1555. void NeuralA::train_ART_network(int ARTN)
  1556. {
  1557.   int dim, nodes_available_for_clustering;
  1558.   char savetrain;
  1559.   int dolock = 0;

  1560.   clrscr();
  1561.   cout << "\n\n";
  1562.   cout << "For Neural Network #" << ARTN << "\n";
  1563.   do
  1564.   {
  1565.     cout << "do you wish to save the ART Training results to a file? (Y or N): ";
  1566.     cin >> savetrain;
  1567.     savetrain = toupper(savetrain);
  1568.     if((savetrain == 'N') || (savetrain == 'Y')) {dolock = 1;}
  1569.     cout << "\n";
  1570.   } while(dolock <= 0);

  1571.   if(savetrain == 'Y')
  1572.   {
  1573.     cout << "please enter the name of the file to hold the results of the ART Training" << "\n";
  1574.     cin >> ART_Train.resultsname; cout << "\n";
  1575.   }

  1576.   for(int pattern = 0; pattern < ART_Train.sample_number; pattern++)
  1577.   {
  1578.     // present pattern to input layer
  1579.     for(dim = 0; dim < ART_Design.dimensions_of_signal; dim++)
  1580.     {ART_Design.node_in_input_layer[dim].signal_value = ART_Train.number_of_samples[pattern].data_in_sample[dim];}

  1581.     nodes_available_for_clustering = ART_Design.number_of_cluster_units;

  1582.     do
  1583.     {
  1584.       ART_Design.transmit_pattern_to_interface();
  1585.       ART_Design.broadcast_output_to_cluster_layer();
  1586.       ART_Design.cluster_nodes_compete_for_activation(1);
  1587.       ART_Design.update_the_network();
  1588.       nodes_available_for_clustering = nodes_available_for_clustering - ART_Design.reset_value;
  1589.       if(nodes_available_for_clustering < 1) // input pattern cannot be clustered
  1590.       {
  1591.         // clrscr();
  1592.         cout << "Input pattern #" << pattern + 1 << ": ";
  1593.         for(dim = 0; dim < ART_Design.dimensions_of_signal; dim++)
  1594.         {cout << int(ART_Design.node_in_input_layer[dim].signal_value);}
  1595.         cout << " cannot be clustered" << "\n";
  1596.         break;
  1597.       }
  1598.     } while (ART_Design.reset_value >=1);

  1599.     if(savetrain == 'Y')
  1600.     {
  1601.       ofstream ART_savefile_ptr(ART_Train.resultsname, ios::out|ios::app);
  1602.       ART_savefile_ptr << pattern + 1 << " ";
  1603.       for(dim = 0; dim < ART_Design.dimensions_of_signal; dim++)
  1604.       {ART_savefile_ptr << int(ART_Design.node_in_input_layer[dim].signal_value);}
  1605.       ART_savefile_ptr << " " << ART_Design.node_in_cluster_layer[ART_Design.cluster_champ].cluster_tag << "\n";
  1606.       ART_savefile_ptr.close();
  1607.     }
  1608.     ART_Design.set_cluster_activation_to_zero();
  1609.   }
  1610.    // delete array containing training data
  1611.    ART_Train.delete_signal_array();
  1612. }

  1613. void NeuralA::establish_ART_test_battery_size(void)
  1614. {
  1615.   cout <<"Please enter the number of tests you wish to run on the ART neural network: ";
  1616.   cin >> number_of_ART_tests; cout <<"\n";
  1617.   // create testing array
  1618.   if(number_of_ART_tests > 0)
  1619.   {
  1620.     ART_Test = new ART_Test_Data[number_of_ART_tests];
  1621.     for(int t = 0; t < number_of_ART_tests; t++)
  1622.     {ART_Test[t].acquire_net_info(ART_Design.dimensions_of_signal, ART_Design.number_of_cluster_units);}
  1623.   }
  1624. }

  1625. void NeuralA::test_ART_network(int ANET)
  1626. {
  1627.   int tnet, dim, pattern;

  1628.   tnet = ANET;
  1629.   for(int Atest = 0; Atest < number_of_ART_tests; Atest++)
  1630.   {
  1631.     ART_Test[Atest].request_ART_data(tnet);
  1632.     cout << "For ART1 neural network #" << ANET <<" and test #"<<Atest+1<<":" <<"\n";
  1633.     cout << "please enter the name of the file to hold the results of the ART Testing " << "\n";
  1634.     cin  >> ART_Test[Atest].resultsname; cout << "\n";
  1635.     ofstream ART_savefile_ptr(ART_Test[Atest].resultsname);

  1636.     for(pattern = 0; pattern < ART_Test[Atest].sample_number; pattern++)
  1637.     {
  1638.       for(dim = 0; dim < ART_Design.dimensions_of_signal; dim++)
  1639.       {ART_Design.node_in_input_layer[dim].signal_value = ART_Test[Atest].number_of_samples[pattern].data_in_sample[dim];}

  1640.       ART_Design.transmit_pattern_to_cluster();
  1641.       ART_Design.cluster_nodes_compete_for_activation(2);

  1642.       ART_savefile_ptr <<pattern + 1<<" ";
  1643.       for(dim = 0; dim < ART_Design.dimensions_of_signal; dim++)
  1644.       {ART_savefile_ptr << int(ART_Design.node_in_input_layer[dim].signal_value);}

  1645.       ART_savefile_ptr << " " << ART_Design.node_in_cluster_layer[ART_Design.cluster_champ].cluster_tag << "\n";
  1646.     }

  1647.     ART_savefile_ptr.close();        // end of test
  1648.     ART_Test[Atest].delete_signal_array();
  1649.   }

  1650. }

  1651. void NeuralA::network_training_testing(int TT)
  1652. {
  1653.   int tt = TT;
  1654.   int menu_choice;

  1655.   clrscr();
  1656.   cout << "\n\n\n\n";
  1657.   cout << "**************** Operations Menu ****************" << "\n\n";
  1658.   cout << "  Please select one of the following options:" <<"\n\n";
  1659.   cout << "      1. Train ART1 network only " <<"\n\n";
  1660.   if(ART_Design.netcreate == 'U')
  1661.   {
  1662.     cout << "      2. Test ART1 network only " <<"\n\n";
  1663.     cout << "      3. Train and Test ART1 network" <<"\n\n";
  1664.   }
  1665.   else
  1666.   {
  1667.     cout << "      2. Train and Test ART1 network" <<"\n\n";
  1668.   }
  1669.   cout << "*************************************************" << "\n\n";
  1670.   cout << "         Your choice?: "; cin >> menu_choice;
  1671.   cout << "\n\n";
  1672.   // a freshly created net's menu has no "test only" option, so choice 2 means train-and-test
  1673.   if((menu_choice == 2) && (ART_Design.netcreate == 'C')) {menu_choice = 3;}

  1674.      switch(menu_choice)
  1675.      {
  1676.        case 1:
  1677.        initialize_ART_training_storage_array(tt);
  1678.        train_ART_network(tt);
  1679.        break;

  1680.        case 2:
  1681.        establish_ART_test_battery_size();
  1682.        if(number_of_ART_tests > 0)
  1683.        {test_ART_network(tt);}
  1684.        break;

  1685.        case 3:
  1686.        initialize_ART_training_storage_array(tt);
  1687.        train_ART_network(tt);
  1688.        establish_ART_test_battery_size();
  1689.        if(number_of_ART_tests > 0)
  1690.        {test_ART_network(tt);}
  1691.        break;

  1692.        default:network_training_testing(tt);
  1693.      }

  1694. }

  1695. // This concludes the ART1 section of the program
  1696. //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  1697. //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

  1698. // (Kohonen) Define base class for the Clustering Nodes of
  1699. //           the Kohonen Self-Organizing Map

  1700. //************************ ATTENTION ************************************
  1701. // Note that the Class Kohonen_units will also contain variables and
  1702. // functions relevant to the Radial Basis Function Neural Network (RBFN)
  1703. //***********************************************************************
  1704. class Kohonen_units: public ART_units
  1705. {
  1706.   public:
  1707.   void establish_input_weight_vector_array(void);
  1708.   void initialize_inputs_and_weights(void);
  1709.   void calculate_sum_square_Euclidean_distance(void);
  1710.   void update_the_weights(float learning_rate);
  1711.   Kohonen_units(); // default constructor
  1712. //*******************************************************
  1713.   float transfer_function_width;                 // RBFN
  1714.   float Gaussian_transfer_output;                // RBFN
  1715.   void execute_Gaussian_transfer_function(void); // RBFN
  1716. //*******************************************************
  1717. };

  1718. Kohonen_units::Kohonen_units()
  1719. {number_of_outputs = 1;}

  1720. void Kohonen_units::establish_input_weight_vector_array(void)
  1721. {input_weight_vector = new float[number_of_inputs];}

  1722. void Kohonen_units::initialize_inputs_and_weights(void)
  1723. {
  1724.   static long kseed = gaset;  // bedlam() needs the address of a long seed, not the seed value cast to a pointer
  1725.   for(int k = 0; k < number_of_inputs; k++) {input_weight_vector[k] = bedlam(&kseed);}
  1726. }

  1727. void Kohonen_units::calculate_sum_square_Euclidean_distance(void)
  1728. {
  1729.   double sumsquare;
  1730.   float ss1;
  1731.   int ci;
  1732.   output_value[0] = 0.0;
  1733.   for(int k = 0; k < number_of_inputs; k++)
  1734.   {
  1735.     ci = k;

  1736.     if(input_value[ci] == 0.0)
  1737.     {
  1738.       sumsquare = pow(input_weight_vector[ci], 2.0);
  1739.     }
  1740.     else
  1741.     {
  1742.       sumsquare = pow(fabs(input_weight_vector[ci] - input_value[ci]), 2.0);
  1743.     }
  1744.     output_value[0] += sumsquare;
  1745.     // cout << output_value[0] << "\n";
  1746.     // cin >> output_value[0];
  1747.   }
  1748.   ss1 = output_value[0];

  1749.   output_value[0] = sqrt(fabs(ss1));

  1750. }
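// --- Added note (illustrative sketch; not in the original listing) ---
// The competition metric above is the ordinary Euclidean distance
//   d(x, w) = sqrt( sum_k (x_k - w_k)^2 )
// written with a redundant special case for zero inputs. A plain equivalent:
static float euclidean_distance(const float *x, const float *w, int n)
{
  float ss = 0.0;
  for(int k = 0; k < n; k++) {ss += (x[k] - w[k]) * (x[k] - w[k]);}
  return sqrt(ss);
}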

  1751. void Kohonen_units::update_the_weights(float learning_rate)
  1752. {
  1753.   for(int k = 0; k < number_of_inputs; k++)
  1754.   {input_weight_vector[k] = input_weight_vector[k] + (learning_rate * (input_value[k] - input_weight_vector[k]));}
  1755. }
  1756. // RBFN //
  1757. void Kohonen_units::execute_Gaussian_transfer_function(void)
  1758. {
  1759.   float transfer_ratio = (-1.0) * pow((output_value[0] / transfer_function_width), 2.0);
  1760.   Gaussian_transfer_output = exp(transfer_ratio);
  1761. }
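// --- Added note: execute_Gaussian_transfer_function() evaluates the Gaussian
// radial basis  phi(d) = exp( -(d / sigma)^2 ), with d the Euclidean distance
// held in output_value[0] and sigma the transfer_function_width, so a cluster
// unit responds with 1.0 at its center and decays toward 0 with distance.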

  1762. // define class and member functions which define Kohonen Topology
  1763. class Kohonen_Topology
  1764. {
  1765.   public:
  1766.   int kluster_champ;
  1767.   int dimensions_of_signal;
  1768.   int maximum_number_of_clusters;
  1769.   float max_learning_rate;
  1770.   float min_learning_rate;
  1771.   float interim_learning_rate;
  1772.   Kohonen_units *node_in_cluster_layer;
  1773.   void establish_Kohonen_topology(int netuse);
  1774.   void kluster_nodes_compete_for_activation(void);
  1775.   void update_the_Kohonen_network(int epoch_count, int max_epochs);
  1776.   virtual void upload_network(void); // retrieve network from file
  1777.   virtual void savenet(void); // save network to file
  1778.   Kohonen_Topology();   // class constructor
  1779.   ~Kohonen_Topology();  // class destructor
  1780. };

  1781. Kohonen_Topology::Kohonen_Topology()
  1782. {interim_learning_rate = 1.0;}

  1783. Kohonen_Topology::~Kohonen_Topology()
  1784. {delete [] node_in_cluster_layer;}

  1785. void Kohonen_Topology::establish_Kohonen_topology(int netuse)
  1786. {
  1787. char netcreate;
  1788. int looploc = 0;

  1789. if(netuse == 1)
  1790. {
  1791.    do
  1792.    {
  1793.      cout <<"\n";
  1794.      cout << "Do you wish to" << "\n\n";
  1795.      cout << "C.  Create your own Kohonen Map " << "\n";
  1796.      cout << "U.  Upload an existing Kohonen Map " << "\n\n";
  1797.      cout << "Your choice?:  "; cin >> netcreate;
  1798.      cout << "\n\n";
  1799.      netcreate = toupper(netcreate);
  1800.      if((netcreate == 'C') || (netcreate == 'U')) {looploc = 1;}
  1801.    } while(looploc <= 0);
  1802. }
  1803. else
  1804. {
  1805.    netcreate = 'C';
  1806. }

  1807. if((netcreate == 'U') && (netuse == 1))
  1808. {upload_network();}
  1809. else
  1810. {
  1811.    if(netuse == 1)
  1812.    {
  1813.      cout <<"Please enter the dimensions of the network's input signal vector: ";
  1814.      cin >> dimensions_of_signal; cout <<"\n";
  1815.    }
  1816.    cout << "please enter the maximum number of clusters to be formed: ";
  1817.    cin >> maximum_number_of_clusters; cout << "\n";

  1818.    // establish clustering layer of Kohonen network
  1819.    node_in_cluster_layer = new Kohonen_units[maximum_number_of_clusters];
  1820.    for(int c = 0; c < maximum_number_of_clusters; c++)
  1821.    {
  1822.      node_in_cluster_layer[c].number_of_inputs = dimensions_of_signal;
  1823.      node_in_cluster_layer[c].establish_input_output_arrays();
  1824.      node_in_cluster_layer[c].establish_input_weight_vector_array();
  1825.      node_in_cluster_layer[c].initialize_inputs_and_weights();
  1826.    }
  1827. }
  1828. }

  1829. void Kohonen_Topology::upload_network(void)
  1830. {
  1831.   char getname[13];
  1832.   ifstream get_ptr;
  1833.   int netid, nodes, dim;
  1834.   int dolock = 0;

  1835.   do
  1836.   {
  1837.     cout << "\n\n";
  1838.     cout << "Please enter the name of the file which holds the Kohonen Map" << "\n";
  1839.     cin >> getname; cout << "\n";
  1840.     get_ptr.open(getname, ios::in);
  1841.     get_ptr >> netid;
  1842.     if(netid == 3) {dolock = 1;}
  1843.     else
  1844.     {
  1845.       cout << "Error** file contents do not match Kohonen specifications" << "\n";
  1846.       cout << "try again" << "\n";
  1847.       get_ptr.close();
  1848.     }
  1849.   } while(dolock <= 0);
  1850.   get_ptr >> dimensions_of_signal;
  1851.   get_ptr >> maximum_number_of_clusters;

  1852.   node_in_cluster_layer = new Kohonen_units[maximum_number_of_clusters];
  1853.   for(nodes = 0; nodes < maximum_number_of_clusters; nodes++)
  1854.   {
  1855.    node_in_cluster_layer[nodes].number_of_inputs = dimensions_of_signal;
  1856.    node_in_cluster_layer[nodes].establish_input_output_arrays();
  1857.    node_in_cluster_layer[nodes].establish_input_weight_vector_array();
  1858.   }

  1859.   for(nodes = 0; nodes < maximum_number_of_clusters; nodes++)
  1860.   {
  1861.     for(dim = 0; dim < dimensions_of_signal; dim++)
  1862.     {get_ptr >> node_in_cluster_layer[nodes].input_weight_vector[dim];}
  1863.   }
  1864.   get_ptr.close();
  1865. }

  1866. void Kohonen_Topology::kluster_nodes_compete_for_activation(void)
  1867. {
  1868.   float minimum_distance;
  1869.   for(int m = 0; m < maximum_number_of_clusters; m++)
  1870.   {
  1871.     node_in_cluster_layer[m].calculate_sum_square_Euclidean_distance();
  1872.     if(m == 0)
  1873.     {
  1874.       kluster_champ = m;
  1875.       minimum_distance = node_in_cluster_layer[m].output_value[0];
  1876.     }
  1877.     else
  1878.     {
  1879.       if(node_in_cluster_layer[m].output_value[0] < minimum_distance)
  1880.       {
  1881.         kluster_champ = m;
  1882.         minimum_distance = node_in_cluster_layer[m].output_value[0];
  1883.       }
  1884.     }
  1885.   }
  1886. }

  1887. void Kohonen_Topology::update_the_Kohonen_network(int epoch_count, int max_epochs)
  1888. {
  1889.   int maxepoch;
  1890.   if(max_epochs == 1) {maxepoch = 1;} else {maxepoch = max_epochs - 1;}
  1891.   float adjusted_learning_rate = max_learning_rate - (((max_learning_rate - min_learning_rate) / maxepoch) * epoch_count);
  1892.   interim_learning_rate = adjusted_learning_rate * interim_learning_rate;
  1893.   node_in_cluster_layer[kluster_champ].update_the_weights(interim_learning_rate);
  1894. }
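// --- Added worked example (assumed values, not from the source): with
// max_learning_rate = 0.9, min_learning_rate = 0.1 and max_epochs = 5 the
// linear term above is 0.9, 0.7, 0.5, 0.3, 0.1 in successive epochs.  Since
// this routine runs once per training pattern and interim_learning_rate is a
// running product, the effective rate within epoch 0 already decays
// geometrically: 0.9, 0.81, 0.729, ... after each pattern presented.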

  1895. void Kohonen_Topology::savenet(void)
  1896. {
  1897.   char savename[13];
  1898.   ofstream save_ptr;
  1899.   int node, dim;

  1900.   cout << "\n\n";
  1901.   cout << "Please enter the name of the file which will hold the Kohonen Map" <<"\n";
  1902.   cin >> savename; cout <<"\n";
  1903.   save_ptr.open(savename, ios::out);
  1904.   save_ptr << 3 << "\n";   // network identifier number
  1905.   save_ptr << dimensions_of_signal << "\n";
  1906.   save_ptr << maximum_number_of_clusters << "\n";
  1907.   for(node = 0; node < maximum_number_of_clusters; node++)
  1908.   {
  1909.     for(dim = 0; dim < dimensions_of_signal; dim++)
  1910.     {save_ptr << node_in_cluster_layer[node].input_weight_vector[dim] << " ";}
  1911.     save_ptr <<"\n";
  1912.   }
  1913.   save_ptr.close();
  1914. }

  1915. // define class and member functions which define training and test data
  1916. // storage for the Kohonen Self-Organizing Map

  1917. class Kohonen_Training_Data : public ART_Training_Data
  1918. {
  1919.   public:
  1920.   void acquire_net_info(int signal);
  1921.   void normalize_data_in_array(void);
  1922.   virtual void request_Kohonen_data(int net_no);
  1923. };

  1924. void Kohonen_Training_Data::acquire_net_info(int signal)
  1925. {signal_dimensions = signal;}

  1926. void Kohonen_Training_Data::normalize_data_in_array(void)
  1927. {
  1928.   int i, j, imax, imin;
  1929.   int trigger;
  1930.   float min, max;
  1931.   max_output_value = new float[signal_dimensions];
  1932.   min_output_value = new float[signal_dimensions];

  1933.   for(j = 0; j < signal_dimensions; j++)
  1934.   {
  1935.     trigger = 1;
  1936.     // identify minimum and maximum values for each dimension
  1937.     for(i = 0; i < sample_number; i++)
  1938.     {
  1939.       if(i == 0)
  1940.       {
  1941.         max = number_of_samples[i].data_in_sample[j];
  1942.         min = number_of_samples[i].data_in_sample[j];
  1943.       }
  1944.       else
  1945.       {
  1946.         if(number_of_samples[i].data_in_sample[j] < min)
  1947.         {min = number_of_samples[i].data_in_sample[j];}

  1948.         if(number_of_samples[i].data_in_sample[j] > max)
  1949.         {max = number_of_samples[i].data_in_sample[j];}
  1950.       }
  1951.     }

  1952.     // normalize the values in each dimension of the signal
  1953.     max_output_value[j] = max;
  1954.     min_output_value[j] = min;

  1955.     imax = int(max);
  1956.     imin = int(min);

  1957.     // detect binary-valued dimensions (integer parts of max and min are 0 or 1)
  1958.     if((imax == 1) && (imin == 0) && (max <= 1.0) && (min <= 0.0)) {trigger = 0;}
  1959.     if((imax == 1) && (imin == 1) && (max <= 1.0) && (min <= 1.0)) {trigger = 0;}
  1960.     if((imax == 0) && (imin == 0) && (max <= 0.0) && (min <= 0.0)) {trigger = 0;}

  1963.     if(trigger != 0)   // do not normalize binary signals
  1964.     {
  1965.       for(i = 0; i < sample_number; i++)
  1966.       {number_of_samples[i].data_in_sample[j] = (number_of_samples[i].data_in_sample[j] - min)/(max - min);}
  1967.     }
  1968.   }
  1969. }
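// --- Added note (illustrative sketch; not in the original listing) ---
// The scaling above is ordinary min-max normalization; test_Kohonen_network()
// later applies its inverse to report results in the original units:
//   x_norm = (x - min) / (max - min);     x = x_norm * (max - min) + min
static float minmax_scale(float x, float lo, float hi)  // hypothetical helper; assumes hi > lo
{ return (x - lo) / (hi - lo); }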

  1970. void Kohonen_Training_Data::request_Kohonen_data(int net_no)
  1971. {
  1972.   cout << "Enter the file name containing the training data for Kohonen network no. " <<net_no << "\n";
  1973.   cin >> filename; cout <<"\n";
  1974.   specify_signal_sample_size();
  1975.   normalize_data_in_array();
  1976. }

  1977. class Kohonen_Test_Data: public Kohonen_Training_Data
  1978. {public: void request_Kohonen_data(int net_no);};

  1979. void Kohonen_Test_Data::request_Kohonen_data(int net_no)
  1980. {
  1981.   cout << "Please enter the file name containing the test data for Kohonen network no. " <<net_no << "\n";
  1982.   cin >> filename; cout <<"\n";
  1983.   specify_signal_sample_size();
  1984.   normalize_data_in_array();
  1985. }

  1986. //************************************************************************//
  1987. class NeuralK    // class containing the Kohonen neural net structure
  1988. {                // along with training and testing data
  1989.   private:
  1990.   Kohonen_Training_Data Kohonen_Train;
  1991.   Kohonen_Test_Data *Kohonen_Test; // number of tests is variable
  1992.   int number_of_Kohonen_tests;
  1993.   void initialize_Kohonen_training_storage_array(int KN);
  1994.   void establish_Kohonen_test_battery_size(void);
  1995.   void train_Kohonen_network(int KOHN);
  1996.   void test_Kohonen_network(int KNET);
  1997.   public:
  1998.   Kohonen_Topology Kohonen_Design;
  1999.   void construct_Kohonen_network(void);
  2000.   void network_training_testing(int TT);
  2001.   ~NeuralK();
  2002. };
  2003. //*************************************************************************//

  2004. NeuralK::~NeuralK()
  2005. {delete [] Kohonen_Test;}

  2006. void NeuralK::construct_Kohonen_network(void)
  2007. {
  2008.   clrscr();
  2009.   cout <<"**** Kohonen Self-Organizing Map ****"<< "\n\n\n";
  2010.   Kohonen_Design.establish_Kohonen_topology(1);
  2011. }

  2012. void NeuralK::initialize_Kohonen_training_storage_array(int KN)
  2013. {
  2014.   int KT = KN;
  2015.   Kohonen_Train.acquire_net_info(Kohonen_Design.dimensions_of_signal);
  2016.   Kohonen_Train.request_Kohonen_data(KT);
  2017. }

  2018. void NeuralK::establish_Kohonen_test_battery_size(void)
  2019. {
  2020.   cout << "Please enter the number of tests you wish to run on the Kohonen Neural Network: ";
  2021.   cin >> number_of_Kohonen_tests; cout << "\n";
  2022.   if(number_of_Kohonen_tests > 0)
  2023.   {
  2024.     // create testing array
  2025.     Kohonen_Test = new Kohonen_Test_Data[number_of_Kohonen_tests];
  2026.     for(int t = 0; t < number_of_Kohonen_tests; t++)
  2027.     {Kohonen_Test[t].acquire_net_info(Kohonen_Design.dimensions_of_signal);}
  2028.   }
  2029. }

  2030. void NeuralK::train_Kohonen_network(int KOHN)
  2031. {
  2032.   int dim, ep, k_epochs, pattern, knodes, dolock;
  2033.   clrscr();
  2034.   cout <<"\n\n";
  2035.   cout << "For Neural Network #"<<KOHN<<"\n\n";
  2036.   cout << "please enter the maximum learning rate parameter (0-1): ";
  2037.   cin >> Kohonen_Design.max_learning_rate; cout <<"\n";
  2038.   cout << "please enter the minimum learning rate parameter (0-1): ";
  2039.   cin >>  Kohonen_Design.min_learning_rate; cout <<"\n";
  2040.   cout << "please enter the number of epochs used to train the Kohonen Map: ";
  2041.   cin >> k_epochs; cout << "\n";
  2042.   ep = 0;
  2043.   dolock = 0;
  2044.   do
  2045.   {
  2046.     for(pattern = 0; pattern < Kohonen_Train.sample_number; pattern++)
  2047.     {
  2048.       for(knodes = 0; knodes < Kohonen_Design.maximum_number_of_clusters; knodes++)
  2049.       {
  2050.         for(dim = 0; dim < Kohonen_Design.dimensions_of_signal; dim++)
  2051.         {Kohonen_Design.node_in_cluster_layer[knodes].input_value[dim] = Kohonen_Train.number_of_samples[pattern].data_in_sample[dim];}
  2052.       }
  2053.       Kohonen_Design.kluster_nodes_compete_for_activation();
  2054.       Kohonen_Design.update_the_Kohonen_network(ep, k_epochs);
  2055.     }
  2056.     cout << "Epoch " << ep + 1 << " is completed" <<"\n";
  2057.     if((ep == k_epochs - 1) || (Kohonen_Design.interim_learning_rate == 0.0))
  2058.     {dolock = 1;}
  2059.     ep = ep + 1;
  2060.   } while(dolock <= 0);

  2061.   Kohonen_Train.delete_signal_array();
  2062. }

  2063. void NeuralK::test_Kohonen_network(int KNET)
  2064. {
  2065.   int tnet, dim, pattern, knodes;
  2066.   float realvalue;
  2067.   tnet = KNET;
  2068.   clrscr();
  2069.   for(int ktest = 0; ktest < number_of_Kohonen_tests; ktest++)
  2070.   {
  2071.     Kohonen_Test[ktest].request_Kohonen_data(tnet);
  2072.     cout <<"For Kohonen neural network #"<< KNET <<" and test #"<< ktest+1 <<":" <<"\n";
  2073.     cout <<"please enter the name of the file to hold the test results" << "\n";
  2074.     cin >> Kohonen_Test[ktest].resultsname; cout <<"\n";
  2075.     ofstream Kohonen_savefile_ptr(Kohonen_Test[ktest].resultsname);

  2076.     for(pattern = 0; pattern < Kohonen_Test[ktest].sample_number; pattern++)
  2077.     {
  2078.       for(knodes = 0; knodes < Kohonen_Design.maximum_number_of_clusters; knodes++)
  2079.       {
  2080.         for(dim = 0; dim < Kohonen_Design.dimensions_of_signal; dim++)
  2081.         {Kohonen_Design.node_in_cluster_layer[knodes].input_value[dim] = Kohonen_Test[ktest].number_of_samples[pattern].data_in_sample[dim];}
  2082.       }
  2083.       Kohonen_Design.kluster_nodes_compete_for_activation();

  2084.       Kohonen_savefile_ptr << pattern + 1 << " ";
  2085.       for(dim = 0; dim < Kohonen_Design.dimensions_of_signal; dim++)
  2086.       {
  2087.         realvalue = (Kohonen_Test[ktest].number_of_samples[pattern].data_in_sample[dim]*(Kohonen_Test[ktest].max_output_value[dim] - Kohonen_Test[ktest].min_output_value[dim])) + Kohonen_Test[ktest].min_output_value[dim];
  2088.         Kohonen_savefile_ptr << realvalue << " ";
  2089.       }
  2090.       Kohonen_savefile_ptr << " " << Kohonen_Design.kluster_champ + 1 << "\n";
  2091.     }
  2092.     Kohonen_savefile_ptr.close();
  2093.     Kohonen_Test[ktest].delete_signal_array();
  2094.   }  // end test loop
  2095. }

  2096. void NeuralK::network_training_testing(int TT)
  2097. {
  2098.   int tt = TT;
  2099.   int menu_choice;

  2100.   clrscr();
  2101.   cout << "\n\n\n\n";
  2102.   cout << "**************** Operations Menu ****************" << "\n\n";
  2103.   cout << "  Please select one of the following options:" <<"\n\n";
  2104.   cout << "      1. Train Kohonen network only " <<"\n\n";
  2105.   cout << "      2. Test Kohonen network only " <<"\n\n";
  2106.   cout << "      3. Train and Test Kohonen network" <<"\n\n";
  2107.   cout << "*************************************************" << "\n\n";
  2108.   cout << "         Your choice?: "; cin >> menu_choice;
  2109.   cout << "\n\n";
  2110.      switch(menu_choice)
  2111.      {
  2112.        case 1:
  2113.        initialize_Kohonen_training_storage_array(tt);
  2114.        train_Kohonen_network(tt);
  2115.        break;

  2116.        case 2:
  2117.        establish_Kohonen_test_battery_size();
  2118.        if(number_of_Kohonen_tests > 0)
  2119.        {test_Kohonen_network(tt);}
  2120.        break;

  2121.        case 3:
  2122.        initialize_Kohonen_training_storage_array(tt);
  2123.        train_Kohonen_network(tt);
  2124.        establish_Kohonen_test_battery_size();
  2125.        if(number_of_Kohonen_tests > 0)
  2126.        {test_Kohonen_network(tt);}
  2127.        break;

  2128.        default:network_training_testing(tt);
  2129.      }

  2130. }
  2131. // This concludes the Kohonen section of the program
  2132. //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  2133. //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  2134. // (Radial Basis Function Network)
  2135. // define class and member functions which define Radial Basis Topology

  2136. class Radial_Basis_Topology: public Kohonen_Topology
  2137. {
  2138. public:
  2139. int number_of_output_units;
  2140. Output_units *node_in_output_layer;
  2141. int activation_function;
  2142. void establish_Radial_Basis_topology(void);
  2143. void establish_activation_function(void);
  2144. void calculate_transfer_function_widths(void);
  2145. void transfer_Gaussian_to_Output_layer(void);
  2146. void upload_network(void); // retrieve network from file
  2147. void savenet(void);
  2148. ~Radial_Basis_Topology();
  2149. };

  2150. Radial_Basis_Topology::~Radial_Basis_Topology()
  2151. { delete [] node_in_output_layer;}

  2152. void Radial_Basis_Topology::establish_activation_function(void)
  2153. {
  2154.   int dolock = 1;
  2155.   int bchoice;
  2156.   cout << "\n";
  2157.   cout << "For the output layer:" << "\n";
  2158.     do
  2159.     {
  2160.       cout << "please select the type of activation function you wish the nodes to use" << "\n\n";
  2161.       cout << "1.  Binary Sigmoid Function " << "\n";
  2162.       cout << "2.  Bipolar Sigmoid Function " << "\n\n";
  2163.       cout << "Your Selection "; cin >> bchoice;
  2164.       cout << "\n\n";
  2165.       if((bchoice == 1) || (bchoice == 2)) {dolock = 0;}
  2166.     } while(dolock >= 1);
  2167.     activation_function = bchoice;
  2168. }

  2169. void Radial_Basis_Topology::establish_Radial_Basis_topology(void)
  2170. {
  2171. char netcreate;
  2172. int looploc = 0;

  2173.    do
  2174.    {
  2175.      cout <<"\n";
  2176.      cout << "Do you wish to" << "\n\n";
  2177.      cout << "C.  Create your own RBF Network " << "\n";
  2178.      cout << "U.  Upload an existing RBF Network " << "\n\n";
  2179.      cout << "Your choice?:  "; cin >> netcreate;
  2180.      cout << "\n\n";
  2181.      netcreate = toupper(netcreate);
  2182.      if((netcreate == 'C') || (netcreate == 'U')) {looploc = 1;}
  2183.    } while(looploc <= 0);

  2184.   if(netcreate == 'U')
  2185.   {upload_network();}
  2186.   else
  2187.   {
  2188.     cout << "Please enter the dimensions of the RBF network's input signal vector: ";
  2189.     cin >> dimensions_of_signal; cout << "\n";
  2190.     establish_Kohonen_topology(2); // establishes maximum number of clusters
  2191.     cout << "\n";
  2192.     cout << "please enter the number of nodes in the RBF output layer: ";
  2193.     cin >> number_of_output_units;
  2194.     cout << "\n\n";
  2195.     node_in_output_layer = new Output_units[number_of_output_units];
  2196.     for(int o = 0; o < number_of_output_units; o++)
  2197.     {
  2198.       node_in_output_layer[o].number_of_input_units =  maximum_number_of_clusters;
  2199.       node_in_output_layer[o].establish_array_of_processing_unit_inputs();
  2200.       node_in_output_layer[o].establish_weight_vector_for_processing_units();
  2201.       static long bseed = gaset;  node_in_output_layer[o].bias = 1.0 - (2.0 * bedlam(&bseed));  // random bias in (-1, 1); bedlam() needs a long seed passed by address
  2202.     }
  2203.     establish_activation_function();
  2204.   }
  2205. }

  2206. void Radial_Basis_Topology::upload_network(void)
  2207. {
  2208.   char getname[13];
  2209.   ifstream get_ptr;
  2210.   int netid, node, dim;
  2211.   int dolock = 0;

  2212.   do
  2213.   {
  2214.     cout << "\n\n";
  2215.     cout << "Please enter the name of the file which holds the RBF network" << "\n";
  2216.     cin >> getname; cout << "\n";
  2217.     get_ptr.open(getname, ios::in);
  2218.     get_ptr >> netid;
  2219.     if(netid == 4) {dolock = 1;}
  2220.     else
  2221.     {
  2222.       cout << "Error** file contents do not match RBF specifications" << "\n";
  2223.       cout << "try again" << "\n";
  2224.       get_ptr.close();
  2225.     }
  2226.   } while(dolock <= 0);
  2227.   get_ptr >> dimensions_of_signal;
  2228.   get_ptr >> number_of_output_units;
  2229.   get_ptr >> activation_function;
  2230.   get_ptr >> maximum_number_of_clusters;

  2231.   node_in_output_layer = new Output_units[number_of_output_units];
  2232.   for(node = 0; node < number_of_output_units; node++)
  2233.   {
  2234.     node_in_output_layer[node].number_of_input_units =  maximum_number_of_clusters;
  2235.     node_in_output_layer[node].establish_array_of_processing_unit_inputs();
  2236.     node_in_output_layer[node].establish_weight_vector_for_processing_units();
  2237.     get_ptr >> node_in_output_layer[node].bias;
  2238.   }

  2239.   for(node = 0; node < number_of_output_units; node++)
  2240.   {
  2241.     for(dim = 0; dim < maximum_number_of_clusters; dim++)
  2242.     {get_ptr >> node_in_output_layer[node].weight_of_inputs[dim];}
  2243.   }

  2244.   node_in_cluster_layer = new Kohonen_units[maximum_number_of_clusters];
  2245.   for(node = 0; node < maximum_number_of_clusters; node++)
  2246.   {
  2247.    node_in_cluster_layer[node].number_of_inputs = dimensions_of_signal;
  2248.    node_in_cluster_layer[node].establish_input_output_arrays();
  2249.    node_in_cluster_layer[node].establish_input_weight_vector_array();
  2250.    get_ptr >> node_in_cluster_layer[node].transfer_function_width;
  2251.   }

  2252.   for(node = 0; node < maximum_number_of_clusters; node++)
  2253.   {
  2254.     for(dim = 0; dim < dimensions_of_signal; dim++)  // must match savenet(), which writes dimensions_of_signal weights per cluster
  2255.     {get_ptr >> node_in_cluster_layer[node].input_weight_vector[dim];}
  2256.   }
  2257.   get_ptr.close();
  2258. }

  2259. void Radial_Basis_Topology::calculate_transfer_function_widths(void)
  2260. {
  2261.   float sum, w1, w2;
  2262.   int i, j, k, ihold, jhold, khold;

  2263.   for(i = 0; i < maximum_number_of_clusters; i++)
  2264.   {node_in_cluster_layer[i].transfer_function_width = 0.0;}

  2265.   for(i = 0; i < maximum_number_of_clusters - 1; i++)
  2266.   {
  2267.     for(j = i + 1; j < maximum_number_of_clusters; j++)
  2268.     {
  2269.       sum = 0.0;
  2270.       for(k = 0; k < dimensions_of_signal; k++)
  2271.       {
  2272.        khold = k;
  2273.        ihold = i;
  2274.        jhold = j;
  2275.        w1 = node_in_cluster_layer[ihold].input_weight_vector[khold];
  2276.        w2 = node_in_cluster_layer[jhold].input_weight_vector[khold];
  2277.        sum = pow((w1 - w2), 2.0);
  2278.        node_in_cluster_layer[ihold].transfer_function_width += sum;
  2279.        node_in_cluster_layer[jhold].transfer_function_width += sum;
  2280.       }
  2281.     }
  2282.   }

  2283.   for(i = 0; i < maximum_number_of_clusters; i++)
  2284.   {
  2285.     node_in_cluster_layer[i].transfer_function_width = (1.0 / (maximum_number_of_clusters - 1)) * node_in_cluster_layer[i].transfer_function_width;
  2286.     node_in_cluster_layer[i].transfer_function_width = pow(node_in_cluster_layer[i].transfer_function_width, 0.5);
  2287.   }
  2288. }
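// --- Added note: the loops above give each cluster unit the width
//   sigma_i = sqrt( (1 / (M - 1)) * sum over j != i of ||w_i - w_j||^2 )
// with M = maximum_number_of_clusters, i.e. the root-mean-square Euclidean
// distance from center i to the other centers -- a common RBF width heuristic.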

  2289. void Radial_Basis_Topology::transfer_Gaussian_to_Output_layer(void)
  2290. {
  2291.   int i, j;

  2292.   for(i = 0; i < maximum_number_of_clusters; i++)
  2293.   {
  2294.     node_in_cluster_layer[i].calculate_sum_square_Euclidean_distance();
  2295.     node_in_cluster_layer[i].execute_Gaussian_transfer_function();
  2296.   }

  2297.   // transfer signal from cluster to output units and calculate output
  2298.   for(i = 0; i < number_of_output_units; i++)
  2299.   {
  2300.     for(j = 0; j < maximum_number_of_clusters; j++)
  2301.     {node_in_output_layer[i].processing_unit_input[j] = node_in_cluster_layer[j].Gaussian_transfer_output;}
  2302.     node_in_output_layer[i].calculate_output_signal(activation_function);
  2303.   }
  2304. }
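// --- Added note: the forward pass above computes, for each output node o,
//   y_o = f( bias_o + sum_j w_oj * phi_j )
// where phi_j is the Gaussian response of cluster unit j and f is the binary
// or bipolar sigmoid chosen in establish_activation_function(); the sum and
// sigmoid are evaluated by Output_units::calculate_output_signal(), presumably
// defined earlier in this listing's backpropagation section.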

  2305. void Radial_Basis_Topology::savenet(void)
  2306. {
  2307.   char savename[13];
  2308.   ofstream save_ptr;
  2309.   int node, dim;

  2310.   cout << "\n\n";
  2311.   cout << "Please enter the name of the file which will hold the RBF network"<<"\n";
  2312.   cin >> savename; cout <<"\n";
  2313.   save_ptr.open(savename, ios::out);

  2314.   save_ptr << 4 << "\n";  //network identifier number
  2315.   save_ptr << dimensions_of_signal << "\n";
  2316.   save_ptr << number_of_output_units << "\n";
  2317.   save_ptr << activation_function << "\n";
  2318.   save_ptr << maximum_number_of_clusters << "\n";

  2319.   for(node = 0; node < number_of_output_units; node++)
  2320.   {save_ptr << node_in_output_layer[node].bias << " ";}
  2321.   save_ptr << "\n";

  2322.   for(node = 0; node < number_of_output_units; node++)
  2323.   {
  2324.     for(dim = 0; dim < maximum_number_of_clusters; dim++)
  2325.     {save_ptr << node_in_output_layer[node].weight_of_inputs[dim] << " ";}
  2326.     save_ptr << "\n";
  2327.   }

  2328.   for(node = 0; node < maximum_number_of_clusters; node++)
  2329.   {save_ptr << node_in_cluster_layer[node].transfer_function_width << " ";}
  2330.   save_ptr << "\n";

  2331.   for(node = 0; node < maximum_number_of_clusters; node++)
  2332.   {
  2333.     for(dim = 0; dim < dimensions_of_signal; dim++)
  2334.     {save_ptr << node_in_cluster_layer[node].input_weight_vector[dim] << " ";}
  2335.     save_ptr << "\n";
  2336.   }
  2337.   save_ptr.close();

  2338. }
  2339. //******************************************************************************
  2340. class NeuralR   // class containing the Radial Basis neural net structure
  2341. {               // along with training and testing data
  2342.   private:
  2343.   Training RTrain;                    // file name and dynamic array for training
  2344.   Testing *RTests;                    // files containing data to test network
  2345.   int number_of_tests;                // number of tests run on the neural net
  2346.   void initialize_training_storage_array(int R);
  2347.   void establish_test_battery_size(void);
  2348.   void train_RBF_neural_network(int RBF);
  2349.   void test_neural_network(int RBN);
  2350.   public:
  2351.   Radial_Basis_Topology RBF_Design;   // specification of radial basis network
  2352.   void establish_Radial_Basis_network(void);
  2353.   void network_training_testing(int TT);
  2354.   ~NeuralR();
  2355. };
  2356. //******************************************************************************

  2357. NeuralR::~NeuralR()
  2358. {delete [] RTests;}

  2359. void NeuralR::initialize_training_storage_array(int R)
  2360. {
  2361.    RTrain.acquire_net_info(RBF_Design.dimensions_of_signal, RBF_Design.number_of_output_units);
  2362.    RTrain.request_training_data(R);
  2363. }

  2364. void NeuralR::establish_test_battery_size(void)
  2365. {
  2366.   clrscr();
  2367.   cout << "Please enter the number of tests you wish to run on the RBF network: ";
  2368.   cin >> number_of_tests; cout << "\n";
  2369.   RTests = new Testing[number_of_tests];
  2370.   for(int i = 0; i < number_of_tests; i++)
  2371.   {RTests[i].acquire_net_info(RBF_Design.dimensions_of_signal, RBF_Design.number_of_output_units);}
  2372. }

  2373. void NeuralR::establish_Radial_Basis_network(void)
  2374. {
  2375.   clrscr();
  2376.   cout << " **** Radial Basis Function Network **** " << "\n\n\n";
  2377.   RBF_Design.establish_Radial_Basis_topology();
  2378. }

  2379. void NeuralR::train_RBF_neural_network(int RBF)
  2380. {
  2381.   char savefile;
  2382.   float output_error, sum_of_error, real_error_difference, target_minimum_average_squared_error;
  2383.   int bepoch, outnode, sig, sigdim, cnode;
  2384.   int dim, ep, k_epochs, pattern, knodes, dolock;
  2385.   float *maxdifference;
  2386.   float *meandifference;
  2387.   int loopexit = 1;
  2388.   ofstream savefile_ptr;

  2389.   // establish the cluster centers' weight vectors via Kohonen-style competitive learning
  2390.   clrscr();
  2391.   cout <<"\n\n";
  2392.   cout << "For Neural Network #"<<RBF<<"\n\n";
  2393.   cout << "please enter the maximum learning rate parameter (0-1): ";
  2394.   cin >> RBF_Design.max_learning_rate; cout <<"\n";
  2395.   cout << "please enter the minimum learning rate parameter (0-1): ";
  2396.   cin >>  RBF_Design.min_learning_rate; cout <<"\n";
  2397.   cout << "please enter the number of epochs used to train the RBF clusters: ";
  2398.   cin >> k_epochs; cout << "\n\n\n";
  2399.   ep = 0;
  2400.   dolock = 0;
  2401.   do
  2402.   {
  2403.     for(pattern = 0; pattern < RTrain.sample_number; pattern++)
  2404.     {
  2405.       for(knodes = 0; knodes < RBF_Design.maximum_number_of_clusters; knodes++)
  2406.       {
  2407.         for(dim = 0; dim < RBF_Design.dimensions_of_signal; dim++)
  2408.         {
  2409.           RBF_Design.node_in_cluster_layer[knodes].input_value[dim] = RTrain.number_of_samples[pattern].data_in_sample[dim];
  2410.         }

  2411.       }
  2412.       RBF_Design.kluster_nodes_compete_for_activation();
  2413.       RBF_Design.update_the_Kohonen_network(ep, k_epochs);
  2414.     }

  2415.     if((ep == k_epochs - 1) || (RBF_Design.interim_learning_rate == 0.0))
  2416.     {dolock = 1;}
  2417.     ep = ep + 1;
  2418.   } while(dolock <= 0);

  2419.   RBF_Design.calculate_transfer_function_widths();

  // use supervised learning for the output-layer weight vector
  cout << "Cluster center vectors established" << "\n\n";
  cout << "please enter the number of epochs you wish to use for training" << "\n";
  cout << "the output layer: "; cin >> RTrain.number_of_epochs; cout << "\n\n";
  cout << "please enter the learning rate constant for backpropagation (0-1): ";
  cin >> RTrain.rate_of_learning; cout << "\n";
  cout << "please enter the minimum average squared error you wish to target" << "\n";
  cin >> target_minimum_average_squared_error; cout << "\n";
  do
  {
    cout << "do you wish to save the mean error, maximum error" << "\n";
    cout << "and average squared error for each epoch to a file? (Y or N): "; cin >> savefile;
    savefile = toupper(savefile);
    if((savefile == 'Y') || (savefile == 'N')) {loopexit = 2;}
    cout << "\n";
  } while(loopexit <= 1);

  if(savefile == 'Y')
  {
    cout << "please enter the name of the file which will hold the results of training:" << "\n";
    cin >> RTrain.resultsname; cout << "\n";
    savefile_ptr.open(RTrain.resultsname, ios::out);
  }

  maxdifference = new float[RBF_Design.number_of_output_units];
  meandifference = new float[RBF_Design.number_of_output_units];
  // bug fix: zero the accumulators before the first epoch (new[] leaves them uninitialized)
  for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
  { maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0; }

  // initiate backpropagation for the appropriate number of epochs
  bepoch = 0;
  do
  {
    sum_of_error = 0;

    for(sig = 0; sig < RTrain.sample_number; sig++)
    {
      output_error = 0;
      for(sigdim = 0; sigdim < RTrain.signal_dimensions; sigdim++)
      {
        for(cnode = 0; cnode < RBF_Design.maximum_number_of_clusters; cnode++)
        {RBF_Design.node_in_cluster_layer[cnode].input_value[sigdim] = RTrain.number_of_samples[sig].data_in_sample[sigdim];}
      }
      RBF_Design.transfer_Gaussian_to_Output_layer();

      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {
        RBF_Design.node_in_output_layer[outnode].calculate_output_error_information_term(RTrain.number_of_samples[sig].data_in_sample[RTrain.signal_dimensions + outnode], RBF_Design.activation_function);
        // calculate the instantaneous sum of squared errors (Haykin, 1994)
        real_error_difference = (pow(RBF_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (RTrain.max_output_value[outnode] - RTrain.min_output_value[outnode]);
        output_error += 0.5 * pow(real_error_difference, 2.0);
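        // note: sqrt(error^2) recovers |target - output| on the normalized
        // scale; multiplying by (max - min) rescales it, so the logged errors
        // are in the problem's original units rather than the (0-1) range.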

        // track the maximum and mean absolute error difference for each node
        real_error_difference = RBF_Design.node_in_output_layer[outnode].absolute_error_difference * (RTrain.max_output_value[outnode] - RTrain.min_output_value[outnode]);
        meandifference[outnode] += real_error_difference / float(RTrain.sample_number);
        if(sig == 0) {maxdifference[outnode] = real_error_difference;}
        else
        {
          if(real_error_difference > maxdifference[outnode])
          {maxdifference[outnode] = real_error_difference;}
        }
      }

      // the average squared error for each signal is accumulated
      sum_of_error += output_error / float(RTrain.sample_number);

      // update the RBF network's output nodes
      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {RBF_Design.node_in_output_layer[outnode].calculate_weight_and_bias_correction_terms(RTrain.rate_of_learning);}
    } // end sig loop

    // save error information (if required)
    if(savefile == 'Y')
    {
      savefile_ptr << bepoch + 1 << " ";
      savefile_ptr << sum_of_error << "  ";
      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {savefile_ptr << maxdifference[outnode] << " " << meandifference[outnode] << "    ";}
      savefile_ptr << endl;
      clrscr();
      cout << "Epoch #" << bepoch + 1 << " is completed " << endl;
    }

    if(bepoch == 0)
    {RTrain.minimum_average_squared_error = sum_of_error;}
    else
    {
      if(sum_of_error < RTrain.minimum_average_squared_error)
      {RTrain.minimum_average_squared_error = sum_of_error;}
    }

    for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
    { maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0; }

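    // early stopping: quit as soon as the best epoch-average squared error
    // reaches the user's target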
    if(RTrain.minimum_average_squared_error <= target_minimum_average_squared_error)
    {break;}

    bepoch = bepoch + 1;

  } while(bepoch < RTrain.number_of_epochs);

  if(savefile == 'Y') {savefile_ptr.close();}  // only close a file that was actually opened

  // delete the arrays holding the training data
  RTrain.delete_signal_data_array();
  delete [] maxdifference;
  delete [] meandifference;
}

void NeuralR::test_neural_network(int RBN)
{
  float output_error, real_output;
  int sig, sigdim, knodes, outnode;
  int rbn = RBN;
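  // For each test battery, every test pattern is pushed through the trained
  // network and one line per pattern is written to the results file:
  // pattern number, de-normalized target outputs, de-normalized network
  // outputs, the absolute error per output, and the sum-squared error.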

  for(int RBtest = 0; RBtest < number_of_tests; RBtest++)
  {
    RTests[RBtest].request_testing_data(rbn, RBtest + 1);

    cout << "please enter the name of the file which will hold the results of test " << RBtest + 1 << ":" << "\n";
    cin >> RTests[RBtest].resultsname; cout << "\n";
    ofstream savefile_ptr(RTests[RBtest].resultsname);

    for(sig = 0; sig < RTests[RBtest].sample_number; sig++)
    {
      output_error = 0.0;
      savefile_ptr << sig + 1 << " ";
      for(knodes = 0; knodes < RBF_Design.maximum_number_of_clusters; knodes++)
      {
        for(sigdim = 0; sigdim < RBF_Design.dimensions_of_signal; sigdim++)
        {RBF_Design.node_in_cluster_layer[knodes].input_value[sigdim] = RTests[RBtest].number_of_samples[sig].data_in_sample[sigdim];}
      }
      RBF_Design.transfer_Gaussian_to_Output_layer();

      // send the target outputs to the file
      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {
        real_output = RTests[RBtest].min_output_value[outnode] + (RTests[RBtest].number_of_samples[sig].data_in_sample[outnode + RBF_Design.dimensions_of_signal] * (RTests[RBtest].max_output_value[outnode] - RTests[RBtest].min_output_value[outnode]));
        savefile_ptr << real_output << " ";
      }

      savefile_ptr << " ";

      // send the network outputs to the file
      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {
        RBF_Design.node_in_output_layer[outnode].calculate_output_error_information_term(RTests[RBtest].number_of_samples[sig].data_in_sample[RTests[RBtest].signal_dimensions + outnode], RBF_Design.activation_function);
        real_output = RTests[RBtest].min_output_value[outnode] + (RBF_Design.node_in_output_layer[outnode].output_signal * (RTests[RBtest].max_output_value[outnode] - RTests[RBtest].min_output_value[outnode]));
        savefile_ptr << real_output << " ";
      }

      // send the absolute error differences to the file
      for(outnode = 0; outnode < RBF_Design.number_of_output_units; outnode++)
      {
        real_output = (pow(RBF_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (RTests[RBtest].max_output_value[outnode] - RTests[RBtest].min_output_value[outnode]);
        savefile_ptr << real_output << " ";
        real_output = pow(real_output, 2.0);
        output_error += 0.5 * real_output;
      }
      // save the sum square of error
      savefile_ptr << output_error << "\n";
      if(sig == RTests[RBtest].sample_number - 1)
      {savefile_ptr.close();}
    }
    RTests[RBtest].delete_signal_array();
  }
} // end test neural network function

void NeuralR::network_training_testing(int TT)
{
  int tt = TT;
  int menu_choice;

  clrscr();
  cout << "\n\n\n\n";
  cout << "**************** Operations Menu ****************" << "\n\n";
  cout << "  Please select one of the following options:" << "\n\n";
  cout << "      1. Train RBF network only " << "\n\n";
  cout << "      2. Test RBF network only " << "\n\n";
  cout << "      3. Train and Test RBF network" << "\n\n";
  cout << "*************************************************" << "\n\n";
  cout << "         Your choice?: "; cin >> menu_choice;
  cout << "\n\n";

  switch(menu_choice)
  {
    case 1:
    initialize_training_storage_array(tt);
    train_RBF_neural_network(tt);
    break;

    case 2:
    establish_test_battery_size();
    if(number_of_tests > 0)
    {test_neural_network(tt);}
    break;

    case 3:
    initialize_training_storage_array(tt);
    train_RBF_neural_network(tt);
    establish_test_battery_size();
    if(number_of_tests > 0)
    {test_neural_network(tt);}
    break;

    default: network_training_testing(tt);
  }
}
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
// this template class stores the neural networks to a file

template <class Type>
class Storage
{
  public:
  void save_neural_network(Type & NET_Topology);
};
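// Because Storage is templated on the topology type, one save routine covers
// all four network classes; the only requirement is that each topology class
// (Back_Topology, ART_Topology, Kohonen_Topology, Radial_Basis_Topology)
// provides a savenet() member function.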

template <class Type>
void Storage<Type>::save_neural_network(Type & NET_Topology)
{
  char schoice;
  int dolock = 0;

  do
  {
    clrscr();
    cout << "\n\n\n\n";
    cout << "Do you wish to save this neural network? (Y/N): ";
    cin >> schoice;
    schoice = toupper(schoice);
    if((schoice == 'Y') || (schoice == 'N')) {dolock = 1;}
  } while(dolock <= 0);
  if(schoice == 'Y')
  {NET_Topology.savenet();}
}
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

class Neural_Window  // this class holds the different types of neural nets
{
  private:
  void establish_network_type(void);

  public:  // user interface
  char neural_network_type;
  int neural_network_number;
  void display_menu_for_net_selection(int NNnum);
};

void Neural_Window::display_menu_for_net_selection(int NNnum)
{
  clrscr();
  neural_network_number = NNnum;
  cout.fill('*');
  cout.width(70); cout << "\n";
  cout.width(42);
  cout << " Neural Network " << neural_network_number << " ";
  cout.width(26); cout << "\n";
  cout.width(71); cout << "\n\n";
  cout << "Please select one of the following network types from the Main Menu";
  int i = 0;
  do {cout << "\n"; i = i + 1;} while (i < 3);
  cout.fill(' ');
  cout.width(10);
  cout << " *** / Main Menu \\ ***"; cout << "\n\n";
  cout.width(6);
  cout << " F.  Feedforward network using backpropagation " << "\n\n";
  cout.width(6);
  cout << " A.  Adaptive Resonance Theory network for binary signals " << "\n\n";
  cout.width(6);
  cout << " K.  Kohonen Self-Organizing Map " << "\n\n";
  cout.width(6);
  cout << " R.  Radial Basis Function Network " << "\n\n";
  cout.width(6);
  cout << " E.  Exit Program" << "\n\n";
  cout << "\n\n\n";
  cout.width(6);
  cout << "Network Type (?) "; cin >> neural_network_type;
  neural_network_type = toupper(neural_network_type);
  if(neural_network_type != 'E')
  {establish_network_type();}
}

void Neural_Window::establish_network_type(void)
{
  int NNN = neural_network_number;

  NeuralA *ART;
  NeuralB *Backpropagation;
  NeuralK *KOH;
  NeuralR *RBF;

  // each case is braced so that the local Storage objects do not cross case
  // labels (standard C++ rejects the unbraced form); each net is deleted
  // once it has been saved
  switch(neural_network_type)
  {
    case 'A':  // Adaptive Resonance Theory network (ART1) for clustering
    {
      ART = new NeuralA;
      Storage<ART_Topology> Astore;
      ART->construct_ART_network();
      ART->network_training_testing(NNN);
      Astore.save_neural_network(ART->ART_Design);
      delete ART;
      break;
    }

    case 'F':  // feedforward network using backpropagation
    {
      Backpropagation = new NeuralB;
      Storage<Back_Topology> Bstore;
      Backpropagation->establish_backprop_network();
      Backpropagation->network_training_testing(NNN);
      Bstore.save_neural_network(Backpropagation->Net_Design);
      delete Backpropagation;
      break;
    }

    case 'K':  // Kohonen Self-Organizing Map
    {
      KOH = new NeuralK;
      Storage<Kohonen_Topology> Kstore;
      KOH->construct_Kohonen_network();
      KOH->network_training_testing(NNN);
      Kstore.save_neural_network(KOH->Kohonen_Design);
      delete KOH;
      break;
    }

    case 'R':  // Radial Basis Function network
    {
      RBF = new NeuralR;
      Storage<Radial_Basis_Topology> Rstore;
      RBF->establish_Radial_Basis_network();
      RBF->network_training_testing(NNN);
      Rstore.save_neural_network(RBF->RBF_Design);
      delete RBF;
      break;
    }

    default: display_menu_for_net_selection(neural_network_number);
  }
}

//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

int main(void)  // standard C++ requires main to return int
{
  _control87(MCW_EM, MCW_EM); // masks floating point overflows,
                              // underflows, and divisions by 0
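  // portability note: _control87 (and the clrscr()/conio.h calls used
  // throughout) are Borland/DOS-era extensions; other compilers will need
  // equivalents or stubs.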

  int number_of_nets;
  Neural_Window User_net;
  clrscr();
  cout << " ******* Welcome to Pitt-Networks!! ******** " << "\n\n\n\a";
  cout << "Please enter the number of networks you wish to develop: "; cin >> number_of_nets;

  for(int NWnet = 1; NWnet < number_of_nets + 1; NWnet++)
  {
    User_net.display_menu_for_net_selection(NWnet);
    if(User_net.neural_network_type == 'E')
    {break;}
  }

  return 0;
}
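
The transfer_Gaussian_to_Output_layer() routine used throughout presumably computes the standard RBF forward pass: each hidden unit responds with a Gaussian of the distance between the input and its cluster center, and the output layer forms a weighted sum of those responses. As a self-contained reference (illustrative names only, not Pittnet's actual internals), a minimal sketch of that computation:

// rbf_sketch.cpp -- standalone illustration of a Gaussian RBF forward pass
// (illustrative example; not part of Pittnet)
#include <math.h>
#include <stdio.h>

// response of one Gaussian basis unit: phi(x) = exp(-||x - c||^2 / (2*sigma^2))
float gaussian_unit(const float *x, const float *center, float sigma, int dim)
{
  float dist2 = 0.0f;
  for(int d = 0; d < dim; d++)
  {
    float diff = x[d] - center[d];
    dist2 += diff * diff;
  }
  return (float)exp(-dist2 / (2.0f * sigma * sigma));
}

int main(void)
{
  // one 2-D input, two basis units, one linear output unit
  float x[2]  = {0.3f, 0.7f};
  float c1[2] = {0.0f, 1.0f};
  float c2[2] = {1.0f, 0.0f};
  float sigma = 0.5f;
  float w[2]  = {0.8f, -0.4f};  // output-layer weights
  float bias  = 0.1f;

  float y = bias + w[0] * gaussian_unit(x, c1, sigma, 2)
                 + w[1] * gaussian_unit(x, c2, sigma, 2);
  printf("network output: %f\n", y);
  return 0;
}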

OP | Posted on 2006-10-12 09:59

The compiled files and the related help:
http://www.pudn.com/downloads29/ ... rs/detail82080.html

The copyright of this program belongs to its original author; it is posted here for study only. If the program's owner has any objection, please contact me and it will be removed.
Posted on 2007-10-10 10:00

Hello OP, why do I keep getting errors when I run this program? Could you send the compiled files and the related help to my email heiyanzi123@126.com? Much appreciated!

Posted on 2008-7-7 10:09

Hello OP, is there a MATLAB version of this program?

Posted on 2008-10-30 09:43

Nice, thank you! :loveliness:

Posted on 2008-11-29 10:12

Thanks to the OP, kind soul.

Posted on 2009-3-11 21:50

I can't download it. Could someone send me a copy too? Many thanks. ym_qi@126.com

Posted on 2009-3-12 16:32

Thanks for sharing, OP!

Posted on 2009-3-16 10:09

I can't download it here either. Would some kind soul send me a copy: zhu.xiaoxun@163.com. And of course, thanks to the OP first.

Posted on 2009-4-30 09:47

Please send me a copy too, thanks. Email: liling101112@163.com

Posted on 2009-5-11 15:07

Hello OP: could you please send me a copy? liuxiaopei2007@163.com. Much obliged!!

Posted on 2009-5-17 20:06

Could you send me one too? I'm working on my graduation project and need it urgently. Thanks. 421069750@163.com

Posted on 2009-5-25 14:05

Thanks to everyone above. My email is dovejin@126.com. Please send me a copy, it's urgent! Thanks!

Posted on 2009-6-10 17:14

Urgently needed, thanks. jinsenana@163.com

Posted on 2009-7-5 11:21

OP, I need a copy too, thanks!
