#include "backprop.h"

/*#define LOCAL_FEATURE_ONLY*/
/* NOTE(review): empty and unused in this file; name contains a typo
 * ("TRAINGING") -- kept unchanged in case other code tests it. */
#define MAX_TRAINGING_FILE

/*
 * Train a multi-layer perceptron (MLP) with backpropagation and then run
 * an interactive feed-forward classifier.
 *
 * Usage: prog [parameter_file] [items_per_line]
 *   argv[1] - control parameter file (prompted for if absent)
 *   argv[2] - progress items printed per line (default 5)
 *
 * Training data file layout (text, whitespace separated):
 *   <sample_count>
 *   then per sample: INPUT_UNIT doubles followed by OUTPUT_UNIT doubles.
 * Each stored row also carries a fixed -1 bias input at index INPUT_UNIT.
 *
 * Returns 0 on success, -1 on any I/O or parse failure.
 */
int main(int argc, char **argv)
{
    char paraFile[256], comm[256];
    MLP_NETWORK anetwork;
    CONTROL_INFO controlInfo;
    double **traindata = NULL;  /* FIX: was uninitialized; unconditionally freed at exit */
    double delta_Sum, tmp;
    double delta_Max;
    int ntraining;
    int i, j, k;
    int k1;
    FILE *fp;
    int items_per_line;

    /* --- Command-line / prompted arguments --------------------------- */
    if (argc > 1) {
        /* FIX: bounded copy; strcpy could overflow paraFile[256] */
        snprintf(paraFile, sizeof paraFile, "%s", argv[1]);
    } else {
        printf("Please specify the parameter file: ");
        /* FIX: field width bounds the read; result checked */
        if (scanf("%255s", paraFile) != 1) {
            fprintf(stderr, "No parameter file supplied.\n");
            return -1;
        }
    }
    items_per_line = (argc > 2) ? atoi(argv[2]) : 5;
    if (items_per_line <= 0) {
        items_per_line = 5;  /* FIX: guard against atoi("junk") == 0 used as modulus */
    }

    /* --- Load control parameters and size the network ---------------- */
    SetDefaultParameter(&controlInfo);
    LoadParameter(paraFile, &controlInfo);
    DisplayParameter(&controlInfo);

    anetwork.INPUT_UNIT = controlInfo.input_unit;
    anetwork.HIDDEN_UNIT = controlInfo.hidden_unit;
    anetwork.OUTPUT_UNIT = controlInfo.output_unit;
    /* NOTE(review): the next assignment is a no-op (value just copied the
     * other way) and DisplayParameter repeats the earlier printout; both
     * kept so the program's visible output is unchanged. */
    controlInfo.input_unit = anetwork.INPUT_UNIT;
    DisplayParameter(&controlInfo);

    /* init_mlp_archit(networkInFile, &anetwork); */
    allocate_mlp_network(&anetwork);

    /* --- Load training data (optional) ------------------------------- */
    ntraining = 0;
    if (controlInfo.inputFile[0] != 0) {
        fp = fopen(controlInfo.inputFile, "rb");
        if (fp == NULL) {
            fprintf(stderr, "Cannot open training data file \"%s\"\n",
                    controlInfo.inputFile);
            free_mlp_network(&anetwork);
            return -1;
        }
        /* FIX: sample count was read unchecked; a bad header previously
         * left k indeterminate and drove allocation with garbage. */
        if (fscanf(fp, "%d", &k) != 1 || k < 0) {
            fprintf(stderr, "Cannot read sample count from \"%s\"\n",
                    controlInfo.inputFile);
            fclose(fp);
            free_mlp_network(&anetwork);
            return -1;
        }
        /* One row per sample: inputs, bias slot, then target outputs. */
        traindata = (double **)Allocate_Array(
            k, (anetwork.INPUT_UNIT + anetwork.OUTPUT_UNIT + 1) * sizeof(double));

        ntraining = 0;
        for (i = 0; i < k; i++) {
            for (j = 0; j < anetwork.INPUT_UNIT; j++) {
                if (fscanf(fp, "%lf", &(traindata[ntraining][j])) != 1) {
                    fprintf(stderr, "Error when reading %d row ", i + 1);
                    fprintf(stderr, "from training data file \"%s\"\n",
                            controlInfo.inputFile);
                    fclose(fp);
                    Free_Array((void **)traindata, k);
                    free_mlp_network(&anetwork);  /* FIX: was leaked on this path */
                    return -1;
                }
            }
            /* Bias term: the input at index INPUT_UNIT is permanently -1. */
            traindata[ntraining][anetwork.INPUT_UNIT] = -1.;
            for (j = 0; j < anetwork.OUTPUT_UNIT; j++) {
                if (fscanf(fp, "%lf",
                           &(traindata[ntraining][j + anetwork.INPUT_UNIT + 1])) != 1) {
                    fprintf(stderr, "Error when reading %d row ", i + 1);
                    fprintf(stderr, "from training data file \"%s\"\n",
                            controlInfo.inputFile);
                    fclose(fp);
                    Free_Array((void **)traindata, k);
                    free_mlp_network(&anetwork);  /* FIX: was leaked on this path */
                    return -1;
                }
            }
            ntraining++;
        }
        fclose(fp);  /* FIX: handle was leaked on the successful load path */

        printf("There are %d training samples loaded ", ntraining);
        printf("from file \"%s\"\n", controlInfo.inputFile);
        for (i = 0; i < ntraining; i++) {
            for (j = 0; j < (anetwork.OUTPUT_UNIT + anetwork.INPUT_UNIT + 1); j++) {
                printf("%6.4f ", traindata[i][j]);
            }
            printf("\n");
        }
        printf("\n");
    }

    /* --- Initialize (or load) the network ---------------------------- */
    init_mlp_network(&anetwork, &controlInfo);
    if (controlInfo.networkInFile[0] != 0) {
        if (controlInfo.forced_training == 0) {
            load_mlp_network(controlInfo.networkInFile, &anetwork);
        }
    } else {
        if (ntraining == 0) {
            printf("Warning: The network is random.\n");
            printf("\tThe network is not trained nor loaded.\n");
        }
    }

    /* --- Training loop ----------------------------------------------- */
    if (ntraining > 0) {
        save_mlp_network("init_network.dat", &anetwork);
        k1 = 0;  /* consecutive iterations below the convergence threshold */
        for (i = 0; i < controlInfo.max_iteration; i++) {
            delta_Sum = 0.0;
            delta_Max = 0.0;
            for (j = 0; j < ntraining; j++) {
                /* Copy inputs (incl. bias) and targets into the network. */
                for (k = 0; k < (anetwork.INPUT_UNIT + 1); k++) {
                    anetwork.v1[k] = traindata[j][k];
                }
                for (k = 0; k < anetwork.OUTPUT_UNIT; k++) {
                    anetwork.v_out[k] = traindata[j][anetwork.INPUT_UNIT + 1 + k];
                }
                tmp = backpropagation(&anetwork, &controlInfo);
                if (delta_Max < tmp) delta_Max = tmp;
                delta_Sum += tmp;
            }
            if (((i + 1) % items_per_line) == 0) {
                printf("\n");
            }
            printf("%4d (%8.6f) ", i + 1, delta_Sum);
            /* Early stop: 10 consecutive iterations with the worst per-sample
             * error inside 80%% of the tolerance. */
            if (delta_Max < (controlInfo.tol * 0.8)) {
                k1++;
                if (k1 > 10) break;
            } else {
                k1 = 0;
            }
        }
        printf("\nFinal error at iteration %d is %8.6f.\n", i + 1, delta_Sum);
    }

    save_mlp_network(controlInfo.networkOutFile, &anetwork);

    /* --- Report classification of the training set ------------------- */
    printf("\nClassification results for training data.\n");
    for (i = 0; i < ntraining; i++) {
        printf("%4d : ", i);
        /* Echo the input features (abridged with "..." for wide vectors),
         * skipping the bias slot at index INPUT_UNIT. */
        for (j = 0; j < (anetwork.OUTPUT_UNIT + anetwork.INPUT_UNIT); j++) {
            if (j == anetwork.INPUT_UNIT) {
                printf("   ");
                continue;
            }
            if (anetwork.INPUT_UNIT > 8) {
                if (j < 3 || (j > (anetwork.INPUT_UNIT - 3))) {
                    printf("%6.4f ", traindata[i][j]);
                } else {
                    if (j == (anetwork.INPUT_UNIT - 4)) {
                        printf(" ... ");
                    }
                }
            } else {
                printf("%6.4f ", traindata[i][j]);
            }
        }
        for (j = 0; j < (anetwork.INPUT_UNIT + 1); j++) {
            anetwork.v1[j] = traindata[i][j];
        }
        forward_prop_only(&anetwork, &controlInfo);
        for (j = 0; j < anetwork.OUTPUT_UNIT; j++) {
            printf("%6.4f ", anetwork.v3[j]);
        }
        printf("[");
        /* Last target value and its residual against the first output. */
        j = anetwork.OUTPUT_UNIT + anetwork.INPUT_UNIT;
        printf("%6.4f (%6.4f)", traindata[i][j],
               (float)(traindata[i][j] - anetwork.v3[0]));
        printf("]\n");
    }
    printf("\n");

    /* --- Interactive feed-forward classification --------------------- */
    printf("Feedforward classification: \n");
    do {
        printf("Please enter the feature vector with length %d: \n",
               anetwork.INPUT_UNIT);
        for (i = 0; i < anetwork.INPUT_UNIT; i++) {
            /* FIX: a non-numeric token previously made scanf spin on the
             * same input forever; now abort the session cleanly. */
            if (scanf("%lf", &(anetwork.v1[i])) != 1) {
                fprintf(stderr, "Invalid feature value; stopping.\n");
                goto done;
            }
        }
        anetwork.v1[anetwork.INPUT_UNIT] = -1.;  /* bias input */
        forward_prop_only(&anetwork, &controlInfo);
        printf("The neural network output result is :\n\t");
        for (j = 0; j < anetwork.OUTPUT_UNIT; j++) {
            printf("%6.4f ", anetwork.v3[j]);
        }
        printf("\n");
        printf("Do you want to try another feature vector (Yes/No): ");
        /* FIX: bounded read; unchecked "%s" into comm[256] could overflow */
        if (scanf("%255s", comm) != 1) break;
        if (comm[0] == 'N' || comm[0] == 'n') break;
    } while (1);

done:
    printf("\n");
    /* FIX: traindata may never have been allocated (no input file);
     * Free_Array is a project routine, so guard rather than assume it
     * tolerates NULL. */
    if (traindata != NULL) {
        Free_Array((void **)traindata, ntraining);
    }
    free_mlp_network(&anetwork);
    return 0;
}