0

I'm working on a neural network project that requires running two instances of the neural network program at the same time, each with a different training set. For that I'm using multithreading in Java plus the Encog library for the ANN implementation. I created two threads, each containing the ANN implementation but reading a different CSV file. Part of the output works — it returns the min and max for every column in the CSV files — but the problem is that the ANN output is computed for only one of the files. Here is my implementation:

ReadfileMT.java

public class ReadFileMT implements Runnable {

    public static void dumpFieldInfo(EncogAnalyst analyst) {
        
     System.out.println("Fields found in file:");
    for (AnalystField field : analyst.getScript().getNormalize()
            .getNormalizedFields())
          {

        StringBuilder line = new StringBuilder();
        line.append(field.getName());
        line.append(",action=");
        line.append(field.getAction());
        line.append(",min=");
        line.append(field.getActualLow());
        line.append(",max=");
        line.append(field.getActualHigh());
        System.out.println(line.toString());
    }
}

     public void run() {
     File sourceFile = new File("d:\\data\\F21.csv");
                    File targetFile = new File("d:\\data\\F2_norm.csv"); 
        EncogAnalyst analyst = new EncogAnalyst();
                AnalystWizard wizard = new AnalystWizard(analyst);
                    AnalystField targetField = wizard.getTargetField();
                    wizard.setTargetField("Old_Resp");
        wizard.wizard(sourceFile, true, AnalystFileFormat.DECPNT_COMMA);
        dumpFieldInfo(analyst);
        final AnalystNormalizeCSV norm = new AnalystNormalizeCSV();
        norm.analyze(sourceFile, true, CSVFormat.ENGLISH, analyst);
                    norm.setProduceOutputHeaders(true);
                    norm.normalize(targetFile);
                   // Encog.getInstance().shutdown();
        //*****************************Read from the csv file**************************************************               
       
        
             
    final BasicNetwork network = EncogUtility.simpleFeedForward(4,  4, 0, 1,
                            false);
          
     network.addLayer(new BasicLayer(new     ActivationSigmoid(),false,4));
     network.addLayer(newBasicLayer(newActivationSigmoid(),false,4));
     network.addLayer(newBasicLayer(newActivationSigmoid(),false,1));
     network.getStructure().finalizeStructure();
     network.reset();
            //create training data
            final MLDataSet trainingSet = TrainingSetUtil.loadCSVTOMemory(
            CSVFormat.ENGLISH, "c:\\temp\\F2_norm.csv",false, 4, 1);
            
            // train the neural network
            System.out.println();
            System.out.println("Training Network");
     final   Backpropagationtrain=newBackpropagation
     (network,trainingSet,0.05,0.9);
            train.fixFlatSpot(false);
            int epoch = 1;

    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" +train.getError());
        epoch++;
    } while(train.getError() > 0.01);
    train.finishTraining();
           //final Train train=newResilientPropagation(network,trainingSet);
           /*int epoch = 1;
        do {
            train.iteration();
            System.out.println("Epoch #" + epoch + " Error:"
                    + train.getError() * 100 + "%");
            epoch++;
        } while (train.getError() > 0.015);*/
        
            /*int epoch = 1;

    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" +train.getError());
        epoch++;
    } while(train.getError() > 0.01);
    train.finishTraining();*/
            
          
         
          
            // test the neural network
   
         
            System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
        final MLData output = network.compute(pair.getInput());
         System.out.println(pair.getInput().getData(0)+",
         "+pair.getInput().getData(1)+","+ pair.getInput().getData(2)
         +","+   pair.getInput().getData(3) 
         + ", actual=" + output.getData(0) + ",ideal="
        +pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
            
}
}

ReadFileMT2.java

public class ReadFileMT2 implements Runnable {


    public static void dumpFieldInfo(EncogAnalyst analyst) {
        
     System.out.println("Fields found in file:");
    for (AnalystField field : analyst.getScript().getNormalize()
            .getNormalizedFields())
          {

        StringBuilder line = new StringBuilder();
        line.append(field.getName());
        line.append(",action=");
        line.append(field.getAction());
        line.append(",min=");
        line.append(field.getActualLow());
        line.append(",max=");
        line.append(field.getActualHigh());
        System.out.println(line.toString());
     }
    }


     public void run() {
     File sourceFile = new File("d:\\data\\RespTime.csv");
     File targetFile =newFile("d:\\data\\RespTime_norm.csv"); 
        EncogAnalyst analyst = new EncogAnalyst();
                AnalystWizard wizard = new AnalystWizard(analyst);
                    AnalystField targetField = wizard.getTargetField();
                    wizard.setTargetField("Old_Resp");
        wizard.wizard(sourceFile, true, AnalystFileFormat.DECPNT_COMMA);
        dumpFieldInfo(analyst);
        final AnalystNormalizeCSV norm = new AnalystNormalizeCSV();
        norm.analyze(sourceFile, true, CSVFormat.ENGLISH, analyst);
                    norm.setProduceOutputHeaders(true);
                    norm.normalize(targetFile);
                   // Encog.getInstance().shutdown();
        //******Read from the csv file*************************              
       
        
             
    final BasicNetwork network = EncogUtility.simpleFeedForward(4, 4, 0, 1,
                            false);
          
            network.addLayer(newBasicLayer(newActivationSigmoid(),false,4));
            network.addLayer(newBasicLayer(newActivationSigmoid(),false,4));
            network.addLayer(newBasicLayer(newActivationSigmoid(),false,1));
            network.getStructure().finalizeStructure();
            network.reset();
            //create training data
            final MLDataSet trainingSet = TrainingSetUtil.loadCSVTOMemory(
            CSVFormat.ENGLISH, "c:\\temp\\RespTime_norm.csv",false, 4, 1);
            
            // train the neural network
            System.out.println();
            System.out.println("Training Network");
            
            final Backpropagation train = new Backpropagation
            (network,trainingSet,0.05, 0.9);
            train.fixFlatSpot(false);
            int epoch = 1;

    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" +train.getError());
        epoch++;
     } while(train.getError() > 0.01);
     train.finishTraining();
           
           /*int epoch = 1;
        do {
            train.iteration();
            System.out.println("Epoch #" + epoch + " Error:"
                    + train.getError() * 100 + "%");
            epoch++;
        } while (train.getError() > 0.015);*/
        
            /*int epoch = 1;

    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" +train.getError());
        epoch++;
    } while(train.getError() > 0.01);
    train.finishTraining();*/
            
          
         
          
            // test the neural network
   
         
            System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + ","
        +pair.getInput().getData(1)+",
        "+ pair.getInput().getData(2)+
        ","+ pair.getInput().getData(3) 
        + ", actual=" +output.getData(0)+",ideal="+
        pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
           
      }
        }

main.java

    /**
     * Launches the two ANN jobs concurrently, one CSV file per thread,
     * and waits for both to finish before returning.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Thread t1 = new Thread(new ReadFileMT(), "ann-F2");
        Thread t2 = new Thread(new ReadFileMT2(), "ann-RespTime");
        t1.start();
        t2.start();
        // BUG FIX: the original returned immediately after start(), so main
        // gave no indication of completion and each worker's call to the
        // shared Encog singleton shutdown could race with the other thread.
        // Joining both threads ensures each job runs to completion.
        try {
            t1.join();
            t2.join();
        } catch (InterruptedException e) {
            // Preserve the interrupt status for the caller/JVM.
            Thread.currentThread().interrupt();
        }
    }
      }         
             

I don't understand what is wrong. P.S.: I am new to parallel programming.

8
  • please, add what exactly you want to achieve: do you want to split results in different files or to write them in one (in particular order) Commented Oct 8, 2015 at 19:38
  • I am using a neural network for time-series prediction, so I have two CSV files with different data. I want the ANN to produce predictions for both CSV files in parallel — no splitting of files, they are already split. To sum up: I want two instances of the same ANN running in parallel, each taking one CSV file to do prediction. Commented Oct 8, 2015 at 20:09
  • Please tell me — is it that I can't achieve what I want? Commented Oct 8, 2015 at 20:17
  • Can someone please help me? Commented Oct 8, 2015 at 21:31
  • Did you try to run each method separately? Does each method works correctly? Commented Oct 9, 2015 at 6:59

0

Your Answer

By clicking “Post Your Answer”, you agree to our terms of service and acknowledge you have read our privacy policy.

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.