From: Aaron A. <aa...@us...> - 2007-05-29 05:05:27
Update of /cvsroot/jboost/jboost/src/jboost/monitor
In directory sc8-pr-cvs6.sourceforge.net:/tmp/cvs-serv17610/monitor

Modified Files:
	Monitor.java
Log Message:
monitor can now output margins as well as scores

Index: Monitor.java
===================================================================
RCS file: /cvsroot/jboost/jboost/src/jboost/monitor/Monitor.java,v
retrieving revision 1.2
retrieving revision 1.3
diff -C2 -d -r1.2 -r1.3
*** Monitor.java	16 May 2007 21:16:30 -0000	1.2
--- Monitor.java	29 May 2007 05:05:24 -0000	1.3
***************
*** 21,215 ****
   */
  public class Monitor {
!   private static Date startTime;
!   private static Date afterInitTime;
!   private static Date endTime;
!   private String outputStem;
!   private PrintWriter infoStream; // the stream for providing
!   private String infoFileName;
!   // a high-level log of the program's progress.
!   private PrintWriter scoresStream; // a stream for the training data scores
!   private String scoresOutputFileName;
!   private int scoresPrintRate;
!   // parameter that controls when scores are printed
!   private PrintWriter testScoresStream; // a stream for the test data scores
!   private String testScoresOutputFileName;
!   private PrintWriter samplingStream;
!   // a stream for logging resampling activity
!   private String samplingOutputFileName;
!   private static String logOutputFileName;
!   private static PrintWriter logStream; // a central stream for all
!   // logging/debugging purposes
!   private ExampleSet trainSet; // the training ExampleSet
!   private ExampleSet testSet; // the test ExampleSet
!   private Booster m_booster; // used to get theoretical bound and m_margins
!   /**
!    * a public variable that stores the logging level for this run. The variable
!    * should be checked before each call to log(). Calls to log should be of the
!    * form</br>
!    * <tt>
!      if(Monitor.logLevel> 5) Monitor.log("log message");
!    </tt>
!    */
!   public static int logLevel= 0;
!   public static void init_log(Configuration config) throws IOException {
!     String stem= config.getString("S", "data");
!     logOutputFileName= config.getString("log", stem + ".log");
!     logLevel= config.getInt("loglevel", 2);
!     if (logLevel < 2) {
!       logStream= new PrintWriter(System.out);
!     } else {
!       logStream=
!         new PrintWriter(new BufferedWriter(new FileWriter(logOutputFileName)));
      }
!     startTime= new Date(); // remember time at start to report it later
!   }
!   /** a central place to print debugging logs */
!   public static void log(Object message) {
!     logStream.println(message);
!   }
!   /** close the logging file */
!   public static void closeLog() {
!     logStream.close();
!   }
!   /**
!    * The constructor
!    *
!    * @param config a configuration object with the run-time parameters
!    * @param trainSet the training set (to calcualte training error)
!    * @param testSet the test set
!    * @param m_booster the m_booster (to compute m_margins)
!    */
!   public Monitor(Booster booster, ControllerConfiguration config) {
!     trainSet= config.getTrainSet();
!     testSet= config.getTestSet();
!     m_booster= booster;
!     outputStem= config.getString("S", "noname_out");
!     infoFileName= config.getString("info", outputStem + ".info");
!     scoresOutputFileName= outputStem + ".train.scores";
!     testScoresOutputFileName= outputStem + ".test.scores";
!     samplingOutputFileName= outputStem + ".sampling";
!     try {
!       infoStream=
!         new PrintWriter(
!           new BufferedWriter(new FileWriter(outputStem + ".info")));
!       infoStream.println("Command line parameters: " + config.getString("args"));
!       infoStream.println();
!       infoStream.println("Configuration parameters:\n" + config);
!       infoStream.println("");
!       infoStream.println("FILENAMES");
!       infoStream.println("specFileName = " + config.getSpecFileName());
!       infoStream.println("trainFileName = " + config.getTrainFileName());
!       infoStream.println("testFileName = " + config.getTestFileName());
!       infoStream.println("scoresOutputFileName = " + scoresOutputFileName);
!       infoStream.println("testScoresOutputFileName = " +
!                          testScoresOutputFileName);
!       infoStream.println("resultOutputFileName = " +
!                          config.getResultOutputFileName());
!       infoStream.println("samplingOutputFileName = " + samplingOutputFileName);
!       infoStream.println("logOutputFileName = " + logOutputFileName);
!       infoStream.println("");
!       infoStream.println("Train set size = " + trainSet.getExampleNo());
!       infoStream.println("Test set size = " + testSet.getExampleNo());
!       infoStream.println("");
!       scoresPrintRate= config.getInt("a", 0);
!       if (scoresPrintRate != 0) {
!         scoresStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(scoresOutputFileName)));
!         testScoresStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(testScoresOutputFileName)));
!         samplingStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(samplingOutputFileName)));
!         logLabels();
!         // output train and test m_labels onto the samplingStream
!         samplingStream.close();
!       }
!       afterInitTime= new Date();
!       infoStream.println("Init Start time = " + startTime);
!       infoStream.println("Learn Start time = " + afterInitTime);
!       infoStream.println("iter \tbound \ttrain \ttest");
!       infoStream.flush();
!     } catch (IOException e) {
!       throw new RuntimeException(
!         "monitor failed to open file for output\n" + e.getMessage());
      }
-   }
!   /** print the m_labels of trainSet and testSet onto samplingStream */
!   private void logLabels() {
!     ArrayList labels= trainSet.getBinaryLabels();
!     samplingStream.println("train labels, elements=" + labels.size());
!     for (int i= 0; i < labels.size(); i++) {
!       samplingStream.println(((Boolean) labels.get(i)).booleanValue() ? "+1" : "-1");
      }
!     labels.clear(); // release memory
!     labels= testSet.getBinaryLabels();
!     samplingStream.println("test labels, elements=" + labels.size());
!     for (int i= 0; i < labels.size(); i++) {
!       samplingStream.println(((Boolean) labels.get(i)).booleanValue() ? "+1" : "-1");
      }
-     labels.clear(); // release memory
-     labels= null;
-   }
!   /** generate logs for current boosting iteration */
!   public void logIteration(int iter, Predictor combined, Predictor base) {
!     double trainError= trainSet.calcError(iter, combined, base);
!     double testError= testSet.calcError(iter, combined, base);
!     double theoryBound= m_booster.getTheoryBound();
!     NumberFormat f= new DecimalFormat("0.000");
!     infoStream.print(iter + "\t" + f.format(theoryBound) + "\t"
!                      + f.format(trainError) + "\t" + f.format(testError));
!     infoStream.flush();
!     logScores(iter, combined, base);
!     infoStream.println();
!   }
!   /** output the scores distribution of the training set */
!   private void logScores(int iter, Predictor combined, Predictor base) {
!     if (scoresPrintRate == 0 || // never print scores
!         (scoresPrintRate > 0 && scoresPrintRate != iter))
!       // or print scores only on iteration scoresPrintRate
!       return;
!     if (scoresPrintRate == -1) {
!       // print score when highest order digit in iter changes.
!       double m=
!         java.lang.Math.floor(
!           java.lang.Math.log(iter) / java.lang.Math.log(10.0));
!       int t= (int) java.lang.Math.pow(10.0, m);
!       if (iter == 0)
!         t= 1; // fix bug in "pow"
!       if ((iter % t) != 0)
!         return;
      }
-     infoStream.print(" \t# output scores #");
-     // output training scores
-     // double m_margins[] = m_booster.getMargins(); // get m_margins from the
-     // m_booster
-     ArrayList trainScores= trainSet.calcScores(iter, combined, base);
-     scoresStream.println(
-       "iteration=" + iter + ", elements=" + trainScores.size());
-     for (int i= 0; i < trainScores.size(); i++)
-       scoresStream.println((Double) trainScores.get(i));
-     trainScores= null; // release memory
-     // output test scores
-     ArrayList testScores= testSet.calcScores(iter, combined, base);
-     testScoresStream.println(
-       "iteration=" + iter + ", elements=" + testScores.size());
-     for (int i= 0; i < testScores.size(); i++)
-       testScoresStream.println((Double) testScores.get(i));
-     testScores.clear(); // release memory
-     testScores= null;
-   }
!   /** close the monitor output files */
!   public void close() throws IOException {
!     endTime= new Date();
!     infoStream.println("End time=" + endTime);
!     infoStream.close();
!     if (scoresStream != null)
!       scoresStream.close();
!     if (testScoresStream != null)
!       testScoresStream.close();
!     log("finished closing output files");
!   }
  }
--- 21,293 ----
   */
  public class Monitor {
!   private static Date startTime;
!   private static Date afterInitTime;
!   private static Date endTime;
!   private String outputStem;
!   private PrintWriter infoStream; // the stream for providing
!   private String infoFilename;
!   // a high-level log of the program's progress.
!
!
!   /** parameter that controls when scores are printed */
!   private int scoresPrintRate;
!   private PrintWriter testScoresStream; // a stream for the test data scores
!   private String testScoresOutputFilename;
!   private PrintWriter trainScoresStream; // a stream for the training data scores
!   private String trainScoresOutputFilename;
!
!   /** margin streams and varialbes */
!   private int marginPrintRate;
!   private PrintWriter testMarginStream; // a stream for the test data scores
!   private String testMarginOutputFilename;
!   private PrintWriter trainMarginStream; // a stream for the training data scores
!   private String trainMarginOutputFilename;
!
!   /** a stream for logging resampling activity */
!   private PrintWriter samplingStream;
!   private String samplingOutputFilename;
!
!   /** log file info */
!   private static String logOutputFilename;
!   private static PrintWriter logStream;
!
!
!   private ExampleSet trainSet; // the training ExampleSet
!   private ExampleSet testSet; // the test ExampleSet
!   private Booster m_booster; // used to get theoretical bound and m_margins
!   /**
!    * a public variable that stores the logging level for this run. The variable
!    * should be checked before each call to log(). Calls to log should be of the
!    * form</br>
!    * <tt>
!      if(Monitor.logLevel> 5) Monitor.log("log message");
!    </tt>
!    */
!   public static int logLevel= 0;
!   public static void init_log(Configuration config) throws IOException {
!     String stem= config.getString("S", "data");
!     logOutputFilename= config.getString("log", stem + ".log");
!     logLevel= config.getInt("loglevel", 2);
!     if (logLevel < 2) {
!       logStream= new PrintWriter(System.out);
!     } else {
!       logStream=
!         new PrintWriter(new BufferedWriter(new FileWriter(logOutputFilename)));
!     }
!     startTime= new Date(); // remember time at start to report it later
  }
!   /** a central place to print debugging logs */
!   public static void log(Object message) {
!     logStream.println(message);
!   }
!   /** close the logging file */
!   public static void closeLog() {
!     logStream.close();
!   }
!   /**
!    * The constructor
!    *
!    * @param config a configuration object with the run-time parameters
!    * @param trainSet the training set (to calcualte training error)
!    * @param testSet the test set
!    * @param m_booster the m_booster (to compute m_margins)
!    */
!   public Monitor(Booster booster, ControllerConfiguration config) {
!     trainSet= config.getTrainSet();
!     testSet= config.getTestSet();
!     m_booster= booster;
!     outputStem= config.getString("S", "noname_out");
!     infoFilename= config.getString("info", outputStem + ".info");
!
!     trainScoresOutputFilename= outputStem + ".train.scores";
!     testScoresOutputFilename= outputStem + ".test.scores";
!     trainMarginOutputFilename= outputStem + ".train.margin";
!     testMarginOutputFilename= outputStem + ".test.margin";
!
!     samplingOutputFilename= outputStem + ".sampling";
!     try {
!       infoStream=
!         new PrintWriter(
!           new BufferedWriter(new FileWriter(outputStem + ".info")));
!       infoStream.println("Command line parameters: " + config.getString("args"));
!       infoStream.println();
!       infoStream.println("Configuration parameters:\n" + config);
!       infoStream.println("");
!       infoStream.println("FILENAMES");
!       infoStream.println("specFilename = " + config.getSpecFileName());
!       infoStream.println("trainFilename = " + config.getTrainFileName());
!       infoStream.println("testFilename = " + config.getTestFileName());
!       infoStream.println("trainScoresOutputFilename = "
!                          +trainScoresOutputFilename);
!       infoStream.println("testScoresOutputFilename = " +
!                          testScoresOutputFilename);
!       infoStream.println("trainMarginOutputFilename = "
!                          +trainMarginOutputFilename);
!       infoStream.println("testMarginOutputFilename = "
!                          +testMarginOutputFilename);
!       infoStream.println("resultOutputFilename = " +
!                          config.getResultOutputFileName());
!       infoStream.println("samplingOutputFilename = " + samplingOutputFilename);
!       infoStream.println("logOutputFilename = " + logOutputFilename);
!       infoStream.println("");
!       infoStream.println("Train set size = " + trainSet.getExampleNo());
!       infoStream.println("Test set size = " + testSet.getExampleNo());
!       infoStream.println("");
!       scoresPrintRate= config.getInt("a", 0);
!       marginPrintRate= scoresPrintRate;
!       if (scoresPrintRate != 0) {
!         trainScoresStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(trainScoresOutputFilename)));
!         testScoresStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(testScoresOutputFilename)));
!         samplingStream= new PrintWriter(new BufferedWriter(
!           new FileWriter(samplingOutputFilename)));
!         logLabels();
!         // output train and test m_labels onto the samplingStream
!         samplingStream.close();
!       }
!       if (marginPrintRate != 0) {
!         trainMarginStream= new PrintWriter(
!           new BufferedWriter(
!             new FileWriter(trainMarginOutputFilename)));
!         testMarginStream= new PrintWriter(
!           new BufferedWriter(
!             new FileWriter(testMarginOutputFilename)));
!       }
!       afterInitTime= new Date();
!       infoStream.println("Init Start time = " + startTime);
!       infoStream.println("Learn Start time = " + afterInitTime);
!       infoStream.println("iter \tbound \ttrain \ttest");
!       infoStream.flush();
!     } catch (IOException e) {
!       throw new RuntimeException(
!         "monitor failed to open file for output\n" + e.getMessage());
!     }
  }
!   /** print the m_labels of trainSet and testSet onto samplingStream */
!   private void logLabels() {
!     ArrayList labels= trainSet.getBinaryLabels();
!     samplingStream.println("train labels, elements=" + labels.size());
!     for (int i= 0; i < labels.size(); i++) {
!       samplingStream.println(((Boolean) labels.get(i)).booleanValue() ? "+1" : "-1");
!     }
!     labels.clear(); // release memory
!     labels= testSet.getBinaryLabels();
!     samplingStream.println("test labels, elements=" + labels.size());
!     for (int i= 0; i < labels.size(); i++) {
!       samplingStream.println(((Boolean) labels.get(i)).booleanValue() ? "+1" : "-1");
!     }
!     labels.clear(); // release memory
!     labels= null;
  }
!
!   /** generate logs for current boosting iteration */
!   public void logIteration(int iter, Predictor combined, Predictor base) {
!     double trainError= trainSet.calcError(iter, combined, base);
!     double testError= testSet.calcError(iter, combined, base);
!     double theoryBound= m_booster.getTheoryBound();
!     NumberFormat f= new DecimalFormat("0.000");
!     infoStream.print(iter + "\t" + f.format(theoryBound) + "\t"
!                      + f.format(trainError) + "\t" + f.format(testError));
!     infoStream.flush();
!     logScores(iter, combined, base);
!     logMargins(iter, combined, base);
!     infoStream.println();
  }
!   /** output the scores distribution of the training set */
!   private void logScores(int iter, Predictor combined, Predictor base) {
!     if (scoresPrintRate == 0 || // never print scores
!         (scoresPrintRate > 0 && scoresPrintRate != iter))
!       // or print scores only on iteration scoresPrintRate
!       return;
!     if (scoresPrintRate == -1) {
!       // print score when highest order digit in iter changes.
!       double m=
!         java.lang.Math.floor(
!           java.lang.Math.log(iter) / java.lang.Math.log(10.0));
!       int t= (int) java.lang.Math.pow(10.0, m);
!       if (iter == 0)
!         t= 1; // fix bug in "pow"
!       if ((iter % t) != 0)
!         return;
!     }
!     infoStream.print(" \t# output scores #");
!     // output training scores
!     // double m_margins[] = m_booster.getMargins(); // get m_margins from the
!     // m_booster
!     ArrayList trainScores= trainSet.calcScores(iter, combined, base);
!     trainScoresStream.println(
!       "iteration=" + iter + ", elements=" + trainScores.size());
!     for (int i= 0; i < trainScores.size(); i++)
!       trainScoresStream.println((Double) trainScores.get(i));
!     trainScores= null; // release memory
!     // output test scores
!     ArrayList testScores= testSet.calcScores(iter, combined, base);
!     testScoresStream.println(
!       "iteration=" + iter + ", elements=" + testScores.size());
!     for (int i= 0; i < testScores.size(); i++)
!       testScoresStream.println((Double) testScores.get(i));
!     testScores.clear(); // release memory
!     testScores= null;
!   }
!
!
!   /** output the scores distribution of the training set */
!   private void logMargins(int iter, Predictor combined, Predictor base) {
!     if (marginPrintRate == 0 || // never print scores
!         (marginPrintRate > 0 && scoresPrintRate != iter))
!       // or print scores only on iteration scoresPrintRate
!       return;
!     if (marginPrintRate == -1) {
!       // print score when highest order digit in iter changes.
!       double m=
!         java.lang.Math.floor(
!           java.lang.Math.log(iter) / java.lang.Math.log(10.0));
!       int t= (int) java.lang.Math.pow(10.0, m);
!       if (iter == 0)
!         t= 1; // fix bug in "pow"
!       if ((iter % t) != 0)
!         return;
!     }
!     infoStream.print(" \t# output margins #");
!     // output training scores
!     // double m_margins[] = m_booster.getMargins(); // get m_margins from the
!     // m_booster
!     ArrayList trainMargin= trainSet.calcMargins(iter, combined, base);
!     trainMarginStream.println(
!       "iteration=" + iter + ", elements=" + trainMargin.size());
!     for (int i= 0; i < trainMargin.size(); i++)
!       trainMarginStream.println((Double) trainMargin.get(i));
!     trainMargin= null; // release memory
!     // output test scores
!     ArrayList testMargin= testSet.calcMargins(iter, combined, base);
!     testMarginStream.println(
!       "iteration=" + iter + ", elements=" + testMargin.size());
!     for (int i= 0; i < testMargin.size(); i++)
!       testMarginStream.println((Double) testMargin.get(i));
!     testMargin.clear(); // release memory
!     testMargin= null;
  }
!
!
!
!   /** close the monitor output files */
!   public void close() throws IOException {
!     endTime= new Date();
!     infoStream.println("End time=" + endTime);
!     infoStream.close();
!     if (trainScoresStream != null)
!       trainScoresStream.close();
!     if (testScoresStream != null)
!       testScoresStream.close();
!     if (trainMarginStream != null)
!       trainMarginStream.close();
!     if (testMarginStream != null)
!       testMarginStream.close();
!     log("finished closing output files");
!   }
  }