Learn how easy it is to sync an existing GitHub or Google Code repo to a SourceForge project! See Demo

Close

Commit [r704] Maximize Restore History

Several enhancements of Mulan's multi-target regression capabilities.

-A new multi-target regression method added: RandomLinearCombinations
-Fixes and extensions of existing transformation-based multi-target regression methods
-2 classes added in the experiments package
-improved CLUS support

lefman 2014-04-22

added /trunk/mulan/src/mulan/regressor/transformation/RandomLinearCombinations.java
added /trunk/mulan/src/mulan/regressor/transformation/RegressorChainSimple.java
added /trunk/mulan/src/mulan/regressor/clus/ClusRandomForest.java
added /trunk/mulan/src/mulan/experiments/ExperimentRLC.java
added /trunk/mulan/src/mulan/regressor/clus/ClusWrapperRegression.java
changed /trunk/mulan/src/mulan/regressor/transformation/RegressorChain.java
changed /trunk/mulan/src/mulan/regressor/transformation/TransformationBasedMultiTargetRegressor.java
changed /trunk/mulan/src/mulan/regressor/transformation/SingleTargetRegressor.java
changed /trunk/mulan/src/mulan/regressor/transformation/MultiTargetStacking.java
changed /trunk/mulan/src/mulan/evaluation/Evaluator.java
changed /trunk/mulan/src/mulan/classifier/clus/ClusWrapperClassification.java
changed /trunk/mulan/src/mulan/transformations/regression/SingleTargetTransformation.java
copied /trunk/mulan/src/mulan/transformations/regression/RegressorChainTransformation.java -> /trunk/mulan/src/mulan/transformations/regression/ChainTransformation.java
copied /trunk/mulan/src/mulan/regressor/transformation/RegressorChainCorrected.java -> /trunk/mulan/src/mulan/experiments/ExperimentMTR.java
/trunk/mulan/src/mulan/regressor/transformation/RandomLinearCombinations.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/transformation/RegressorChainSimple.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/clus/ClusRandomForest.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/experiments/ExperimentRLC.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/clus/ClusWrapperRegression.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/transformation/RegressorChain.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/transformation/TransformationBasedMultiTargetRegressor.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/transformation/SingleTargetRegressor.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/regressor/transformation/MultiTargetStacking.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/evaluation/Evaluator.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/classifier/clus/ClusWrapperClassification.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/transformations/regression/SingleTargetTransformation.java Diff Switch to side-by-side view
Loading...
/trunk/mulan/src/mulan/transformations/regression/RegressorChainTransformation.java to /trunk/mulan/src/mulan/transformations/regression/ChainTransformation.java
--- a/trunk/mulan/src/mulan/transformations/regression/RegressorChainTransformation.java
+++ b/trunk/mulan/src/mulan/transformations/regression/ChainTransformation.java
@@ -24,12 +24,12 @@
 import weka.filters.unsupervised.attribute.Remove;
 
 /**
- * This class implements the Regressor Chain transformation and is used by the RegressorChain class.
+ * This class implements the Classifier/Regressor Chain transformation.
  * 
  * @author Eleftherios Spyromitros-Xioufis
- * @version 2013.07.28
+ * @version 2014.04.01
  */
-public class RegressorChainTransformation implements Serializable {
+public class ChainTransformation implements Serializable {
 
     private static final long serialVersionUID = 1L;
 
@@ -114,8 +114,8 @@
         Instance original = train.getDataSet().instance(0);
         System.out.println("Original:\t" + original);
         for (int i = 1; i <= train.getNumLabels(); i++) {
-            Instance transformed = RegressorChainTransformation.transformInstance(original,
-                    targetIndices, i);
+            Instance transformed = ChainTransformation
+                    .transformInstance(original, targetIndices, i);
             System.out.println("Transformed " + i + ":\t" + transformed);
         }
     }
/trunk/mulan/src/mulan/regressor/transformation/RegressorChainCorrected.java to /trunk/mulan/src/mulan/experiments/ExperimentMTR.java
--- a/trunk/mulan/src/mulan/regressor/transformation/RegressorChainCorrected.java
+++ b/trunk/mulan/src/mulan/experiments/ExperimentMTR.java
@@ -1,405 +1,302 @@
-package mulan.regressor.transformation;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Random;
-
-import mulan.classifier.MultiLabelOutput;
-import mulan.data.DataUtils;
-import mulan.data.MultiLabelInstances;
-import mulan.transformations.regression.RegressorChainTransformation;
-import weka.classifiers.AbstractClassifier;
-import weka.classifiers.Classifier;
-import weka.classifiers.meta.FilteredClassifier;
-import weka.core.Instance;
-import weka.core.Instances;
-import weka.filters.Filter;
-import weka.filters.unsupervised.attribute.AddID;
-import weka.filters.unsupervised.attribute.Remove;
-
-/**
- * This class implements the Regressor Chain (RC) method. 3 alternative methods to obtain the values
- * of the meta features are implemented.<br/>
- * For more information, see:<br/>
- * E. Spyromitros-Xioufis, W. Groves, G. Tsoumakas, I. Vlahavas (2012). Multi-label Classification
- * Methods for Multi-target Regression. <a href="http://arxiv.org/abs/1211.6581">ArXiv e-prints</a>.
- * 
- * @author Eleftherios Spyromitros-Xioufis
- * @version 2013.07.28
- */
-public class RegressorChainCorrected extends TransformationBasedMultiTargetRegressor {
-
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * The 3 alternative methods to obtain the values of the meta features.
-     */
-    public enum metaType {
-        /**
-         * Using internal k fold cross-validation.
-         */
-        CV,
-        /**
-         * Using the full training set.
-         */
-        TRAIN,
-        /**
-         * Using the true target values.
-         */
-        TRUE
-    }
-
-    /**
-     * The method used to obtain the values of the meta features. CV is used by default.
-     */
-    private metaType meta = metaType.CV;
-
-    /**
-     * A permutation of the target indices. E.g. If there are 3 targets with indices 14,15 and 16, a
-     * valid chain is 15,14,16.
-     */
-    private int[] chain;
-
-    /**
-     * The seed to use for random number generation in order to create a random chain (other than
-     * the default one which consists of the targets chained in the order they appear in the arff
-     * file).
-     */
-    private int chainSeed = 0;
-
-    /**
-     * The number of folds to use in internal k fold cross-validation. 3 folds are used by default.
-     */
-    private int numFolds = 3;
-
-    /**
-     * The training data of each regressor of the chain. After training the actual data are deleted
-     * and only the header information is held which is needed during prediction.
-     */
-    private Instances[] chainRegressorsTrainSets;
-
-    /** The regressors of the chain. */
-    private Classifier[] chainRegressors;
-
-    /**
-     * The values of the meta features (obtained using one of the available methods). The first
-     * dimension's size is equal to the number of training examples and the second is equal to the
-     * number of targets minus 1 (we do not need meta features for the last target of the chain).
-     */
-    private double[][] metaFeatures;
-
-    /**
-     * When the base regressor is capable of attribute selection this ArrayList holds the indices of
-     * the target variables that were selected in each target's model.
-     */
-    protected ArrayList<Integer>[] selectedTargetIndices;
-    /**
-     * When the base regressor is capable of attribute selection this ArrayList holds the indices of
-     * the normal feature variables that were selected in each target's model.
-     */
-    protected ArrayList<Integer>[] selectedFeatureIndices;
-
-    /**
-     * Creates a new instance with the given base regressor. If {@link #chainSeed} == 0, the default
-     * chain is used. Otherwise, a random chain is created using the given seed.
-     * 
-     * @param baseRegressor the base regression algorithm that will be used
-     * @throws Exception
-     */
-    public RegressorChainCorrected(Classifier baseRegressor) throws Exception {
-        super(baseRegressor);
-    }
-
-    /**
-     * Creates a new instance with the given base regressor and chain ordering.
-     * 
-     * @param baseRegressor the base regression algorithm that will be used
-     * @param aChain a chain ordering
-     * @throws Exception
-     */
-    public RegressorChainCorrected(Classifier baseRegressor, int[] aChain) throws Exception {
-        super(baseRegressor);
-        chain = aChain;
-    }
-
-    @Override
-    protected void buildInternal(MultiLabelInstances trainSet) throws Exception {
-        // =============================== INITIALIZATION START ===============================
-        chainRegressorsTrainSets = new Instances[numLabels];
-        metaFeatures = new double[trainSet.getNumInstances()][numLabels - 1];
-        chainRegressors = new Classifier[numLabels];
-        selectedTargetIndices = new ArrayList[numLabels];
-        selectedFeatureIndices = new ArrayList[numLabels];
-        // =============================== INITIALIZATION END =================================
-
-        // =============================== CHAIN CREATION START ===============================
-        // if no chain has been defined, create the default chain
-        if (chain == null) {
-            chain = new int[numLabels];
-            for (int j = 0; j < numLabels; j++) {
-                chain[j] = labelIndices[j];
-            }
-        }
-
-        if (chainSeed != 0) { // a random chain will be created by shuffling the existing chain
-            Random rand = new Random(chainSeed);
-            ArrayList<Integer> chainAsList = new ArrayList<Integer>(numLabels);
-            for (int j = 0; j < numLabels; j++) {
-                chainAsList.add(chain[j]);
-            }
-            Collections.shuffle(chainAsList, rand);
-            for (int j = 0; j < numLabels; j++) {
-                chain[j] = chainAsList.get(j);
-            }
-        }
-        debug("Using chain: " + Arrays.toString(chain));
-        // =============================== CHAIN CREATION END =================================
-
-        for (int targetIndex = 0; targetIndex < numLabels; targetIndex++) {
-            selectedTargetIndices[targetIndex] = new ArrayList<Integer>();
-            selectedFeatureIndices[targetIndex] = new ArrayList<Integer>();
-            chainRegressors[targetIndex] = AbstractClassifier.makeCopy(baseRegressor);
-
-            // ======================= TRAINING SET CREATION START============================
-            // create a copy of the training set and transform it according to the CC transformation
-            // the copy is probably not needed
-            // Instances trainCopy = new Instances(trainSet.getDataSet());
-            chainRegressorsTrainSets[targetIndex] = RegressorChainTransformation
-                    .transformInstances(trainSet.getDataSet(), chain, targetIndex + 1);
-
-            // if it is not the first target in the chain and if we are not using the true values
-            // (as in original RC), use the values stored in metaFeatures to update the training set
-            if (targetIndex > 0 && meta != metaType.TRUE) {
-                // replace the true values of the targets with the predictions made by the
-                // previous regressors in the chain
-                for (int j = 0; j < targetIndex; j++) {
-                    // get the name of the target for which predictions have been made in the
-                    // previous iteration
-                    String targetName = chainRegressorsTrainSets[targetIndex - 1 - j]
-                            .classAttribute().name();
-                    // get the index of the attribute in the new dataset
-                    int indexInNewDataset = chainRegressorsTrainSets[targetIndex].attribute(
-                            targetName).index();
-
-                    for (int i = 0; i < chainRegressorsTrainSets[targetIndex].numInstances(); i++) {
-                        // get the predicted value of that attribute
-                        double predictedValue = metaFeatures[i][targetIndex - 1 - j];
-                        // replace the true value with the prediction
-                        chainRegressorsTrainSets[targetIndex].instance(i).setValue(
-                                indexInNewDataset, predictedValue);
-                    }
-                }
-            }
-            // ========================== TRAINING SET CREATION ENDED ===========================
-
-            // =========================== REGRESSOR TRAINING START =============================
-            debug("RC bulding model " + (targetIndex + 1) + "/" + numLabels + " (for target "
-                    + chainRegressorsTrainSets[targetIndex].classAttribute().name() + ")");
-            chainRegressors[targetIndex].buildClassifier(chainRegressorsTrainSets[targetIndex]);
-            // =========================== REGRESSOR TRAINING ENDED =============================
-
-            String output = chainRegressors[targetIndex].toString();
-            // if this is a classifier that performs attribute selection (i.e.
-            // AttributeSelectedClassifier or InfoTheoreticFeatureSelectionClassifier)
-            if (output.contains("Selected attributes: ")) {
-                // gather and output information about which feature and which target attributes
-                // were selected
-                String selectedString = output.split("Selected attributes: ")[1].split(" :")[0];
-                String[] selectedIndicesString = selectedString.split(",");
-                for (int j = 0; j < selectedIndicesString.length; j++) {
-                    int selectedIndex = Integer.parseInt(selectedIndicesString[j]) - 1;
-                    boolean isTarget = false;
-                    for (int k = 0; k < numLabels; k++) {
-                        String nameOfKthTarget = trainSet.getDataSet().attribute(labelIndices[k])
-                                .name();
-                        String nameOfSelectedAttribute = chainRegressorsTrainSets[targetIndex]
-                                .attribute(selectedIndex).name();
-                        if (nameOfKthTarget.equals(nameOfSelectedAttribute)) {
-                            selectedTargetIndices[targetIndex].add(labelIndices[k]);
-                            isTarget = true;
-                            break;
-                        }
-                    }
-                    if (!isTarget) {
-                        selectedFeatureIndices[targetIndex].add(selectedIndex);
-                    }
-                }
-
-                System.out.println("# selected feature attributes for target " + targetIndex + ": "
-                        + selectedFeatureIndices[targetIndex].size());
-                System.out.println(selectedFeatureIndices[targetIndex].toString());
-                System.out.println("# selected target attributes for target " + targetIndex + ": "
-                        + selectedTargetIndices[targetIndex].size());
-                System.out.println(selectedTargetIndices[targetIndex].toString());
-                System.out.flush();
-            }
-
-            // ============================ META FEATURE CREATION START ============================
-            // we do not need a meta feature for the last target in the chain
-            if (targetIndex < numLabels - 1) {
-
-                if (meta == metaType.CV) {
-                    // attach an index attribute in order to keep track of the original
-                    // positions of the examples before the internal cross-validation
-                    AddID filter = new AddID();
-                    filter.setInputFormat(chainRegressorsTrainSets[targetIndex]);
-                    chainRegressorsTrainSets[targetIndex] = Filter.useFilter(
-                            chainRegressorsTrainSets[targetIndex], filter);
-
-                    // debug("Performing internal cv to get predictions (for target "
-                    // + chainRegressorsTrainSets[labelIndex].classAttribute().name() + ")");
-                    // perform k-fold cross-validation and save the predictions which
-                    // will be used by the next regressor in the chain in order to build its model
-                    HashSet<Integer> indices = new HashSet<Integer>();
-                    for (int foldIndex = 0; foldIndex < numFolds; foldIndex++) {
-                        // debug("Label=" + labelIndex + ", Fold=" + foldIndex);
-                        // create the training and test set for the current fold
-                        Instances foldKTrainset = chainRegressorsTrainSets[targetIndex].trainCV(
-                                numFolds, foldIndex);
-                        Instances foldKTestset = chainRegressorsTrainSets[targetIndex].testCV(
-                                numFolds, foldIndex);
-                        // create a filtered meta classifier, used to ignore
-                        // the ID attribute in the build process
-                        FilteredClassifier fil = new FilteredClassifier();
-                        fil.setClassifier(AbstractClassifier.makeCopy(baseRegressor));
-                        Remove remove = new Remove();
-                        remove.setAttributeIndices("first");
-                        remove.setInputFormat(foldKTrainset);
-                        fil.setFilter(remove);
-                        fil.buildClassifier(foldKTrainset);
-
-                        // Make prediction for each test instance
-                        for (int i = 0; i < foldKTestset.numInstances(); i++) {
-                            double score = fil.classifyInstance(foldKTestset.instance(i));
-                            // get index of the instance which was just classified
-                            int index = (int) foldKTestset.instance(i).value(0);
-                            if (!indices.add(index)) {
-                                System.out.println("Something went wrong: index" + index
-                                        + " was already predicted!");
-                            }
-                            // The index starts from 1
-                            metaFeatures[index - 1][targetIndex] = score;
-                        }
-                    }
-                    if (indices.size() != trainSet.getNumInstances()) {
-                        System.out.println("Something went wrong: indices size is "
-                                + indices.size() + " instead of " + trainSet.getNumInstances());
-                    }
-                    // now we can detach the indices from this target's training set
-                    Remove remove = new Remove();
-                    remove.setAttributeIndices("first");
-                    remove.setInputFormat(chainRegressorsTrainSets[targetIndex]);
-                    chainRegressorsTrainSets[targetIndex] = Filter.useFilter(
-                            chainRegressorsTrainSets[targetIndex], remove);
-                } else if (meta == metaType.TRAIN) {
-                    // Make prediction for each instance in the training set
-                    for (int i = 0; i < chainRegressorsTrainSets[targetIndex].numInstances(); i++) {
-                        double score = chainRegressors[targetIndex]
-                                .classifyInstance(chainRegressorsTrainSets[targetIndex].instance(i));
-                        metaFeatures[i][targetIndex] = score;
-                    }
-                } else if (meta == metaType.TRUE) {
-                    for (int i = 0; i < chainRegressorsTrainSets[targetIndex].numInstances(); i++) {
-                        metaFeatures[i][targetIndex] = chainRegressorsTrainSets[targetIndex]
-                                .instance(i).classValue();
-                    }
-                }
-                // these data are no more needed so they are deleted to save some memory
-                chainRegressorsTrainSets[targetIndex].delete();
-            }
-            // ============================ META FEATURE CREATION ENDED ============================
-        }
-        outputExtraLog();
-    }
-
-    protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
-        double[] scores = new double[numLabels];
-
-        Instance copyOfInstance = DataUtils.createInstance(instance, instance.weight(),
-                instance.toDoubleArray());
-        copyOfInstance.setDataset(instance.dataset());
-
-        for (int counter = 0; counter < numLabels; counter++) {
-            Instance temp = RegressorChainTransformation.transformInstance(copyOfInstance, chain,
-                    counter + 1);
-            temp.setDataset(chainRegressorsTrainSets[counter]);
-
-            double score = chainRegressors[counter].classifyInstance(temp);
-
-            // find the appropriate position for that score in the scores array
-            // i.e. which is the corresponding target
-            int pos = 0;
-            for (int i = 0; i < numLabels; i++) {
-                if (chain[counter] == labelIndices[i]) {
-                    pos = i;
-                }
-            }
-            scores[pos] = score;
-            copyOfInstance.setValue(chain[counter], score);
-        }
-
-        MultiLabelOutput mlo = new MultiLabelOutput(scores, true);
-        return mlo;
-    }
-
-    @Override
-    protected String getModelForTarget(int targetIndex) {
-        try {
-            chainRegressors[targetIndex].getClass().getMethod("toString", (Class<?>[]) null);
-        } catch (NoSuchMethodException e) {
-            return "A string representation for this base algorithm is not provided!";
-        }
-        return chainRegressors[targetIndex].toString();
-    }
-
-    public void setMeta(metaType meta) {
-        this.meta = meta;
-    }
-
-    public void setNumFolds(int numFolds) {
-        this.numFolds = numFolds;
-    }
-
-    public void setChainSeed(int chainSeed) {
-        this.chainSeed = chainSeed;
-    }
-
-    public ArrayList<Integer>[] getSelectedTargetIndices() {
-        return selectedTargetIndices;
-    }
-
-    public ArrayList<Integer>[] getSelectedFeatureIndices() {
-        return selectedFeatureIndices;
-    }
-
-    public void outputExtraLog() {
-        // for (int i = 0; i < numLabels; i++) {
-        // System.out
-        // .println("Feature attributes selected" + selectedFeatureIndices[i].toString());
-        // System.out.println("Target attributes selected" + selectedTargetIndices[i].toString());
-        // }
-        // output the predictions of the base level models
-        // BufferedWriter out = new BufferedWriter(new FileWriter(new File("RC" + meta.toString()
-        // + "_predictions.txt")));
-        //
-        // for (int j = 0; j < metaFeatures[0].length; j++) {
-        // String targetName = chainRegressorsTrainSets[j].classAttribute().name();
-        // out.write(targetName + "_pred " + targetName + " ");
-        // }
-        // out.write("\n");
-        //
-        // for (int i = 0; i < metaFeatures.length; i++) {
-        // for (int j = 0; j < metaFeatures[0].length; j++) {
-        // String targetName = chainRegressorsTrainSets[j].classAttribute().name();
-        // int targetIndexInTrainSet = trainSet.getDataSet().attribute(targetName).index();
-        // out.write(metaFeatures[i][j] + " "
-        // + trainSet.getDataSet().instance(i).value(targetIndexInTrainSet) + " ");
-        // }
-        // out.write("\n");
-        // }
-        // out.close();
-    }
-
-}
+package mulan.experiments;
+
+import java.io.BufferedWriter;
+import java.io.FileWriter;
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadMXBean;
+import java.util.ArrayList;
+import java.util.List;
+
+import mulan.classifier.MultiLabelLearnerBase;
+import mulan.data.MultiLabelInstances;
+import mulan.evaluation.Evaluation;
+import mulan.evaluation.Evaluator;
+import mulan.evaluation.MultipleEvaluation;
+import mulan.evaluation.measure.AverageRelativeRMSE;
+import mulan.evaluation.measure.Measure;
+import mulan.regressor.clus.ClusRandomForest;
+import mulan.regressor.transformation.EnsembleOfRegressorChains;
+import mulan.regressor.transformation.MultiTargetStacking;
+import mulan.regressor.transformation.RegressorChain;
+import mulan.regressor.transformation.SingleTargetRegressor;
+import weka.classifiers.Classifier;
+import weka.classifiers.meta.Bagging;
+import weka.classifiers.rules.ZeroR;
+import weka.classifiers.trees.REPTree;
+import weka.core.Utils;
+
+/**
+ * <p>
+ * Class replicating the experiment in
+ * <em>E. Spyromitros-Xioufis, G. Tsoumakas, W. Groves, I. Vlahavas. 2014. Multi-label Classification Methods for
+ * Multi-target Regression. <a href="http://arxiv.org/abs/1211.6581">arXiv e-prints</a></em>.
+ * </p>
+ * 
+ * @author Eleftherios Spyromitros-Xioufis
+ * @version 2014.04.01
+ * 
+ */
+public class ExperimentMTR {
+
+    /** the number of models in ensemble methods (ERC) **/
+    public static final int numEnsembleModels = 10;
+    /** whether the base learner should output debug messages **/
+    public static final boolean baseDebug = false;
+    /** whether the multi-target methods should output debug messages **/
+    public static final boolean mtDebug = true;
+    /** the number of cross-validation folds to use for evaluation **/
+    public static final int numFolds = 10;
+    /** the type of sampling in ERC **/
+    public static final EnsembleOfRegressorChains.SamplingMethod sampling = EnsembleOfRegressorChains.SamplingMethod.None;
+    /** number of execution slots to use by Weka's algorithms which support this option **/
+    private static int numSlots;
+    /** number of targets **/
+    private static int numTargets;
+    /** the multi-target datasets. Train and test will be null when cv is performed and vice versa **/
+    private static MultiLabelInstances full;
+    private static MultiLabelInstances train;
+    private static MultiLabelInstances test;
+
+    /**
+     * @param args <ul>
+     *            <li><b>-path:</b> full path to the dataset folder</li>
+     *            <li><b>-filestem:</b> the dataset's filestem (name)</li>
+     *            <li><b>-targets:</b> the number of targets in the dataset</li>
+     *            <li><b>-eval:</b> the type of evaluation to perform ('cv' for cross-validation / 'train' for
+     *            train/test split)</li>
+     *            <li><b>-mt:</b> comma separated list of the multi-target regression methods to evaluate:<br>
+     *            (ST,MTS,MTSC,ERC,ERCC,MORF)</li>
+     *            <li><b>-base:</b> the base regressor to use (reptree-bag)</li>
+     *            <li><b>-slots:</b> number of execution slots to be used by Weka's algorithms which support
+     *            this option</li>
+     *            </ul>
+     * @throws Exception
+     */
+    public static void main(String[] args) throws Exception {
+        // parsing options related to dataset and evaluation type
+        String path = Utils.getOption("path", args);
+        String fileStem = Utils.getOption("filestem", args);
+        numTargets = Integer.parseInt(Utils.getOption("targets", args));
+        String evalType = Utils.getOption("eval", args);
+
+        // parsing options related to multi-target methods being evaluated
+        String mt = Utils.getOption("mt", args);
+        String[] mtMethods = mt.split(",");
+        String base = Utils.getOption("base", args);
+
+        try {
+            numSlots = Integer.parseInt(Utils.getOption("slots", args));
+        } catch (Exception e) {
+            System.out.println("Number of execution slots not specified, using 1.");
+            numSlots = 1;
+        }
+
+        // loading the datasets
+        if (evalType.startsWith("cv")) {
+            full = new MultiLabelInstances(path + fileStem + ".arff", numTargets);
+        } else {
+            train = new MultiLabelInstances(path + fileStem + "-train.arff", numTargets);
+            test = new MultiLabelInstances(path + fileStem + "-test.arff", numTargets);
+            full = train; // just for initializing the measures
+        }
+
+        List<Measure> measures = new ArrayList<Measure>();
+        measures.add(new AverageRelativeRMSE(numTargets, full, full));
+
+        MultiLabelLearnerBase mtMethodPtr = null;
+
+        String resultsFileName = "results_" + fileStem + evalType + "_" + mt + "_"
+                + base.substring(0, 10) + "..." + base.substring(base.length() - 10, base.length())
+                + ".txt";
+        BufferedWriter outResults = new BufferedWriter(new FileWriter(resultsFileName));
+
+        // header
+        outResults
+                .write("dataset\teval_type\tmt_method\tbase_learner\ttarget_index\ttarget_name\t");
+        // print the measures name
+        for (Measure m : measures) {
+            outResults.write("'" + m.getName() + "'\t");
+        }
+        outResults.write("real_time\tcpu_time\n");
+        outResults.flush();
+
+        for (int j = 0; j < mtMethods.length; j++) { // for each mt method
+            String mtMethodChoice = mtMethods[j];
+            String baseLearnerChoice;
+            Classifier baseLearner = null;
+            if (mtMethodChoice.equals("MORF")) {
+                baseLearnerChoice = "no";
+            } else {
+                baseLearnerChoice = base;
+                baseLearner = selectBaseLearner(baseLearnerChoice);
+            }
+            if (mtMethodChoice.equals("ST")) {
+                SingleTargetRegressor str = new SingleTargetRegressor(baseLearner);
+                mtMethodPtr = str;
+            } else if (mtMethodChoice.equals("MTS")) {
+                MultiTargetStacking MTS = new MultiTargetStacking(baseLearner, baseLearner);
+                MTS.setIncludeAttrs(true);
+                MTS.setMeta(MultiTargetStacking.metaType.TRAIN);
+                mtMethodPtr = MTS;
+            } else if (mtMethodChoice.equals("MTSC")) {
+                MultiTargetStacking MTSC = new MultiTargetStacking(baseLearner, baseLearner);
+                MTSC.setIncludeAttrs(true);
+                MTSC.setMeta(MultiTargetStacking.metaType.CV);
+                MTSC.setNumFolds(10);
+                mtMethodPtr = MTSC;
+            } else if (mtMethodChoice.equals("ERC")) {
+                EnsembleOfRegressorChains ERC = new EnsembleOfRegressorChains(baseLearner,
+                        numEnsembleModels, sampling);
+                ERC.setMeta(RegressorChain.metaType.TRUE);
+                mtMethodPtr = ERC;
+            } else if (mtMethodChoice.equals("ERCC")) {
+                EnsembleOfRegressorChains ERCC = new EnsembleOfRegressorChains(baseLearner,
+                        numEnsembleModels, sampling);
+                ERCC.setMeta(RegressorChain.metaType.CV);
+                ERCC.setNumFolds(10);
+                mtMethodPtr = ERCC;
+            } else if (mtMethodChoice.startsWith("MORF")) {
+                ClusRandomForest MORF = new ClusRandomForest("clusWorkingDir/", fileStem, 100);
+                mtMethodPtr = MORF;
+            } else {
+                throw new Exception(mtMethodChoice + " mt method is not supported!");
+            }
+
+            mtMethodPtr.setDebug(mtDebug);
+
+            if (evalType.equals("train")) { // train-test evaluation
+                long startTraining = System.currentTimeMillis();
+                long startTrainingNano = getCpuTime();
+                mtMethodPtr.build(train);
+                long endTraining = System.currentTimeMillis();
+                long endTrainingCPU = getCpuTime();
+
+                Evaluator eval = new Evaluator();
+                long startEval = System.currentTimeMillis();
+                long startEvalNano = getCpuTime();
+                Evaluation results = eval.evaluate(mtMethodPtr, test, train);
+                long endEval = System.currentTimeMillis();
+                long endEvalCPU = getCpuTime();
+
+                AverageRelativeRMSE arrmse = (AverageRelativeRMSE) results.getMeasures().get(1);
+
+                // print static information
+                outResults.write(fileStem + "\t" + evalType + "\t" + mtMethodChoice + "\t"
+                        + baseLearnerChoice + "\t0\tall\t");
+                // print measure for all targets
+                outResults.write(arrmse.getValue() + "\t");
+                // print training/evaluation time
+                outResults.write((endTraining - startTraining) + "\t"
+                        + (endTrainingCPU - startTrainingNano) + "\t" + (endEval - startEval)
+                        + "\t" + (endEvalCPU - startEvalNano) + "\n");
+
+                // print measure per target
+                for (int i = 0; i < numTargets; i++) {
+                    outResults.write(fileStem + "\t" + evalType + "\t" + mtMethodChoice + "\t"
+                            + baseLearnerChoice + "\t");
+                    // print target index and name
+                    outResults.write((i + 1) + "\t");
+                    outResults.write(train.getDataSet().attribute(train.getLabelIndices()[i])
+                            .name()
+                            + "\t");
+                    outResults.write(arrmse.getValue(i) + "\t");
+                    // print training/evaluation time
+                    outResults.write((endTraining - startTraining) + "\t"
+                            + (endTrainingCPU - startTrainingNano) + "\t" + (endEval - startEval)
+                            + "\t" + (endEvalCPU - startEvalNano) + "\n");
+                }
+                outResults.flush();
+
+            } else if (evalType.equals("cv")) {
+                Evaluator eval = new Evaluator();
+                eval.setSeed(1);
+                MultipleEvaluation results = null;
+
+                long start = System.currentTimeMillis();
+                long startCPU = getCpuTime();
+                results = eval.crossValidate(mtMethodPtr, full, numFolds);
+                long end = System.currentTimeMillis();
+                long endCPU = getCpuTime();
+
+                ArrayList<Evaluation> evals = results.getEvaluations();
+                double[][] totalSEs = new double[numTargets][numFolds]; // a_i
+                double[][] trainMeanTotalSEs = new double[numTargets][numFolds]; // b_i_us
+                int[][] nonMissingInstances = new int[numTargets][numFolds];
+
+                for (int t = 0; t < evals.size(); t++) { // for each fold!
+                    AverageRelativeRMSE arrmse = ((AverageRelativeRMSE) evals.get(t).getMeasures()
+                            .get(1));
+                    for (int r = 0; r < numTargets; r++) {
+                        totalSEs[r][t] = arrmse.getTotalSE(r);
+                        trainMeanTotalSEs[r][t] = arrmse.getTrainMeanTotalSE(r);
+                        // either measure can be used for getting the num non-missing
+                        nonMissingInstances[r][t] = arrmse.getNumNonMissing(r);
+                    }
+                }
+
+                // calculating rrmse
+                double[] rrmse_us = new double[numTargets];
+                for (int r = 0; r < numTargets; r++) {
+                    for (int t = 0; t < numFolds; t++) {
+                        rrmse_us[r] += Math.sqrt(totalSEs[r][t])
+                                / Math.sqrt(trainMeanTotalSEs[r][t]);
+                    }
+                    rrmse_us[r] /= numFolds;
+                }
+
+                // print static information
+                outResults.write(fileStem + "\t" + evalType + "\t" + mtMethodChoice + "\t"
+                        + baseLearnerChoice + "\t0\tall\t");
+                for (Measure m : measures) {
+                    outResults.write(results.getMean(m.getName()) + "\t");
+                }
+                outResults.write((end - start) + "\t" + (endCPU - startCPU) + "\n");
+
+                for (int m = 0; m < numTargets; m++) {
+                    String targetName = full.getDataSet().attribute(full.getLabelIndices()[m])
+                            .name();
+                    outResults.write(fileStem + "\t" + evalType + "\t" + mtMethodChoice + "\t"
+                            + baseLearnerChoice + "\t" + (m + 1) + "\t" + targetName + "\t");
+                    for (Measure me : measures) {
+                        outResults.write(results.getMean(me.getName(), m) + "\t");
+                    }
+                    outResults.write((end - start) + "\t" + (endCPU - startCPU) + "\n");
+                }
+                outResults.flush();
+            } else {
+                throw new Exception("Wrong evaluation type given!");
+            }
+        }
+        outResults.close();
+
+    }
+
+    public static Classifier selectBaseLearner(String stLearnerName) throws Exception {
+        Classifier stLearner = null;
+        if (stLearnerName.equals("zeror")) {
+            // Weka's ZeroR (mean predictor)
+            ZeroR zeror = new ZeroR();
+            stLearner = zeror;
+        } else if (stLearnerName.equals("reptree-bag")) {
+            // Bagging of 100 Weka's REPTrees (default)
+            REPTree reptree = new REPTree();
+            int numBags = 100;
+            Bagging bagging = new Bagging();
+            bagging.setNumIterations(numBags);
+            bagging.setNumExecutionSlots(numSlots);
+            bagging.setClassifier(reptree);
+            stLearner = bagging;
+        } else {
+            throw new Exception(stLearnerName + " base learner is not supported!");
+        }
+        return stLearner;
+    }
+
+    /** Get CPU time in milliseconds. */
+    public static long getCpuTime() {
+        ThreadMXBean bean = ManagementFactory.getThreadMXBean();
+        return bean.isCurrentThreadCpuTimeSupported() ? (long) ((double) bean
+                .getCurrentThreadCpuTime() / 1000000.0) : 0L;
+    }
+
+}