View Javadoc

1   /*
2    *    This program is free software; you can redistribute it and/or modify
3    *    it under the terms of the GNU General Public License as published by
4    *    the Free Software Foundation; either version 2 of the License, or
5    *    (at your option) any later version.
6    *
7    *    This program is distributed in the hope that it will be useful,
8    *    but WITHOUT ANY WARRANTY; without even the implied warranty of
9    *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
10   *    GNU General Public License for more details.
11   *
12   *    You should have received a copy of the GNU General Public License
13   *    along with this program; if not, write to the Free Software
14   *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
15   */
16  
17  /*
18   *    EvaluationACO.java
19   *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
20   *
21   */
22  ///
23  package jr239.co620;
24  
25  
26  import  weka.classifiers.CostMatrix;
27  import weka.classifiers.Classifier;
28  import weka.classifiers.Sourcable;
29  import weka.classifiers.UpdateableClassifier;
30  
31  ///
32  import weka.classifiers.evaluation.NominalPrediction;
33  import weka.classifiers.evaluation.ThresholdCurve;
34  import weka.classifiers.xml.XMLClassifier;
35  import weka.core.Drawable;
36  import weka.core.FastVector;
37  import weka.core.Instance;
38  import weka.core.Instances;
39  import weka.core.Option;
40  import weka.core.OptionHandler;
41  import weka.core.Range;
42  import weka.core.Summarizable;
43  import weka.core.Utils;
44  import weka.core.Version;
45  import weka.core.converters.ConverterUtils.DataSink;
46  import weka.core.converters.ConverterUtils.DataSource;
47  import weka.core.xml.KOML;
48  import weka.core.xml.XMLOptions;
49  import weka.core.xml.XMLSerialization;
50  import weka.estimators.Estimator;
51  import weka.estimators.KernelEstimator;
52  
53  import java.io.BufferedInputStream;
54  import java.io.BufferedOutputStream;
55  import java.io.BufferedReader;
56  import java.io.FileInputStream;
57  import java.io.FileOutputStream;
58  import java.io.FileReader;
59  import java.io.InputStream;
60  import java.io.ObjectInputStream;
61  import java.io.ObjectOutputStream;
62  import java.io.OutputStream;
63  import java.io.Reader;
64  import java.util.Date;
65  import java.util.Enumeration;
66  import java.util.Random;
67  import java.util.zip.GZIPInputStream;
68  import java.util.zip.GZIPOutputStream;
69  
70  /**
71   * Class for evaluating machine learning models. <p/>
72   *
73   * ------------------------------------------------------------------- <p/>
74   *
75   * General options when evaluating a learning scheme from the command-line: <p/>
76   *
77   * -t filename <br/>
78   * Name of the file with the training data. (required) <p/>
79   *
80   * -T filename <br/>
81   * Name of the file with the test data. If missing a cross-validation 
82   * is performed. <p/>
83   *
84   * -c index <br/>
85   * Index of the class attribute (1, 2, ...; default: last). <p/>
86   *
87   * -x number <br/>
88   * The number of folds for the cross-validation (default: 10). <p/>
89   *
90   * -no-cv <br/>
91   * No cross validation.  If no test file is provided, no EvaluationACO
92   * is done. <p/>
93   * 
94   * -split-percentage percentage <br/>
95   * Sets the percentage for the train/test set split, e.g., 66. <p/>
96   * 
97   * -preserve-order <br/>
98   * Preserves the order in the percentage split instead of randomizing
99   * the data first with the seed value ('-s'). <p/>
100  *
101  * -s seed <br/>
102  * Random number seed for the cross-validation and percentage split
103  * (default: 1). <p/>
104  *
105  * -m filename <br/>
106  * The name of a file containing a cost matrix. <p/>
107  *
108  * -l filename <br/>
109  * Loads classifier from the given file. In case the filename ends with ".xml" 
110  * the options are loaded from XML. <p/>
111  *
112  * -d filename <br/>
113  * Saves classifier built from the training data into the given file. In case 
114  * the filename ends with ".xml" the options are saved XML, not the model. <p/>
115  *
116  * -v <br/>
117  * Outputs no statistics for the training data. <p/>
118  *
119  * -o <br/>
120  * Outputs statistics only, not the classifier. <p/>
121  * 
122  * -i <br/>
123  * Outputs information-retrieval statistics per class. <p/>
124  *
125  * -k <br/>
126  * Outputs information-theoretic statistics. <p/>
127  *
128  * -p range <br/>
129  * Outputs predictions for test instances (or the train instances if no test
130  * instances provided), along with the attributes in the specified range 
131  * (and nothing else). Use '-p 0' if no attributes are desired. <p/>
132  * 
133  * -distribution <br/>
134  * Outputs the distribution instead of only the prediction
135  * in conjunction with the '-p' option (only nominal classes). <p/>
136  *
137  * -r <br/>
138  * Outputs cumulative margin distribution (and nothing else). <p/>
139  *
140  * -g <br/> 
141  * Only for classifiers that implement "Graphable." Outputs
142  * the graph representation of the classifier (and nothing
143  * else). <p/>
144  * 
145  * -xml filename | xml-string <br/>
146  * Retrieves the options from the XML-data instead of the command line. <p/>
147  * 
148  * -threshold-file file <br/>
149  * The file to save the threshold data to.
150  * The format is determined by the extensions, e.g., '.arff' for ARFF
151  * format or '.csv' for CSV. <p/>
152  *         
153  * -threshold-label label <br/>
154  * The class label to determine the threshold data for
155  * (default is the first label) <p/>
156  *         
157  * ------------------------------------------------------------------- <p/>
158  *
159  * Example usage as the main of a classifier (called FunkyClassifier):
160  * <code> <pre>
161  * public static void main(String [] args) {
162  *   runClassifier(new FunkyClassifier(), args);
163  * }
164  * </pre> </code> 
165  * <p/>
166  *
167  * ------------------------------------------------------------------ <p/>
168  *
169  * Example usage from within an application:
170  * <code> <pre>
171  * Instances trainInstances = ... instances got from somewhere
172  * Instances testInstances = ... instances got from somewhere
173  * Classifier scheme = ... scheme got from somewhere
174  *
 * EvaluationACO eval = new EvaluationACO(trainInstances);
 * eval.evaluateModel(scheme, testInstances);
 * System.out.println(eval.toSummaryString());
178  * </pre> </code> 
179  *
180  *
181  * @author   Eibe Frank (eibe@cs.waikato.ac.nz)
182  * @author   Len Trigg (trigg@cs.waikato.ac.nz)
183  * @version  $Revision: 1.83 $
184  */
185 
186  class EvaluationACO
187 implements Summarizable {
188 
  /** The number of classes. */
  protected int m_NumClasses;

  /** The number of folds for a cross-validation. */
  protected int m_NumFolds = 10;

  /** The weight of all incorrectly classified instances. */
  protected double m_Incorrect;

  /** The weight of all correctly classified instances. */
  protected double m_Correct;

  /** The weight of all unclassified instances. */
  protected double m_Unclassified;

  /** The weight of all instances that had no class assigned to them. */
  protected double m_MissingClass;

  /** The weight of all instances that had a class assigned to them. */
  protected double m_WithClass;

  /** Array for storing the confusion matrix. */
  protected double [][] m_ConfusionMatrix;

  /** The names of the classes. */
  protected String [] m_ClassNames;

  /** Is the class nominal or numeric? */
  protected boolean m_ClassIsNominal;

  /** The prior probabilities of the classes. */
  protected double [] m_ClassPriors;

  /** The sum of counts for priors. */
  protected double m_ClassPriorsSum;

  /** The cost matrix (if given). */
  protected CostMatrix m_CostMatrix;

  /** The total cost of predictions (includes instance weights). */
  protected double m_TotalCost;

  /** Sum of errors. */
  protected double m_SumErr;

  /** Sum of absolute errors. */
  protected double m_SumAbsErr;

  /** Sum of squared errors. */
  protected double m_SumSqrErr;

  /** Sum of class values. */
  protected double m_SumClass;

  /** Sum of squared class values. */
  protected double m_SumSqrClass;

  /** Sum of predicted values. */
  protected double m_SumPredicted;

  /** Sum of squared predicted values. */
  protected double m_SumSqrPredicted;

  /** Sum of predicted * class values. */
  protected double m_SumClassPredicted;

  /** Sum of absolute errors of the prior. */
  protected double m_SumPriorAbsErr;

  /** Sum of squared errors of the prior. */
  protected double m_SumPriorSqrErr;

  /** Total Kononenko &amp; Bratko information score. */
  protected double m_SumKBInfo;

  /** Resolution (number of bins) of the margin histogram. */
  protected static int k_MarginResolution = 500;

  /** Cumulative margin distribution. */
  protected double m_MarginCounts [];

  /** Number of non-missing class training instances seen. */
  protected int m_NumTrainClassVals;

  /** Array containing all numeric training class values seen. */
  protected double [] m_TrainClassVals;

  /** Array containing all numeric training class weights. */
  protected double [] m_TrainClassWeights;

  /** Numeric class error estimator for the prior. */
  protected Estimator m_PriorErrorEstimator;

  /** Numeric class error estimator for the scheme. */
  protected Estimator m_ErrorEstimator;

  /**
   * The minimum probability accepted from an estimator to avoid
   * taking log(0) in Sf calculations.
   */
  protected static final double MIN_SF_PROB = Double.MIN_VALUE;

  /** Total entropy of prior predictions. */
  protected double m_SumPriorEntropy;

  /** Total entropy of scheme predictions. */
  protected double m_SumSchemeEntropy;

  /** The list of predictions that have been generated (for computing AUC). */
  private FastVector m_Predictions;

  /** Enables/disables the use of priors, e.g., if no training set is
   * present in case of de-serialized schemes. */
  protected boolean m_NoPriors = false;
303 
304   /**
305    * Initializes all the counters for the EvaluationACO. 
306    * Use <code>useNoPriors()</code> if the dataset is the test set and you
307    * can't initialize with the priors from the training set via 
308    * <code>setPriors(Instances)</code>.
309    *
310    * @param data 	set of training instances, to get some header 
311    * 			information and prior class distribution information
312    * @throws Exception 	if the class is not defined
313    * @see 		#useNoPriors()
314    * @see 		#setPriors(Instances)
315    */
  public EvaluationACO(Instances data) throws Exception {

    // Delegates to the two-argument constructor with a null cost matrix,
    // so that default costs are used.
    this(data, null);
  }
322 
323   /**
324    * Initializes all the counters for the EvaluationACO and also takes a
325    * cost matrix as parameter.
326    * Use <code>useNoPriors()</code> if the dataset is the test set and you
327    * can't initialize with the priors from the training set via 
328    * <code>setPriors(Instances)</code>.
329    *
330    * @param data 	set of training instances, to get some header 
331    * 			information and prior class distribution information
332    * @param costMatrix 	the cost matrix---if null, default costs will be used
333    * @throws Exception 	if cost matrix is not compatible with 
334    * 			data, the class is not defined or the class is numeric
335    * @see 		#useNoPriors()
336    * @see 		#setPriors(Instances)
337    */
338   public EvaluationACO(Instances data, CostMatrix costMatrix) 
339   throws Exception {
340 
341     m_NumClasses = data.numClasses();
342     m_NumFolds = 1;
343     m_ClassIsNominal = data.classAttribute().isNominal();
344 
345     if (m_ClassIsNominal) {
346       m_ConfusionMatrix = new double [m_NumClasses][m_NumClasses];
347       m_ClassNames = new String [m_NumClasses];
348       for(int i = 0; i < m_NumClasses; i++) {
349 	m_ClassNames[i] = data.classAttribute().value(i);
350       }
351     }
352     m_CostMatrix = costMatrix;
353     if (m_CostMatrix != null) {
354       if (!m_ClassIsNominal) {
355 	throw new Exception("Class has to be nominal if cost matrix " + 
356 	"given!");
357       }
358       if (m_CostMatrix.size() != m_NumClasses) {
359 	throw new Exception("Cost matrix not compatible with data!");
360       }
361     }
362     m_ClassPriors = new double [m_NumClasses];
363     setPriors(data);
364     m_MarginCounts = new double [k_MarginResolution + 1];
365   }
366 
367   /**
368    * Returns the area under ROC for those predictions that have been collected
369    * in the evaluateClassifier(Classifier, Instances) method. Returns 
370    * Instance.missingValue() if the area is not available.
371    *
372    * @param classIndex the index of the class to consider as "positive"
373    * @return the area under the ROC curve or not a number
374    */
375   public double areaUnderROC(int classIndex) {
376 
377     // Check if any predictions have been collected
378     if (m_Predictions == null) {
379       return Instance.missingValue();
380     } else {
381       ThresholdCurve tc = new ThresholdCurve();
382       Instances result = tc.getCurve(m_Predictions, classIndex);
383       return ThresholdCurve.getROCArea(result);
384     }
385   }
386 
387   /**
388    * Returns a copy of the confusion matrix.
389    *
390    * @return a copy of the confusion matrix as a two-dimensional array
391    */
392   public double[][] confusionMatrix() {
393 
394     double[][] newMatrix = new double[m_ConfusionMatrix.length][0];
395 
396     for (int i = 0; i < m_ConfusionMatrix.length; i++) {
397       newMatrix[i] = new double[m_ConfusionMatrix[i].length];
398       System.arraycopy(m_ConfusionMatrix[i], 0, newMatrix[i], 0,
399 	  m_ConfusionMatrix[i].length);
400     }
401     return newMatrix;
402   }
403 
404   /**
405    * Performs a (stratified if class is nominal) cross-validation 
406    * for a classifier on a set of instances. Now performs
407    * a deep copy of the classifier before each call to 
408    * buildClassifier() (just in case the classifier is not
409    * initialized properly).
410    *
411    * @param classifier the classifier with any options set.
412    * @param data the data on which the cross-validation is to be 
413    * performed 
414    * @param numFolds the number of folds for the cross-validation
415    * @param random random number generator for randomization 
416    * @throws Exception if a classifier could not be generated 
417    * successfully or the class is not defined
418    */
419   public void crossValidateModel(Classifier classifier,
420       Instances data, int numFolds, Random random) 
421   throws Exception {
422 
423     // Make a copy of the data we can reorder
424     data = new Instances(data);
425     data.randomize(random);
426     if (data.classAttribute().isNominal()) {
427       data.stratify(numFolds);
428     }
429     // Do the folds
430     for (int i = 0; i < numFolds; i++) {
431       Instances train = data.trainCV(numFolds, i, random);
432       setPriors(train);
433       Classifier copiedClassifier = Classifier.makeCopy(classifier);
434       copiedClassifier.buildClassifier(train);
435       Instances test = data.testCV(numFolds, i);
436       evaluateModel(copiedClassifier, test);
437     }
438     m_NumFolds = numFolds;
439   }
440 
441   // ******************************   yo
442   
443     public void
444     validateACOmodel(Classifier bc ,   Instances trainigData,
445 				Instances testingData)   throws Exception {
446 	
447       setPriors(trainigData);
448      
449       bc.buildClassifier(trainigData);
450       
451       evaluateModel(bc, testingData);
452    
453     //m_NumFolds = numFolds; //set to deafault 10
454   }
455   
456   
457   /////////// *************************************************************************************
458   
459   /**
460    * Performs a (stratified if class is nominal) cross-validation 
461    * for a classifier on a set of instances.
462    *
463    * @param classifierString a string naming the class of the classifier
464    * @param data the data on which the cross-validation is to be 
465    * performed 
466    * @param numFolds the number of folds for the cross-validation
467    * @param options the options to the classifier. Any options
468    * @param random the random number generator for randomizing the data
469    * accepted by the classifier will be removed from this array.
470    * @throws Exception if a classifier could not be generated 
471    * successfully or the class is not defined
472    */
473   public void crossValidateModel(String classifierString,
474       Instances data, int numFolds,
475       String[] options, Random random) 
476   throws Exception {
477 
478     crossValidateModel(Classifier.forName(classifierString, options),
479 	data, numFolds, random);
480   }
481 
482   /**
483    * Evaluates a classifier with the options given in an array of
484    * strings. <p/>
485    *
486    * Valid options are: <p/>
487    *
488    * -t filename <br/>
489    * Name of the file with the training data. (required) <p/>
490    *
491    * -T filename <br/>
492    * Name of the file with the test data. If missing a cross-validation 
493    * is performed. <p/>
494    *
495    * -c index <br/>
496    * Index of the class attribute (1, 2, ...; default: last). <p/>
497    *
498    * -x number <br/>
499    * The number of folds for the cross-validation (default: 10). <p/>
500    *
501    * -no-cv <br/>
502    * No cross validation.  If no test file is provided, no EvaluationACO
503    * is done. <p/>
504    * 
505    * -split-percentage percentage <br/>
506    * Sets the percentage for the train/test set split, e.g., 66. <p/>
507    * 
508    * -preserve-order <br/>
509    * Preserves the order in the percentage split instead of randomizing
510    * the data first with the seed value ('-s'). <p/>
511    *
512    * -s seed <br/>
513    * Random number seed for the cross-validation and percentage split
514    * (default: 1). <p/>
515    *
516    * -m filename <br/>
517    * The name of a file containing a cost matrix. <p/>
518    *
519    * -l filename <br/>
520    * Loads classifier from the given file. In case the filename ends with
521    * ".xml" the options are loaded from XML. <p/>
522    *
523    * -d filename <br/>
524    * Saves classifier built from the training data into the given file. In case 
525    * the filename ends with ".xml" the options are saved XML, not the model. <p/>
526    *
527    * -v <br/>
528    * Outputs no statistics for the training data. <p/>
529    *
530    * -o <br/>
531    * Outputs statistics only, not the classifier. <p/>
532    * 
533    * -i <br/>
534    * Outputs detailed information-retrieval statistics per class. <p/>
535    *
536    * -k <br/>
537    * Outputs information-theoretic statistics. <p/>
538    *
539    * -p range <br/>
540    * Outputs predictions for test instances (or the train instances if no test
541    * instances provided), along with the attributes in the specified range (and 
542    *  nothing else). Use '-p 0' if no attributes are desired. <p/>
543    *
544    * -distribution <br/>
545    * Outputs the distribution instead of only the prediction
546    * in conjunction with the '-p' option (only nominal classes). <p/>
547    *
548    * -r <br/>
549    * Outputs cumulative margin distribution (and nothing else). <p/>
550    *
551    * -g <br/> 
552    * Only for classifiers that implement "Graphable." Outputs
553    * the graph representation of the classifier (and nothing
554    * else). <p/>
555    *
556    * -xml filename | xml-string <br/>
557    * Retrieves the options from the XML-data instead of the command line. <p/>
558    * 
559    * -threshold-file file <br/>
560    * The file to save the threshold data to.
561    * The format is determined by the extensions, e.g., '.arff' for ARFF
562    * format or '.csv' for CSV. <p/>
563    *         
564    * -threshold-label label <br/>
565    * The class label to determine the threshold data for
566    * (default is the first label) <p/>
567    *
568    * @param classifierString class of machine learning classifier as a string
569    * @param options the array of string containing the options
570    * @throws Exception if model could not be evaluated successfully
571    * @return a string describing the results 
572    */
573   public static String evaluateModel(String classifierString, 
574       String [] options) throws Exception {
575 
576     Classifier classifier;	 
577 
578     // Create classifier
579     try {
580       classifier = 
581 	(Classifier)Class.forName(classifierString).newInstance();
582     } catch (Exception e) {
583       throw new Exception("Can't find class with name " 
584 	  + classifierString + '.');
585     }
586     return evaluateModel(classifier, options);
587   }
588 
589   /**
590    * A test method for this class. Just extracts the first command line
591    * argument as a classifier class name and calls evaluateModel.
592    * @param args an array of command line arguments, the first of which
593    * must be the class name of a classifier.
594    */
595   public static void main(String [] args) {
596 
597     try {
598       if (args.length == 0) {
599 	throw new Exception("The first argument must be the class name"
600 	    + " of a classifier");
601       }
602       String classifier = args[0];
603       args[0] = "";
604       System.out.println(evaluateModel(classifier, args));
605     } catch (Exception ex) {
606       ex.printStackTrace();
607       System.err.println(ex.getMessage());
608     }
609   }
610 
611   /**
612    * Evaluates a classifier with the options given in an array of
613    * strings. <p/>
614    *
615    * Valid options are: <p/>
616    *
617    * -t name of training file <br/>
618    * Name of the file with the training data. (required) <p/>
619    *
620    * -T name of test file <br/>
621    * Name of the file with the test data. If missing a cross-validation 
622    * is performed. <p/>
623    *
624    * -c class index <br/>
625    * Index of the class attribute (1, 2, ...; default: last). <p/>
626    *
627    * -x number of folds <br/>
628    * The number of folds for the cross-validation (default: 10). <p/>
629    *
630    * -no-cv <br/>
631    * No cross validation.  If no test file is provided, no EvaluationACO
632    * is done. <p/>
633    * 
634    * -split-percentage percentage <br/>
635    * Sets the percentage for the train/test set split, e.g., 66. <p/>
636    * 
637    * -preserve-order <br/>
638    * Preserves the order in the percentage split instead of randomizing
639    * the data first with the seed value ('-s'). <p/>
640    *
641    * -s seed <br/>
642    * Random number seed for the cross-validation and percentage split
643    * (default: 1). <p/>
644    *
645    * -m file with cost matrix <br/>
646    * The name of a file containing a cost matrix. <p/>
647    *
648    * -l filename <br/>
649    * Loads classifier from the given file. In case the filename ends with
650    * ".xml" the options are loaded from XML. <p/>
651    *
652    * -d filename <br/>
653    * Saves classifier built from the training data into the given file. In case 
654    * the filename ends with ".xml" the options are saved XML, not the model. <p/>
655    *
656    * -v <br/>
657    * Outputs no statistics for the training data. <p/>
658    *
659    * -o <br/>
660    * Outputs statistics only, not the classifier. <p/>
661    * 
662    * -i <br/>
663    * Outputs detailed information-retrieval statistics per class. <p/>
664    *
665    * -k <br/>
666    * Outputs information-theoretic statistics. <p/>
667    *
668    * -p range <br/>
669    * Outputs predictions for test instances (or the train instances if no test
670    * instances provided), along with the attributes in the specified range 
671    * (and nothing else). Use '-p 0' if no attributes are desired. <p/>
672    *
673    * -distribution <br/>
674    * Outputs the distribution instead of only the prediction
675    * in conjunction with the '-p' option (only nominal classes). <p/>
676    *
677    * -r <br/>
678    * Outputs cumulative margin distribution (and nothing else). <p/>
679    *
680    * -g <br/> 
681    * Only for classifiers that implement "Graphable." Outputs
682    * the graph representation of the classifier (and nothing
683    * else). <p/>
684    *
685    * -xml filename | xml-string <br/>
686    * Retrieves the options from the XML-data instead of the command line. <p/>
687    *
688    * @param classifier machine learning classifier
689    * @param options the array of string containing the options
690    * @throws Exception if model could not be evaluated successfully
691    * @return a string describing the results 
692    */
693   public static String evaluateModel(Classifier classifier,
694       String [] options) throws Exception {
695 
696     Instances train = null, tempTrain, test = null, template = null;
697     int seed = 1, folds = 10, classIndex = -1;
698     boolean noCrossValidation = false;
699     String trainFileName, testFileName, sourceClass, 
700     classIndexString, seedString, foldsString, objectInputFileName, 
701     objectOutputFileName, attributeRangeString;
702     boolean noOutput = false,
703     printClassifications = false, trainStatistics = true,
704     printMargins = false, printComplexityStatistics = false,
705     printGraph = false, classStatistics = false, printSource = false;
706     StringBuffer text = new StringBuffer();
707     DataSource trainSource = null, testSource = null;
708     ObjectInputStream objectInputStream = null;
709     BufferedInputStream xmlInputStream = null;
710     CostMatrix costMatrix = null;
711     StringBuffer schemeOptionsText = null;
712     Range attributesToOutput = null;
713     long trainTimeStart = 0, trainTimeElapsed = 0,
714     testTimeStart = 0, testTimeElapsed = 0;
715     String xml = "";
716     String[] optionsTmp = null;
717     Classifier classifierBackup;
718     Classifier classifierClassifications = null;
719     boolean printDistribution = false;
720     int actualClassIndex = -1;  // 0-based class index
721     String splitPercentageString = "";
722     int splitPercentage = -1;
723     boolean preserveOrder = false;
724     boolean trainSetPresent = false;
725     boolean testSetPresent = false;
726     String thresholdFile;
727     String thresholdLabel;
728 
729     // help requested?
730     if (Utils.getFlag("h", options) || Utils.getFlag("help", options)) {
731       throw new Exception("\nHelp requested." + makeOptionString(classifier));
732     }
733     
734     try {
735       // do we get the input from XML instead of normal parameters?
736       xml = Utils.getOption("xml", options);
737       if (!xml.equals(""))
738 	options = new XMLOptions(xml).toArray();
739 
740       // is the input model only the XML-Options, i.e. w/o built model?
741       optionsTmp = new String[options.length];
742       for (int i = 0; i < options.length; i++)
743 	optionsTmp[i] = options[i];
744 
745       if (Utils.getOption('l', optionsTmp).toLowerCase().endsWith(".xml")) {
746 	// load options from serialized data ('-l' is automatically erased!)
747 	XMLClassifier xmlserial = new XMLClassifier();
748 	Classifier cl = (Classifier) xmlserial.read(Utils.getOption('l', options));
749 	// merge options
750 	optionsTmp = new String[options.length + cl.getOptions().length];
751 	System.arraycopy(cl.getOptions(), 0, optionsTmp, 0, cl.getOptions().length);
752 	System.arraycopy(options, 0, optionsTmp, cl.getOptions().length, options.length);
753 	options = optionsTmp;
754       }
755 
756       noCrossValidation = Utils.getFlag("no-cv", options);
757       // Get basic options (options the same for all schemes)
758       classIndexString = Utils.getOption('c', options);
759       if (classIndexString.length() != 0) {
760 	if (classIndexString.equals("first"))
761 	  classIndex = 1;
762 	else if (classIndexString.equals("last"))
763 	  classIndex = -1;
764 	else
765 	  classIndex = Integer.parseInt(classIndexString);
766       }
767       trainFileName = Utils.getOption('t', options); 
768       objectInputFileName = Utils.getOption('l', options);
769       objectOutputFileName = Utils.getOption('d', options);
770       testFileName = Utils.getOption('T', options);
771       foldsString = Utils.getOption('x', options);
772       if (foldsString.length() != 0) {
773 	folds = Integer.parseInt(foldsString);
774       }
775       seedString = Utils.getOption('s', options);
776       if (seedString.length() != 0) {
777 	seed = Integer.parseInt(seedString);
778       }
779       if (trainFileName.length() == 0) {
780 	if (objectInputFileName.length() == 0) {
781 	  throw new Exception("No training file and no object "+
782 	  "input file given.");
783 	} 
784 	if (testFileName.length() == 0) {
785 	  throw new Exception("No training file and no test "+
786 	  "file given.");
787 	}
788       } else if ((objectInputFileName.length() != 0) &&
789 	  ((!(classifier instanceof UpdateableClassifier)) ||
790 	      (testFileName.length() == 0))) {
791 	throw new Exception("Classifier not incremental, or no " +
792 	    "test file provided: can't "+
793 	"use both train and model file.");
794       }
795       try {
796 	if (trainFileName.length() != 0) {
797 	  trainSetPresent = true;
798 	  trainSource = new DataSource(trainFileName);
799 	}
800 	if (testFileName.length() != 0) {
801 	  testSetPresent = true;
802 	  testSource = new DataSource(testFileName);
803 	}
804 	if (objectInputFileName.length() != 0) {
805 	  InputStream is = new FileInputStream(objectInputFileName);
806 	  if (objectInputFileName.endsWith(".gz")) {
807 	    is = new GZIPInputStream(is);
808 	  }
809 	  // load from KOML?
810 	  if (!(objectInputFileName.endsWith(".koml") && KOML.isPresent()) ) {
811 	    objectInputStream = new ObjectInputStream(is);
812 	    xmlInputStream    = null;
813 	  }
814 	  else {
815 	    objectInputStream = null;
816 	    xmlInputStream    = new BufferedInputStream(is);
817 	  }
818 	}
819       } catch (Exception e) {
820 	throw new Exception("Can't open file " + e.getMessage() + '.');
821       }
822       if (testSetPresent) {
823 	template = test = testSource.getStructure();
824 	if (classIndex != -1) {
825 	  test.setClassIndex(classIndex - 1);
826 	} else {
827 	  if ( (test.classIndex() == -1) || (classIndexString.length() != 0) )
828 	    test.setClassIndex(test.numAttributes() - 1);
829 	}
830 	actualClassIndex = test.classIndex();
831       }
832       else {
833 	// percentage split
834 	splitPercentageString = Utils.getOption("split-percentage", options);
835 	if (splitPercentageString.length() != 0) {
836 	  if (foldsString.length() != 0)
837 	    throw new Exception(
838 		"Percentage split cannot be used in conjunction with "
839 		+ "cross-validation ('-x').");
840 	  splitPercentage = Integer.parseInt(splitPercentageString);
841 	  if ((splitPercentage <= 0) || (splitPercentage >= 100))
842 	    throw new Exception("Percentage split value needs be >0 and <100.");
843 	}
844 	else {
845 	  splitPercentage = -1;
846 	}
847 	preserveOrder = Utils.getFlag("preserve-order", options);
848 	if (preserveOrder) {
849 	  if (splitPercentage == -1)
850 	    throw new Exception("Percentage split ('-percentage-split') is missing.");
851 	}
852 	// create new train/test sources
853 	if (splitPercentage > 0) {
854 	  testSetPresent = true;
855 	  Instances tmpInst = trainSource.getDataSet(actualClassIndex);
856 	  if (!preserveOrder)
857 	    tmpInst.randomize(new Random(seed));
858 	  int trainSize = tmpInst.numInstances() * splitPercentage / 100;
859 	  int testSize  = tmpInst.numInstances() - trainSize;
860 	  Instances trainInst = new Instances(tmpInst, 0, trainSize);
861 	  Instances testInst  = new Instances(tmpInst, trainSize, testSize);
862 	  trainSource = new DataSource(trainInst);
863 	  testSource  = new DataSource(testInst);
864 	  template = test = testSource.getStructure();
865 	  if (classIndex != -1) {
866 	    test.setClassIndex(classIndex - 1);
867 	  } else {
868 	    if ( (test.classIndex() == -1) || (classIndexString.length() != 0) )
869 	      test.setClassIndex(test.numAttributes() - 1);
870 	  }
871 	  actualClassIndex = test.classIndex();
872 	}
873       }
874       if (trainSetPresent) {
875 	template = train = trainSource.getStructure();
876 	if (classIndex != -1) {
877 	  train.setClassIndex(classIndex - 1);
878 	} else {
879 	  if ( (train.classIndex() == -1) || (classIndexString.length() != 0) )
880 	    train.setClassIndex(train.numAttributes() - 1);
881 	}
882 	actualClassIndex = train.classIndex();
883 	if ((testSetPresent) && !test.equalHeaders(train)) {
884 	  throw new IllegalArgumentException("Train and test file not compatible!");
885 	}
886       }
887       if (template == null) {
888 	throw new Exception("No actual dataset provided to use as template");
889       }
890       costMatrix = handleCostOption(
891 	  Utils.getOption('m', options), template.numClasses());
892 
893       classStatistics = Utils.getFlag('i', options);
894       noOutput = Utils.getFlag('o', options);
895       trainStatistics = !Utils.getFlag('v', options);
896       printComplexityStatistics = Utils.getFlag('k', options);
897       printMargins = Utils.getFlag('r', options);
898       printGraph = Utils.getFlag('g', options);
899       sourceClass = Utils.getOption('z', options);
900       printSource = (sourceClass.length() != 0);
901       printDistribution = Utils.getFlag("distribution", options);
902       thresholdFile = Utils.getOption("threshold-file", options);
903       thresholdLabel = Utils.getOption("threshold-label", options);
904 
905       // Check -p option
906       try {
907 	attributeRangeString = Utils.getOption('p', options);
908       }
909       catch (Exception e) {
910 	throw new Exception(e.getMessage() + "\nNOTE: the -p option has changed. " +
911 	    "It now expects a parameter specifying a range of attributes " +
912 	"to list with the predictions. Use '-p 0' for none.");
913       }
914       if (attributeRangeString.length() != 0) {
915 	printClassifications = true;
916 	if (!attributeRangeString.equals("0")) 
917 	  attributesToOutput = new Range(attributeRangeString);
918       }
919 
920       if (!printClassifications && printDistribution)
921 	throw new Exception("Cannot print distribution without '-p' option!");
922 
923       // if no training file given, we don't have any priors
924       if ( (!trainSetPresent) && (printComplexityStatistics) )
925 	throw new Exception("Cannot print complexity statistics ('-k') without training file ('-t')!");
926 
927       // If a model file is given, we can't process 
928       // scheme-specific options
929       if (objectInputFileName.length() != 0) {
930 	Utils.checkForRemainingOptions(options);
931       } else {
932 
933 	// Set options for classifier
934 	if (classifier instanceof OptionHandler) {
935 	  for (int i = 0; i < options.length; i++) {
936 	    if (options[i].length() != 0) {
937 	      if (schemeOptionsText == null) {
938 		schemeOptionsText = new StringBuffer();
939 	      }
940 	      if (options[i].indexOf(' ') != -1) {
941 		schemeOptionsText.append('"' + options[i] + "\" ");
942 	      } else {
943 		schemeOptionsText.append(options[i] + " ");
944 	      }
945 	    }
946 	  }
947 	  ((OptionHandler)classifier).setOptions(options);
948 	}
949       }
950       Utils.checkForRemainingOptions(options);
951     } catch (Exception e) {
952       throw new Exception("\nWeka exception: " + e.getMessage()
953 	  + makeOptionString(classifier));
954     }
955 
956     // Setup up EvaluationACO objects
957     EvaluationACO trainingEvaluationACO = new EvaluationACO(new Instances(template, 0), costMatrix);
958     EvaluationACO testingEvaluationACO = new EvaluationACO(new Instances(template, 0), costMatrix);
959 
960     // disable use of priors if no training file given
961     if (!trainSetPresent)
962       testingEvaluationACO.useNoPriors();
963 
964     if (objectInputFileName.length() != 0) {
965       // Load classifier from file
966       if (objectInputStream != null) {
967 	classifier = (Classifier) objectInputStream.readObject();
968         // try and read a header (if present)
969         Instances savedStructure = null;
970         try {
971           savedStructure = (Instances) objectInputStream.readObject();
972         } catch (Exception ex) {
973           // don't make a fuss
974         }
975         if (savedStructure != null) {
976           // test for compatibility with template
977           if (!template.equalHeaders(savedStructure)) {
978             throw new Exception("training and test set are not compatible");
979           }
980         }
981 	objectInputStream.close();
982       }
983       else {
984 	// whether KOML is available has already been checked (objectInputStream would null otherwise)!
985 	classifier = (Classifier) KOML.read(xmlInputStream);
986 	xmlInputStream.close();
987       }
988     }
989 
990     // backup of fully setup classifier for cross-validation
991     classifierBackup = Classifier.makeCopy(classifier);
992 
993     // Build the classifier if no object file provided
994     if ((classifier instanceof UpdateableClassifier) &&
995 	(testSetPresent) &&
996 	(costMatrix == null) &&
997 	(trainSetPresent)) {
998 
999       // Build classifier incrementally
1000       trainingEvaluationACO.setPriors(train);
1001       testingEvaluationACO.setPriors(train);
1002       trainTimeStart = System.currentTimeMillis();
1003       if (objectInputFileName.length() == 0) {
1004 	classifier.buildClassifier(train);
1005       }
1006       Instance trainInst;
1007       while (trainSource.hasMoreElements(train)) {
1008 	trainInst = trainSource.nextElement(train);
1009 	trainingEvaluationACO.updatePriors(trainInst);
1010 	testingEvaluationACO.updatePriors(trainInst);
1011 	((UpdateableClassifier)classifier).updateClassifier(trainInst);
1012       }
1013       trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
1014     } else if (objectInputFileName.length() == 0) {
1015       // Build classifier in one go
1016       tempTrain = trainSource.getDataSet(actualClassIndex);
1017       trainingEvaluationACO.setPriors(tempTrain);
1018       testingEvaluationACO.setPriors(tempTrain);
1019       trainTimeStart = System.currentTimeMillis();
1020       classifier.buildClassifier(tempTrain);
1021       trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
1022     } 
1023 
1024     // backup of fully trained classifier for printing the classifications
1025     if (printClassifications)
1026       classifierClassifications = Classifier.makeCopy(classifier);
1027 
1028     // Save the classifier if an object output file is provided
1029     if (objectOutputFileName.length() != 0) {
1030       OutputStream os = new FileOutputStream(objectOutputFileName);
1031       // binary
1032       if (!(objectOutputFileName.endsWith(".xml") || (objectOutputFileName.endsWith(".koml") && KOML.isPresent()))) {
1033 	if (objectOutputFileName.endsWith(".gz")) {
1034 	  os = new GZIPOutputStream(os);
1035 	}
1036 	ObjectOutputStream objectOutputStream = new ObjectOutputStream(os);
1037 	objectOutputStream.writeObject(classifier);
1038         if (template != null) {
1039           objectOutputStream.writeObject(template);
1040         }
1041 	objectOutputStream.flush();
1042 	objectOutputStream.close();
1043       }
1044       // KOML/XML
1045       else {
1046 	BufferedOutputStream xmlOutputStream = new BufferedOutputStream(os);
1047 	if (objectOutputFileName.endsWith(".xml")) {
1048 	  XMLSerialization xmlSerial = new XMLClassifier();
1049 	  xmlSerial.write(xmlOutputStream, classifier);
1050 	}
1051 	else
1052 	  // whether KOML is present has already been checked
1053 	  // if not present -> ".koml" is interpreted as binary - see above
1054 	  if (objectOutputFileName.endsWith(".koml")) {
1055 	    KOML.write(xmlOutputStream, classifier);
1056 	  }
1057 	xmlOutputStream.close();
1058       }
1059     }
1060 
1061     // If classifier is drawable output string describing graph
1062     if ((classifier instanceof Drawable) && (printGraph)){
1063       return ((Drawable)classifier).graph();
1064     }
1065 
1066     // Output the classifier as equivalent source
1067     if ((classifier instanceof Sourcable) && (printSource)){
1068       return wekaStaticWrapper((Sourcable) classifier, sourceClass);
1069     }
1070 
1071     // Output model
1072     if (!(noOutput || printMargins)) {
1073       if (classifier instanceof OptionHandler) {
1074 	if (schemeOptionsText != null) {
1075 	  text.append("\nOptions: "+schemeOptionsText);
1076 	  text.append("\n");
1077 	}
1078       }
1079       text.append("\n" + classifier.toString() + "\n");
1080     }
1081 
1082     if (!printMargins && (costMatrix != null)) {
1083       text.append("\n=== EvaluationACO Cost Matrix ===\n\n");
1084       text.append(costMatrix.toString());
1085     }
1086 
1087     // Output test instance predictions only
1088     if (printClassifications) {
1089       DataSource source = testSource;
1090       // no test set -> use train set
1091       if (source == null)
1092 	source = trainSource;
1093       return printClassifications(classifierClassifications, new Instances(template, 0),
1094 	  source, actualClassIndex + 1, attributesToOutput,
1095 	  printDistribution);
1096     }
1097 
1098     // Compute error estimate from training data
1099     if ((trainStatistics) && (trainSetPresent)) {
1100 
1101       if ((classifier instanceof UpdateableClassifier) &&
1102 	  (testSetPresent) &&
1103 	  (costMatrix == null)) {
1104 
1105 	// Classifier was trained incrementally, so we have to 
1106 	// reset the source.
1107 	trainSource.reset();
1108 
1109 	// Incremental testing
1110 	train = trainSource.getStructure(actualClassIndex);
1111 	testTimeStart = System.currentTimeMillis();
1112 	Instance trainInst;
1113 	while (trainSource.hasMoreElements(train)) {
1114 	  trainInst = trainSource.nextElement(train);
1115 	  trainingEvaluationACO.evaluateModelOnce((Classifier)classifier, trainInst);
1116 	}
1117 	testTimeElapsed = System.currentTimeMillis() - testTimeStart;
1118       } else {
1119 	testTimeStart = System.currentTimeMillis();
1120 	trainingEvaluationACO.evaluateModel(
1121 	    classifier, trainSource.getDataSet(actualClassIndex));
1122 	testTimeElapsed = System.currentTimeMillis() - testTimeStart;
1123       }
1124 
1125       // Print the results of the training EvaluationACO
1126       if (printMargins) {
1127 	return trainingEvaluationACO.toCumulativeMarginDistributionString();
1128       } else {
1129 	text.append("\nTime taken to build model: "
1130 	    + Utils.doubleToString(trainTimeElapsed / 1000.0,2)
1131 	    + " seconds");
1132 	
1133 	if (splitPercentage > 0)
1134 	  text.append("\nTime taken to test model on training split: ");
1135 	else
1136 	  text.append("\nTime taken to test model on training data: ");
1137 	text.append(Utils.doubleToString(testTimeElapsed / 1000.0,2) + " seconds");
1138 
1139 	if (splitPercentage > 0)
1140 	  text.append(trainingEvaluationACO.toSummaryString("\n\n=== Error on training"
1141 	      + " split ===\n", printComplexityStatistics));
1142 	else
1143 	  text.append(trainingEvaluationACO.toSummaryString("\n\n=== Error on training"
1144 	      + " data ===\n", printComplexityStatistics));
1145 	
1146 	if (template.classAttribute().isNominal()) {
1147 	  if (classStatistics) {
1148 	    text.append("\n\n" + trainingEvaluationACO.toClassDetailsString());
1149 	  }
1150           if (!noCrossValidation)
1151             text.append("\n\n" + trainingEvaluationACO.toMatrixString());
1152 	}
1153 
1154       }
1155     }
1156 
1157     // Compute proper error estimates
1158     if (testSource != null) {
1159       // Testing is on the supplied test data
1160       Instance testInst;
1161       while (testSource.hasMoreElements(test)) {
1162 	testInst = testSource.nextElement(test);
1163 	testingEvaluationACO.evaluateModelOnceAndRecordPrediction(
1164             (Classifier)classifier, testInst);
1165       }
1166 
1167       if (splitPercentage > 0)
1168 	text.append("\n\n" + testingEvaluationACO.
1169 	    toSummaryString("=== Error on test split ===\n",
1170 		printComplexityStatistics));
1171       else
1172 	text.append("\n\n" + testingEvaluationACO.
1173 	    toSummaryString("=== Error on test data ===\n",
1174 		printComplexityStatistics));
1175 
1176     } else if (trainSource != null) {
1177       if (!noCrossValidation) {
1178 	// Testing is via cross-validation on training data
1179 	Random random = new Random(seed);
1180 	// use untrained (!) classifier for cross-validation
1181 	classifier = Classifier.makeCopy(classifierBackup);
1182 	testingEvaluationACO.crossValidateModel(
1183 	    classifier, trainSource.getDataSet(actualClassIndex), folds, random);
1184 	if (template.classAttribute().isNumeric()) {
1185 	  text.append("\n\n\n" + testingEvaluationACO.
1186 	      toSummaryString("=== Cross-validation ===\n",
1187 		  printComplexityStatistics));
1188 	} else {
1189 	  text.append("\n\n\n" + testingEvaluationACO.
1190 	      toSummaryString("=== Stratified " + 
1191 		  "cross-validation ===\n",
1192 		  printComplexityStatistics));
1193 	}
1194       }
1195     }
1196     if (template.classAttribute().isNominal()) {
1197       if (classStatistics) {
1198 	text.append("\n\n" + testingEvaluationACO.toClassDetailsString());
1199       }
1200       if (!noCrossValidation)
1201         text.append("\n\n" + testingEvaluationACO.toMatrixString());
1202     }
1203 
1204     if ((thresholdFile.length() != 0) && template.classAttribute().isNominal()) {
1205       int labelIndex = 0;
1206       if (thresholdLabel.length() != 0)
1207 	labelIndex = template.classAttribute().indexOfValue(thresholdLabel);
1208       if (labelIndex == -1)
1209 	throw new IllegalArgumentException(
1210 	    "Class label '" + thresholdLabel + "' is unknown!");
1211       ThresholdCurve tc = new ThresholdCurve();
1212       Instances result = tc.getCurve(testingEvaluationACO.predictions(), labelIndex);
1213       DataSink.write(thresholdFile, result);
1214     }
1215     
1216     return text.toString();
1217   }
1218 
1219   /**
1220    * Attempts to load a cost matrix.
1221    *
1222    * @param costFileName the filename of the cost matrix
1223    * @param numClasses the number of classes that should be in the cost matrix
1224    * (only used if the cost file is in old format).
1225    * @return a <code>CostMatrix</code> value, or null if costFileName is empty
1226    * @throws Exception if an error occurs.
1227    */
1228   protected static CostMatrix handleCostOption(String costFileName, 
1229       int numClasses) 
1230   throws Exception {
1231 
1232     if ((costFileName != null) && (costFileName.length() != 0)) {
1233       System.out.println(
1234 	  "NOTE: The behaviour of the -m option has changed between WEKA 3.0"
1235 	  +" and WEKA 3.1. -m now carries out cost-sensitive *EvaluationACO*"
1236 	  +" only. For cost-sensitive *prediction*, use one of the"
1237 	  +" cost-sensitive metaschemes such as"
1238 	  +" weka.classifiers.meta.CostSensitiveClassifier or"
1239 	  +" weka.classifiers.meta.MetaCost");
1240 
1241       Reader costReader = null;
1242       try {
1243 	costReader = new BufferedReader(new FileReader(costFileName));
1244       } catch (Exception e) {
1245 	throw new Exception("Can't open file " + e.getMessage() + '.');
1246       }
1247       try {
1248 	// First try as a proper cost matrix format
1249 	return new CostMatrix(costReader);
1250       } catch (Exception ex) {
1251 	try {
1252 	  // Now try as the poxy old format :-)
1253 	  //System.err.println("Attempting to read old format cost file");
1254 	  try {
1255 	    costReader.close(); // Close the old one
1256 	    costReader = new BufferedReader(new FileReader(costFileName));
1257 	  } catch (Exception e) {
1258 	    throw new Exception("Can't open file " + e.getMessage() + '.');
1259 	  }
1260 	  CostMatrix costMatrix = new CostMatrix(numClasses);
1261 	  //System.err.println("Created default cost matrix");
1262 	  costMatrix.readOldFormat(costReader);
1263 	  return costMatrix;
1264 	  //System.err.println("Read old format");
1265 	} catch (Exception e2) {
1266 	  // re-throw the original exception
1267 	  //System.err.println("Re-throwing original exception");
1268 	  throw ex;
1269 	}
1270       }
1271     } else {
1272       return null;
1273     }
1274   }
1275 
1276   /**
1277    * Evaluates the classifier on a given set of instances. Note that
1278    * the data must have exactly the same format (e.g. order of
1279    * attributes) as the data used to train the classifier! Otherwise
1280    * the results will generally be meaningless.
1281    *
1282    * @param classifier machine learning classifier
1283    * @param data set of test instances for EvaluationACO
1284    * @return the predictions
1285    * @throws Exception if model could not be evaluated 
1286    * successfully 
1287    */
1288   public double[] evaluateModel(Classifier classifier,
1289       Instances data) throws Exception {
1290 
1291     double predictions[] = new double[data.numInstances()];
1292 
1293     // Need to be able to collect predictions if appropriate (for AUC)
1294 
1295     for (int i = 0; i < data.numInstances(); i++) {
1296       predictions[i] = evaluateModelOnceAndRecordPrediction((Classifier)classifier, 
1297 	  data.instance(i));
1298     }
1299 
1300     return predictions;
1301   }
1302 
1303   /**
1304    * Evaluates the classifier on a single instance and records the
1305    * prediction (if the class is nominal).
1306    *
1307    * @param classifier machine learning classifier
1308    * @param instance the test instance to be classified
1309    * @return the prediction made by the clasifier
1310    * @throws Exception if model could not be evaluated 
1311    * successfully or the data contains string attributes
1312    */
1313   public double evaluateModelOnceAndRecordPrediction(Classifier classifier,
1314       Instance instance) throws Exception {
1315 
1316     Instance classMissing = (Instance)instance.copy();
1317     double pred = 0;
1318     classMissing.setDataset(instance.dataset());
1319     classMissing.setClassMissing();
1320     if (m_ClassIsNominal) {
1321       if (m_Predictions == null) {
1322 	m_Predictions = new FastVector();
1323       }
1324       double [] dist = classifier.distributionForInstance(classMissing);
1325       pred = Utils.maxIndex(dist);
1326       if (dist[(int)pred] <= 0) {
1327 	pred = Instance.missingValue();
1328       }
1329       updateStatsForClassifier(dist, instance);
1330       m_Predictions.addElement(new NominalPrediction(instance.classValue(), dist, 
1331 	  instance.weight()));
1332     } else {
1333       pred = classifier.classifyInstance(classMissing);
1334       updateStatsForPredictor(pred, instance);
1335     }
1336     return pred;
1337   }
1338 
1339   /**
1340    * Evaluates the classifier on a single instance.
1341    *
1342    * @param classifier machine learning classifier
1343    * @param instance the test instance to be classified
1344    * @return the prediction made by the clasifier
1345    * @throws Exception if model could not be evaluated 
1346    * successfully or the data contains string attributes
1347    */
1348   public double evaluateModelOnce(Classifier classifier,
1349       Instance instance) throws Exception {
1350 
1351     Instance classMissing = (Instance)instance.copy();
1352     double pred = 0;
1353     classMissing.setDataset(instance.dataset());
1354     classMissing.setClassMissing();
1355     if (m_ClassIsNominal) {
1356       double [] dist = classifier.distributionForInstance(classMissing);
1357       pred = Utils.maxIndex(dist);
1358       if (dist[(int)pred] <= 0) {
1359 	pred = Instance.missingValue();
1360       }
1361       updateStatsForClassifier(dist, instance);
1362     } else {
1363       pred = classifier.classifyInstance(classMissing);
1364       updateStatsForPredictor(pred, instance);
1365     }
1366     return pred;
1367   }
1368 
1369   /**
1370    * Evaluates the supplied distribution on a single instance.
1371    *
1372    * @param dist the supplied distribution
1373    * @param instance the test instance to be classified
1374    * @return the prediction
1375    * @throws Exception if model could not be evaluated 
1376    * successfully
1377    */
1378   public double evaluateModelOnce(double [] dist, 
1379       Instance instance) throws Exception {
1380     double pred;
1381     if (m_ClassIsNominal) {
1382       pred = Utils.maxIndex(dist);
1383       if (dist[(int)pred] <= 0) {
1384 	pred = Instance.missingValue();
1385       }
1386       updateStatsForClassifier(dist, instance);
1387     } else {
1388       pred = dist[0];
1389       updateStatsForPredictor(pred, instance);
1390     }
1391     return pred;
1392   }
1393 
1394   /**
1395    * Evaluates the supplied distribution on a single instance.
1396    *
1397    * @param dist the supplied distribution
1398    * @param instance the test instance to be classified
1399    * @return the prediction
1400    * @throws Exception if model could not be evaluated 
1401    * successfully
1402    */
1403   public double evaluateModelOnceAndRecordPrediction(double [] dist, 
1404       Instance instance) throws Exception {
1405     double pred;
1406     if (m_ClassIsNominal) {
1407       if (m_Predictions == null) {
1408 	m_Predictions = new FastVector();
1409       }
1410       pred = Utils.maxIndex(dist);
1411       if (dist[(int)pred] <= 0) {
1412 	pred = Instance.missingValue();
1413       }
1414       updateStatsForClassifier(dist, instance);
1415       m_Predictions.addElement(new NominalPrediction(instance.classValue(), dist, 
1416 	  instance.weight()));
1417     } else {
1418       pred = dist[0];
1419       updateStatsForPredictor(pred, instance);
1420     }
1421     return pred;
1422   }
1423 
1424   /**
1425    * Evaluates the supplied prediction on a single instance.
1426    *
1427    * @param prediction the supplied prediction
1428    * @param instance the test instance to be classified
1429    * @throws Exception if model could not be evaluated 
1430    * successfully
1431    */
1432   public void evaluateModelOnce(double prediction,
1433       Instance instance) throws Exception {
1434 
1435     if (m_ClassIsNominal) {
1436       updateStatsForClassifier(makeDistribution(prediction), 
1437 	  instance);
1438     } else {
1439       updateStatsForPredictor(prediction, instance);
1440     }
1441   }
1442 
1443   /**
1444    * Returns the predictions that have been collected.
1445    *
1446    * @return a reference to the FastVector containing the predictions
1447    * that have been collected. This should be null if no predictions
1448    * have been collected (e.g. if the class is numeric).
1449    */
1450   public FastVector predictions() {
1451 
1452     return m_Predictions;
1453   }
1454 
1455   /**
1456    * Wraps a static classifier in enough source to test using the weka
1457    * class libraries.
1458    *
1459    * @param classifier a Sourcable Classifier
1460    * @param className the name to give to the source code class
1461    * @return the source for a static classifier that can be tested with
1462    * weka libraries.
1463    * @throws Exception if code-generation fails
1464    */
1465   public static String wekaStaticWrapper(Sourcable classifier, String className)     
1466     throws Exception {
1467 
1468     StringBuffer result = new StringBuffer();
1469     String staticClassifier = classifier.toSource(className);
1470     
1471     result.append("// Generated with Weka " + Version.VERSION + "\n");
1472     result.append("//\n");
1473     result.append("// This code is public domain and comes with no warranty.\n");
1474     result.append("//\n");
1475     result.append("// Timestamp: " + new Date() + "\n");
1476     result.append("\n");
1477     result.append("package weka.classifiers;\n");
1478     result.append("\n");
1479     result.append("import weka.core.Attribute;\n");
1480     result.append("import weka.core.Capabilities;\n");
1481     result.append("import weka.core.Capabilities.Capability;\n");
1482     result.append("import weka.core.Instance;\n");
1483     result.append("import weka.core.Instances;\n");
1484     result.append("import weka.classifiers.Classifier;\n");
1485     result.append("\n");
1486     result.append("public class WekaWrapper\n");
1487     result.append("  extends Classifier {\n");
1488     
1489     // globalInfo
1490     result.append("\n");
1491     result.append("  /**\n");
1492     result.append("   * Returns only the toString() method.\n");
1493     result.append("   *\n");
1494     result.append("   * @return a string describing the classifier\n");
1495     result.append("   */\n");
1496     result.append("  public String globalInfo() {\n");
1497     result.append("    return toString();\n");
1498     result.append("  }\n");
1499     
1500     // getCapabilities
1501     result.append("\n");
1502     result.append("  /**\n");
1503     result.append("   * Returns the capabilities of this classifier.\n");
1504     result.append("   *\n");
1505     result.append("   * @return the capabilities\n");
1506     result.append("   */\n");
1507     result.append("  public Capabilities getCapabilities() {\n");
1508     result.append(((Classifier) classifier).getCapabilities().toSource("result", 4));
1509     result.append("    return result;\n");
1510     result.append("  }\n");
1511     
1512     // buildClassifier
1513     result.append("\n");
1514     result.append("  /**\n");
1515     result.append("   * only checks the data against its capabilities.\n");
1516     result.append("   *\n");
1517     result.append("   * @param i the training data\n");
1518     result.append("   */\n");
1519     result.append("  public void buildClassifier(Instances i) throws Exception {\n");
1520     result.append("    // can classifier handle the data?\n");
1521     result.append("    getCapabilities().testWithFail(i);\n");
1522     result.append("  }\n");
1523     
1524     // classifyInstance
1525     result.append("\n");
1526     result.append("  /**\n");
1527     result.append("   * Classifies the given instance.\n");
1528     result.append("   *\n");
1529     result.append("   * @param i the instance to classify\n");
1530     result.append("   * @return the classification result\n");
1531     result.append("   */\n");
1532     result.append("  public double classifyInstance(Instance i) throws Exception {\n");
1533     result.append("    Object[] s = new Object[i.numAttributes()];\n");
1534     result.append("    \n");
1535     result.append("    for (int j = 0; j < s.length; j++) {\n");
1536     result.append("      if (!i.isMissing(j)) {\n");
1537     result.append("        if (i.attribute(j).isNominal())\n");
1538     result.append("          s[j] = new String(i.stringValue(j));\n");
1539     result.append("        else if (i.attribute(j).isNumeric())\n");
1540     result.append("          s[j] = new Double(i.value(j));\n");
1541     result.append("      }\n");
1542     result.append("    }\n");
1543     result.append("    \n");
1544     result.append("    // set class value to missing\n");
1545     result.append("    s[i.classIndex()] = null;\n");
1546     result.append("    \n");
1547     result.append("    return " + className + ".classify(s);\n");
1548     result.append("  }\n");
1549     
1550     // toString
1551     result.append("\n");
1552     result.append("  /**\n");
1553     result.append("   * Returns only the classnames and what classifier it is based on.\n");
1554     result.append("   *\n");
1555     result.append("   * @return a short description\n");
1556     result.append("   */\n");
1557     result.append("  public String toString() {\n");
1558     result.append("    return \"Auto-generated classifier wrapper, based on " 
1559 	+ classifier.getClass().getName() + " (generated with Weka " + Version.VERSION + ").\\n" 
1560 	+ "\" + this.getClass().getName() + \"/" + className + "\";\n");
1561     result.append("  }\n");
1562     
1563     // main
1564     result.append("\n");
1565     result.append("  /**\n");
1566     result.append("   * Runs the classfier from commandline.\n");
1567     result.append("   *\n");
1568     result.append("   * @param args the commandline arguments\n");
1569     result.append("   */\n");
1570     result.append("  public static void main(String args[]) {\n");
1571     result.append("    runClassifier(new WekaWrapper(), args);\n");
1572     result.append("  }\n");
1573     result.append("}\n");
1574     
1575     // actual classifier code
1576     result.append("\n");
1577     result.append(staticClassifier);
1578     
1579     return result.toString();
1580   }
1581 
1582   /**
1583    * Gets the number of test instances that had a known class value
1584    * (actually the sum of the weights of test instances with known 
1585    * class value).
1586    *
1587    * @return the number of test instances with known class
1588    */
1589   public final double numInstances() {
1590 
1591     return m_WithClass;
1592   }
1593 
1594   /**
1595    * Gets the number of instances incorrectly classified (that is, for
1596    * which an incorrect prediction was made). (Actually the sum of the weights
1597    * of these instances)
1598    *
1599    * @return the number of incorrectly classified instances 
1600    */
1601   public final double incorrect() {
1602 
1603     return m_Incorrect;
1604   }
1605 
1606   /**
1607    * Gets the percentage of instances incorrectly classified (that is, for
1608    * which an incorrect prediction was made).
1609    *
1610    * @return the percent of incorrectly classified instances 
1611    * (between 0 and 100)
1612    */
1613   public final double pctIncorrect() {
1614 
1615     return 100 * m_Incorrect / m_WithClass;
1616   }
1617 
1618   /**
1619    * Gets the total cost, that is, the cost of each prediction times the
1620    * weight of the instance, summed over all instances.
1621    *
1622    * @return the total cost
1623    */
1624   public final double totalCost() {
1625 
1626     return m_TotalCost;
1627   }
1628 
1629   /**
1630    * Gets the average cost, that is, total cost of misclassifications
1631    * (incorrect plus unclassified) over the total number of instances.
1632    *
1633    * @return the average cost.  
1634    */
1635   public final double avgCost() {
1636 
1637     return m_TotalCost / m_WithClass;
1638   }
1639 
1640   /**
1641    * Gets the number of instances correctly classified (that is, for
1642    * which a correct prediction was made). (Actually the sum of the weights
1643    * of these instances)
1644    *
1645    * @return the number of correctly classified instances
1646    */
1647   public final double correct() {
1648 
1649     return m_Correct;
1650   }
1651 
1652   /**
1653    * Gets the percentage of instances correctly classified (that is, for
1654    * which a correct prediction was made).
1655    *
1656    * @return the percent of correctly classified instances (between 0 and 100)
1657    */
1658   public final double pctCorrect() {
1659 
1660     return 100 * m_Correct / m_WithClass;
1661   }
1662 
1663   /**
1664    * Gets the number of instances not classified (that is, for
1665    * which no prediction was made by the classifier). (Actually the sum
1666    * of the weights of these instances)
1667    *
1668    * @return the number of unclassified instances
1669    */
1670   public final double unclassified() {
1671 
1672     return m_Unclassified;
1673   }
1674 
1675   /**
1676    * Gets the percentage of instances not classified (that is, for
1677    * which no prediction was made by the classifier).
1678    *
1679    * @return the percent of unclassified instances (between 0 and 100)
1680    */
1681   public final double pctUnclassified() {
1682 
1683     return 100 * m_Unclassified / m_WithClass;
1684   }
1685 
1686   /**
1687    * Returns the estimated error rate or the root mean squared error
1688    * (if the class is numeric). If a cost matrix was given this
1689    * error rate gives the average cost.
1690    *
1691    * @return the estimated error rate (between 0 and 1, or between 0 and 
1692    * maximum cost)
1693    */
1694   public final double errorRate() {
1695 
1696     if (!m_ClassIsNominal) {
1697       return Math.sqrt(m_SumSqrErr / (m_WithClass - m_Unclassified));
1698     }
1699     if (m_CostMatrix == null) {
1700       return m_Incorrect / m_WithClass;
1701     } else {
1702       return avgCost();
1703     }
1704   }
1705 
1706   /**
1707    * Returns value of kappa statistic if class is nominal.
1708    *
1709    * @return the value of the kappa statistic
1710    */
1711   public final double kappa() {
1712 
1713 
1714     double[] sumRows = new double[m_ConfusionMatrix.length];
1715     double[] sumColumns = new double[m_ConfusionMatrix.length];
1716     double sumOfWeights = 0;
1717     for (int i = 0; i < m_ConfusionMatrix.length; i++) {
1718       for (int j = 0; j < m_ConfusionMatrix.length; j++) {
1719 	sumRows[i] += m_ConfusionMatrix[i][j];
1720 	sumColumns[j] += m_ConfusionMatrix[i][j];
1721 	sumOfWeights += m_ConfusionMatrix[i][j];
1722       }
1723     }
1724     double correct = 0, chanceAgreement = 0;
1725     for (int i = 0; i < m_ConfusionMatrix.length; i++) {
1726       chanceAgreement += (sumRows[i] * sumColumns[i]);
1727       correct += m_ConfusionMatrix[i][i];
1728     }
1729     chanceAgreement /= (sumOfWeights * sumOfWeights);
1730     correct /= sumOfWeights;
1731 
1732     if (chanceAgreement < 1) {
1733       return (correct - chanceAgreement) / (1 - chanceAgreement);
1734     } else {
1735       return 1;
1736     }
1737   }
1738 
1739   /**
1740    * Returns the correlation coefficient if the class is numeric.
1741    *
1742    * @return the correlation coefficient
1743    * @throws Exception if class is not numeric
1744    */
1745   public final double correlationCoefficient() throws Exception {
1746 
1747     if (m_ClassIsNominal) {
1748       throw
1749       new Exception("Can't compute correlation coefficient: " + 
1750       "class is nominal!");
1751     }
1752 
1753     double correlation = 0;
1754     double varActual = 
1755       m_SumSqrClass - m_SumClass * m_SumClass / 
1756       (m_WithClass - m_Unclassified);
1757     double varPredicted = 
1758       m_SumSqrPredicted - m_SumPredicted * m_SumPredicted / 
1759       (m_WithClass - m_Unclassified);
1760     double varProd = 
1761       m_SumClassPredicted - m_SumClass * m_SumPredicted / 
1762       (m_WithClass - m_Unclassified);
1763 
1764     if (varActual * varPredicted <= 0) {
1765       correlation = 0.0;
1766     } else {
1767       correlation = varProd / Math.sqrt(varActual * varPredicted);
1768     }
1769 
1770     return correlation;
1771   }
1772 
1773   /**
1774    * Returns the mean absolute error. Refers to the error of the
1775    * predicted values for numeric classes, and the error of the 
1776    * predicted probability distribution for nominal classes.
1777    *
1778    * @return the mean absolute error 
1779    */
1780   public final double meanAbsoluteError() {
1781 
1782     return m_SumAbsErr / (m_WithClass - m_Unclassified);
1783   }
1784 
1785   /**
1786    * Returns the mean absolute error of the prior.
1787    *
1788    * @return the mean absolute error 
1789    */
1790   public final double meanPriorAbsoluteError() {
1791 
1792     if (m_NoPriors)
1793       return Double.NaN;
1794 
1795     return m_SumPriorAbsErr / m_WithClass;
1796   }
1797 
1798   /**
1799    * Returns the relative absolute error.
1800    *
1801    * @return the relative absolute error 
1802    * @throws Exception if it can't be computed
1803    */
1804   public final double relativeAbsoluteError() throws Exception {
1805 
1806     if (m_NoPriors)
1807       return Double.NaN;
1808 
1809     return 100 * meanAbsoluteError() / meanPriorAbsoluteError();
1810   }
1811 
1812   /**
1813    * Returns the root mean squared error.
1814    *
1815    * @return the root mean squared error 
1816    */
1817   public final double rootMeanSquaredError() {
1818 
1819     return Math.sqrt(m_SumSqrErr / (m_WithClass - m_Unclassified));
1820   }
1821 
1822   /**
1823    * Returns the root mean prior squared error.
1824    *
1825    * @return the root mean prior squared error 
1826    */
1827   public final double rootMeanPriorSquaredError() {
1828 
1829     if (m_NoPriors)
1830       return Double.NaN;
1831 
1832     return Math.sqrt(m_SumPriorSqrErr / m_WithClass);
1833   }
1834 
1835   /**
1836    * Returns the root relative squared error if the class is numeric.
1837    *
1838    * @return the root relative squared error 
1839    */
1840   public final double rootRelativeSquaredError() {
1841 
1842     if (m_NoPriors)
1843       return Double.NaN;
1844 
1845     return 100.0 * rootMeanSquaredError() / 
1846     rootMeanPriorSquaredError();
1847   }
1848 
1849   /**
1850    * Calculate the entropy of the prior distribution
1851    *
1852    * @return the entropy of the prior distribution
1853    * @throws Exception if the class is not nominal
1854    */
1855   public final double priorEntropy() throws Exception {
1856 
1857     if (!m_ClassIsNominal) {
1858       throw
1859       new Exception("Can't compute entropy of class prior: " + 
1860       "class numeric!");
1861     }
1862 
1863     if (m_NoPriors)
1864       return Double.NaN;
1865 
1866     double entropy = 0;
1867     for(int i = 0; i < m_NumClasses; i++) {
1868       entropy -= m_ClassPriors[i] / m_ClassPriorsSum 
1869       * Utils.log2(m_ClassPriors[i] / m_ClassPriorsSum);
1870     }
1871     return entropy;
1872   }
1873 
1874   /**
1875    * Return the total Kononenko & Bratko Information score in bits
1876    *
1877    * @return the K&B information score
1878    * @throws Exception if the class is not nominal
1879    */
1880   public final double KBInformation() throws Exception {
1881 
1882     if (!m_ClassIsNominal) {
1883       throw
1884       new Exception("Can't compute K&B Info score: " + 
1885       "class numeric!");
1886     }
1887 
1888     if (m_NoPriors)
1889       return Double.NaN;
1890 
1891     return m_SumKBInfo;
1892   }
1893 
1894   /**
1895    * Return the Kononenko & Bratko Information score in bits per 
1896    * instance.
1897    *
1898    * @return the K&B information score
1899    * @throws Exception if the class is not nominal
1900    */
1901   public final double KBMeanInformation() throws Exception {
1902 
1903     if (!m_ClassIsNominal) {
1904       throw
1905       new Exception("Can't compute K&B Info score: "
1906 	  + "class numeric!");
1907     }
1908 
1909     if (m_NoPriors)
1910       return Double.NaN;
1911 
1912     return m_SumKBInfo / (m_WithClass - m_Unclassified);
1913   }
1914 
1915   /**
1916    * Return the Kononenko & Bratko Relative Information score
1917    *
1918    * @return the K&B relative information score
1919    * @throws Exception if the class is not nominal
1920    */
1921   public final double KBRelativeInformation() throws Exception {
1922 
1923     if (!m_ClassIsNominal) {
1924       throw
1925       new Exception("Can't compute K&B Info score: " + 
1926       "class numeric!");
1927     }
1928 
1929     if (m_NoPriors)
1930       return Double.NaN;
1931 
1932     return 100.0 * KBInformation() / priorEntropy();
1933   }
1934 
1935   /**
1936    * Returns the total entropy for the null model
1937    * 
1938    * @return the total null model entropy
1939    */
1940   public final double SFPriorEntropy() {
1941 
1942     if (m_NoPriors)
1943       return Double.NaN;
1944 
1945     return m_SumPriorEntropy;
1946   }
1947 
1948   /**
1949    * Returns the entropy per instance for the null model
1950    * 
1951    * @return the null model entropy per instance
1952    */
1953   public final double SFMeanPriorEntropy() {
1954 
1955     if (m_NoPriors)
1956       return Double.NaN;
1957 
1958     return m_SumPriorEntropy / m_WithClass;
1959   }
1960 
1961   /**
1962    * Returns the total entropy for the scheme
1963    * 
1964    * @return the total scheme entropy
1965    */
1966   public final double SFSchemeEntropy() {
1967 
1968     if (m_NoPriors)
1969       return Double.NaN;
1970 
1971     return m_SumSchemeEntropy;
1972   }
1973 
1974   /**
1975    * Returns the entropy per instance for the scheme
1976    * 
1977    * @return the scheme entropy per instance
1978    */
1979   public final double SFMeanSchemeEntropy() {
1980 
1981     if (m_NoPriors)
1982       return Double.NaN;
1983 
1984     return m_SumSchemeEntropy / (m_WithClass - m_Unclassified);
1985   }
1986 
1987   /**
1988    * Returns the total SF, which is the null model entropy minus
1989    * the scheme entropy.
1990    * 
1991    * @return the total SF
1992    */
1993   public final double SFEntropyGain() {
1994 
1995     if (m_NoPriors)
1996       return Double.NaN;
1997 
1998     return m_SumPriorEntropy - m_SumSchemeEntropy;
1999   }
2000 
2001   /**
2002    * Returns the SF per instance, which is the null model entropy
2003    * minus the scheme entropy, per instance.
2004    * 
2005    * @return the SF per instance
2006    */
2007   public final double SFMeanEntropyGain() {
2008 
2009     if (m_NoPriors)
2010       return Double.NaN;
2011 
2012     return (m_SumPriorEntropy - m_SumSchemeEntropy) / 
2013       (m_WithClass - m_Unclassified);
2014   }
2015 
2016   /**
2017    * Output the cumulative margin distribution as a string suitable
2018    * for input for gnuplot or similar package.
2019    *
2020    * @return the cumulative margin distribution
2021    * @throws Exception if the class attribute is nominal
2022    */
2023   public String toCumulativeMarginDistributionString() throws Exception {
2024 
2025     if (!m_ClassIsNominal) {
2026       throw new Exception("Class must be nominal for margin distributions");
2027     }
2028     String result = "";
2029     double cumulativeCount = 0;
2030     double margin;
2031     for(int i = 0; i <= k_MarginResolution; i++) {
2032       if (m_MarginCounts[i] != 0) {
2033 	cumulativeCount += m_MarginCounts[i];
2034 	margin = (double)i * 2.0 / k_MarginResolution - 1.0;
2035 	result = result + Utils.doubleToString(margin, 7, 3) + ' ' 
2036 	+ Utils.doubleToString(cumulativeCount * 100 
2037 	    / m_WithClass, 7, 3) + '\n';
2038       } else if (i == 0) {
2039 	result = Utils.doubleToString(-1.0, 7, 3) + ' ' 
2040 	+ Utils.doubleToString(0, 7, 3) + '\n';
2041       }
2042     }
2043     return result;
2044   }
2045 
2046 
2047   /**
2048    * Calls toSummaryString() with no title and no complexity stats
2049    *
2050    * @return a summary description of the classifier EvaluationACO
2051    */
2052   public String toSummaryString() {
2053 
2054     return toSummaryString("", false);
2055   }
2056 
2057   /**
2058    * Calls toSummaryString() with a default title.
2059    *
2060    * @param printComplexityStatistics if true, complexity statistics are
2061    * returned as well
2062    * @return the summary string
2063    */
2064   public String toSummaryString(boolean printComplexityStatistics) {
2065 
2066     return toSummaryString("=== Summary ===\n", printComplexityStatistics);
2067   }
2068 
2069   /**
2070    * Outputs the performance statistics in summary form. Lists 
2071    * number (and percentage) of instances classified correctly, 
2072    * incorrectly and unclassified. Outputs the total number of 
2073    * instances classified, and the number of instances (if any) 
2074    * that had no class value provided. 
2075    *
2076    * @param title the title for the statistics
2077    * @param printComplexityStatistics if true, complexity statistics are
2078    * returned as well
2079    * @return the summary as a String
2080    */
2081   public String toSummaryString(String title, 
2082       boolean printComplexityStatistics) { 
2083 
2084     StringBuffer text = new StringBuffer();
2085 
2086     if (printComplexityStatistics && m_NoPriors) {
2087       printComplexityStatistics = false;
2088       System.err.println("Priors disabled, cannot print complexity statistics!");
2089     }
2090 
2091     text.append(title + "\n");
2092     try {
2093       if (m_WithClass > 0) {
2094 	if (m_ClassIsNominal) {
2095 
2096 	  text.append("Correctly Classified Instances     ");
2097 	  text.append(Utils.doubleToString(correct(), 12, 4) + "     " +
2098 	      Utils.doubleToString(pctCorrect(),
2099 		  12, 4) + " %\n");
2100 	  text.append("Incorrectly Classified Instances   ");
2101 	  text.append(Utils.doubleToString(incorrect(), 12, 4) + "     " +
2102 	      Utils.doubleToString(pctIncorrect(),
2103 		  12, 4) + " %\n");
2104 	  text.append("Kappa statistic                    ");
2105 	  text.append(Utils.doubleToString(kappa(), 12, 4) + "\n");
2106 
2107 	  if (m_CostMatrix != null) {
2108 	    text.append("Total Cost                         ");
2109 	    text.append(Utils.doubleToString(totalCost(), 12, 4) + "\n");
2110 	    text.append("Average Cost                       ");
2111 	    text.append(Utils.doubleToString(avgCost(), 12, 4) + "\n");
2112 	  }
2113 	  if (printComplexityStatistics) {
2114 	    text.append("K&B Relative Info Score            ");
2115 	    text.append(Utils.doubleToString(KBRelativeInformation(), 12, 4) 
2116 		+ " %\n");
2117 	    text.append("K&B Information Score              ");
2118 	    text.append(Utils.doubleToString(KBInformation(), 12, 4) 
2119 		+ " bits");
2120 	    text.append(Utils.doubleToString(KBMeanInformation(), 12, 4) 
2121 		+ " bits/instance\n");
2122 	  }
2123 	} else {        
2124 	  text.append("Correlation coefficient            ");
2125 	  text.append(Utils.doubleToString(correlationCoefficient(), 12 , 4) +
2126 	  "\n");
2127 	}
2128 	if (printComplexityStatistics) {
2129 	  text.append("Class complexity | order 0         ");
2130 	  text.append(Utils.doubleToString(SFPriorEntropy(), 12, 4) 
2131 	      + " bits");
2132 	  text.append(Utils.doubleToString(SFMeanPriorEntropy(), 12, 4) 
2133 	      + " bits/instance\n");
2134 	  text.append("Class complexity | scheme          ");
2135 	  text.append(Utils.doubleToString(SFSchemeEntropy(), 12, 4) 
2136 	      + " bits");
2137 	  text.append(Utils.doubleToString(SFMeanSchemeEntropy(), 12, 4) 
2138 	      + " bits/instance\n");
2139 	  text.append("Complexity improvement     (Sf)    ");
2140 	  text.append(Utils.doubleToString(SFEntropyGain(), 12, 4) + " bits");
2141 	  text.append(Utils.doubleToString(SFMeanEntropyGain(), 12, 4) 
2142 	      + " bits/instance\n");
2143 	}
2144 
2145 	text.append("Mean absolute error                ");
2146 	text.append(Utils.doubleToString(meanAbsoluteError(), 12, 4) 
2147 	    + "\n");
2148 	text.append("Root mean squared error            ");
2149 	text.append(Utils.
2150 	    doubleToString(rootMeanSquaredError(), 12, 4) 
2151 	    + "\n");
2152 	if (!m_NoPriors) {
2153 	  text.append("Relative absolute error            ");
2154 	  text.append(Utils.doubleToString(relativeAbsoluteError(), 
2155 	      12, 4) + " %\n");
2156 	  text.append("Root relative squared error        ");
2157 	  text.append(Utils.doubleToString(rootRelativeSquaredError(), 
2158 	      12, 4) + " %\n");
2159 	}
2160       }
2161       if (Utils.gr(unclassified(), 0)) {
2162 	text.append("UnClassified Instances             ");
2163 	text.append(Utils.doubleToString(unclassified(), 12,4) +  "     " +
2164 	    Utils.doubleToString(pctUnclassified(),
2165 		12, 4) + " %\n");
2166       }
2167       text.append("Total Number of Instances          ");
2168       text.append(Utils.doubleToString(m_WithClass, 12, 4) + "\n");
2169       if (m_MissingClass > 0) {
2170 	text.append("Ignored Class Unknown Instances            ");
2171 	text.append(Utils.doubleToString(m_MissingClass, 12, 4) + "\n");
2172       }
2173     } catch (Exception ex) {
2174       // Should never occur since the class is known to be nominal 
2175       // here
2176       System.err.println("Arggh - Must be a bug in EvaluationACO class");
2177     }
2178 
2179     return text.toString(); 
2180   }
2181 
2182   /**
2183    * Calls toMatrixString() with a default title.
2184    *
2185    * @return the confusion matrix as a string
2186    * @throws Exception if the class is numeric
2187    */
2188   public String toMatrixString() throws Exception {
2189 
2190     return toMatrixString("=== Confusion Matrix ===\n");
2191   }
2192 
2193   /**
2194    * Outputs the performance statistics as a classification confusion
2195    * matrix. For each class value, shows the distribution of 
2196    * predicted class values.
2197    *
2198    * @param title the title for the confusion matrix
2199    * @return the confusion matrix as a String
2200    * @throws Exception if the class is numeric
2201    */
2202   public String toMatrixString(String title) throws Exception {
2203 
2204     StringBuffer text = new StringBuffer();
2205     char [] IDChars = {'a','b','c','d','e','f','g','h','i','j',
2206 	'k','l','m','n','o','p','q','r','s','t',
2207 	'u','v','w','x','y','z'};
2208     int IDWidth;
2209     boolean fractional = false;
2210 
2211     if (!m_ClassIsNominal) {
2212       throw new Exception("EvaluationACO: No confusion matrix possible!");
2213     }
2214 
2215     // Find the maximum value in the matrix
2216     // and check for fractional display requirement 
2217     double maxval = 0;
2218     for(int i = 0; i < m_NumClasses; i++) {
2219       for(int j = 0; j < m_NumClasses; j++) {
2220 	double current = m_ConfusionMatrix[i][j];
2221 	if (current < 0) {
2222 	  current *= -10;
2223 	}
2224 	if (current > maxval) {
2225 	  maxval = current;
2226 	}
2227 	double fract = current - Math.rint(current);
2228 	if (!fractional
2229 	    && ((Math.log(fract) / Math.log(10)) >= -2)) {
2230 	  fractional = true;
2231 	}
2232       }
2233     }
2234 
2235     IDWidth = 1 + Math.max((int)(Math.log(maxval) / Math.log(10) 
2236 	+ (fractional ? 3 : 0)),
2237 	(int)(Math.log(m_NumClasses) / 
2238 	    Math.log(IDChars.length)));
2239     text.append(title).append("\n");
2240     for(int i = 0; i < m_NumClasses; i++) {
2241       if (fractional) {
2242 	text.append(" ").append(num2ShortID(i,IDChars,IDWidth - 3))
2243 	.append("   ");
2244       } else {
2245 	text.append(" ").append(num2ShortID(i,IDChars,IDWidth));
2246       }
2247     }
2248     text.append("   <-- classified as\n");
2249     for(int i = 0; i< m_NumClasses; i++) { 
2250       for(int j = 0; j < m_NumClasses; j++) {
2251 	text.append(" ").append(
2252 	    Utils.doubleToString(m_ConfusionMatrix[i][j],
2253 		IDWidth,
2254 		(fractional ? 2 : 0)));
2255       }
2256       text.append(" | ").append(num2ShortID(i,IDChars,IDWidth))
2257       .append(" = ").append(m_ClassNames[i]).append("\n");
2258     }
2259     return text.toString();
2260   }
2261 
2262   /**
2263    * Generates a breakdown of the accuracy for each class (with default title),
2264    * incorporating various information-retrieval statistics, such as
2265    * true/false positive rate, precision/recall/F-Measure.  Should be
2266    * useful for ROC curves, recall/precision curves.  
2267    * 
2268    * @return the statistics presented as a string
2269    * @throws Exception if class is not nominal
2270    */
2271   public String toClassDetailsString() throws Exception {
2272 
2273     return toClassDetailsString("=== Detailed Accuracy By Class ===\n");
2274   }
2275 
2276   /**
2277    * Generates a breakdown of the accuracy for each class,
2278    * incorporating various information-retrieval statistics, such as
2279    * true/false positive rate, precision/recall/F-Measure.  Should be
2280    * useful for ROC curves, recall/precision curves.  
2281    * 
2282    * @param title the title to prepend the stats string with 
2283    * @return the statistics presented as a string
2284    * @throws Exception if class is not nominal
2285    */
2286   public String toClassDetailsString(String title) throws Exception {
2287 
2288     if (!m_ClassIsNominal) {
2289       throw new Exception("EvaluationACO: No confusion matrix possible!");
2290     }
2291     StringBuffer text = new StringBuffer(title 
2292 	+ "\nTP Rate   FP Rate"
2293 	+ "   Precision   Recall"
2294 	+ "  F-Measure   ROC Area  Class\n");
2295     for(int i = 0; i < m_NumClasses; i++) {
2296       text.append(Utils.doubleToString(truePositiveRate(i), 7, 3))
2297       .append("   ");
2298       text.append(Utils.doubleToString(falsePositiveRate(i), 7, 3))
2299       .append("    ");
2300       text.append(Utils.doubleToString(precision(i), 7, 3))
2301       .append("   ");
2302       text.append(Utils.doubleToString(recall(i), 7, 3))
2303       .append("   ");
2304       text.append(Utils.doubleToString(fMeasure(i), 7, 3))
2305       .append("    ");
2306       double rocVal = areaUnderROC(i);
2307       if (Instance.isMissingValue(rocVal)) {
2308 	text.append("  ?    ")
2309 	.append("    ");
2310       } else {
2311 	text.append(Utils.doubleToString(rocVal, 7, 3))
2312 	.append("    ");
2313       }
2314       text.append(m_ClassNames[i]).append('\n');
2315     }
2316     return text.toString();
2317   }
2318 
2319   /**
2320    * Calculate the number of true positives with respect to a particular class. 
2321    * This is defined as<p/>
2322    * <pre>
2323    * correctly classified positives
2324    * </pre>
2325    *
2326    * @param classIndex the index of the class to consider as "positive"
2327    * @return the true positive rate
2328    */
2329   public double numTruePositives(int classIndex) {
2330 
2331     double correct = 0;
2332     for (int j = 0; j < m_NumClasses; j++) {
2333       if (j == classIndex) {
2334 	correct += m_ConfusionMatrix[classIndex][j];
2335       }
2336     }
2337     return correct;
2338   }
2339 
2340   /**
2341    * Calculate the true positive rate with respect to a particular class. 
2342    * This is defined as<p/>
2343    * <pre>
2344    * correctly classified positives
2345    * ------------------------------
2346    *       total positives
2347    * </pre>
2348    *
2349    * @param classIndex the index of the class to consider as "positive"
2350    * @return the true positive rate
2351    */
2352   public double truePositiveRate(int classIndex) {
2353 
2354     double correct = 0, total = 0;
2355     for (int j = 0; j < m_NumClasses; j++) {
2356       if (j == classIndex) {
2357 	correct += m_ConfusionMatrix[classIndex][j];
2358       }
2359       total += m_ConfusionMatrix[classIndex][j];
2360     }
2361     if (total == 0) {
2362       return 0;
2363     }
2364     return correct / total;
2365   }
2366 
2367   /**
2368    * Calculate the number of true negatives with respect to a particular class. 
2369    * This is defined as<p/>
2370    * <pre>
2371    * correctly classified negatives
2372    * </pre>
2373    *
2374    * @param classIndex the index of the class to consider as "positive"
2375    * @return the true positive rate
2376    */
2377   public double numTrueNegatives(int classIndex) {
2378 
2379     double correct = 0;
2380     for (int i = 0; i < m_NumClasses; i++) {
2381       if (i != classIndex) {
2382 	for (int j = 0; j < m_NumClasses; j++) {
2383 	  if (j != classIndex) {
2384 	    correct += m_ConfusionMatrix[i][j];
2385 	  }
2386 	}
2387       }
2388     }
2389     return correct;
2390   }
2391 
2392   /**
2393    * Calculate the true negative rate with respect to a particular class. 
2394    * This is defined as<p/>
2395    * <pre>
2396    * correctly classified negatives
2397    * ------------------------------
2398    *       total negatives
2399    * </pre>
2400    *
2401    * @param classIndex the index of the class to consider as "positive"
2402    * @return the true positive rate
2403    */
2404   public double trueNegativeRate(int classIndex) {
2405 
2406     double correct = 0, total = 0;
2407     for (int i = 0; i < m_NumClasses; i++) {
2408       if (i != classIndex) {
2409 	for (int j = 0; j < m_NumClasses; j++) {
2410 	  if (j != classIndex) {
2411 	    correct += m_ConfusionMatrix[i][j];
2412 	  }
2413 	  total += m_ConfusionMatrix[i][j];
2414 	}
2415       }
2416     }
2417     if (total == 0) {
2418       return 0;
2419     }
2420     return correct / total;
2421   }
2422 
2423   /**
2424    * Calculate number of false positives with respect to a particular class. 
2425    * This is defined as<p/>
2426    * <pre>
2427    * incorrectly classified negatives
2428    * </pre>
2429    *
2430    * @param classIndex the index of the class to consider as "positive"
2431    * @return the false positive rate
2432    */
2433   public double numFalsePositives(int classIndex) {
2434 
2435     double incorrect = 0;
2436     for (int i = 0; i < m_NumClasses; i++) {
2437       if (i != classIndex) {
2438 	for (int j = 0; j < m_NumClasses; j++) {
2439 	  if (j == classIndex) {
2440 	    incorrect += m_ConfusionMatrix[i][j];
2441 	  }
2442 	}
2443       }
2444     }
2445     return incorrect;
2446   }
2447 
2448   /**
2449    * Calculate the false positive rate with respect to a particular class. 
2450    * This is defined as<p/>
2451    * <pre>
2452    * incorrectly classified negatives
2453    * --------------------------------
2454    *        total negatives
2455    * </pre>
2456    *
2457    * @param classIndex the index of the class to consider as "positive"
2458    * @return the false positive rate
2459    */
2460   public double falsePositiveRate(int classIndex) {
2461 
2462     double incorrect = 0, total = 0;
2463     for (int i = 0; i < m_NumClasses; i++) {
2464       if (i != classIndex) {
2465 	for (int j = 0; j < m_NumClasses; j++) {
2466 	  if (j == classIndex) {
2467 	    incorrect += m_ConfusionMatrix[i][j];
2468 	  }
2469 	  total += m_ConfusionMatrix[i][j];
2470 	}
2471       }
2472     }
2473     if (total == 0) {
2474       return 0;
2475     }
2476     return incorrect / total;
2477   }
2478 
2479   /**
2480    * Calculate number of false negatives with respect to a particular class. 
2481    * This is defined as<p/>
2482    * <pre>
2483    * incorrectly classified positives
2484    * </pre>
2485    *
2486    * @param classIndex the index of the class to consider as "positive"
2487    * @return the false positive rate
2488    */
2489   public double numFalseNegatives(int classIndex) {
2490 
2491     double incorrect = 0;
2492     for (int i = 0; i < m_NumClasses; i++) {
2493       if (i == classIndex) {
2494 	for (int j = 0; j < m_NumClasses; j++) {
2495 	  if (j != classIndex) {
2496 	    incorrect += m_ConfusionMatrix[i][j];
2497 	  }
2498 	}
2499       }
2500     }
2501     return incorrect;
2502   }
2503 
2504   /**
2505    * Calculate the false negative rate with respect to a particular class. 
2506    * This is defined as<p/>
2507    * <pre>
2508    * incorrectly classified positives
2509    * --------------------------------
2510    *        total positives
2511    * </pre>
2512    *
2513    * @param classIndex the index of the class to consider as "positive"
2514    * @return the false positive rate
2515    */
2516   public double falseNegativeRate(int classIndex) {
2517 
2518     double incorrect = 0, total = 0;
2519     for (int i = 0; i < m_NumClasses; i++) {
2520       if (i == classIndex) {
2521 	for (int j = 0; j < m_NumClasses; j++) {
2522 	  if (j != classIndex) {
2523 	    incorrect += m_ConfusionMatrix[i][j];
2524 	  }
2525 	  total += m_ConfusionMatrix[i][j];
2526 	}
2527       }
2528     }
2529     if (total == 0) {
2530       return 0;
2531     }
2532     return incorrect / total;
2533   }
2534 
2535   /**
2536    * Calculate the recall with respect to a particular class. 
2537    * This is defined as<p/>
2538    * <pre>
2539    * correctly classified positives
2540    * ------------------------------
2541    *       total positives
2542    * </pre><p/>
2543    * (Which is also the same as the truePositiveRate.)
2544    *
2545    * @param classIndex the index of the class to consider as "positive"
2546    * @return the recall
2547    */
2548   public double recall(int classIndex) {
2549 
2550     return truePositiveRate(classIndex);
2551   }
2552 
2553   /**
2554    * Calculate the precision with respect to a particular class. 
2555    * This is defined as<p/>
2556    * <pre>
2557    * correctly classified positives
2558    * ------------------------------
2559    *  total predicted as positive
2560    * </pre>
2561    *
2562    * @param classIndex the index of the class to consider as "positive"
2563    * @return the precision
2564    */
2565   public double precision(int classIndex) {
2566 
2567     double correct = 0, total = 0;
2568     for (int i = 0; i < m_NumClasses; i++) {
2569       if (i == classIndex) {
2570 	correct += m_ConfusionMatrix[i][classIndex];
2571       }
2572       total += m_ConfusionMatrix[i][classIndex];
2573     }
2574     if (total == 0) {
2575       return 0;
2576     }
2577     return correct / total;
2578   }
2579 
2580   /**
2581    * Calculate the F-Measure with respect to a particular class. 
2582    * This is defined as<p/>
2583    * <pre>
2584    * 2 * recall * precision
2585    * ----------------------
2586    *   recall + precision
2587    * </pre>
2588    *
2589    * @param classIndex the index of the class to consider as "positive"
2590    * @return the F-Measure
2591    */
2592   public double fMeasure(int classIndex) {
2593 
2594     double precision = precision(classIndex);
2595     double recall = recall(classIndex);
2596     if ((precision + recall) == 0) {
2597       return 0;
2598     }
2599     return 2 * precision * recall / (precision + recall);
2600   }
2601 
2602   /**
2603    * Sets the class prior probabilities
2604    *
2605    * @param train the training instances used to determine
2606    * the prior probabilities
2607    * @throws Exception if the class attribute of the instances is not
2608    * set
2609    */
2610   public void setPriors(Instances train) throws Exception {
2611     m_NoPriors = false;
2612 
2613     if (!m_ClassIsNominal) {
2614 
2615       m_NumTrainClassVals = 0;
2616       m_TrainClassVals = null;
2617       m_TrainClassWeights = null;
2618       m_PriorErrorEstimator = null;
2619       m_ErrorEstimator = null;
2620 
2621       for (int i = 0; i < train.numInstances(); i++) {
2622 	Instance currentInst = train.instance(i);
2623 	if (!currentInst.classIsMissing()) {
2624 	  addNumericTrainClass(currentInst.classValue(), 
2625 	      currentInst.weight());
2626 	}
2627       }
2628 
2629     } else {
2630       for (int i = 0; i < m_NumClasses; i++) {
2631 	m_ClassPriors[i] = 1;
2632       }
2633       m_ClassPriorsSum = m_NumClasses;
2634       for (int i = 0; i < train.numInstances(); i++) {
2635 	if (!train.instance(i).classIsMissing()) {
2636 	  m_ClassPriors[(int)train.instance(i).classValue()] += 
2637 	    train.instance(i).weight();
2638 	  m_ClassPriorsSum += train.instance(i).weight();
2639 	}
2640       }
2641     }
2642   }
2643 
2644   /**
2645    * Get the current weighted class counts
2646    * 
2647    * @return the weighted class counts
2648    */
2649   public double [] getClassPriors() {
2650     return m_ClassPriors;
2651   }
2652 
2653   /**
2654    * Updates the class prior probabilities (when incrementally 
2655    * training)
2656    *
2657    * @param instance the new training instance seen
2658    * @throws Exception if the class of the instance is not
2659    * set
2660    */
2661   public void updatePriors(Instance instance) throws Exception {
2662     if (!instance.classIsMissing()) {
2663       if (!m_ClassIsNominal) {
2664 	if (!instance.classIsMissing()) {
2665 	  addNumericTrainClass(instance.classValue(), 
2666 	      instance.weight());
2667 	}
2668       } else {
2669 	m_ClassPriors[(int)instance.classValue()] += 
2670 	  instance.weight();
2671 	m_ClassPriorsSum += instance.weight();
2672       }
2673     }    
2674   }
2675 
2676   /**
2677    * disables the use of priors, e.g., in case of de-serialized schemes
2678    * that have no access to the original training set, but are evaluated
2679    * on a set set.
2680    */
2681   public void useNoPriors() {
2682     m_NoPriors = true;
2683   }
2684 
2685   /**
2686    * Tests whether the current EvaluationACO object is equal to another
2687    * EvaluationACO object
2688    *
2689    * @param obj the object to compare against
2690    * @return true if the two objects are equal
2691    */
2692   public boolean equals(Object obj) {
2693 
2694     if ((obj == null) || !(obj.getClass().equals(this.getClass()))) {
2695       return false;
2696     }
2697     EvaluationACO cmp = (EvaluationACO) obj;
2698     if (m_ClassIsNominal != cmp.m_ClassIsNominal) return false;
2699     if (m_NumClasses != cmp.m_NumClasses) return false;
2700 
2701     if (m_Incorrect != cmp.m_Incorrect) return false;
2702     if (m_Correct != cmp.m_Correct) return false;
2703     if (m_Unclassified != cmp.m_Unclassified) return false;
2704     if (m_MissingClass != cmp.m_MissingClass) return false;
2705     if (m_WithClass != cmp.m_WithClass) return false;
2706 
2707     if (m_SumErr != cmp.m_SumErr) return false;
2708     if (m_SumAbsErr != cmp.m_SumAbsErr) return false;
2709     if (m_SumSqrErr != cmp.m_SumSqrErr) return false;
2710     if (m_SumClass != cmp.m_SumClass) return false;
2711     if (m_SumSqrClass != cmp.m_SumSqrClass) return false;
2712     if (m_SumPredicted != cmp.m_SumPredicted) return false;
2713     if (m_SumSqrPredicted != cmp.m_SumSqrPredicted) return false;
2714     if (m_SumClassPredicted != cmp.m_SumClassPredicted) return false;
2715 
2716     if (m_ClassIsNominal) {
2717       for (int i = 0; i < m_NumClasses; i++) {
2718 	for (int j = 0; j < m_NumClasses; j++) {
2719 	  if (m_ConfusionMatrix[i][j] != cmp.m_ConfusionMatrix[i][j]) {
2720 	    return false;
2721 	  }
2722 	}
2723       }
2724     }
2725 
2726     return true;
2727   }
2728 
2729   /**
2730    * Prints the predictions for the given dataset into a String variable.
2731    * 
2732    * @param classifier		the classifier to use
2733    * @param train		the training data
2734    * @param testSource		the test set
2735    * @param classIndex		the class index (1-based), if -1 ot does not 
2736    * 				override the class index is stored in the data 
2737    * 				file (by using the last attribute)
2738    * @param attributesToOutput	the indices of the attributes to output
2739    * @return			the generated predictions for the attribute range
2740    * @throws Exception 		if test file cannot be opened
2741    */
2742   protected static String printClassifications(Classifier classifier, 
2743       Instances train,
2744       DataSource testSource,
2745       int classIndex,
2746       Range attributesToOutput) throws Exception {
2747     
2748     return printClassifications(
2749 	classifier, train, testSource, classIndex, attributesToOutput, false);
2750   }
2751 
2752   /**
2753    * Prints the predictions for the given dataset into a String variable.
2754    * 
2755    * @param classifier		the classifier to use
2756    * @param train		the training data
2757    * @param testSource		the test set
2758    * @param classIndex		the class index (1-based), if -1 ot does not 
2759    * 				override the class index is stored in the data 
2760    * 				file (by using the last attribute)
2761    * @param attributesToOutput	the indices of the attributes to output
2762    * @param printDistribution	prints the complete distribution for nominal 
2763    * 				classes, not just the predicted value
2764    * @return			the generated predictions for the attribute range
2765    * @throws Exception 		if test file cannot be opened
2766    */
2767   protected static String printClassifications(Classifier classifier, 
2768       Instances train,
2769       DataSource testSource,
2770       int classIndex,
2771       Range attributesToOutput,
2772       boolean printDistribution) throws Exception {
2773 
2774     StringBuffer text = new StringBuffer();
2775     if (testSource != null) {
2776       Instances test = testSource.getStructure();
2777       if (classIndex != -1) {
2778 	test.setClassIndex(classIndex - 1);
2779       } else {
2780 	if (test.classIndex() == -1)
2781 	  test.setClassIndex(test.numAttributes() - 1);
2782       }
2783 
2784       // print header
2785       if (test.classAttribute().isNominal())
2786 	if (printDistribution)
2787 	  text.append(" inst#     actual  predicted error distribution");
2788 	else
2789 	  text.append(" inst#     actual  predicted error prediction");
2790       else
2791 	text.append(" inst#     actual  predicted      error");
2792       if (attributesToOutput != null) {
2793 	attributesToOutput.setUpper(test.numAttributes() - 1);
2794 	text.append(" (");
2795 	boolean first = true;
2796 	for (int i = 0; i < test.numAttributes(); i++) {
2797 	  if (i == test.classIndex())
2798 	    continue;
2799 
2800 	  if (attributesToOutput.isInRange(i)) {
2801 	    if (!first)
2802 	      text.append(",");
2803 	    text.append(test.attribute(i).name());
2804 	    first = false;
2805 	  }
2806 	}
2807 	text.append(")");
2808       }
2809       text.append("\n");
2810 
2811       // print predictions
2812       int i = 0;
2813       testSource.reset();
2814       test = testSource.getStructure(test.classIndex());
2815       while (testSource.hasMoreElements(test)) {
2816 	Instance inst = testSource.nextElement(test);
2817 	text.append(
2818 	    predictionText(
2819 		classifier, inst, i, attributesToOutput, printDistribution));
2820 	i++;
2821       }
2822     }
2823     return text.toString();
2824   }
2825 
2826   /**
2827    * returns the prediction made by the classifier as a string
2828    * 
2829    * @param classifier		the classifier to use
2830    * @param inst		the instance to generate text from
2831    * @param instNum		the index in the dataset
2832    * @param attributesToOutput	the indices of the attributes to output
2833    * @param printDistribution	prints the complete distribution for nominal 
2834    * 				classes, not just the predicted value
2835    * @return			the generated text
2836    * @throws Exception		if something goes wrong
2837    * @see			#printClassifications(Classifier, Instances, String, int, Range, boolean)
2838    */
2839   protected static String predictionText(Classifier classifier, 
2840       Instance inst, 
2841       int instNum,
2842       Range attributesToOutput,
2843       boolean printDistribution) 
2844   throws Exception {
2845 
2846     StringBuffer result = new StringBuffer();
2847     int width = 10;
2848     int prec = 3;
2849 
2850     Instance withMissing = (Instance)inst.copy();
2851     withMissing.setDataset(inst.dataset());
2852     double predValue = ((Classifier)classifier).classifyInstance(withMissing);
2853 
2854     // index
2855     result.append(Utils.padLeft("" + (instNum+1), 6));
2856 
2857     if (inst.dataset().classAttribute().isNumeric()) {
2858       // actual
2859       if (inst.classIsMissing())
2860 	result.append(" " + Utils.padLeft("?", width));
2861       else
2862 	result.append(" " + Utils.doubleToString(inst.classValue(), width, prec));
2863       // predicted
2864       if (Instance.isMissingValue(predValue))
2865 	result.append(" " + Utils.padLeft("?", width));
2866       else
2867 	result.append(" " + Utils.doubleToString(predValue, width, prec));
2868       // error
2869       if (Instance.isMissingValue(predValue) || inst.classIsMissing())
2870 	result.append(" " + Utils.padLeft("?", width));
2871       else
2872 	result.append(" " + Utils.doubleToString(predValue - inst.classValue(), width, prec));
2873     } else {
2874       // actual
2875       result.append(" " + Utils.padLeft(((int) inst.classValue()+1) + ":" + inst.toString(inst.classIndex()), width));
2876       // predicted
2877       if (Instance.isMissingValue(predValue))
2878 	result.append(" " + Utils.padLeft("?", width));
2879       else
2880 	result.append(" " + Utils.padLeft(((int) predValue+1) + ":" + inst.dataset().classAttribute().value((int)predValue), width));
2881       // error?
2882       if ((int) predValue+1 != (int) inst.classValue()+1)
2883 	result.append(" " + "  +  ");
2884       else
2885 	result.append(" " + "     ");
2886       // prediction/distribution
2887       if (printDistribution) {
2888 	if (Instance.isMissingValue(predValue)) {
2889 	  result.append(" " + "?");
2890 	}
2891 	else {
2892 	  result.append(" ");
2893 	  double[] dist = classifier.distributionForInstance(withMissing);
2894 	  for (int n = 0; n < dist.length; n++) {
2895 	    if (n > 0)
2896 	      result.append(",");
2897 	    if (n == (int) predValue)
2898 	      result.append("*");
2899 	    result.append(Utils.doubleToString(dist[n], prec));
2900 	  }
2901 	}
2902       }
2903       else {
2904 	if (Instance.isMissingValue(predValue))
2905 	  result.append(" " + "?");
2906 	else
2907 	  result.append(" " + Utils.doubleToString(classifier.distributionForInstance(withMissing) [(int)predValue], prec));
2908       }
2909     }
2910 
2911     // attributes
2912     result.append(" " + attributeValuesString(withMissing, attributesToOutput) + "\n");
2913 
2914     return result.toString();
2915   }
2916 
2917   /**
2918    * Builds a string listing the attribute values in a specified range of indices,
2919    * separated by commas and enclosed in brackets.
2920    *
2921    * @param instance the instance to print the values from
2922    * @param attRange the range of the attributes to list
2923    * @return a string listing values of the attributes in the range
2924    */
2925   protected static String attributeValuesString(Instance instance, Range attRange) {
2926     StringBuffer text = new StringBuffer();
2927     if (attRange != null) {
2928       boolean firstOutput = true;
2929       attRange.setUpper(instance.numAttributes() - 1);
2930       for (int i=0; i<instance.numAttributes(); i++)
2931 	if (attRange.isInRange(i) && i != instance.classIndex()) {
2932 	  if (firstOutput) text.append("(");
2933 	  else text.append(",");
2934 	  text.append(instance.toString(i));
2935 	  firstOutput = false;
2936 	}
2937       if (!firstOutput) text.append(")");
2938     }
2939     return text.toString();
2940   }
2941 
2942   /**
2943    * Make up the help string giving all the command line options
2944    *
2945    * @param classifier the classifier to include options for
2946    * @return a string detailing the valid command line options
2947    */
2948   protected static String makeOptionString(Classifier classifier) {
2949 
2950     StringBuffer optionsText = new StringBuffer("");
2951 
2952     // General options
2953     optionsText.append("\n\nGeneral options:\n\n");
2954     optionsText.append("-t <name of training file>\n");
2955     optionsText.append("\tSets training file.\n");
2956     optionsText.append("-T <name of test file>\n");
2957     optionsText.append("\tSets test file. If missing, a cross-validation will be performed\n");
2958     optionsText.append("\ton the training data.\n");
2959     optionsText.append("-c <class index>\n");
2960     optionsText.append("\tSets index of class attribute (default: last).\n");
2961     optionsText.append("-x <number of folds>\n");
2962     optionsText.append("\tSets number of folds for cross-validation (default: 10).\n");
2963     optionsText.append("-no-cv\n");
2964     optionsText.append("\tDo not perform any cross validation.\n");
2965     optionsText.append("-split-percentage <percentage>\n");
2966     optionsText.append("\tSets the percentage for the train/test set split, e.g., 66.\n");
2967     optionsText.append("-preserve-order\n");
2968     optionsText.append("\tPreserves the order in the percentage split.\n");
2969     optionsText.append("-s <random number seed>\n");
2970     optionsText.append("\tSets random number seed for cross-validation or percentage split\n");
2971     optionsText.append("\t(default: 1).\n");
2972     optionsText.append("-m <name of file with cost matrix>\n");
2973     optionsText.append("\tSets file with cost matrix.\n");
2974     optionsText.append("-l <name of input file>\n");
2975     optionsText.append("\tSets model input file. In case the filename ends with '.xml',\n");
2976     optionsText.append("\tthe options are loaded from the XML file.\n");
2977     optionsText.append("-d <name of output file>\n");
2978     optionsText.append("\tSets model output file. In case the filename ends with '.xml',\n");
2979     optionsText.append("\tonly the options are saved to the XML file, not the model.\n");
2980     optionsText.append("-v\n");
2981     optionsText.append("\tOutputs no statistics for training data.\n");
2982     optionsText.append("-o\n");
2983     optionsText.append("\tOutputs statistics only, not the classifier.\n");
2984     optionsText.append("-i\n");
2985     optionsText.append("\tOutputs detailed information-retrieval");
2986     optionsText.append(" statistics for each class.\n");
2987     optionsText.append("-k\n");
2988     optionsText.append("\tOutputs information-theoretic statistics.\n");
2989     optionsText.append("-p <attribute range>\n");
2990     optionsText.append("\tOnly outputs predictions for test instances (or the train\n"
2991 	+ "\tinstances if no test instances provided), along with attributes\n"
2992 	+ "\t(0 for none).\n");
2993     optionsText.append("-distribution\n");
2994     optionsText.append("\tOutputs the distribution instead of only the prediction\n");
2995     optionsText.append("\tin conjunction with the '-p' option (only nominal classes).\n");
2996     optionsText.append("-r\n");
2997     optionsText.append("\tOnly outputs cumulative margin distribution.\n");
2998     if (classifier instanceof Sourcable) {
2999       optionsText.append("-z <class name>\n");
3000       optionsText.append("\tOnly outputs the source representation"
3001 	  + " of the classifier,\n\tgiving it the supplied"
3002 	  + " name.\n");
3003     }
3004     if (classifier instanceof Drawable) {
3005       optionsText.append("-g\n");
3006       optionsText.append("\tOnly outputs the graph representation"
3007 	  + " of the classifier.\n");
3008     }
3009     optionsText.append("-xml filename | xml-string\n");
3010     optionsText.append("\tRetrieves the options from the XML-data instead of the " 
3011 	+ "command line.\n");
3012     optionsText.append("-threshold-file <file>\n");
3013     optionsText.append("\tThe file to save the threshold data to.\n"
3014 	+ "\tThe format is determined by the extensions, e.g., '.arff' for ARFF \n"
3015 	+ "\tformat or '.csv' for CSV.\n");
3016     optionsText.append("-threshold-label <label>\n");
3017     optionsText.append("\tThe class label to determine the threshold data for\n"
3018 	+ "\t(default is the first label)\n");
3019 
3020     // Get scheme-specific options
3021     if (classifier instanceof OptionHandler) {
3022       optionsText.append("\nOptions specific to "
3023 	  + classifier.getClass().getName()
3024 	  + ":\n\n");
3025       Enumeration enu = ((OptionHandler)classifier).listOptions();
3026       while (enu.hasMoreElements()) {
3027 	Option option = (Option) enu.nextElement();
3028 	optionsText.append(option.synopsis() + '\n');
3029 	optionsText.append(option.description() + "\n");
3030       }
3031     }
3032     return optionsText.toString();
3033   }
3034 
3035   /**
3036    * Method for generating indices for the confusion matrix.
3037    *
3038    * @param num 	integer to format
3039    * @param IDChars	the characters to use
3040    * @param IDWidth	the width of the entry
3041    * @return 		the formatted integer as a string
3042    */
3043   protected String num2ShortID(int num, char[] IDChars, int IDWidth) {
3044 
3045     char ID [] = new char [IDWidth];
3046     int i;
3047 
3048     for(i = IDWidth - 1; i >=0; i--) {
3049       ID[i] = IDChars[num % IDChars.length];
3050       num = num / IDChars.length - 1;
3051       if (num < 0) {
3052 	break;
3053       }
3054     }
3055     for(i--; i >= 0; i--) {
3056       ID[i] = ' ';
3057     }
3058 
3059     return new String(ID);
3060   }
3061 
3062   /**
3063    * Convert a single prediction into a probability distribution
3064    * with all zero probabilities except the predicted value which
3065    * has probability 1.0;
3066    *
3067    * @param predictedClass the index of the predicted class
3068    * @return the probability distribution
3069    */
3070   protected double [] makeDistribution(double predictedClass) {
3071 
3072     double [] result = new double [m_NumClasses];
3073     if (Instance.isMissingValue(predictedClass)) {
3074       return result;
3075     }
3076     if (m_ClassIsNominal) {
3077       result[(int)predictedClass] = 1.0;
3078     } else {
3079       result[0] = predictedClass;
3080     }
3081     return result;
3082   } 
3083 
3084   /**
3085    * Updates all the statistics about a classifiers performance for 
3086    * the current test instance.
3087    *
3088    * @param predictedDistribution the probabilities assigned to 
3089    * each class
3090    * @param instance the instance to be classified
3091    * @throws Exception if the class of the instance is not
3092    * set
3093    */
3094   protected void updateStatsForClassifier(double [] predictedDistribution,
3095       Instance instance)
3096   throws Exception {
3097 
3098     int actualClass = (int)instance.classValue();
3099 
3100     if (!instance.classIsMissing()) {
3101       updateMargins(predictedDistribution, actualClass, instance.weight());
3102 
3103       // Determine the predicted class (doesn't detect multiple 
3104       // classifications)
3105       int predictedClass = -1;
3106       double bestProb = 0.0;
3107       for(int i = 0; i < m_NumClasses; i++) {
3108 	if (predictedDistribution[i] > bestProb) {
3109 	  predictedClass = i;
3110 	  bestProb = predictedDistribution[i];
3111 	}
3112       }
3113 
3114       m_WithClass += instance.weight();
3115 
3116       // Determine misclassification cost
3117       if (m_CostMatrix != null) {
3118 	if (predictedClass < 0) {
3119 	  // For missing predictions, we assume the worst possible cost.
3120 	  // This is pretty harsh.
3121 	  // Perhaps we could take the negative of the cost of a correct
3122 	  // prediction (-m_CostMatrix.getElement(actualClass,actualClass)),
3123 	  // although often this will be zero
3124 	  m_TotalCost += instance.weight()
3125 	  * m_CostMatrix.getMaxCost(actualClass, instance);
3126 	} else {
3127 	  m_TotalCost += instance.weight() 
3128 	  * m_CostMatrix.getElement(actualClass, predictedClass,
3129 	      instance);
3130 	}
3131       }
3132 
3133       // Update counts when no class was predicted
3134       if (predictedClass < 0) {
3135 	m_Unclassified += instance.weight();
3136 	return;
3137       }
3138 
3139       double predictedProb = Math.max(MIN_SF_PROB,
3140 	  predictedDistribution[actualClass]);
3141       double priorProb = Math.max(MIN_SF_PROB,
3142 	  m_ClassPriors[actualClass]
3143 	                / m_ClassPriorsSum);
3144       if (predictedProb >= priorProb) {
3145 	m_SumKBInfo += (Utils.log2(predictedProb) - 
3146 	    Utils.log2(priorProb))
3147 	    * instance.weight();
3148       } else {
3149 	m_SumKBInfo -= (Utils.log2(1.0-predictedProb) - 
3150 	    Utils.log2(1.0-priorProb))
3151 	    * instance.weight();
3152       }
3153 
3154       m_SumSchemeEntropy -= Utils.log2(predictedProb) * instance.weight();
3155       m_SumPriorEntropy -= Utils.log2(priorProb) * instance.weight();
3156 
3157       updateNumericScores(predictedDistribution, 
3158 	  makeDistribution(instance.classValue()), 
3159 	  instance.weight());
3160 
3161       // Update other stats
3162       m_ConfusionMatrix[actualClass][predictedClass] += instance.weight();
3163       if (predictedClass != actualClass) {
3164 	m_Incorrect += instance.weight();
3165       } else {
3166 	m_Correct += instance.weight();
3167       }
3168     } else {
3169       m_MissingClass += instance.weight();
3170     }
3171   }
3172 
3173   /**
3174    * Updates all the statistics about a predictors performance for 
3175    * the current test instance.
3176    *
3177    * @param predictedValue the numeric value the classifier predicts
3178    * @param instance the instance to be classified
3179    * @throws Exception if the class of the instance is not
3180    * set
3181    */
3182   protected void updateStatsForPredictor(double predictedValue,
3183       Instance instance) 
3184   throws Exception {
3185 
3186     if (!instance.classIsMissing()){
3187 
3188       // Update stats
3189       m_WithClass += instance.weight();
3190       if (Instance.isMissingValue(predictedValue)) {
3191 	m_Unclassified += instance.weight();
3192 	return;
3193       }
3194       m_SumClass += instance.weight() * instance.classValue();
3195       m_SumSqrClass += instance.weight() * instance.classValue()
3196       *	instance.classValue();
3197       m_SumClassPredicted += instance.weight() 
3198       * instance.classValue() * predictedValue;
3199       m_SumPredicted += instance.weight() * predictedValue;
3200       m_SumSqrPredicted += instance.weight() * predictedValue * predictedValue;
3201 
3202       if (m_ErrorEstimator == null) {
3203 	setNumericPriorsFromBuffer();
3204       }
3205       double predictedProb = Math.max(m_ErrorEstimator.getProbability(
3206 	  predictedValue 
3207 	  - instance.classValue()),
3208 	  MIN_SF_PROB);
3209       double priorProb = Math.max(m_PriorErrorEstimator.getProbability(
3210 	  instance.classValue()),
3211 	  MIN_SF_PROB);
3212 
3213       m_SumSchemeEntropy -= Utils.log2(predictedProb) * instance.weight();
3214       m_SumPriorEntropy -= Utils.log2(priorProb) * instance.weight();
3215       m_ErrorEstimator.addValue(predictedValue - instance.classValue(), 
3216 	  instance.weight());
3217 
3218       updateNumericScores(makeDistribution(predictedValue),
3219 	  makeDistribution(instance.classValue()),
3220 	  instance.weight());
3221 
3222     } else
3223       m_MissingClass += instance.weight();
3224   }
3225 
3226   /**
3227    * Update the cumulative record of classification margins
3228    *
3229    * @param predictedDistribution the probability distribution predicted for
3230    * the current instance
3231    * @param actualClass the index of the actual instance class
3232    * @param weight the weight assigned to the instance
3233    */
3234   protected void updateMargins(double [] predictedDistribution, 
3235       int actualClass, double weight) {
3236 
3237     double probActual = predictedDistribution[actualClass];
3238     double probNext = 0;
3239 
3240     for(int i = 0; i < m_NumClasses; i++)
3241       if ((i != actualClass) &&
3242 	  (predictedDistribution[i] > probNext))
3243 	probNext = predictedDistribution[i];
3244 
3245     double margin = probActual - probNext;
3246     int bin = (int)((margin + 1.0) / 2.0 * k_MarginResolution);
3247     m_MarginCounts[bin] += weight;
3248   }
3249 
3250   /**
3251    * Update the numeric accuracy measures. For numeric classes, the
3252    * accuracy is between the actual and predicted class values. For 
3253    * nominal classes, the accuracy is between the actual and 
3254    * predicted class probabilities.
3255    *
3256    * @param predicted the predicted values
3257    * @param actual the actual value
3258    * @param weight the weight associated with this prediction
3259    */
3260   protected void updateNumericScores(double [] predicted, 
3261       double [] actual, double weight) {
3262 
3263     double diff;
3264     double sumErr = 0, sumAbsErr = 0, sumSqrErr = 0;
3265     double sumPriorAbsErr = 0, sumPriorSqrErr = 0;
3266     for(int i = 0; i < m_NumClasses; i++) {
3267       diff = predicted[i] - actual[i];
3268       sumErr += diff;
3269       sumAbsErr += Math.abs(diff);
3270       sumSqrErr += diff * diff;
3271       diff = (m_ClassPriors[i] / m_ClassPriorsSum) - actual[i];
3272       sumPriorAbsErr += Math.abs(diff);
3273       sumPriorSqrErr += diff * diff;
3274     }
3275     m_SumErr += weight * sumErr / m_NumClasses;
3276     m_SumAbsErr += weight * sumAbsErr / m_NumClasses;
3277     m_SumSqrErr += weight * sumSqrErr / m_NumClasses;
3278     m_SumPriorAbsErr += weight * sumPriorAbsErr / m_NumClasses;
3279     m_SumPriorSqrErr += weight * sumPriorSqrErr / m_NumClasses;
3280   }
3281 
3282   /**
3283    * Adds a numeric (non-missing) training class value and weight to 
3284    * the buffer of stored values.
3285    *
3286    * @param classValue the class value
3287    * @param weight the instance weight
3288    */
3289   protected void addNumericTrainClass(double classValue, double weight) {
3290 
3291     if (m_TrainClassVals == null) {
3292       m_TrainClassVals = new double [100];
3293       m_TrainClassWeights = new double [100];
3294     }
3295     if (m_NumTrainClassVals == m_TrainClassVals.length) {
3296       double [] temp = new double [m_TrainClassVals.length * 2];
3297       System.arraycopy(m_TrainClassVals, 0, 
3298 	  temp, 0, m_TrainClassVals.length);
3299       m_TrainClassVals = temp;
3300 
3301       temp = new double [m_TrainClassWeights.length * 2];
3302       System.arraycopy(m_TrainClassWeights, 0, 
3303 	  temp, 0, m_TrainClassWeights.length);
3304       m_TrainClassWeights = temp;
3305     }
3306     m_TrainClassVals[m_NumTrainClassVals] = classValue;
3307     m_TrainClassWeights[m_NumTrainClassVals] = weight;
3308     m_NumTrainClassVals++;
3309   }
3310 
3311   /**
3312    * Sets up the priors for numeric class attributes from the 
3313    * training class values that have been seen so far.
3314    */
3315   protected void setNumericPriorsFromBuffer() {
3316 
3317     double numPrecision = 0.01; // Default value
3318     if (m_NumTrainClassVals > 1) {
3319       double [] temp = new double [m_NumTrainClassVals];
3320       System.arraycopy(m_TrainClassVals, 0, temp, 0, m_NumTrainClassVals);
3321       int [] index = Utils.sort(temp);
3322       double lastVal = temp[index[0]];
3323       double deltaSum = 0;
3324       int distinct = 0;
3325       for (int i = 1; i < temp.length; i++) {
3326 	double current = temp[index[i]];
3327 	if (current != lastVal) {
3328 	  deltaSum += current - lastVal;
3329 	  lastVal = current;
3330 	  distinct++;
3331 	}
3332       }
3333       if (distinct > 0) {
3334 	numPrecision = deltaSum / distinct;
3335       }
3336     }
3337     m_PriorErrorEstimator = new KernelEstimator(numPrecision);
3338     m_ErrorEstimator = new KernelEstimator(numPrecision);
3339     m_ClassPriors[0] = m_ClassPriorsSum = 0;
3340     for (int i = 0; i < m_NumTrainClassVals; i++) {
3341       m_ClassPriors[0] += m_TrainClassVals[i] * m_TrainClassWeights[i];
3342       m_ClassPriorsSum += m_TrainClassWeights[i];
3343       m_PriorErrorEstimator.addValue(m_TrainClassVals[i],
3344 	  m_TrainClassWeights[i]);
3345     }
3346   }
3347 }