edu.stanford.nlp.maxent.iis
Class WekaProblemSolverCombinations

java.lang.Object
  |
  +--edu.stanford.nlp.maxent.iis.WekaProblemSolverCombinations

public class WekaProblemSolverCombinations
extends Object


Constructor Summary
WekaProblemSolverCombinations()
           
WekaProblemSolverCombinations(String wekaProbFile)
           
 
Method Summary
 void analyseFeatures(String kind)
           
 void buildClassifier(String trainFileName, int iters, double gaincutoff)
           
 void buildClassifierCrossValidation(String trainFileName, int iters, double gaincutoff)
           
 void buildClassifierValidation(String trainFileName, int iters, double gaincutoff)
           
 int getClassification(DataDouble d)
           
 int getClassificationVoting(DataDouble d, int topNo, boolean inTrain)
           
 int getClassificationWeightedVoting(DataDouble d, int topNo, boolean inTrain)
           
 double[] getPosteriors(DataDouble d)
           
 double[] getPosteriorsFN(DataDouble d)
           
 double[] getPosteriorsNom(DataDouble d)
           
 double getPrecision(int fNo)
           
 int[] getSortedAccuracy()
           
 boolean isOk(String key)
           
static void main(String[] args)
           Command-line entry point; options include -train, -gain, -support, -test, -iters, -binary, -validation, -clean, -ftNum, -no_sel, -usetop, -fixedtop, and -crossval (see the method detail below).
 void makeFeatures(String kind)
           
 void makeFeaturesAssociations()
           
 void printFeatures()
           
 void read(String filename)
           
 void readTrainingInstances(String wekaDataFile)
           
 void save(String filename)
           
 void selectClassifiers()
           
 void test(String fileName)
          The file is expected to be in Weka format; the class attribute may be missing.
 
Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
 

Constructor Detail

WekaProblemSolverCombinations

public WekaProblemSolverCombinations()

WekaProblemSolverCombinations

public WekaProblemSolverCombinations(String wekaProbFile)
Method Detail

readTrainingInstances

public void readTrainingInstances(String wekaDataFile)
                           throws Exception
Throws:
    Exception

makeFeatures

public void makeFeatures(String kind)

isOk

public boolean isOk(String key)

analyseFeatures

public void analyseFeatures(String kind)

getPrecision

public double getPrecision(int fNo)

makeFeaturesAssociations

public void makeFeaturesAssociations()

buildClassifier

public void buildClassifier(String trainFileName,
                            int iters,
                            double gaincutoff)
                     throws Exception
Throws:
    Exception

buildClassifierCrossValidation

public void buildClassifierCrossValidation(String trainFileName,
                                           int iters,
                                           double gaincutoff)
                                    throws Exception
Throws:
    Exception

buildClassifierValidation

public void buildClassifierValidation(String trainFileName,
                                      int iters,
                                      double gaincutoff)
                               throws Exception
Throws:
    Exception

test

public void test(String fileName)
The file is expected to be in Weka format; the class attribute may be missing.
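
A minimal usage sketch (assumed workflow; the file names, iteration count, and gain cutoff are illustrative values, not taken from this documentation): train a combination classifier from an ARFF file and then run it over a test file in Weka format.

    import edu.stanford.nlp.maxent.iis.WekaProblemSolverCombinations;

    public class WPSCExample {
        public static void main(String[] args) throws Exception {
            // Illustrative sketch: "train.arff", "test.arff", 100 and 0.5 are assumed values.
            WekaProblemSolverCombinations solver = new WekaProblemSolverCombinations();
            // buildClassifier(trainFileName, iters, gaincutoff) as declared above; throws Exception.
            solver.buildClassifier("train.arff", 100, 0.5);
            // test() expects a file in Weka format; the class attribute may be missing.
            solver.test("test.arff");
        }
    }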


getClassification

public int getClassification(DataDouble d)

getClassificationVoting

public int getClassificationVoting(DataDouble d,
                                   int topNo,
                                   boolean inTrain)

getClassificationWeightedVoting

public int getClassificationWeightedVoting(DataDouble d,
                                           int topNo,
                                           boolean inTrain)

getPosteriorsFN

public double[] getPosteriorsFN(DataDouble d)

getPosteriors

public double[] getPosteriors(DataDouble d)

getPosteriorsNom

public double[] getPosteriorsNom(DataDouble d)

main

public static void main(String[] args)
Parameters:
    -train trainFileArff  (training will be performed)
    -gain double  (the gain cutoff)
    -support int  (the minimum number of times a feature must appear to be included)
    -test trainFile testFile
    -iters numIterations  (iterative scaling iterations)
    -binary  (for binary attributes, add features only for the value 1)
    -validation  (use cross-validation to select features)
    -clean  (in testing, print only one classification per line)
    -ftNum [numFeatures]  (the maximum number of features)
    -no_sel  (do not do feature selection)
    -usetop [numTop]  (use only the top numTop classifiers)
    -fixedtop  (do not select the number of classifiers to include; use the number specified)
    -crossval  (use cross-validation to choose the optimal number of classifiers to combine)
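
A sketch of invoking the entry point programmatically with these options. The flag names come from the parameter list above; the values and flag ordering are assumptions for illustration only.

    public class WPSCCommandLine {
        public static void main(String[] args) {
            // Roughly equivalent to the command line:
            //   java edu.stanford.nlp.maxent.iis.WekaProblemSolverCombinations
            //        -train train.arff -gain 0.5 -support 5 -iters 100 -validation
            // File name and numeric values are illustrative, not defaults of the class.
            edu.stanford.nlp.maxent.iis.WekaProblemSolverCombinations.main(new String[] {
                "-train", "train.arff",
                "-gain", "0.5",
                "-support", "5",
                "-iters", "100",
                "-validation"
            });
        }
    }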


save

public void save(String filename)

read

public void read(String filename)

getSortedAccuracy

public int[] getSortedAccuracy()

selectClassifiers

public void selectClassifiers()

printFeatures

public void printFeatures()


Stanford NLP Group