Example usage for weka.core Instances classIndex

List of usage examples for weka.core Instances classIndex

Introduction

On this page you can find usage examples for the weka.core Instances classIndex() method.

Prototype


public int classIndex()

Document

Returns the class attribute's index, or a negative number (-1) if the class attribute is undefined.
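
The examples below share a common pattern: load a dataset, check whether a class attribute is already defined via classIndex(), and set one if it is not. The following is a minimal, self-contained sketch of that pattern, assuming a placeholder file path:

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassIndexExample {
    public static void main(String[] args) throws Exception {
        // load a dataset; the path is only a placeholder
        DataSource source = new DataSource("data/iris.arff");
        Instances data = source.getDataSet();

        // classIndex() returns -1 when the data format does not declare a class
        // attribute (e.g. plain ARFF files); fall back to the last attribute
        if (data.classIndex() == -1)
            data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Class attribute index: " + data.classIndex());
        System.out.println("Class attribute name: " + data.classAttribute().name());
    }
}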

Usage

From source file:MultiClassClassifier.java

License:Open Source License

/**
 * Builds the classifiers.
 *
 * @param insts the training data.
 * @throws Exception if a classifier can't be built
 */
public void buildClassifier(Instances insts) throws Exception {

    Instances newInsts;

    // can classifier handle the data?
    getCapabilities().testWithFail(insts);

    // remove instances with missing class
    insts = new Instances(insts);
    insts.deleteWithMissingClass();

    if (m_Classifier == null) {
        throw new Exception("No base classifier has been set!");
    }
    m_ZeroR = new ZeroR();
    m_ZeroR.buildClassifier(insts);

    m_TwoClassDataset = null;

    int numClassifiers = insts.numClasses();
    if (numClassifiers <= 2) {

        m_Classifiers = Classifier.makeCopies(m_Classifier, 1);
        m_Classifiers[0].buildClassifier(insts);

        m_ClassFilters = null;

    } else if (m_Method == METHOD_1_AGAINST_1) {
        // generate fastvector of pairs
        FastVector pairs = new FastVector();
        for (int i = 0; i < insts.numClasses(); i++) {
            for (int j = 0; j < insts.numClasses(); j++) {
                if (j <= i)
                    continue;
                int[] pair = new int[2];
                pair[0] = i;
                pair[1] = j;
                pairs.addElement(pair);
            }
        }

        numClassifiers = pairs.size();
        m_Classifiers = Classifier.makeCopies(m_Classifier, numClassifiers);
        m_ClassFilters = new Filter[numClassifiers];
        m_SumOfWeights = new double[numClassifiers];

        // generate the classifiers
        for (int i = 0; i < numClassifiers; i++) {
            RemoveWithValues classFilter = new RemoveWithValues();
            classFilter.setAttributeIndex("" + (insts.classIndex() + 1));
            classFilter.setModifyHeader(true);
            classFilter.setInvertSelection(true);
            classFilter.setNominalIndicesArr((int[]) pairs.elementAt(i));
            Instances tempInstances = new Instances(insts, 0);
            tempInstances.setClassIndex(-1);
            classFilter.setInputFormat(tempInstances);
            newInsts = Filter.useFilter(insts, classFilter);
            if (newInsts.numInstances() > 0) {
                newInsts.setClassIndex(insts.classIndex());
                m_Classifiers[i].buildClassifier(newInsts);
                m_ClassFilters[i] = classFilter;
                m_SumOfWeights[i] = newInsts.sumOfWeights();
            } else {
                m_Classifiers[i] = null;
                m_ClassFilters[i] = null;
            }
        }

        // construct a two-class header version of the dataset
        m_TwoClassDataset = new Instances(insts, 0);
        int classIndex = m_TwoClassDataset.classIndex();
        m_TwoClassDataset.setClassIndex(-1);
        m_TwoClassDataset.deleteAttributeAt(classIndex);
        FastVector classLabels = new FastVector();
        classLabels.addElement("class0");
        classLabels.addElement("class1");
        m_TwoClassDataset.insertAttributeAt(new Attribute("class", classLabels), classIndex);
        m_TwoClassDataset.setClassIndex(classIndex);

    } else {
        // use error correcting code style methods
        Code code = null;
        switch (m_Method) {
        case METHOD_ERROR_EXHAUSTIVE:
            code = new ExhaustiveCode(numClassifiers);
            break;
        case METHOD_ERROR_RANDOM:
            code = new RandomCode(numClassifiers, (int) (numClassifiers * m_RandomWidthFactor), insts);
            break;
        case METHOD_1_AGAINST_ALL:
            code = new StandardCode(numClassifiers);
            break;
        default:
            throw new Exception("Unrecognized correction code type");
        }
        numClassifiers = code.size();
        m_Classifiers = Classifier.makeCopies(m_Classifier, numClassifiers);
        m_ClassFilters = new MakeIndicator[numClassifiers];
        for (int i = 0; i < m_Classifiers.length; i++) {
            m_ClassFilters[i] = new MakeIndicator();
            MakeIndicator classFilter = (MakeIndicator) m_ClassFilters[i];
            classFilter.setAttributeIndex("" + (insts.classIndex() + 1));
            classFilter.setValueIndices(code.getIndices(i));
            classFilter.setNumeric(false);
            classFilter.setInputFormat(insts);
            newInsts = Filter.useFilter(insts, m_ClassFilters[i]);
            m_Classifiers[i].buildClassifier(newInsts);
        }
    }
    m_ClassAttribute = insts.classAttribute();
}

From source file:task2.java

/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {
        /* TODO output your page here. You may use following sample code. */
        out.println("<!DOCTYPE html>");
        out.println("<html>");
        out.println("<head>");
        out.println("<title>Servlet selection</title>");
        out.println("</head>");
        out.println("<body>");
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File("C:/Users//Raguvinoth/Desktop/5339.csv"));
        Instances data = loader.getDataSet();

        //Save ARFF
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File("C:/Users/Raguvinoth/Desktop/5339_converted.arff"));
        saver.writeBatch();

        BufferedReader reader = new BufferedReader(
                new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_nominal.arff"));
        Instances data1 = new Instances(reader);

        if (data1.classIndex() == -1)
            data1.setClassIndex(data1.numAttributes() - 14);
        // 1. meta-classifier
        // useClassifier(data);

        // 2. AttributeSelector
        try {
            AttributeSelection attsel = new AttributeSelection();
            GreedyStepwise search = new GreedyStepwise();
            CfsSubsetEval eval = new CfsSubsetEval();
            attsel.setEvaluator(eval);
            attsel.setSearch(search);
            attsel.SelectAttributes(data);
            int[] indices = attsel.selectedAttributes();

            System.out.println("selected attribute indices:\n" + Utils.arrayToString(indices));
            System.out.println("\n 4. Linear-Regression on above selected attributes");
            long time1 = System.currentTimeMillis();
            long sec1 = time1 / 1000;
            BufferedReader reader1 = new BufferedReader(
                    new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_linear2.arff"));
            Instances data2 = new Instances(reader1);
            data2.setClassIndex(0);
            LinearRegression lr = new LinearRegression();
            lr.buildClassifier(data2);

            System.out.println(lr.toString());
            long time2 = System.currentTimeMillis();
            long sec2 = time2 / 1000;
            long timeTaken = sec2 - sec1;
            System.out.println("Total time taken for building the model: " + timeTaken + " seconds");

            for (int i = 0; i < Math.min(5, indices.length); i++) {
                out.println("<p>" + "selected attribute index: " + indices[i] + "</p>");
            }
            out.println("<p>" + "\n 4. Linear-Regression on above selected attributes" + "</p>");
            out.println("<p>" + lr.toString() + "</p>");
            out.println("<p>" + "Total time taken for building the model: " + timeTaken + " seconds" + "</p>");
            out.println("</body>");
            out.println("</html>");
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

From source file:GrowTree.java

public static void main(String[] args) throws Exception {
    runClassifier(new GrowTree(), args);
    DataSource source = new DataSource(
            "F:\\backup\\BTH\\#6DV2542 Machine Learning\\WEKA experiments\\UCI\\iris.arff");
    Instances data = source.getDataSet();
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);
}

From source file:SMO.java

License:Open Source License

/**
 * Method for building the classifier. Implements a one-against-one
 * wrapper for multi-class problems.
 *
 * @param insts the set of training instances
 * @throws Exception if the classifier can't be built successfully
 */
public void buildClassifier(Instances insts) throws Exception {

    if (!m_checksTurnedOff) {
        // can classifier handle the data?
        getCapabilities().testWithFail(insts);

        // remove instances with missing class
        insts = new Instances(insts);
        insts.deleteWithMissingClass();

        /* Removes all the instances with weight equal to 0.
         MUST be done since condition (8) of Keerthi's paper 
         is made with the assertion Ci > 0 (see equation (3a)). */
        Instances data = new Instances(insts, insts.numInstances());
        for (int i = 0; i < insts.numInstances(); i++) {
            if (insts.instance(i).weight() > 0)
                data.add(insts.instance(i));
        }
        if (data.numInstances() == 0) {
            throw new Exception("No training instances left after removing " + "instances with weight 0!");
        }
        insts = data;
    }

    if (!m_checksTurnedOff) {
        m_Missing = new ReplaceMissingValues();
        m_Missing.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Missing);
    } else {
        m_Missing = null;
    }

    if (getCapabilities().handles(Capability.NUMERIC_ATTRIBUTES)) {
        boolean onlyNumeric = true;
        if (!m_checksTurnedOff) {
            for (int i = 0; i < insts.numAttributes(); i++) {
                if (i != insts.classIndex()) {
                    if (!insts.attribute(i).isNumeric()) {
                        onlyNumeric = false;
                        break;
                    }
                }
            }
        }

        if (!onlyNumeric) {
            m_NominalToBinary = new NominalToBinary();
            m_NominalToBinary.setInputFormat(insts);
            insts = Filter.useFilter(insts, m_NominalToBinary);
        } else {
            m_NominalToBinary = null;
        }
    } else {
        m_NominalToBinary = null;
    }

    if (m_filterType == FILTER_STANDARDIZE) {
        m_Filter = new Standardize();
        m_Filter.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Filter);
    } else if (m_filterType == FILTER_NORMALIZE) {
        m_Filter = new Normalize();
        m_Filter.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Filter);
    } else {
        m_Filter = null;
    }

    m_classIndex = insts.classIndex();
    m_classAttribute = insts.classAttribute();
    m_KernelIsLinear = (m_kernel instanceof PolyKernel) && (((PolyKernel) m_kernel).getExponent() == 1.0);

    // Generate subsets representing each class
    Instances[] subsets = new Instances[insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
        subsets[i] = new Instances(insts, insts.numInstances());
    }
    for (int j = 0; j < insts.numInstances(); j++) {
        Instance inst = insts.instance(j);
        subsets[(int) inst.classValue()].add(inst);
    }
    for (int i = 0; i < insts.numClasses(); i++) {
        subsets[i].compactify();
    }

    // Build the binary classifiers
    Random rand = new Random(m_randomSeed);
    m_classifiers = new BinarySMO[insts.numClasses()][insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
        for (int j = i + 1; j < insts.numClasses(); j++) {
            m_classifiers[i][j] = new BinarySMO();
            m_classifiers[i][j].setKernel(Kernel.makeCopy(getKernel()));
            Instances data = new Instances(insts, insts.numInstances());
            for (int k = 0; k < subsets[i].numInstances(); k++) {
                data.add(subsets[i].instance(k));
            }
            for (int k = 0; k < subsets[j].numInstances(); k++) {
                data.add(subsets[j].instance(k));
            }
            data.compactify();
            data.randomize(rand);
            m_classifiers[i][j].buildClassifier(data, i, j, m_fitLogisticModels, m_numFolds, m_randomSeed);
        }
    }
}

From source file:RunBestFirstSearch.java

License:Open Source License

public static void main(String[] arg) throws Exception {
    // Load data.
    //
    System.out.println("\nLoading sample file...");
    DataSource source = new DataSource(arg[0]);
    Instances data = source.getDataSet();

    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    int n = Integer.parseInt(arg[1]);

    System.out.println("Instance with " + n + " features!");
    System.out.println("\nRunning BFS algorithm with CFS cost function...");

    // Run feature selection algorithm BFS using CFS cost function.
    //
    runAttributeSelection(data, n);
}

From source file:MPCKMeans.java

License:Open Source License

/** Sets training instances */
public void setInstances(Instances instances) {
    m_Instances = instances;

    // create the checksum coefficients
    m_checksumCoeffs = new double[instances.numAttributes()];
    for (int i = 0; i < m_checksumCoeffs.length; i++) {
        m_checksumCoeffs[i] = m_RandomNumberGenerator.nextDouble();
    }

    // hash the instance checksums
    m_checksumHash = new HashMap(instances.numInstances());
    int classIdx = instances.classIndex();
    for (int i = 0; i < instances.numInstances(); i++) {
        Instance instance = instances.instance(i);
        double[] values = instance.toDoubleArray();
        double checksum = 0;

        for (int j = 0; j < values.length; j++) {
            if (j != classIdx) {
                checksum += m_checksumCoeffs[j] * values[j];
            }
        }

        // take care of chaining
        Object list = m_checksumHash.get(new Double((float) checksum));
        ArrayList idxList = null;
        if (list == null) {
            idxList = new ArrayList();
            m_checksumHash.put(new Double((float) checksum), idxList);
        } else { // chaining
            idxList = (ArrayList) list;
        }
        idxList.add(new Integer(i));
    }
}

From source file:ab.demo.AIAssignment2.java

License:Open Source License

public GameState solve() {

    // capture Image
    BufferedImage screenshot = ActionRobot.doScreenShot();

    // process image
    Vision vision = new Vision(screenshot);

    // find the slingshot
    Rectangle sling = vision.findSlingshotMBR();

    // confirm the slingshot
    while (sling == null && aRobot.getState() == GameState.PLAYING) {
        System.out.println("No slingshot detected. Please remove pop up or zoom out");
        ActionRobot.fullyZoomOut();
        screenshot = ActionRobot.doScreenShot();
        vision = new Vision(screenshot);
        sling = vision.findSlingshotMBR();
    }

    // get all the pigs
    List<ABObject> pigs = vision.findPigsMBR();
    List<ABObject> blocks = vision.findBlocksMBR();

    GameState state = aRobot.getState();

    // if there is a sling, then play, otherwise just skip.
    if (sling != null) {

        if (!pigs.isEmpty()) { //if there are pigs in the level

            Point releasePoint = null;
            Shot shot = new Shot();
            int dx, dy;
            {
                //random pick up a pig
                ABObject pig = pigs.get(randomGenerator.nextInt(pigs.size()));
                Point _tpt = pig.getCenter();

                // estimate the trajectory
                ArrayList<Point> pts = tp.estimateLaunchPoint(sling, _tpt);

                //define all of the wood, ice and stone in the stage
                ArrayList<ABObject> wood = new ArrayList<ABObject>();
                ArrayList<ABObject> stone = new ArrayList<ABObject>();
                ArrayList<ABObject> ice = new ArrayList<ABObject>();
                ArrayList<ABObject> tnt = new ArrayList<ABObject>();

                //initialise counters to store how many times the trajectory intersects blocks of these types
                int woodCount = 0;
                int stoneCount = 0;
                int iceCount = 0;
                int pigsCount = 0;
                int tntCount = 0;

                //populate the wood, stone and ice ArrayLists with the correct materials
                for (int i = 0; i < blocks.size(); i++) {
                    if (blocks.get(i).type == ABType.Wood)
                        wood.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Stone)
                        stone.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Ice)
                        ice.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.TNT)
                        tnt.add(blocks.get(i));
                }

                //check whether the trajectory intersects any wood blocks
                for (int i = 0; i < wood.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (wood.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some wood");
                            woodCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for wood on the trajectory (escape for loop)
                    }
                }

                //check whether the trajectory intersects any ice blocks
                for (int i = 0; i < ice.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (ice.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some ice");
                            iceCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for ice on the trajectory (escape for loop)
                    }
                }

                //check whether the trajectory intersects any stone blocks            
                for (int i = 0; i < stone.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (stone.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some stone");
                            stoneCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for stone on the trajectory (escape for loop)
                    }
                }

                //how many pigs the trajectory intersects (this should always be at least 1)         
                for (int i = 0; i < pigs.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (pigs.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects a pig");
                            pigsCount++;
                        }
                    }
                }

                //how many tnt blocks the trajectory intersects            
                for (int i = 0; i < tnt.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (tnt.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some tnt");
                            tntCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for tnt on the trajectory
                    }
                }

                StringBuilder sb = new StringBuilder();
                sb.append(pigsCount + "," + woodCount + "," + iceCount + "," + stoneCount + "," + tntCount
                        + ",?");
                String dataEntry = sb.toString();
                try (PrintWriter out = new PrintWriter(
                        new BufferedWriter(new FileWriter("dataset/birds.level.arff", true)))) {
                    out.println(dataEntry);
                } catch (IOException e) {
                    System.out.println("Error - dataset/birds.level.arff file not found or in use!");
                }

                //indicator of if the agent should continue this shot or not (used in the bayes classifier)
                ArrayList<Boolean> takeShot = new ArrayList<Boolean>();

                try {
                    DataSource source = new DataSource("dataset/birds.data.arff"); //initialise the learning set for the agent
                    Instances data = source.getDataSet();

                    // setting class attribute if the data format does not provide this information
                    // For example, the XRFF format saves the class attribute information as well
                    if (data.classIndex() == -1)
                        data.setClassIndex(data.numAttributes() - 1);

                    DataSource thisLevel = new DataSource("dataset/birds.level.arff"); //initialise the data retrieved from the current level for the agent
                    Instances thisLevelData = thisLevel.getDataSet();
                    if (thisLevelData.classIndex() == -1)
                        thisLevelData.setClassIndex(thisLevelData.numAttributes() - 1);

                    //build a new NaiveBayes classifier
                    NaiveBayes bayes = new NaiveBayes();
                    bayes.buildClassifier(data);

                    for (int i = 0; i < thisLevelData.numInstances(); i++) { //for all instances in the current level
                        double label = bayes.classifyInstance(thisLevelData.instance(i)); //generate an outcome/classify an instance in the current level
                        thisLevelData.instance(i).setClassValue(label); //store this outcome
                        System.out.println(thisLevelData.instance(i).stringValue(5)); //print it
                        if (!thisLevelData.instance(i).stringValue(5).equals("?")) { //if there is a decisive choice as to whether a shot should be taken
                            data.add(thisLevelData.instance(i)); //store it
                            if (thisLevelData.instance(i).stringValue(5).equals("yes")) {//if the classifier classifies a good shot, store it
                                takeShot.add(true);
                            } else { //if no, store this too
                                takeShot.add(false);
                            }
                        }
                    }

                    //add all non ? entries to the learning set
                    BufferedWriter writer = new BufferedWriter(new FileWriter("dataset/birds.data.arff"));
                    writer.write(data.toString());
                    writer.flush();
                    writer.close();

                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("Exception caught - file handle error");
                }

                //TODO: roll a random number to determine whether we take a shot or not.
                //populated using the bayesian classification above.
                //if we roll true, continue with the random pig shot as usual.
                //if not, take a new random pig and try again.
                //TODO: implement a failsafe so the agent does not get stuck randomly choosing pigs which the bayesian classification does not allow.
                Random rng = new Random(takeShot.size());
                if (takeShot.get(rng.nextInt(takeShot.size())))
                    System.out.println("Taking this shot.");
                else {
                    System.out.println("Not taking this shot. Finding another random pig.");
                    return state;
                }

                // if the target is very close to before, randomly choose a
                // point near it
                if (prevTarget != null && distance(prevTarget, _tpt) < 10) {
                    double _angle = randomGenerator.nextDouble() * Math.PI * 2;
                    _tpt.x = _tpt.x + (int) (Math.cos(_angle) * 10);
                    _tpt.y = _tpt.y + (int) (Math.sin(_angle) * 10);
                    System.out.println("Randomly changing to " + _tpt);
                }

                prevTarget = new Point(_tpt.x, _tpt.y);

                // do a high shot when entering a level to find an accurate velocity
                if (firstShot && pts.size() > 1) {
                    releasePoint = pts.get(1);
                } else if (pts.size() == 1)
                    releasePoint = pts.get(0);
                else if (pts.size() == 2) {
                    // randomly choose between the trajectories, with a 1 in
                    // 6 chance of choosing the high one
                    if (randomGenerator.nextInt(6) == 0)
                        releasePoint = pts.get(1);
                    else
                        releasePoint = pts.get(0);
                } else if (pts.isEmpty()) {
                    System.out.println("No release point found for the target");
                    System.out.println("Try a shot with 45 degree");
                    releasePoint = tp.findReleasePoint(sling, Math.PI / 4);
                }

                // Get the reference point
                Point refPoint = tp.getReferencePoint(sling);

                //Calculate the tapping time according the bird type 
                if (releasePoint != null) {
                    double releaseAngle = tp.getReleaseAngle(sling, releasePoint);
                    System.out.println("Release Point: " + releasePoint);
                    System.out.println("Release Angle: " + Math.toDegrees(releaseAngle));
                    int tapInterval = 0;
                    switch (aRobot.getBirdTypeOnSling()) {

                    case RedBird:
                        tapInterval = 0;
                        break; // start of trajectory
                    case YellowBird:
                        tapInterval = 65 + randomGenerator.nextInt(25);
                        break; // 65-90% of the way
                    case WhiteBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlackBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlueBird:
                        tapInterval = 65 + randomGenerator.nextInt(20);
                        break; // 65-85% of the way
                    default:
                        tapInterval = 60;
                    }

                    int tapTime = tp.getTapTime(sling, releasePoint, _tpt, tapInterval);
                    dx = (int) releasePoint.getX() - refPoint.x;
                    dy = (int) releasePoint.getY() - refPoint.y;
                    shot = new Shot(refPoint.x, refPoint.y, dx, dy, 0, tapTime);
                } else {
                    System.err.println("No Release Point Found");
                    return state;
                }
            }

            // check whether the slingshot is changed. the change of the slingshot indicates a change in the scale.
            {
                ActionRobot.fullyZoomOut();
                screenshot = ActionRobot.doScreenShot();
                vision = new Vision(screenshot);
                Rectangle _sling = vision.findSlingshotMBR();
                if (_sling != null) {
                    double scale_diff = Math.pow((sling.width - _sling.width), 2)
                            + Math.pow((sling.height - _sling.height), 2);
                    if (scale_diff < 25) {
                        if (dx < 0) {
                            aRobot.cshoot(shot);
                            state = aRobot.getState();
                            if (state == GameState.PLAYING) {
                                screenshot = ActionRobot.doScreenShot();
                                vision = new Vision(screenshot);
                                List<Point> traj = vision.findTrajPoints();
                                tp.adjustTrajectory(traj, sling, releasePoint);
                                firstShot = false;
                            }
                        }
                    } else
                        System.out.println(
                                "Scale is changed, cannot execute the shot, will re-segment the image");
                } else
                    System.out.println(
                            "no sling detected, cannot execute the shot, will re-segment the image");
            }

        }

    }
    return state;
}

From source file:adams.data.conversion.WekaInstancesToSpreadSheet.java

License:Open Source License

/**
 * Performs the actual conversion.
 *
 * @return      the converted data
 * @throws Exception   if something goes wrong with the conversion
 */
@Override
protected Object doConvert() throws Exception {
    SpreadSheet result;
    Instances data;
    Row row;
    int i;
    int n;
    String str;

    data = (Instances) m_Input;

    // special case for InstancesViews
    if (m_SpreadSheetType instanceof InstancesView) {
        result = new InstancesView((Instances) m_Input);
        return result;
    }

    // create header
    result = m_SpreadSheetType.newInstance();
    result.setDataRowClass(m_DataRowType.getClass());
    row = result.getHeaderRow();
    for (n = 0; n < data.numAttributes(); n++)
        row.addCell("" + n).setContent(data.attribute(n).name());
    if (result instanceof Dataset) {
        if (data.classIndex() != -1)
            ((Dataset) result).setClassAttribute(data.classIndex(), true);
    }

    // fill spreadsheet
    for (i = 0; i < data.numInstances(); i++) {
        row = result.addRow("" + i);

        for (n = 0; n < data.numAttributes(); n++) {
            if (data.instance(i).isMissing(n))
                continue;
            if (data.attribute(n).type() == Attribute.DATE) {
                row.addCell("" + n).setContent(new DateTimeMsec(new Date((long) data.instance(i).value(n))));
            } else if (data.attribute(n).type() == Attribute.NUMERIC) {
                row.addCell("" + n).setContent(data.instance(i).value(n));
            } else {
                str = data.instance(i).stringValue(n);
                if (str.equals(SpreadSheet.MISSING_VALUE))
                    row.addCell("" + n).setContentAsString("'" + str + "'");
                else
                    row.addCell("" + n).setContentAsString(str);
            }
        }
    }

    return result;
}

From source file:adams.data.instancesanalysis.PCA.java

License:Open Source License

/**
 * Performs the actual analysis.
 *
 * @param data   the data to analyze
 * @return      null if successful, otherwise error message
 * @throws Exception   if analysis fails
 */
@Override
protected String doAnalyze(Instances data) throws Exception {
    String result;
    Remove remove;
    PublicPrincipalComponents pca;
    int i;
    Capabilities caps;
    PartitionedMultiFilter2 part;
    Range rangeUnsupported;
    Range rangeSupported;
    TIntList listNominal;
    Range rangeNominal;
    ArrayList<ArrayList<Double>> coeff;
    Instances filtered;
    SpreadSheet transformed;
    WekaInstancesToSpreadSheet conv;
    String colName;

    result = null;
    m_Loadings = null;
    m_Scores = null;

    if (!m_AttributeRange.isAllRange()) {
        if (isLoggingEnabled())
            getLogger().info("Filtering attribute range: " + m_AttributeRange.getRange());
        remove = new Remove();
        remove.setAttributeIndicesArray(m_AttributeRange.getIntIndices());
        remove.setInvertSelection(true);
        remove.setInputFormat(data);
        data = Filter.useFilter(data, remove);
    }
    if (isLoggingEnabled())
        getLogger().info("Performing PCA...");

    listNominal = new TIntArrayList();
    if (m_SkipNominal) {
        for (i = 0; i < data.numAttributes(); i++) {
            if (i == data.classIndex())
                continue;
            if (data.attribute(i).isNominal())
                listNominal.add(i);
        }
    }

    // check for unsupported attributes
    caps = new PublicPrincipalComponents().getCapabilities();
    m_Supported = new TIntArrayList();
    m_Unsupported = new TIntArrayList();
    for (i = 0; i < data.numAttributes(); i++) {
        if (!caps.test(data.attribute(i)) || (i == data.classIndex()) || (listNominal.contains(i)))
            m_Unsupported.add(i);
        else
            m_Supported.add(i);
    }
    data.setClassIndex(-1);

    m_NumAttributes = m_Supported.size();

    // the principal components will delete the attributes without any distinct values.
    // this checks which attributes will be kept.
    m_Kept = new ArrayList<>();
    for (i = 0; i < m_Supported.size(); i++) {
        if (data.numDistinctValues(m_Supported.get(i)) > 1)
            m_Kept.add(m_Supported.get(i));
    }

    // build a model using the PublicPrincipalComponents
    pca = new PublicPrincipalComponents();
    pca.setMaximumAttributes(m_MaxAttributes);
    pca.setVarianceCovered(m_Variance);
    pca.setMaximumAttributeNames(m_MaxAttributeNames);
    part = null;
    if (m_Unsupported.size() > 0) {
        rangeUnsupported = new Range();
        rangeUnsupported.setMax(data.numAttributes());
        rangeUnsupported.setIndices(m_Unsupported.toArray());
        rangeSupported = new Range();
        rangeSupported.setMax(data.numAttributes());
        rangeSupported.setIndices(m_Supported.toArray());
        part = new PartitionedMultiFilter2();
        part.setFilters(new Filter[] { pca, new AllFilter(), });
        part.setRanges(new weka.core.Range[] { new weka.core.Range(rangeSupported.getRange()),
                new weka.core.Range(rangeUnsupported.getRange()), });
    }
    try {
        if (part != null)
            part.setInputFormat(data);
        else
            pca.setInputFormat(data);
    } catch (Exception e) {
        result = Utils.handleException(this, "Failed to set data format", e);
    }

    transformed = null;
    if (result == null) {
        try {
            if (part != null)
                filtered = weka.filters.Filter.useFilter(data, part);
            else
                filtered = weka.filters.Filter.useFilter(data, pca);
        } catch (Exception e) {
            result = Utils.handleException(this, "Failed to apply filter", e);
            filtered = null;
        }
        if (filtered != null) {
            conv = new WekaInstancesToSpreadSheet();
            conv.setInput(filtered);
            result = conv.convert();
            if (result == null) {
                transformed = (SpreadSheet) conv.getOutput();
                // shorten column names again
                if (part != null) {
                    for (i = 0; i < transformed.getColumnCount(); i++) {
                        colName = transformed.getColumnName(i);
                        colName = colName.replaceFirst("filtered-[0-9]*-", "");
                        transformed.getHeaderRow().getCell(i).setContentAsString(colName);
                    }
                }
            }
        }
    }

    if (result == null) {
        // get the coefficients from the filter
        m_Scores = transformed;
        coeff = pca.getCoefficients();
        m_Loadings = extractLoadings(data, coeff);
        m_Loadings.setName("Loadings for " + data.relationName());
    }

    return result;
}

From source file:adams.data.instancesanalysis.pls.AbstractSingleClassPLS.java

License:Open Source License

/**
 * Preprocesses the data.
 *
 * @param instances the data to process
 * @return the preprocessed data
 */
protected Instances preTransform(Instances instances, Map<String, Object> params) throws Exception {
    double[] classValues;

    switch (m_PredictionType) {
    case ALL:
        classValues = null;
        break;
    default:
        classValues = instances.attributeToDoubleArray(instances.classIndex());
    }

    if (classValues != null)
        params.put(PARAM_CLASSVALUES, classValues);

    if (!isInitialized()) {
        if (m_ReplaceMissing) {
            m_Missing = new ReplaceMissingValues();
            m_Missing.setInputFormat(instances);
        } else {
            m_Missing = null;
        }

        switch (m_PreprocessingType) {
        case CENTER:
            m_ClassMean = instances.meanOrMode(instances.classIndex());
            m_ClassStdDev = 1;
            m_Filter = new Center();
            ((Center) m_Filter).setIgnoreClass(true);
            break;
        case STANDARDIZE:
            m_ClassMean = instances.meanOrMode(instances.classIndex());
            m_ClassStdDev = StrictMath.sqrt(instances.variance(instances.classIndex()));
            m_Filter = new Standardize();
            ((Standardize) m_Filter).setIgnoreClass(true);
            break;
        case NONE:
            m_ClassMean = 0;
            m_ClassStdDev = 1;
            m_Filter = null;
            break;
        default:
            throw new IllegalStateException("Unhandled preprocessing type; " + m_PreprocessingType);
        }
        if (m_Filter != null)
            m_Filter.setInputFormat(instances);
    }

    // filter data
    if (m_Missing != null)
        instances = Filter.useFilter(instances, m_Missing);
    if (m_Filter != null)
        instances = Filter.useFilter(instances, m_Filter);

    return instances;
}