Example usage for weka.core Instances enumerateAttributes

List of usage examples for weka.core Instances enumerateAttributes

Introduction

On this page you can find example usage for weka.core Instances enumerateAttributes.

Prototype

public Enumeration<Attribute> enumerateAttributes()

Document

Returns an enumeration of all the attributes; the class attribute (if set) is skipped by this enumeration.
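
Before the usage examples below, here is a minimal, self-contained sketch of the basic call pattern. It is not taken from the source files that follow; the data file name ("iris.arff") and the choice of the last attribute as the class are placeholder assumptions.

import java.util.Enumeration;

import weka.core.Attribute;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EnumerateAttributesDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset; the file name is a placeholder assumption.
        Instances data = DataSource.read("iris.arff");
        // Assume the last attribute is the class attribute.
        data.setClassIndex(data.numAttributes() - 1);

        // Iterate over all attributes; the class attribute (if set) is skipped.
        Enumeration<Attribute> attributes = data.enumerateAttributes();
        while (attributes.hasMoreElements()) {
            Attribute att = attributes.nextElement();
            System.out.println(att.name() + " (index " + att.index() + ")");
        }
    }
}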

Usage

From source file:org.wkwk.classifier.MyC45.java

public Instances prune(Instances data) throws Exception {
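    // Collect the 1-based indices of attributes whose gain measure is below 1.0,
    // then remove them from the data via a comma-separated index list.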
    ArrayList<Integer> unsignificantAttr = new ArrayList<>();
    Enumeration attEnum = data.enumerateAttributes();
    while (attEnum.hasMoreElements()) {
        Attribute att = (Attribute) attEnum.nextElement();
        double currentGainRatio;

        if (att.isNominal()) {
            currentGainRatio = computeInfoGain(data, att);
        } else {
            currentGainRatio = computeInfoGainCont(data, att, bestThreshold(data, att));
        }
        if (currentGainRatio < 1.0) {
            unsignificantAttr.add(att.index() + 1);
        }
    }
    if (unsignificantAttr.size() > 0) {
        StringBuilder unsignificant = new StringBuilder();
        int i = 0;
        for (Integer current : unsignificantAttr) {
            unsignificant.append(current.toString());
            if (i != unsignificantAttr.size() - 1) {
                unsignificant.append(",");
            }
            i++;
        }
        return removeAttr(data, unsignificant.toString());
    } else {
        return data;
    }

}

From source file:tr.gov.ulakbim.jDenetX.experiments.wrappers.EvalActiveBoostingID.java

License:Open Source License

public static Instances clusterInstances(Instances data) {
    XMeans xmeans = new XMeans();
    Remove filter = new Remove();
    Instances dataClusterer = null;
    if (data == null) {
        throw new NullPointerException("Data is null at clusteredInstances method");
    }
    //Get the attributes from the data for creating the sampled_data object

    ArrayList<Attribute> attrList = new ArrayList<Attribute>();
    Enumeration attributes = data.enumerateAttributes();
    while (attributes.hasMoreElements()) {
        attrList.add((Attribute) attributes.nextElement());
    }

    Instances sampled_data = new Instances(data.relationName(), attrList, 0);
    data.setClassIndex(data.numAttributes() - 1);
    sampled_data.setClassIndex(data.numAttributes() - 1);
    filter.setAttributeIndices("" + (data.classIndex() + 1));
    data.remove(0);//In Wavelet Stream of MOA always the first element comes without class

    try {
        filter.setInputFormat(data);
        dataClusterer = Filter.useFilter(data, filter);
        String[] options = new String[4];
        options[0] = "-L"; // max. iterations
        options[1] = Integer.toString(noOfClassesInPool - 1);
        if (noOfClassesInPool > 2) {
            options[1] = Integer.toString(noOfClassesInPool - 1);
            xmeans.setMinNumClusters(noOfClassesInPool - 1);
        } else {
            options[1] = Integer.toString(noOfClassesInPool);
            xmeans.setMinNumClusters(noOfClassesInPool);
        }
        xmeans.setMaxNumClusters(data.numClasses() + 1);
        System.out.println("No of classes in the pool: " + noOfClassesInPool);
        xmeans.setUseKDTree(true);
        //xmeans.setOptions(options);
        xmeans.buildClusterer(dataClusterer);
        System.out.println("Xmeans\n:" + xmeans);
    } catch (Exception e) {
        e.printStackTrace();
    }
    //System.out.println("Assignments\n: " + assignments);
    ClusterEvaluation eval = new ClusterEvaluation();
    eval.setClusterer(xmeans);
    try {
        eval.evaluateClusterer(data);
        int classesToClustersMap[] = eval.getClassesToClusters();
        //check the classes to cluster map
        int clusterNo = 0;
        for (int i = 0; i < data.size(); i++) {
            clusterNo = xmeans.clusterInstance(dataClusterer.get(i));
            //Check if the class value of instance and class value of cluster matches
            if ((int) data.get(i).classValue() == classesToClustersMap[clusterNo]) {
                sampled_data.add(data.get(i));
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return ((Instances) sampled_data);
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Adjust current segmentation state (attributes and classes) to
 * loaded data
 * @param data loaded instances
 * @return false if error
 */
public boolean adjustSegmentationStateToData(Instances data) {
    // Check the features that were used in the loaded data
    boolean featuresChanged = false;
    Enumeration<Attribute> attributes = data.enumerateAttributes();
    final int numFeatures = FeatureStack.availableFeatures.length;
    boolean[] usedFeatures = new boolean[numFeatures];

    // Initialize list of names for the features to use
    this.featureNames = new ArrayList<String>();

    float minSigma = Float.MAX_VALUE;
    float maxSigma = Float.MIN_VALUE;

    while (attributes.hasMoreElements()) {
        final Attribute a = attributes.nextElement();
        this.featureNames.add(a.name());
        for (int i = 0; i < numFeatures; i++) {
            if (a.name().startsWith(FeatureStack.availableFeatures[i])) {
                usedFeatures[i] = true;
                if (i == FeatureStack.MEMBRANE) {
                    int index = a.name().indexOf("s_") + 4;
                    int index2 = a.name().indexOf("_", index + 1);
                    final int patchSize = Integer.parseInt(a.name().substring(index, index2));
                    if (patchSize != membranePatchSize) {
                        membranePatchSize = patchSize;
                        this.featureStackArray.setMembranePatchSize(patchSize);
                        featuresChanged = true;
                    }
                    index = a.name().lastIndexOf("_");
                    final int thickness = Integer.parseInt(a.name().substring(index + 1));
                    if (thickness != membraneThickness) {
                        membraneThickness = thickness;
                        this.featureStackArray.setMembraneSize(thickness);
                        featuresChanged = true;
                    }

                } else if (i < FeatureStack.ANISOTROPIC_DIFFUSION) {
                    String[] tokens = a.name().split("_");
                    for (int j = 0; j < tokens.length; j++)
                        if (tokens[j].indexOf(".") != -1) {
                            final float sigma = Float.parseFloat(tokens[j]);
                            if (sigma < minSigma)
                                minSigma = sigma;
                            if (sigma > maxSigma)
                                maxSigma = sigma;
                        }
                }
            }
        }
    }

    IJ.log("Field of view: max sigma = " + maxSigma + ", min sigma = " + minSigma);
    IJ.log("Membrane thickness: " + membraneThickness + ", patch size: " + membranePatchSize);
    if (minSigma != this.minimumSigma && minSigma != 0) {
        this.minimumSigma = minSigma;
        featuresChanged = true;
        this.featureStackArray.setMinimumSigma(minSigma);
    }
    if (maxSigma != this.maximumSigma) {
        this.maximumSigma = maxSigma;
        featuresChanged = true;
        this.featureStackArray.setMaximumSigma(maxSigma);
    }

    // Check if classes match
    Attribute classAttribute = data.classAttribute();
    Enumeration<String> classValues = classAttribute.enumerateValues();

    // Update list of names of loaded classes
    loadedClassNames = new ArrayList<String>();

    int j = 0;
    setNumOfClasses(0);

    while (classValues.hasMoreElements()) {
        final String className = classValues.nextElement().trim();
        loadedClassNames.add(className);
    }

    for (String className : loadedClassNames) {
        IJ.log("Read class name: " + className);

        setClassLabel(j, className);
        addClass();
        j++;
    }

    final boolean[] oldEnableFeatures = this.featureStackArray.getEnabledFeatures();
    // Read checked features and check if any of them changed
    for (int i = 0; i < numFeatures; i++) {
        if (usedFeatures[i] != oldEnableFeatures[i])
            featuresChanged = true;
    }
    // Update feature stack if necessary
    if (featuresChanged) {
        //this.setButtonsEnabled(false);
        this.setEnabledFeatures(usedFeatures);
        // Force features to be updated
        updateFeatures = true;
    }

    return true;
}

From source file:tubes2ai.AIJKFFNN.java

@Override
public void buildClassifier(Instances instances) throws Exception {
    getCapabilities().testWithFail(instances);
    int nInputNeuron, nOutputNeuron;

    /* Initialize each layer */
    nInputNeuron = instances.numAttributes() - 1;
    nOutputNeuron = instances.numClasses();
    inputLayer = new Vector<Neuron>(nInputNeuron);
    hiddenLayer = new Vector<Neuron>(nHiddenNeuron);
    outputLayer = new Vector<Neuron>(nOutputNeuron);

    Random random = new Random(getSeed());

    Enumeration<Attribute> attributeEnumeration = instances.enumerateAttributes();
    attributeList = Collections.list(attributeEnumeration);

    /* Fill the layers with neurons using default weights */
    for (int k = 0; k < nOutputNeuron; k++) {
        outputLayer.add(new Neuron());
    }

    for (int k = 0; k < nInputNeuron; k++) {
        inputLayer.add(new Neuron());
    }

    /* If a hidden layer is configured */
    if (nHiddenLayer > 0) {
        for (int j = 0; j < nHiddenNeuron; j++) {
            hiddenLayer.add(new Neuron());
        }
    }

    /* Link */
    if (nHiddenLayer > 0) {
        linkNeurons(inputLayer, hiddenLayer);
        linkNeurons(hiddenLayer, outputLayer);
    } else {
        linkNeurons(inputLayer, outputLayer);
    }

    for (Neuron neuron : inputLayer) {
        neuron.initialize(random);
    }

    inputLayerArray = new Neuron[nInputNeuron];
    int i = 0;
    for (Neuron neuron : inputLayer) {
        inputLayerArray[i] = neuron;
        i++;
    }

    outputCalculationArray = new Neuron[nHiddenLayer * nHiddenNeuron + nOutputNeuron];
    int j = 0;
    for (Neuron neuron : hiddenLayer) {
        outputCalculationArray[j] = neuron;
        j++;
    }
    for (Neuron neuron : outputLayer) {
        outputCalculationArray[j] = neuron;
        j++;
    }

    if (nHiddenLayer > 0) {
        for (Neuron neuron : hiddenLayer) {
            neuron.initialize(random);
        }

    }

    for (Neuron neuron : outputLayer) {
        neuron.initialize(random);
    }

    /* Learning */
    int iterations = 0;
    List<Double> errors = new ArrayList<>();
    do {
        for (Instance instance : instances) {
            /* Feed the instance into the input neurons */
            loadInput(instance);

            /* Compute the error from the output layer back to the input */
            /* Prepare the target values */
            for (int ix = 0; ix < outputLayer.size(); ix++) {
                if (ix == (int) instance.classValue()) {
                    outputLayer.get(ix).errorFromTarget(1);
                } else {
                    outputLayer.get(ix).errorFromTarget(0);
                }
            }
            if (nHiddenLayer != 0) {
                for (Neuron nHid : hiddenLayer) {
                    nHid.calculateError();
                }
            }

            /* Update Weight */

            for (int k = 0; k < outputCalculationArray.length; k++) {
                outputCalculationArray[k].updateWeights(learningRate);
            }
        }

        iterations++;

        if (iterations % 500 == 0) {
            System.out.println("FFNN iteration " + iterations);
        }

    } while (iterations < maxIterations);

}