Example usage for the weka.core UnsupportedAttributeTypeException constructor

List of usage examples for the weka.core UnsupportedAttributeTypeException constructor.

Introduction

On this page you can find example usages of the weka.core UnsupportedAttributeTypeException constructor.

Prototype

public UnsupportedAttributeTypeException(String message) 

Source Link

Document

Creates a new UnsupportedAttributeTypeException.

Usage

From source file:Pair.java

License:Open Source License

/**
 * Boosting method.
 *
 * <p>Validates the training data (no string attributes, numeric class,
 * at least one instance with a class value, source data present), then
 * runs the cross-validation-based boosting procedure.
 *
 * @param data the training data to be used for generating the
 * boosted classifier.
 * @exception Exception if the classifier could not be built successfully
 */

public void buildClassifier(Instances data) throws Exception {

    super.buildClassifier(data);

    if (data.checkForStringAttributes()) {
        throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
    }
    // Work on a copy so the caller's dataset is not modified.
    data = new Instances(data);
    data.deleteWithMissingClass();
    if (data.numInstances() == 0) {
        throw new Exception("No train instances without class missing!");
    }
    if (!data.classAttribute().isNumeric()) {
        throw new UnsupportedClassTypeException("TrAdaBoostR2 can only handle a numeric class!");
    }
    if (m_SourceInstances == null) {
        throw new Exception("Source data has not been specified!");
    }

    m_NumClasses = data.numClasses();
    // Fix: the original wrapped this call in try/catch and only called
    // e.printStackTrace(), silently swallowing build failures. The method
    // is declared "throws Exception" and its contract promises that build
    // errors are reported to the caller, so let failures propagate.
    doCV(data);
}

From source file:MPCKMeans.java

License:Open Source License

/**
 * Generates a clusterer. Instances in data have to be
 * either all sparse or all non-sparse.
 *
 * <p>Builds (or reuses) the learnable metric(s), initializes the cluster
 * centroids, optionally converts/smooths the centroids, and then runs the
 * K-Means iterations.
 *
 * @param data set of instances serving as training data
 * @exception Exception if the clusterer has not been
 * generated successfully
 */
public void buildClusterer(Instances data) throws Exception {
    // Dump the current objective-function weights for traceability.
    System.out.println("ML weight=" + m_MLweight);
    System.out.println("CL weight= " + m_CLweight);
    System.out.println("LOG term weight=" + m_logTermWeight);
    System.out.println("Regularizer weight= " + m_regularizerTermWeight);
    // Fixed seed so runs are reproducible for a given m_RandomSeed.
    m_RandomNumberGenerator = new Random(m_RandomSeed);

    if (m_metric instanceof OfflineLearnableMetric) {
        m_isOfflineMetric = true;
    } else {
        m_isOfflineMetric = false;
    }

    // Don't rebuild the metric if it was already trained
    if (!m_metricBuilt) {
        m_metric.buildMetric(data);
        m_metricBuilt = true;
        m_metricLearner.setMetric(m_metric);
        m_metricLearner.setClusterer(this);

        // One metric/learner slot per cluster; either each cluster gets its
        // own clone (multiple-metrics mode) or all slots share one instance.
        m_metrics = new LearnableMetric[m_NumClusters];
        m_metricLearners = new MPCKMeansMetricLearner[m_NumClusters];
        for (int i = 0; i < m_metrics.length; i++) {
            if (m_useMultipleMetrics) {
                m_metrics[i] = (LearnableMetric) m_metric.clone();
                m_metricLearners[i] = (MPCKMeansMetricLearner) m_metricLearner.clone();
                m_metricLearners[i].setMetric(m_metrics[i]);
                m_metricLearners[i].setClusterer(this);
            } else {
                // Shared mode: every slot aliases the same metric/learner.
                m_metrics[i] = m_metric;
                m_metricLearners[i] = m_metricLearner;
            }
        }
    }

    setInstances(data);
    m_ClusterCentroids = new Instances(m_Instances, m_NumClusters);
    m_ClusterAssignments = new int[m_Instances.numInstances()];

    // NOTE(review): this guard only fires when the data contains BOTH nominal
    // and string attributes, yet the message mentions only nominal ones.
    // Presumably an || (or a bare nominal check) was intended — confirm.
    if (m_Instances.checkForNominalAttributes() && m_Instances.checkForStringAttributes()) {
        throw new UnsupportedAttributeTypeException("Cannot handle nominal attributes\n");
    }

    // Delegate initial centroid selection (e.g. seeding strategy) to the
    // configured initializer; overwrites the empty set created above.
    m_ClusterCentroids = m_Initializer.initialize();

    // if all instances are smoothed by the metric, the centroids
    // need to be smoothed too (note that this is independent of
    // centroid smoothing performed by K-Means)
    if (m_metric instanceof InstanceConverter) {
        System.out.println("Converting centroids...");
        Instances convertedCentroids = new Instances(m_ClusterCentroids, m_NumClusters);
        for (int i = 0; i < m_ClusterCentroids.numInstances(); i++) {
            Instance centroid = m_ClusterCentroids.instance(i);
            convertedCentroids.add(((InstanceConverter) m_metric).convertInstance(centroid));
        }

        // Replace the centroid set in place with the converted copies.
        m_ClusterCentroids.delete();
        for (int i = 0; i < convertedCentroids.numInstances(); i++) {
            m_ClusterCentroids.add(convertedCentroids.instance(i));
        }
    }

    System.out.println("Done initializing clustering ...");
    getIndexClusters();

    // Verbose seeded runs print the initial cluster membership and centroids.
    if (m_verbose && m_Seedable) {
        printIndexClusters();
        for (int i = 0; i < m_NumClusters; i++) {
            System.out.println("Centroid " + i + ": " + m_ClusterCentroids.instance(i));
        }
    }

    // Some extra work for smoothing metrics
    if (m_metric instanceof SmoothingMetric && ((SmoothingMetric) m_metric).getUseSmoothing()) {

        SmoothingMetric smoothingMetric = (SmoothingMetric) m_metric;
        Instances smoothedCentroids = new Instances(m_Instances, m_NumClusters);

        for (int i = 0; i < m_ClusterCentroids.numInstances(); i++) {
            Instance smoothedCentroid = smoothingMetric.smoothInstance(m_ClusterCentroids.instance(i));
            smoothedCentroids.add(smoothedCentroid);
        }
        m_ClusterCentroids = smoothedCentroids;

        updateSmoothingMetrics();
    }

    // All setup done — run the actual K-Means optimization.
    runKMeans();
}

From source file:net.sf.bddbddb.order.MyId3.java

License:LGPL

/**
 * Builds the Id3 decision tree classifier.
 *
 * <p>Requires a nominal class attribute and only nominal predictor
 * attributes; instances with a missing class value are discarded.
 *
 * @param data
 *            the training data
 * @exception Exception
 *                if classifier can't be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
    // Id3 can only predict a nominal class.
    if (!data.classAttribute().isNominal()) {
        throw new UnsupportedClassTypeException("Id3: nominal class, please.");
    }
    // Every predictor attribute must be nominal as well.
    for (Enumeration attEnum = data.enumerateAttributes(); attEnum.hasMoreElements();) {
        Attribute att = (Attribute) attEnum.nextElement();
        if (!att.isNominal()) {
            throw new UnsupportedAttributeTypeException("Id3: only nominal " + "attributes, please.");
        }
    }
    // Copy the data so the caller's set is untouched, then drop
    // instances whose class value is missing before growing the tree.
    data = new Instances(data);
    data.deleteWithMissingClass();
    makeTree(data);
}

From source file:smo2.SMO.java

License:Open Source License

/**
 * Method for building the classifier. Implements a one-against-one wrapper
 * for multi-class problems.
 *
 * <p>Pipeline: (1) optional sanity checks and cleanup of the training set,
 * (2) missing-value replacement, (3) nominal-to-binary conversion when any
 * non-class attribute is nominal, (4) optional standardize/normalize
 * filtering, (5) training one BinarymySMO per unordered pair of classes.
 *
 * @param insts
 *            the set of training instances
 * @exception Exception
 *                if the classifier can't be built successfully
 */
public void buildClassifier(Instances insts) throws Exception {

    if (!m_checksTurnedOff) {
        if (insts.checkForStringAttributes()) {
            throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
        }
        if (insts.classAttribute().isNumeric()) {
            throw new UnsupportedClassTypeException(
                    "mySMO can't handle a numeric class! Use" + "SMOreg for performing regression.");
        }
        // Copy so the caller's dataset is not modified by the cleanup below.
        insts = new Instances(insts);
        insts.deleteWithMissingClass();
        if (insts.numInstances() == 0) {
            throw new Exception("No training instances without a missing class!");
        }

        /*
         * Removes all the instances with weight equal to 0. MUST be done
         * since condition (8) of Keerthi's paper is made with the assertion
         * Ci > 0 (See equation (3a).
         */
        Instances data = new Instances(insts, insts.numInstances());
        for (int i = 0; i < insts.numInstances(); i++) {
            if (insts.instance(i).weight() > 0)
                data.add(insts.instance(i));
        }
        if (data.numInstances() == 0) {
            throw new Exception("No training instances left after removing "
                    + "instance with either a weight null or a missing class!");
        }
        insts = data;

    }

    // Detect whether any non-class attribute is non-numeric; this decides
    // whether the NominalToBinary filter is needed below.
    m_onlyNumeric = true;
    if (!m_checksTurnedOff) {
        for (int i = 0; i < insts.numAttributes(); i++) {
            if (i != insts.classIndex()) {
                if (!insts.attribute(i).isNumeric()) {
                    m_onlyNumeric = false;
                    break;
                }
            }
        }
    }

    // Replace missing values (only when checks are on; the filter is kept
    // so the same transformation can be applied at prediction time).
    if (!m_checksTurnedOff) {
        m_Missing = new ReplaceMissingValues();
        m_Missing.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Missing);
    } else {
        m_Missing = null;
    }

    // Binarize nominal attributes so the SMO kernel sees numeric input.
    if (!m_onlyNumeric) {
        m_NominalToBinary = new NominalToBinary();
        m_NominalToBinary.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_NominalToBinary);
    } else {
        m_NominalToBinary = null;
    }

    // Optional attribute rescaling, chosen by m_filterType.
    if (m_filterType == FILTER_STANDARDIZE) {
        m_Filter = new Standardize();
        m_Filter.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Filter);
    } else if (m_filterType == FILTER_NORMALIZE) {
        m_Filter = new Normalize();
        m_Filter.setInputFormat(insts);
        insts = Filter.useFilter(insts, m_Filter);
    } else {
        m_Filter = null;
    }

    // Remember the (possibly filtered) class layout for prediction time.
    m_classIndex = insts.classIndex();
    m_classAttribute = insts.classAttribute();

    // Generate subsets representing each class
    Instances[] subsets = new Instances[insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
        subsets[i] = new Instances(insts, insts.numInstances());
    }
    for (int j = 0; j < insts.numInstances(); j++) {
        Instance inst = insts.instance(j);
        subsets[(int) inst.classValue()].add(inst);
    }
    for (int i = 0; i < insts.numClasses(); i++) {
        subsets[i].compactify();
    }

    // Build the binary classifiers: one BinarymySMO for each unordered
    // pair (i, j) of classes, trained on the union of their subsets.
    Random rand = new Random(m_randomSeed);
    m_classifiers = new BinarymySMO[insts.numClasses()][insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
        for (int j = i + 1; j < insts.numClasses(); j++) {
            m_classifiers[i][j] = new BinarymySMO();
            Instances data = new Instances(insts, insts.numInstances());
            for (int k = 0; k < subsets[i].numInstances(); k++) {
                data.add(subsets[i].instance(k));
            }
            for (int k = 0; k < subsets[j].numInstances(); k++) {
                data.add(subsets[j].instance(k));
            }
            data.compactify();
            // Shuffle the pairwise training set before fitting.
            data.randomize(rand);
            m_classifiers[i][j].buildClassifier(data, i, j, m_fitLogisticModels, m_numFolds, m_randomSeed);
        }
    }
}