Usage examples for weka.core.Instances.setClassIndex
public void setClassIndex(int classIndex)
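setClassIndex designates which attribute a classifier will predict. Weka leaves the class index unset (-1) on freshly loaded data, so most workflows call this method immediately after loading. A minimal sketch (editorial, not from the examples below), assuming a local ARFF file named data.arff whose last attribute is the label:

import java.io.FileReader;
import weka.core.Instances;
import weka.core.converters.ArffLoader.ArffReader;

public class SetClassIndexSketch {
    public static void main(String[] args) throws Exception {
        // Load the dataset; ArffReader does not set a class index by itself.
        ArffReader reader = new ArffReader(new FileReader("data.arff")); // hypothetical file name
        Instances data = reader.getData();
        // Mark the last attribute as the class, the common ARFF convention.
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Class attribute: " + data.classAttribute().name());
    }
}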
From source file:com.reactivetechnologies.analytics.mapper.ARFFDataMapper.java
License:Open Source License
@Override
public Dataset mapStringToModel(JsonRequest request) throws ParseException {
    if (!(request instanceof ArffJsonRequest)) {
        throw new ParseException("Not an instance of " + ArffJsonRequest.class, -1);
    }
    try {
        ArffJsonRequest arff = (ArffJsonRequest) request;
        ArffReader ar = new ArffReader(new StringReader(request.toString()));
        Instances ins = ar.getData();
        // Use the class index supplied in the request; fall back to the last attribute.
        ins.setClassIndex(arff.getClassIndex() >= 0 ? arff.getClassIndex() : ins.numAttributes() - 1);
        return new Dataset(ins);
    } catch (Exception e) {
        ParseException pe = new ParseException("Cannot convert JSON stream to ARFF", -1);
        pe.initCause(e);
        throw pe;
    }
}
From source file:com.reactivetechnologies.analytics.mapper.TEXTDataMapper.java
License:Open Source License
@Override
public Dataset mapStringToModel(JsonRequest request) throws ParseException {
    if (request != null && request.getData() != null && request.getData().length > 0) {
        FastVector fvWekaAttributes = new FastVector(2);
        FastVector nil = null;
        // A null attribute-value vector makes "text" a string attribute.
        Attribute attr0 = new Attribute("text", nil, 0);
        FastVector fv = new FastVector();
        for (String nominal : request.getClassVars()) {
            fv.addElement(nominal);
        }
        Attribute attr1 = new Attribute("class", fv, 1);
        fvWekaAttributes.addElement(attr0);
        fvWekaAttributes.addElement(attr1);
        Instances ins = new Instances("attr-reln", fvWekaAttributes, request.getData().length);
        ins.setClassIndex(1); // the nominal "class" attribute at index 1 is the class
        for (Text s : request.getData()) {
            Instance i = new Instance(2);
            i.setValue(attr0, s.getText());
            i.setValue(attr1, s.getTclass());
            ins.add(i);
        }
        return new Dataset(ins);
    }
    return null;
}
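Note: this mapper is written against the pre-3.7 Weka API, where attribute vectors are FastVector objects and rows are concrete Instance objects. From Weka 3.7 onward, the equivalent code uses ArrayList<Attribute> and DenseInstance, as the com.sensyscal examples further down illustrate.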
From source file:com.reactivetechnologies.platform.analytics.mapper.JSONDataMapper.java
License:Open Source License
@Override
public TrainModel mapStringToModel(ArffJsonRequest request) throws ParseException {
    try {
        ArffReader ar = new ArffReader(new StringReader(request.toString()));
        Instances ins = ar.getData();
        ins.setClassIndex(request.getClassIndex() >= 0 ? request.getClassIndex() : ins.numAttributes() - 1);
        return new TrainModel(ins);
    } catch (Exception e) {
        ParseException pe = new ParseException("Cannot convert JSON stream to ARFF", -1);
        pe.initCause(e);
        throw pe;
    }
}
From source file:com.relationalcloud.main.ExplanationSingleAttribute.java
License:Open Source License
/**
 * @param args
 */
@Deprecated
public static void main(String[] args) {

  Properties ini = new Properties();
  try {
    ini.load(new FileInputStream(System.getProperty("prop")));
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }

  // loading properties from file
  String schemaname = ini.getProperty("schemaname");
  String partitioningMethod = ini.getProperty("partitioningMethod");
  String pcol;
  if (partitioningMethod.equals("repGraph")) {
    System.out.println("Replication Graph: using replicated column");
    pcol = ini.getProperty("replicatedPartitionCol");
  } else {
    pcol = ini.getProperty("graphPartitionCol");
  }
  String accessLogTable = ini.getProperty("accessLogTable");
  String numb_trans_to_process = ini.getProperty("numb_trans_to_process");
  String txnLogTable = ini.getProperty("txnLogTable");
  String driver = ini.getProperty("driver");
  String connection = ini.getProperty("conn");
  String user = ini.getProperty("user");
  String password = ini.getProperty("password");

  System.out.println("Loading and processing " + schemaname + " traces...");

  // Register jdbcDriver
  try {
    Class.forName(driver);
  } catch (ClassNotFoundException e) {
    e.printStackTrace();
  }

  Connection conn;
  try {
    conn = DriverManager.getConnection(connection + schemaname, user, password);
    conn.setAutoCommit(true);
    Connection infschema_conn = DriverManager.getConnection(connection + "information_schema", user, password);
    Schema schema = SchemaLoader.loadSchemaFromDB(infschema_conn, schemaname);
    Statement stmt = conn.createStatement();

    // NOTE: the parameter numb_trans_to_process is used to limit
    // the number of transactions parsed to determine which attributes
    // are common in the workload WHERE clauses. This can be a subset of the
    // overall set
    String sqlstring = "SELECT sqlstring FROM `" + txnLogTable + "` LIMIT " + numb_trans_to_process;
    ResultSet res = stmt.executeQuery(sqlstring);

    ExplanationWorkloadPrepocessor wa = new ExplanationWorkloadPrepocessor(schemaname, schema);

    double tstart = System.currentTimeMillis();
    double i = 0;
    while (res.next()) {
      String sql = res.getString(1);
      // PARSE THE STATEMENT
      wa.processSql(sql);
      i++;
    }
    double tend = System.currentTimeMillis();

    System.out.println("Processed " + i + " statements in " + (tend - tstart) + "ms average:"
        + (tend - tstart) / i + "ms per statement");
    System.out.println("ANALISYS RESULTS:\n ");
    wa.printStatsByTableColumn();

    for (String str : wa.getAllTableNames()) {
      if (str == null)
        continue;
      System.out.println("-------------------------------------------");
      System.out.println("ANALYZING TABLE IN USED IN THE TRANSACTION TRACE " + str);

      for (SimpleCount sc : wa.getFeatures(str)) {

        ArrayList<Double> a0 = new ArrayList<Double>();
        ArrayList<Double> a1 = new ArrayList<Double>();

        sqlstring = "SELECT s." + sc.colname + ", g." + pcol + " FROM `" + accessLogTable
            + "` g, relcloud_" + str + " s WHERE tableid = \"" + str
            + "\" AND s.relcloud_id = g.tupleid";
        // System.out.println(sqlstring);
        res = stmt.executeQuery(sqlstring);

        while (res.next()) {
          Object o1 = res.getObject(1);
          Object o2 = res.getObject(2);
          if (o1 != null && o2 != null) {
            a0.add(new Double(o1.hashCode()));
            a1.add(new Double(o2.hashCode()));
          }
        }

        if (a0.size() >= 1) {
          double[] d0 = new double[a0.size()];
          double[] d1 = new double[a1.size()];

          boolean unary = true;
          for (int j = 0; j < a0.size(); j++) {
            d0[j] = a0.get(j).doubleValue();
            d1[j] = a1.get(j).doubleValue();
            if (j > 0 && d1[j - 1] != d1[j])
              unary = false;
          }

          if (unary) {
            System.out.println("EASY CASE: " + str
                + " is not partitioned and is stored in partition: " + d1[0]);
          } else {
            double correlation = PearsonCorrelation.getPearsonCorrelation(d0, d1);
            correlationThreshold = Double.parseDouble(ini.getProperty("correlationThreshold"));

            // if the correlation is high enough proceed to use decision trees.
            if (Math.abs(correlation) > correlationThreshold) {
              System.out.println("Testing " + str + "." + sc.colname + ", " + pcol
                  + " correlation: " + correlation + " (HIGH)");
              try {
                // InstanceQuery query;
                // query = new InstanceQuery();
                // query.setUsername("bbb");
                // query.setPassword("qwer");
                // query.connectToDatabase();
                // Instances data = query.retrieveInstances(sqlstring);
                res.beforeFirst();
                Instances data = WekaHelper.retrieveInstanceFromResultSet(res);
                // set the last column to be the classIndex... is this correct?
                data.setClassIndex(data.numAttributes() - 1);

                Instances newData;
                if (data.attribute(data.numAttributes() - 1).type() == Attribute.NUMERIC) {
                  NumericToNominal ntn = new NumericToNominal();
                  String[] options = new String[2];
                  options[0] = "-R"; // "range"
                  options[1] = "2"; // second attribute (the partition column)
                  ntn.setOptions(options); // set options
                  ntn.setInputFormat(data); // inform filter about dataset **AFTER** setting options
                  newData = Filter.useFilter(data, ntn); // apply filter
                } else {
                  StringToNominal ntn = new StringToNominal();
                  String[] options = new String[2];
                  options[0] = "-R"; // "range"
                  options[1] = "2"; // second attribute (the partition column)
                  ntn.setOptions(options); // set options
                  ntn.setInputFormat(data); // inform filter about dataset **AFTER** setting options
                  newData = Filter.useFilter(data, ntn); // apply filter
                }

                String[] options = new String[1];
                options[0] = "-P";
                J48 tree = new J48(); // new instance of tree
                tree.setOptions(options); // set the options

                if (!tree.getCapabilities().test(newData)) {
                  System.err.println("ERROR the FOLLOWING DATA CANNOT BE PROCESED:"
                      + newData.toSummaryString());
                  System.err.println("QUERY WAS:" + sqlstring);
                } else {
                  long treeTstart = System.currentTimeMillis();
                  tree.buildClassifier(newData); // build classifier
                  long treeTend = System.currentTimeMillis();
                  System.out.println("CLASSIFICATION CONFIDENCE: " + tree.getConfidenceFactor()
                      + "\n TREE BUILDING TIME: " + (treeTend - treeTstart) + "ms \n"
                      + tree.toString());
                  System.out.println("TREE:" + tree.prefix());
                }
              } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
              }
            } else {
              System.out.println("Testing " + str + "." + sc.colname + ", " + pcol
                  + " correlation: " + correlation + " (LOW)");
            }
          }
        }
      }
    }
  } catch (SQLException e) {
    e.printStackTrace();
  }
}
From source file:com.relationalcloud.misc.JustifyAgnosticPartitioning.java
License:Open Source License
/**
 * @param args
 */
public static void main(String[] args) {

  Properties ini = new Properties();
  try {
    ini.load(new FileInputStream(System.getProperty("prop")));
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }

  // Register jdbcDriver
  try {
    Class.forName(ini.getProperty("driver"));
  } catch (ClassNotFoundException e) {
    e.printStackTrace();
  }

  // READ FROM MYSQL THE TPCC TRANSACTION LOG, PARSE EACH STATEMENT AND TEST
  // VARIOUS PARSER FUNCTIONALITIES
  System.out.println("Loading and processing TPCC traces...");

  Connection conn;
  try {
    String schemaname = ini.getProperty("schema");
    String connection = ini.getProperty("conn");
    String user = ini.getProperty("user");
    String password = ini.getProperty("password");
    conn = DriverManager.getConnection(connection + schemaname, user, password);
    Connection infschema_conn = DriverManager.getConnection(connection + "information_schema", user, password);
    Schema schema = SchemaLoader.loadSchemaFromDB(infschema_conn, schemaname);
    ExplanationWorkloadPrepocessor wa = new ExplanationWorkloadPrepocessor(schemaname, schema);
    conn.setAutoCommit(true);
    Statement stmt = conn.createStatement();

    String txnLogTable = ini.getProperty("txnLogTable");
    String sqlstring = "SELECT sqlstring FROM `" + txnLogTable + "`";
    ResultSet res = stmt.executeQuery(sqlstring);

    double tstart = System.currentTimeMillis();
    double i = 0;
    while (res.next()) {
      String sql = res.getString(1);
      // PARSE THE STATEMENT
      wa.processSql(sql);
      // System.out.println("SQL: " + sql);
      i++;
    }
    double tend = System.currentTimeMillis();

    String accessLogTable = ini.getProperty("accessLogTable");

    System.out.println("Processed " + i + " statements in " + (tend - tstart) + "ms average:"
        + (tend - tstart) / i + "ms per statement");

    for (String str : wa.getAllTableNames()) {
      System.out.println("-------------------------------------------");
      System.out.println("ANALYZING TABLE " + str);

      for (SimpleCount sc : wa.getFeatures(str)) {

        ArrayList<Double> a0 = new ArrayList<Double>();
        ArrayList<Double> a1 = new ArrayList<Double>();

        sqlstring = "SELECT s." + sc.colname + ", g.partition FROM `" + accessLogTable + "` g, "
            + str + " s WHERE tableid = \"" + str + "\" AND s.id = g.id";
        System.out.println(sqlstring);
        res = stmt.executeQuery(sqlstring);

        while (res.next()) {
          a0.add(new Double(res.getObject(1).hashCode()));
          a1.add(new Double(res.getObject(2).hashCode()));
        }

        if (a0.size() >= 1) {
          double[] d0 = new double[a0.size()];
          double[] d1 = new double[a1.size()];

          boolean unary = true;
          for (int j = 0; j < a0.size(); j++) {
            d0[j] = a0.get(j).doubleValue();
            d1[j] = a1.get(j).doubleValue();
            if (j > 0 && d1[j - 1] != d1[j])
              unary = false;
          }

          if (unary) {
            System.out.println("EASY CASE: " + str
                + " is not partitioned and is stored in partition: " + d1[0]);
          } else {
            double correlation = PearsonCorrelation.getPearsonCorrelation(d0, d1);
            correlationThreshold = Double.parseDouble(ini.getProperty("correlationThreshold"));

            // if the correlation is high enough proceed to use decision trees.
            if (Math.abs(correlation) > correlationThreshold) {
              System.out.println("Testing " + str + "." + sc.colname
                  + ", g.partition correlation: " + correlation + " (HIGH)");
              try {
                // InstanceQuery query;
                // query = new InstanceQuery();
                // query.setUsername("bbb");
                // query.setPassword("qwer");
                // query.connectToDatabase();
                // Instances data = query.retrieveInstances(sqlstring);
                res.beforeFirst();
                Instances data = retrieveInstanceFromResultSet(res);
                // set the last column to be the classIndex... is this correct?
                data.setClassIndex(data.numAttributes() - 1);

                Instances newData;
                if (data.attribute(data.numAttributes() - 1).type() == Attribute.NUMERIC) {
                  NumericToNominal ntn = new NumericToNominal();
                  String[] options = new String[2];
                  options[0] = "-R"; // "range"
                  options[1] = "2"; // second attribute (the partition column)
                  ntn.setOptions(options); // set options
                  ntn.setInputFormat(data); // inform filter about dataset **AFTER** setting options
                  newData = Filter.useFilter(data, ntn); // apply filter
                } else {
                  StringToNominal ntn = new StringToNominal();
                  String[] options = new String[2];
                  options[0] = "-R"; // "range"
                  options[1] = "2"; // second attribute (the partition column)
                  ntn.setOptions(options); // set options
                  ntn.setInputFormat(data); // inform filter about dataset **AFTER** setting options
                  newData = Filter.useFilter(data, ntn); // apply filter
                }

                String[] options = new String[1];
                options[0] = "-P";
                J48 tree = new J48(); // new instance of tree
                tree.setOptions(options); // set the options

                if (!tree.getCapabilities().test(newData)) {
                  System.err.println("ERROR the FOLLOWING DATA CANNOT BE PROCESED:"
                      + newData.toSummaryString());
                  System.err.println("QUERY WAS:" + sqlstring);
                } else {
                  tree.buildClassifier(newData); // build classifier
                }
                System.out.println("CLASSIFICATION CONFIDENCE: " + tree.getConfidenceFactor()
                    + "\n " + tree.toString());
              } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
              }
            } else {
              System.out.println("Testing " + str + "." + sc.colname
                  + ", g.partition correlation: " + correlation + " (LOW)");
            }
          }
        }
      }
    }
  } catch (SQLException e) {
    e.printStackTrace();
  }
}
From source file:com.relationalcloud.partitioning.explanation.ExplanationHandler.java
License:Open Source License
/**
 * Repeat the selection from the database removing duplicates, since they will
 * only increase the execution time, and run the tuples through the classifier
 * to populate the justifiedpartition column.
 *
 * @param tableProcessed
 * @param classifier
 * @param wa
 * @throws SQLException
 * @throws Exception
 */
public void populateJustifiedColumn(String tableProcessed, Classifier classifier,
        ArrayList<String> attributes, Connection conn, int numbPart,
        Enumeration enumclassvalues) throws SQLException, Exception {

    if (true) {
        labelTest(tableProcessed, classifier, conn);
        return;
    }

    tableProcessed = removeQuotes(tableProcessed);

    // get from the DB the tuples content and their partitioning column
    String sqlstring = "SELECT distinct g.tupleid, ";
    for (String sc : attributes) {
        sqlstring += "s." + sc + ", ";
    }
    sqlstring += "g." + pcol + " FROM " + "(SELECT distinct tupleid," + pcol + " FROM `" + testingtable
            + "` WHERE tableid = '" + tableProcessed + "') AS g, relcloud_" + tableProcessed + " AS s "
            + "WHERE s.relcloud_id = g.tupleid;";
    System.out.println(sqlstring);

    Statement stmt = conn.createStatement();

    // initialize the testing table with a hash-partition-like distribution
    // to avoid complaints from the classifier
    if (!testingtable.equals(sampledtrainingtable)) {
        int i = 0;
        Object o = enumclassvalues.nextElement();
        // set everything to an existing value to ensure that every field is covered
        stmt.executeUpdate("UPDATE " + testingtable + " SET " + pcol + "=" + o + " WHERE tableid = '"
                + tableProcessed + "'");
        // and then sprinkle in a bunch of other values (unsure whether it is required);
        while (enumclassvalues.hasMoreElements()) {
            o = enumclassvalues.nextElement();
            // FIXME there might still be an issue in which tupleid%i does not exist,
            // and thus one of the "o" never appears in the instance...
            stmt.executeUpdate("UPDATE " + testingtable + " SET " + pcol + "=" + o + " WHERE tupleid%"
                    + numbPart + "=" + i + " AND tableid = '" + tableProcessed + "'");
            i++;
        }
    }

    ResultSet res = stmt.executeQuery(sqlstring);

    // create an instance from the resultset
    Instances data_tupleid = WekaHelper.retrieveInstanceFromResultSetComplete(res, dbPropertyFile);
    res.close();
    data_tupleid.setClassIndex(data_tupleid.numAttributes() - 1);

    Instances data_no_tupleid = makeLastNominal(data_tupleid);
    data_no_tupleid.setClassIndex(data_no_tupleid.numAttributes() - 1);

    // remove tupleid from data_no_tupleid, still available in data_tupleid
    data_no_tupleid.deleteAttributeAt(0);

    // if(data_no_tupleid.classAttribute().numValues()>1){
    System.out.println("Running the tuples through the classifier to populate " + explainedPartitionCol);

    // use data that still has the tupleid and newData for the classification
    Enumeration enum_data_tupleid = data_tupleid.enumerateInstances();
    Enumeration enum_data_no_tupleid = data_no_tupleid.enumerateInstances();

    PreparedStatement updateJustCol = conn.prepareStatement("UPDATE `" + testingtable + "` SET `"
            + explainedPartitionCol + "` = ? " + "WHERE tableid = '" + tableProcessed
            + "' AND tupleid = ?;");

    while (enum_data_tupleid.hasMoreElements() && enum_data_no_tupleid.hasMoreElements()) {
        Instance tupIDinstance = (Instance) enum_data_tupleid.nextElement();
        Instance instance = (Instance) enum_data_no_tupleid.nextElement();

        double part = classifier.classifyInstance(instance);
        if (part == Instance.missingValue())
            System.err.println("No classification for:" + instance.toString());

        updateJustCol.setInt(1, (int) part);
        updateJustCol.setInt(2, (int) tupIDinstance.value(0));
        // System.out.println(tableProcessed + " " + instance.value(0) + " "
        //     + tupIDinstance.classValue() + " " + part);
        updateJustCol.execute();
        updateJustCol.clearParameters();
    }
    updateJustCol.close();
}
From source file:com.relationalcloud.partitioning.explanation.ExplanationHandler.java
License:Open Source License
/**
 * Fetches from the database the content of the table and the partition labels,
 * and prepares a Weka Instances object by sampling and cleaning it.
 *
 * @param tableProcessed
 * @param arraySc
 * @param conn
 * @return
 */
public Instances generateInstancesForTable(String tabname, ArrayList<SimpleCount> arraySc, Connection conn) {

    tabname = removeQuotes(tabname);

    Statement stmt;
    try {
        stmt = conn.createStatement();

        ResultSet test = stmt
                .executeQuery("SELECT count(*) FROM " + sampledtrainingtable + " WHERE " + pcol + " is null");
        // safety check, verifies that there are no nulls in input table.
        if (test.next() && test.getInt(1) > 0)
            throw new Exception("Table " + sampledtrainingtable + " contains nulls in " + pcol);

        // get from the DB the tuples content and their partitioning column
        String sqlstring = "SELECT ";
        for (SimpleCount sc : arraySc) {
            sqlstring += "s." + sc.colname + ", ";
        }
        sqlstring += "g." + pcol + " FROM " + "(SELECT tupleid," + pcol + " FROM `" + sampledtrainingtable
                + "` WHERE tableid = '" + tabname + "') AS g, relcloud_" + tabname + " AS s "
                + "WHERE s.relcloud_id = g.tupleid";
        System.out.println(sqlstring);

        ResultSet res = stmt.executeQuery(sqlstring);

        // create an instance from the resultset
        Instances data = WekaHelper.retrieveInstanceFromResultSetComplete(res, dbPropertyFile);
        res.close();

        // prepare the data, by setting the class attribute and sampling if required
        data = makeLastNominal(data);
        data.setClassIndex(data.numAttributes() - 1);
        data = sampleTraining(Double.parseDouble(ini.getProperty("Explanation.j48SamplingThreshold")), data);

        System.out.println(data.toSummaryString());

        return data;

    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
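Both ExplanationHandler methods call makeLastNominal before setClassIndex because tree learners such as J48 require a nominal class attribute; setClassIndex only designates which attribute is the class, it never converts that attribute's type.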
From source file:com.sensyscal.activityrecognition2.utils.Classifiers.java
License:LGPL
public static int customKnnClassifier(double[] newInstanceArray) {
    // TODO Auto-generated method stub
    ts1 = new Timestamp(System.currentTimeMillis());
    int activityId = 0;
    String classLabel = "";

    ArrayList<Attribute> atts = new ArrayList<Attribute>();
    ArrayList<String> classVal = new ArrayList<String>();
    classVal.add("STANDING");
    classVal.add("SITTING");
    classVal.add("LYINGDOWN");
    classVal.add("WALKING");

    atts.add(new Attribute("class", classVal));
    atts.add(new Attribute("1_1_2_1"));
    atts.add(new Attribute("1_1_3_1"));
    atts.add(new Attribute("1_1_9_2"));
    atts.add(new Attribute("2_1_3_1"));
    atts.add(new Attribute("2_1_4_1"));
    atts.add(new Attribute("2_1_9_2"));

    Instances dataUnlabeled = new Instances("TestInstances", atts, 0);
    dataUnlabeled.add(new DenseInstance(1.0, newInstanceArray));
    dataUnlabeled.setClassIndex(0);

    try {
        activityId = (int) (MonitoringWorkerThread.cls.classifyInstance(dataUnlabeled.firstInstance()));
        classLabel = dataUnlabeled.firstInstance().classAttribute().value(activityId);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    ts = new Timestamp(System.currentTimeMillis());
    // Log.e("classifyActivity Knn", " -> Impiegati: " + (ts.getTime() - ts1.getTime()) + " ms;\n");
    return getActivityIDofClassLabel(classLabel);
}
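Note that these activity-recognition examples add the class attribute first, so setClassIndex(0) is the correct call here; the index passed to setClassIndex must match wherever the class attribute actually sits, not necessarily the last position.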
From source file:com.sensyscal.activityrecognition2.utils.Classifiers.java
License:LGPL
public static int customJRipClassifier(double[] newInstanceArray) {
    // TODO Auto-generated method stub
    ts1 = new Timestamp(System.currentTimeMillis());
    int activityId = 0;
    String classLabel = "";

    ArrayList<Attribute> atts = new ArrayList<Attribute>();
    ArrayList<String> classVal = new ArrayList<String>();
    classVal.add("STANDING");
    classVal.add("WALKING");
    classVal.add("SITTING");
    classVal.add("LYINGDOWN");

    atts.add(new Attribute("class", classVal));
    atts.add(new Attribute("1_1_2_1"));
    atts.add(new Attribute("1_1_3_1"));
    atts.add(new Attribute("1_1_9_2"));
    atts.add(new Attribute("2_1_3_1"));
    atts.add(new Attribute("2_1_4_1"));
    atts.add(new Attribute("2_1_9_2"));

    Instances dataUnlabeled = new Instances("TestInstances", atts, 0);
    dataUnlabeled.add(new DenseInstance(1.0, newInstanceArray));
    dataUnlabeled.setClassIndex(0);

    try {
        activityId = (int) MonitoringWorkerThread.cls.classifyInstance(dataUnlabeled.firstInstance());
        Log.i("classifyActivity JRip ---->", activityId + "");
        classLabel = dataUnlabeled.firstInstance().classAttribute().value((int) activityId);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    ts = new Timestamp(System.currentTimeMillis());
    // Log.i("classifyActivity JRip", " -> Impiegati: " + (ts.getTime() - ts1.getTime()) + " ms;\n");
    return getActivityIDofClassLabel(classLabel);
}
From source file:com.sensyscal.activityrecognition2.utils.Classifiers.java
License:LGPL
public static int customJ48Classifier(double[] newInstanceArray) {
    // TODO Auto-generated method stub
    ts1 = new Timestamp(System.currentTimeMillis());
    int activityId = 0;
    String classLabel = "";

    ArrayList<Attribute> atts = new ArrayList<Attribute>();
    ArrayList<String> classVal = new ArrayList<String>();
    classVal.add("STANDING");
    classVal.add("SITTING");
    classVal.add("LYINGDOWN");
    classVal.add("WALKING");

    atts.add(new Attribute("class", classVal));
    atts.add(new Attribute("1_1_2_1"));
    atts.add(new Attribute("1_1_3_1"));
    atts.add(new Attribute("1_1_9_2"));
    atts.add(new Attribute("2_1_3_1"));
    atts.add(new Attribute("2_1_4_1"));
    atts.add(new Attribute("2_1_9_2"));

    Instances dataUnlabeled = new Instances("TestInstances", atts, 0);
    dataUnlabeled.add(new DenseInstance(1.0, newInstanceArray));
    dataUnlabeled.setClassIndex(0);

    try {
        activityId = (int) getJ48ActivityId(
                MonitoringWorkerThread.cls.classifyInstance(dataUnlabeled.firstInstance()));
        classLabel = dataUnlabeled.firstInstance().classAttribute().value((int) activityId);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    ts = new Timestamp(System.currentTimeMillis());
    // Log.e("classifyActivity J48", " -> Impiegati: " + (ts.getTime() - ts1.getTime()) + " ms;\n");
    return activityId; // getActivityIDofClassLabel(classLabel);
}