List of usage examples for org.apache.mahout.math Arrays toString
public static String toString(boolean[] array)
From source file:analyticalmethods.MeanValueAnalysis.java
License:Open Source License
public MeanValueAnalysis(double[] visitVector, Distribution[] serviceTimeDistributions, StationType[] stationType, int nJobs) { nStations = stationType.length;/* w w w. ja v a2 s .com*/ throughtputs = new double[nStations][nJobs]; utilization = new double[nStations][nJobs]; meanN = new double[nStations][nJobs]; MeanWaitingTimes = new double[nStations][nJobs]; double sum; //con n = 0 il numero medio di job 0 for (int i = 0; i < nStations; i++) { meanN[0][i] = 0.0; } for (int k = 1; k < nJobs; k++) { //calcolo i tempi medi di attesa per ogni stazione for (int i = 0; i < nStations; i++) { //wi(n)= si(1 + ni(n-1)) if (stationType[i] == StationType.SINGLE_SERVER) { MeanWaitingTimes[k][i] = serviceTimeDistributions[i].getMean() * (1 + meanN[k - 1][i]); } if (stationType[i] == StationType.INFINITE_SERVER) { //le stazioni infinite server non hanno coda, i job vengono serviti immediatamente MeanWaitingTimes[k][i] = serviceTimeDistributions[i].getMean(); } } //usando little calcolo throughtput del cortocircuito tra stazione 0 e 1 //va bene? c' bisogno di visite calcolate su xo? sum = 0.0; for (int i = 0; i < nStations; i++) { sum += visitVector[i] * MeanWaitingTimes[k][i]; } throughtputs[k][0] = k / sum; for (int i = 0; i < nStations; i++) { //Xi(k) = Vi * X0 throughtputs[k][i] = visitVector[i] * throughtputs[k][0]; // Ui(k) = Si * Xi(k) utilization[k][i] = serviceTimeDistributions[i].getMean() * throughtputs[i][k]; //ni(k) = ui(k) (1 + ni(k-1)) meanN[k][i] = utilization[k][i] * (1 + meanN[k - 1][i]); } } // System.out.println("analyticalmethods.MeanValueAnalysisLI.MeanValueAnalysisLI()"); System.out.println("MeanWaitingTime " + Arrays.toString(MeanWaitingTimes[nJobs])); System.out.println("meanN " + Arrays.toString(meanN[nJobs])); System.out.println("utilization " + Arrays.toString(utilization[nJobs])); System.out.println("throughtput " + Arrays.toString(throughtputs[nJobs])); }
From source file:cc.kave.episodes.mining.evaluation.Evaluation.java
License:Apache License
/**
 * Appends the evaluation-configuration header (LaTeX-style "%%" comment lines)
 * to the report buffer: frequency, bidirectional measure, querying strategy,
 * proposal strategy and the fixed similarity/query settings.
 */
private void configurations() {
    append("\n");
    append("%% - Evaluations configuration:\n");
    append("%% - Frequency = %d\n", FREQUENCY);
    append("%% - Bidirectional measure = %s\n", MathUtils.round(BIDIRECTIONAL, 2));
    append("%% - Querying strategy = %s\n", Arrays.toString(percentages));
    append("%% - Proposal strategy = %d\n", PROPOSALS);
    append("%% - Similarity metric = F1-value\n");
    append("%% - Number of maximal queries = all combinations\n\n");
}
From source file:com.ikanow.infinit.e.processing.custom.launcher.CustomSavedQueryQueueLauncher.java
License:Open Source License
// Executes a saved query against the document index and appends any new
// results to the saved query's document-queue share. Steps (they mirror the
// numbered comments embedded in the code below):
//   1) unless this is the first run, add a time-range term covering the
//      interval since the last run (hours/days rounded up, with margin,
//      chosen by the queue's Hourly/Daily/Weekly frequency); skipped when the
//      last run is more than 14 days ago so everything is queried;
//   2) strip query output we do not need (aggregations, entities, events,
//      facts, summaries, timeline, metadata) and apply numReturn defaults;
//   3) run the query as the share owner's userid over the share's communities;
//   4) re-read the share, prepend any doc ids newer than the last-inserted
//      marker to the queue, optionally build and send an HTML alert email,
//      trim the queue to its maximum size, and persist the updated share.
// Errors are logged (not rethrown) with the owner id and community list.
//
// NOTE(review): in step 2, `if (null == query.score.numAnalyze) {
// query.output.docs.numReturn = 1000; }` looks like it was meant to set
// query.score.numAnalyze rather than overwrite numReturn - confirm against
// the query engine before relying on it.
// NOTE(review): the physical line breaks of the original file were lost when
// this listing was generated (mid-line `//` comments swallow what follows on
// the same line); the code is preserved byte-for-byte below.
@SuppressWarnings("unchecked") public static void executeQuery(DocumentQueueControlPojo savedQuery) { if (null == savedQuery._parentShare) { return;/*w w w.jav a2s .c om*/ } AdvancedQueryPojo query = savedQuery.getQueryInfo().getQuery(); // 1) append the a time as an extra query term (unless it's the first time) if (null != savedQuery.getLastDocIdInserted()) { long lastRun = savedQuery.getLastDocIdInserted().getTime(); if (null != savedQuery.getQueryInfo().getLastRun()) { long altLastRun = savedQuery.getQueryInfo().getLastRun().getTime(); if (altLastRun < lastRun) { // pick the longest duration lastRun = altLastRun; } } lastRun = ((new Date().getTime() - lastRun) / 1000L + 3599L) / 3600L; // (hours rounded up) if (lastRun < (14L * 24L)) { // if it's more than 14 days then query over everything if (null == query.qt) { query.qt = new ArrayList<AdvancedQueryPojo.QueryTermPojo>(1); } AdvancedQueryPojo.QueryTermPojo extraTermTime = new AdvancedQueryPojo.QueryTermPojo(); extraTermTime.time = new AdvancedQueryPojo.QueryTermPojo.TimeTermPojo(); extraTermTime.time.max = "now+1d"; // (ie now plus some margin) if (savedQuery.getQueryInfo() .getFrequency() == DocumentQueueControlPojo.SavedQueryInfo.DocQueueFrequency.Hourly) { extraTermTime.time.min = "now-" + (lastRun + 1) + "h"; //extraTermTime.time.min = "now-2h"; // (just add some margin) } else if (savedQuery.getQueryInfo() .getFrequency() == DocumentQueueControlPojo.SavedQueryInfo.DocQueueFrequency.Daily) { extraTermTime.time.min = "now-" + (lastRun + 6) + "h"; //extraTermTime.time.min = "now-30h"; // (just add some margin) } else if (savedQuery.getQueryInfo() .getFrequency() == DocumentQueueControlPojo.SavedQueryInfo.DocQueueFrequency.Weekly) { lastRun = (lastRun + 23L) / 24L; extraTermTime.time.min = "now-" + (lastRun + 1) + "d"; //extraTermTime.time.min = "now-8d"; // (just add some margin) } query.qt.add(extraTermTime); if (null != query.logic) { // else terms ANDed together, ie what I want query.logic = "(" + 
query.logic + ") AND " + query.qt.size(); } } } //TESTED (test3abc) // 2) other minor mods to the query engine (because there's lots we don't care about) if (null == query.output) { query.output = new AdvancedQueryPojo.QueryOutputPojo(); if (null == query.output.docs) { query.output.docs = new AdvancedQueryPojo.QueryOutputPojo.DocumentOutputPojo(); } } if (null == query.score) { query.score = new AdvancedQueryPojo.QueryScorePojo(); } if (null == query.input) { query.input = new AdvancedQueryPojo.QueryInputPojo(); } query.output.aggregation = null; // (no aggregations) query.output.docs.ents = false; query.output.docs.events = false; query.output.docs.facts = false; query.output.docs.summaries = false; query.output.docs.eventsTimeline = false; query.output.docs.metadata = false; if (null == query.output.docs.numReturn) { query.output.docs.numReturn = 100; // (default) } if (null == query.score.numAnalyze) { query.output.docs.numReturn = 1000; // (default) } //TESTED (entire block) // 3) run saved query: QueryHandler queryHandler = new QueryHandler(); StringBuffer errorString = new StringBuffer(); StringBuffer communityIdStrList = new StringBuffer(); for (ObjectId commId : savedQuery.getQueryInfo().getQuery().communityIds) { if (communityIdStrList.length() > 0) { communityIdStrList.append(','); } communityIdStrList.append(commId.toString()); } //TESTED try { //DEBUG //System.out.println("COMMS="+communityIdStrList.toString() + ": QUERY=" + query.toApi()); // (should have a version of this that just returns the IPs from the index engine) // (for now this will do) ResponsePojo rp = queryHandler.doQuery(savedQuery._parentShare.getOwner().get_id().toString(), query, communityIdStrList.toString(), errorString); if (null == rp) { throw new RuntimeException(errorString.toString()); // (handled below) } // 4) Add the results to the original data SharePojo savedQueryShare = SharePojo.fromDb(DbManager.getSocial().getShare() .findOne(new BasicDBObject(SharePojo._id_, 
savedQuery._parentShare.get_id())), SharePojo.class); if (null != savedQueryShare) { DocumentQueueControlPojo toModify = DocumentQueueControlPojo.fromApi(savedQueryShare.getShare(), DocumentQueueControlPojo.class); List<BasicDBObject> docs = (List<BasicDBObject>) rp.getData(); if ((null != docs) && !docs.isEmpty()) { if (null == toModify.getQueueList()) { toModify.setQueueList(new ArrayList<ObjectId>(docs.size())); } ObjectId ignoreBeforeId = toModify.getLastDocIdInserted(); ObjectId maxDocId = toModify.getLastDocIdInserted(); //DEBUG //System.out.println("before, num docs=" + toModify.getQueueList().size() + " adding " + docs.size() + " from " + ignoreBeforeId); // Some alerting preamble StringBuffer alertText = null; StringBuffer alertTitle = null; String rootUrl = new PropertiesManager().getURLRoot().replace("/api/", ""); int maxDocsToAdd = 10; // (default) boolean alert = false; if ((null != toModify.getQueryInfo().getAlert()) && (null != toModify.getQueryInfo().getAlert().getEmailAddresses()) && !toModify.getQueryInfo().getAlert().getEmailAddresses().isEmpty()) { alert = true; alertText = new StringBuffer(); if (null != toModify.getQueryInfo().getAlert().getMaxDocsToInclude()) { maxDocsToAdd = toModify.getQueryInfo().getAlert().getMaxDocsToInclude(); if (maxDocsToAdd < 0) { maxDocsToAdd = Integer.MAX_VALUE; } } createAlertPreamble(alertText, toModify.getQueryInfo().getQuery(), savedQuery._parentShare.get_id(), rootUrl); } //TESTED // Add new docs... 
int numDocsAdded = 0; for (BasicDBObject doc : docs) { ObjectId docId = doc.getObjectId(DocumentPojo._id_); if (null != docId) { if (null != ignoreBeforeId) { if (docId.compareTo(ignoreBeforeId) <= 0) { // ie docId <= ignoreBeforeId continue; } } //(end check if this doc has already been seen) toModify.getQueueList().add(0, docId); //Alerting if (alert) { // (this fn checks if the max number of docs have been added): createAlertDocSummary(alertText, numDocsAdded, maxDocsToAdd, doc, rootUrl); numDocsAdded++; } if (null == maxDocId) { maxDocId = docId; } else if (maxDocId.compareTo(docId) < 0) { // ie maxDocId < docId maxDocId = docId; } } //TESTED (test5) } //(end loop over new docs) // More alerting if (alert && (numDocsAdded > 0)) { alertTitle = new StringBuffer("IKANOW: Queue \"").append(toModify.getQueueName()) .append("\" has ").append(numDocsAdded).append(" new"); if (numDocsAdded == 1) { alertTitle.append(" document."); } else { alertTitle.append(" documents."); } // (terminate the doc list) if (maxDocsToAdd > 0) { alertText.append("</ol>"); alertText.append("\n"); } String to = (Arrays .toString(toModify.getQueryInfo().getAlert().getEmailAddresses().toArray()) .replaceAll("[\\[\\]]", "")).replace(',', ';'); try { new SendMail(null, to, alertTitle.toString(), alertText.toString()).send("text/html"); } catch (Exception e) { //DEBUG //e.printStackTrace(); } } //TESTED // Remove old docs... 
int maxDocs = query.output.docs.numReturn; if (null != toModify.getMaxDocs()) { // override maxDocs = toModify.getMaxDocs(); } if (toModify.getQueueList().size() > maxDocs) { toModify.setQueueList(toModify.getQueueList().subList(0, maxDocs)); } //TESTED (test2.2) //DEBUG //System.out.println("after, num docs=" + toModify.getQueueList().size() + " at " + maxDocId); // Update share info: toModify.setLastDocIdInserted(maxDocId); // We've modified the share so update it: savedQueryShare.setShare(toModify.toApi()); savedQueryShare.setModified(new Date()); DbManager.getSocial().getShare().save(savedQueryShare.toDb()); } //(end found some docs) } //(end found share) } catch (Exception e) { _logger.info("knowledge/query userid=" + savedQuery._parentShare.getOwner().get_id() + " groups=" + communityIdStrList + " error=" + e.getMessage()); } }
From source file:com.ikanow.infinit.e.processing.custom.launcher.CustomSavedQueryQueueLauncher.java
License:Open Source License
public static void createAlertPreamble(StringBuffer alertEmailText, AdvancedQueryPojo query, ObjectId queueId, String rootUrl) {//from www . ja v a 2 s . c o m alertEmailText.append("<p>"); alertEmailText.append("Links for viewing the documents in the GUI:"); alertEmailText.append("</p>"); alertEmailText.append("\n"); alertEmailText.append("<ul>"); alertEmailText.append("\n"); alertEmailText.append("<li/>"); alertEmailText.append("<a href=\"").append(rootUrl); try { alertEmailText.append("?query="); StringBuffer guiQuery = new StringBuffer("{\"qt\":[{\"ftext\":\"$cache:").append(queueId) .append("\"}]}"); alertEmailText.append(URLEncoder.encode(guiQuery.toString(), "UTF-8")); alertEmailText.append("&communityIds=") .append(Arrays.toString(query.communityIds.toArray()).replaceAll("[\\[\\]]", "")); } catch (Exception e) { } // (just carry on) alertEmailText.append("\">"); alertEmailText.append("The current queue"); alertEmailText.append("</a>"); alertEmailText.append("\n"); alertEmailText.append("<li/>"); alertEmailText.append("<a href=\"").append(rootUrl); try { alertEmailText.append("?query="); alertEmailText.append(URLEncoder.encode(query.toApi(), "UTF-8").replace("+", "%20")); alertEmailText.append("&communityIds=") .append(Arrays.toString(query.communityIds.toArray()).replaceAll("[\\[\\]]", "")); } catch (Exception e) { } // (just carry on) alertEmailText.append("\">"); alertEmailText.append("Results from the saved query"); alertEmailText.append("</a>"); alertEmailText.append("\n"); alertEmailText.append("</ul>"); alertEmailText.append("\n"); }
From source file:edu.coeia.filesignature.FileSignaturePanel.java
License:Open Source License
/**
 * Loads all known file signatures from the signature definition file and adds
 * one row per signature (extensions, signature bytes, type, id) to the
 * signature database table.
 *
 * @throws IOException if the signature definition file cannot be parsed
 */
private void fillDataBaseTable() throws IOException {
    listFiles = FileSignatureParser.paserFile();
    for (FileSignature signature : listFiles) {
        Object[] row = {
                Arrays.toString(signature.getExtension().toArray()),
                signature.getSignature(),
                signature.getType(),
                signature.getID()
        };
        JTableUtil.addRowToJTable(SignatureTableDB, row);
    }
}
From source file:edu.coeia.filesignature.FileSignaturePanel.java
License:Open Source License
private Object[] FormatTable(File file, String message, FileSignature fs, Set<String> SignatureList, List<String> Exenstions) throws IOException { Object[] status_msg = new Object[5]; status_msg[0] = file.getName();//w w w. j a v a 2 s . c om status_msg[1] = FileSignatureAnalysis.getFileSignature(file); status_msg[2] = message; SignatureList.add(fs.getSignature()); String formatedSignatures = Utilities.getCommaSeparatedStringFromCollection(SignatureList); status_msg[3] = formatedSignatures; Exenstions.add(Arrays.toString(fs.getExtension().toArray())); String formatedExtensions = Utilities.getCommaSeparatedStringFromCollection(Exenstions); status_msg[4] = formatedExtensions; return status_msg; }
From source file:edu.isi.karma.cleaning.ExampleSelection.java
License:Apache License
/**
 * Dumps the partitioned test data and the raw records to stdout for debugging.
 * Output format is unchanged: one "partition &lt;key&gt;" header per test-data
 * entry followed by its rows (via Arrays.toString), then every raw record on
 * its own line.
 */
public void printdata() {
    // Accumulate with StringBuilder; the original used String += inside the
    // loops, which is accidentally O(n^2).
    StringBuilder partitions = new StringBuilder();
    for (String key : this.testdata.keySet()) {
        HashMap<String, String[]> rows = testdata.get(key);
        partitions.append("partition ").append(key).append("\n");
        for (String[] row : rows.values()) {
            partitions.append(Arrays.toString(row)).append("\n");
        }
    }
    System.out.println(partitions.toString());
    StringBuilder rawDump = new StringBuilder();
    for (String[] record : this.raw.values()) {
        rawDump.append(Arrays.toString(record)).append("\n");
    }
    System.out.println(rawDump.toString());
}
From source file:edu.isi.karma.cleaning.Research.Tools.java
License:Apache License
// Interactive "transformation by example" loop. First reads every record of
// the input file (stripping one leading/trailing double quote per line) and
// runs it through the DataPreProcessor. Then, until the user types "end":
// prompts on stdin for a (raw, target) example pair, wraps the raw value in
// <_START>/<_END> markers, appends it to the accumulated examples, synthesizes
// a transformation program from all examples so far, applies the program's
// first rule to every record, and writes (original, transformed) pairs to a
// CSV under /Users/bowu/Research/50newdata/tmp/<input-file-name>.
// All exceptions are caught and printed; nothing is rethrown.
//
// NOTE(review): the physical line breaks of the original file were lost when
// this listing was generated; in particular `// Collection<ProgramRule> ps =
// psProgSynthesis.run_main();` appears mid-line, so the original scope of
// that `//` comment (and where `ps` was actually declared) cannot be
// recovered from this listing. The code is preserved byte-for-byte below.
// NOTE(review): the input reader `cr` is never closed - resource leak in the
// original; fix at the source, not in this listing.
public void transformFile(String fpath) { try {/*w w w .  j a va 2  s. com*/ Vector<String[]> examples = new Vector<String[]>(); // read and write the data File nf = new File(fpath); BufferedReader cr = new BufferedReader(new FileReader(fpath)); String pair = ""; Vector<String> vtmp = new Vector<String>(); while ((pair = cr.readLine()) != null) { pair = pair.trim(); if (pair.length() == 0) continue; if (pair.charAt(0) == '\"') { pair = pair.substring(1); } if (pair.charAt(pair.length() - 1) == '\"') { pair = pair.substring(0, pair.length() - 1); } vtmp.add(pair); } DataPreProcessor dpp = new DataPreProcessor(vtmp); dpp.run(); Messager msger = new Messager(); while (true) { Vector<String[]> result = new Vector<String[]>(); System.out.print("Enter raw value\n"); // open up standard input BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); String raw = null; raw = br.readLine(); if (raw.compareTo("end") == 0) { break; } System.out.print("Enter tar value\n"); // open up standard input String tar = null; tar = br.readLine(); // learn the program String[] xStrings = { "<_START>" + raw + "<_END>", tar }; examples.add(xStrings); for (String[] elem : examples) { System.out.println("Examples inputed: " + Arrays.toString(elem)); } String ofpath = "/Users/bowu/Research/50newdata/tmp/" + nf.getName(); CSVWriter cw = new CSVWriter(new FileWriter(new File(ofpath))); ProgSynthesis psProgSynthesis = new ProgSynthesis(); psProgSynthesis.inite(examples, dpp, msger); // Collection<ProgramRule> ps = psProgSynthesis.run_main(); msger.updateCM_Constr(psProgSynthesis.partiCluster.getConstraints()); msger.updateWeights(psProgSynthesis.partiCluster.weights); ProgramRule pr = ps.iterator().next(); System.out.println("" + psProgSynthesis.myprog.toString()); System.out.println("" + pr.toString()); for (String org : vtmp) { String ttar = pr.transform(org); String[] pValue = { org, ttar }; cw.writeNext(pValue); System.out.println(String.format("%s,%s", org, ttar)); 
result.add(pValue); } cw.close(); } } catch (Exception e) { e.printStackTrace(); } }
From source file:edu.isi.karma.cleaning.Tools.java
License:Apache License
public void transformFile(String fpath) { try {/*from w w w . j a v a 2s. c o m*/ Vector<String[]> examples = new Vector<String[]>(); while (true) { System.out.print("Enter raw value\n"); // open up standard input BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); String raw = null; raw = br.readLine(); if (raw.compareTo("end") == 0) { break; } System.out.print("Enter tar value\n"); // open up standard input String tar = null; tar = br.readLine(); // learn the program String[] xStrings = { "<_START>" + raw + "<_END>", tar }; examples.add(xStrings); for (String[] elem : examples) { System.out.println("Examples inputed: " + Arrays.toString(elem)); } ProgSynthesis psProgSynthesis = new ProgSynthesis(); psProgSynthesis.inite(examples); Collection<ProgramRule> ps = psProgSynthesis.run_main(); ProgramRule pr = ps.iterator().next(); System.out.println("" + pr.toString()); // read and write the data File nf = new File(fpath); String ofpath = "/Users/bowu/Research/50newdata/tmp/" + nf.getName(); CSVWriter cw = new CSVWriter(new FileWriter(new File(ofpath))); @SuppressWarnings("resource") BufferedReader cr = new BufferedReader(new FileReader(fpath)); String pair = ""; while ((pair = cr.readLine()) != null) { pair = pair.trim(); if (pair.length() == 0) continue; if (pair.charAt(0) == '\"') { pair = pair.substring(1); } if (pair.charAt(pair.length() - 1) == '\"') { pair = pair.substring(0, pair.length() - 1); } InterpreterType rule = pr.getRuleForValue(pair); String val = rule.execute(pair); String[] elem = { pair, val }; System.out.println(elem[0] + "," + elem[1]); cw.writeNext(elem); } cw.close(); } } catch (Exception e) { e.printStackTrace(); } }
From source file:gov.va.isaac.gui.treeview.SctTreeView.java
License:Apache License
// Locates a concept in the taxonomy tree and selects/scrolls to it. Runs the
// search in a background Task: after awaiting tree initialization, it walks
// UP the taxonomy from the requested concept via IS_A origin relationships
// until no parent is found (collecting the UUID path to the root), then walks
// DOWN from the root tree item matching each path element with findChild(),
// returning the deepest tree item reached (or null for a "pending concept"
// that buildFxConcept cannot resolve yet). On success (FX thread) it scrolls
// to and selects that item and clears the progress indicator; on failure it
// logs (unless a global shutdown was requested) and clears the indicator.
//
// NOTE(review): only the last version of each origin-relationship chronicle
// is inspected, and only the first IS_A parent found is followed - presumably
// a single-parent path is sufficient for display purposes; confirm.
// NOTE(review): the physical line breaks of the original file were lost when
// this listing was generated; the code is preserved byte-for-byte below.
public void showConcept(final UUID conceptUUID, final BooleanProperty workingIndicator) { if (initializationCountDownLatch_.getCount() > 1) { // Called before initial init() run, so run init(). // showConcept Task will internally await() init() completion. init();// w w w  .  j a  v a  2s. co  m } // Do work in background. Task<SctTreeItem> task = new Task<SctTreeItem>() { @Override protected SctTreeItem call() throws Exception { // await() init() completion. initializationCountDownLatch_.await(); LOG.debug("Looking for concept {} in tree", conceptUUID); final ArrayList<UUID> pathToRoot = new ArrayList<>(); pathToRoot.add(conceptUUID); // Walk up taxonomy to origin until no parent found. UUID current = conceptUUID; while (true) { ConceptChronicleDdo concept = buildFxConcept(current); if (concept == null) { // Must be a "pending concept". // Not handled yet. return null; } // Look for an IS_A relationship to origin. boolean found = false; for (RelationshipChronicleDdo chronicle : concept.getOriginRelationships()) { RelationshipVersionDdo relationship = chronicle.getVersions() .get(chronicle.getVersions().size() - 1); UUID isaRelTypeUUID = SnomedRelType.IS_A.getUuids()[0]; if (relationship.getTypeReference().getUuid().equals(isaRelTypeUUID)) { UUID parentUUID = relationship.getDestinationReference().getUuid(); pathToRoot.add(parentUUID); current = parentUUID; found = true; break; } } // No parent IS_A relationship found, stop looking. if (!found) { break; } } LOG.debug("Calculated root path {}", Arrays.toString(pathToRoot.toArray())); SctTreeItem currentTreeItem = rootTreeItem; // Walk down path from root. for (int i = pathToRoot.size() - 1; i >= 0; i--) { SctTreeItem child = findChild(currentTreeItem, pathToRoot.get(i)); if (child == null) { break; } currentTreeItem = child; } return currentTreeItem; } @Override protected void succeeded() { final SctTreeItem lastItemFound = this.getValue(); // Expand tree to last item found. 
if (lastItemFound != null) { int row = treeView_.getRow(lastItemFound); treeView_.scrollTo(row); treeView_.getSelectionModel().clearAndSelect(row); } // Turn off progress indicator. if (workingIndicator != null) { workingIndicator.set(false); } } @Override protected void failed() { Throwable ex = getException(); if (!wasGlobalShutdownRequested()) { LOG.warn("Unexpected error trying to find concept in Tree", ex); // Turn off progress indicator. if (workingIndicator != null) { workingIndicator.set(false); } } } }; Utility.execute(task); }