List of usage examples for org.apache.commons.lang ArrayUtils removeElement
public static short[] removeElement(short[] array, short element)
Removes the first occurrence of the specified element from the specified array. All subsequent elements are shifted to the left (their indices are decremented by one). The method returns a new array and never modifies the input; if the array does not contain the element, a copy of the input array is returned. Equivalent overloads exist for every primitive array type and for object arrays; the examples below mostly use the object-array overload (String[], File[], SortField[]) together with the long[] overload.
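Before the project examples, here is a minimal self-contained sketch of that documented behaviour (the class and variable names are illustrative only and are not taken from any of the projects below):

import java.util.Arrays;
import org.apache.commons.lang.ArrayUtils;

public class RemoveElementDemo {
    public static void main(String[] args) {
        short[] values = { 1, 2, 3, 2 };

        // Only the first matching element is removed, and a new array is returned.
        short[] trimmed = ArrayUtils.removeElement(values, (short) 2);
        System.out.println(Arrays.toString(trimmed)); // [1, 3, 2]

        // The input array is left untouched.
        System.out.println(Arrays.toString(values)); // [1, 2, 3, 2]

        // A missing element yields an unchanged copy of the input.
        short[] unchanged = ArrayUtils.removeElement(values, (short) 9);
        System.out.println(Arrays.toString(unchanged)); // [1, 2, 3, 2]
    }
}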
From source file:com.contrastsecurity.ide.eclipse.ui.internal.views.TagDialog.java
private void removeTag(String tag) {
    if (traceTagsResource.getTags().contains(tag) || orgTagsResource.getTags().contains(tag)) {
        tagsComboViewer.removeSelectionChangedListener(tagsComboViewerListener);
        tagsComboViewer.add(tag);
        tagsComboViewer.addSelectionChangedListener(tagsComboViewerListener);
    }
    String[] tagsArray = (String[]) tableViewer.getInput();
    String[] newData = (String[]) ArrayUtils.removeElement(tagsArray, tag);
    tableViewer.setInput(newData);
}
From source file:com.nec.harvest.controller.UriageController.java
/**
 * Mapped to the URL /uriage/{orgCode}/{monthly}. Renders the sales report of a shop for the
 * given month. A message is displayed when an exception or error occurs.
 *
 * @param businessDay
 * @param user
 * @param proGNo
 * @param orgCode
 * @param monthly
 * @param model
 * @return String redirect Uri
 */
@RequestMapping("/{orgCode:[a-z0-9]+}/{monthly:[\\d]+}")
public String render(@SessionAttribute(Constants.SESS_BUSINESS_DAY) Date businessDay, @UserPrincipal User user,
        @PathVariable String proGNo, @PathVariable String orgCode,
        @PathVariable @DateTimeFormat(pattern = "yyyyMM") Date monthly, final Model model) {
    if (logger.isDebugEnabled()) {
        logger.debug("Redering uriage page...");
    }
    logger.info("Trying to generate the uriage template for monthly {} and organization code {}", monthly, orgCode);

    String monthlyStr = null;
    try {
        monthlyStr = DateFormatUtil.format(monthly, DateFormat.DATE_WITHOUT_DAY);
        model.addAttribute(PROCESSING_MONTH, monthly);
    } catch (NullPointerException | IllegalArgumentException ex) {
        logger.warn(ex.getMessage());
    }

    // ????
    Organization organization = user.getOrganization();
    if (organization != null) {
        // ????1???""?2???""??
        logger.info("????1???\"\"?2???\"\"??");
        // organization = organizationService.findByOrgCode(organization.getStrCode());
        // model.addAttribute(TAX_TYPE, organization.getTaxKbn());
        model.addAttribute(COUNT_DATE, getActualMaximumDayOfMonth(monthly));
        model.addAttribute(ORGANIZATION, organization);
    }

    // Get a list of UriageData(Hendo)/at001 data from DB
    List<SalesChange> salesChanges = null;
    try {
        salesChanges = salesChangeService.findByOrgCodeAndMonth(orgCode, monthlyStr, false);
    } catch (IllegalArgumentException | ObjectNotFoundException ex) {
        logger.warn(ex.getMessage());
    } catch (TooManyObjectsException | ServiceException ex) {
        logger.error(ex.getMessage(), ex);
        // ???????????
        model.addAttribute(ERROR_MESSAGE, getSystemError());
        model.addAttribute(ERROR, true);
        return getViewName();
    }

    try {
        String nextMontly = BusinessDayHelper.getNextMonthly(monthly, DateFormat.DATE_WITHOUT_DAY);
        boolean checkEmptyMonthly = hasSaleChangesOfMonthly(orgCode, nextMontly);
        // Disable the "NEXT MONTHLY" button if the data of next month is empty or null
        if (!checkEmptyMonthly) {
            nextMontly = null;
        }
        model.addAttribute(NEXT_MONTH, nextMontly);

        String previousMonthly = BusinessDayHelper.getPreviousMonthly(monthly, DateFormat.DATE_WITHOUT_DAY);
        checkEmptyMonthly = hasSaleChangesOfMonthly(orgCode, previousMonthly);
        // Disable the "PREVIOUS MONTHLY" button if the data of previous month is empty or null
        if (!checkEmptyMonthly) {
            previousMonthly = null;
        }
        model.addAttribute(PREVIOUS_MONTH, previousMonthly);
    } catch (ServiceException ex) {
        logger.error(ex.getMessage());
        // ???????????
        model.addAttribute(ERROR_MESSAGE, getSystemError());
        model.addAttribute(ERROR, true);
        return getViewName();
    }

    List<SalesChangeBean> salesChangeBeans = new ArrayList<>();
    if (CollectionUtils.isEmpty(salesChanges)) {
        if (logger.isDebugEnabled()) {
            logger.debug("sales data of organization code: " + orgCode + " on month: " + monthly + " is not exist.");
        }
        // model.addAttribute(ERROR, true);
        model.addAttribute(ERROR_MESSAGE, MessageHelper.get(MsgConstants.CM_QRY_M01));
        model.addAttribute(EDITABLE, Boolean.FALSE);
        model.addAttribute(SALES_CHANGE_CATEGORY, salesChangeBeans);
        return getViewName();
    }

    try {
        Map<String, Double> rateDefMap = consumptionTaxRateService.findRateDefByMonth(monthly);
        Object[] keySet = rateDefMap.keySet().toArray();
        Double taxRateTmp = 0D;
        String enfDateStrTmp = null;
        String keyStr = null;
        for (SalesChange obj : salesChanges) {
            for (int i = 0; i < keySet.length; i++) {
                keyStr = keySet[i].toString();
                Date actualDate = obj.getSrDate();
                try {
                    Date date = DateFormatUtil.parse(keyStr, FORMAT_DATE);
                    if (!actualDate.before(date)) {
                        // get taxRate with followed date
                        if (enfDateStrTmp == null || !date.before(DateFormatUtil.parse(enfDateStrTmp, FORMAT_DATE))) {
                            enfDateStrTmp = keyStr;
                            taxRateTmp = rateDefMap.get(keyStr);
                        }
                        keySet = ArrayUtils.removeElement(keySet, keySet[i]);
                        i--;
                    }
                } catch (NullPointerException | ParseException e) {
                    logger.warn("warning parse string to date is error: " + e.getMessage());
                    continue;
                }
            }
            SalesChangeBean bean = new SalesChangeBean(taxRateTmp, obj);
            salesChangeBeans.add(bean);
        }
    } catch (IllegalArgumentException ex) {
        logger.warn(ex.getMessage());
        model.addAttribute(ERROR, true);
        model.addAttribute(ERROR_MESSAGE, MessageHelper.get(MsgConstants.CM_QRY_M01));
        return getViewName();
    } catch (ServiceException ex) {
        logger.error(ex.getMessage(), ex);
        // ???????????
        model.addAttribute(ERROR_MESSAGE, getSystemError());
        model.addAttribute(ERROR, true);
        return getViewName();
    }
    model.addAttribute(SALES_CHANGE_CATEGORY, salesChangeBeans);

    try {
        SalesFixed salesFixedObj = salesFixedService.findByOrgCodeAndMonth(orgCode, monthlyStr, false);
        model.addAttribute(SALES_FIXED, salesFixedObj);
    } catch (IllegalArgumentException ex) {
        logger.warn(ex.getMessage());
        // model.addAttribute(SALES_FIXED, null);
    } catch (ServiceException ex) {
        logger.error(ex.getMessage(), ex);
        // ???????????
        model.addAttribute(ERROR_MESSAGE, getSystemError());
        model.addAttribute(ERROR, true);
        return getViewName();
    }

    // Determine whether the end-user may still change the sales data. If the data has already
    // been pushed into the Tighten table the user cannot modify it; otherwise EDITABLE is TRUE.
    Tighten tighten = null;
    try {
        tighten = tightenService.findByClassifyAndMonth("1");
        // The final month year of tighten
        String sudoOfTighten = tighten.getGetSudo();
        try {
            Date monthlyOfTighten = DateFormatUtil.parse(sudoOfTighten, DateFormat.DATE_WITHOUT_DAY);
            model.addAttribute(EDITABLE, monthly.after(monthlyOfTighten));
        } catch (NullPointerException | ParseException ex) {
            logger.warn(ex.getMessage(), ex);
            // Editable by default
            model.addAttribute(EDITABLE, Boolean.TRUE);
        }
    } catch (IllegalArgumentException | ObjectNotFoundException ex) {
        logger.warn(ex.getMessage());
        // If the sales data has not been pushed into Tighten, the current data can still be
        // edited, so EDITABLE should be Boolean.TRUE.
        Date monthsToSubtract = DateUtil.monthsToSubtract(businessDay, 3);
        model.addAttribute(EDITABLE, monthly.after(monthsToSubtract));
    } catch (TooManyObjectsException ex) {
        logger.warn(ex.getMessage(), ex);
        // Editable by default
        model.addAttribute(EDITABLE, Boolean.TRUE);
    } catch (ServiceException ex) {
        logger.error(ex.getMessage(), ex);
        // ???????????
        model.addAttribute(ERROR, Boolean.TRUE);
        model.addAttribute(ERROR_MESSAGE, getSystemError());
        return getViewName();
    }
    return getViewName();
}
From source file:com.adobe.acs.commons.workflow.bulk.execution.model.Workspace.java
/**
 * Removes the payload group from the list of active payload groups.
 *
 * @param payloadGroup the payload group to remove from the active list.
 */
public void removeActivePayloadGroup(PayloadGroup payloadGroup) {
    if (payloadGroup != null
            && ArrayUtils.contains(activePayloadGroups, payloadGroup.getDereferencedPath())) {
        activePayloadGroups = (String[]) ArrayUtils.removeElement(activePayloadGroups,
                payloadGroup.getDereferencedPath());
        properties.put(PN_ACTIVE_PAYLOAD_GROUPS, activePayloadGroups);
    }
}
From source file:bdv.bigcat.label.FragmentSegmentAssignment.java
/**
 * Detach a segment from the body that it has been associated with
 *
 * @param fragmentId
 */
public void detachFragment(final long fragmentId) {
    synchronized (this) {
        final long segmentId = lut.get(fragmentId);
        final long[] fragments = ilut.get(segmentId);
        if (fragments != null && fragments.length > 1) {
            final long[] newFragments = ArrayUtils.removeElement(fragments, fragmentId);
            ilut.put(segmentId, newFragments);

            final long newSegmentId = fragmentId;
            lut.put(fragmentId, newSegmentId);
            ilut.put(newSegmentId, new long[] { fragmentId });
        }
    }
}
From source file:de.csw.linkgenerator.plugin.lucene.LucenePlugin.java
/**
 * Creates and submits a query to the Lucene engine.
 *
 * @param query The base query, using the query engine supported by Lucene.
 * @param sortFields A list of fields to sort results by. For each field, if the name starts
 *            with '-', then that field (excluding the -) is used for reverse sorting. If
 *            <tt>null</tt> or empty, sort by hit score.
 * @param virtualWikiNames Comma separated list of virtual wiki names to search in, may be
 *            <tt>null</tt> to search all virtual wikis.
 * @param languages Comma separated list of language codes to search in, may be <tt>null</tt>
 *            or empty to search all languages.
 * @param indexes List of Lucene indexes (searchers) to search.
 * @param context The context of the request.
 * @return The list of search results.
 * @throws IOException If the Lucene searchers encounter a problem reading the indexes.
 * @throws ParseException If the query is not valid.
 */
private SearchResults search(String query, String[] sortFields, String virtualWikiNames, String languages,
        IndexSearcher[] indexes, XWikiContext context)
        throws IOException, org.apache.lucene.queryparser.classic.ParseException {
    // Turn the sorting field names into SortField objects.
    SortField[] sorts = null;
    if (sortFields != null && sortFields.length > 0) {
        sorts = new SortField[sortFields.length];
        for (int i = 0; i < sortFields.length; ++i) {
            sorts[i] = getSortField(sortFields[i]);
        }
        // Remove any null values from the list.
        int prevLength = -1;
        while (prevLength != sorts.length) {
            prevLength = sorts.length;
            sorts = (SortField[]) ArrayUtils.removeElement(sorts, null);
        }
    }
    // Perform the actual search
    return search(query, (sorts != null) ? new Sort(sorts) : null, virtualWikiNames, languages, indexes, context);
}
From source file:com.activecq.samples.replication.impl.ReverseReplicatorImpl.java
protected void activate(ComponentContext componentContext) {
    Dictionary properties = componentContext.getProperties();

    enabled = PropertiesUtil.toBoolean(properties.get(PROP_ENABLED), DEFAULT_ENABLED);
    log.debug("Enabled: " + enabled);

    sychronous = PropertiesUtil.toBoolean(properties.get(PROP_SYNCHRONOUS), DEFAULT_SYNCHRONOUS);

    suppressStatusUpdate = PropertiesUtil.toBoolean(properties.get(PROP_SUPRESS_STATUS_UPDATE),
            DEFAULT_SUPRESS_STATUS_UPDATE);

    supressVersioning = PropertiesUtil.toBoolean(properties.get(PROP_SUPRESS_VERSIONING),
            DEFAULT_SUPRESS_VERSIONING);

    paths = PropertiesUtil.toStringArray(properties.get(PROP_PATHS), DEFAULT_PATHS);
    paths = (String[]) ArrayUtils.removeElement(paths, "");

    pathWhitelist = PropertiesUtil.toStringArray(properties.get(PROP_PATH_WHITELIST), DEFAULT_PATH_WHITELIST);
    pathWhitelist = (String[]) ArrayUtils.removeElement(pathWhitelist, "");

    pathBlacklist = PropertiesUtil.toStringArray(properties.get(PROP_PATH_BLACKLIST), DEFAULT_PATH_BLACKLIST);
    pathBlacklist = (String[]) ArrayUtils.removeElement(pathBlacklist, "");

    primaryTypes = PropertiesUtil.toStringArray(properties.get(PROP_PRIMARY_TYPES), DEFAULT_PRIMARY_TYPES);
    primaryTypes = (String[]) ArrayUtils.removeElement(primaryTypes, "");

    String[] tmp = PropertiesUtil.toStringArray(properties.get(PROP_PROPERTY_MATCHES), DEFAULT_PROPERTY_MATCHES);
    tmp = (String[]) ArrayUtils.removeElement(tmp, "");

    for (final String t : tmp) {
        String[] s = StringUtils.split(t, '=');
        if (s == null || s.length != 2) {
            continue;
        }
        propertyMatches.put(s[0], s[1]);
    }
}
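Note that each removeElement call above strips at most one empty entry per array, because only the first occurrence is removed. A small hypothetical illustration of that behaviour (the array contents are made up for this sketch and are not taken from the project above):

import java.util.Arrays;
import org.apache.commons.lang.ArrayUtils;

public class SingleRemovalDemo {
    public static void main(String[] args) {
        String[] paths = { "", "/content/a", "", "/content/b" };

        // Only the first "" is dropped; the second one survives.
        String[] cleaned = (String[]) ArrayUtils.removeElement(paths, "");
        System.out.println(Arrays.toString(cleaned)); // [/content/a, , /content/b]
    }
}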
From source file:com.cartmatic.estore.common.helper.ConfigUtil.java
public String[] getBulkProdCommAttrs() {
    String[] temp = this.getConfig("BulkProdCommAttrs").split(",");
    while (ArrayUtils.contains(temp, "")) {
        temp = (String[]) ArrayUtils.removeElement(temp, "");
    }
    return temp;
}
From source file:com.cartmatic.estore.common.helper.ConfigUtil.java
public String[] getBulkSkuCommAttrs() {
    String[] temp = this.getConfig("BulkSkuCommAttrs").split(",");
    while (ArrayUtils.contains(temp, "")) {
        temp = (String[]) ArrayUtils.removeElement(temp, "");
    }
    return temp;
}
From source file:com.cartmatic.estore.common.helper.ConfigUtil.java
public String[] getBulkProdAttrs() {
    String[] temp = this.getConfig("BulkProdAttrs").split(",");
    while (ArrayUtils.contains(temp, "")) {
        temp = (String[]) ArrayUtils.removeElement(temp, "");
    }
    return temp;
}
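The three ConfigUtil getters above repeat the same contains/removeElement loop to drop every empty token produced by String.split. A hedged sketch of how that pattern could be factored into a single helper (the helper class and method names are invented for illustration; sufficiently recent Commons Lang 3 releases also provide ArrayUtils.removeAllOccurrences for the same job):

import org.apache.commons.lang.ArrayUtils;

public final class ArrayCleanup {

    private ArrayCleanup() {
    }

    // Returns a copy of the input with every "" entry removed.
    // removeElement drops only the first match, so keep calling it until none remain.
    public static String[] removeEmptyStrings(String[] input) {
        String[] result = input;
        while (ArrayUtils.contains(result, "")) {
            result = (String[]) ArrayUtils.removeElement(result, "");
        }
        return result;
    }
}

Each getter could then simply return ArrayCleanup.removeEmptyStrings(this.getConfig("BulkProdAttrs").split(",")).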
From source file:edu.isi.karma.research.modeling.ModelLearner_KnownModels.java
public static void runResearchEvaluation() throws Exception {

    /**
     * When running with k=1, change the flag "multiple.same.property.per.node" to true so all
     * attributes have at least one semantic types
     */

    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance()
            .registerByKarmaHome("/Users/mohsen/karma/");
    contextParameters.setParameterValue(ContextParameter.USER_DIRECTORY_PATH, "/Users/mohsen/karma/");
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");
    contextParameters.setParameterValue(ContextParameter.TRAINING_EXAMPLE_MAX_COUNT, "1000000");
    contextParameters.setParameterValue(ContextParameter.SEMTYPE_MODEL_DIRECTORY,
            "/Users/mohsen/karma/semantic-type-files/");
    contextParameters.setParameterValue(ContextParameter.JSON_MODELS_DIR, "/Users/mohsen/karma/models-json/");
    contextParameters.setParameterValue(ContextParameter.GRAPHVIZ_MODELS_DIR,
            "/Users/mohsen/karma/models-graphviz/");
    contextParameters.setParameterValue(ContextParameter.USER_PYTHON_SCRIPTS_DIRECTORY,
            "/Users/mohsen/karma/python/");
    contextParameters.setParameterValue(ContextParameter.EVALUATE_MRR, "/Users/mohsen/karma/evaluate-mrr/");

    PythonRepository pythonRepository = new PythonRepository(true,
            contextParameters.getParameterValue(ContextParameter.USER_PYTHON_SCRIPTS_DIRECTORY));
    PythonRepositoryRegistry.getInstance().register(pythonRepository);

    // String inputPath = Params.INPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;

    File semFilesFolder = new File(
            contextParameters.getParameterValue(ContextParameter.SEMTYPE_MODEL_DIRECTORY));

    // List<SemanticModel> semanticModels = ModelReader.importSemanticModels(inputPath);
    List<SemanticModel> semanticModels = ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR,
            Params.MODEL_MAIN_FILE_EXT);

    File[] sources = new File(Params.SOURCE_DIR).listFiles();
    File[] r2rmlModels = new File(Params.R2RML_DIR).listFiles();

    if (sources.length > 0 && sources[0].getName().startsWith("."))
        sources = (File[]) ArrayUtils.removeElement(sources, sources[0]);
    if (r2rmlModels.length > 0 && r2rmlModels[0].getName().startsWith("."))
        r2rmlModels = (File[]) ArrayUtils.removeElement(r2rmlModels, r2rmlModels[0]);

    List<SemanticModel> trainingData = new ArrayList<SemanticModel>();

    File[] trainingSources;
    File[] trainingModels;
    File trainingSource = null;
    File trainingModel = null;
    File testSource;
    File testModel;

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    for (File f : files) {
        if (f.getName().startsWith(".") || f.isDirectory()) {
            continue; // Ignore . files
        }
        ontologyManager.doImport(f, "UTF-8");
    }
    ontologyManager.updateCache();

    ModelLearningGraph modelLearningGraph = null;
    ModelLearner_KnownModels modelLearner;

    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean iterativeEvaluation = true;
    boolean useCorrectType = false;
    boolean onlyEvaluateInternalLinks = false || useCorrectType;
    boolean zeroKnownModel = false;
    int numberOfCandidates = 1;

    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels, null, 0);
        return;
    }

    int numberOfKnownModels;
    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";
    filename += "results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += zeroKnownModel ? "-ontology" : "";
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += iterativeEvaluation ? "-iterative" : "";
    filename += ".csv";

    PrintWriter resultFileIterative = null;
    PrintWriter resultFile = null;
    StringBuffer[] resultsArray = null;
    if (iterativeEvaluation) {
        resultFileIterative = new PrintWriter(new File(filePath + filename));
        resultsArray = new StringBuffer[semanticModels.size() + 2];
        for (int i = 0; i < resultsArray.length; i++) {
            resultsArray[i] = new StringBuffer();
        }
    } else {
        resultFile = new PrintWriter(new File(filePath + filename));
        resultFile.println("source \t p \t r \t t \t a \t m \n");
    }

    // new OfflineTraining().getCorrectModel(contextParameters,
    //         null, null,
    //         sources[20], r2rmlModels[20], 0, numberOfCandidates);
    // if (true) return;

    for (int i = 0; i < semanticModels.size(); i++) {
        // for (int i = 0; i <= 1; i++) {
        // int i = 1; {

        // clean semantic files folder in karma home
        FileUtils.cleanDirectory(semFilesFolder);

        trainingSource = null;
        trainingModel = null;

        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        if (zeroKnownModel)
            numberOfKnownModels = 0;
        else
            numberOfKnownModels = iterativeEvaluation ? 0 : semanticModels.size() - 1;

        if (iterativeEvaluation) {
            if (resultsArray[0].length() > 0)
                resultsArray[0].append(" \t ");
            resultsArray[0].append(newSource.getName() + "(" + newSource.getColumnNodes().size() + ")" + "\t"
                    + " " + "\t" + " " + "\t" + " " + "\t" + " ");
            if (resultsArray[1].length() > 0)
                resultsArray[1].append(" \t ");
            resultsArray[1].append("p \t r \t t \t a \t m");
        }

        // numberOfKnownModels = 2;
        while (numberOfKnownModels <= semanticModels.size() - 1) {

            trainingData.clear();

            trainingSources = new File[numberOfKnownModels];
            trainingModels = new File[numberOfKnownModels];

            int j = 0, count = 0;
            while (count < numberOfKnownModels) {
                if (j != newSourceIndex) {
                    trainingData.add(semanticModels.get(j));
                    trainingSources[count] = sources[j];
                    trainingModels[count] = r2rmlModels[j];
                    count++;
                    if (count == numberOfKnownModels) {
                        trainingSource = sources[j];
                        trainingModel = r2rmlModels[j];
                    }
                }
                j++;
            }

            modelLearningGraph = (ModelLearningGraphCompact) ModelLearningGraph
                    .getEmptyInstance(ontologyManager, ModelLearningGraphType.Compact);

            SemanticModel correctModel;
            if (useCorrectType) {
                correctModel = newSource;
                correctModel.setAccuracy(1.0);
                correctModel.setMrr(1.0);
            } else {
                testSource = sources[newSourceIndex];
                testModel = r2rmlModels[newSourceIndex];
                if (iterativeEvaluation) {
                    correctModel = new OfflineTraining().getCorrectModel(contextParameters, trainingSource,
                            trainingModel, testSource, testModel, numberOfKnownModels, numberOfCandidates);
                } else {
                    correctModel = new OfflineTraining().getCorrectModel(contextParameters, trainingSources,
                            trainingModels, testSource, testModel, numberOfCandidates);
                }
            }

            List<ColumnNode> columnNodes = correctModel.getColumnNodes();
            // if (useCorrectType && numberOfCRFCandidates > 1)
            //     updateCrfSemanticTypesForResearchEvaluation(columnNodes);

            List<Node> steinerNodes = new LinkedList<Node>(columnNodes);
            modelLearner = new ModelLearner_KnownModels(modelLearningGraph.getGraphBuilder(), steinerNodes);

            long start = System.currentTimeMillis();

            String graphName = !iterativeEvaluation
                    ? graphPath + semanticModels.get(newSourceIndex).getName() + Params.GRAPH_JSON_FILE_EXT
                    : graphPath + semanticModels.get(newSourceIndex).getName() + ".knownModels="
                            + numberOfKnownModels + Params.GRAPH_JSON_FILE_EXT;

            if (new File(graphName).exists()) {
                // read graph from file
                try {
                    logger.info("loading the graph ...");
                    DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                    GraphBuilder gb = new GraphBuilderTopK(ontologyManager, graph);
                    modelLearner = new ModelLearner_KnownModels(gb, steinerNodes);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                logger.info("building the graph ...");
                for (SemanticModel sm : trainingData)
                    // modelLearningGraph.addModel(sm);
                    modelLearningGraph.addModelAndUpdate(sm, PatternWeightSystem.JWSPaperFormula);
                modelLearner = new ModelLearner_KnownModels(modelLearningGraph.getGraphBuilder(), steinerNodes);
                // modelLearner.graphBuilder = modelLearningGraph.getGraphBuilder();
                // modelLearner.nodeIdFactory = modelLearner.graphBuilder.getNodeIdFactory();
                // save graph to file
                try {
                    // GraphUtil.exportJson(modelLearningGraph.getGraphBuilder().getGraph(), graphName, true, true);
                    // GraphVizUtil.exportJGraphToGraphviz(modelLearner.graphBuilder.getGraph(),
                    //         "test",
                    //         true,
                    //         GraphVizLabelType.LocalId,
                    //         GraphVizLabelType.LocalUri,
                    //         false,
                    //         true,
                    //         graphName + ".dot");
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType,
                    numberOfCandidates);

            long elapsedTimeMillis = System.currentTimeMillis() - start;
            float elapsedTimeSec = elapsedTimeMillis / 1000F;

            int cutoff = 20; // ModelingConfiguration.getMaxCandidateModels();

            List<SortableSemanticModel> topHypotheses = null;
            if (hypothesisList != null) {
                topHypotheses = hypothesisList.size() > cutoff ? hypothesisList.subList(0, cutoff)
                        : hypothesisList;
            }

            Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();

            ModelEvaluation me;
            models.put("1-correct model", correctModel);
            if (topHypotheses != null)
                for (int k = 0; k < topHypotheses.size(); k++) {

                    SortableSemanticModel m = topHypotheses.get(k);

                    me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);

                    String label = "candidate " + k + "\n" +
                            // (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                            "link coherence:"
                            + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                    label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                            : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                    label += "confidence:" + m.getConfidenceScore() + "\n";
                    label += m.getSteinerNodes() == null ? ""
                            : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                    label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                            // "-distance:" + me.getDistance() +
                            "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();

                    models.put(label, m);

                    if (k == 0) { // first rank model
                        System.out.println("number of known models: " + numberOfKnownModels + ", precision: "
                                + me.getPrecision() + ", recall: " + me.getRecall() + ", time: " + elapsedTimeSec
                                + ", accuracy: " + correctModel.getAccuracy() + ", mrr: " + correctModel.getMrr());
                        logger.info("number of known models: " + numberOfKnownModels + ", precision: "
                                + me.getPrecision() + ", recall: " + me.getRecall() + ", time: " + elapsedTimeSec
                                + ", accuracy: " + correctModel.getAccuracy() + ", mrr: " + correctModel.getMrr());
                        // resultFile.println("number of known models \t precision \t recall");
                        // resultFile.println(numberOfKnownModels + "\t" + me.getPrecision() + "\t" + me.getRecall());
                        String s = me.getPrecision() + "\t" + me.getRecall() + "\t" + elapsedTimeSec + "\t"
                                + correctModel.getAccuracy() + "\t" + correctModel.getMrr();
                        if (iterativeEvaluation) {
                            if (resultsArray[numberOfKnownModels + 2].length() > 0)
                                resultsArray[numberOfKnownModels + 2].append(" \t ");
                            resultsArray[numberOfKnownModels + 2].append(s);
                        } else {
                            // s = newSource.getName() + "\t" +
                            //         me.getPrecision() + "\t" +
                            //         me.getRecall() + "\t" +
                            //         elapsedTimeSec + "\t" +
                            //         correctModel.getAccuracy() + "\t" +
                            //         correctModel.getMrr();
                            s = me.getPrecision() + "\t" + me.getRecall() + "\t" + elapsedTimeSec;
                            resultFile.println(s);
                        }
                        // resultFile.println(me.getPrecision() + "\t" + me.getRecall() + "\t" + elapsedTimeSec);
                    }
                }

            String outputPath = Params.OUTPUT_DIR;
            String outName = !iterativeEvaluation
                    ? outputPath + semanticModels.get(newSourceIndex).getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT
                    : outputPath + semanticModels.get(newSourceIndex).getName() + ".knownModels="
                            + numberOfKnownModels + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;

            GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                    GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);

            numberOfKnownModels++;

            if (zeroKnownModel)
                break;
        }
    }

    if (iterativeEvaluation) {
        for (StringBuffer s : resultsArray)
            resultFileIterative.println(s.toString());
        resultFileIterative.close();
    } else {
        resultFile.close();
    }
}