List of usage examples for org.apache.commons.lang ArrayUtils toString
public static String toString(Object array)
Outputs an array as a String, treating null as an empty array. Multi-dimensional arrays are handled correctly, including multi-dimensional primitive arrays.
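For orientation, a minimal sketch of the default rendering (Commons Lang 2.x behaviour: elements are comma-separated inside braces, and a null input yields "{}"):

    import org.apache.commons.lang.ArrayUtils;

    public class ArrayUtilsToStringDemo {
        public static void main(String[] args) {
            String[] names = { "alpha", "beta" };
            int[] counts = { 1, 2, 3 };

            System.out.println(ArrayUtils.toString(names));  // {alpha,beta}
            System.out.println(ArrayUtils.toString(counts)); // {1,2,3}  (primitive arrays work too)
            System.out.println(ArrayUtils.toString(null));   // {}  (null is treated as an empty array)
        }
    }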
From source file:com.liferay.dynamic.data.mapping.test.util.search.TestOrderHelper.java
protected void assertSearch(AssetEntryQuery assetEntryQuery) throws Exception {
    Hits hits = search(assetEntryQuery);

    Bundle bundle = FrameworkUtil.getBundle(TestOrderHelper.class);

    List<AssetEntry> assetEntries = OSGiServiceUtil.callService(
        bundle.getBundleContext(), AssetHelper.class,
        assetHelper -> assetHelper.getAssetEntries(hits));

    Assert.assertEquals(
        ArrayUtils.toString(_sortedValues),
        ArrayUtils.toString(getValues(assetEntries)));
}
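The assertion above compares the two arrays by their string form, so a failure message shows both sequences in full. A minimal sketch of the same pattern outside Liferay (expectedTitles and the getTitles helper are made up for illustration):

    String[] expectedTitles = { "aaa", "bbb", "ccc" };
    String[] actualTitles = getTitles(searchResults); // hypothetical helper returning a String[]

    // On failure JUnit prints e.g. expected:<{aaa,bbb,ccc}> but was:<{aaa,ccc,bbb}>
    Assert.assertEquals(ArrayUtils.toString(expectedTitles), ArrayUtils.toString(actualTitles));

Compared with Assert.assertArrayEquals, this trades element-level failure reporting for a single readable, order-sensitive diff.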
From source file:com.kylinolap.job.JobInstanceBuilder.java
private String formatPaths(String[] paths) {
    String retVal = ArrayUtils.toString(paths);
    retVal = StringUtils.remove(retVal, "{");
    retVal = StringUtils.remove(retVal, "}");
    return retVal;
}
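Because ArrayUtils.toString wraps the comma-joined elements in braces, the helper above amounts to a plain comma-join once the braces are stripped. A rough sketch of the expected behaviour (the paths are made up):

    String[] paths = { "/tmp/kylin/step1", "/tmp/kylin/step2" };
    // ArrayUtils.toString(paths) -> "{/tmp/kylin/step1,/tmp/kylin/step2}"
    // formatPaths(paths)         -> "/tmp/kylin/step1,/tmp/kylin/step2"

StringUtils.join(paths, ',') would produce the same result in a single call.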
From source file:com.rb.ofbiz.test.utils.logging.HtmlResultFormatter.java
String formatMetrics(TestMetricsBean metrics) {
    String failedCommandsRow = "";

    if (metrics.getFailedCommands() > 0) {
        failedCommandsRow = "<tr class=\"" + CSS_CLASS_FAILED + "\"><td>failed commands:</td><td>"
                + metrics.getFailedCommands() + "</td></tr>\n";

        if (StringUtils.isNotBlank(metrics.getLastFailedCommandMessage())) {
            failedCommandsRow = failedCommandsRow + "<tr class=\"" + CSS_CLASS_FAILED
                    + "\"><td>last failed message:</td><td>" + metrics.getLastFailedCommandMessage()
                    + "</td></tr>\n";
        } else {
            System.err.println("WARNING: NO LastFailedCommandMessage");
        }
    }

    return MessageFormat.format(HTML_METRICS, metrics.getUserAgent(), metrics.getSeleniumRcVersion(),
            metrics.getSeleniumRcRevision(), metrics.getSeleniumCoreVersion(), metrics.getSeleniumCoreRevision(),
            metrics.getLoggingSeleniumRevision(), LOGGING_DATETIME_FORMAT.format(metrics.getStartTimeStamp()),
            LOGGING_DATETIME_FORMAT.format(metrics.getEndTimeStamp()), metrics.getTestDuration(),
            metrics.getCommandsProcessed(), metrics.getVerificationsProcessed(), failedCommandsRow,
            ArrayUtils.toString(metrics.getCommandsExcludedFromLogging()));
}
From source file:com.rb.ofbiz.test.utils.logging.LoggingCommandProcessor.java
/**
 * {@inheritDoc}
 */
public boolean[] getBooleanArray(String commandName, String[] args) {
    long cmdStartMillis = System.currentTimeMillis();
    this.seleniumTestMetrics.incCommandsProcessed();
    boolean[] results;

    try {
        results = this.realCommandProcessor.getBooleanArray(commandName, args);
    } catch (RuntimeException e) {
        doExceptionLogging(commandName, args, "", e, cmdStartMillis);
        throw e;
    }

    doLogging(commandName, args,
            LoggingCommandProcessor.SELENIUM_RC_OK_RESULT_PREFIX_WITH_COMMA + ArrayUtils.toString(results),
            cmdStartMillis);
    return results;
}
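ArrayUtils.toString(Object) accepts primitive arrays, so the boolean[] returned by Selenium can be appended to the log text without boxing or looping. A small sketch of the kind of message this produces (the prefix constant is assumed to look roughly like "OK,"):

    boolean[] results = { true, false, true };
    String logMessage = "OK," + ArrayUtils.toString(results); // "OK,{true,false,true}"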
From source file:info.magnolia.cms.servlets.PropertyInitializer.java
/**
 * @see javax.servlet.ServletContextListener#contextInitialized(javax.servlet.ServletContextEvent)
 */
public void contextInitialized(ServletContextEvent sce) {
    final ServletContext context = sce.getServletContext();
    String propertiesLocationString = context.getInitParameter(MAGNOLIA_INITIALIZATION_FILE);

    if (log.isDebugEnabled()) {
        log.debug("{} value in web.xml is :[{}]", MAGNOLIA_INITIALIZATION_FILE, propertiesLocationString); //$NON-NLS-1$
    }

    if (StringUtils.isEmpty(propertiesLocationString)) {
        propertiesLocationString = DEFAULT_INITIALIZATION_PARAMETER;
    }

    String[] propertiesLocation = StringUtils.split(propertiesLocationString, ',');

    String servername = null;
    try {
        servername = StringUtils.lowerCase(InetAddress.getLocalHost().getHostName());
    } catch (UnknownHostException e) {
        log.error(e.getMessage());
    }

    String rootPath = StringUtils.replace(context.getRealPath(StringUtils.EMPTY), "\\", "/"); //$NON-NLS-1$ //$NON-NLS-2$
    String webapp = StringUtils.substringAfterLast(rootPath, "/"); //$NON-NLS-1$

    File logs = new File(webapp + File.separator + "logs");
    File tmp = new File(webapp + File.separator + "tmp");
    if (!logs.exists()) {
        logs.mkdir();
        log.debug("Creating " + logs.getAbsoluteFile() + " folder");
    }
    if (!tmp.exists()) {
        tmp.mkdir();
        log.debug("Creating " + tmp.getAbsoluteFile() + " folder");
    }

    if (log.isDebugEnabled()) {
        log.debug("rootPath is {}, webapp is {}", rootPath, webapp); //$NON-NLS-1$
    }

    for (int j = 0; j < propertiesLocation.length; j++) {
        String location = StringUtils.trim(propertiesLocation[j]);
        location = StringUtils.replace(location, "${servername}", servername); //$NON-NLS-1$
        location = StringUtils.replace(location, "${webapp}", webapp); //$NON-NLS-1$

        File initFile = new File(rootPath, location);
        if (!initFile.exists() || initFile.isDirectory()) {
            if (log.isDebugEnabled()) {
                log.debug("Configuration file not found with path [{}]", //$NON-NLS-1$
                        initFile.getAbsolutePath());
            }
            continue;
        }

        InputStream fileStream;
        try {
            fileStream = new FileInputStream(initFile);
        } catch (FileNotFoundException e1) {
            log.debug("Configuration file not found with path [{}]", //$NON-NLS-1$
                    initFile.getAbsolutePath());
            return;
        }

        try {
            envProperties.load(fileStream);
            log.info("Loading configuration at {}", initFile.getAbsolutePath()); //$NON-NLS-1$
            Log4jConfigurer.initLogging(context, envProperties);
            new ConfigLoader(context, envProperties);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(fileStream);
        }

        return;
    }

    log.error(MessageFormat.format(
            "No configuration found using location list {0}. [servername] is [{1}], [webapp] is [{2}] and base path is [{3}]", //$NON-NLS-1$
            new Object[] { ArrayUtils.toString(propertiesLocation), servername, webapp, rootPath }));
}
From source file:com.rb.ofbiz.test.utils.logging.LoggingCommandProcessor.java
/**
 * {@inheritDoc}
 */
public Number[] getNumberArray(String commandName, String[] args) {
    long cmdStartMillis = System.currentTimeMillis();
    this.seleniumTestMetrics.incCommandsProcessed();
    Number[] results;

    try {
        results = this.realCommandProcessor.getNumberArray(commandName, args);
    } catch (RuntimeException e) {
        doExceptionLogging(commandName, args, "", e, cmdStartMillis);
        throw e;
    }

    doLogging(commandName, args,
            LoggingCommandProcessor.SELENIUM_RC_OK_RESULT_PREFIX_WITH_COMMA + ArrayUtils.toString(results),
            cmdStartMillis);
    return results;
}
From source file:edu.cornell.med.icb.goby.R.FisherExact.java
/**
 * Pass the fisher expression to R for computation.
 *
 * @param rengine The R engine to use to calculate the results.
 * @param fisherExpression The string representing the expression to evaluate.
 * @param is2x2matrix Whether or not the data being evaluated represents a 2x2 matrix
 * @return The results of the evaluation (may be null if an exception interrupts the calculation)
 */
private static Result evaluateFisherExpression(final Rengine rengine, final String fisherExpression,
        final boolean is2x2matrix) {
    // evaluate the R expression
    if (LOG.isDebugEnabled()) {
        LOG.debug("About to evaluate: " + fisherExpression);
    }
    final REXP fisherResultExpression = rengine.eval(fisherExpression);
    if (LOG.isDebugEnabled()) {
        LOG.debug(fisherResultExpression);
    }
    if (fisherResultExpression == null) {
        return null;
    }

    // the result from R is a vector/map of values
    final RVector fisherResultVector = fisherResultExpression.asVector();
    if (LOG.isDebugEnabled()) {
        LOG.debug(fisherResultVector);
    }

    // extract the p-value
    final REXP pValueExpression = fisherResultVector.at("p.value");
    final double pValue;
    if (pValueExpression != null) {
        pValue = pValueExpression.asDouble();
    } else {
        pValue = Double.NaN;
    }

    // extract the alternative hypothesis
    final REXP alternativeExpression = fisherResultVector.at("alternative");
    final String alternative = alternativeExpression.asString();
    if (LOG.isDebugEnabled()) {
        LOG.debug("alternative: " + alternative);
    }
    final AlternativeHypothesis alternativeHypothesis =
            AlternativeHypothesis.valueOf(StringUtils.remove(alternative, '.'));

    // some values are only returned when the input was a 2x2 matrix
    final double estimate;
    final double[] confidenceInterval;
    final double oddsRatio;
    if (is2x2matrix) {
        final REXP estimateExpression = fisherResultVector.at("estimate");
        if (estimateExpression != null) {
            estimate = estimateExpression.asDouble();
        } else {
            estimate = Double.NaN;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(estimateExpression);
            LOG.debug("estimate: " + estimate);
        }

        final REXP confidenceIntervalExpression = fisherResultVector.at("conf.int");
        if (confidenceIntervalExpression != null) {
            confidenceInterval = confidenceIntervalExpression.asDoubleArray();
        } else {
            confidenceInterval = ArrayUtils.EMPTY_DOUBLE_ARRAY;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(confidenceIntervalExpression);
            LOG.debug("confidenceInterval: " + ArrayUtils.toString(confidenceInterval));
        }

        final REXP oddsRatioExpression = fisherResultVector.at("null.value");
        if (oddsRatioExpression != null) {
            oddsRatio = oddsRatioExpression.asDouble();
        } else {
            oddsRatio = Double.NaN;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(oddsRatioExpression);
            LOG.debug("oddsRatio: " + ArrayUtils.toString(oddsRatio));
        }
    } else {
        // these values are not present in the 2xN case
        estimate = Double.NaN;
        confidenceInterval = ArrayUtils.EMPTY_DOUBLE_ARRAY;
        oddsRatio = Double.NaN;
    }

    return new Result(pValue, confidenceInterval, estimate, oddsRatio, alternativeHypothesis);
}
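Note that the last debug call passes a scalar (oddsRatio, a double autoboxed to Double); toString is null-safe and accepts any Object, falling back to a simple rendering of non-array values. When a placeholder other than "{}" is wanted for null input, the two-argument overload toString(Object array, String stringIfNull) can be used; a small sketch:

    double[] confidenceInterval = null;
    ArrayUtils.toString(confidenceInterval);        // "{}"
    ArrayUtils.toString(confidenceInterval, "n/a"); // "n/a"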
From source file:edu.cornell.med.icb.R.script.RScript.java
/**
 * Before the script executes, configure the input variables on the connection.
 * @param connection the rconnection
 * @throws RserveException r server error
 * @throws REngineException r server error
 */
private void setInputs(final RConnection connection) throws RserveException, REngineException {
    assert connection != null;
    for (final RDataObject input : inputMap.values()) {
        if (input.getDataType() == RDataObjectType.String) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("R-input: %s <- %s%n", input.getFieldName(), input.getValue()));
            }
            connection.assign(input.getFieldName(), (String) input.getValue());
        } else if (input.getDataType() == RDataObjectType.StringArray) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("R-input: %s <- c(%s)%n", input.getFieldName(),
                        ArrayUtils.toString(input.getValue())));
            }
            connection.assign(input.getFieldName(), (String[]) input.getValue());
        } else if (input.getDataType() == RDataObjectType.Double) {
            final String rcode = String.format("%s <- %s", input.getFieldName(),
                    Double.toString((Double) input.getValue()));
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("R-input: %s", rcode));
            }
            // Special case for Double since connection.assign doesn't
            // directly support Double values
            connection.voidEval(rcode);
        } else if (input.getDataType() == RDataObjectType.DoubleArray) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("R-input: %s <- c(%s)%n", input.getFieldName(),
                        ArrayUtils.toString(input.getValue())));
            }
            connection.assign(input.getFieldName(), (double[]) input.getValue());
        } else if (input.getDataType() == RDataObjectType.Double2DArray) {
            final double[][] twoDValues = (double[][]) input.getValue();
            final int numRows = twoDValues.length;
            final String oneDFieldName = input.getFieldName() + "_1D_DATA";
            // Convert the double[][] into a double[] and use connection.assign to
            // send that data to R
            connection.assign(oneDFieldName, RDataObject.flatten2DArrayByRows(twoDValues));
            // Use R code to create the matrix from the double[]
            // This seems slightly inefficient, memorywise in both R and Java,
            // but I am not aware of a good way to get the double[][] directly
            // into R.
            final String rcode = String.format("%s <- matrix(%s, nrow=%d, byrow=TRUE)", input.getFieldName(),
                    oneDFieldName, numRows);
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("R-input: %s", rcode));
            }
            connection.voidEval(rcode);
        }
    }
}
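The StringArray and DoubleArray branches rely on toString rendering the whole array in a single call; multi-dimensional arrays are also handled, which can be useful when debugging the Double2DArray input before it is flattened. A sketch of the expected rendering (the exact nested formatting is an assumption):

    double[] vector = { 1.0, 2.5 };
    double[][] matrix = { { 1.0, 2.0 }, { 3.0, 4.0 } };

    ArrayUtils.toString(vector); // "{1.0,2.5}"
    ArrayUtils.toString(matrix); // nested braces, e.g. "{{1.0,2.0},{3.0,4.0}}"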
From source file:com.rb.ofbiz.test.utils.logging.LoggingCommandProcessor.java
/**
 * {@inheritDoc}
 */
public String getString(String commandName, String[] args) {
    long cmdStartMillis = System.currentTimeMillis();
    this.seleniumTestMetrics.incCommandsProcessed();
    String result;

    try {
        result = this.realCommandProcessor.getString(commandName, args);
    } catch (RuntimeException e) {
        doExceptionLogging(commandName, args, "", e, cmdStartMillis);
        throw e;
    }

    doLogging(commandName, args,
            LoggingCommandProcessor.SELENIUM_RC_OK_RESULT_PREFIX_WITH_COMMA + ArrayUtils.toString(result),
            cmdStartMillis);
    return result;
}
From source file:com.ikanow.infinit.e.application.handlers.polls.LogstashSourceDeletionPollHandler.java
@Override
public void performPoll() {

    boolean isSlave = false;

    if (null == LOGSTASH_CONFIG) { // (static memory not yet initialized)
        try {
            Thread.sleep(1000); // (extend the sleep time a bit)
        } catch (Exception e) {
        }
        return;
    }

    File logstashDirectory = new File(LOGSTASH_CONFIG);
    String slaveHostname = null;
    if (!logstashDirectory.isDirectory() || !logstashDirectory.canRead() || !logstashDirectory.canWrite()) {
        logstashDirectory = new File(LOGSTASH_CONFIG_DISTRIBUTED);
        isSlave = true;
        if (!logstashDirectory.isDirectory() || !logstashDirectory.canRead() || !logstashDirectory.canWrite()) {
            try {
                Thread.sleep(10000); // (extend the sleep time a bit)
            } catch (Exception e) {
            }
            return;
        }
        try {
            slaveHostname = java.net.InetAddress.getLocalHost().getHostName();
        } catch (Exception e) {
            // too complex if we don't have a hostname, just return
            return;
        }
    }

    // Deletion of distributed sources requires some co-ordination, we'll do it in master

    if (isSlave) { // register my existence
        BasicDBObject existence = new BasicDBObject("_id", slaveHostname);
        existence.put("ping", new Date());
        DbManager.getIngest().getLogHarvesterSlaves().save(existence);
    } //TESTED (by hand)
    else { // MASTER: clear out old slaves
        // (if it hasn't pinged for more than 30 minutes)
        long now = new Date().getTime();
        BasicDBObject deadSlaveQuery = new BasicDBObject("ping",
                new BasicDBObject(DbManager.lt_, new Date(now - 1000L * 1800L)));
        boolean found = false;
        DBCursor dbc = DbManager.getIngest().getLogHarvesterSlaves().find(deadSlaveQuery);
        while (dbc.hasNext()) {
            BasicDBObject deadSlave = (BasicDBObject) dbc.next();
            found = true;
            String hostname = deadSlave.getString("_id");
            if (null != hostname) {
                DbManager.getIngest().getLogHarvesterQ().remove(new BasicDBObject("forSlave", hostname));
                _logger.info("Removing unresponsive slave host=" + hostname);
            }
        }
        if (found) {
            DbManager.getIngest().getLogHarvesterSlaves().remove(deadSlaveQuery);
        }
    } //TESTED (by hand)

    // Read delete elements from the Q...

    if (null == _logHarvesterQ) {
        _logHarvesterQ = new MongoQueue(DbManager.getIngest().getLogHarvesterQ().getDB().getName(),
                DbManager.getIngest().getLogHarvesterQ().getName());
    }
    BasicDBObject queueQuery = new BasicDBObject("deleteOnlyCommunityId",
            new BasicDBObject(DbManager.exists_, true));
    if (!isSlave) { // only get master messages
        queueQuery.put("forSlave", new BasicDBObject(DbManager.exists_, false));
    } else { // only get messages intended for me
        queueQuery.put("forSlave", slaveHostname);
    }
    DBObject nextElement = _logHarvesterQ.pop(queueQuery);
    LinkedList<TestLogstashExtractorPojo> secondaryQueue = new LinkedList<TestLogstashExtractorPojo>();
    LinkedList<String> deleteAfterRestartQueue = new LinkedList<String>();
    boolean deletedSources = false;
    boolean deletedSinceDbs = false;

    while (nextElement != null) {
        //DEBUG
        //System.out.println("HOST: " + slaveHostname + ": RECEIVED: " + nextElement.toString() + " FROM " + queueQuery);
        _logger.info("host=" + slaveHostname + " received=" + nextElement.toString() + " from=" + queueQuery);

        TestLogstashExtractorPojo testInfo = TestLogstashExtractorPojo.fromDb(nextElement,
                TestLogstashExtractorPojo.class);
        if (null == testInfo.sourceKey) {
            continue; // need a sourceKey parameter...
        }
        if (!isSlave) { // slaves don't need to delete anything from the index, only files
            secondaryQueue.add(testInfo);
        } //(end if master)

        try {
            // First off - need to remove the conf file and restart logstash if we're actually deleting this...
            boolean deletedSource = false;
            if ((null == testInfo.deleteDocsOnly) || !testInfo.deleteDocsOnly) { // (default = delete entire source)
                deletedSources = true;
                deletedSource = true;

                String fileToDelete = new StringBuffer(LOGSTASH_CONFIG).append(testInfo._id.toString())
                        .append(LOGSTASH_CONFIG_EXTENSION).toString();

                boolean deleted = false;
                try {
                    deleted = new File(fileToDelete).delete();
                } catch (Exception e) {
                }

                //DEBUG
                //System.out.println("DELETED CONF FILE" + fileToDelete + " ? " + deleted);
                _logger.info("delete conf_file=" + fileToDelete + " success=" + deleted);
            } //TESTED (docs-only + source deletion)

            // If _not_ deleting the source, then do delete the sincedb file
            // (else let it get cleaned up separately - minimizes race conditions where the source starts ingesting again)
            String fileToDelete = new StringBuffer(LOGSTASH_WD).append(".sincedb_")
                    .append(testInfo._id.toString()).toString();
            if (!deletedSource) {
                boolean deleted = false;
                try {
                    deleted = new File(fileToDelete).delete();
                    deletedSinceDbs |= deleted;
                } catch (Exception e) {
                }

                //DEBUG
                //System.out.println("DELETED SINCEDB" + fileToDelete + " ? " + deletedSinceDb);
                _logger.info("primary delete sincedb_file=" + fileToDelete + " success=" + deleted);
            } else {
                deleteAfterRestartQueue.add(fileToDelete);
            } //TESTED (primary + secondary deletes)
        } catch (Exception e) {
            //e.printStackTrace();
        } // probably just doesn't exist

        // Get next element and carry on
        nextElement = _logHarvesterQ.pop(queueQuery);
    } //TESTED (end first loop over elements to delete)

    if (deletedSources || deletedSinceDbs) { // this file actually existed - need to restart the logstash unfortunately
        _logger.info("Restarting logstash, and sleeping until logstash is restarted");
        try {
            new File(LOGSTASH_RESTART_FILE).createNewFile();
            for (int i = 0; i < 12; ++i) {
                Thread.sleep(10L * 1000L);
                if (!new File(LOGSTASH_RESTART_FILE).exists()) {
                    Thread.sleep(5L * 1000L); // (extra wait for it to shut down)
                    break; // (early exit)
                }
            }
        } catch (Exception e) {
        }
    } //TESTED (from doc deletion and from src deletion)

    for (String fileToDelete : deleteAfterRestartQueue) {
        boolean deleted = false;
        try {
            deleted = new File(fileToDelete).delete();
        } catch (Exception e) {
        }

        //DEBUG
        //System.out.println("DELETED SINCEDB" + fileToDelete + " ? " + deletedSinceDb);
        _logger.info("secondary delete sincedb_file=" + fileToDelete + " success=" + deleted);
    } //TESTED (primary and secondary deletion)

    for (TestLogstashExtractorPojo testInfo : secondaryQueue) {
        String commIdStr = testInfo.deleteOnlyCommunityId.toString();

        // Get all the indexes that might need to be cleansed:
        ElasticSearchManager indexMgr = ElasticSearchManager.getIndex(DUMMY_INDEX);

        // Stashed index
        ArrayList<String> indices = new ArrayList<String>();
        String stashedIndex = "recs_" + commIdStr;
        ClusterStateResponse retVal = indexMgr.getRawClient().admin().cluster().prepareState()
                .setIndices(stashedIndex).setRoutingTable(false).setNodes(false).setListenerThreaded(false)
                .get();
        if (!retVal.getState().getMetaData().getIndices().isEmpty()) {
            indices.add(stashedIndex);
        } // (else doesn't exist...)

        // Live indexes:
        String indexPattern = new StringBuffer("recs_t_").append(commIdStr).append("*").toString();
        retVal = indexMgr.getRawClient().admin().cluster().prepareState().setIndices(indexPattern)
                .setRoutingTable(false).setNodes(false).setListenerThreaded(false).get();
        for (IndexMetaData indexMetadata : retVal.getState().getMetaData()) {
            //DEBUG
            //System.out.println("INDEX=" + indexMetadata.index());
            indices.add(indexMetadata.index());
        }
        deleteSourceKeyRecords(indexMgr, indices.toArray(new String[0]), testInfo.sourceKey);
        _logger.info("Deleted key=" + testInfo.sourceKey + " from indexes="
                + ArrayUtils.toString(indices.toArray()));

        // Now I've deleted, go and distribute the deletion messages to the slaves
        if ((null != testInfo.distributed) && testInfo.distributed) {
            // Copy into the slaves' queue
            DBCursor dbc = DbManager.getIngest().getLogHarvesterSlaves().find();
            while (dbc.hasNext()) {
                BasicDBObject slave = (BasicDBObject) dbc.next();
                testInfo.forSlave = slave.getString("_id");
                _logHarvesterQ.push(testInfo.toDb());
                testInfo.forSlave = null;
                //DEBUG
                //System.out.println("DISTRIBUTING DELETION MESSAGE TO " + slave.toString());
                _logger.info("distributing deletion message to host=" + slave.toString());
            }
        } //TESTED (by hand)
    } //(end loop over secondary queue, ie to actually delete the indexes)
}
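In the final loop the index names are converted with indices.toArray() before logging; passing the List itself would likely just fall back to the collection's own toString() rather than the brace-delimited array form, since toString(Object) only walks genuine arrays. A minimal sketch of the logging idiom (the index names are made up):

    List<String> indices = new ArrayList<String>();
    indices.add("recs_abc");
    indices.add("recs_t_abc_1");

    String forLog = ArrayUtils.toString(indices.toArray()); // "{recs_abc,recs_t_abc_1}"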