List of usage examples for org.apache.commons.lang ArrayUtils addAll
public static double[] addAll(double[] array1, double[] array2) — one of several overloads (object arrays and every primitive array type).
Adds all the elements of the given arrays into a new array; when one argument is null, a copy of the other array is returned.
From source file:org.alfresco.repo.exporter.ViewXMLExporter.java
/**
 * Starts the export: emits the XML document prologue, binds the repository-view
 * namespace, opens the root <view> element and writes the complete <metadata>
 * header (exported-by, export date, exporter version, and the comma-separated
 * list of paths being exported) as SAX events on the configured content handler.
 *
 * @param context export context supplying exporter name, date, version and node list
 * @throws ExporterException if any underlying SAX event fails
 */
public void start(ExporterContext context) {
    try {
        this.context = context;
        // document prologue and namespace binding for the repository-view prefix
        contentHandler.startDocument();
        contentHandler.startPrefixMapping(NamespaceService.REPOSITORY_VIEW_PREFIX,
                NamespaceService.REPOSITORY_VIEW_1_0_URI);
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, VIEW_LOCALNAME,
                VIEW_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        //
        // output metadata
        //
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, METADATA_LOCALNAME,
                METADATA_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        // exported by
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTEDBY_LOCALNAME,
                EXPORTEDBY_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        contentHandler.characters(context.getExportedBy().toCharArray(), 0,
                context.getExportedBy().length());
        contentHandler.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTEDBY_LOCALNAME,
                EXPORTEDBY_QNAME.toPrefixString());
        // exported date, converted to its string form by the repository type converter
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTEDDATE_LOCALNAME,
                EXPORTEDDATE_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        String date = DefaultTypeConverter.INSTANCE.convert(String.class, context.getExportedDate());
        contentHandler.characters(date.toCharArray(), 0, date.length());
        contentHandler.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTEDDATE_LOCALNAME,
                EXPORTEDDATE_QNAME.toPrefixString());
        // exporter version
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTERVERSION_LOCALNAME,
                EXPORTERVERSION_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        contentHandler.characters(context.getExporterVersion().toCharArray(), 0,
                context.getExporterVersion().length());
        contentHandler.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTERVERSION_LOCALNAME,
                EXPORTERVERSION_QNAME.toPrefixString());
        // export of: the prefixed path of each exported node, comma separated
        contentHandler.startElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTOF_LOCALNAME,
                EXPORTOF_QNAME.toPrefixString(), EMPTY_ATTRIBUTES);
        NodeRef[] exportList = context.getExportList();
        // number of trailing separator chars to emit after each path (0 for the last one)
        int comma = 1;
        for (int i = 0; i < exportList.length; i++) {
            NodeRef nodeRef = exportList[i];
            String path = nodeService.getPath(nodeRef).toPrefixString(namespaceService);
            if (i == exportList.length - 1) {
                comma = 0;
            }
            // append "," to the path chars but only include it in the emitted length
            // when another path follows
            contentHandler.characters(ArrayUtils.addAll(path.toCharArray(), ",".toCharArray()), 0,
                    path.length() + comma);
        }
        contentHandler.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, EXPORTOF_LOCALNAME,
                EXPORTOF_QNAME.toPrefixString());
        contentHandler.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, METADATA_LOCALNAME,
                METADATA_QNAME.toPrefixString());
    } catch (SAXException e) {
        throw new ExporterException("Failed to process export start event", e);
    }
}
From source file:org.apache.accumulo.core.client.lexicoder.impl.AbstractLexicoderTest.java
public static <T> void assertDecodes(AbstractLexicoder<T> lexicoder, T expected) { LexicoderTest.assertDecodes(lexicoder, expected); byte[] encoded = lexicoder.encode(expected); assertOutOfBoundsFails(lexicoder, encoded); // munge bytes at start and end, then use offset and length to decode final byte[] combined = ArrayUtils.addAll(ArrayUtils.addAll(START_PAD, encoded), END_PAD); int offset = START_PAD.length; int len = encoded.length; T result = lexicoder.decode(combined, offset, len); assertEquals(expected, result);//from www . j av a2 s . com }
From source file:org.apache.accumulo.core.client.lexicoder.impl.AbstractLexicoderTest.java
public void assertDecodesB(AbstractLexicoder<byte[]> lexicoder, byte[] expected) { super.assertDecodesB(lexicoder, expected); byte[] encoded = lexicoder.encode(expected); assertOutOfBoundsFails(lexicoder, encoded); // munge bytes at start and end, then use offset and length to decode final byte[] combined = ArrayUtils.addAll(ArrayUtils.addAll(START_PAD, encoded), END_PAD); int offset = START_PAD.length; int len = encoded.length; byte[] result = lexicoder.decode(combined, offset, len); assertEqualsB(expected, result);/*from ww w .j av a 2 s .c o m*/ }
From source file:org.apache.activemq.network.MQTTNetworkOfBrokersFailoverTest.java
@SuppressWarnings("unused") private void assertOneDurableSubOn(BrokerService broker, String subName) throws Exception { BrokerViewMBean brokerView = broker.getAdminView(); ObjectName[] activeDurableSubs = brokerView.getDurableTopicSubscribers(); ObjectName[] inactiveDurableSubs = brokerView.getInactiveDurableTopicSubscribers(); ObjectName[] allDurables = (ObjectName[]) ArrayUtils.addAll(activeDurableSubs, inactiveDurableSubs); assertEquals(1, allDurables.length); // at this point our assertions should prove that we have only on durable sub DurableSubscriptionViewMBean durableSubView = (DurableSubscriptionViewMBean) broker.getManagementContext() .newProxyInstance(allDurables[0], DurableSubscriptionViewMBean.class, true); assertEquals(subName, durableSubView.getClientId()); }
From source file:org.apache.bazel.checkstyle.JavaCheckstyle.java
private static void checkStyle(String[] files, String config) throws IOException { if (files.length == 0) { LOG.fine("No java files found by checkstyle"); return;//w ww. ja v a 2 s .com } LOG.fine(files.length + " java files found by checkstyle"); String[] checkstyleArgs = (String[]) ArrayUtils.addAll(new String[] { "-c", config }, files); LOG.fine("checkstyle args: " + Joiner.on(" ").join(checkstyleArgs)); com.puppycrawl.tools.checkstyle.Main.main(checkstyleArgs); }
From source file:org.apache.cassandra.db.HintedHandOffManager.java
/**
 * Builds the combined hint name {@code tableName + SEPARATOR + columnFamily},
 * UTF-8 encoded and wrapped in a {@link ByteBuffer}.
 */
public static ByteBuffer makeCombinedName(String tableName, String columnFamily) {
    // concatenate table name, separator and column family as one byte array
    final byte[] combined = ArrayUtils.addAll(
            ArrayUtils.addAll(tableName.getBytes(UTF_8), SEPARATOR.getBytes(UTF_8)),
            columnFamily.getBytes(UTF_8));
    return ByteBuffer.wrap(combined);
}
From source file:org.apache.eagle.jpm.spark.history.crawl.JHFSparkEventReader.java
/**
 * Captures the job configuration from a Spark environment-update event:
 * copies a fixed set of well-known Spark properties, plus any extra keys
 * listed in {@code spark.jobConf.additional.info}, into the app config.
 */
private void handleEnvironmentSet(JSONObject event) {
    app.setConfig(new JobConfig());
    final JSONObject sparkProps = (JSONObject) event.get("Spark Properties");
    final String[] defaultKeys = { "spark.yarn.app.id", "spark.executor.memory", "spark.driver.host",
            "spark.driver.port", "spark.driver.memory", "spark.scheduler.pool", "spark.executor.cores",
            "spark.yarn.am.memory", "spark.yarn.am.cores", "spark.yarn.executor.memoryOverhead",
            "spark.yarn.driver.memoryOverhead", "spark.yarn.am.memoryOverhead", "spark.master" };
    // optional comma-separated extras from configuration; null when not configured
    String[] extraKeys = null;
    if (conf.hasPath("spark.jobConf.additional.info")) {
        extraKeys = conf.getString("spark.jobConf.additional.info").split(",\\s*");
    }
    // addAll tolerates a null first argument, so unconfigured extras are a no-op
    for (String key : (String[]) ArrayUtils.addAll(extraKeys, defaultKeys)) {
        if (sparkProps.containsKey(key)) {
            app.getConfig().getConfig().put(key, (String) sparkProps.get(key));
        }
    }
}
From source file:org.apache.falcon.regression.core.util.MathUtil.java
/**
 * Concatenates two TestNG data-provider arrays, asserting first that neither
 * is empty and that their rows have the same width.
 *
 * @return a new array containing the rows of {@code arr1} followed by those of {@code arr2}
 */
public static Object[][] append(Object[][] arr1, Object[][] arr2) {
    Assert.assertFalse(ArrayUtils.isEmpty(arr1), "arr1 can't be empty:" + Arrays.deepToString(arr1));
    Assert.assertFalse(ArrayUtils.isEmpty(arr2), "arr2 can't be empty:" + Arrays.deepToString(arr2));
    // rows must be the same width or the combined provider would be ragged
    final String mismatch = "Array rows are not compatible. " + "row of first array: "
            + Arrays.deepToString(arr1[0]) + "row of second array: " + Arrays.deepToString(arr2[0]);
    Assert.assertEquals(arr1[0].length, arr2[0].length, mismatch);
    return (Object[][]) ArrayUtils.addAll(arr1, arr2);
}
From source file:org.apache.flume.instrumentation.ChannelCounter.java
/**
 * Creates a channel counter group tracking both the caller-supplied attribute
 * names and the channel-specific ATTRIBUTES defined by this class.
 *
 * @param name       name of the monitored channel component
 * @param attributes additional attribute names to track alongside the defaults
 */
public ChannelCounter(String name, String[] attributes) {
    super(MonitoredCounterGroup.Type.CHANNEL, name,
            (String[]) ArrayUtils.addAll(attributes, ATTRIBUTES));
}
From source file:org.apache.flume.instrumentation.SinkCounter.java
/**
 * Creates a sink counter group tracking both the caller-supplied attribute
 * names and the sink-specific ATTRIBUTES defined by this class.
 *
 * @param name       name of the monitored sink component
 * @param attributes additional attribute names to track alongside the defaults
 */
public SinkCounter(String name, String[] attributes) {
    super(MonitoredCounterGroup.Type.SINK, name,
            (String[]) ArrayUtils.addAll(attributes, ATTRIBUTES));
}