List of usage examples for org.apache.commons.lang ArrayUtils subarray
public static boolean[] subarray(boolean[] array, int startIndexInclusive, int endIndexExclusive)
Produces a new boolean array containing the elements between the start index (inclusive) and the end index (exclusive).
From source file: nl.intercommit.weaves.components.Modal.java
/**
 * Tapestry event handler for "fetchModalContent": renders the modal body into the partial
 * renderer ("arr") and queues a client-side jQuery call that toggles the modal.
 *
 * localcontext layout (inferred from usage — TODO confirm against the triggering component):
 *   [0] client id suffix of the modal element, [1] render-target element id,
 *   [2..] extra context forwarded to the bubbled event.
 */
@OnEvent(value = "fetchModalContent")
private void openModal(final Object... localcontext) {
    if (block != null) {
        // A block parameter is bound directly: render that block into the target element.
        arr.addRender(tc.coerce(localcontext[1], String.class),
                resources.getPage().getComponentResources().findBlock(block));
    } else {
        // No block bound: bubble a component event, forwarding the remaining context
        // (elements 2..end), and render whatever Block the first handler returns.
        resources.triggerEvent(event, ArrayUtils.subarray(localcontext, 2, localcontext.length),
                new ComponentEventCallback<Block>() {
                    public boolean handleResult(Block result) {
                        if (result != null) {
                            arr.addRender(tc.coerce(localcontext[1], String.class), result);
                            return true; // stop further event bubbling
                        }
                        return false; // keep looking for a handler
                    };
                });
    }
    // Always toggle the modal on the client once the render callbacks have run.
    arr.addCallback(new JavaScriptCallback() {
        @Override
        public void run(JavaScriptSupport javascriptSupport) {
            javascriptSupport.addScript(
                    "$T5_JQUERY('#m_" + tc.coerce(localcontext[0], String.class) + "').modal('toggle');");
        }
    });
}
From source file: nl.salp.warcraft4j.casc.cdn.RawDataDecompressor.java
/**
 * "Decompresses" a raw (uncompressed) BLTE chunk: validates the expected size and
 * returns the requested slice of the data unchanged.
 *
 * @param data             raw chunk bytes
 * @param dataOffset       start offset of the payload within {@code data}
 * @param dataLength       NOTE(review): passed to {@code ArrayUtils.subarray} as the END
 *                         index (exclusive), not as a length — confirm callers pass an
 *                         end offset here; otherwise this should be
 *                         {@code dataOffset + dataLength}
 * @param decompressedSize expected size of the chunk, or 0 to skip the size check
 * @return the selected slice of {@code data}
 * @throws CascParsingException when {@code data.length} differs from a non-zero
 *                              {@code decompressedSize}
 */
@Override
public byte[] decompress(byte[] data, long dataOffset, long dataLength, long decompressedSize)
        throws CascParsingException {
    if (decompressedSize != 0 && data.length != decompressedSize) {
        throw new CascParsingException(
                format("Error parsing raw BLTE file chunk, got %d bytes of compressed data instead of %d",
                        data.length, decompressedSize));
    }
    LOGGER.trace("Returning {} bytes raw BLTE file chunk as data.", data.length);
    // subarray(array, startInclusive, endExclusive); the int casts truncate for >2GiB values.
    return ArrayUtils.subarray(data, (int) dataOffset, (int) dataLength);
}
From source file: nl.salp.warcraft4j.casc.FileHeaderTest.java
/**
 * When fewer bytes than the nominal header length are available, the parser should
 * fall back to using all available bytes as the header.
 */
@Test
public void shouldUseAvailableDataAsHeaderLength() {
    // Offer only the first two bytes of the fixture content.
    final byte[] availableBytes = ArrayUtils.subarray(CONTENT_DATA, 0, 2);

    final FileHeader parsedHeader = FileHeader.parse(() -> new ByteArrayDataReader(availableBytes));

    // The truncated input must be used verbatim as the header, and hash accordingly.
    assertArrayEquals(availableBytes, parsedHeader.getHeader());
    assertEquals(DataTypeUtil.hash(availableBytes), parsedHeader.hashCode());
}
From source file: org.ala.hbase.RepoDataLoader.java
/**
 * This takes a list of infosource ids...
 * <p/>
 * Usage: -stats or -reindex or -gList and list of infosourceId
 *
 * @param args command-line flags followed by infosource ids; flags are consumed
 *             left-to-right by shifting args (so "-stats -reindex ..." works)
 */
public static void main(String[] args) throws Exception {
    //RepoDataLoader loader = new RepoDataLoader();
    ApplicationContext context = SpringUtils.getContext();
    RepoDataLoader loader = (RepoDataLoader) context.getBean(RepoDataLoader.class);
    long start = System.currentTimeMillis();
    loader.loadInfoSources();
    String filePath = repositoryDir;
    if (args.length > 0) {
        // Each flag check re-reads args[0] because the previous branch may have
        // shifted the array left by one via ArrayUtils.subarray.
        if (args[0].equalsIgnoreCase("-stats")) {
            loader.statsOnly = true;
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
        }
        if (args[0].equalsIgnoreCase("-reindex")) {
            loader.reindex = true;
            loader.indexer = context.getBean(PartialIndex.class);
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
            logger.info("**** -reindex: " + loader.reindex);
            logger.debug("reindex url: " + loader.reindexUrl);
        }
        if (args[0].equalsIgnoreCase("-gList")) {
            loader.gList = true;
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
            logger.info("**** -gList: " + loader.gList);
        }
        if (args[0].equalsIgnoreCase("-biocache")) {
            // Query the biocache web service for data resources with images and
            // translate them into infosource ids, replacing args entirely.
            Hashtable<String, String> hashTable = new Hashtable<String, String>();
            hashTable.put("accept", "application/json");
            ObjectMapper mapper = new ObjectMapper();
            mapper.getDeserializationConfig().set(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
                    false);
            RestfulClient restfulClient = new RestfulClient(0);
            String fq = "&fq=";
            if (args.length > 1) {
                // Optional time window: restrict to records loaded in the last week/month/year.
                java.util.Date date = new java.util.Date();
                if (args[1].equals("-lastWeek")) {
                    date = DateUtils.addWeeks(date, -1);
                } else if (args[1].equals("-lastMonth")) {
                    date = DateUtils.addMonths(date, -1);
                } else if (args[1].equals("-lastYear")) {
                    date = DateUtils.addYears(date, -1);
                } else
                    date = null;
                if (date != null) {
                    SimpleDateFormat sfd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
                    // %5B/%5D/%20 are pre-encoded '[', ']' and space for the Solr range query.
                    fq += "last_load_date:%5B" + sfd.format(date) + "%20TO%20*%5D";
                }
            }
            Object[] resp = restfulClient
                    .restGet("http://biocache.ala.org.au/ws/occurrences/search?q=multimedia:Image" + fq
                            + "&facets=data_resource_uid&pageSize=0", hashTable);
            logger.info("The URL: " + "http://biocache.ala.org.au/ws/occurrences/search?q=multimedia:Image"
                    + fq + "&facets=data_resource_uid&pageSize=0");
            if ((Integer) resp[0] == HttpStatus.SC_OK) {
                String content = resp[1].toString();
                logger.debug(resp[1]);
                if (content != null && content.length() > "[]".length()) {
                    Map map = mapper.readValue(content, Map.class);
                    try {
                        // Drill into facetResults[0].fieldResult of the JSON response.
                        List<java.util.LinkedHashMap<String, String>> list = ((List<java.util.LinkedHashMap<String, String>>) ((java.util.LinkedHashMap) ((java.util.ArrayList) map
                                .get("facetResults")).get(0)).get("fieldResult"));
                        Set<String> arg = new LinkedHashSet<String>();
                        for (int i = 0; i < list.size(); i++) {
                            java.util.LinkedHashMap<String, String> value = list.get(i);
                            String dataResource = getDataResource(value.get("fq"));
                            // Map the data resource uid to its infosource id, if known.
                            Object provider = (loader.getUidInfoSourceMap().get(dataResource));
                            if (provider != null) {
                                arg.add(provider.toString());
                            }
                        }
                        logger.info("Set of biocache infosource ids to load: " + arg);
                        args = new String[] {};
                        args = arg.toArray(args);
                        //handle the situation where biocache-service reports no data resources
                        if (args.length < 1) {
                            logger.error("No biocache data resources found. Unable to load.");
                            System.exit(0);
                        }
                    } catch (Exception e) {
                        logger.error("ERROR: exit process....." + e);
                        e.printStackTrace();
                        System.exit(0);
                    }
                }
            } else {
                logger.warn("Unable to process url: ");
            }
        }
    }
    int filesRead = loader.load(filePath, args); // FIXME: move to config
    long finish = System.currentTimeMillis();
    logger.info(filesRead + " files scanned/loaded in: " + ((finish - start) / 60000) + " minutes "
            + ((finish - start) / 1000) + " seconds.");
    System.exit(1);
}
From source file: org.alfresco.module.org_alfresco_module_rm.jscript.app.JSONConversionComponent.java
/**
 * Helper method to add the originating-location path of a node to the JSON
 * representation being built.
 *
 * @param nodeInfo       node information
 * @param rootJSONObject root JSON object the extra info is written into
 */
@SuppressWarnings("unchecked")
private void addInfo(final FileInfo nodeInfo, JSONObject rootJSONObject) {
    String itemType = (String) rootJSONObject.get("type");
    final QName itemTypeQName = QName.createQName(itemType, namespaceService);

    // Resolve the originating (non-RM) parent as the system user, since the current
    // user may not have permission to read all parent associations.
    NodeRef originatingLocation = AuthenticationUtil.runAsSystem(new RunAsWork<NodeRef>() {
        public NodeRef doWork() {
            NodeRef originatingLocation = null;
            if (dictionaryService.isSubClass(itemTypeQName, ContentModel.TYPE_CONTENT)) {
                NodeRef nodeRef = nodeInfo.getNodeRef();
                List<ChildAssociationRef> parentAssocs = nodeService.getParentAssocs(nodeRef);
                for (ChildAssociationRef parent : parentAssocs) {
                    // FIXME: What if there is more than a secondary parent?
                    if (!parent.isPrimary()) {
                        originatingLocation = parent.getParentRef();
                        // only consider the non-RM parent otherwise we can
                        // run into issues with frozen or transferring records
                        if (!nodeService.hasAspect(originatingLocation,
                                RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT)) {
                            // assume we have found the correct in-place location
                            // FIXME when we support multiple in-place locations
                            break;
                        }
                    }
                }
            }
            return originatingLocation;
        }
    });

    if (originatingLocation != null) {
        // add the originating location (if there is one)
        String pathSeparator = "/";
        String displayPath = getDisplayPath(originatingLocation);
        String[] displayPathElements = displayPath.split(pathSeparator);
        // NOTE(review): the constant 5 presumably skips a fixed repository-root prefix
        // of the display path — confirm against getDisplayPath's output format.
        Object[] subPath = ArrayUtils.subarray(displayPathElements, 5, displayPathElements.length);
        StringBuffer originatingLocationPath = new StringBuffer();
        for (int i = 0; i < subPath.length; i++) {
            originatingLocationPath.append(pathSeparator).append(subPath[i]);
        }
        rootJSONObject.put("originatingLocationPath", originatingLocationPath.toString());
    }
}
From source file: org.alfresco.util.schemacomp.SchemaCompTestingUtils.java
/**
 * Create a collection of indexes from strings of the format
 * "name [unique] column1 [column2 ... columnN]".
 *
 * @param indexDefs one definition per index; the optional "[unique]" token after the
 *                  name marks the index as unique
 * @return the parsed indexes, in the order given
 */
public static Collection<Index> indexes(String... indexDefs) {
    Index[] indexes = new Index[indexDefs.length];
    for (int i = 0; i < indexDefs.length; i++) {
        String[] parts = indexDefs[i].split(" ");
        String name = parts[0];
        boolean unique = false;
        int columnsStart = 1;
        // Guard against a definition with no column tokens before probing parts[1]
        // (the original threw ArrayIndexOutOfBoundsException for a bare "name").
        if (parts.length > 1 && parts[1].equals("[unique]")) {
            unique = true;
            columnsStart++;
        }
        // Everything after the name (and optional "[unique]") is a column name.
        String[] columns = Arrays.copyOfRange(parts, columnsStart, parts.length);
        indexes[i] = new Index(null, name, Arrays.asList(columns));
        indexes[i].setUnique(unique);
    }
    return Arrays.asList(indexes);
}
From source file: org.apache.archiva.metadata.repository.storage.maven2.Maven2RepositoryStorage.java
/** * FIXME remove/* ww w. j av a 2s . co m*/ * * @param href * @return */ private static String removePrefix(final String href) { String[] parts = StringUtils.split(href, '/'); parts = (String[]) ArrayUtils.subarray(parts, 1, parts.length); if (parts == null || parts.length == 0) { return "/"; } String joinedString = StringUtils.join(parts, '/'); if (href.endsWith("/")) { joinedString = joinedString + "/"; } return joinedString; }
From source file: org.apache.archiva.webdav.util.RepositoryPathUtil.java
private static String removePrefix(final String href) { String[] parts = StringUtils.split(href, '/'); parts = (String[]) ArrayUtils.subarray(parts, 1, parts.length); if (parts == null || parts.length == 0) { return "/"; }//from w ww . ja va 2 s.co m String joinedString = StringUtils.join(parts, '/'); if (href.endsWith("/")) { joinedString = joinedString + "/"; } return joinedString; }
From source file: org.apache.cassandra.db.clock.IncrementCounterContext.java
protected static int partitionElements(byte[] context, int left, int right, int pivotIndex) { int leftOffset = HEADER_LENGTH + (left * stepLength); int rightOffset = HEADER_LENGTH + (right * stepLength); int pivotOffset = HEADER_LENGTH + (pivotIndex * stepLength); byte[] pivotValue = ArrayUtils.subarray(context, pivotOffset, pivotOffset + stepLength); swapElement(context, pivotOffset, rightOffset); int storeOffset = leftOffset; for (int i = leftOffset; i < rightOffset; i += stepLength) { if (FBUtilities.compareByteSubArrays(context, i, pivotValue, 0, stepLength) <= 0) { swapElement(context, i, storeOffset); storeOffset += stepLength;//from ww w . jav a2s.c om } } swapElement(context, storeOffset, rightOffset); return (storeOffset - HEADER_LENGTH) / stepLength; }
From source file: org.apache.cassandra.db.clock.IncrementCounterContext.java
/**
 * Return a context w/ an aggregated count for each node id.
 *
 * Merge strategy:
 *   1) take the highest timestamp
 *   2) take the highest delete timestamp
 *   3) map id -> count:
 *      a) local id: sum counts; keep highest timestamp
 *      b) remote id: keep highest count (reconcile)
 *   4) create a context from the sorted (count-descending) entries
 *
 * @param contexts a list of contexts to be merged
 */
public byte[] merge(List<byte[]> contexts) {
    long highestTimestamp = Long.MIN_VALUE;
    long highestDeleteTimestamp = Long.MIN_VALUE;
    Map<FBUtilities.ByteArrayWrapper, Long> contextsMap = new HashMap<FBUtilities.ByteArrayWrapper, Long>();
    for (byte[] context : contexts) {
        // take highest timestamp (and highest delete timestamp) across all contexts
        highestTimestamp = Math.max(FBUtilities.byteArrayToLong(context, 0), highestTimestamp);
        highestDeleteTimestamp = Math.max(FBUtilities.byteArrayToLong(context, TIMESTAMP_LENGTH),
                highestDeleteTimestamp);

        // map id -> count; each element after the header is [id (idLength bytes)][count (long)]
        for (int offset = HEADER_LENGTH; offset < context.length; offset += stepLength) {
            FBUtilities.ByteArrayWrapper id = new FBUtilities.ByteArrayWrapper(
                    ArrayUtils.subarray(context, offset, offset + idLength));
            long count = FBUtilities.byteArrayToLong(context, offset + idLength);

            // put() returns the previous mapping; null means first sighting of this id
            Long previousCount = contextsMap.put(id, count);
            if (previousCount == null)
                continue;

            // local id: sum counts
            if (this.idWrapper.equals(id)) {
                contextsMap.put(id, count + previousCount);
                continue;
            }

            // remote id: keep highest count
            contextsMap.put(id, Math.max(count, previousCount));
        }
    }

    // Sort entries by count descending, ties broken by id bytes (also descending).
    List<Map.Entry<FBUtilities.ByteArrayWrapper, Long>> contextsList = new ArrayList<Map.Entry<FBUtilities.ByteArrayWrapper, Long>>(
            contextsMap.entrySet());
    Collections.sort(contextsList, new Comparator<Map.Entry<FBUtilities.ByteArrayWrapper, Long>>() {
        public int compare(Map.Entry<FBUtilities.ByteArrayWrapper, Long> e1,
                Map.Entry<FBUtilities.ByteArrayWrapper, Long> e2) {
            // reversed
            int result = e2.getValue().compareTo(e1.getValue());
            if (result != 0)
                return result;
            return FBUtilities.compareByteArrays(e2.getKey().data, e1.getKey().data);
        }
    });

    // Rebuild the merged context: [timestamp][delete timestamp][sorted elements].
    int length = contextsList.size();
    byte[] merged = new byte[HEADER_LENGTH + (length * stepLength)];
    FBUtilities.copyIntoBytes(merged, 0, highestTimestamp);
    FBUtilities.copyIntoBytes(merged, TIMESTAMP_LENGTH, highestDeleteTimestamp);
    for (int i = 0; i < length; i++) {
        Map.Entry<FBUtilities.ByteArrayWrapper, Long> entry = contextsList.get(i);
        writeElementAtStepOffset(merged, i, entry.getKey().data, entry.getValue().longValue());
    }
    return merged;
}