List of usage examples for org.apache.commons.lang ArrayUtils add
public static short[] add(short[] array, short element)
Copies the given array and appends the given element at the end of the new, longer array. (The signature shown is the short[] overload; equivalent overloads exist for all primitive types and for Object arrays, as used in the examples below.)
From source file:net.navasoft.madcoin.backend.services.vo.request.SuccessRequestVOWrapper.java
/**
 * Gets the error processing values.
 *
 * Scans the declared methods of the helper class for the
 * {@link ProcessingErrorValue} annotation and, for each BASIC_* precedence,
 * invokes the annotated accessor on the wrapped request object to collect
 * its value.
 *
 * @return the error processing values, one per annotated accessor handled
 * @since 27/07/2014, 06:49:08 PM
 */
@Override
public ControllerExceptionArgument[] getErrorProcessingValues() {
    // Accumulate in a list to avoid the O(n^2) cost of growing an array
    // one element at a time with ArrayUtils.add.
    List<ControllerExceptionArgument> errors = new ArrayList<ControllerExceptionArgument>();
    for (Method accessor : helper.getDeclaredMethods()) {
        for (Annotation expected : accessor.getAnnotations()) {
            if (expected.annotationType() != ProcessingErrorValue.class) {
                continue;
            }
            ProcessingErrorValue expectedReal = (ProcessingErrorValue) expected;
            try {
                switch (expectedReal.precedence()) {
                case BASIC_OPTIONAL: {
                    Object requestValue = accessor.invoke(hidden, ArrayUtils.EMPTY_OBJECT_ARRAY);
                    // Optional values fall back to a placeholder when absent.
                    if (requestValue == null) {
                        requestValue = "No data provided";
                    }
                    errors.add(new ControllerExceptionArgument(requestValue));
                    break;
                }
                case BASIC_REQUIRED: {
                    Object requestValue = accessor.invoke(hidden, ArrayUtils.EMPTY_OBJECT_ARRAY);
                    errors.add(new ControllerExceptionArgument(requestValue));
                    break;
                }
                case COMPLEX_OPTIONAL:
                case COMPLEX_REQUIRED:
                default:
                    // Complex precedences are intentionally not collected here.
                    break;
                }
            } catch (IllegalAccessException e) {
                // Best-effort: a failing accessor is reported but does not
                // abort collection of the remaining values.
                e.printStackTrace();
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            }
        }
    }
    return errors.toArray(new ControllerExceptionArgument[errors.size()]);
}
From source file:info.raack.appliancelabeler.machinelearning.weka.WekaMachineLearningEngine.java
public int predictWithModel(int modelId, double[] variables, double minProbability) { ModelStorage modelStorage = modelsActive.get(modelId); if (modelStorage == null) { throw new RuntimeException("No classifier loaded with modelId " + modelId); }/*from ww w . j av a 2 s . com*/ // add unknown label double[] fullList = ArrayUtils.add(variables, Utils.missingValue()); Instance instance = new DenseInstance(1.0, fullList); Instances trainingData = modelStorage.getTrainingData(); instance.setDataset(trainingData); try { double[] distributionForUserApplianceIdIndices = modelStorage.getModel() .distributionForInstance(instance); // need to have probability of at least 70% // since the probabilities must sum to 1, if we see a probability >= 0.7, we are guaranteed that it is the largest and only one int userApplianceIndex = -1; double probability = 0; for (int i = 0; i < distributionForUserApplianceIdIndices.length; i++) { if (distributionForUserApplianceIdIndices[i] >= minProbability && distributionForUserApplianceIdIndices[i] >= probability) { userApplianceIndex = i; probability = distributionForUserApplianceIdIndices[i]; } } if (userApplianceIndex >= 0) { int userApplianceId = (int) Double.parseDouble( trainingData.attribute(trainingData.classIndex()).value((int) userApplianceIndex)); if (userApplianceId == 0) { int j = 6; } return userApplianceId; } else { return -1; } } catch (Exception e) { throw new RuntimeException("Could not classify instance", e); } }
From source file:au.org.ala.biocache.service.SpeciesLookupRestService.java
/** * initialise the common header components that will be added to the the supplied header field. *///from w w w. j a va2 s.c om private void initHeaders() { baseHeader = new String[] { messageSource.getMessage("species.name", null, "Species Name", null), messageSource.getMessage("species.author", null, "Scientific Name Author", null), messageSource.getMessage("species.rank", null, "Taxon Rank", null), messageSource.getMessage("species.kingdom", null, "Kingdom", null), messageSource.getMessage("species.phylum", null, "Phylum", null), messageSource.getMessage("species.class", null, "Class", null), messageSource.getMessage("species.order", null, "Order", null), messageSource.getMessage("species.family", null, "Family", null), messageSource.getMessage("species.genus", null, "Genus", null), messageSource.getMessage("species.common", null, "Vernacular Name", null) }; countBaseHeader = (String[]) ArrayUtils.add(baseHeader, messageSource.getMessage("species.count", null, "Number of Records", null)); synonymHeader = (String[]) ArrayUtils.add(baseHeader, messageSource.getMessage("species.synonyms", null, "Synonyms", null)); countSynonymHeader = (String[]) ArrayUtils.add(synonymHeader, messageSource.getMessage("species.count", null, "Number of Records", null)); }
From source file:gda.scan.ScanDataPoint.java
/** * Add a position to the array of positions. Calls to this method must be made in the same order as calls to * addScannable to associate the array of numbers with the scannable. * <p>/*from w ww . j av a 2 s .c om*/ * It is recommended to call setScannables instead. * * @param data */ @Override public void addScannablePosition(Object data, String[] format) { if (data != null) { scannablePositions.add(data); scannableFormats = (String[][]) ArrayUtils.add(scannableFormats, format); } }
From source file:com.adobe.acs.commons.workflow.bulk.execution.model.Workspace.java
/**
 * Adds the payload group to the list of active payload groups, unless it is
 * null or already tracked.
 *
 * @param payloadGroup the payload group to add as active
 */
public void addActivePayloadGroup(PayloadGroup payloadGroup) {
    if (payloadGroup == null) {
        return;
    }
    String path = payloadGroup.getDereferencedPath();
    if (ArrayUtils.contains(activePayloadGroups, path)) {
        return; // already tracked
    }
    activePayloadGroups = (String[]) ArrayUtils.add(activePayloadGroups, path);
    properties.put(PN_ACTIVE_PAYLOAD_GROUPS, activePayloadGroups);
}
From source file:gda.jython.scriptcontroller.logging.LoggingScriptController.java
private void determineColumns() { Method[] gettersWeWant = new Method[0]; columnGetters = new LinkedHashMap<Method, String>(); refreshColumnGetters = new HashMap<Method, String>(); // loop over all methods to find ones with the correct annotation Method[] methods = messageClassToLog.getDeclaredMethods(); for (Method method : methods) { Annotation[] annotations = method.getDeclaredAnnotations(); for (Annotation annotation : annotations) { if (annotation instanceof ScriptControllerLogColumn) { gettersWeWant = (Method[]) ArrayUtils.add(gettersWeWant, method); continue; }/* w ww .ja v a 2 s . co m*/ } } // order the methods bsed on the annotation's column index Method[] gettersWeWant_ordered = new Method[gettersWeWant.length]; for (Method method : gettersWeWant) { Annotation[] annotations = method.getDeclaredAnnotations(); for (Annotation annotation : annotations) { if (annotation instanceof ScriptControllerLogColumn) { gettersWeWant_ordered[((ScriptControllerLogColumn) annotation).columnIndex()] = method; } } } // add the method references and their column labels to the hashmaps for (Method method : gettersWeWant_ordered) { Annotation[] annotations = method.getDeclaredAnnotations(); for (Annotation annotation : annotations) { if (annotation instanceof ScriptControllerLogColumn) { columnGetters.put(method, ((ScriptControllerLogColumn) annotation).columnName()); if (((ScriptControllerLogColumn) annotation).refresh()) { refreshColumnGetters.put(method, ((ScriptControllerLogColumn) annotation).columnName()); } } } } }
From source file:bdv.bigcat.label.FragmentSegmentAssignment.java
/**
 * Synchronize the inverse lookup (segment > [fragments]) with the current
 * forward lookup (fragment > segment). The previous state of the inverse
 * lookup is discarded first.
 */
protected void syncILut() {
    ilut.clear();
    final TLongLongIterator it = lut.iterator();
    while (it.hasNext()) {
        it.advance();
        final long fragmentId = it.key();
        final long segmentId = it.value();
        final long[] existing = ilut.get(segmentId);
        // First fragment for this segment starts a new array; later ones
        // are appended via copy.
        final long[] updated = (existing == null)
                ? new long[] { fragmentId }
                : ArrayUtils.add(existing, fragmentId);
        ilut.put(segmentId, updated);
    }
}
From source file:ee.ria.xroad.proxy.serverproxy.ServerMessageProcessor.java
/**
 * Verifies the client's TLS certificate chain against the trust anchor of
 * the client's X-Road instance, using the OCSP responses attached to the
 * request.
 *
 * @throws Exception if the OCSP responses are missing, no trust anchor is
 *                   found, or chain verification fails
 */
private void verifySslClientCert() throws Exception {
    log.trace("verifySslClientCert()");
    if (requestMessage.getOcspResponses().isEmpty()) {
        throw new CodedException(X_SSL_AUTH_FAILED,
                "Cannot verify TLS certificate, corresponding OCSP response is missing");
    }
    final String instanceIdentifier = requestMessage.getSoap().getClient().getXRoadInstance();
    // The last element of the presented chain identifies the issuing CA.
    final X509Certificate trustAnchor = GlobalConf.getCaCert(instanceIdentifier,
            clientSslCerts[clientSslCerts.length - 1]);
    if (trustAnchor == null) {
        throw new Exception("Unable to find trust anchor");
    }
    try {
        // Extend the presented certs with the trust anchor and verify the
        // resulting chain together with the request's OCSP responses.
        final X509Certificate[] chainCerts =
                (X509Certificate[]) ArrayUtils.add(clientSslCerts, trustAnchor);
        final CertChain chain = CertChain.create(instanceIdentifier, chainCerts);
        CertHelper.verifyAuthCert(chain, requestMessage.getOcspResponses(),
                requestMessage.getSoap().getClient());
    } catch (Exception e) {
        throw new CodedException(X_SSL_AUTH_FAILED, e);
    }
}
From source file:gda.scan.ConcurrentScanChild.java
protected void reorderAllScanObjects() { Vector<ScanObject> sortedAllScanObjects = new Vector<ScanObject>(); int i = 0;/* w w w. ja v a 2 s . c o m*/ for (Object nextObject : allScannables) { for (ScanObject nextScanObject : allScanObjects) { if (nextScanObject.scannable.equals(nextObject)) { sortedAllScanObjects.add(i, nextScanObject); i++; } } } allScanObjects = sortedAllScanObjects; // now save information about which scannables are at each level scannableLevels = new TreeMap<Integer, Scannable[]>(); // loop through all levels saving the amount of scannables at each level for (Scannable scannable : allScannables) { Integer thisLevel = scannable.getLevel(); if (scannableLevels.containsKey(thisLevel)) { Scannable[] levelArray = scannableLevels.get(thisLevel); levelArray = (Scannable[]) ArrayUtils.add(levelArray, scannable); scannableLevels.put(thisLevel, levelArray); } else { Scannable[] levelArray = new Scannable[] { scannable }; scannableLevels.put(thisLevel, levelArray); } } }
From source file:au.org.ala.biocache.dao.SearchDAOImpl.java
/** * Returns a list of species that are endemic to the supplied region. Values are cached * due to the "expensive" operation./*from ww w. jav a2s . co m*/ */ @Cacheable(cacheName = "endemicCache") public List<FieldResultDTO> getEndemicSpecies(SpatialSearchRequestParams requestParams) throws Exception { if (executor == null) { executor = Executors.newFixedThreadPool(maxMultiPartThreads); } // 1)get a list of species that are in the WKT logger.debug("Starting to get Endemic Species..."); List<FieldResultDTO> list1 = getValuesForFacet(requestParams);//new ArrayList(Arrays.asList(getValuesForFacets(requestParams))); logger.debug("Retrieved species within area...(" + list1.size() + ")"); // 2)get a list of species that occur in the inverse WKT String reverseQuery = SpatialUtils.getWKTQuery(spatialField, requestParams.getWkt(), true);//"-geohash:\"Intersects(" +wkt + ")\""; logger.debug("The reverse query:" + reverseQuery); requestParams.setWkt(null); int i = 0, localterms = 0; String facet = requestParams.getFacets()[0]; String[] originalFqs = requestParams.getFq(); //add the negated WKT query to the fq originalFqs = (String[]) ArrayUtils.add(originalFqs, reverseQuery); List<Future<List<FieldResultDTO>>> threads = new ArrayList<Future<List<FieldResultDTO>>>(); //batch up the rest of the world query so that we have fqs based on species we want to test for. This should improve the performance of the endemic services. while (i < list1.size()) { StringBuffer sb = new StringBuffer(); while ((localterms == 0 || localterms % termQueryLimit != 0) && i < list1.size()) { if (localterms != 0) sb.append(" OR "); sb.append(facet).append(":").append(ClientUtils.escapeQueryChars(list1.get(i).getFieldValue())); i++; localterms++; } String newfq = sb.toString(); if (localterms == 1) newfq = newfq + " OR " + newfq; //cater for the situation where there is only one term. 
We don't want the term to be escaped again localterms = 0; //System.out.println("FQ = " + newfq); SpatialSearchRequestParams srp = new SpatialSearchRequestParams(); BeanUtils.copyProperties(requestParams, srp); srp.setFq((String[]) ArrayUtils.add(originalFqs, newfq)); int batch = i / termQueryLimit; EndemicCallable callable = new EndemicCallable(srp, batch, this); threads.add(executor.submit(callable)); } for (Future<List<FieldResultDTO>> future : threads) { List<FieldResultDTO> list = future.get(); if (list != null) list1.removeAll(list); } logger.debug("Determined final endemic list (" + list1.size() + ")..."); return list1; }