Example usage for org.apache.commons.lang ArrayUtils add

List of usage examples for org.apache.commons.lang ArrayUtils add

Introduction

On this page you can find example usage for org.apache.commons.lang ArrayUtils.add.

Prototype

public static short[] add(short[] array, short element) 

Document

Copies the given array and adds the given element at the end of the new array; the input array itself is not modified. Note that ArrayUtils.add is overloaded for Object[] and for all of the primitive array types, so the usage examples below exercise several different overloads.
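As a quick orientation before the real-world examples, here is a minimal, self-contained sketch of the call pattern (it assumes commons-lang 2.x is on the classpath; the class name and values are purely illustrative):

import java.util.Arrays;

import org.apache.commons.lang.ArrayUtils;

// Minimal sketch of ArrayUtils.add; the input arrays are copied, never modified.
public class ArrayUtilsAddSketch {
    public static void main(String[] args) {
        // Primitive overload: add(short[], short) returns a new short[] with the element appended.
        short[] shorts = ArrayUtils.add(new short[] { 1, 2 }, (short) 3);
        System.out.println(Arrays.toString(shorts)); // [1, 2, 3]

        // Object overload: add(Object[], Object) is declared to return Object[],
        // so callers cast the result, exactly as the examples below do.
        // The runtime component type follows the input array, so the cast is safe.
        String[] names = (String[]) ArrayUtils.add(new String[] { "a", "b" }, "c");
        System.out.println(Arrays.toString(names)); // [a, b, c]
    }
}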

Usage

From source file:com.ibm.jaggr.core.impl.layer.LayerTest.java

@Test
public void featureSetUpdatingTests() throws Exception {
    replay(mockAggregator, mockRequest, mockResponse, mockDependencies);
    requestAttributes.put(IAggregator.AGGREGATOR_REQATTRNAME, mockAggregator);
    String configJson = "{paths:{p1:'p1',p2:'p2'}, packages:[{name:'foo', location:'foo'}]}";
    configRef.set(new ConfigImpl(mockAggregator, tmpdir.toURI(), configJson));
    File cacheDir = mockAggregator.getCacheManager().getCacheDir();
    ConcurrentLinkedHashMap<String, CacheEntry> cacheMap = (ConcurrentLinkedHashMap<String, CacheEntry>) ((LayerCacheImpl) mockAggregator
            .getCacheManager().getCache().getLayers()).getLayerBuildMap();
    long totalSize = 0;
    testDepMap.put("p1/a", (String[]) ArrayUtils.add(testDepMap.get("p2/a"), "p1/aliased/d"));
    List<String> layerCacheInfo = new LinkedList<String>();
    configJson = "{paths:{p1:'p1',p2:'p2'}, aliases:[[/\\/aliased\\//, function(s){if (has('foo')) return '/foo/'; else if (has('bar')) return '/bar/'; has('non'); return '/non/'}]]}";
    configRef.set(new ConfigImpl(mockAggregator, tmpdir.toURI(), configJson));

    MockRequestedModuleNames modules = new MockRequestedModuleNames();
    modules.setModules(Arrays.asList(new String[] { "p1/a", "p1/p1" }));
    requestAttributes.put(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME, modules);
    requestAttributes.put(IHttpTransport.OPTIMIZATIONLEVEL_REQATTRNAME, IHttpTransport.OptimizationLevel.NONE);
    requestAttributes.put(LayerImpl.LAYERCACHEINFO_PROPNAME, layerCacheInfo);

    LayerImpl layer = newLayerImpl(modules.toString(), mockAggregator);

    InputStream in = layer.getInputStream(mockRequest, mockResponse);
    Writer writer = new StringWriter();
    CopyUtil.copy(in, writer);
    String result = writer.toString();
    totalSize += result.length();
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    Map<String, ICacheKeyGenerator> keyGen = layer.getCacheKeyGenerators();
    System.out.println(keyGen.values());
    assertTrue(keyGen.values().toString().contains("js:(has:[conditionFalse, conditionTrue])"));

    requestAttributes.put(IHttpTransport.EXPANDREQUIRELISTS_REQATTRNAME, Boolean.TRUE);
    Features features = new Features();
    features.put("foo", true);
    requestAttributes.put(IHttpTransport.FEATUREMAP_REQATTRNAME, features);

    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    totalSize += result.length();
    keyGen = layer.getCacheKeyGenerators();
    System.out.println(keyGen.values());
    assertEquals("[added, update_keygen, update_key, update_add]", layerCacheInfo.toString());
    assertTrue(keyGen.values().toString().contains("js:(has:[conditionFalse, conditionTrue, foo])"));
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    features.put("foo", false);
    features.put("bar", true);
    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    totalSize += result.length();
    keyGen = layer.getCacheKeyGenerators();
    System.out.println(keyGen.values());
    assertEquals("[added, update_keygen, update_key, update_add]", layerCacheInfo.toString());
    assertTrue(keyGen.values().toString().contains("js:(has:[bar, conditionFalse, conditionTrue, foo])"));
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    features.put("foo", true);
    features.put("bar", false);
    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    totalSize += result.length();
    assertTrue(keyGen == layer.getCacheKeyGenerators());
    assertEquals("[added, update_weights_2]", layerCacheInfo.toString());
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    features.put("foo", false);
    features.put("bar", false);
    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    totalSize += result.length();
    assertEquals("[added, update_keygen, update_key, update_weights_2]", layerCacheInfo.toString());
    keyGen = layer.getCacheKeyGenerators();
    System.out.println(keyGen.values());
    assertTrue(keyGen.values().toString().contains("js:(has:[bar, conditionFalse, conditionTrue, foo, non])"));
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    features.put("foo", true);
    features.put("bar", true);
    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    totalSize += result.length();
    assertEquals("[added, update_weights_2]", layerCacheInfo.toString());
    assertTrue(keyGen == layer.getCacheKeyGenerators());
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

    features.remove("bar");
    in = layer.getInputStream(mockRequest, mockResponse);
    writer = new StringWriter();
    CopyUtil.copy(in, writer);
    result = writer.toString();
    assertEquals("[hit_1]", layerCacheInfo.toString());
    assertTrue(keyGen == layer.getCacheKeyGenerators());
    assertEquals("weighted size error", totalSize, cacheMap.weightedSize());
    assertEquals("cache file size error", totalSize, TestUtils.getDirListSize(cacheDir, layerFilter));

}

From source file:au.org.ala.biocache.dao.SearchDAOImpl.java

/**
 * Note - this method extracts from CASSANDRA rather than the Index.
 */
public Map<String, Integer> writeResultsToStream(DownloadRequestParams downloadParams, OutputStream out, int i,
        boolean includeSensitive, DownloadDetailsDTO dd) throws Exception {

    int resultsCount = 0;
    Map<String, Integer> uidStats = new HashMap<String, Integer>();
    //stores the remaining limit for data resources that have a download limit
    Map<String, Integer> downloadLimit = new HashMap<String, Integer>();

    try {
        SolrQuery solrQuery = initSolrQuery(downloadParams, false, null);
        //ensure that the qa facet is being ordered alphabetically so that the order is consistent.
        boolean getAssertionsFromFacets = "all".equals(downloadParams.getQa());
        if (getAssertionsFromFacets) {
            //set the order for the facet to be based on the index - this will force the assertions to be returned in the same order each time
            //based on alphabetical sort.  The number of QA's may change between searches so we can't guarantee that the order won't change
            solrQuery.add("f.assertions.facet.sort", "index");
        }
        formatSearchQuery(downloadParams);
        //add context information
        updateQueryContext(downloadParams);
        logger.info("search query: " + downloadParams.getFormattedQuery());
        solrQuery.setQuery(buildSpatialQueryString(downloadParams));
        //Only the fields specified below will be included in the results from the SOLR Query
        solrQuery.setFields("row_key", "institution_uid", "collection_uid", "data_resource_uid",
                "data_provider_uid");

        String dFields = downloadParams.getFields();

        if (includeSensitive) {
            //include raw latitude and longitudes
            dFields = dFields
                    .replaceFirst("decimalLatitude.p", "decimalLatitude,decimalLongitude,decimalLatitude.p")
                    .replaceFirst(",locality,", ",locality,sensitive_locality,");
        }

        StringBuilder sb = new StringBuilder(dFields);
        if (downloadParams.getExtra().length() > 0)
            sb.append(",").append(downloadParams.getExtra());
        StringBuilder qasb = new StringBuilder();

        QueryResponse qr = runSolrQuery(solrQuery, downloadParams.getFq(), 0, 0, "_docid_", "asc");
        dd.setTotalRecords(qr.getResults().getNumFound());
        //get the assertion facets to add them to the download fields
        List<FacetField> facets = qr.getFacetFields();
        for (FacetField facet : facets) {
            if (facet.getName().equals("assertions") && facet.getValueCount() > 0) {

                for (FacetField.Count facetEntry : facet.getValues()) {
                    //System.out.println("facet: " + facetEntry.getName());
                    if (qasb.length() > 0)
                        qasb.append(",");
                    qasb.append(facetEntry.getName());
                }
            } else if (facet.getName().equals("data_resource_uid") && checkDownloadLimits) {
                //populate the download limit
                initDownloadLimits(downloadLimit, facet);
            }
        }

        //Write the header line
        String qas = qasb.toString();

        String[] fields = sb.toString().split(",");
        String[] qaFields = qas.equals("") ? new String[] {} : qas.split(",");
        String[] qaTitles = downloadFields.getHeader(qaFields, false);
        String[] titles = downloadFields.getHeader(fields, true);
        String[] header = org.apache.commons.lang3.ArrayUtils.addAll(titles, qaTitles);
        //Create the Writer that will be used to format the records
        //construct correct RecordWriter based on the supplied fileType
        final au.org.ala.biocache.RecordWriter rw = downloadParams.getFileType().equals("csv")
                ? new CSVRecordWriter(out, header, downloadParams.getSep(), downloadParams.getEsc())
                : new ShapeFileRecordWriter(downloadParams.getFile(), out,
                        (String[]) ArrayUtils.addAll(fields, qaFields));

        if (rw instanceof ShapeFileRecordWriter) {
            dd.setHeaderMap(((ShapeFileRecordWriter) rw).getHeaderMappings());
        }

        //download the records that have limits first...
        if (downloadLimit.size() > 0) {
            String[] originalFq = downloadParams.getFq();
            StringBuilder fqBuilder = new StringBuilder("-(");
            for (String dr : downloadLimit.keySet()) {
                //add another fq to the search for data_resource_uid                    
                downloadParams.setFq((String[]) ArrayUtils.add(originalFq, "data_resource_uid:" + dr));
                resultsCount = downloadRecords(downloadParams, rw, downloadLimit, uidStats, fields, qaFields,
                        resultsCount, dr, includeSensitive, dd);
                if (fqBuilder.length() > 2)
                    fqBuilder.append(" OR ");
                fqBuilder.append("data_resource_uid:").append(dr);
            }
            fqBuilder.append(")");
            //now include the rest of the data resources
            //add extra fq for the remaining records
            downloadParams.setFq((String[]) ArrayUtils.add(originalFq, fqBuilder.toString()));
            resultsCount = downloadRecords(downloadParams, rw, downloadLimit, uidStats, fields, qaFields,
                    resultsCount, null, includeSensitive, dd);
        } else {
            //download all at once
            downloadRecords(downloadParams, rw, downloadLimit, uidStats, fields, qaFields, resultsCount, null,
                    includeSensitive, dd);
        }
        rw.finalise();

    } catch (SolrServerException ex) {
        logger.error("Problem communicating with SOLR server. " + ex.getMessage(), ex);
    }

    return uidStats;
}

From source file:adalid.core.programmers.AbstractSqlProgrammer.java

private boolean isInParentheses(String string) {
    if (StringUtils.isBlank(string)) {
        return false;
    }
    String[] searchStrings = (String[]) ArrayUtils.add(stringsOperadorExpresionUnario(), getCoalesce());
    String s = removeStart(string, searchStrings);
    return StrUtils.isDelimited(s, LRB, RRB);
}

From source file:adalid.core.programmers.AbstractSqlProgrammer.java

private String[] stringsOperadorExpresionUnario() {
    ScalarOp[] enums = ScalarOp.values();
    String[] strings = null;
    String string;
    for (ScalarOp sop : enums) {
        string = getString(sop);
        if (StringUtils.isNotBlank(string)) {
            strings = (String[]) ArrayUtils.add(strings, StringUtils.trimToEmpty(string));
        }
    }
    return strings;
}

From source file:hybridewah.HybridBitmap.java

public void concatenate(HybridBitmap a) {

    if (this.verbatim && a.verbatim) {
        this.buffer = ArrayUtils.addAll(this.buffer, a.buffer);
    } else if (this.verbatim) {
        long[] firstWord = new long[] { new Long(this.buffer.length) << 33 }; //first word indicates the number of following literals         
        this.buffer = ArrayUtils.addAll(firstWord, this.buffer);
        this.buffer = ArrayUtils.addAll(this.buffer, a.buffer);
        this.verbatim = false;
    } else if (a.verbatim) {
        this.buffer = ArrayUtils.add(this.buffer, new Long(a.buffer.length) << 33);
        this.buffer = ArrayUtils.addAll(this.buffer, a.buffer);
    } else {
        this.buffer = ArrayUtils.addAll(this.buffer, a.buffer);
    }
    this.actualsizeinwords = this.buffer.length;
    this.density = (this.density * this.sizeinbits + a.density * a.sizeinbits)
            / (this.sizeinbits + a.sizeinbits);
    this.sizeinbits = this.sizeinbits + a.sizeinbits;

}

From source file:net.stuxcrystal.simpledev.configuration.parser.generators.xml.XMLParser.java

/**
 * Actually update the node that is below our current stack.
 * @param uri            PAIN IN THE ASS
 * @param localName      PAIN IN THE ASS
 * @param qName          PAIN IN THE ASS
 * @throws SAXException Cannot mix node types.
 */
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
    NodeContainer container = this.currentStack.pop();

    if (this.currentStack.isEmpty()) {
        this.base = container.getNode();
    } else {
        NodeContainer parent = this.currentStack.peek();
        if (!(parent.getRawNode() instanceof NullNode) && !(parent.getRawNode() instanceof MapNode))
            throw new SAXException("The configuration should not mix multiple element types:"
                    + parent.getRawNode().toString());

        if (parent.getRawNode() instanceof NullNode) {
            MapNode base = (MapNode) parent.convert(new MapNode());
            base.setData(new Node<?>[0]);
        }

        MapNode node = parent.getCastedRawNode();
        node.setData((Node<?>[]) ArrayUtils.add(node.getData(), container.getNode()));
    }
}

From source file:net.tooan.ynpay.third.mongodb.mapper.MapperUtil.java

/**
 * Get the name property of @Entity annotation.
 * If the name property is not set, then return the class's simple name in lower case.
 *
 * @param clazz
 * @return
 */
public static String[] getEntityName(Class<?> clazz) {
    Entity entity = clazz.getAnnotation(Entity.class);
    String[] name = entity.name().split("\\.");
    if (name.length == 1) {
        name = (String[]) ArrayUtils.add(name, clazz.getSimpleName().toLowerCase());
    } else {
        name[1] = StringUtils.join(ArrayUtils.remove(name, 0), ".");
    }
    return name;
}
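To make the branching above easier to follow, here is a standalone sketch of the same split-then-append logic built around ArrayUtils.add and ArrayUtils.remove; the annotation lookup is left out, and the method and literal names are invented for illustration:

import java.util.Arrays;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;

// Illustrative sketch of the split-then-append logic in getEntityName.
public class EntityNameSketch {
    static String[] resolve(String annotationName, String simpleClassName) {
        String[] name = annotationName.split("\\.");
        if (name.length == 1) {
            // Unqualified name: append the lower-cased class name as the second element.
            name = (String[]) ArrayUtils.add(name, simpleClassName.toLowerCase());
        } else {
            // Qualified name: drop the first part and re-join the rest into element 1.
            name[1] = StringUtils.join(ArrayUtils.remove(name, 0), ".");
        }
        return name;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(resolve("orders", "OrderRecord")));      // [orders, orderrecord]
        System.out.println(Arrays.toString(resolve("shop.orders", "OrderRecord"))); // [shop, orders]
    }
}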

From source file:net.ymate.platform.persistence.jdbc.repo.RepoProxy.java

public Object doProxy(IProxyChain proxyChain) throws Throwable {
    Repository _repo = proxyChain.getTargetMethod().getAnnotation(Repository.class);
    if (_repo == null) {
        return proxyChain.doProxyChain();
    }
    //
    String _targetSQL = _repo.value();
    if (StringUtils.isBlank(_targetSQL))
        try {
            IConfiguration _conf = ((IRepository) proxyChain.getTargetObject()).getConfig();
            _targetSQL = _conf.getString(_repo.item());
        } catch (Exception e) {
            _LOG.warn("", RuntimeUtils.unwrapThrow(e));
        }
    if (StringUtils.isNotBlank(_targetSQL)) {
        IDatabase _db = JDBC.get(proxyChain.getProxyFactory().getOwner());
        IConnectionHolder _connHolder = null;
        if (StringUtils.isNotBlank(_repo.dsName())) {
            _connHolder = _db.getConnectionHolder(_repo.dsName());
        } else {
            Repository _superRepo = proxyChain.getTargetClass().getAnnotation(Repository.class);
            if (StringUtils.isNotBlank(_superRepo.dsName())) {
                _connHolder = _db.getConnectionHolder(_superRepo.dsName());
            } else {
                _connHolder = _db.getDefaultConnectionHolder();
            }
        }
        //
        Object _result = null;
        switch (_repo.type()) {
        case UPDATE:
            _result = __doUpdate(_db, _connHolder, _targetSQL, proxyChain.getTargetMethod(),
                    proxyChain.getMethodParams());
            break;
        default:
            _result = __doQuery(_db, _connHolder, _targetSQL, proxyChain.getTargetMethod(),
                    proxyChain.getMethodParams());
        }
        // Bind the result back to the last method parameter (append when it is an array).
        int _position = proxyChain.getMethodParams().length - 1;
        Class<?> _paramType = proxyChain.getMethodParams()[_position].getClass();
        if (_paramType.isArray()) {
            if (_result != null) {
                proxyChain.getMethodParams()[_position] = ArrayUtils
                        .add((Object[]) proxyChain.getMethodParams()[_position], _result);
            }
        } else {
            proxyChain.getMethodParams()[_position] = _result;
        }
    }
    return proxyChain.doProxyChain();
}

From source file:nl.cyso.vsphere.client.VsphereQuery.java

/**
 * Returns all the MOREFs of the specified type that are present under the container
 *
 * @param folder {@link ManagedObjectReference} of the container to begin the search from
 * @param morefType Type of the managed entity that needs to be searched
 * @return Map of name and MOREF of the managed objects present. If none exist then empty Map is returned
 * @throws RemoteException
 * @throws RuntimeFault
 */
private static Map<String, ManagedObjectReference> getMOREFsInContainerByType(ManagedObjectReference folder,
        String morefType) throws RuntimeFault, RemoteException {
    String PROP_ME_NAME = "name";
    // ManagedObjectReference viewManager = VsphereManager.getServiceContent().getViewManager();
    ViewManager viewManager = VsphereManager.getServiceInstance().getViewManager();
    ContainerView containerView = viewManager.createContainerView(
            new Folder(VsphereManager.getServiceInstance().getServerConnection(), folder),
            new String[] { morefType }, true);

    Map<String, ManagedObjectReference> tgtMoref = new HashMap<String, ManagedObjectReference>();

    // Create Property Spec
    PropertySpec propertySpec = new PropertySpec();
    propertySpec.setAll(Boolean.FALSE);
    propertySpec.setType(morefType);
    propertySpec.setPathSet((String[]) ArrayUtils.add(propertySpec.getPathSet(), PROP_ME_NAME));

    TraversalSpec ts = new TraversalSpec();
    ts.setName("view");
    ts.setPath("view");
    ts.setSkip(false);
    ts.setType("ContainerView");

    // Now create Object Spec
    ObjectSpec objectSpec = new ObjectSpec();
    objectSpec.setObj(containerView.getMOR());
    objectSpec.setSkip(Boolean.TRUE);
    objectSpec.setSelectSet((SelectionSpec[]) ArrayUtils.add(objectSpec.getSelectSet(), ts));

    // Create PropertyFilterSpec using the PropertySpec and ObjectSpec
    // created above.
    PropertyFilterSpec propertyFilterSpec = new PropertyFilterSpec();
    propertyFilterSpec
            .setPropSet((PropertySpec[]) ArrayUtils.add(propertyFilterSpec.getPropSet(), propertySpec));
    propertyFilterSpec
            .setObjectSet((ObjectSpec[]) ArrayUtils.add(propertyFilterSpec.getObjectSet(), objectSpec));

    PropertyFilterSpec[] propertyFilterSpecs = new PropertyFilterSpec[] { propertyFilterSpec };
    PropertyCollector propertyCollector = VsphereManager.getServiceInstance().getPropertyCollector();

    RetrieveResult rslts = propertyCollector.retrievePropertiesEx(propertyFilterSpecs, new RetrieveOptions());
    List<ObjectContent> listobjcontent = new ArrayList<ObjectContent>();
    if (rslts != null && rslts.getObjects() != null && rslts.getObjects().length != 0) {
        listobjcontent.addAll(Arrays.asList(rslts.getObjects()));
    }
    String token = null;
    if (rslts != null && rslts.getToken() != null) {
        token = rslts.getToken();
    }
    while (token != null && !token.isEmpty()) {
        rslts = propertyCollector.continueRetrievePropertiesEx(token);
        token = null;
        if (rslts != null) {
            token = rslts.getToken();
            if (rslts.getObjects() != null && rslts.getObjects().length != 0) {
                listobjcontent.addAll(Arrays.asList(rslts.getObjects()));
            }
        }
    }
    for (ObjectContent oc : listobjcontent) {
        ManagedObjectReference mr = oc.getObj();
        String entityNm = null;
        DynamicProperty[] dps = oc.getPropSet();
        if (dps != null) {
            for (DynamicProperty dp : dps) {
                entityNm = (String) dp.getVal();
            }
        }
        tgtMoref.put(entityNm, mr);
    }
    return tgtMoref;
}

From source file:nl.cyso.vsphere.client.VsphereQuery.java

/**
 * Method to retrieve properties of a {@link ManagedObjectReference}
 *
 * @param entityMor {@link ManagedObjectReference} of the entity
 * @param props Array of properties to be looked up
 * @return Map of the property name and its corresponding value
 * @throws RemoteException
 * @throws RuntimeFault
 * @throws InvalidProperty
 */
private static Map<String, Object> getEntityProps(ManagedObjectReference entityMor, String[] props)
        throws InvalidProperty, RuntimeFault, RemoteException {
    HashMap<String, Object> retVal = new HashMap<String, Object>();

    // Create Property Spec
    PropertySpec propertySpec = new PropertySpec();
    propertySpec.setAll(Boolean.FALSE);
    propertySpec.setType(entityMor.getType());
    propertySpec.setPathSet(props);

    // Now create Object Spec
    ObjectSpec objectSpec = new ObjectSpec();
    objectSpec.setObj(entityMor);

    // Create PropertyFilterSpec using the PropertySpec and ObjectSpec
    // created above.
    PropertyFilterSpec propertyFilterSpec = new PropertyFilterSpec();
    propertyFilterSpec
            .setPropSet((PropertySpec[]) ArrayUtils.add(propertyFilterSpec.getPropSet(), propertySpec));
    propertyFilterSpec
            .setObjectSet((ObjectSpec[]) ArrayUtils.add(propertyFilterSpec.getObjectSet(), objectSpec));

    PropertyFilterSpec[] propertyFilterSpecs = new PropertyFilterSpec[] { propertyFilterSpec };
    PropertyCollector propertyCollector = VsphereManager.getServiceInstance().getPropertyCollector();

    RetrieveResult rslts = propertyCollector.retrievePropertiesEx(propertyFilterSpecs, new RetrieveOptions());
    List<ObjectContent> listobjcontent = new ArrayList<ObjectContent>();
    if (rslts != null && rslts.getObjects() != null && rslts.getObjects().length != 0) {
        listobjcontent.addAll(Arrays.asList(rslts.getObjects()));
    }
    String token = null;
    if (rslts != null && rslts.getToken() != null) {
        token = rslts.getToken();
    }
    while (token != null && !token.isEmpty()) {
        rslts = propertyCollector.continueRetrievePropertiesEx(token);
        token = null;
        if (rslts != null) {
            token = rslts.getToken();
            if (rslts.getObjects() != null && rslts.getObjects().length != 0) {
                listobjcontent.addAll(Arrays.asList(rslts.getObjects()));
            }
        }
    }
    for (ObjectContent oc : listobjcontent) {
        DynamicProperty[] dps = oc.getPropSet();
        if (dps != null) {
            for (DynamicProperty dp : dps) {
                retVal.put(dp.getName(), dp.getVal());
            }
        }
    }
    return retVal;
}