Example usage for org.apache.commons.lang ArrayUtils toString

List of usage examples for org.apache.commons.lang ArrayUtils toString

Introduction

On this page you can find example usage for org.apache.commons.lang ArrayUtils.toString.

Prototype

public static String toString(Object array) 

Document

Outputs an array as a String, treating null as an empty array.
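
Example

A minimal, self-contained sketch of the method's behavior (the class and variable names below are illustrative only, not taken from the projects in the Usage section); the expected output shown in the comments follows the library's default brace-and-comma formatting:

import org.apache.commons.lang.ArrayUtils;

public class ArrayUtilsToStringExample {
    public static void main(String[] args) {
        int[] numbers = { 1, 2, 3 };
        String[] names = { "alpha", "beta" };

        // Works for primitive and object arrays alike.
        System.out.println(ArrayUtils.toString(numbers)); // {1,2,3}
        System.out.println(ArrayUtils.toString(names));   // {alpha,beta}

        // A null reference is treated as an empty array.
        System.out.println(ArrayUtils.toString(null));    // {}
    }
}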

Usage

From source file:edu.cornell.med.icb.goby.readers.sam.SamComparison.java

/**
 * Dump the details of expectedSamRecord and actualSamRecord (and gobyAlignment if available). This is
 * called when there are differences between expected and actual to help debug the conversion process.
 * The differences are written to stdout.
 * @param expectedSamRecord the expected sam record
 * @param actualSamRecord the actual sam record
 * @param gobyAlignment the actual goby alignment record
 */
public void dumpComparison(final SAMRecord expectedSamRecord, final SAMRecord actualSamRecord,
        final Alignments.AlignmentEntry gobyAlignment) {
    comparisonErrorDump.setLength(0);
    comparisonErrorDump.append("Read Num         : ").append(readNum).append('\n');
    comparisonErrorDump.append("     ERROR(s)    : ").append(ArrayUtils.toString(comparisonFailures))
            .append('\n');
    if (gobyAlignment != null) {
        comparisonErrorDump.append("     g.index     : ").append(gobyAlignment.getQueryIndex()).append('\n');
        comparisonErrorDump.append("     g.position  : ").append(gobyAlignment.getPosition()).append('\n');
        comparisonErrorDump.append("     g.leftClip  : ").append(gobyAlignment.getSoftClippedBasesLeft())
                .append('\n');
        comparisonErrorDump.append("     g.leftClipQual  : ")
                .append(ByteArrayList.wrap(gobyAlignment.getSoftClippedQualityLeft().toByteArray()))
                .append('\n');
        comparisonErrorDump.append("     g.rightClip : ").append(gobyAlignment.getSoftClippedBasesRight())
                .append('\n');
        comparisonErrorDump.append("     g.rightClipQual : ")
                .append(ByteArrayList.wrap(gobyAlignment.getSoftClippedQualityRight().toByteArray()))
                .append('\n');
        comparisonErrorDump.append("     g.qAlignLen : ").append(gobyAlignment.getQueryAlignedLength())
                .append('\n');
        comparisonErrorDump.append("     g.tAlignLen : ").append(gobyAlignment.getTargetAlignedLength())
                .append('\n');
    }
    comparisonErrorDump.append("     readName (S): ").append(expectedSamRecord.getReadName()).append('\n');
    comparisonErrorDump.append("     readName (D): ").append(actualSamRecord.getReadName()).append('\n');
    if (gobyAlignment != null && gobyAlignment.hasReadName()) {
        comparisonErrorDump.append("     readName (G): ").append(gobyAlignment.getReadName()).append('\n');
    }
    comparisonErrorDump.append("     position (S): ").append(expectedSamRecord.getAlignmentStart())
            .append('\n');
    comparisonErrorDump.append("     position (D): ").append(actualSamRecord.getAlignmentStart()).append('\n');
    comparisonErrorDump.append("     refName  (S): ").append(expectedSamRecord.getReferenceName()).append('\n');
    comparisonErrorDump.append("     refName  (D): ").append(actualSamRecord.getReferenceName()).append('\n');
    comparisonErrorDump.append("     flags    (S): ").append(expectedSamRecord.getFlags()).append('\n');
    comparisonErrorDump.append("     flags    (D): ").append(actualSamRecord.getFlags()).append('\n');
    comparisonErrorDump.append("     mapQual  (S): ").append(expectedSamRecord.getMappingQuality())
            .append('\n');
    comparisonErrorDump.append("     mapQual  (D): ").append(actualSamRecord.getMappingQuality()).append('\n');
    comparisonErrorDump.append("     negStrand(S): ").append(expectedSamRecord.getReadNegativeStrandFlag())
            .append('\n');
    comparisonErrorDump.append("     negStrand(D): ").append(actualSamRecord.getReadNegativeStrandFlag())
            .append('\n');
    comparisonErrorDump.append("     cigar    (S): ").append(expectedSamRecord.getCigar()).append('\n');
    comparisonErrorDump.append("     cigar    (D): ").append(actualSamRecord.getCigar()).append('\n');
    comparisonErrorDump.append("     mdz      (S): ").append(expectedSamRecord.getStringAttribute("MD"))
            .append('\n');
    comparisonErrorDump.append("     mdz      (D): ").append(actualSamRecord.getStringAttribute("MD"))
            .append('\n');
    comparisonErrorDump.append("     mdz-c    (S): ")
            .append(SamHelper.canonicalMdz(expectedSamRecord.getStringAttribute("MD"))).append('\n');
    comparisonErrorDump.append("     mdz-c    (D): ")
            .append(SamHelper.canonicalMdz(actualSamRecord.getStringAttribute("MD"))).append('\n');
    comparisonErrorDump.append("     read     (S): ").append(expectedSamRecord.getReadString()).append('\n');
    comparisonErrorDump.append("     read     (D): ").append(actualSamRecord.getReadString()).append('\n');
    comparisonErrorDump.append("     qual     (S): ").append(expectedSamRecord.getBaseQualityString())
            .append('\n');
    comparisonErrorDump.append("     qual     (D): ").append(actualSamRecord.getBaseQualityString())
            .append('\n');
    System.out.println(comparisonErrorDump.toString());
}

From source file:com.github.webapp_minifier.WebappMinifierMojo.java

/**
 * Returns the array of files to process.
 *
 * @return the array of files to process.
 */
protected String[] getFilesToProcess() {
    final DirectoryScanner directoryScanner = new DirectoryScanner();
    directoryScanner.setBasedir(this.minifiedDirectory);

    final String[] includes = CommonUtils.isEmpty(this.htmlIncludes) ? getDefaultIncludes() : this.htmlIncludes;
    getLog().debug("HTML Includes: " + ArrayUtils.toString(includes));
    directoryScanner.setIncludes(includes);

    final String[] excludes = CommonUtils.isEmpty(this.htmlExcludes) ? getDefaultExcludes() : this.htmlExcludes;
    getLog().debug("HTML Excludes: " + ArrayUtils.toString(excludes));
    directoryScanner.setExcludes(excludes);

    directoryScanner.scan();
    return directoryScanner.getIncludedFiles();
}

From source file:com.taobao.itest.listener.ITestDataSetListener.java

private String[] determineLocations(TestContext testContext, ITestDataSet annotation) {
    Class<?> testClass = testContext.getTestInstance().getClass();
    String fileType = annotation.fileType();
    String[] value = annotation.value();
    String[] locations = annotation.locations();
    if (!ArrayUtils.isEmpty(value) && !ArrayUtils.isEmpty(locations)) {
        String msg = String.format(
                "Test class [%s] has been configured with @ITestDataSetListener' 'value' [%s] and 'locations' [%s] attributes. Use one or the other, but not both.",
                testClass, ArrayUtils.toString(value), ArrayUtils.toString(locations));
        throw new RuntimeException(msg);
    } else if (!ArrayUtils.isEmpty(value)) {
        locations = value;
    }

    if (ArrayUtils.isEmpty(locations)) { // user did not define locations; use the default location
        locations = ResourceLocationProcessingUtil.generateDefaultLocations(testClass, "." + fileType);
    } else {// process to standard resource
        locations = ResourceLocationProcessingUtil.modifyLocations(testClass, locations);
    }
    return locations;
}

From source file:edu.cornell.med.icb.clustering.TestMCLClusterer.java

/**
 * A test that clusters words of equal length together.
 */
@Test
public void clusterWordsInAString() {
    final String text = "Four score and seven years ago our fathers brought forth on this"
            + " continent a new nation conceived in liberty and dedicated to the proposition"
            + " that all men are created equal";

    final List<String[]> expectedResults = new ArrayList<String[]>();
    expectedResults.add(new String[] { "and", "ago", "our", "new", "and", "the", "all", "men", "are" });
    expectedResults.add(new String[] { "score", "seven", "years", "forth", "equal" });
    expectedResults.add(new String[] { "fathers", "brought", "liberty", "created" });
    expectedResults.add(new String[] { "Four", "this", "that" });
    expectedResults.add(new String[] { "on", "in", "to" });
    expectedResults.add(new String[] { "continent", "conceived", "dedicated" });
    expectedResults.add(new String[] { "a" });
    expectedResults.add(new String[] { "nation" });
    expectedResults.add(new String[] { "proposition" });

    // break the text up into an array of individual words
    final String[] words = text.split(" ");

    // create a distance calculator that returns the difference in size between the two words
    final SimilarityDistanceCalculator distanceCalculator = new MaxLinkageDistanceCalculator() {
        public double distance(final int i, final int j) {
            return Math.abs(words[i].length() - words[j].length());
        }
    };

    // and cluster the words into groups according to their size
    final Clusterer clusterer = new MCLClusterer(words.length);
    final List<int[]> clusters = clusterer.cluster(distanceCalculator, 0);

    assertEquals("Number of clusters don't match", expectedResults.size(), clusters.size());

    int j = 0;
    for (final int[] cluster : clusters) {
        // convert instance indexes from the cluster to source data
        final String[] result = new String[cluster.length];
        for (int k = 0; k < result.length; k++) {
            result[k] = words[cluster[k]];
        }
        LOGGER.debug(ArrayUtils.toString(cluster));
        LOGGER.debug(ArrayUtils.toString(result));
        assertArrayEquals("Cluster " + j + " does not match expected", expectedResults.get(j), result);
        j++;
    }
}

From source file:eionet.cr.web.action.UploadCSVActionBeanTest.java

/**
 * Does and tests the "save" step, using the given data-linking scripts and expecting the given SPARQL to be generated.
 *
 * @param dataLinkingScripts The given data-linking scripts.
 * @param expectedSparql Expecting this SPARQL query to be generated for the saved file.
 * @throws Exception For any sort of problems.
 */
private void doSave(ArrayList<DataLinkingScript> dataLinkingScripts, String expectedSparql) throws Exception {

    MockServletContext ctx = createContextMock();
    MockRoundtrip trip = new MockRoundtrip(ctx, UploadCSVActionBeanMock.class);

    // Prepare the rich-type request parameters: the given data-linking scripts (if any)
    // and specifying the list of columns that will constitute unique key.
    HashMap<String, Object> richTypeRequestParams = new HashMap<String, Object>();
    if (CollectionUtils.isNotEmpty(dataLinkingScripts)) {
        richTypeRequestParams.put("dataLinkingScripts", dataLinkingScripts);
    }
    richTypeRequestParams.put("uniqueColumns", Collections.singletonList("Presidency"));
    trip.getRequest().setAttribute(RICH_TYPE_REQUEST_PARAMS_ATTR_NAME, richTypeRequestParams);

    if (CollectionUtils.isNotEmpty(dataLinkingScripts)) {
        trip.setParameter("addDataLinkingScripts", "true");
    }
    trip.setParameter("overwrite", "true");

    trip.setParameter("attribution", "testCopyright");
    trip.setParameter("license", "All rights reserved");
    trip.setParameter("publisher", "testPublisher");
    trip.setParameter("source", "testSource");

    trip.setParameter("fileType", "CSV");
    trip.setParameter("objectsType", "President");
    trip.setParameter("fileEncoding", "UTF-8");
    trip.setParameter("finalEncoding", "UTF-8");

    trip.setParameter("fileName", TEST_FILE_NAME);
    trip.setParameter("fileLabel", TEST_FILE_NAME);
    trip.setParameter("folderUri", TEST_FOLDER_URI);
    trip.setParameter("relativeFilePath", TEST_FILE_NAME);

    trip.execute("save");

    UploadCSVActionBean actionBean = trip.getActionBean(UploadCSVActionBeanMock.class);
    MockHttpServletResponse response = (MockHttpServletResponse) actionBean.getContext().getResponse();

    // On successful saving, we expect to be redirected, hence expecting response code 302.
    assertEquals(302, response.getStatus());

    // Assert existence of various triples about the uploaded file.
    String[] statement6 = { TEST_FILE_URI, Predicates.CR_OBJECTS_UNIQUE_COLUMN, "Presidency" };
    assertTrue("Expected statement: " + ArrayUtils.toString(statement6), hasLiteralStatement(statement6));

    String[] statement7 = { TEST_FILE_URI, Predicates.CR_SPARQL_QUERY, null };
    assertTrue("Expected statement: " + ArrayUtils.toString(7), hasLiteralStatement(statement7));

    // Assert that the SPARQL query generated for the uploaded file is correct.
    // Keeping in mind that above we requested the delete-column script on the "Thumbnail" column.
    SubjectDTO fileSubject = DAOFactory.get().getDao(HelperDAO.class).getSubject(TEST_FILE_URI);
    assertTrue("Expected a non-null file subject with predicates",
            fileSubject != null && fileSubject.getPredicateCount() > 0);
    String actualSparql = fileSubject.getObjectValue(Predicates.CR_SPARQL_QUERY);
    String actualSparqlCompressed = actualSparql == null ? null : actualSparql.replaceAll("\\s+", "");
    String expectedSparqlCompressed = expectedSparql.replaceAll("\\s+", "");
    assertEquals("Actual SPARQL query is not what expected", expectedSparqlCompressed, actualSparqlCompressed);
}

From source file:com.ebay.cloud.cms.query.service.QueryPaginationTest.java

@Test
public void testQueryIte_reverse() {
    String query = "VPool[exists @environment]{*}.parentCluster!Compute[@fqdns=~\".*.com\"]";
    QueryContext qc = newQueryContext(STRATUS_REPO, IBranch.DEFAULT_BRANCH);
    qc.setAllowFullTableScan(true);
    qc.setLimits(new int[] { 1, 2 });
    qc.setRegistration(TestUtils.getDefaultDalImplementation(dataSource));
    qc.setSourceIP("127.0.0.1");
    IQueryResult result = queryService.query(query, qc);
    Assert.assertTrue(result.hasMoreResults());
    int[] nLimit = result.getNextCursor().getLimits();
    int[] nSkips = result.getNextCursor().getSkips();
    int hint = result.getNextCursor().getHint();
    Assert.assertEquals(0, hint);
    Assert.assertEquals(0, nSkips[0]);
    Assert.assertEquals(2, nSkips[1]);
    Assert.assertEquals(2, nLimit.length);
    Assert.assertEquals(1, nLimit[0]);
    Assert.assertEquals(2, nLimit[1]);

    int count = result.getEntities().size();
    System.out.println("fetch size:  " + count);
    int iterateCount = 1;
    while (result.hasMoreResults()) {
        iterateCount++;
        System.out.println("iterate round: " + iterateCount + ", next skips: "
                + ArrayUtils.toString(result.getNextCursor().getSkips()) + ",next limits: "
                + ArrayUtils.toString(result.getNextCursor().getLimits()));
        qc.setSkips(result.getNextCursor().getSkips());
        qc.setLimits(result.getNextCursor().getLimits());

        result = queryService.query(query, qc);
        System.out.println("fetch size:  " + result.getEntities().size());
        count += result.getEntities().size();
    }
    Assert.assertEquals(11, iterateCount);

    QueryContext qc1 = newQueryContext(STRATUS_REPO, IBranch.DEFAULT_BRANCH);
    qc1.setAllowFullTableScan(true);
    qc1.setSourceIP("127.0.0.1");
    IQueryResult result1 = queryService.query(query, qc1);
    Assert.assertFalse(result1.hasMoreResults());
    Assert.assertTrue(count >= result1.getEntities().size());
}

From source file:edu.cornell.med.icb.clustering.QTClusterer.java

/**
 * Groups instances into clusters. Returns the indices of the instances
 * that belong to a cluster as an int array in the list result.
 *
 * @param calculator       The
 *                         {@link edu.cornell.med.icb.clustering.SimilarityDistanceCalculator}
 *                         that should be used when clustering
 * @param qualityThreshold The QT clustering algorithm quality threshold (d)
 * @return The list of clusters.
 */
public List<int[]> cluster(final SimilarityDistanceCalculator calculator, final double qualityThreshold) {
    final ProgressLogger clusterProgressLogger = new ProgressLogger(LOGGER, logInterval, "instances clustered");
    clusterProgressLogger.displayFreeMemory = true;
    clusterProgressLogger.expectedUpdates = instanceCount;
    clusterProgressLogger.start("Starting to cluster " + instanceCount + " instances using "
            + parallelTeam.getThreadCount() + " threads.");

    // reset cluster results
    clusterCount = 0;
    // instanceList is the set "G" to cluster
    final LinkedList<Integer> instanceList = new LinkedList<Integer>();
    for (int i = 0; i < instanceCount; i++) {
        clusters[i].clear();

        // set each node in the instance list to its
        // original position in the source data array
        instanceList.add(i);
    }

    final double ignoreDistance = calculator.getIgnoreDistance();

    // eliminate any instances that will never cluster with anything else
    final IntList singletonClusters = identifySingletonClusters(calculator, qualityThreshold, instanceList,
            clusterProgressLogger);

    final ProgressLogger innerLoopProgressLogger = new ProgressLogger(LOGGER, logInterval,
            "inner loop iterations");
    innerLoopProgressLogger.displayFreeMemory = false;

    final ProgressLogger outerLoopProgressLogger = new ProgressLogger(LOGGER, logInterval,
            "outer loop iterations");
    outerLoopProgressLogger.displayFreeMemory = true;

    try {
        // loop over instances until they have all been added to a cluster
        while (!instanceList.isEmpty()) {
            // cluster remaining instances to find the maximum cardinality
            for (int i = 0; i < instanceList.size(); i++) {
                candidateClusters[i].clear();
            }

            if (logOuterLoopProgress) {
                outerLoopProgressLogger.expectedUpdates = instanceList.size();
                outerLoopProgressLogger.start("Entering outer loop for " + instanceList.size() + " iterations");
            }

            // for each i in G (instance list)
            // find instance j such that distance i,j minimum
            parallelTeam.execute(new ParallelRegion() { // NOPMD

                @Override
                public void run() throws Exception { // NOPMD
                    // each thread will populate a different portion of the "candidateCluster"
                    // array so we shouldn't need to worry about concurrent access
                    execute(0, instanceList.size() - 1, new IntegerForLoop() {
                        @Override
                        public void run(final int first, final int last) {
                            if (LOGGER.isDebugEnabled()) {
                                LOGGER.debug("first = " + first + ", last = " + last);
                            }
                            for (int i = first; i <= last; i++) {
                                @SuppressWarnings("unchecked")
                                final LinkedList<Integer> notClustered = (LinkedList<Integer>) instanceList
                                        .clone();

                                // add the first instance to the next candidate cluster
                                final IntArrayList candidateCluster = candidateClusters[i];
                                candidateCluster.add(notClustered.remove(i));

                                if (logInnerLoopProgress) {
                                    innerLoopProgressLogger.expectedUpdates = notClustered.size();
                                    innerLoopProgressLogger.start(
                                            "Entering inner loop for " + notClustered.size() + " iterations");
                                }

                                // cluster the remaining instances to find the maximum
                                // cardinality find instance j such that distance i,j minimum
                                boolean done = false;
                                while (!done && !notClustered.isEmpty()) {
                                    // find the node that has minimum distance between the
                                    // current cluster and the instances that have not yet
                                    // been clustered.
                                    double minDistance = Double.POSITIVE_INFINITY;
                                    int minDistanceInstanceIndex = 0;
                                    int instanceIndex = 0;
                                    for (final int instance : notClustered) {
                                        double newDistance = ignoreDistance;

                                        final int[] cluster = candidateCluster.elements();
                                        for (int instanceInCluster = 0; instanceInCluster < candidateCluster
                                                .size(); instanceInCluster++) {
                                            final double a = calculator.distance(cluster[instanceInCluster],
                                                    instance);
                                            // if the distance of the instance will force the candidate cluster
                                            // to be larger than the cutoff value, we can stop here
                                            // because we know that this candidate cluster will be too large
                                            if (a >= minDistance) {
                                                newDistance = ignoreDistance;
                                                break;
                                            }
                                            final double b = newDistance;

                                            // This code is inlined from java.lang.Math.max(a, b)
                                            if (a != a) { // a is NaN
                                                newDistance = a;
                                            } else if (a == 0.0d && b == 0.0d
                                                    && Double.doubleToLongBits(a) == negativeZeroDoubleBits) {
                                                newDistance = b;
                                            } else if (a >= b) {
                                                newDistance = a;
                                            } else {
                                                newDistance = b;
                                            }
                                        }

                                        if (newDistance != ignoreDistance && newDistance < minDistance) {
                                            minDistance = newDistance;
                                            minDistanceInstanceIndex = instanceIndex;
                                        }
                                        instanceIndex++;
                                    }
                                    // grow clusters until min distance between new instance
                                    // and cluster reaches quality threshold
                                    // if (diameter(Ai U {j}) > d)
                                    if (minDistance > qualityThreshold) {
                                        done = true;
                                    } else {
                                        // remove the instance from the ones to be considered
                                        final int instance = notClustered.remove(minDistanceInstanceIndex);
                                        // and add it to the newly formed cluster
                                        candidateCluster.add(instance);
                                    }
                                    if (logInnerLoopProgress) {
                                        innerLoopProgressLogger.update();
                                    }
                                }
                                if (logInnerLoopProgress) {
                                    innerLoopProgressLogger.stop("Inner loop completed.");
                                }
                                if (logOuterLoopProgress) {
                                    outerLoopProgressLogger.update();
                                }
                            }
                        }
                    });
                }
            });

            if (logOuterLoopProgress) {
                outerLoopProgressLogger.stop("Outer loop completed.");
            }

            // identify cluster (set C) with maximum cardinality
            int maxCardinality = 0;
            int selectedClusterIndex = -1;
            for (int i = 0; i < instanceList.size(); i++) {
                final int size = candidateClusters[i].size();
                if (LOGGER.isTraceEnabled() && size > 0) {
                    LOGGER.trace("potential cluster " + i + ": " + ArrayUtils.toString(candidateClusters[i]));
                }
                if (size > maxCardinality) {
                    maxCardinality = size;
                    selectedClusterIndex = i;
                }
            }

            final IntArrayList selectedCluster = candidateClusters[selectedClusterIndex];

            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("adding " + selectedCluster.size() + " instances to cluster " + clusterCount);
            }
            // and add that cluster to the final result
            clusters[clusterCount].addAll(selectedCluster);

            // remove instances in cluster C so they are no longer considered
            instanceList.removeAll(selectedCluster);

            if (logClusterProgress) {
                final int selectedClusterSize = selectedCluster.size();
                int i = 0;
                while (i < selectedClusterSize - 1) {
                    clusterProgressLogger.lightUpdate();
                    i++;
                }
                // make sure there is at least one "full" update per loop
                if (i < selectedClusterSize) {
                    clusterProgressLogger.update();
                }
            }

            // we just created a new cluster
            clusterCount++;

            // next iteration is over (G - C)
        }
    } catch (RuntimeException e) {
        LOGGER.error("Caught runtime exception - rethrowing", e);
        throw e;
    } catch (Exception e) {
        LOGGER.error("Caught exception - rethrowing as ClusteringException", e);
        throw new ClusteringException(e);
    }

    // add singleton clusters to the end so the largest clusters are at the start of the list
    for (final int singleton : singletonClusters) {
        clusters[clusterCount].add(singleton);
        clusterCount++;
    }

    clusterProgressLogger.stop("Clustering completed.");
    return getClusters();
}

From source file:lfsom.visualization.clustering.LFSKMeans.java

public double[][] getClusterVariances() {
    double[][] variances = new double[clusters.length][numberOfAttributes];
    for (int indexClusters = 0; indexClusters < clusters.length; indexClusters++) {
        double[][] instances = clusters[indexClusters].getInstances(data);
        // for all attributes in this cluster
        for (int i = 0; i < numberOfAttributes; i++) {
            double n = 0;
            double mean = 0;
            double m2 = 0;
            double delta = 0;
            for (double[] instance : instances) {
                n++;
                double value = instance[i];
                delta = value - mean;
                mean += delta / n;
                m2 += delta * (value - mean);

            }
            variances[indexClusters][i] = m2 / n;
        }
    }
    for (double[] vars : variances) {
        System.out.println(ArrayUtils.toString(vars));
    }
    return variances;
}

From source file:com.ebay.cloud.cms.query.service.QueryPaginationTest.java

@Test
public void testQueryIte_join() {
    String query = "VPool[exists @environment]{*}.computes[@fqdns=~\".*.com\"]";
    QueryContext qc = newQueryContext(STRATUS_REPO, IBranch.DEFAULT_BRANCH);
    qc.setAllowFullTableScan(true);
    qc.setRegistration(TestUtils.getDefaultDalImplementation(dataSource));
    qc.setLimits(new int[] { 1, 6 });
    qc.setSourceIP("127.0.0.1");
    IQueryResult result = queryService.query(query, qc);
    Assert.assertTrue(result.hasMoreResults());
    int[] nLimit = result.getNextCursor().getLimits();
    int[] nSkips = result.getNextCursor().getSkips();
    int hint = result.getNextCursor().getHint();
    Assert.assertEquals(0, hint);
    Assert.assertEquals(1, nSkips[0]);
    Assert.assertEquals(0, nSkips[1]);
    Assert.assertEquals(2, nLimit.length);
    Assert.assertEquals(1, nLimit[0]);
    Assert.assertEquals(6, nLimit[1]);

    int count = result.getEntities().size();
    System.out.println("fetch size:  " + count);
    int iterateCount = 1;
    while (result.hasMoreResults()) {
        iterateCount++;
        System.out.println("iterate round: " + iterateCount + ", next skips: "
                + ArrayUtils.toString(result.getNextCursor().getSkips()) + ",next limits: "
                + ArrayUtils.toString(result.getNextCursor().getLimits()));
        qc.setSkips(result.getNextCursor().getSkips());
        qc.setLimits(result.getNextCursor().getLimits());

        result = queryService.query(query, qc);
        System.out.println("fetch size:  " + result.getEntities().size());
        count += result.getEntities().size();
    }
    Assert.assertEquals(10, iterateCount);

    QueryContext qc1 = newQueryContext(STRATUS_REPO, IBranch.DEFAULT_BRANCH);
    qc1.setAllowFullTableScan(true);
    qc1.setSourceIP("127.0.0.1");
    IQueryResult result1 = queryService.query(query, qc1);
    Assert.assertFalse(result1.hasMoreResults());
    Assert.assertTrue(2 >= result1.getEntities().size());
}

From source file:gov.redhawk.ide.debug.internal.LocalApplicationFactory.java

public static String toString(DataType t) {
    Object value = AnyUtils.convertAny(t.value);
    if (value instanceof DataType[]) {
        StringBuilder builder = new StringBuilder();
        builder.append("\t" + t.id + " = {");
        for (DataType child : (DataType[]) value) {
            builder.append("\n\t\t" + LocalApplicationFactory.toString(child));
        }
        builder.append("\n\t}");
        return builder.toString();
    } else if (value instanceof DataType) {
        return "\t" + t.id + " = {" + LocalApplicationFactory.toString((DataType) value) + "}";
    } else if (value instanceof Any[]) {
        StringBuilder builder = new StringBuilder();
        builder.append("\t" + t.id + " = [] {");
        int i = 0;
        for (Any child : (Any[]) value) {
            builder.append("\n\t\t[" + i++ + "] = ");
            Object childValue = AnyUtils.convertAny(child);
            String valueStr;
            if (childValue instanceof DataType) {
                valueStr = LocalApplicationFactory.toString((DataType) childValue);
            } else if (childValue instanceof DataType[]) {
                StringBuilder valueStrBuilder = new StringBuilder();
                valueStrBuilder.append("{");
                for (DataType childType : (DataType[]) childValue) {
                    valueStrBuilder.append("\n\t\t" + LocalApplicationFactory.toString(childType));
                }
                valueStrBuilder.append("\n\t\t}");
                valueStr = valueStrBuilder.toString();
            } else {
                valueStr = String.valueOf(childValue);
            }
            builder.append(valueStr);
        }
        builder.append("\n\t}");
        return builder.toString();
    } else if (value != null && value.getClass().isArray()) {

        return "\t" + t.id + " = " + ArrayUtils.toString(value);
    } else {
        return "\t" + t.id + " = " + value;
    }
}