Example usage for org.apache.commons.lang ArrayUtils toString

List of usage examples for org.apache.commons.lang ArrayUtils toString

Introduction

On this page you can find example usages of org.apache.commons.lang ArrayUtils toString.

Prototype

public static String toString(Object array) 

Document

Outputs an array as a String, treating null as an empty array.
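
For quick reference, here is a minimal sketch of that behavior, assuming commons-lang 2.x is on the classpath (the class name ArrayUtilsToStringDemo is purely illustrative): a non-null array is rendered as its elements inside braces, and null is rendered as an empty array.

import org.apache.commons.lang.ArrayUtils;

public class ArrayUtilsToStringDemo {
    public static void main(String[] args) {
        int[] numbers = { 1, 2, 3 };
        // Renders the elements inside braces, typically "{1,2,3}"
        System.out.println(ArrayUtils.toString(numbers));
        // A null array is treated as empty, typically rendered as "{}"
        System.out.println(ArrayUtils.toString(null));
    }
}

In the examples below, this is mostly used to produce readable assertion, log, and exception messages from arrays.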

Usage

From source file:org.opennms.features.topology.app.internal.VEProviderGraphContainerTest.java

@Test
public void testContainerWithHopProvider() throws Exception {
    // Wrap the test GraphProvider in a VertexHopGraphProvider
    ProviderManager providerManager = new ProviderManager();
    providerManager.onEdgeProviderBind(m_edgeProvider);
    GraphContainer graphContainer = new VEProviderGraphContainer(providerManager);
    graphContainer.setBaseTopology(new VertexHopGraphProvider(m_graphProvider));
    graphContainer.setSemanticZoomLevel(0);

    m_graphContainer = graphContainer;

    // There should be zero vertices or edges if no focus vertices are set
    Graph graph = m_graphContainer.getGraph();
    assertEquals(0, graph.getDisplayVertices().size());
    assertEquals(0, graph.getDisplayEdges().size());

    // Add one focus vertex
    DefaultVertexHopCriteria hopCriteria = new DefaultVertexHopCriteria(new DefaultVertexRef("nodes", "v1"));
    m_graphContainer.addCriteria(hopCriteria);
    // This needs to be 2 because there is a SemanticZoomLevelCriteria in there also
    assertEquals(2, m_graphContainer.getCriteria().length);

    // Verify that a single vertex is in the graph
    graph = m_graphContainer.getGraph();
    assertEquals(1, graph.getDisplayVertices().size());
    assertEquals(0, graph.getDisplayEdges().size());

    expectVertex("nodes", "v1", "vertex");
    graph.visit(verifier());
    verify();
    verifyConnectedness(graph);
    reset();

    // Change SZL to 1
    m_graphContainer.setSemanticZoomLevel(1);
    assertEquals(2, m_graphContainer.getCriteria().length);

    // Focus vertex
    expectVertex("nodes", "v1", "vertex");

    expectVertex("nodes", "v2", "vertex");
    /*
       This is a problem with the VEProviderGraphContainer... it wraps a delegate GraphProvider
       in a MergingGraphProvider like so:
            
       VEProviderGraphContainer { MergingGraphProvider { VertexHopGraphProvider } }
            
       But for the VertexHopGraphProvider to calculate the SZL correctly, it needs to be aware of all
       edges, including those provided by the MergingGraphProvider. So we should rearrange things
       so that they are laid out like:
            
       VEProviderGraphContainer { VertexHopGraphProvider { MergingGraphProvider } }
            
       We should decouple the MergingGraphProvider from the VEProviderGraphContainer and then just
       inject them in the correct order. When this problem is fixed, uncomment all of the lines that
       are commented out in this test.
    */
    //expectVertex("nodes", "v3", "vertex");
    expectVertex("nodes", "v4", "vertex");

    expectEdge("nodes", "e1", "edge");
    //expectEdge("nodes", "e2", "edge");
    //expectEdge("nodes", "e3", "edge");
    expectEdge("nodes", "e4", "edge");
    //expectEdge("ncs", "ncs1", "ncs edge");
    expectEdge("ncs", "ncs2", "ncs edge");
    expectEdge("ncs", "ncs3", "ncs edge");

    graph = m_graphContainer.getGraph();
    //assertEquals(4, graph.getDisplayVertices().size());
    //assertEquals(5, graph.getDisplayEdges().size());
    assertEquals(3, graph.getDisplayVertices().size());
    assertEquals(4, graph.getDisplayEdges().size());

    graph.visit(verifier());
    verify();
    verifyConnectedness(graph);
    reset();

    // Add a collapsed criteria to the container
    Criteria collapsibleCriteria = new TestCriteria1();
    m_graphContainer.addCriteria(collapsibleCriteria);
    assertEquals(3, m_graphContainer.getCriteria().length);

    // Make sure that the collapsible criteria maps "v2" and "v4" to the collapsed "test" vertex
    Map<VertexRef, Set<Vertex>> collapsed = VertexHopGraphProvider.getMapOfVerticesToCollapsedVertices(
            VertexHopGraphProvider.getCollapsedCriteria(m_graphContainer.getCriteria()));
    assertTrue(collapsed.containsKey(new DefaultVertexRef("nodes", "v2")));
    assertTrue(collapsed.containsKey(new DefaultVertexRef("nodes", "v4")));
    assertTrue(collapsed.get(new DefaultVertexRef("nodes", "v2"))
            .equals(Collections.singleton(new DefaultVertexRef("nodes", "test"))));
    assertTrue(collapsed.get(new DefaultVertexRef("nodes", "v4"))
            .equals(Collections.singleton(new DefaultVertexRef("nodes", "test"))));

    assertEquals(ArrayUtils.toString(m_graphContainer.getGraph().getDisplayVertices()), 3,
            m_graphContainer.getGraph().getDisplayVertices().size());
    assertEquals(ArrayUtils.toString(m_graphContainer.getBaseTopology().getVertices(new TestCriteria1())), 3,
            m_graphContainer.getBaseTopology().getVertices(new TestCriteria1()).size());

    expectVertex("nodes", "v1", "vertex");
    expectVertex("nodes", "v3", "vertex");
    // Collapsed vertex that contains v2 and v4
    expectVertex("nodes", "test", "test");

    expectEdge("ncs", "ncs1", "ncs edge");
    expectEdge("nodes", "collapsedTarget-e1", "edge");
    expectEdge("nodes", "collapsedSource-e2", "edge");
    expectEdge("nodes", "collapsedTarget-e3", "edge");
    expectEdge("nodes", "collapsedSource-e4", "edge");

    graph = m_graphContainer.getGraph();

    assertEquals(3, graph.getDisplayVertices().size());
    assertEquals(5, graph.getDisplayEdges().size());

    for (Edge edge : graph.getDisplayEdges()) {
        if (edge.getId().equals("collapsedTarget-e1")) {
            assertEquals("v1", edge.getSource().getVertex().getId());
            assertEquals("test", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsedSource-e2")) {
            assertEquals("test", edge.getSource().getVertex().getId());
            assertEquals("v3", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsedTarget-e3")) {
            assertEquals("v3", edge.getSource().getVertex().getId());
            assertEquals("test", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsedSource-e4")) {
            assertEquals("test", edge.getSource().getVertex().getId());
            assertEquals("v1", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("ncs1")) {
            assertEquals("v1", edge.getSource().getVertex().getId());
            assertEquals("v3", edge.getTarget().getVertex().getId());
        } else {
            fail("Unknown edge ID: " + edge.getId());
        }
    }

    graph.visit(verifier());
    verify();
    verifyConnectedness(graph);
    reset();

    // Remove the collapsed criteria and make sure that the state reverts correctly
    m_graphContainer.removeCriteria(collapsibleCriteria);
    graph = m_graphContainer.getGraph();

    //assertEquals(4, graph.getDisplayVertices().size());
    //assertEquals(5, graph.getDisplayEdges().size());
    assertEquals(3, graph.getDisplayVertices().size());
    assertEquals(4, graph.getDisplayEdges().size());

    collapsibleCriteria = new TestCriteria1();
    m_graphContainer.addCriteria(collapsibleCriteria);
    collapsibleCriteria = new TestCriteria2();
    m_graphContainer.addCriteria(collapsibleCriteria);
    assertEquals(4, m_graphContainer.getCriteria().length);

    graph = m_graphContainer.getGraph();

    assertEquals(ArrayUtils.toString(m_graphContainer.getGraph().getDisplayVertices()), 3,
            m_graphContainer.getGraph().getDisplayVertices().size());
    /*
     * One edge is missing because of the VertexHopGraphProvider issue mentioned above.
    assertEquals(
       ArrayUtils.toString(m_graphContainer.getGraph().getDisplayEdges()), 
       5, 
       m_graphContainer.getGraph().getDisplayEdges().size()
    );
     */
    assertEquals(ArrayUtils.toString(m_graphContainer.getGraph().getDisplayEdges()), 4,
            m_graphContainer.getGraph().getDisplayEdges().size());

    for (Edge edge : graph.getDisplayEdges()) {
        if (edge.getId().equals("collapsedTarget-e1")) {
            assertEquals("v1", edge.getSource().getVertex().getId());
            assertEquals("test", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsed-e2")) {
            assertEquals("test", edge.getSource().getVertex().getId());
            assertEquals("collapse-v3", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsed-e3")) {
            assertEquals("collapse-v3", edge.getSource().getVertex().getId());
            assertEquals("test", edge.getTarget().getVertex().getId());
        } else if (edge.getId().equals("collapsedSource-e4")) {
            assertEquals("test", edge.getSource().getVertex().getId());
            assertEquals("v1", edge.getTarget().getVertex().getId());
            /**
             * This edge is not found because of the issue mentioned above.
            } else if (edge.getId().equals("collapsedTarget-ncs1")) {
               assertEquals("v1", edge.getSource().getVertex().getId());
               assertEquals("collapse-v3", edge.getTarget().getVertex().getId());
             */
        } else {
            fail("Unknown edge ID: " + edge.getId());
        }
    }
}

From source file:org.opentestsystem.delivery.testreg.upload.validator.fileformat.AbstractRecordDataValidator.java

@Override
public void validate(final Object target, final Errors errors) {

    Iterator<DataRecord> recordsIterator = null;
    boolean hasGlobalErrors = false;

    @SuppressWarnings("unchecked")
    List<DataRecord> recordsList = ((List<DataRecord>) target);

    if (validateDuplicates) {

        DuplicateUniquePair duplicateUniquePair = duplicateRecordProcessor.processDuplicates(recordsList);

        /*
         * Check for duplicates. If found, reject them as global errors and have the user correct the input.
         */
        if (duplicateUniquePair.hasDuplicates()) {
            hasGlobalErrors = true;

            /*
             * Duplicates are found. Get the row numbers and add error messages for each data record that has duplicates.
             */
            for (Integer[] dupRows : duplicateRecordProcessor.getDuplicateRowNumbers(duplicateUniquePair)) {
                rejectValue(errors, getDatasetName(), null, null,
                        "Duplicates records found matching in rows " + ArrayUtils.toString(dupRows));
            }
        } else {
            recordsIterator = duplicateUniquePair.getUniqueRecordSet().iterator();
        }

    } else {
        recordsIterator = recordsList.iterator();

    }

    if (!hasGlobalErrors) {
        // Validate Header Record
        boolean isValidHeader;
        if (FormatType.valueOf(datasetName).equals(FormatType.DESIGNATEDSUPPORTSANDACCOMMODATIONS)) {
            FileHeaderRecord record = (FileHeaderRecord) recordsIterator.next();
            // Based on TO-15 (SB-1426) requirement 3.9.5, all accessibility codes are consolidated into one field
            // (the AccommodationCodes header column) for both import and export formats, so the new accommodation
            // header length is 4 (StudentIdentifier, StateAbbreviation, Subject, AccommodationCodes).
            // Backward compatibility is maintained for multi-column accommodation upload files (header length greater than 4).
            if (record.getColumns().length == DomainIndexConstants.NEW_DES_SUPPORTS_ACC_UPLOAD_FILE_COLS) {
                List<String> accommodationHeaders = templateDownloadMap
                        .get(FormatType.DESIGNATEDSUPPORTSANDACCOMMODATIONS);
                String headersAcc[] = accommodationHeaders.toArray(new String[accommodationHeaders.size()]);
                this.headers = headersAcc;
            } else if (record
                    .getColumns().length > DomainIndexConstants.NEW_DES_SUPPORTS_ACC_UPLOAD_FILE_COLS) {
                HashMap<FormatType, List<String>> dbHeaderMap = masterResourceAccommodationService
                        .getAllCodes();
                List<String> dbHeaders = dbHeaderMap.get(FormatType.DESIGNATEDSUPPORTSANDACCOMMODATIONS);
                if (dbHeaders != null && dbHeaders.size() > 0) {
                    String headersAcc[] = dbHeaders.toArray(new String[dbHeaders.size()]);
                    this.headers = headersAcc;
                }
            }
            isValidHeader = isValidHeader(record, errors);
        } else {
            isValidHeader = isValidHeader((FileHeaderRecord) recordsIterator.next(), errors);
        }

        if (!isValidHeader) {
            hasGlobalErrors = true;
        }

        while (isValidHeader && recordsIterator.hasNext()) {
            FileDataRecord record = (FileDataRecord) recordsIterator.next();
            // Validate Data Format otherwise
            if (isValidFormat(record, errors)) {//If no field errors, then do business rule validation
                isValidData(record, errors);
            }
        }
    }
    //check for duplicate NCESID if format is DISTRICT or INSTITUTION
    if (!hasGlobalErrors && isNCESIDDupCheckRequired()) {
        Map<String, List<Integer>> duplicateNcesIds = duplicateRecordProcessor.getDuplicateNCESIds(recordsList,
                FormatType.valueOf(datasetName));

        if (!duplicateNcesIds.isEmpty()) {
            hasGlobalErrors = true;
            FileDataRecord record = (FileDataRecord) recordsList.get(0);
            TestRegistrationBase sb11Entity = fileUploadUtils.getDomainObject(FormatType.valueOf(datasetName),
                    record.getColumns());
            for (Map.Entry<String, List<Integer>> entry : duplicateNcesIds.entrySet()) {
                String label = getFieldLabel(sb11Entity, "nationwideIdentifier");
                rejectValue(errors, getDatasetName(), null, null, "Duplicate " + getHumanReadableName(label)
                        + " " + entry.getKey() + " found in rows " + entry.getValue());
            }
        }
    }
}

From source file:org.openvpms.component.business.dao.hibernate.im.query.QueryBuilderTestCase.java

/**
 * Verifies that there is a parameter with the expected value.
 *
 * @param context  the query context
 * @param expected the expected value
 * @param names    the possible parameter names
 */
private void checkParameter(QueryContext context, Object expected, String... names) {
    Map<String, Object> parameters = context.getParameters();
    for (String name : names) {
        Object value = parameters.get(name);
        if (value != null && ObjectUtils.equals(expected, value)) {
            return;
        }
    }
    fail("No parameter named " + ArrayUtils.toString(names) + " with value " + expected);
}

From source file:org.openvpms.component.business.dao.hibernate.im.query.TypeSet.java

/**
 * Creates a new type set from an {@link ArchetypeConstraint} and node descriptors.
 *
 * @param constraint the constraint
 * @param nodes      the node descriptors
 * @param cache      the archetype descriptor cache
 * @param assembler  the assembler
 * @return a new type set
 * @throws ArchetypeServiceException for any archetype service error
 * @throws QueryBuilderException     if there are no matching archetypes for the constraint
 */
public static TypeSet create(ArchetypeConstraint constraint, List<NodeDescriptor> nodes,
        IArchetypeDescriptorCache cache, CompoundAssembler assembler) {
    Set<String> matches = new HashSet<String>();
    for (NodeDescriptor descriptor : nodes) {
        mergeArchetypeRange(matches, descriptor);
    }
    String[] shortNames = matches.toArray(new String[matches.size()]);
    Set<ArchetypeDescriptor> descriptors = getDescriptors(shortNames, constraint.isPrimaryOnly(), cache);
    if (descriptors.isEmpty()) {
        throw new QueryBuilderException(NoMatchingArchetypesForShortName, ArrayUtils.toString(shortNames));
    }
    return new TypeSet(constraint.getAlias(), descriptors, assembler);
}

From source file:org.openvpms.component.business.dao.hibernate.im.query.TypeSet.java

/**
 * Creates a new type set from a {@link ShortNameConstraint}.
 *
 * @param constraint the constraint
 * @param cache      the archetype descriptor cache
 * @param assembler  the assembler
 * @return a new type set
 * @throws ArchetypeServiceException for any archetype service error
 * @throws QueryBuilderException     if there are no matching archetypes for
 *                                   the constraint
 */
public static TypeSet create(ShortNameConstraint constraint, IArchetypeDescriptorCache cache,
        CompoundAssembler assembler) {
    Set<ArchetypeDescriptor> descriptors = getDescriptors(constraint.getShortNames(),
            constraint.isPrimaryOnly(), cache);
    // check that we have at least one match
    if (descriptors.isEmpty()) {
        throw new QueryBuilderException(NoMatchingArchetypesForShortName,
                ArrayUtils.toString(constraint.getShortNames()));
    }
    return new TypeSet(constraint.getAlias(), descriptors, assembler);
}

From source file:org.polymap.biotop.model.importer.WaldbiotopeImportOperation.java

/**
 * Finds the single file in the unpacked ZIP directory whose extension matches one of the given extensions.
 *
 * @param exts the accepted file extensions
 * @return the matching file
 */
protected File findFile(String... exts) {
    File result = null;
    for (File f : zipDir.listFiles()) {
        String ext = FilenameUtils.getExtension(f.getName());
        if (ArrayUtils.contains(exts, ext)) {
            if (result != null) {
                throw new IllegalStateException(
                        "ZIP enthlt mehrere Files fr die Endung: " + ArrayUtils.toString(exts));
            }
            result = f;
        }
    }
    if (result == null) {
        throw new IllegalStateException("ZIP enthlt kein File mit der Endung: " + ArrayUtils.toString(exts));
    }
    return result;
}

From source file:org.projectforge.business.user.UserTest.java

private void assertIds(final String[] expectedEntries, final String csvString) {
    final String[] entries = StringUtils.split(csvString, ',');
    for (final String expected : expectedEntries) {
        assertTrue("'" + expected + "' expected in: " + ArrayUtils.toString(entries),
                ArrayUtils.contains(entries, expected));
    }
    for (final String entry : entries) {
        assertTrue("'" + entry + "' doesn't expected in: " + ArrayUtils.toString(expectedEntries),
                ArrayUtils.contains(expectedEntries, entry));
    }
}

From source file:org.projectforge.core.BaseDao.java

/**
 * Gets all declared Hibernate search fields. These fields are defined via annotations on the database object class. The names are the
 * property names or, if defined, the name declared in the annotation of a field. <br/>
 * The user can search these fields explicitly by typing e.g. authors:beck (<field>:<searchString>).
 * @return the names of the search fields
 */
public synchronized String[] getSearchFields() {
    if (searchFields != null) {
        return searchFields;
    }
    final Field[] fields = BeanHelper.getAllDeclaredFields(clazz);
    final Set<String> fieldNames = new TreeSet<String>();
    for (final Field field : fields) {
        if (field.isAnnotationPresent(org.hibernate.search.annotations.Field.class) == true) {
            // @Field(index = Index.TOKENIZED),
            final org.hibernate.search.annotations.Field annotation = field
                    .getAnnotation(org.hibernate.search.annotations.Field.class);
            fieldNames.add(getSearchName(field.getName(), annotation));
        } else if (field.isAnnotationPresent(org.hibernate.search.annotations.Fields.class) == true) {
            // @Fields( {
            // @Field(index = Index.TOKENIZED),
            // @Field(name = "name_forsort", index = Index.UN_TOKENIZED)
            // } )
            final org.hibernate.search.annotations.Fields annFields = field
                    .getAnnotation(org.hibernate.search.annotations.Fields.class);
            for (final org.hibernate.search.annotations.Field annotation : annFields.value()) {
                fieldNames.add(getSearchName(field.getName(), annotation));
            }
        } else if (field.isAnnotationPresent(Id.class) == true) {
            fieldNames.add(field.getName());
        } else if (field.isAnnotationPresent(DocumentId.class) == true) {
            fieldNames.add(field.getName());
        }
    }
    final Method[] methods = clazz.getMethods();
    for (final Method method : methods) {
        if (method.isAnnotationPresent(org.hibernate.search.annotations.Field.class) == true) {
            final org.hibernate.search.annotations.Field annotation = method
                    .getAnnotation(org.hibernate.search.annotations.Field.class);
            fieldNames.add(getSearchName(method.getName(), annotation));
        } else if (method.isAnnotationPresent(DocumentId.class) == true) {
            final String prop = BeanHelper.determinePropertyName(method);
            fieldNames.add(prop);
        }
    }
    if (getAdditionalSearchFields() != null) {
        for (final String str : getAdditionalSearchFields()) {
            fieldNames.add(str);
        }
    }
    searchFields = new String[fieldNames.size()];
    fieldNames.toArray(searchFields);
    log.info("Search fields for '" + clazz + "': " + ArrayUtils.toString(searchFields));
    return searchFields;
}

From source file:org.sakaiproject.roster.impl.SakaiProxyImpl.java

@SuppressWarnings("unchecked")
private List<RosterMember> filterHiddenMembers(List<RosterMember> members, String currentUserId, String siteId,
        AuthzGroup authzGroup) {

    log.debug("filterHiddenMembers");

    if (isAllowed(currentUserId, RosterFunctions.ROSTER_FUNCTION_VIEWHIDDEN, authzGroup.getReference())) {
        log.debug("permission to view all, including hidden");
        return members;
    }

    List<RosterMember> filtered = new ArrayList<RosterMember>();

    Set<String> userIds = new HashSet<String>();

    for (Iterator<RosterMember> i = members.iterator(); i.hasNext();) {
        RosterMember member = i.next();

        userIds.add(member.getEid());

        // If this member is not in the authzGroup, remove them.
        if (authzGroup.getMember(member.getUserId()) == null) {
            i.remove();
        }
    }

    Set<String> hiddenUserIds = privacyManager.findHidden("/site/" + siteId, userIds);

    //get the list of visible roles, optional config.
    //if set, the only users visible in the tool will be those with their role defined in this list
    String[] visibleRoles = serverConfigurationService.getStrings("roster2.visibleroles");

    boolean filterRoles = ArrayUtils.isNotEmpty(visibleRoles);

    if (log.isDebugEnabled())
        log.debug("visibleRoles: " + ArrayUtils.toString(visibleRoles));
    if (log.isDebugEnabled())
        log.debug("filterRoles: " + filterRoles);

    // determine filtered membership
    for (RosterMember member : members) {

        // skip if privacy restricted
        if (hiddenUserIds.contains(member.getEid())) {
            continue;
        }

        // now filter out users based on their role
        if (filterRoles) {
            String memberRoleId = member.getRole();
            if (ArrayUtils.contains(visibleRoles, memberRoleId)) {
                filtered.add(member);
            }
        } else {
            filtered.add(member);
        }
    }

    if (log.isDebugEnabled())
        log.debug("filteredMembership.size(): " + filtered.size());

    return filtered;
}

From source file:org.soaplab.services.metadata.InputPropertyDef.java

/**************************************************************************
 * As toString() but with indentations.
 **************************************************************************/
public String format(int indent) {
    String tabs = GenUtils.indent(indent);
    StringBuilder buf = new StringBuilder();
    buf.append(tabs + "[InputPropertyDef]\n");
    buf.append(tabs + "\tname = " + name + "\n");
    buf.append(tabs + "\ttype = " + type + "\n");
    buf.append(tabs + "\tmandatory = " + mandatory + "\n");
    buf.append(tabs + "\tdefault value = " + defaultValue + "\n");
    buf.append(tabs + "\tallowed values = " + ArrayUtils.toString(possibleValues) + "\n");
    return buf.toString();
}