Example usage for org.springframework.util MultiValueMap size

List of usage examples for org.springframework.util MultiValueMap size

Introduction

In this page you can find the example usage for org.springframework.util MultiValueMap size.

Prototype

int size();

Source Link

Document

Returns the number of key-value mappings in this map.

Usage

From source file:org.crazydog.util.spring.CollectionUtils.java

/**
 * Return an unmodifiable view of the specified multi-value map.
 *
 * @param map the map for which an unmodifiable view is to be returned.
 * @return an unmodifiable view of the specified multi-value map.
 * @since 3.1/*from   w w w .  jav  a 2  s  .  com*/
 */
@SuppressWarnings("unchecked")
public static <K, V> MultiValueMap<K, V> unmodifiableMultiValueMap(
        MultiValueMap<? extends K, ? extends V> map) {
    org.springframework.util.Assert.notNull(map, "'map' must not be null");
    Map<K, List<V>> result = new LinkedHashMap<K, List<V>>(map.size());
    for (Map.Entry<? extends K, ? extends List<? extends V>> entry : map.entrySet()) {
        List<? extends V> values = Collections.unmodifiableList(entry.getValue());
        result.put(entry.getKey(), (List<V>) values);
    }
    Map<K, List<V>> unmodifiableMap = Collections.unmodifiableMap(result);
    return toMultiValueMap(unmodifiableMap);
}

From source file:com.ephesoft.dcma.util.WebServiceUtil.java

/**
 * Validates the inputs supplied to the Split API.
 *
 * @param fileMap MultiValueMap&lt;String, MultipartFile&gt; of uploaded files to split
 * @param isGSTool boolean, true when the GhostScript tool is to be used
 * @param outputParams {@link String} output parameters (not validated here)
 * @param inputParams {@link String} input parameters; required when {@code isGSTool} is true
 * @return {@link String} the empty string when validation passes, otherwise a
 *         message describing the first validation failure
 */
public static String validateSplitAPI(MultiValueMap<String, MultipartFile> fileMap, boolean isGSTool,
        String outputParams, String inputParams) {
    String results = EMPTY_STRING;
    if (fileMap.isEmpty()) {
        results = "No files provided to split";
    } else if (isGSTool && (inputParams == null || inputParams.isEmpty())) {
        // Null-guard added: a null inputParams previously threw a
        // NullPointerException instead of returning a validation message.
        results = "Input Params expected with GhostScript tool flag. Please set the input params.";
    }
    return results;
}

From source file:io.curly.bloodhound.query.PossibilitiesResolver.java

/**
 * Main method to convert the user-inputted keys into our standardized pattern.
 *
 * @param raw the map containing non-standardized keys; must not be null
 * @return a transformed new map holding the same values under corrected keys
 */
@Override
public MultiValueMap<String, String> resolve(MultiValueMap<String, String> raw) {
    Assert.notNull(raw, "Raw MultiValueMap must be not null!");
    final MultiValueMap<String, String> standardized = new LinkedMultiValueMap<>(raw.size());
    for (Map.Entry<String, List<String>> entry : raw.entrySet()) {
        transform(standardized, entry.getKey(), entry.getValue());
    }
    return standardized;
}

From source file:org.socialsignin.springsocial.security.userdetails.SpringSocialSecurityUserDetailsService.java

/**
 * Flattens all of the user's provider connections into a single list.
 *
 * @param connectionRepository repository holding the connections, keyed by provider id
 * @param userName the user name (unused here; retained for interface compatibility)
 * @return a flat list of every connection across all providers; empty when there are none
 */
private List<Connection<?>> getConnections(ConnectionRepository connectionRepository, String userName) {
    MultiValueMap<String, Connection<?>> connections = connectionRepository.findAllConnections();
    List<Connection<?>> allConnections = new ArrayList<Connection<?>>();
    // addAll replaces the redundant size() > 0 guard and the manual
    // element-by-element inner copy loop of the original.
    for (List<Connection<?>> connectionList : connections.values()) {
        allConnections.addAll(connectionList);
    }
    return allConnections;
}

From source file:org.awesomeagile.webapp.security.AwesomeAgileConnectionRepositoryTest.java

@Test
public void testFindAllConnections() throws Exception {
    // Stub the factory so any ConnectionData it receives yields the mock
    // connection, capturing that data for verification at the end.
    Connection connection = mock(Connection.class);
    ArgumentCaptor<ConnectionData> connectionDataCaptor = ArgumentCaptor.forClass(ConnectionData.class);
    when(connectionFactoryOne.createConnection(connectionDataCaptor.capture())).thenReturn(connection);
    MultiValueMap<String, Connection<?>> allConnections = connectionRepository.findAllConnections();
    assertNotNull(allConnections);
    // size() counts provider keys, not connections: exactly one provider expected.
    assertEquals(1, allConnections.size());
    List<Connection<?>> providerOneConnections = allConnections.get(PROVIDER_ONE);
    assertThat(providerOneConnections, contains(connection));
    // The data handed to the factory must match the persisted test user.
    ConnectionData connectionData = connectionDataCaptor.getValue();
    assertConnectionData(user(), connectionData);
}

From source file:com.vedri.mtp.frontend.support.stomp.DefaultSubscriptionRegistry.java

/**
 * Filters the matched subscriptions down to those whose selector expression
 * (if any) evaluates to {@code true} against the given message.
 *
 * @param allMatches all matched subscriptions, keyed by session id with
 *        subscription ids as values
 * @param message the message selector expressions are evaluated against
 * @return the filtered matches; returns {@code allMatches} unchanged when no
 *         selector header is in use anywhere
 */
private MultiValueMap<String, String> filterSubscriptions(MultiValueMap<String, String> allMatches,
        Message<?> message) {

    // Fast path: skip per-subscription work entirely if no session ever
    // registered a selector header.
    if (!this.selectorHeaderInUse) {
        return allMatches;
    }
    // Created lazily, only once the first selector expression is found.
    EvaluationContext context = null;
    MultiValueMap<String, String> result = new LinkedMultiValueMap<String, String>(allMatches.size());
    for (String sessionId : allMatches.keySet()) {
        for (String subId : allMatches.get(sessionId)) {
            SessionSubscriptionInfo info = this.subscriptionRegistry.getSubscriptions(sessionId);
            if (info == null) {
                // Session was removed concurrently; drop its matches.
                continue;
            }
            Subscription sub = info.getSubscription(subId);
            if (sub == null) {
                // Subscription was removed concurrently; drop it.
                continue;
            }
            Expression expression = sub.getSelectorExpression();
            if (expression == null) {
                // No selector on this subscription: it always matches.
                result.add(sessionId, subId);
                continue;
            }
            if (context == null) {
                context = new StandardEvaluationContext(message);
                context.getPropertyAccessors().add(new SimpMessageHeaderPropertyAccessor());
            }
            try {
                if (expression.getValue(context, boolean.class)) {
                    result.add(sessionId, subId);
                }
            } catch (SpelEvaluationException ex) {
                // Common when a selector references an absent header; debug only.
                if (logger.isDebugEnabled()) {
                    logger.debug("Failed to evaluate selector: " + ex.getMessage());
                }
            } catch (Throwable ex) {
                logger.debug("Failed to evaluate selector", ex);
            }
        }
    }
    return result;
}

From source file:com.svds.resttest.services.GenericDataService.java

/**
 * Build and run a COUNT query against the configured database engine.
 *
 * @param databaseEngine    Database engine to query (hive or impala)
 * @param tableName         Table name to query
 * @param requestParams     Parameters for WHERE clause
 * @return                  Output of count query; on failure the status is
 *                          "ERROR" and the message carries the cause
 */
public GenericCountOutput runQueryCount(String databaseEngine, String tableName,
        MultiValueMap<String, String> requestParams) {

    GenericCountOutput genericCountOutput = new GenericCountOutput();
    genericCountOutput.setName("GenericDataService.runQueryCount");
    genericCountOutput.setRequestParams(requestParams);
    genericCountOutput.setDatabaseEngine(databaseEngine);
    genericCountOutput.setTableName(tableName);

    StringBuilder connectionURL = new StringBuilder();
    connectionURL.append(DATABASES_BUNDLE.getString(databaseEngine));
    LOG.info("connectionURL: " + connectionURL.toString());

    StringBuilder sql = new StringBuilder();
    sql.append("SELECT COUNT(1) FROM ");
    sql.append(TABLES_BUNDLE.getString(tableName));

    try {

        LOG.info("RequestParams: " + requestParams.size());
        if (!requestParams.isEmpty()) {
            sql.append(" WHERE ");
            sql.append(BuildWhereClause.buildWhereClause(requestParams));
        }
        LOG.info("sql: " + sql.toString());
        genericCountOutput.setSql(sql.toString());
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // try-with-resources closes rs, pstmt and connection in the correct
        // order; the original leaked the statement and result set and could
        // NPE in its finally block when getConnection itself failed.
        try (Connection connection = DriverManager.getConnection(connectionURL.toString(), "hadoop", "");
                PreparedStatement pstmt = connection.prepareStatement(sql.toString());
                ResultSet rs = pstmt.executeQuery()) {

            while (rs.next()) {
                genericCountOutput.setCount(rs.getInt(1));
                LOG.info("Count: " + rs.getInt(1));
            }
        }

        genericCountOutput.setStatus("OK");
    } catch (Exception e) {
        LOG.error("GenericDataService.runQueryCount(): " + e.getMessage(), e);
        genericCountOutput.setMessage(e.getMessage());
        genericCountOutput.setStatus("ERROR");
    }

    return genericCountOutput;
}

From source file:com.svds.resttest.services.GenericDataService.java

/**
* Build and run a SELECT query/* ww  w .j  a va 2  s  .  c  o  m*/
* 
* @param databaseEngine    Database engine to query (hive or impala)
* @param tableName         Table name to query
* @param requestParams     Parameters for WHERE clause
* @return                  Output of select query
*/
public GenericResultsOutput runQueryResults(String databaseEngine, String tableName,
        MultiValueMap<String, String> requestParams) {

    GenericResultsOutput genericResultsOutput = new GenericResultsOutput();
    genericResultsOutput.setName("GenericDataService.runQueryCount");
    genericResultsOutput.setRequestParams(requestParams);
    genericResultsOutput.setDatabaseEngine(databaseEngine);
    genericResultsOutput.setTableName(tableName);

    Connection connection = null;
    PreparedStatement pstmt = null;
    ResultSet rs = null;

    int limit = ROW_LIMIT;

    if (requestParams.containsKey("limit")) {
        limit = new Integer(requestParams.getFirst("limit"));
    }

    StringBuilder connectionURL = new StringBuilder();
    connectionURL.append(DATABASES_BUNDLE.getString(databaseEngine));
    LOG.info("connectionURL: " + connectionURL.toString());

    StringBuilder sql = new StringBuilder();
    sql.append("SELECT * FROM ");
    sql.append(TABLES_BUNDLE.getString(tableName));

    try {

        LOG.info("RequestParams: " + requestParams.size());
        if (requestParams.size() > 0) {
            sql.append(" WHERE ");
            sql.append(BuildWhereClause.buildWhereClause(requestParams));
        }
        sql.append(" limit ").append(limit);
        LOG.info("sql: " + sql.toString());
        genericResultsOutput.setSql(sql.toString());
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        connection = DriverManager.getConnection(connectionURL.toString(), "hadoop", "");
        pstmt = connection.prepareStatement(sql.toString());

        rs = pstmt.executeQuery();

        int rowCount = 0;
        while (rs.next()) {

            if (genericResultsOutput.getMetaData().size() > 0) {
                //Got it!!
            } else {
                Map<String, Integer> metaDataSet = new HashMap<>();
                this.getMetaData(rs, metaDataSet, genericResultsOutput);
            }

            List<Object> resultsArrayList = new ArrayList<>();
            this.resultsWithoutMetaData(rs, genericResultsOutput, resultsArrayList);
            genericResultsOutput.getResults().add(resultsArrayList);

            rowCount++;
        }

        genericResultsOutput.setCount(rowCount);
        genericResultsOutput.setStatus("OK");
    } catch (Exception e) {
        LOG.error("GenericDataService.runQueryResults(): " + e.getMessage(), e);
        genericResultsOutput.setMessage(e.getMessage());
        genericResultsOutput.setStatus("ERROR");
    } finally {
        try {
            rs.close();
        } catch (Exception e) {
            LOG.error("GenericDataService.runQueryResults() Close result set: " + e.getMessage(), e);
        }
        try {
            pstmt.close();
        } catch (Exception e) {
            LOG.error("GenericDataService.runQueryResults() Close prepared statement: " + e.getMessage(), e);
        }
        try {
            connection.close();
        } catch (Exception e) {
            LOG.error("GenericDataService.runQueryResults() Close connection: " + e.getMessage(), e);
        }
    }

    return genericResultsOutput;
}

From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java

/**
 * Web service that imports a batch class from a multipart request carrying
 * exactly two files: an ImportBatchClassOptions XML and a batch-class ZIP.
 * Any failure is reported to the client as an HTTP 500 with a message.
 *
 * @param req the incoming request; must be a DefaultMultipartHttpServletRequest
 * @param resp the response used to report errors
 */
@RequestMapping(value = "/importBatchClass", method = RequestMethod.POST)
@ResponseBody
public void importBatchClass(final HttpServletRequest req, final HttpServletResponse resp) {
    String respStr = WebServiceUtil.EMPTY_STRING;
    logger.info("Start processing import batch class web service");
    String workingDir = WebServiceUtil.EMPTY_STRING;
    if (req instanceof DefaultMultipartHttpServletRequest) {
        InputStream instream = null;
        OutputStream outStream = null;
        final String webServiceFolderPath = bsService.getWebServicesFolderPath();
        final DefaultMultipartHttpServletRequest mPartReq = (DefaultMultipartHttpServletRequest) req;
        final MultiValueMap<String, MultipartFile> fileMap = mPartReq.getMultiFileMap();

        // Exactly two uploads are expected: the options XML and the ZIP.
        if (fileMap.size() == 2) {
            try {
                workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath);
                ImportBatchClassOptions option = null;
                String zipFilePath = WebServiceUtil.EMPTY_STRING;
                // Route each upload by extension: XML is unmarshalled into the
                // options object; ZIP is streamed to the working directory.
                for (final String fileName : fileMap.keySet()) {
                    final MultipartFile f = mPartReq.getFile(fileName);
                    instream = f.getInputStream();
                    if (fileName.toLowerCase().indexOf(FileType.XML.getExtension().toLowerCase()) > -1) {
                        final Source source = XMLUtil.createSourceFromStream(instream);
                        option = (ImportBatchClassOptions) batchSchemaDao.getJAXB2Template()
                                .getJaxb2Marshaller().unmarshal(source);
                        continue;
                    } else if (fileName.toLowerCase().indexOf(FileType.ZIP.getExtension().toLowerCase()) > -1) {
                        zipFilePath = workingDir + File.separator + fileName;
                        logger.info("Zip file is using for importing batch class is " + zipFilePath);
                        final File file = new File(zipFilePath);
                        outStream = new FileOutputStream(file);
                        final byte[] buf = new byte[WebServiceUtil.bufferSize];
                        int len;
                        while ((len = instream.read(buf)) > 0) {
                            outStream.write(buf, 0, len);
                        }
                        continue;
                    }
                }

                // Both the options XML and the ZIP must have been supplied.
                if (option != null && !zipFilePath.isEmpty()) {
                    final Map<Boolean, String> results = importBatchService.validateInputXML(option);
                    final String errorMessg = results.get(Boolean.FALSE);
                    if (errorMessg != null && !errorMessg.isEmpty()) {
                        respStr = errorMessg;
                    } else {
                        // Unzip next to the uploaded archive, into a directory
                        // named after the archive (extension stripped).
                        final File tempZipFile = new File(zipFilePath);
                        final String tempOutputUnZipDir = tempZipFile.getParent() + File.separator + tempZipFile
                                .getName().substring(0, tempZipFile.getName().indexOf(WebServiceUtil.DOT));
                        try {
                            FileUtils.unzip(tempZipFile, tempOutputUnZipDir);
                        } catch (final Exception e) {
                            // Unzip failed: clean up and report before continuing.
                            FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
                            tempZipFile.delete();
                            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                    "Unable to unzip file. Returning without processing the results.");
                        }

                        option.setZipFilePath(tempOutputUnZipDir);
                        logger.info("Importing batch class");
                        final boolean isDeployed = deploymentService.isDeployed(option.getName());
                        final Map<Boolean, String> resultsImport = importBatchService.importBatchClass(option,
                                isDeployed, true, null);
                        final String errorMessgImport = resultsImport.get(Boolean.FALSE);
                        if (errorMessgImport != null && !errorMessgImport.isEmpty()) {
                            respStr = errorMessgImport;
                        }
                    }
                } else {
                    respStr = "Improper input to the server.Proceeding without processing";
                }

            } catch (final XmlMappingException xmle) {
                respStr = "Error in mapping input XML in the desired format. Please send it in the specified format. Detailed exception is "
                        + xmle;
            } catch (final Exception e) {
                respStr = "Internal Server error.Please check logs for further details." + e.getMessage();
            } finally {
                IOUtils.closeQuietly(instream);
                IOUtils.closeQuietly(outStream);

                FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            }
        } else {
            respStr = "Improper input to server. Expected two files: zip and xml file. Returning without processing the results.";
        }
    } else {
        respStr = "Improper input to server. Expected multipart request. Returning without processing the results.";
    }
    // Final cleanup of the working directory (also done in the finally block
    // above when the processing branch was entered).
    if (!workingDir.isEmpty()) {
        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
    }
    if (!respStr.isEmpty()) {
        try {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr);
        } catch (final IOException ioe) {
            // Response already committed or client gone; nothing more to do.
        }
    }
}

From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java

/**
 * Web service that generates key-value extraction field patterns from a single
 * uploaded HOCR HTML file plus a mandatory "fieldValue" request parameter. The
 * resulting KVExtractionFieldPatterns is marshalled straight to the response;
 * any failure is reported as an HTTP 500 with a message.
 *
 * @param req the incoming request; must be a multipart request with one HTML file
 * @param resp the response used to stream results or report errors
 */
@RequestMapping(value = "/extractFieldFromHocr", method = RequestMethod.POST)
@ResponseBody
public void extractFieldFromHocr(final HttpServletRequest req, final HttpServletResponse resp) {
    logger.info("Start processing web service for extractFieldFromHocr.");
    String respStr = WebServiceUtil.EMPTY_STRING;
    String workingDir = WebServiceUtil.EMPTY_STRING;

    if (req instanceof DefaultMultipartHttpServletRequest) {
        try {
            final String webServiceFolderPath = bsService.getWebServicesFolderPath();
            workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath);

            InputStream instream = null;
            OutputStream outStream = null;

            final DefaultMultipartHttpServletRequest multipartReq = (DefaultMultipartHttpServletRequest) req;
            // Locate the mandatory "fieldValue" parameter (name compared
            // case-insensitively).
            String fieldValue = WebServiceUtil.EMPTY_STRING;
            for (final Enumeration<String> params = multipartReq.getParameterNames(); params
                    .hasMoreElements();) {
                final String paramName = params.nextElement();
                if (paramName.equalsIgnoreCase("fieldValue")) {
                    fieldValue = multipartReq.getParameter(paramName);
                    break;
                }
            }

            if (fieldValue == null || fieldValue.isEmpty()) {
                respStr = "Field Value not specified.";
            }
            if (respStr.isEmpty()) {
                final MultiValueMap<String, MultipartFile> fileMap = multipartReq.getMultiFileMap();

                // Exactly one uploaded file is expected, and it must be HTML.
                if (fileMap.size() == 1) {
                    String hocrFileName = "";
                    for (final String fileName : fileMap.keySet()) {
                        // only single html file is expected as input
                        if (fileName.toLowerCase().indexOf(FileType.HTML.getExtension()) > -1) {
                            // only HTML file is expected
                            hocrFileName = fileName;
                            final MultipartFile f = multipartReq.getFile(fileName);
                            instream = f.getInputStream();
                            // Copy the upload into the working directory.
                            final File file = new File(workingDir + File.separator + fileName);
                            outStream = new FileOutputStream(file);
                            final byte buf[] = new byte[1024];
                            int len;
                            while ((len = instream.read(buf)) > 0) {
                                outStream.write(buf, 0, len);
                            }
                            if (instream != null) {
                                instream.close();
                            }

                            if (outStream != null) {
                                outStream.close();
                            }
                            break;
                        } else {
                            respStr = "Improper input to server. Expected only one html file. Returning without processing the results.";
                        }
                    }
                    if (respStr.isEmpty()) {
                        String fileName = workingDir + File.separator + hocrFileName;

                        // generate hocr file from html file.
                        HocrPages hocrPages = new HocrPages();
                        List<HocrPage> hocrPageList = hocrPages.getHocrPage();
                        HocrPage hocrPage = new HocrPage();
                        String pageID = "PG0";
                        hocrPage.setPageID(pageID);
                        hocrPageList.add(hocrPage);
                        bsService.hocrGenerationAPI(workingDir, "PG0", fileName, hocrPage);

                        // Derive candidate key-value extraction fields for the
                        // requested field value from the generated HOCR page.
                        List<KVExtraction> kvExtractionList = kvFieldService.createKeyValueFieldAPI(fieldValue,
                                hocrPage);

                        final KVExtractionFieldPatterns patterns = new KVExtractionFieldPatterns();

                        // Map each internal KVExtraction onto the externally
                        // marshalled KVExtractionFieldPattern representation.
                        final List<KVExtractionFieldPattern> pattern = patterns.getKVExtractionFieldPattern();
                        for (final KVExtraction eachKVExtraction : kvExtractionList) {
                            final KVExtractionFieldPattern kvField = new KVExtractionFieldPattern();
                            kvField.setDistance(eachKVExtraction.getDistance());
                            kvField.setFetchValue(eachKVExtraction.getFetchValue().name());
                            kvField.setKeyPattern(eachKVExtraction.getKeyPattern());
                            kvField.setLength(eachKVExtraction.getLength());
                            kvField.setLocation(eachKVExtraction.getLocationType().name());
                            kvField.setMultiplier(eachKVExtraction.getMultiplier());
                            kvField.setNoOfWords(eachKVExtraction.getNoOfWords() == null ? 0
                                    : eachKVExtraction.getNoOfWords());
                            kvField.setValuePattern(eachKVExtraction.getValuePattern());
                            kvField.setWidth(eachKVExtraction.getWidth());
                            kvField.setXOffset(eachKVExtraction.getXoffset());
                            kvField.setYOffset(eachKVExtraction.getYoffset());
                            pattern.add(kvField);
                        }
                        // Marshal the patterns directly onto the response stream.
                        StreamResult result;
                        try {
                            result = new StreamResult(resp.getOutputStream());
                            resp.setStatus(HttpServletResponse.SC_OK);
                            batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(patterns, result);
                        } catch (final IOException e) {
                            try {
                                resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                        "Internal Server error.Please check logs for further details."
                                                + e.getMessage());
                            } catch (final IOException ioe) {
                                // Response already committed; nothing more to do.
                            }
                        }
                    }
                } else {
                    respStr = "Improper input to server. Expected only one html file. Returning without processing the results.";
                }
            }
        } catch (final DCMAException dcmae) {
            respStr = "Error in processing request. Detailed exception is " + dcmae;
        } catch (final Exception e) {
            respStr = "Internal Server error.Please check logs for further details." + e;
            if (!workingDir.isEmpty()) {
                FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            }
        }
    } else {
        respStr = "Improper input to server. Expected multipart request. Returing without processing the results.";
    }

    // Always clean up the working directory created for this request.
    if (!workingDir.isEmpty()) {
        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
    }
    if (!respStr.isEmpty()) {
        try {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr);
        } catch (final IOException ioe) {
            // Response already committed or client gone; nothing more to do.
        }
    }
}