Example usage for org.apache.commons.collections.set ListOrderedSet ListOrderedSet

List of usage examples for org.apache.commons.collections.set ListOrderedSet ListOrderedSet

Introduction

On this page you can find example usage of the ListOrderedSet constructor from org.apache.commons.collections.set.

Prototype

public ListOrderedSet() 

Source Link

Document

Constructs a new empty ListOrderedSet using a HashSet and an ArrayList internally.

Usage

From source file:it.geosolutions.geobatch.opensdi.ndvi.NDVIStatsAction.java

/**
 * Generates a CSV file with zonal NDVI statistics for one decadal raster.
 * <p>
 * Year, month and dekad are parsed from the raster file name, which is
 * expected to follow the pattern {@code dv_20130101_20130110.tif}. For each
 * feature (zone) in {@code fc} the mean pixel value of {@code coverage} is
 * computed (restricted to the mask ROI), converted to a physical NDVI value
 * and appended as one CSV row.
 *
 * @param coverage tiff file to use in stats
 * @param fc zones to obtain the NDVI
 * @param classifier zone classifier; district/province classifiers fill the corresponding columns
 * @param mask mask type used to build the raster-space ROI
 * @param ndviFileName name of the NDVI file (encodes year, month and dekad)
 * @param csvSeparator separator character for the generated CSV
 * @param maskFullPath full path to the mask data
 * 
 * @throws Exception on any statistics or CSV-generation failure
 */
private void generateCSV(GridCoverage2D coverage, SimpleFeatureCollection fc, CLASSIFIER_TYPE classifier,
        MASK_TYPE mask, String ndviFileName, String csvSeparator, String maskFullPath) throws Exception {

    // Target path of the CSV file to generate.
    String csvPath = getCSVFullPath(classifier, mask, ndviFileName);

    // Column names for the CSV header (depend on the classifier type).
    List<String> header = getHeader(classifier);

    // Per-row values; year/month/dekad/factor are constant for the whole file.
    String year = "";
    String month = "";
    String dekad = "";
    String factor = "NDVI_avg";
    String distr = "";
    String prov = "";

    // Obtain year, month, dekad from the name of the file, e.g.
    // dv_20130101_20130110.tif: chars 3-7 = year, 7-9 = month, 9-11 = start day.
    year = ndviFileName.substring(3, 7);
    month = ndviFileName.substring(7, 9);
    // Strip the leading zero ("01".."09") so Integer.decode below does not
    // try to parse the value as octal (e.g. "08" would fail).
    if (month.startsWith("0")) {
        month = month.replace("0", "");
    }
    month = getMonthName(Integer.decode(month));
    dekad = ndviFileName.substring(9, 11);
    // Start day -> dekad index: "01" -> first, "11" -> second, else third.
    dekad = dekad.equals("01") ? "1" : dekad.equals("11") ? "2" : "3";

    // Ordered set of CSV rows (header row first); ListOrderedSet preserves
    // insertion order while rejecting duplicate rows.
    @SuppressWarnings("unchecked")
    Set<Object[]> data = new ListOrderedSet();
    data.add(header.toArray());
    int i = 1;

    // One FeatureAggregation per zone, in feature-iteration order.
    List<FeatureAggregation> result = new ArrayList<FeatureAggregation>();

    // Only one band (band 0) and a single MEAN statistic are computed.
    int[] bands = new int[] { 0 };
    StatsType[] stats = new StatsType[] { StatsType.MEAN };

    // get the world to grid transformation
    final GridGeometry2D gg2D = coverage.getGridGeometry();
    final MathTransform worldToGrid = gg2D.getGridToCRS(PixelInCell.CELL_CORNER).inverse();
    final CoordinateReferenceSystem rasterCRS = gg2D.getCoordinateReferenceSystem();

    // ROI for the MASK in raster space
    final ROIGeometry maskROI = getROIMask(mask, worldToGrid, rasterCRS, maskFullPath);

    // getting the ROI in raster space for the zones
    final List<ROI> zonesROI = new ArrayList<ROI>();
    SimpleFeatureIterator iterator = null;

    // Selection of the FeatureCollection CoordinateReferenceSystem
    final CoordinateReferenceSystem featureCollectionCRS = fc.getSchema().getCoordinateReferenceSystem();
    if (featureCollectionCRS == null) {
        throw new IllegalArgumentException("The input features need a CRS");
    }
    // do we need to reproject the zones into the raster CRS?
    if (!CRS.equalsIgnoreMetadata(rasterCRS, featureCollectionCRS)) {
        // create a transformation
        final MathTransform transform = CRS.findMathTransform(featureCollectionCRS, rasterCRS, true);// lenient tranformation
        if (!transform.isIdentity()) {
            // reproject
            fc = new ReprojectProcess().execute(fc, featureCollectionCRS, rasterCRS);

        }
    }
    // Cycle on the features: build a raster-space ROI per zone and a matching
    // FeatureAggregation row skeleton (NDVI value is filled in later).
    try {
        iterator = fc.features();
        listenerForwarder.progressing(1f, "Classifing zones...");

        while (iterator.hasNext()) {
            SimpleFeature feature = iterator.next();

            // zones ROI
            ROI transformedROI = new ROIGeometry(
                    JTS.transform((Geometry) feature.getDefaultGeometry(), worldToGrid));

            zonesROI.add(transformedROI);

            // CSV Data: province is filled for both classifiers, district only
            // for the district classifier.
            if (CLASSIFIER_TYPE.DISTRICT.equals(classifier) || CLASSIFIER_TYPE.PROVINCE.equals(classifier)) {
                prov = feature.getProperty("province").getValue().toString();
            }
            if (CLASSIFIER_TYPE.DISTRICT.equals(classifier)) {
                distr = feature.getProperty("district").getValue().toString();
            }
            Map<String, Object> parameters = new HashMap<String, Object>();
            parameters.put("rowId", i++);
            parameters.put("year", year);
            parameters.put("mon", month);
            parameters.put("dec", dekad);
            parameters.put("factor", factor);
            parameters.put("prov", prov);
            parameters.put("distr", distr);
            // NDVI_avg is added after the zonal statistics are computed below.
            FeatureAggregation featureAgregation = new FeatureAggregation(parameters, header, ",", true);
            result.add(featureAgregation);
        }
        listenerForwarder.progressing(10f, ("Found " + result.size() + " zones for statistic generation"));
    } finally {
        if (iterator != null)
            iterator.close();
        // NOTE(review): dbStore looks like an instance field opened elsewhere;
        // confirm that disposing it on every invocation is intended.
        if (dbStore != null) {
            dbStore.dispose();
        }
    }
    // Definition of the ZonalStats operation
    listenerForwarder.progressing(15f, "Zonal statistics");
    RenderedOp op = ZonalStatsDescriptor.create(coverage.getRenderedImage(), null, null, zonesROI, null,
            maskROI, false, bands, stats, null, null, null, null, false, null);
    // Calculation of the ZonalStats property
    @SuppressWarnings("unchecked")
    List<ZoneGeometry> statsResult = (List<ZoneGeometry>) op.getProperty(ZonalStatsDescriptor.ZS_PROPERTY);
    int index = 0;
    listenerForwarder.progressing(90f, "Result Post Processing");
    // Assumes statsResult is in the same order as zonesROI/result — TODO confirm.
    for (ZoneGeometry statResult : statsResult) {
        FeatureAggregation featureAgregation = result.get(index++);
        Double mean = (Double) statResult.getStatsPerBandNoClassifierNoRange(0)[0].getResult();
        // A mean of exactly 0 is skipped silently (no row is written).
        if (mean != 0.0) {
            // apply NDVI: Physical value = pixel value*0.004 - 0.1
            Double ndvi = (mean * 0.004) - 0.1;
            featureAgregation.getProperties().put("NDVI_avg", ndvi.toString());
            if (mean > 0.0) {
                // include data
                data.add(featureAgregation.toRow());
            } else {
                // A negative mean should never occur: log and exclude the row.
                LOGGER.error("Zonal statistics corrupted not included for: " + featureAgregation.toRow());
            }
        }
    }

    File csv = new File(csvPath);
    listenerForwarder.progressing(95f, "writing output file...");
    CSVWriter.writeCsv(LOGGER, data, csv, csvSeparator, true);
    listenerForwarder.progressing(100f, "output file " + csvPath + " generated successfully!");
}

From source file:com.jaspersoft.jasperserver.ws.axis2.scheduling.ReportJobBeanTraslator.java

/**
 * Converts an array of values into the collection (or array) type expected
 * by a report parameter.
 *
 * @param parameterType the declared type of the report parameter
 * @param valueArray the values to convert, as an array
 * @return a collection or array matching {@code parameterType}
 * @throws JSException if {@code parameterType} is not a supported collection type
 */
protected Object toCollectionValue(Class parameterType, Object valueArray) {
    final int length = Array.getLength(valueArray);
    final Object converted;
    if (parameterType.equals(Object.class) || parameterType.equals(Collection.class)
            || parameterType.equals(Set.class)) {
        // Ordered set: keeps the incoming element order while dropping duplicates.
        final Collection elements = new ListOrderedSet();
        for (int idx = 0; idx < length; ++idx) {
            elements.add(Array.get(valueArray, idx));
        }
        converted = elements;
    } else if (parameterType.equals(List.class)) {
        final Collection elements = new ArrayList(length);
        for (int idx = 0; idx < length; ++idx) {
            elements.add(Array.get(valueArray, idx));
        }
        converted = elements;
    } else if (parameterType.isArray()) {
        final Class componentType = parameterType.getComponentType();
        if (componentType.equals(valueArray.getClass().getComponentType())) {
            // Already an array of the expected component type; reuse as-is.
            converted = valueArray;
        } else {
            // Copy element by element into an array of the requested component type.
            final Object copy = Array.newInstance(componentType, length);
            for (int idx = 0; idx < length; ++idx) {
                Array.set(copy, idx, Array.get(valueArray, idx));
            }
            converted = copy;
        }
    } else {
        throw new JSException("report.scheduling.ws.collection.parameter.type.not.supported",
                new Object[] { parameterType.getName() });
    }
    return converted;
}

From source file:com.nextep.designer.beng.services.impl.DeliveryService.java

@SuppressWarnings("unchecked")
@Override
public List<IVersionInfo> buildDependencies(List<IVersionInfo> processed, IDeliveryModule module) {
    final IVersionInfo moduleRelease = module.getTargetRelease();
    // Guard against cycles and duplicates: a release already processed is skipped.
    if (processed.contains(moduleRelease)) {
        return Collections.EMPTY_LIST;
    }
    processed.add(moduleRelease);

    // Collect the module's direct dependencies, preserving declaration order
    // and removing duplicates.
    final ListOrderedSet containers = new ListOrderedSet();
    for (final IVersionInfo dependency : module.getDependencies()) {
        containers.add(dependency);
    }
    // Recursively prepend transitive dependencies so prerequisites come first.
    // Iterate over a snapshot because containers is mutated inside the loop.
    for (final IVersionInfo dependency : new ArrayList<IVersionInfo>(containers)) {
        final IDeliveryModule dependencyModule = loadDelivery(dependency);
        if (dependencyModule == null) {
            throw new ErrorException(BengMessages.getString("missingDependentDelivery")); //$NON-NLS-1$
        }
        containers.addAll(0, buildDependencies(processed, dependencyModule));
    }
    return containers.asList();
}

From source file:net.sourceforge.vulcan.core.support.ProjectImporterImpl.java

/**
 * Imports one or more projects starting from the given repository URL.
 * <p>
 * The start URL (and, when requested, every sub-project URL discovered by
 * the build-tool configurator) is resolved against the available repository
 * and build-tool plugins; a project configuration is created for each module
 * and all resulting configuration updates are applied in a single batch.
 *
 * @param startUrl URL of the root module to import
 * @param username repository credential
 * @param password repository credential
 * @param createSubprojects when true, sub-project URLs reported by the build
 *        configurator are queued for import as well
 * @param nameCollisionResolutionMode how clashes with existing project names are handled
 * @param schedulerNames schedulers assigned to each created project
 * @param labels labels applied to each created project
 * @param statusDto optional progress receiver; may be null
 * @throws ConfigException if no plugin can handle a URL or configuration fails
 * @throws StoreException on persistence errors
 * @throws DuplicateNameException if a name collision cannot be resolved
 */
@Override
public void createProjectsForUrl(String startUrl, String username, String password, boolean createSubprojects,
        NameCollisionResolutionMode nameCollisionResolutionMode, String[] schedulerNames, Set<String> labels,
        ProjectImportStatusDto statusDto) throws ConfigException, StoreException, DuplicateNameException {
    final List<RepositoryAdaptorPlugin> repositoryPlugins = pluginManager
            .getPlugins(RepositoryAdaptorPlugin.class);
    final List<BuildToolPlugin> buildToolPlugins = pluginManager.getPlugins(BuildToolPlugin.class);

    // Work queue of module URLs; ListOrderedSet deduplicates while preserving
    // discovery order so the indexed loop below can safely append to it.
    final ListOrderedSet urls = new ListOrderedSet();
    urls.add(startUrl);

    final List<ProjectConfigDto> newProjects = new ArrayList<ProjectConfigDto>();
    final List<ProjectRepositoryConfigurator> repoConfigurators = new ArrayList<ProjectRepositoryConfigurator>();

    final List<String> existingProjectNames = new ArrayList<String>(stateManager.getProjectConfigNames());

    // Indexed loop (not an iterator) because sub-project URLs may be appended
    // to the set while iterating.
    for (int i = 0; i < urls.size(); i++) {
        final String url = (String) urls.get(i);

        if (statusDto != null) {
            statusDto.setCurrentUrl(url);
            statusDto.setNumProjectsCreated(newProjects.size());
            statusDto.setNumRemainingModules(urls.size() - i);
        }

        final ProjectConfigDto projectConfig = new ProjectConfigDto();
        projectConfig.setSchedulerNames(schedulerNames);

        final ProjectRepositoryConfigurator repoConfigurator = createRepositoryConfiguratorForUrl(
                repositoryPlugins, projectConfig, url, username, password);

        File buildSpecFile = null;
        final ProjectBuildConfigurator buildConfigurator;

        try {
            buildSpecFile = downloadBuildSpecFile(repoConfigurator);
            final Document xmlDocument = tryParse(buildSpecFile);
            buildConfigurator = createBuildToolConfigurator(buildToolPlugins, projectConfig, url, buildSpecFile,
                    xmlDocument);
        } finally {
            // The downloaded build spec is only needed during configuration.
            deleteIfPresent(buildSpecFile);
        }

        final boolean shouldCreate = configureProject(projectConfig, repoConfigurator, buildConfigurator, url,
                existingProjectNames, nameCollisionResolutionMode, createSubprojects, labels);

        if (createSubprojects) {
            final List<String> subprojectUrls = buildConfigurator.getSubprojectUrls();

            // Bug fix: previously makeAbsolute() was invoked on subprojectUrls
            // BEFORE the null check, risking an NPE when a configurator
            // reports no sub-projects. Check for null first.
            if (subprojectUrls != null) {
                makeAbsolute(url, subprojectUrls);
                urls.addAll(subprojectUrls);
            }
        }

        if (shouldCreate) {
            existingProjectNames.add(projectConfig.getName());

            newProjects.add(projectConfig);
            repoConfigurators.add(repoConfigurator);

            log.info("Configured project " + projectConfig.getName());
        } else {
            log.info("Skipping project " + projectConfig.getName());
        }
    }

    // Give each repository configurator a chance to update its plugin's
    // global configuration; one shared working copy is kept per plugin id.
    final Map<String, PluginConfigDto> pluginConfigs = new HashMap<String, PluginConfigDto>();

    for (int i = 0; i < newProjects.size(); i++) {
        final ProjectConfigDto projectConfig = newProjects.get(i);
        try {
            final String pluginId = projectConfig.getRepositoryAdaptorPluginId();
            PluginConfigDto pluginConfig = pluginConfigs.get(pluginId);

            if (pluginConfig == null) {
                pluginConfig = (PluginConfigDto) pluginManager.getPluginConfigInfo(pluginId).copy();
            }

            // Only keep the copy if the configurator actually modified it.
            if (repoConfigurators.get(i).updateGlobalConfig(pluginConfig)) {
                pluginConfigs.put(pluginId, pluginConfig);
            }
        } catch (PluginNotConfigurableException ignore) {
            // Plugin has no global configuration to update; nothing to do.
        } catch (PluginNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    final ConfigUpdatesDto updates = new ConfigUpdatesDto();

    updates.setNewProjectConfigs(newProjects);

    if (!pluginConfigs.isEmpty()) {
        updates.setModifiedPluginConfigs(pluginConfigs);
    }

    try {
        stateManager.applyMultipleUpdates(updates);
    } catch (PluginNotFoundException e) {
        // Very unlikely: the plugin was present moments ago.
        throw new RuntimeException(e);
    }

    log.info("Successfully imported project(s) for URL " + startUrl);
}

From source file:org.apache.cayenne.gen.DataMapUtils.java

/**
 * Get list of parameter names in the same order as in qualifier.
 * /*from  w w  w  . ja v a2  s.  c o m*/
 * @param qualifierString
 *            to be parsed
 * @return List of parameter names.
 */
private Set parseQualifier(String qualifierString) {
    @SuppressWarnings("unchecked")
    Set<String> result = (Set<String>) new ListOrderedSet();
    Pattern pattern = Pattern.compile("\\$[\\w]+");
    Matcher matcher = pattern.matcher(qualifierString);
    while (matcher.find()) {
        String name = matcher.group();
        result.add(Util.underscoredToJava(name.substring(1), false));
    }

    return result;
}

From source file:org.apache.ddlutils.platform.mysql.MySqlBuilder.java

/**
 * {@inheritDoc}
 * <p>
 * MySQL-specific handling: add-column changes are pulled out of the change
 * list and applied first, in their original order, so that the generated
 * {@code ALTER TABLE ADD COLUMN ... AFTER} statements can reference columns
 * added just before. Remaining column/primary-key changes are then processed,
 * and all changed columns are finally altered via {@code MODIFY COLUMN}.
 */
protected void processTableStructureChanges(Database currentModel, Database desiredModel, Table sourceTable,
        Table targetTable, Map parameters, List changes) throws IOException {
    // in order to utilize the ALTER TABLE ADD COLUMN AFTER statement
    // we have to apply the add column changes in the correct order
    // thus we first gather all add column changes and then execute them
    ArrayList addColumnChanges = new ArrayList();

    for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
        TableChange change = (TableChange) changeIt.next();

        if (change instanceof AddColumnChange) {
            addColumnChanges.add((AddColumnChange) change);
            changeIt.remove();
        }
    }
    for (Iterator changeIt = addColumnChanges.iterator(); changeIt.hasNext();) {
        AddColumnChange addColumnChange = (AddColumnChange) changeIt.next();

        processChange(currentModel, desiredModel, addColumnChange);
        changeIt.remove();
    }

    // ListOrderedSet deduplicates columns touched by several ColumnChanges
    // while keeping their first-seen order.
    ListOrderedSet changedColumns = new ListOrderedSet();

    // we don't have to care about the order because the comparator will have ensured
    // that a add primary key change comes after all necessary columns are present
    for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
        TableChange change = (TableChange) changeIt.next();

        if (change instanceof RemoveColumnChange) {
            processChange(currentModel, desiredModel, (RemoveColumnChange) change);
            changeIt.remove();
        } else if (change instanceof AddPrimaryKeyChange) {
            processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change);
            changeIt.remove();
        } else if (change instanceof PrimaryKeyChange) {
            processChange(currentModel, desiredModel, (PrimaryKeyChange) change);
            changeIt.remove();
        } else if (change instanceof RemovePrimaryKeyChange) {
            processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change);
            changeIt.remove();
        } else if (change instanceof ColumnChange) {
            // we gather all changed columns because we can use the ALTER TABLE MODIFY COLUMN
            // statement for them
            changedColumns.add(((ColumnChange) change).getChangedColumn());
            changeIt.remove();
        }
    }
    for (Iterator columnIt = changedColumns.iterator(); columnIt.hasNext();) {
        Column sourceColumn = (Column) columnIt.next();
        Column targetColumn = targetTable.findColumn(sourceColumn.getName(),
                getPlatform().isDelimitedIdentifierModeOn());

        processColumnChange(sourceTable, targetTable, sourceColumn, targetColumn);
    }
}

From source file:org.apache.ddlutils.task.DumpMetadataTask.java

/**
 * Determines the columns that are present in the given result set.
 * /*www  .  j a va  2s  .c  o  m*/
 * @param resultSet The result set
 * @return The columns
 */
private Set getColumnsInResultSet(ResultSet resultSet) throws SQLException {
    ListOrderedSet result = new ListOrderedSet();
    ResultSetMetaData metaData = resultSet.getMetaData();

    for (int idx = 1; idx <= metaData.getColumnCount(); idx++) {
        result.add(metaData.getColumnName(idx).toUpperCase());
    }

    return result;
}

From source file:org.apache.jackrabbit.core.security.authentication.AbstractLoginModule.java

/**
 * Builds the set of principals for the authenticated user: the user
 * principal itself followed by every group it is a member of.
 *
 * @return a Collection of principals that contains the current user
 * principal and all groups it is member of.
 */
protected Set getPrincipals() {
    // ListOrderedSet (rather than HashSet) keeps the insertion order,
    // mirroring the order of principals in the Subject.
    final Set principals = new ListOrderedSet();
    principals.add(principal);
    final Iterator membership = principalProvider.getGroupMembership(principal);
    while (membership.hasNext()) {
        principals.add(membership.next());
    }
    return principals;
}

From source file:org.apache.jackrabbit.core.security.principal.DefaultPrincipalProvider.java

/**
 * Returns the (cached) group membership of the given user principal.
 *
 * @see PrincipalProvider#getGroupMembership(Principal)
 */
public PrincipalIterator getGroupMembership(Principal userPrincipal) {
    checkInitialized();
    Set mship;
    // Lookups and updates are guarded by the cache monitor so concurrent
    // callers do not compute the membership for the same user twice.
    synchronized (membershipCache) {
        mship = (Set) membershipCache.get(userPrincipal.getName());
        if (mship == null) {
            // ListOrderedSet keeps the order in which groups were collected.
            mship = new ListOrderedSet();
            // recursively collect group membership
            collectGroupMembership(userPrincipal, mship);

            // make sure everyone-group is not missing
            if (!mship.contains(everyonePrincipal) && everyonePrincipal.isMember(userPrincipal)) {
                mship.add(everyonePrincipal);
            }
            membershipCache.put(userPrincipal.getName(), mship);
        }
    }
    return new PrincipalIteratorAdapter(mship);

}

From source file:org.apache.ojb.broker.accesslayer.sql.SqlSelectStatement.java

/**
 * Return the Fields to be selected./*from w  ww .  j  a  v a2  s  . c  om*/
 *
 * @param cld the ClassDescriptor
 * @return the Fields to be selected
 */
protected FieldDescriptor[] buildFieldsForSelect(ClassDescriptor cld) {
    DescriptorRepository repository = cld.getRepository();
    Set fields = new ListOrderedSet(); // keep the order of the fields

    // add Standard Fields
    // MBAIRD: if the object being queried on has multiple classes mapped to the table,
    // then we will get all the fields that are a unique set across all those classes so if we need to
    // we can materialize an extent
    FieldDescriptor fds[] = repository.getFieldDescriptorsForMultiMappedTable(cld);
    for (int i = 0; i < fds.length; i++) {
        fields.add(fds[i]);
    }

    // add inherited Fields. This is important when querying for a class having a super-reference
    fds = cld.getFieldDescriptor(true);
    for (int i = 0; i < fds.length; i++) {
        fields.add(fds[i]);
    }

    // add Fields of joined subclasses
    Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true);
    for (int c = 0; c < multiJoinedClasses.length; c++) {
        ClassDescriptor subCld = repository.getDescriptorFor(multiJoinedClasses[c]);
        fds = subCld.getFieldDescriptions();
        for (int i = 0; i < fds.length; i++) {
            fields.add(fds[i]);
        }
    }

    FieldDescriptor[] result = new FieldDescriptor[fields.size()];
    fields.toArray(result);
    return result;
}