Example usage for org.apache.commons.collections.bidimap DualHashBidiMap put

List of usage examples for org.apache.commons.collections.bidimap DualHashBidiMap put

Introduction

This page lists example usages of the org.apache.commons.collections.bidimap DualHashBidiMap.put method.

Prototype

public Object put(Object key, Object value) 

Source Link

Usage

From source file:edu.jhuapl.openessence.datasource.jdbc.entry.JdbcOeDataEntrySource.java

/**
 * Executes an INSERT SQL statement using Spring's JdbcTemplate.
 *
 * <p>For the parent table, generated primary-key values are captured via a
 * {@link GeneratedKeyHolder} and returned; child-table inserts return {@code null}.
 *
 * @param tableName        table to insert values into
 * @param ignoreSpecialSql the flag to ignore specialSql definitions in the groovy def file. In general, set false
 *                         during add* and set true during update*
 * @param dimIds           DimensionIds that we will insert data for //todo ?? why is this needed?
 * @param editDims         editable DimensionIds that we will insert data for
 * @param values           values that correspond to the editable DimensionIds. These values get written into the
 *                         database
 * @return Map of the primary keys and values for the inserted record -- only for the Parent Record - children return
 *         null
 * @throws OeDataSourceAccessException if error occurs at database level
 * @throws OeDataSourceException       if error occurs during processing
 */
private Map editableInsertQuery(String tableName, boolean ignoreSpecialSql, List<String> dimIds,
        Map<String, Dimension> editDims, Map<String, Object> values)
        throws OeDataSourceAccessException, OeDataSourceException {

    List<String> generatedKeys = new ArrayList<String>();
    Set<String> tablePkIds;

    // insert on parent table
    if (tableName.equals(parentTableDetails.getTableName())) {

        // setup KeyHolder from pk_dimIds
        tablePkIds = parentTableDetails.getPks();

        // Build a bidirectional dimensionId <-> sqlCol map, restricted to the primary-key
        // dimensions (retainAll on the keySet view also removes entries from superEditCopy).
        Map<String, Object> superEditCopy = new LinkedHashMap<String, Object>(superEditMap);
        Set<String> superEditKeys = superEditCopy.keySet();
        DualHashBidiMap bidimap = new DualHashBidiMap();
        superEditKeys.retainAll(tablePkIds);
        for (Map.Entry<String, Object> e : superEditCopy.entrySet()) {
            // replace each DimensionBean with its SQL column name before mirroring into bidimap
            e.setValue(((DimensionBean) e.getValue()).getSqlCol());
            bidimap.put(e.getKey(), e.getValue());
        }

        // setup KeyHolder from pk_dimIds
        generatedKeys.addAll(tablePkIds); // NOTE: jdbc driver clears this and puts in the autoincs it finds.
        Map<String, Object> generatedKeyMap = new HashMap<String, Object>();
        for (String eachKey : generatedKeys) {
            generatedKeyMap.put(eachKey, null);
        }
        List<Map<String, Object>> keyMapList = new ArrayList<Map<String, Object>>();
        keyMapList.add(generatedKeyMap);
        KeyHolder keyHolder = new GeneratedKeyHolder(keyMapList);

        jdbcTemplate.update(new MultiTableInsertPreparedStatementCreator(tableName, ignoreSpecialSql, dimIds,
                editDims, values), keyHolder);

        Map<String, Object> keyMap = keyHolder.getKeys();

        // TODO: current implementation of getGeneratedKeys for PGSQL 8.4 returns ALL column/vals...we just want the pk's we know about
        // TODO: CHECK FOR WHAT HAPPENS WITH LOWER/UPPER CASE
        //http://archives.postgresql.org/pgsql-jdbc/2010-04/msg00061.php
        boolean isPostgreSql = isPostgreSqlDBMS();
        if (isPostgreSql) {
            // postgres' implementation of keyholder lowercases the key column.
            // Re-key the driver-returned values by dimensionId: for each pk dimensionId,
            // look up the driver's value under its mapped sqlCol. put() on existing keys
            // does not structurally modify the map, so iterating the keySet here is safe.
            DbKeyValMap dbkvm = new DbKeyValMap(bidimap);
            Set<String> kyids = dbkvm.keySet();
            for (String ky : kyids) {
                dbkvm.put(ky, keyMap.get(bidimap.get(ky)));
            }
            kyids.retainAll(tablePkIds);
            keyMap = dbkvm;
        }

        // -OR-
        // if table had no auto-gen keys but the INSERT succeeds, means the pks taken from the 'values' worked.
        // therefore, safe to use these as the "generated" PKs. retains the values that are designated "PK" dimensions
        //
        else if (keyMap == null || keyMap.size() == 0) {
            DbKeyValMap dbkvm = new DbKeyValMap(values);
            Set<String> kyids = dbkvm.keySet();
            kyids.retainAll(tablePkIds);
            keyMap = dbkvm;
        }

        // make sure got *ALL* pkIds/values configured in the ds def.
        List<Map> allkeys = getAllGeneratedKeys(tableName, tablePkIds, new DbKeyValMap(keyMap));

        return (allkeys.size() > 0 ? allkeys.get(0) : null);

    } else { // insert on child table.
        // don't need to know the returned PK ids & vals for children. just do typical INSERT
        jdbcTemplate.update(new MultiTableInsertPreparedStatementCreator(tableName, ignoreSpecialSql, dimIds,
                editDims, values));
        return null;
    }
}

From source file:playground.johannes.coopsim.analysis.TripDistanceAccessibilityTask.java

/**
 * For each activity purpose found in the trajectory set, correlates vertex accessibility
 * ("A") with mean trip distance ("d") and writes the binned correlation to a text file.
 *
 * <p>Fix: the original wrote every purpose to the same constant path
 * {@code d_mean_A.txt}, so each loop iteration overwrote the previous purpose's output
 * and only the last purpose survived. The output file is now per-purpose, matching the
 * naming scheme used by the sibling degree task ({@code d_mean_k.<purpose>.txt}).
 *
 * @param trajectories trajectories to analyze
 * @param results      shared results map (not written to by this task)
 */
@Override
public void analyze(Set<Trajectory> trajectories, Map<String, DescriptiveStatistics> results) {
    TObjectDoubleHashMap<Vertex> xVals = accessibility.values(graph.getVertices());

    // collect the distinct activity purposes; activities occupy the even indices of a trajectory
    Set<String> purposes = new HashSet<String>();
    for (Trajectory t : trajectories) {
        for (int i = 0; i < t.getElements().size(); i += 2) {
            purposes.add(((Activity) t.getElements().get(i)).getType());
        }
    }

    // person -> trajectory lookup used by the property adaptors
    DualHashBidiMap bidiMap = new DualHashBidiMap();
    for (Trajectory t : trajectories) {
        bidiMap.put(t.getPerson(), t);
    }

    for (String purpose : purposes) {
        TripDistanceMean tripDist = new TripDistanceMean(purpose, facilities);
        PersonTrajectoryPropertyAdaptor pAdaptor = new PersonTrajectoryPropertyAdaptor(bidiMap, tripDist);
        VertexPersonPropertyAdaptor vAdaptor = new VertexPersonPropertyAdaptor(graph, pAdaptor);

        TObjectDoubleHashMap<Vertex> yVals = vAdaptor.values(graph.getVertices());

        TDoubleDoubleHashMap correl = VertexPropertyCorrelation.mean(yVals, xVals,
                FixedSampleSizeDiscretizer.create(xVals.getValues(), 50, 100));
        try {
            // one file per purpose, so no purpose clobbers another's output
            TXTWriter.writeMap(correl, "A", "d",
                    String.format("%1$s/d_mean_A.%2$s.txt", getOutputDirectory(), purpose));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:playground.johannes.coopsim.analysis.TripDistanceDegreeTask.java

/**
 * Correlates vertex degree ("k") with mean trip distance ("d"), once per activity
 * purpose plus a {@code null} pseudo-purpose covering all trips, and writes one
 * binned-correlation file per purpose.
 *
 * @param trajectories trajectories to analyze
 * @param results      shared results map (not written to by this task)
 */
@Override
public void analyze(Set<Trajectory> trajectories, Map<String, DescriptiveStatistics> results) {
    TObjectDoubleHashMap<Vertex> degreeValues = Degree.getInstance().values(graph.getVertices());

    // gather the distinct activity purposes; activities sit at the even indices of a trajectory
    Set<String> purposes = new HashSet<String>();
    for (Trajectory trajectory : trajectories) {
        int elementCount = trajectory.getElements().size();
        for (int idx = 0; idx < elementCount; idx += 2) {
            Activity activity = (Activity) trajectory.getElements().get(idx);
            purposes.add(activity.getType());
        }
    }
    // null marks the "all purposes" pass
    purposes.add(null);

    // person -> trajectory lookup for the property adaptors
    DualHashBidiMap personToTrajectory = new DualHashBidiMap();
    for (Trajectory trajectory : trajectories) {
        personToTrajectory.put(trajectory.getPerson(), trajectory);
    }

    for (String purpose : purposes) {
        TripDistanceMean distanceProperty = new TripDistanceMean(purpose, facilities);
        PersonTrajectoryPropertyAdaptor trajectoryAdaptor =
                new PersonTrajectoryPropertyAdaptor(personToTrajectory, distanceProperty);
        VertexPersonPropertyAdaptor vertexAdaptor = new VertexPersonPropertyAdaptor(graph, trajectoryAdaptor);

        TObjectDoubleHashMap<Vertex> distanceValues = vertexAdaptor.values(graph.getVertices());

        TDoubleDoubleHashMap correlation = VertexPropertyCorrelation.mean(distanceValues, degreeValues,
                FixedSampleSizeDiscretizer.create(degreeValues.getValues(), 50, 100));

        String label = (purpose == null) ? "all" : purpose;
        try {
            TXTWriter.writeMap(correlation, "k", "d",
                    String.format("%1$s/d_mean_k.%2$s.txt", getOutputDirectory(), label));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}