Example usage for org.apache.commons.collections IteratorUtils toList

List of usage examples for org.apache.commons.collections IteratorUtils toList

Introduction

On this page you can find example usage for org.apache.commons.collections IteratorUtils toList.

Prototype

public static List toList(Iterator iterator) 

Document

Gets a list based on an iterator.
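
As a quick orientation before the usage examples, here is a minimal, self-contained sketch of the call. The element type and sample data are hypothetical; note that in the non-generic commons-collections 3.x API the returned List is raw, so assigning it to a typed list produces an unchecked warning.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.collections.IteratorUtils;

public class ToListExample {
    public static void main(String[] args) {
        Iterator<String> iterator = Arrays.asList("a", "b", "c").iterator();

        // Drains the iterator into a new ArrayList; the raw List comes from
        // the pre-generics commons-collections 3.x API.
        @SuppressWarnings("unchecked")
        List<String> values = IteratorUtils.toList(iterator);

        System.out.println(values); // [a, b, c]
    }
}

Because toList consumes the iterator, the same Iterator instance should not be passed to it twice.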

Usage

From source file:pl.p.lodz.ftims.server.logic.ChallengeService.java

@Override
public List<Challenge> getChallenges(Coordinates coords) {
    List<Challenge> challenges = IteratorUtils.toList(challengesDAO.findAll().iterator());
    challenges.sort((Challenge o1, Challenge o2) -> {
        double distance1 = new Coordinates(o1.getLocation()).computeDistance(coords);
        double distance2 = new Coordinates(o2.getLocation()).computeDistance(coords);
        // Double.compare keeps the original ordering without the precision loss
        // of casting the difference to int.
        return Double.compare(distance2, distance1);
    });
    return challenges;
}

From source file:pl.p.lodz.ftims.server.logic.ChallengeService.java

@Override
public List<Challenge> getAllChallenges() {
    return IteratorUtils.toList(challengesDAO.findAll().iterator());
}

From source file:pl.p.lodz.ftims.server.logic.RankingService.java

@Override
public List<Ranking> getRanking() {
    return IteratorUtils.toList(rankingDAO.findAll().iterator());
}

From source file:pl.p.lodz.ftims.server.logic.UserProfileService.java

@Override
@Transactional(readOnly = true)
public List<User> getAllUsers() {
    return IteratorUtils.toList(profilesDAO.findAll().iterator());
}

From source file:playRepository.GitRepository.java

@SuppressWarnings("unchecked")
public static List<RevCommit> diffRevCommits(Repository repository, ObjectId from, ObjectId to)
        throws IOException, GitAPIException {
    return IteratorUtils.toList(new Git(repository).log().addRange(from, to).call().iterator());
}

From source file:PolygonMatching.MatchingGeoPolygon.java

public static void main(String[] args) throws Exception {
    //      SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark").setMaster("local");
    SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark");
    JavaSparkContext ctx = new JavaSparkContext(sparkConf);

    String dataSource1 = args[0];
    String dataSource2 = args[1];
    final double thresholdLinguistic = Double.parseDouble(args[2]);
    final double thresholdPolygon = Double.parseDouble(args[3]);
    String outputPath = args[4];
    Integer amountPartition = Integer.parseInt(args[5]);
    String sourceType = args[6];

    DataSource dataSourcePref = null;
    DataSource dataSourceOSM = null;
    if (sourceType.equals("CSV")) {
        dataSourcePref = AbstractExec.getDataCSV(dataSource1, ';');
        dataSourceOSM = AbstractExec.getDataCSV(dataSource2, ';');
    } else { //is postgis
        dataSourcePref = AbstractExec.getDataPostGres(dataSource1);
        dataSourceOSM = AbstractExec.getDataPostGres(dataSource2);
    }

    //      DataSource dataSourcePref = AbstractExec.getDataPostGres(dataSource1); //squaresOfCuritiba Pref
    //      DataSource dataSourceOSM = AbstractExec.getDataPostGres(dataSource2); //squaresOfCuritiba OSM

    //      DataSource dataSourcePref = AbstractExec.getDataPostGres("queries/squares_pref_curitiba.txt"); //squaresOfCuritiba Pref
    //      DataSource dataSourceOSM = AbstractExec.getDataPostGres("queries/osm_curitiba.txt"); //squaresOfCuritiba OSM

    //      DataSource dataSourcePref = AbstractExec.getDataPostGres("queries/parks_pref_ny.txt"); //parksOfNY Pref
    //      DataSource dataSourceOSM = AbstractExec.getDataPostGres("queries/osm_ny.txt"); //parksOfNY OSM

    StorageManager storagePref = new StorageManager();
    StorageManager storageOSM = new StorageManager();

    // enables in-memory execution for faster processing
    // this can be done since the whole data fits into memory
    storagePref.enableInMemoryProcessing();
    storageOSM.enableInMemoryProcessing();

    // adds the "data" to the algorithm
    storagePref.addDataSource(dataSourcePref);
    storageOSM.addDataSource(dataSourceOSM);

    if (!storagePref.isDataExtracted()) {
        storagePref.extractData();
    }
    if (!storageOSM.isDataExtracted()) {
        storageOSM.extractData();
    }

    List<GeoPolygon> geoentitiesPref = new ArrayList<GeoPolygon>();
    List<GeoPolygon> geoentitiesOSM = new ArrayList<GeoPolygon>();

    // the algorithm returns each generated pair step-by-step
    int indexOfPref = 0;
    for (GenericObject genericObj : storagePref.getExtractedData()) {
        String nome = "";
        Integer id;
        if (!genericObj.getData().get("name").toString().equals("null")) {//for curitiba use atribute "nome" for new york "signname"
            nome = genericObj.getData().get("name").toString();
            id = Integer.parseInt(genericObj.getData().get("id").toString());//for Curitiba use attribute "gid", for New York "id"
            geoentitiesPref.add(new GeoPolygon(genericObj.getData().get("geometry").toString(), nome,
                    InputTypes.GOV_POLYGON, indexOfPref, id));
            indexOfPref++;
        }

    }

    int indexOfOSM = 0;
    for (GenericObject genericObj : storageOSM.getExtractedData()) {
        //               System.out.println(genericObj.getData().get("geometry"));
        String nome = "";
        Integer id;
        if (!genericObj.getData().get("name").toString().equals("null")) {
            nome = genericObj.getData().get("name").toString();
            id = Integer.parseInt(genericObj.getData().get("id").toString());
            geoentitiesOSM.add(new GeoPolygon(genericObj.getData().get("geometry").toString(), nome,
                    InputTypes.OSM_POLYGON, indexOfOSM, id));
            indexOfOSM++;
        }

    }

    JavaRDD<GeoPolygon> polygonsOSM = ctx.parallelize(geoentitiesOSM);
    JavaRDD<GeoPolygon> polygonsPref = ctx.parallelize(geoentitiesPref);

    JavaRDD<GeoPolygon> polygons = polygonsPref.union(polygonsOSM);

    final Broadcast<Integer> numReplication = ctx.broadcast(amountPartition);
    JavaRDD<Tuple2<Integer, GeoPolygon>> polygonLabed = polygons
            .flatMap(new FlatMapFunction<GeoPolygon, Tuple2<Integer, GeoPolygon>>() {

                public Iterator<Tuple2<Integer, GeoPolygon>> call(GeoPolygon s) throws Exception {
                    List<Tuple2<Integer, GeoPolygon>> listOfPolygonTuple = new ArrayList<Tuple2<Integer, GeoPolygon>>();
                    if (s.getType().equals(InputTypes.OSM_POLYGON)) {
                        listOfPolygonTuple.add(new Tuple2<Integer, GeoPolygon>(
                                s.getIdGeometry() % numReplication.getValue(), s));
                        return listOfPolygonTuple.iterator();
                    } else { //equals to InputTypes.GOV_POLYGON
                        for (int i = 0; i < numReplication.value(); i++) {
                            listOfPolygonTuple.add(new Tuple2<Integer, GeoPolygon>(i, s));
                        }
                        return listOfPolygonTuple.iterator();
                    }
                }

            });

    JavaPairRDD<Integer, GeoPolygon> polygonsPaired = polygonLabed
            .mapToPair(new PairFunction<Tuple2<Integer, GeoPolygon>, Integer, GeoPolygon>() {

                public Tuple2<Integer, GeoPolygon> call(Tuple2<Integer, GeoPolygon> tuple) throws Exception {
                    return new Tuple2<Integer, GeoPolygon>(tuple._1(), tuple._2());
                }
            });

    JavaPairRDD<Integer, Iterable<GeoPolygon>> polygonsGrouped = polygonsPaired.groupByKey(amountPartition);//number of partitions

    JavaPairRDD<Integer, PolygonPair> matches = polygonsGrouped.flatMapToPair(
            new PairFlatMapFunction<Tuple2<Integer, Iterable<GeoPolygon>>, Integer, PolygonPair>() {

                public Iterator<Tuple2<Integer, PolygonPair>> call(Tuple2<Integer, Iterable<GeoPolygon>> tuple)
                        throws Exception {
                    List<GeoPolygon> polygonsPerKey = IteratorUtils.toList(tuple._2().iterator());
                    List<GeoPolygon> polygonsSource = new ArrayList<GeoPolygon>();
                    List<GeoPolygon> polygonsTarget = new ArrayList<GeoPolygon>();
                    for (GeoPolygon entity : polygonsPerKey) {
                        if (entity.getType() == InputTypes.OSM_POLYGON) {
                            polygonsSource.add(entity);
                        } else {
                            polygonsTarget.add(entity);
                        }
                    }

                    List<Tuple2<Integer, PolygonPair>> entityMatches = new ArrayList<Tuple2<Integer, PolygonPair>>();
                    JaccardSimilarity jaccard = new JaccardSimilarity();
                    for (GeoPolygon entSource : polygonsSource) {
                        for (GeoPolygon entTarget : polygonsTarget) {
                            double linguisticSimilarity = 0.0;
                            //calculate the linguistic similarity
                            if (!entTarget.getGeoName().isEmpty()) {
                                linguisticSimilarity = jaccard.getSimilarity(
                                        entTarget.getGeoName().toLowerCase(),
                                        entSource.getGeoName().toLowerCase());
                            }

                            //calculate the polygon similarity
                            double polygonSimilarity = entSource.getPolygonSimilarity(entTarget);

                            //classification of pairs
                            PolygonPair pair;
                            if (linguisticSimilarity > thresholdLinguistic
                                    && polygonSimilarity > thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.MATCH);
                            } else if (linguisticSimilarity < thresholdLinguistic
                                    && polygonSimilarity < thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.NON_MATCH);
                            } else {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.POSSIBLE_PROBLEM);
                            }

                            //                  int index = entityMatches.size();
                            //                  entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));

                            //for use case 04
                            if (pair.getPolygonClassification().equals(PolygonClassification.POSSIBLE_PROBLEM)
                                    || pair.getPolygonClassification().equals(PolygonClassification.MATCH)) {
                                int index = entityMatches.size();
                                entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));
                            }

                            //                     if (Math.abs(entTarget.getArea() - entSource.getArea()) > thresholdArea) {
                            //                        entityMatches.add(new Tuple2<String, String>(entTarget.getGeoName(), entSource.getGeoName() + ":" + Math.abs(entTarget.getArea() - entSource.getArea())));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + entSource.getGeoNameame(), _2));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + ());
                            ////                        System.out.println(entTarget.getGeoName() + " pref: " + String.format("%.2f", entTarget.getArea()));
                            ////                        System.out.println(entSource.getGeoName() + " OSM: " + String.format("%.2f", entSource.getArea()));
                            ////                        System.out.println();
                            //                     }
                        }
                    }
                    return entityMatches.iterator();
                }
            });

    matches.flatMap(new FlatMapFunction<Tuple2<Integer, PolygonPair>, String>() {

        public Iterator<String> call(Tuple2<Integer, PolygonPair> t) throws Exception {
            ArrayList<String> listOutput = new ArrayList<String>();
            listOutput.add(t._2().toStringCSV());
            return listOutput.iterator();
        }

    }).saveAsTextFile(outputPath);

    ctx.stop();
    ctx.close();
}

From source file:SingleMatchingGeoPolygon.SingleMatchingGeoPolygon.java

public static void main(String[] args) throws Exception {
    //      SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark").setMaster("local");
    SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark");
    JavaSparkContext ctx = new JavaSparkContext(sparkConf);

    String dataSource = args[0];
    final double thresholdLinguistic = Double.parseDouble(args[1]);
    final double thresholdPolygon = Double.parseDouble(args[2]);
    String outputPath = args[3];
    Integer amountPartition = Integer.parseInt(args[4]);
    String sourceType = args[5];

    DataSource source1 = null;
    if (sourceType.equals("CSV")) {
        source1 = AbstractExec.getDataCSV(dataSource, ';');
    } else { //is postgis
        source1 = AbstractExec.getDataPostGres(dataSource);
    }

    ReadAbstractSource reader = new ReadAbstractSource();
    StorageManager storagePolygon = reader.readFile(source1);

    List<GeoPolygon> geoentities = new ArrayList<GeoPolygon>();

    int index = 0;
    for (GenericObject genericObj : storagePolygon.getExtractedData()) {
        //               System.out.println(genericObj.getData().get("geometry"));
        String nome = "";
        Integer id;
        if (!genericObj.getData().get("name").toString().equals("null")) {
            nome = genericObj.getData().get("name").toString();
            id = Integer.parseInt(genericObj.getData().get("id").toString());
            geoentities.add(new GeoPolygon(genericObj.getData().get("geometry").toString(), nome,
                    InputTypes.OSM_POLYGON, index, id));
            index++;
        }
    }

    JavaRDD<GeoPolygon> polygons = ctx.parallelize(geoentities);

    final Broadcast<Integer> numReplication = ctx.broadcast(amountPartition);
    JavaRDD<Tuple2<Integer, GeoPolygon>> polygonLabed = polygons
            .flatMap(new FlatMapFunction<GeoPolygon, Tuple2<Integer, GeoPolygon>>() {

                public Iterator<Tuple2<Integer, GeoPolygon>> call(GeoPolygon s) throws Exception {
                    List<Tuple2<Integer, GeoPolygon>> listOfPolygonTuple = new ArrayList<Tuple2<Integer, GeoPolygon>>();
                    GeoPolygon tocompare = s.getGeoPolygon();
                    tocompare.setDuplicated(false);
                    listOfPolygonTuple.add(new Tuple2<Integer, GeoPolygon>(
                            tocompare.getIdGeometry() % numReplication.getValue(), tocompare));//entity that not replicated

                    GeoPolygon duplicated = s.getGeoPolygon();
                    duplicated.setDuplicated(true);
                    for (int i = 0; i < numReplication.value(); i++) {//the entities that will be replicated
                        listOfPolygonTuple.add(new Tuple2<Integer, GeoPolygon>(i, duplicated));
                    }
                    return listOfPolygonTuple.iterator();
                }

            });

    JavaPairRDD<Integer, GeoPolygon> polygonsPaired = polygonLabed
            .mapToPair(new PairFunction<Tuple2<Integer, GeoPolygon>, Integer, GeoPolygon>() {

                public Tuple2<Integer, GeoPolygon> call(Tuple2<Integer, GeoPolygon> tuple) throws Exception {
                    return new Tuple2<Integer, GeoPolygon>(tuple._1(), tuple._2());
                }
            });

    JavaPairRDD<Integer, Iterable<GeoPolygon>> polygonsGrouped = polygonsPaired.groupByKey(amountPartition);//number of partitions

    JavaPairRDD<Integer, PolygonPair> matches = polygonsGrouped.flatMapToPair(
            new PairFlatMapFunction<Tuple2<Integer, Iterable<GeoPolygon>>, Integer, PolygonPair>() {

                public Iterator<Tuple2<Integer, PolygonPair>> call(Tuple2<Integer, Iterable<GeoPolygon>> tuple)
                        throws Exception {
                    List<GeoPolygon> polygonsPerKey = IteratorUtils.toList(tuple._2().iterator());
                    List<GeoPolygon> polygonsToCompare = new ArrayList<GeoPolygon>();
                    List<GeoPolygon> polygonsDuplicated = new ArrayList<GeoPolygon>();
                    for (GeoPolygon entity : polygonsPerKey) {
                        if (entity.isDuplicated()) {
                            polygonsDuplicated.add(entity);
                        } else {
                            polygonsToCompare.add(entity);
                        }
                    }

                    List<Tuple2<Integer, PolygonPair>> entityMatches = new ArrayList<Tuple2<Integer, PolygonPair>>();
                    JaccardSimilarity jaccard = new JaccardSimilarity();
                    for (GeoPolygon entSource : polygonsToCompare) {
                        for (GeoPolygon entTarget : polygonsDuplicated) {
                            double linguisticSimilarity = 0.0;
                            //calculate the linguistic similarity
                            if (!entTarget.getGeoName().isEmpty()) {
                                linguisticSimilarity = jaccard.getSimilarity(
                                        entTarget.getGeoName().toLowerCase(),
                                        entSource.getGeoName().toLowerCase());
                            }

                            //calculate the polygon similarity
                            double polygonSimilarity = entSource.getPolygonSimilarity(entTarget);

                            //classification of pairs
                            PolygonPair pair;
                            if (linguisticSimilarity > thresholdLinguistic
                                    && polygonSimilarity > thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.MATCH);
                            } else if (linguisticSimilarity < thresholdLinguistic
                                    && polygonSimilarity < thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.NON_MATCH);
                            } else {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.POSSIBLE_PROBLEM);
                            }

                            //                  int index = entityMatches.size();
                            //                  entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));

                            //for use case 04
                            if (pair.getPolygonClassification().equals(PolygonClassification.MATCH) && (pair
                                    .getSource().getIdInDataset() != pair.getTarget().getIdInDataset())) {
                                int index = entityMatches.size();
                                entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));
                            }

                            //                     if (Math.abs(entTarget.getArea() - entSource.getArea()) > thresholdArea) {
                            //                        entityMatches.add(new Tuple2<String, String>(entTarget.getGeoName(), entSource.getGeoName() + ":" + Math.abs(entTarget.getArea() - entSource.getArea())));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + entSource.getGeoNameame(), _2));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + ());
                            ////                        System.out.println(entTarget.getGeoName() + " pref: " + String.format("%.2f", entTarget.getArea()));
                            ////                        System.out.println(entSource.getGeoName() + " OSM: " + String.format("%.2f", entSource.getArea()));
                            ////                        System.out.println();
                            //                     }
                        }
                    }
                    return entityMatches.iterator();
                }
            });

    matches.flatMap(new FlatMapFunction<Tuple2<Integer, PolygonPair>, String>() {

        public Iterator<String> call(Tuple2<Integer, PolygonPair> t) throws Exception {
            ArrayList<String> listOutput = new ArrayList<String>();
            listOutput.add(t._2().toStringCSV());
            return listOutput.iterator();
        }

    }).saveAsTextFile(outputPath);

    ctx.stop();
    ctx.close();
}

From source file:SingleMatchingGeoPolygon.SingleMatchingGeoPolygonBlocked.java

public static void main(String[] args) throws Exception {
    //      SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark").setMaster("local");
    SparkConf sparkConf = new SparkConf().setAppName("GeoMatchingSpark");
    JavaSparkContext ctx = new JavaSparkContext(sparkConf);

    String dataSource = args[0];
    final double thresholdLinguistic = Double.parseDouble(args[1]);
    final double thresholdPolygon = Double.parseDouble(args[2]);
    String outputPath = args[3];
    Integer amountPartition = Integer.parseInt(args[4]);
    String sourceType = args[5];

    DataSource source1 = null;
    if (sourceType.equals("CSV")) {
        source1 = AbstractExec.getDataCSV(dataSource, ';');
    } else { //is postgis
        source1 = AbstractExec.getDataPostGres(dataSource);
    }

    ReadAbstractSource reader = new ReadAbstractSource();
    StorageManager storagePolygon = reader.readFile(source1);
    //      StorageManager storagePolygon = reader.readFile(AbstractExec.getDataPostGres("queries/osm_curitiba.txt"));
    //      StorageManager storagePolygon = reader.readFile(AbstractExec.getDataPostGres("queries/squares_pref_curitiba.txt"));

    List<GeoPolygon> geoentities = new ArrayList<GeoPolygon>();

    int index = 0;
    for (GenericObject genericObj : storagePolygon.getExtractedData()) {
        //               System.out.println(genericObj.getData().get("geometry"));
        String nome = "";
        Integer id;
        if (!genericObj.getData().get("name").toString().equals("null")) {
            nome = genericObj.getData().get("name").toString();
            id = Integer.parseInt(genericObj.getData().get("id").toString());
            geoentities.add(new GeoPolygon(genericObj.getData().get("geometry").toString(), nome,
                    InputTypes.OSM_POLYGON, index, id));
            index++;
        }
    }

    JavaRDD<GeoPolygon> polygons = ctx.parallelize(geoentities);

    Broadcast<Integer> numReplication = ctx.broadcast(amountPartition);
    JavaRDD<Tuple2<String, GeoPolygon>> polygonLabed = polygons
            .flatMap(new FlatMapFunction<GeoPolygon, Tuple2<String, GeoPolygon>>() {

                public Iterator<Tuple2<String, GeoPolygon>> call(GeoPolygon s) throws Exception {
                    List<Tuple2<String, GeoPolygon>> listOfPolygonTuple = new ArrayList<Tuple2<String, GeoPolygon>>();
                    GeoPolygon tocompare = s.getGeoPolygon();
                    tocompare.setDuplicated(false);
                    if (tocompare.getGeoName().length() < 3) {
                        listOfPolygonTuple
                                .add(new Tuple2<String, GeoPolygon>(tocompare.getGeoName(), tocompare));//entity that not replicated
                    } else {
                        listOfPolygonTuple.add(new Tuple2<String, GeoPolygon>(
                                tocompare.getGeoName().substring(0, 3), tocompare));//entity that not replicated
                    }

                    GeoPolygon duplicated = s.getGeoPolygon();
                    duplicated.setDuplicated(true);
                    if (duplicated.getGeoName().length() < 3) {
                        listOfPolygonTuple
                                .add(new Tuple2<String, GeoPolygon>(duplicated.getGeoName(), duplicated));
                    } else {
                        listOfPolygonTuple.add(new Tuple2<String, GeoPolygon>(
                                duplicated.getGeoName().substring(0, 3), duplicated));
                    }

                    //            for (int i = 0; i < numReplication.value(); i++) {//the entities that will be replicated
                    //               listOfPolygonTuple.add(new Tuple2<Integer, GeoPolygon>(duplicated, duplicated));
                    //            }
                    return listOfPolygonTuple.iterator();
                }

            });

    JavaPairRDD<String, GeoPolygon> polygonsPaired = polygonLabed
            .mapToPair(new PairFunction<Tuple2<String, GeoPolygon>, String, GeoPolygon>() {

                public Tuple2<String, GeoPolygon> call(Tuple2<String, GeoPolygon> tuple) throws Exception {
                    return new Tuple2<String, GeoPolygon>(tuple._1(), tuple._2());
                }
            });

    JavaPairRDD<String, Iterable<GeoPolygon>> polygonsGrouped = polygonsPaired.groupByKey(amountPartition);//number of partitions

    JavaPairRDD<Integer, PolygonPair> matches = polygonsGrouped.flatMapToPair(
            new PairFlatMapFunction<Tuple2<String, Iterable<GeoPolygon>>, Integer, PolygonPair>() {

                public Iterator<Tuple2<Integer, PolygonPair>> call(Tuple2<String, Iterable<GeoPolygon>> tuple)
                        throws Exception {
                    List<GeoPolygon> polygonsPerKey = IteratorUtils.toList(tuple._2().iterator());
                    List<GeoPolygon> polygonsToCompare = new ArrayList<GeoPolygon>();
                    List<GeoPolygon> polygonsDuplicated = new ArrayList<GeoPolygon>();
                    for (GeoPolygon entity : polygonsPerKey) {
                        if (entity.isDuplicated()) {
                            polygonsDuplicated.add(entity);
                        } else {
                            polygonsToCompare.add(entity);
                        }
                    }

                    List<Tuple2<Integer, PolygonPair>> entityMatches = new ArrayList<Tuple2<Integer, PolygonPair>>();
                    JaccardSimilarity jaccard = new JaccardSimilarity();
                    for (GeoPolygon entSource : polygonsToCompare) {
                        for (GeoPolygon entTarget : polygonsDuplicated) {
                            double linguisticSimilarity = 0.0;
                            //calculate the linguistic similarity
                            if (!entTarget.getGeoName().isEmpty()) {
                                linguisticSimilarity = jaccard.getSimilarity(
                                        entTarget.getGeoName().toLowerCase(),
                                        entSource.getGeoName().toLowerCase());
                            }

                            //calculate the polygon similarity
                            double polygonSimilarity = entSource.getPolygonSimilarity(entTarget);

                            //classification of pairs
                            PolygonPair pair;
                            if (linguisticSimilarity > thresholdLinguistic
                                    && polygonSimilarity > thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.MATCH);
                            } else if (linguisticSimilarity < thresholdLinguistic
                                    && polygonSimilarity < thresholdPolygon) {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.NON_MATCH);
                            } else {
                                pair = new PolygonPair(entSource, entTarget, linguisticSimilarity,
                                        polygonSimilarity, PolygonClassification.POSSIBLE_PROBLEM);
                            }

                            //                  int index = entityMatches.size();
                            //                  entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));

                            //for use case 04
                            if (pair.getPolygonClassification().equals(PolygonClassification.MATCH) && (pair
                                    .getSource().getIdInDataset() != pair.getTarget().getIdInDataset())) {
                                int index = entityMatches.size();
                                entityMatches.add(new Tuple2<Integer, PolygonPair>(index, pair));
                            }

                            //                     if (Math.abs(entTarget.getArea() - entSource.getArea()) > thresholdArea) {
                            //                        entityMatches.add(new Tuple2<String, String>(entTarget.getGeoName(), entSource.getGeoName() + ":" + Math.abs(entTarget.getArea() - entSource.getArea())));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + entSource.getGeoNameame(), _2));
                            ////                        System.out.println(entTarget.getGeoName() +  " - " + ());
                            ////                        System.out.println(entTarget.getGeoName() + " pref: " + String.format("%.2f", entTarget.getArea()));
                            ////                        System.out.println(entSource.getGeoName() + " OSM: " + String.format("%.2f", entSource.getArea()));
                            ////                        System.out.println();
                            //                     }
                        }
                    }
                    return entityMatches.iterator();
                }
            });

    matches.flatMap(new FlatMapFunction<Tuple2<Integer, PolygonPair>, String>() {

        public Iterator<String> call(Tuple2<Integer, PolygonPair> t) throws Exception {
            ArrayList<String> listOutput = new ArrayList<String>();
            listOutput.add(t._2().toStringCSV());
            return listOutput.iterator();
        }

    }).saveAsTextFile(outputPath);

    ctx.stop();
    ctx.close();
}

From source file:TDS.Proctor.Sql.Data.Accommodations.AccTypes.java

public AccTypesDTO getAccTypesDTO() {
    if (_sortKeysBySortOrder == null)
        _sortKeysBySortOrder = new ArrayList<String>(IteratorUtils.toList(this.keySet().iterator()));

    AccTypesDTO accTypesDTO = new AccTypesDTO();
    for (String key : _sortKeysBySortOrder) {
        if (this.containsKey(key)) {
            AccType value = this.get(key);
            accTypesDTO.add(new AccTypeDTO(key, value));// kvp.Key,
            // kvp.Value
        }
    }
    return accTypesDTO;
}

From source file:tds.student.web.handlers.MasterShellHandler.java

TestInfo loadTestInfo(OpportunityInstance oppInstance, TestConfig testConfig) throws ReturnStatusException {
    TestProperties testProps = itemBankService.getTestProperties(StudentContext.getTestKey()); // getTestProperties(testIdentifiers.TestKey);

    TestInfo testInfo = new TestInfo();
    testInfo.setReviewPage(0);
    testInfo.setUrlBase(UrlHelper.getBase());
    testInfo.setHasAudio(testProps.getRequirements().isHasAudio());

    // config (var tdsTestConfig = {})
    testInfo.setTestName(testProps.getDisplayName());
    testInfo.setTestLength(testConfig.getTestLength());
    testInfo.setStartPosition(testConfig.getStartPosition());
    testInfo.setContentLoadTimeout(testConfig.getContentLoadTimeout());
    testInfo.setRequestInterfaceTimeout(testConfig.getRequestInterfaceTimeout());
    testInfo.setOppRestartMins(testConfig.getOppRestartMins());
    testInfo.setInterfaceTimeout(testConfig.getInterfaceTimeout());
    testInfo.setPrefetch(testConfig.getPrefetch());
    testInfo.setValidateCompleteness(testConfig.isValidateCompleteness());

    testInfo.setInterfaceTimeoutDialog(TestShellSettings.getTimeoutDialog().getValue());
    testInfo.setAutoSaveInterval(TestShellSettings.getAutoSaveInterval().getValue());
    testInfo.setForbiddenAppsInterval(TestShellSettings.getForbiddenAppsInterval().getValue());
    testInfo.setDisableSaveWhenInactive(TestShellSettings.isDisableSaveWhenInactive().getValue());
    testInfo.setDisableSaveWhenForbiddenApps(TestShellSettings.isDisableSaveWhenForbiddenApps().getValue());
    testInfo.setAllowSkipAudio(TestShellSettings.isAllowSkipAudio().getValue());
    testInfo.setShowSegmentLabels(TestShellSettings.isShowSegmentLabels().getValue());
    testInfo.setAudioTimeout(TestShellSettings.getAudioTimeout().getValue());
    testInfo.setEnableLogging(TestShellSettings.isEnableLogging().getValue());
    testInfo.setDictionaryUrl(TestShellSettings.getDictionaryUrl());

    // segments (var tdsSegments = [])
    testInfo.setSegments(loadTestSegments(oppInstance, testProps));

    // comments (TDS.Comments = [])
    testInfo.setComments(IteratorUtils.toList(this._studentSettings.getComments().iterator()));
    return testInfo;
}