Example usage for com.google.common.collect HashBasedTable create

List of usage examples for com.google.common.collect HashBasedTable create

Introduction

This page collects usage examples for the com.google.common.collect HashBasedTable create() method.

Prototype

public static <R, C, V> HashBasedTable<R, C, V> create() 

Document

Creates an empty HashBasedTable.
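
The returned table is mutable and does not permit null row keys, column keys, or values. Below is a minimal sketch of typical use; the class name, String keys, and Integer values are illustrative, not taken from the examples that follow:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import java.util.Map;

public class HashBasedTableDemo {
    public static void main(String[] args) {
        // create an empty, mutable table with String row/column keys and Integer values
        Table<String, String, Integer> ratings = HashBasedTable.create();
        ratings.put("alice", "item1", 5);
        ratings.put("alice", "item2", 3);
        ratings.put("bob", "item1", 4);

        System.out.println(ratings.get("alice", "item2")); // 3
        // row(...) returns a live Map view of a single row
        Map<String, Integer> aliceRow = ratings.row("alice");
        System.out.println(aliceRow); // e.g. {item1=5, item2=3} (iteration order not guaranteed)
        System.out.println(ratings.contains("bob", "item2")); // false
    }
}

Most of the examples below follow the same pattern: create an empty table, fill it inside nested loops, and either query it directly or hand it to another constructor.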

Usage

From source file: edu.mit.streamjit.impl.compiler2.Compiler2.java
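
In this example, HashBasedTable.create() builds a Table<Actor, Actor, Storage> that maps each connected pair of actors to the Storage between them; the cell set is later traversed to recover the Token identifying each edge's initial data.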

public Compiler2(Set<Worker<?, ?>> workers, Configuration config, int maxNumCores, DrainData initialState,
        Input<?> input, Output<?> output) {
    this.workers = ImmutableSet.copyOf(workers);
    Map<Class<?>, ActorArchetype> archetypesBuilder = new HashMap<>();
    Map<Worker<?, ?>, WorkerActor> workerActors = new HashMap<>();
    for (Worker<?, ?> w : workers) {
        @SuppressWarnings("unchecked")
        Class<? extends Worker<?, ?>> wClass = (Class<? extends Worker<?, ?>>) w.getClass();
        if (archetypesBuilder.get(wClass) == null)
            archetypesBuilder.put(wClass, new ActorArchetype(wClass, module));
        WorkerActor actor = new WorkerActor(w, archetypesBuilder.get(wClass));
        workerActors.put(w, actor);
    }
    this.archetypes = ImmutableSet.copyOf(archetypesBuilder.values());

    Map<Token, TokenActor> tokenActors = new HashMap<>();
    Table<Actor, Actor, Storage> storageTable = HashBasedTable.create();
    int[] inputTokenId = new int[] { Integer.MIN_VALUE }, outputTokenId = new int[] { Integer.MAX_VALUE };
    for (WorkerActor a : workerActors.values())
        a.connect(ImmutableMap.copyOf(workerActors), tokenActors, storageTable, inputTokenId, outputTokenId);
    this.actors = new TreeSet<>();
    this.actors.addAll(workerActors.values());
    this.actors.addAll(tokenActors.values());
    this.storage = new HashSet<>(storageTable.values());

    this.config = config;
    this.maxNumCores = maxNumCores;
    this.initialState = initialState;
    ImmutableMap.Builder<Token, ImmutableList<Object>> initialStateDataMapBuilder = ImmutableMap.builder();
    if (initialState != null) {
        for (Table.Cell<Actor, Actor, Storage> cell : storageTable.cellSet()) {
            Token tok;
            if (cell.getRowKey() instanceof TokenActor)
                tok = ((TokenActor) cell.getRowKey()).token();
            else if (cell.getColumnKey() instanceof TokenActor)
                tok = ((TokenActor) cell.getColumnKey()).token();
            else
                tok = new Token(((WorkerActor) cell.getRowKey()).worker(),
                        ((WorkerActor) cell.getColumnKey()).worker());
            ImmutableList<Object> data = initialState.getData(tok);
            if (data != null && !data.isEmpty()) {
                initialStateDataMapBuilder.put(tok, data);
                cell.getValue().initialData().add(Pair.make(data, IndexFunction.identity()));
            }
        }
    }
    this.initialStateDataMap = initialStateDataMapBuilder.build();
    this.overallInput = input;
    this.overallOutput = output;
}

From source file: co.turnus.profiling.impl.ProfilingWeightsImpl.java
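
A compact conversion method: nested loops over actors and their actions fill a Table<Actor, Action, ActionProfilingWeights> for two-key lookup.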

@Override
public Table<Actor, Action, ActionProfilingWeights> asTable() {
    Table<Actor, Action, ActionProfilingWeights> table = HashBasedTable.create();
    for (ActorProfilingWeights actorWeights : getActorsWeights()) {
        for (ActionProfilingWeights actionWeights : actorWeights.getActionsWeights()) {
            table.put(actorWeights.getActor(), actionWeights.getAction(), actionWeights);
        }
    }
    return table;
}

From source file: i5.las2peer.services.recommender.librec.rating.TimeComNeighSVD.java
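
This model initialization creates several tables: But holds what appear to be time-dependent bias terms, and further down, Integer-keyed tables such as communityRatingsTable and communityTimeTable accumulate sparse entries that are then passed to the SparseMatrix constructor.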

@Override
protected void initModel() throws Exception {
    super.initModel();

    minTrainTimestamp = Long.MAX_VALUE;
    maxTrainTimestamp = Long.MIN_VALUE;
    for (MatrixEntry e : trainMatrix) {
        long t = (long) timeMatrix.get(e.row(), e.column());
        if (t < minTrainTimestamp)
            minTrainTimestamp = t;
        if (t > maxTrainTimestamp)
            maxTrainTimestamp = t;
    }
    numDays = days(maxTrainTimestamp, minTrainTimestamp) + 1;

    userBias = new DenseVector(numUsers);
    userBias.init(initMean, initStd);

    itemBias = new DenseVector(numItems);
    itemBias.init(initMean, initStd);

    Alpha = new DenseVector(numUsers);
    Alpha.init(initMean, initStd);

    Bit = new DenseMatrix(numItems, numBins);
    Bit.init(initMean, initStd);

    Bipt = new DenseMatrix(numItems, 7);
    Bipt.init(initMean, initStd);

    Y = new DenseMatrix(numItems, numFactors);
    Y.init(initMean, initStd);

    Auk = new DenseMatrix(numUsers, numFactors);
    Auk.init(initMean, initStd);

    But = HashBasedTable.create();

    Bupt = new DenseMatrix(numUsers, 7);
    Bupt.init(initMean, initStd);

    Pukt = new HashMap<>();

    Cu = new DenseVector(numUsers);
    Cu.init(initMean, initStd);

    Cut = new DenseMatrix(numUsers, numDays);
    Cut.init(initMean, initStd);

    W = new DenseMatrix(numItems, numItems);
    W.init(initMean, initStd);

    C = new DenseMatrix(numItems, numItems);
    C.init(initMean, initStd);

    Phi = new DenseVector(numUsers);
    Phi.init(0.01);

    // cache
    userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);

    // global average date
    double sum = 0;
    int cnt = 0;
    for (MatrixEntry me : trainMatrix) {
        int u = me.row();
        int i = me.column();
        double rui = me.get();

        if (rui <= 0)
            continue;

        sum += days((long) timeMatrix.get(u, i), minTrainTimestamp);
        cnt++;
    }
    globalMeanDate = sum / cnt;

    // compute users' mean rating timestamps
    userMeanDate = new DenseVector(numUsers);
    List<Integer> Ru = null;
    for (int u = 0; u < numUsers; u++) {

        sum = 0;
        Ru = userItemsCache.get(u);
        for (int i : Ru) {
            sum += days((long) timeMatrix.get(u, i), minTrainTimestamp);
        }

        double mean = (Ru.size() > 0) ? (sum + 0.0) / Ru.size() : globalMeanDate;
        userMeanDate.set(u, mean);
    }

    // build user and item graphs
    Logs.info("{}{} build user and item graphs ...", new Object[] { algoName, foldInfo });
    SparseMatrix[] userMatrix = new SparseMatrix[numCBins + 1];
    SparseMatrix[] itemMatrix = new SparseMatrix[numCBins + 1];
    GraphBuilder gb = new GraphBuilder();
    gb.setMethod(graphMethod);
    gb.setK(knn);
    gb.setSimilarityMeasure(sim);

    gb.setTaggingData(userTagTable, itemTagTable);
    gb.setRatingData(trainMatrix);
    gb.buildGraphs();
    userMatrix[0] = gb.getUserAdjacencyMatrix();
    itemMatrix[0] = gb.getItemAdjacencyMatrix();

    if (numCBins > 1) {
        SparseMatrix[] trainMatrixCBin = trainDataCBins();
        List<Table<Integer, Integer, Set<Long>>> userTagTableCBin = null;
        List<Table<Integer, Integer, Set<Long>>> itemTagTableCBin = null;
        if (graphMethod == GraphConstructionMethod.TAGS) {
            userTagTableCBin = tagDataCBins(userTagTable);
            itemTagTableCBin = tagDataCBins(itemTagTable);
        }

        for (int cbin = 1; cbin <= numCBins; cbin++) {
            if (graphMethod == GraphConstructionMethod.TAGS) {
                gb.setTaggingData(userTagTableCBin.get(cbin - 1), itemTagTableCBin.get(cbin - 1));
            } else {
                gb.setRatingData(trainMatrixCBin[cbin - 1]);
            }
            gb.buildGraphs();
            userMatrix[cbin] = gb.getUserAdjacencyMatrix();
            itemMatrix[cbin] = gb.getItemAdjacencyMatrix();
        }
    }
    gb = null;

    // detect communities
    Logs.info("{}{} detect communities ...", new Object[] { algoName, foldInfo });
    userMemberships = new SparseMatrix[numCBins + 1];
    itemMemberships = new SparseMatrix[numCBins + 1];
    userCommunitiesCache = new ArrayList<LoadingCache<Integer, List<Integer>>>(numCBins + 1);
    itemCommunitiesCache = new ArrayList<LoadingCache<Integer, List<Integer>>>(numCBins + 1);
    numUserCommunities = new int[numCBins + 1];
    numItemCommunities = new int[numCBins + 1];
    CommunityDetector cd = new CommunityDetector();
    cd.setAlgorithm(cdAlgo);
    if (cdAlgo == CommunityDetectionAlgorithm.WALKTRAP)
        cd.setWalktrapParameters(wtSteps);
    for (int cbin = 0; cbin <= numCBins; cbin++) {
        if (numCBins == 1 && cbin == 1) {
            // if we use only one bin, there is no need to detect communities again
            userMemberships[cbin] = userMemberships[0];
            itemMemberships[cbin] = itemMemberships[0];
        } else {
            cd.setGraph(userMatrix[cbin]);
            cd.detectCommunities();
            userMemberships[cbin] = cd.getMemberships();
            cd.setGraph(itemMatrix[cbin]);
            cd.detectCommunities();
            itemMemberships[cbin] = cd.getMemberships();
        }
        if (maxOC > 0) {
            Logs.info("{}{} reduce community memberships to max. {} communities per user/item ...",
                    new Object[] { algoName, foldInfo, maxOC });
            userMemberships[cbin] = Communities.limitOverlappingCommunities(userMemberships[cbin], maxOC);
            itemMemberships[cbin] = Communities.limitOverlappingCommunities(itemMemberships[cbin], maxOC);
        }
        userCommunitiesCache.add(cbin, userMemberships[cbin].rowColumnsCache(cacheSpec));
        numUserCommunities[cbin] = userMemberships[cbin].numColumns();
        itemCommunitiesCache.add(cbin, itemMemberships[cbin].rowColumnsCache(cacheSpec));
        numItemCommunities[cbin] = itemMemberships[cbin].numColumns();
    }
    userMatrix = null;
    itemMatrix = null;
    cd = null;

    logCommunityInfo();

    // compute user communities' average ratings for each item
    communityRatingsMatrix = new SparseMatrix[numCBins + 1];
    communityTimeMatrix = new SparseMatrix[numCBins + 1];
    communityMeanDate = new DenseVector[numCBins + 1];
    for (int cbin = 0; cbin <= numCBins; cbin++) {
        communityMeanDate[cbin] = new DenseVector(numUserCommunities[cbin]);
        Table<Integer, Integer, Double> communityRatingsTable = HashBasedTable.create();
        Table<Integer, Integer, Double> communityTimeTable = HashBasedTable.create();
        for (int community = 0; community < numUserCommunities[cbin]; community++) {
            // each user's membership level for the community
            SparseVector communityUsersVector = userMemberships[cbin].column(community);
            // build set of items that have been rated by members of the community
            HashSet<Integer> items = new HashSet<Integer>();
            for (VectorEntry e : communityUsersVector) {
                int user = e.index();
                List<Integer> userItems = userItemsCache.get(user);
                for (int item : userItems)
                    items.add(item);
            }
            // to compute mean rating times for each community, keep track of the total time and the number of ratings given
            double communityTimeSum = 0;
            int ratingsCount = 0;
            for (int item : items) {
                // Sum of ratings given by users of the community to the item, weighted by the users' community membership levels
                double ratingsSum = 0;
                double communityItemTimeSum = 0;
                double membershipsSum = 0;
                // Each user's rating for the item
                SparseVector itemUsersVector = trainMatrix.column(item);
                for (VectorEntry e : communityUsersVector) {
                    int user = e.index();
                    if (itemUsersVector.contains(user)) {
                        double muc = userMemberships[cbin].get(user, community);
                        double rui = itemUsersVector.get(user);
                        double tui = timeMatrix.get(user, item);
                        ratingsSum += rui * muc;
                        communityItemTimeSum += tui * muc;
                        membershipsSum += muc;
                        communityTimeSum += days((long) timeMatrix.get(user, item), minTrainTimestamp);
                        ratingsCount++;
                    }
                }
                if (membershipsSum > 0) {
                    double communityRating = ratingsSum / membershipsSum;
                    double communityTime = communityItemTimeSum / membershipsSum;
                    communityRatingsTable.put(community, item, communityRating);
                    communityTimeTable.put(community, item, communityTime);
                }
            }
            double meanTime = (ratingsCount > 0) ? (communityTimeSum) / ratingsCount : globalMeanDate;
            communityMeanDate[cbin].set(community, meanTime);
        }
        communityRatingsMatrix[cbin] = new SparseMatrix(numUserCommunities[cbin], numItems,
                communityRatingsTable);
        communityTimeMatrix[cbin] = new SparseMatrix(numUserCommunities[cbin], numItems, communityTimeTable);
        int numRatingsPerCommunity = communityRatingsMatrix[cbin].size()
                / communityRatingsMatrix[cbin].numRows();
        Logs.info("{}{} Community Ratings: Number of communities: {}, Avg. number of ratings per community: {}",
                algoName, foldInfo, communityRatingsMatrix[cbin].numRows(), numRatingsPerCommunity);
    }

    // compute each user's communities' average rating for each item
    userCommunitiesRatingsMatrix = new SparseMatrix[numCBins + 1];
    userCommunitiesTimeMatrix = new SparseMatrix[numCBins + 1];
    userCommunitiesItemsCache = new ArrayList<LoadingCache<Integer, List<Integer>>>(numCBins + 1);
    for (int cbin = 0; cbin <= numCBins; cbin++) {
        Table<Integer, Integer, Double> userCommunitiesRatingsTable = HashBasedTable.create();
        Table<Integer, Integer, Double> userCommunitiesTimeTable = HashBasedTable.create();
        for (int user = 0; user < numUsers; user++) {
            List<Integer> userCommunities;
            userCommunities = userCommunitiesCache.get(cbin).get(user);

            int[] topKItems = new int[communitiesItemsK];
            double[] topKItemsMemberships = new double[communitiesItemsK];
            double[] topKItemsRatings = new double[communitiesItemsK];
            double[] topKItemsTime = new double[communitiesItemsK];
            for (int i = 0; i < communitiesItemsK; i++) {
                topKItems[i] = -1;
            }
            // position of the item with the lowest membership level in the top-k array
            int minItemPos = 0;
            // membership level of that item
            double minMembership = 0;

            for (int item = 0; item < numItems; item++) {
                double ratingsSum = 0;
                double timeSum = 0;
                double membershipsSum = 0;
                for (int community : userCommunities) {
                    double communityRating = communityRatingsMatrix[cbin].get(community, item);
                    double communityTime = communityTimeMatrix[cbin].get(community, item);
                    double userMembership = userMemberships[cbin].get(user, community);
                    ratingsSum += communityRating * userMembership;
                    timeSum += communityTime * userMembership;
                    membershipsSum += userMembership;
                }
                if (ratingsSum > 0 && membershipsSum > minMembership) {
                    topKItems[minItemPos] = item;
                    topKItemsMemberships[minItemPos] = membershipsSum;
                    topKItemsRatings[minItemPos] = ratingsSum;
                    topKItemsTime[minItemPos] = timeSum;
                    // find item with lowest membership level in the array
                    minMembership = membershipsSum;
                    for (int i = 0; i < communitiesItemsK; i++) {
                        if (topKItemsMemberships[i] < minMembership) {
                            minItemPos = i;
                            minMembership = topKItemsMemberships[i];
                        }
                    }
                }
            }
            // fill top-k items into table
            for (int i = 0; i < communitiesItemsK; i++) {
                if (topKItems[i] >= 0) {
                    int item = topKItems[i];
                    double userCommunitiesRating = topKItemsRatings[i] / topKItemsMemberships[i];
                    double userCommunitiesTime = topKItemsTime[i] / topKItemsMemberships[i];
                    userCommunitiesRatingsTable.put(user, item, userCommunitiesRating);
                    userCommunitiesTimeTable.put(user, item, userCommunitiesTime);
                }
            }
        }
        userCommunitiesRatingsMatrix[cbin] = new SparseMatrix(numUsers, numItems, userCommunitiesRatingsTable);
        userCommunitiesTimeMatrix[cbin] = new SparseMatrix(numUsers, numItems, userCommunitiesTimeTable);
        userCommunitiesItemsCache.add(cbin, userCommunitiesRatingsMatrix[cbin].rowColumnsCache(cacheSpec));
        int numRatingsPerUser = userCommunitiesRatingsMatrix[cbin].size()
                / userCommunitiesRatingsMatrix[cbin].numRows();
        Logs.info(
                "{}{} User Communities Ratings: Number of users: {}, Avg. number of community ratings per user: {}",
                algoName, foldInfo, userCommunitiesRatingsMatrix[cbin].numRows(), numRatingsPerUser);
    }

    // initialize community-related model parameters
    AlphaC = new DenseVector(numUserCommunities[0]);
    AlphaC.init(initMean, initStd);
    D = new DenseMatrix(numItems, numItems);
    D.init(initMean, initStd);
    Psi = new DenseVector(numUsers);
    Psi.init(0.01);

    BCu = new DenseVector[numCBins + 1];
    BCut = new ArrayList<Table<Integer, Integer, Double>>(numCBins + 1);
    BCi = new DenseVector[numCBins + 1];
    BCit = new DenseMatrix[numCBins + 1];
    OCi = new DenseMatrix[numCBins + 1];
    OCu = new DenseMatrix[numCBins + 1];
    OCut = new ArrayList<Map<Integer, Table<Integer, Integer, Double>>>(numCBins + 1);
    ACu = new DenseMatrix(numUserCommunities[0], numFactors);
    ACu.init(initMean, initStd);
    Z = new DenseMatrix(numItems, numItems);
    Z.init(initMean, initStd);
    for (int cbin = 0; cbin <= numCBins; cbin++) {
        BCu[cbin] = new DenseVector(numUserCommunities[cbin]);
        BCu[cbin].init(initMean, initStd);
        BCut.add(cbin, HashBasedTable.create());
        BCi[cbin] = new DenseVector(numItemCommunities[cbin]);
        BCi[cbin].init(initMean, initStd);
        BCit[cbin] = new DenseMatrix(numItemCommunities[cbin], numBins);
        BCit[cbin].init(initMean, initStd);
        OCi[cbin] = new DenseMatrix(numItemCommunities[cbin], numFactors);
        OCi[cbin].init(initMean, initStd);
        OCu[cbin] = new DenseMatrix(numUserCommunities[cbin], numFactors);
        OCu[cbin].init(initMean, initStd);
        OCut.add(cbin, new HashMap<>());
    }
}

From source file: org.opennms.netmgt.bsm.vaadin.adminpage.BusinessServiceTreeTable.java
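
Here a Table<Long, Optional<Long>, Boolean> records the collapse state of each tree row, keyed by business service id and optional parent id.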

private com.google.common.collect.Table<Long, Optional<Long>, Boolean> getCurrentExpandState() {
    // Gather the current collapse state
    final com.google.common.collect.Table<Long, Optional<Long>, Boolean> collapseState = HashBasedTable
            .create();
    for (Object itemId : getItemIds()) {
        final BusinessServiceRow row = getItem(itemId).getBean();
        collapseState.put(row.getBusinessService().getId(),
                Optional.ofNullable(row.getParentBusinessServiceId()), isCollapsed(itemId));
    }
    return collapseState;
}

From source file: org.mousephenotype.dcc.exportlibrary.exporter.dbloading.Loader.java
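
In this example the table acts as a typed parameter map for native queries: each cell holds a parameter name, its Java type, and its value, and the specimenHJID cell is overwritten on every loop iteration.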

public CentreSpecimenSet getSingleColonyID() throws ConfigurationException, HibernateException {
    String printFile = FileReader.printFile(SINGLE_COLONYID);
    CentreSpecimenSet centreSpecimenSet = new CentreSpecimenSet();
    List<Specimen> specimens = this.hibernateManager.nativeQuery(printFile, Specimen.class);
    logger.trace("{} specimens retrieved", specimens == null ? 0 : specimens.size());
    if (specimens != null && !specimens.isEmpty()) {
        CentreSpecimen aux = null;
        Table<String, Class, Object> parameters = HashBasedTable.create();

        Map<String, org.hibernate.type.Type> scalars = ImmutableMap.<String, org.hibernate.type.Type>builder()
                .put("centreID", StringType.INSTANCE).build();
        logger.trace("linking to ");
        for (Specimen specimen : specimens) {
            parameters.put("specimenHJID", Long.class, specimen.getHjid());
            List<String> nativeQuery = this.hibernateManager.nativeQuery(
                    "select CENTRESPECIMEN.CENTREID as centreID from phenodcc_raw.CENTRESPECIMEN join phenodcc_raw.SPECIMEN on CENTRESPECIMEN.HJID = SPECIMEN.MOUSEOREMBRYO_CENTRESPECIMEN_0 where SPECIMEN.HJID = :specimenHJID",
                    scalars, parameters);
            if (nativeQuery != null && !nativeQuery.isEmpty()) {
                logger.trace("{} centre for specimenID {}", nativeQuery.get(0), specimen.getSpecimenID());
                aux = this.getCentreSpecimen(centreSpecimenSet, CentreILARcode.valueOf(nativeQuery.get(0)));
                if (aux == null) {
                    aux = new CentreSpecimen();
                    aux.setCentreID(CentreILARcode.valueOf(nativeQuery.get(0)));
                    centreSpecimenSet.getCentre().add(aux);
                }
                aux.getMouseOrEmbryo().add(specimen);
            } else {
                logger.error("specimen HJID {} is not part of a centreSpecimen", specimen.getHjid());
            }
        }
    }
    return centreSpecimenSet;
}

From source file: i5.las2peer.services.recommender.librec.data.DataSplitter.java
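
The dataTable collects the held-out (user, item, rating) entries; together with colMap it is passed to the SparseMatrix constructor to build the test matrix.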

/**
 * Split ratings into two parts where one rating per user is preserved as the test set and the remaining data as the
 * training set.
 * 
 */
public SparseMatrix[] getLOOByUser(boolean isByDate, SparseMatrix timestamps) throws Exception {

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);

    // for building test matrix
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {

        List<Integer> items = rateMatrix.getColumns(u);
        int i = -1;

        if (!isByDate) {
            // by random
            int randIdx = (int) (items.size() * Math.random());
            i = items.get(randIdx);
        } else {
            // by date
            List<RatingContext> rcs = new ArrayList<>();
            for (int j : items) {
                rcs.add(new RatingContext(u, j, (long) timestamps.get(u, j)));
            }
            Collections.sort(rcs);

            i = rcs.get(rcs.size() - 1).getItem(); // most recent item
        }

        trainMatrix.set(u, i, 0); // remove from training

        dataTable.put(u, i, rateMatrix.get(u, i));
        colMap.put(i, u);
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);

    // build test matrix
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix.numRows, rateMatrix.numColumns, dataTable, colMap);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}

From source file: eu.lp0.cursus.xml.scores.XMLScores.java
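
Note the use of row(...) as a live view: writing through raceScores.row(...) stores the entry directly in the backing table.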

private void extractRaceResults() {
    for (ScoresXMLRaceResults raceResult : scoresXML.getRaceResults()) {
        Race race = dereference(raceResult);
        resultsRaces.put(raceResult, race);

        Table<Pilot, Race, ScoresXMLRaceScore> raceScores = HashBasedTable.create();
        for (ScoresXMLRaceScore raceScore : raceResult.getRacePilots()) {
            raceScores.row(dereference(raceScore)).put(race, raceScore);
        }
        resultsPilotRaceScores.put(raceResult, raceScores);

        for (ScoresXMLOverallScore overallScore : raceResult.getOverallPilots()) {
            Pilot pilot = dereference(overallScore);
            resultsPilotOverallScores.row(raceResult).put(pilot, overallScore);
            resultsPilots.put(raceResult, pilot);
        }
    }
}

From source file: net.librec.data.convertor.ArffDataConvertor.java
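
Here dataTable gathers the non-zero entries of the one-hot encoded feature matrix, with rows indexed by instance and columns by encoded attribute, before the SparseMatrix is constructed.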

/**
 * Build the {@link #oneHotFeatureMatrix}
 * and {@link #oneHotRatingVector}.
 */
public void oneHotEncoding() {
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    int numRows = instances.size();
    int numCols = 0;
    int numAttrs = attributes.size();

    double[] ratings = new double[numRows];

    // set numCols
    for (int i = 0; i < attributes.size(); i++) {
        // skip rating column
        if (i == ratingCol)
            continue;

        ArffAttribute attr = attributes.get(i);
        numCols += attr.getColumnSet().size() == 0 ? 1 : attr.getColumnSet().size();
    }

    // build one-hot encoding matrix
    for (int row = 0; row < numRows; row++) {
        ArffInstance instance = instances.get(row);
        int colPrefix = 0;
        int col = 0;
        for (int i = 0; i < numAttrs; i++) {
            String type = attrTypes.get(i);
            Object val = instance.getValueByIndex(i);

            // rating column
            if (i == ratingCol) {
                ratings[row] = (double) val;
                continue;
            }

            // appender column
            switch (type) {
            case "NUMERIC":
            case "REAL":
            case "INTEGER":
                col = colPrefix;
                dataTable.put(row, col, (double) val);
                colMap.put(col, row);
                colPrefix += 1;
                break;
            case "STRING":
                col = colPrefix + columnIds.get(i).get(val);
                dataTable.put(row, col, 1d);
                colMap.put(col, row);
                colPrefix += columnIds.get(i).size();
                break;
            case "NOMINAL":
                for (String v : (ArrayList<String>) val) {
                    col = colPrefix + columnIds.get(i).get(v);
                    colMap.put(col, row);
                    dataTable.put(row, col, 1d);
                }
                colPrefix += columnIds.get(i).size();
                break;
            }
        }
    }
    oneHotFeatureMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
    oneHotRatingVector = new DenseVector(ratings);

    // release memory
    dataTable = null;
    colMap = null;
}

From source file: es.upm.dit.xsdinferencer.extraction.extractorImpl.TypesExtractorImpl.java
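
Two tables, elements and attributes, are created here as two-key lookup structures for the extracted schema information.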

/**
 * Method that actually initializes the data. This is code that would normally go in a constructor; it has been
 * separated so that subclasses can initialize parameters after performing their own setup tasks. Consequently, if a
 * subclass constructor invokes the empty {@link TypesExtractorImpl#TypesExtractorImpl()} constructor, it must call
 * this method at some point.
 * 
 * @param xmlDocuments A list of all the input XML Documents, as JDOM2 {@link Document} objects.
 * @param configuration the inference configuration
 * @param inferencersFactory {@link InferencersFactory} used to build {@link AttributeListInferencer} and {@link SimpleTypeInferencer} objects used.
 */
protected void initializeData(List<Document> xmlDocuments, XSDInferenceConfiguration configuration,
        InferencersFactory inferencersFactory) {
    checkNotNull(xmlDocuments, "'xmlDocuments' must not be null");
    checkNotNull(configuration, "'configuration' must not be null");
    this.xmlDocuments = xmlDocuments;
    this.configuration = configuration;
    this.simpleTypeInferencersOfComplexTypes = new HashMap<String, SimpleTypeInferencer>();
    this.attributeListInferencers = new HashMap<String, AttributeListInferencer>();
    this.automatons = new HashMap<String, ExtendedAutomaton>();
    this.statistics = new Statistics(xmlDocuments.size());
    this.elements = HashBasedTable.create();
    this.complexTypes = new HashMap<>();
    this.simpleTypes = new HashMap<String, SimpleType>();
    this.attributes = HashBasedTable.create();
    this.prefixNamespaceMapping = new TreeMap<String, SortedSet<String>>();
    this.inferencersFactory = inferencersFactory;
}

From source file: co.turnus.profiling.impl.ProfilingDataImpl.java
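
The same conversion pattern as ProfilingWeightsImpl above: nested loops over actors and actions populate a Table<Actor, Action, ActionProfilingData>.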

@Override
public Table<Actor, Action, ActionProfilingData> asTable() {
    Table<Actor, Action, ActionProfilingData> table = HashBasedTable.create();
    for (ActorProfilingData actorData : getActorsData()) {
        for (ActionProfilingData actionData : actorData.getActionsData()) {
            table.put(actorData.getActor(), actionData.getAction(), actionData);
        }
    }

    return table;
}