Example usage for com.google.common.collect Table clear

List of usage examples for com.google.common.collect Table clear

Introduction

On this page you can find example usages of com.google.common.collect Table clear.

Prototype

void clear();

Document

Removes all mappings from the table.
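
Before the full examples below, here is a minimal, self-contained sketch of the call. The class name, keys, and values are illustrative assumptions, not taken from the usage examples that follow.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TableClearSketch {
    public static void main(String[] args) {
        // A small (row, column) -> value table; the keys and values are made up for illustration.
        Table<String, String, Integer> scores = HashBasedTable.create();
        scores.put("alice", "math", 90);
        scores.put("bob", "math", 75);
        System.out.println(scores.size()); // 2

        // Removes all mappings from the table.
        scores.clear();
        System.out.println(scores.isEmpty()); // true
    }
}

After clear() the table is empty but still usable, which is why the examples below typically reuse it as a scratch structure.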

Usage

From source file: com.torodb.d2r.R2DTranslatorImpl.java

@Override
public List<ToroDocument> translate(Iterator<DocPartResult> docPartResultIt) {
    ImmutableList.Builder<ToroDocument> readedDocuments = ImmutableList.builder();

    Table<TableRef, Integer, Map<String, List<KvValue<?>>>> currentFieldDocPartTable = HashBasedTable
            .<TableRef, Integer, Map<String, List<KvValue<?>>>>create();
    Table<TableRef, Integer, Map<String, List<KvValue<?>>>> childFieldDocPartTable = HashBasedTable
            .<TableRef, Integer, Map<String, List<KvValue<?>>>>create();

    int previousDepth = -1;

    while (docPartResultIt.hasNext()) {
        DocPartResult docPartResult = docPartResultIt.next();
        MetaDocPart metaDocPart = docPartResult.getMetaDocPart();
        TableRef tableRef = metaDocPart.getTableRef();

        if (previousDepth != -1 && previousDepth != tableRef.getDepth()) {
            Table<TableRef, Integer, Map<String, List<KvValue<?>>>> previousFieldChildDocPartTable = childFieldDocPartTable;
            childFieldDocPartTable = currentFieldDocPartTable;
            currentFieldDocPartTable = previousFieldChildDocPartTable;

            if (!tableRef.isRoot()) {
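                // the swapped-in table is reused for the new depth level, so discard its previous contents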
                currentFieldDocPartTable.clear();
            }
        }
        previousDepth = tableRef.getDepth();

        Map<Integer, Map<String, List<KvValue<?>>>> childFieldDocPartRow = childFieldDocPartTable.row(tableRef);
        Map<Integer, Map<String, List<KvValue<?>>>> currentFieldDocPartRow;

        if (tableRef.isRoot()) {
            currentFieldDocPartRow = null;
        } else {
            currentFieldDocPartRow = currentFieldDocPartTable.row(tableRef.getParent().get());
        }

        readResult(metaDocPart, tableRef, docPartResult, currentFieldDocPartRow, childFieldDocPartRow,
                readedDocuments);
    }

    return readedDocuments.build();
}

From source file: co.turnus.analysis.bottlenecks.AlgorithmicBottlenecks.java

private ImpactData impactAnalysis(HotspotsDataAnalyser hsdAnalyzer) {
    BottlenecksFactory f = BottlenecksFactory.eINSTANCE;
    ImpactData impactData = f.createImpactData();

    boolean actorLevel = getOption(IMPACT_ANALYSIS_ACTORLEVEL, false);
    impactData.setActorsLevel(actorLevel);

    int actions = getOption(IMPACT_ANALYSIS_ACTIONS, 4);
    int grid = getOption(IMPACT_ANALYSIS_POINTS, 10);
    int points[] = AnalysisUtil.linspacei(0, 100, grid + 1);

    int iterations = actions * grid;
    int iteration = 1;

    double cpVal = hsdAnalyzer.getSumData().getCriticalExec().getClockCyclesMean();

    Table<String, String, Double> rtTable = HashBasedTable.create();
    if (actorLevel) {
        TurnusLogger.info("Actor  Level, for " + actions + " actions. Required Iterations: " + iterations);

        Table<Actor, Action, ExtendExecData> table = hsdAnalyzer.getSumDataTable(Actor.class,
                Key.CRITICAL_CLOCKCYCLES, Order.DECREASING);
        int analysedActions = 0;
        for (Cell<Actor, Action, ExtendExecData> entry : table.cellSet()) {
            Actor actor = entry.getRowKey();
            Action action = entry.getColumnKey();

            ActionImpactData impactStep = f.createActionImpactData();
            impactStep.setAction(action);
            impactStep.getActors().add(actor);

            for (int i = 1; i < points.length; i++) {
                double ratio = (100.0 - points[i]) / 100.0;
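                // start each measurement from an empty table so only the current actor/action ratio is applied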
                rtTable.clear();
                rtTable.put(actor.getId(), action.getId(), ratio);

                TurnusLogger.info("iteration " + iteration++ + " of " + iterations);
                TurnusLogger.debug(
                        "analysisng actor: " + actor.getId() + ", " + action.getId() + ", ratio " + ratio);
                double[] cp = measure(rtTable);

                double cplr = (1 - cp[MEAN] / (cpVal)) * 100;
                double wlr = (1 - ratio) * 100;
                TurnusLogger.debug("Reduction: " + cplr + ", at " + wlr);

                impactStep.getWlReduction().add(wlr);
                impactStep.getCplReduction().add(cplr);

            }

            impactData.getActionsData().add(impactStep);

            analysedActions++;
            if (analysedActions == actions) {
                break;
            }
        }
    } else {
        TurnusLogger.info("Actor  Level, for " + actions + " actions. Required Iterations: " + iterations);

        Table<ActorClass, Action, ExtendExecData> table = hsdAnalyzer.getSumDataTable(ActorClass.class,
                Key.CRITICAL_CLOCKCYCLES, Order.DECREASING);

        int analysedActions = 0;
        for (Cell<ActorClass, Action, ExtendExecData> entry : table.cellSet()) {

            Action action = entry.getColumnKey();
            Collection<Actor> actors = action.getOwner().getActors();

            ActionImpactData impactStep = f.createActionImpactData();
            impactStep.setAction(action);
            impactStep.getActors().addAll(actors);

            for (int i = 1; i < points.length; i++) {
                double ratio = (100.0 - points[i]) / 100.0;
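                // reset the table before populating it with the ratio for every actor of this class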
                rtTable.clear();
                for (Actor actor : actors) {
                    rtTable.put(actor.getId(), action.getId(), ratio);
                }

                TurnusLogger.info("iteration " + iteration++ + " of " + iterations);
                TurnusLogger.debug(
                        "analysisng actor-class: " + action.getOwner().getName() + ", " + action.getId());
                double[] cp = measure(rtTable);

                double cplr = (1 - cp[MEAN] / (cpVal)) * 100;
                double wlr = (1 - ratio) * 100;
                TurnusLogger.debug("Reduction: " + cplr + ", at " + wlr);

                impactStep.getWlReduction().add(wlr);
                impactStep.getCplReduction().add(cplr);

            }

            impactData.getActionsData().add(impactStep);

            analysedActions++;
            if (analysedActions == actions) {
                break;
            }
        }
    }

    return impactData;
}

From source file: com.inmobi.conduit.local.LocalStreamService.java

private void checkPoint(Table<String, String, String> checkPointPaths) throws Exception {
    Set<String> streams = checkPointPaths.rowKeySet();
    for (String streamName : streams) {
        Map<String, String> collectorCheckpointValueMap = checkPointPaths.row(streamName);
        for (String collector : collectorCheckpointValueMap.keySet()) {
            String checkpointKey = getCheckPointKey(getClass().getSimpleName(), streamName, collector);
            LOG.debug("Check Pointing Key [" + checkpointKey + "] with value ["
                    + collectorCheckpointValueMap.get(collector) + "]");
            retriableCheckPoint(checkpointProvider, checkpointKey,
                    collectorCheckpointValueMap.get(collector).getBytes(), streamName);
        }
    }
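    // every entry has been checkpointed, so the buffered paths can be dropped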
    checkPointPaths.clear();
}

From source file: org.apache.sentry.provider.file.SimpleFileProviderBackend.java

private void parse() {
    configErrors.clear();
    configWarnings.clear();
    Table<String, String, Set<String>> groupRolePrivilegeTableTemp = HashBasedTable.create();
    Ini ini;
    LOGGER.info("Parsing " + resourcePath);
    LOGGER.info("Filesystem: " + fileSystem.getUri());
    try {
        try {
            ini = PolicyFiles.loadFromPath(fileSystem, resourcePath);
        } catch (IOException e) {
            configErrors.add("Failed to read policy file " + resourcePath + " Error: " + e.getMessage());
            throw new SentryConfigurationException("Error loading policy file " + resourcePath, e);
        } catch (IllegalArgumentException e) {
            configErrors.add("Failed to read policy file " + resourcePath + " Error: " + e.getMessage());
            throw new SentryConfigurationException("Error loading policy file " + resourcePath, e);
        }

        if (LOGGER.isDebugEnabled()) {
            for (String sectionName : ini.getSectionNames()) {
                LOGGER.debug("Section: " + sectionName);
                Ini.Section section = ini.get(sectionName);
                for (String key : section.keySet()) {
                    String value = section.get(key);
                    LOGGER.debug(key + " = " + value);
                }
            }
        }
        parseIni(null, ini, validators, resourcePath, groupRolePrivilegeTableTemp);
        mergeResult(groupRolePrivilegeTableTemp);
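        // the parsed entries have been merged, so reuse the temporary table for the per-database sections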
        groupRolePrivilegeTableTemp.clear();
        Ini.Section filesSection = ini.getSection(DATABASES);
        if (filesSection == null) {
            LOGGER.info("Section " + DATABASES + " needs no further processing");
        } else if (!allowPerDatabaseSection) {
            String msg = "Per-db policy file is not expected in this configuration.";
            throw new SentryConfigurationException(msg);
        } else {
            for (Map.Entry<String, String> entry : filesSection.entrySet()) {
                String database = Strings.nullToEmpty(entry.getKey()).trim().toLowerCase();
                Path perDbPolicy = new Path(Strings.nullToEmpty(entry.getValue()).trim());
                if (isRelative(perDbPolicy)) {
                    perDbPolicy = new Path(resourcePath.getParent(), perDbPolicy);
                }
                try {
                    LOGGER.debug("Parsing " + perDbPolicy);
                    Ini perDbIni = PolicyFiles.loadFromPath(perDbPolicy.getFileSystem(conf), perDbPolicy);
                    if (perDbIni.containsKey(USERS)) {
                        configErrors.add(
                                "Per-db policy file cannot contain " + USERS + " section in " + perDbPolicy);
                        throw new SentryConfigurationException(
                                "Per-db policy files cannot contain " + USERS + " section");
                    }
                    if (perDbIni.containsKey(DATABASES)) {
                        configErrors.add("Per-db policy files cannot contain " + DATABASES + " section in "
                                + perDbPolicy);
                        throw new SentryConfigurationException(
                                "Per-db policy files cannot contain " + DATABASES + " section");
                    }
                    parseIni(database, perDbIni, validators, perDbPolicy, groupRolePrivilegeTableTemp);
                } catch (Exception e) {
                    configErrors.add(
                            "Failed to read per-DB policy file " + perDbPolicy + " Error: " + e.getMessage());
                    LOGGER.error("Error processing key " + entry.getKey() + ", skipping " + entry.getValue(),
                            e);
                }
            }
        }
        mergeResult(groupRolePrivilegeTableTemp);
        groupRolePrivilegeTableTemp.clear();
    } catch (Exception e) {
        configErrors.add("Error processing file " + resourcePath + e.getMessage());
        LOGGER.error("Error processing file, ignoring " + resourcePath, e);
    }
}

From source file: es.uam.eps.bmi.recommendation.data.MovieRatingData.java

/**
 * Load the data from file.
 *
 * @param dataPath Path to the file to be read.
 * @throws java.io.IOException
 */
@Override
public void load(String dataPath) throws IOException {
    // Table for reading
    Table<Integer, Integer, Double> table = HashBasedTable.create();

    // Read the file.
    BufferedReader reader = new BufferedReader(new FileReader(new File(dataPath)));

    // Read the first line
    String line = reader.readLine();
    // Get headers
    if (line != null) {
        line = reader.readLine();
    }

    // Array indexes
    ArrayList<Integer> userArray = new ArrayList<>();
    ArrayList<Integer> movieArray = new ArrayList<>();
    int userIndex = 0, movieIndex = 0;

    // Read file
    while (line != null) {
        // Split data
        String[] split = line.split("\t");
        // Get data
        int user = Integer.valueOf(split[0]);
        int movie = Integer.valueOf(split[1]);
        double rating = Double.valueOf(split[2]);

        // Store rows and columns IDs.
        if (!userMap.containsKey(user)) {
            userArray.add(user);
            userMap.put(user, userIndex++);
        }
        if (!movieMap.containsKey(movie)) {
            movieArray.add(movie);
            movieMap.put(movie, movieIndex++);
        }

        // Add data
        table.put(user, movie, rating);
        // Read next line
        line = reader.readLine();
    }

    // Create sparse matrix
    this.userID = userArray.toArray(new Integer[userIndex]);
    this.movieID = movieArray.toArray(new Integer[movieIndex]);
    this.ratingData = new SparseDoubleMatrix2D(userIndex, movieIndex);

    // Fill sparse matrix
    table.rowMap().forEach((Integer row, Map<Integer, Double> rowVector) -> {
        rowVector.forEach((Integer column, Double cellData) -> {
            ratingData.set(userMap.get(row), movieMap.get(column), cellData);
        });
    });

    table.clear();
}