Example usage for com.google.common.collect Multimap entries

List of usage examples for com.google.common.collect Multimap entries

Introduction

On this page you can find example usages of com.google.common.collect Multimap entries().

Prototype

Collection<Map.Entry<K, V>> entries();

Document

Returns a view collection of all key-value pairs contained in this multimap, as Map.Entry instances.
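
For orientation before the full project examples below, here is a minimal sketch (the names are illustrative and not taken from any of the projects listed) showing how entries() is typically iterated and that the returned collection is a live view of the multimap:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Map;

public class MultimapEntriesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 12);
        scores.put("bob", 7);

        // entries() exposes every key-value pair, so a key with several values appears once per value.
        for (Map.Entry<String, Integer> e : scores.entries()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }

        // The returned collection is a view: changes to the multimap are reflected in it.
        System.out.println(scores.entries().size()); // prints 3
        scores.put("bob", 9);
        System.out.println(scores.entries().size()); // prints 4
    }
}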

Usage

From source file:fr.ens.biologie.genomique.eoulsan.core.workflow.CommandWorkflow.java

/**
 * Configure the steps of the Workflow.
 * @throws EoulsanException if an error occurs while creating the step
 */
private void configureSteps() throws EoulsanException {

    // Configure all the steps
    for (CommandStep step : this.steps) {
        step.configure();
    }

    Multimap<CommandStep, Requirement> requirements = ArrayListMultimap.create();

    // Get the requirements of all steps
    for (CommandStep step : this.steps) {

        Set<Requirement> stepRequirements = step.getModule().getRequirements();

        if (stepRequirements != null && !stepRequirements.isEmpty()) {
            requirements.putAll(step, stepRequirements);
        }
    }

    int installerCount = 0;
    for (Map.Entry<CommandStep, Requirement> e : requirements.entries()) {

        final String stepId = e.getKey().getId();
        final Requirement r = e.getValue();

        if (r.isAvailable()) {
            getLogger().fine("Requirement found for step \"" + stepId + "\": " + r);
            continue;
        }

        getLogger().fine("Requirement not found for step \"" + stepId + "\": " + r);

        if (!r.isInstallable()) {

            if (r.isOptional()) {
                continue;
            } else {
                throw new EoulsanException(
                        "Requirement for step \"" + e.getKey().getId() + "\" is not available: " + r.getName());
            }
        }

        installerCount++;

        // Create an installer step
        final CommandStep step = new CommandStep(this, r.getName() + "install" + installerCount,
                RequirementInstallerModule.MODULE_NAME, Globals.APP_VERSION.toString(), r.getParameters(),
                false, false, -1, -1, "");

        // Configure the installer step
        step.configure();

        // Add the new step to the workflow
        addStep(indexOfStep(getFirstStep()), step);
    }
}

From source file:com.palantir.atlasdb.keyvalue.partition.map.DynamicPartitionMapImpl.java

/**
 * Copies rows within the specified range from all the tables.
 * @param destKvs
 * @param rangeToCopy
 */
private void copyData(KeyValueService destKvs, RangeRequest rangeToCopy) {
    ImmutableList<PartitionMapService> mapServices = ImmutableList
            .<PartitionMapService>of(InMemoryPartitionMapService.create(this));
    PartitionedKeyValueService pkvs = PartitionedKeyValueService.create(quorumParameters, mapServices);
    for (String tableName : pkvs.getAllTableNames()) {
        // TODO: getRangeOfTimestamps?
        try (ClosableIterator<RowResult<Set<Value>>> allRows = pkvs.getRangeWithHistory(tableName, rangeToCopy,
                Long.MAX_VALUE)) {
            while (allRows.hasNext()) {
                RowResult<Set<Value>> row = allRows.next();
                for (Entry<Cell, Set<Value>> cell : row.getCells()) {

                    Multimap<Cell, Value> rowMap = HashMultimap.create();
                    rowMap.putAll(cell.getKey(), cell.getValue());

                    Multimap<Cell, Long> rowTsMap = HashMultimap.create();
                    for (Entry<Cell, Value> entry : rowMap.entries()) {
                        rowTsMap.put(entry.getKey(), entry.getValue().getTimestamp());
                    }

                    destKvs.putWithTimestamps(tableName, rowMap);
                }
            }
        }
    }
}

From source file:com.palantir.atlasdb.cleaner.Scrubber.java

/**
 * @return number of cells read from _scrub table
 */
private int scrubSomeCells(SortedMap<Long, Multimap<String, Cell>> scrubTimestampToTableNameToCell,
        final TransactionManager txManager, long maxScrubTimestamp) {

    // Don't call expensive toString() if trace logging is off
    if (log.isTraceEnabled()) {
        log.trace("Attempting to scrub cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        int numCells = 0;
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
            numCells += v.size();
        }
        log.info("Attempting to scrub " + numCells + " cells from tables " + tables);
    }

    if (scrubTimestampToTableNameToCell.size() == 0) {
        return 0; // No cells left to scrub
    }

    Multimap<Long, Cell> toRemoveFromScrubQueue = HashMultimap.create();

    int numCellsReadFromScrubTable = 0;
    List<Future<Void>> scrubFutures = Lists.newArrayList();
    for (Map.Entry<Long, Multimap<String, Cell>> entry : scrubTimestampToTableNameToCell.entrySet()) {
        final long scrubTimestamp = entry.getKey();
        final Multimap<String, Cell> tableNameToCell = entry.getValue();

        numCellsReadFromScrubTable += tableNameToCell.size();

        long commitTimestamp = getCommitTimestampRollBackIfNecessary(scrubTimestamp, tableNameToCell);
        if (commitTimestamp >= maxScrubTimestamp) {
            // We cannot scrub this yet because not all transactions can read this value.
            continue;
        } else if (commitTimestamp != TransactionConstants.FAILED_COMMIT_TS) {
            // This is CRITICAL; don't scrub if the hard delete transaction didn't actually finish
            // (we still remove it from the _scrub table with the call to markCellsAsScrubbed though),
            // or else we could cause permanent data loss if the hard delete transaction failed after
            // queuing cells to scrub but before successfully committing
            for (final List<Entry<String, Cell>> batch : Iterables.partition(tableNameToCell.entries(),
                    batchSizeSupplier.get())) {
                final Multimap<String, Cell> batchMultimap = HashMultimap.create();
                for (Entry<String, Cell> e : batch) {
                    batchMultimap.put(e.getKey(), e.getValue());
                }
                scrubFutures.add(exec.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        scrubCells(txManager, batchMultimap, scrubTimestamp,
                                aggressiveScrub ? TransactionType.AGGRESSIVE_HARD_DELETE
                                        : TransactionType.HARD_DELETE);
                        return null;
                    }
                }));
            }
        }
        toRemoveFromScrubQueue.putAll(scrubTimestamp, tableNameToCell.values());
    }

    for (Future<Void> future : scrubFutures) {
        Futures.getUnchecked(future);
    }

    Multimap<Cell, Long> cellToScrubTimestamp = HashMultimap.create();
    scrubberStore.markCellsAsScrubbed(Multimaps.invertFrom(toRemoveFromScrubQueue, cellToScrubTimestamp),
            batchSizeSupplier.get());

    if (log.isTraceEnabled()) {
        log.trace("Finished scrubbing cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
        }
        long minTimestamp = Collections.min(scrubTimestampToTableNameToCell.keySet());
        long maxTimestamp = Collections.max(scrubTimestampToTableNameToCell.keySet());
        log.info("Finished scrubbing " + numCellsReadFromScrubTable + " cells at "
                + scrubTimestampToTableNameToCell.size() + " timestamps (" + minTimestamp + "..." + maxTimestamp
                + ") from tables " + tables);
    }

    return numCellsReadFromScrubTable;
}
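
The markCellsAsScrubbed call above relies on Multimaps.invertFrom, which walks the source multimap's entries() and copies each pair into the destination with key and value swapped. A standalone sketch of the same inversion (the cell names and timestamps are made up for illustration):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;

static Multimap<String, Long> invertExample() {
    Multimap<Long, String> timestampToCell = HashMultimap.create();
    timestampToCell.put(100L, "cell-a");
    timestampToCell.put(100L, "cell-b");
    timestampToCell.put(200L, "cell-a");

    // invertFrom puts (value, key) into the destination for every entry of the source.
    Multimap<String, Long> cellToTimestamp = Multimaps.invertFrom(timestampToCell,
            HashMultimap.<String, Long>create());

    // cellToTimestamp now maps "cell-a" -> {100, 200} and "cell-b" -> {100}
    return cellToTimestamp;
}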

From source file:org.basepom.mojo.duplicatefinder.classpath.ClasspathDescriptor.java

public static ClasspathDescriptor createClasspathDescriptor(final MavenProject project,
        final Multimap<File, Artifact> fileToArtifactMap, final Collection<String> ignoredResourcePatterns,
        final Collection<String> ignoredClassPatterns, final Collection<MavenCoordinates> ignoredDependencies,
        final boolean useDefaultResourceIgnoreList, final boolean useDefaultClassIgnoreList,
        final Set<File> bootClasspath, final File[] projectFolders)
        throws MojoExecutionException, InvalidVersionSpecificationException {
    checkNotNull(project, "project is null");
    checkNotNull(fileToArtifactMap, "fileToArtifactMap is null");
    checkNotNull(ignoredResourcePatterns, "ignoredResourcePatterns is null");
    checkNotNull(ignoredClassPatterns, "ignoredClassPatterns is null");
    checkNotNull(ignoredDependencies, "ignoredDependencies is null");
    checkNotNull(projectFolders, "projectFolders is null");

    final ClasspathDescriptor classpathDescriptor = new ClasspathDescriptor(useDefaultResourceIgnoreList,
            ignoredResourcePatterns, useDefaultClassIgnoreList, ignoredClassPatterns);

    File file = null;

    try {
        for (File bootClasspathElement : bootClasspath) {
            file = bootClasspathElement;
            if (file.exists()) {
                LOG.debug("Adding '%s' as a boot classpath element", file);
                classpathDescriptor.addClasspathElement(file);
            } else {
                LOG.debug("Ignoring '%s', does not exist.", file);
            }
        }
    } catch (final IOException ex) {
        throw new MojoExecutionException(format("Error trying to access file '%s' from boot classpath", file),
                ex);
    }

    final MatchArtifactPredicate matchArtifactPredicate = new MatchArtifactPredicate(ignoredDependencies);

    Artifact artifact = null;

    try {
        // any entry is either a jar in the repo or a folder in the target folder of a referenced
        // project. Add the elements that are not ignored by the ignoredDependencies predicate to
        // the classpath descriptor.
        for (final Map.Entry<File, Artifact> entry : fileToArtifactMap.entries()) {
            artifact = entry.getValue();
            file = entry.getKey();

            if (file.exists()) {
                // Add to the classpath if the artifact predicate does not apply (then it is not in the ignoredDependencies list).
                if (!matchArtifactPredicate.apply(artifact)) {
                    classpathDescriptor.addClasspathElement(file);
                }
            } else {
                // e.g. when running the goal explicitly on a cleaned multi-module project, the build
                // will try to use the output folders of a referenced project, but these do not exist.
                // In this case the plugin might return incorrect results (unfortunately false
                // negatives); there is not much it can do here besides failing the build with a
                // cryptic error message. Maybe add a flag?
                LOG.debug("Classpath element '%s' does not exist.", file.getAbsolutePath());
            }
        }
    } catch (final IOException ex) {
        throw new MojoExecutionException(
                format("Error trying to access file '%s' for artifact '%s'", file, artifact), ex);
    }

    try {
        // Add project folders unconditionally.
        for (final File projectFile : projectFolders) {
            file = projectFile;
            if (projectFile.exists()) {
                classpathDescriptor.addClasspathElement(file);
            } else {
                // See above. This may happen if the project has been cleaned before running the goal directly.
                LOG.debug("Project folder '%s' does not exist.", file.getAbsolutePath());
            }
        }
    } catch (final IOException ex) {
        throw new MojoExecutionException(format("Error trying to access project folder '%s'", file), ex);
    }

    return classpathDescriptor;
}

From source file:com.cloudant.sync.datastore.BasicDatastore.java

List<Multimap<String, String>> multiMapPartitions(Multimap<String, String> revisions, int size) {

    List<Multimap<String, String>> partitions = new ArrayList<Multimap<String, String>>();
    Multimap<String, String> current = HashMultimap.create();
    for (Map.Entry<String, String> e : revisions.entries()) {
        current.put(e.getKey(), e.getValue());
        // the query used below (see revsDiffBatch()) needs
        // `multimap.size() + multimap.keySet().size()` placeholders,
        // and SQLite has a limit on the number of placeholders in a single query.
        if (current.size() + current.keySet().size() >= size) {
            partitions.add(current);
            current = HashMultimap.create();
        }
    }

    if (current.size() > 0) {
        partitions.add(current);
    }

    return partitions;
}
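
The size() + keySet().size() check above counts SQL placeholders: one per value plus one per distinct key. With the hypothetical document and revision ids below, a partition holds 3 values across 2 keys and therefore needs 5 placeholders; once that total reaches the configured size, the partition is closed and a new one is started.

Multimap<String, String> revs = HashMultimap.create();
revs.put("doc1", "1-a");
revs.put("doc1", "2-b");
revs.put("doc2", "1-c");
// revs.size() == 3 values, revs.keySet().size() == 2 distinct keys
// => the generated query needs 3 + 2 = 5 placeholders for this partition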

From source file:com.metamx.druid.indexing.coordinator.TaskQueue.java

/**
 * Bootstraps this task queue and associated task lockbox. Clears the lockbox before running. Should be called
 * while the queue is stopped. It is not a good idea to start the queue if this method fails.
 */
public void bootstrap() {
    // NOTE: Bootstraps can resurrect bogus stuff caused by leader races or whatevs.

    // We may want to periodically fixup the database to refer to what we think is happening, to prevent
    // this from occurring and also so that bogus stuff is detected by clients in a timely manner.

    giant.lock();

    try {
        Preconditions.checkState(!active, "queue must be stopped");

        log.info("Bootstrapping queue (and associated lockbox)");

        queue.clear();
        taskLockbox.clear();

        // Get all running tasks and their locks
        final Multimap<TaskLock, Task> tasksByLock = ArrayListMultimap.create();

        for (final Task task : taskStorage.getRunningTasks()) {
            try {
                final List<TaskLock> taskLocks = taskStorage.getLocks(task.getId());

                queue.add(task);

                for (final TaskLock taskLock : taskLocks) {
                    tasksByLock.put(taskLock, task);
                }
            } catch (Exception e) {
                log.makeAlert("Failed to bootstrap task").addData("task", task.getId()).emit();
                throw Throwables.propagate(e);
            }
        }

        // Sort locks by version
        final Ordering<Map.Entry<TaskLock, Task>> byVersionOrdering = new Ordering<Map.Entry<TaskLock, Task>>() {
            @Override
            public int compare(Map.Entry<TaskLock, Task> left, Map.Entry<TaskLock, Task> right) {
                return left.getKey().getVersion().compareTo(right.getKey().getVersion());
            }
        };

        // Acquire as many locks as possible, in version order
        for (final Map.Entry<TaskLock, Task> taskAndLock : byVersionOrdering
                .sortedCopy(tasksByLock.entries())) {
            final Task task = taskAndLock.getValue();
            final TaskLock savedTaskLock = taskAndLock.getKey();

            final Optional<TaskLock> acquiredTaskLock = taskLockbox.tryLock(task, savedTaskLock.getInterval(),
                    Optional.of(savedTaskLock.getVersion()));

            if (acquiredTaskLock.isPresent()
                    && savedTaskLock.getVersion().equals(acquiredTaskLock.get().getVersion())) {
                log.info("Reacquired lock on interval[%s] version[%s] for task: %s",
                        savedTaskLock.getInterval(), savedTaskLock.getVersion(), task.getId());
            } else if (acquiredTaskLock.isPresent()) {
                log.info(
                        "Could not reacquire lock on interval[%s] version[%s] (got version[%s] instead) for task: %s",
                        savedTaskLock.getInterval(), savedTaskLock.getVersion(),
                        acquiredTaskLock.get().getVersion(), task.getId());
            } else {
                log.info("Could not reacquire lock on interval[%s] version[%s] for task: %s",
                        savedTaskLock.getInterval(), savedTaskLock.getVersion(), task.getId());
            }
        }

        log.info("Bootstrapped %,d tasks with %,d locks. Ready to go!", queue.size(),
                tasksByLock.keySet().size());
    } finally {
        giant.unlock();
    }
}
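
Because entries() is an ordinary Collection, it can be handed straight to utilities such as Ordering.sortedCopy, as the lock-reacquisition loop above does. A minimal sketch of that pattern with invented names and values:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;

import java.util.List;
import java.util.Map;

static void sortEntriesByKey() {
    Multimap<String, Integer> locks = ArrayListMultimap.create();
    locks.put("v2", 20);
    locks.put("v1", 10);

    Ordering<Map.Entry<String, Integer>> byVersion = new Ordering<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> left, Map.Entry<String, Integer> right) {
            return left.getKey().compareTo(right.getKey());
        }
    };

    // sortedCopy leaves the multimap untouched and returns an ordered List of its entries.
    List<Map.Entry<String, Integer>> sorted = byVersion.sortedCopy(locks.entries());
    System.out.println(sorted); // [v1=10, v2=20]
}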

From source file:org.kiji.schema.impl.cassandra.CassandraKijiTableReader.java

/**
 * Creates a new CassandraKijiTableReader instance that sends read requests directly to Cassandra.
 *
 * @param table Kiji table from which to read.
 * @param onDecoderCacheMiss behavior to use when a {@link
 *     org.kiji.schema.layout.ColumnReaderSpec} override specified in a {@link
 *     org.kiji.schema.KijiDataRequest} cannot be found in the prebuilt cache of cell decoders.
 * @param overrides mapping from columns to overriding read behavior for those columns.
 * @param alternatives mapping from columns to reader spec alternatives which the
 *     KijiTableReader will accept as overrides in data requests.
 * @throws java.io.IOException on I/O error.
 */
private CassandraKijiTableReader(final CassandraKijiTable table, final OnDecoderCacheMiss onDecoderCacheMiss,
        final Map<KijiColumnName, ColumnReaderSpec> overrides,
        final Multimap<KijiColumnName, ColumnReaderSpec> alternatives) throws IOException {
    mTable = table;
    mOnDecoderCacheMiss = onDecoderCacheMiss;

    final KijiTableLayout layout = mTable.getLayout();
    final Set<KijiColumnName> layoutColumns = layout.getColumnNames();
    final Map<KijiColumnName, BoundColumnReaderSpec> boundOverrides = Maps.newHashMap();
    for (Map.Entry<KijiColumnName, ColumnReaderSpec> override : overrides.entrySet()) {
        final KijiColumnName column = override.getKey();
        if (!layoutColumns.contains(column)
                && !layoutColumns.contains(new KijiColumnName(column.getFamily()))) {
            throw new NoSuchColumnException(
                    String.format("KijiTableLayout: %s does not contain column: %s", layout, column));
        } else {
            boundOverrides.put(column, BoundColumnReaderSpec.create(override.getValue(), column));
        }
    }
    mOverrides = boundOverrides;
    final Collection<BoundColumnReaderSpec> boundAlternatives = Sets.newHashSet();
    for (Map.Entry<KijiColumnName, ColumnReaderSpec> altsEntry : alternatives.entries()) {
        final KijiColumnName column = altsEntry.getKey();
        if (!layoutColumns.contains(column)
                && !layoutColumns.contains(KijiColumnName.create(column.getFamily()))) {
            throw new NoSuchColumnException(
                    String.format("KijiTableLayout: %s does not contain column: %s", layout, column));
        } else {
            boundAlternatives.add(BoundColumnReaderSpec.create(altsEntry.getValue(), altsEntry.getKey()));
        }
    }
    mAlternatives = boundAlternatives;
    mCellSpecOverrides = null;

    mLayoutConsumerRegistration = mTable.registerLayoutConsumer(mInnerLayoutUpdater);
    Preconditions.checkState(mReaderLayoutCapsule != null,
            "KijiTableReader for table: %s failed to initialize.", mTable.getURI());

    // Retain the table only when everything succeeds.
    mTable.retain();
    final State oldState = mState.getAndSet(State.OPEN);
    Preconditions.checkState(oldState == State.UNINITIALIZED,
            "Cannot open KijiTableReader instance in state %s.", oldState);
    DebugResourceTracker.get().registerResource(this);
}

From source file:com.facebook.buck.android.apkmodule.APKModuleGraph.java

/**
 * For each seed target, find its reachable targets and mark them in a multimap as being reachable
 * by that module for later sorting into exclusive and shared targets
 *
 * @return the Multimap containing targets and the seed modules that contain them
 */
private Multimap<BuildTarget, String> mapTargetsToContainingModules() {
    Multimap<BuildTarget, String> targetToContainingApkModuleNameMap = MultimapBuilder.treeKeys()
            .treeSetValues().build();
    for (Map.Entry<String, List<BuildTarget>> seedConfig : getSeedConfigMap().get().entrySet()) {
        String seedModuleName = seedConfig.getKey();
        for (BuildTarget seedTarget : seedConfig.getValue()) {
            targetToContainingApkModuleNameMap.put(seedTarget, seedModuleName);
            new AbstractBreadthFirstTraversal<TargetNode<?>>(targetGraph.get(seedTarget)) {
                @Override
                public ImmutableSet<TargetNode<?>> visit(TargetNode<?> node) {

                    ImmutableSet.Builder<TargetNode<?>> depsBuilder = ImmutableSet.builder();
                    for (BuildTarget depTarget : node.getBuildDeps()) {
                        if (!isInRootModule(depTarget) && !isSeedTarget(depTarget)) {
                            depsBuilder.add(targetGraph.get(depTarget));
                            targetToContainingApkModuleNameMap.put(depTarget, seedModuleName);
                        }
                    }
                    return depsBuilder.build();
                }
            }.start();
        }
    }
    // Now to generate the minimal covers of APKModules for each set of APKModules that contain
    // a buildTarget
    DirectedAcyclicGraph<String> declaredDependencies = getDeclaredDependencyGraph();
    Multimap<BuildTarget, String> targetModuleEntriesToRemove = MultimapBuilder.treeKeys().treeSetValues()
            .build();
    for (BuildTarget key : targetToContainingApkModuleNameMap.keySet()) {
        Collection<String> modulesForTarget = targetToContainingApkModuleNameMap.get(key);
        new AbstractBreadthFirstTraversal<String>(modulesForTarget) {
            @Override
            public Iterable<String> visit(String moduleName) throws RuntimeException {
                Collection<String> dependentModules = declaredDependencies.getIncomingNodesFor(moduleName);
                for (String dependent : dependentModules) {
                    if (modulesForTarget.contains(dependent)) {
                        targetModuleEntriesToRemove.put(key, dependent);
                    }
                }
                return dependentModules;
            }
        }.start();
    }
    for (Map.Entry<BuildTarget, String> entryToRemove : targetModuleEntriesToRemove.entries()) {
        targetToContainingApkModuleNameMap.remove(entryToRemove.getKey(), entryToRemove.getValue());
    }
    return targetToContainingApkModuleNameMap;
}
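
A detail worth noting in the example above: entries to drop are first collected into targetModuleEntriesToRemove and only removed afterwards, because removing from targetToContainingApkModuleNameMap while one of its views is being iterated would risk a ConcurrentModificationException. The final loop then calls remove(key, value), which deletes exactly one key-value pair per entry. A minimal sketch of the same two-phase pattern with invented names:

import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;

import java.util.Map;

static void removeStagedEntries() {
    Multimap<String, String> targetToModules = MultimapBuilder.treeKeys().treeSetValues().build();
    targetToModules.put("//app:lib", "module-a");
    targetToModules.put("//app:lib", "module-b");

    Multimap<String, String> toRemove = MultimapBuilder.treeKeys().treeSetValues().build();
    toRemove.put("//app:lib", "module-b");

    // Stage removals in a second multimap, then apply them, so the source multimap
    // is never modified while one of its own views is being iterated.
    for (Map.Entry<String, String> e : toRemove.entries()) {
        targetToModules.remove(e.getKey(), e.getValue());
    }
    // targetToModules now maps "//app:lib" -> {"module-a"}
}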

From source file:eu.esdihumboldt.hale.io.xslt.internal.XsltGenerator.java

/**
 * Write the container fragment.
 * 
 * @param templateFile the file to write to
 * @param groupedResults the result variable names grouped by associated
 *            target type
 * @param targetElements an empty map that is populated with variable names
 *            mapped to target element names
 * @throws IOException if an error occurs writing the template
 * @throws XMLStreamException if an error occurs writing XML content to the
 *             template
 */
private void writeContainerFragment(File templateFile, Multimap<TypeDefinition, String> groupedResults,
        Map<String, QName> targetElements) throws XMLStreamException, IOException {
    XMLStreamWriter writer = XslTransformationUtil
            .setupXMLWriter(new BufferedOutputStream(new FileOutputStream(templateFile)), prefixes);
    try {
        // write container
        GmlWriterUtil.writeStartElement(writer, targetContainer.getName());
        // generate an identifier on the container if one is required
        GmlWriterUtil.writeRequiredID(writer, targetContainer.getType(), null, false);

        writeContainerIntro(writer, context);

        // cache definition paths
        Map<TypeDefinition, DefinitionPath> paths = new HashMap<TypeDefinition, DefinitionPath>();

        Descent lastDescent = null;
        for (Entry<TypeDefinition, String> entry : groupedResults.entries()) {
            TypeDefinition type = entry.getKey();

            // get stored definition path for the type
            DefinitionPath defPath;
            if (paths.containsKey(type)) {
                // get the stored path, may be null
                defPath = paths.get(type);
            } else {
                // determine a valid definition path in the container
                defPath = findMemberAttribute(targetContainer, type);

                // store path (may be null)
                paths.put(type, defPath);
            }
            if (defPath != null) {
                // insert xsl:for-each at the appropriate position in
                // the path
                defPath = pathInsertForEach(defPath, entry.getValue(), targetElements);

                lastDescent = Descent.descend(writer, defPath, lastDescent, false, true);

                // write single target instance from variable
                GmlWriterUtil.writeEmptyElement(writer, new QName(NS_URI_XSL, "copy-of"));
                writer.writeAttribute("select", ".");
            } else {
                reporter.warn(new IOMessageImpl(MessageFormat.format(
                        "No compatible member attribute for type {0} found in root element {1}, one instance was skipped",
                        type.getDisplayName(), targetContainer.getName().getLocalPart()), null));
            }
        }
        if (lastDescent != null) {
            lastDescent.close();
        }

        // end container
        writer.writeEndElement();
    } finally {
        writer.close();
    }
}

From source file:org.corpus_tools.peppermodules.annis.Salt2ANNISMapper.java

@Override
public DOCUMENT_STATUS mapSDocument() {

    this.preorderTable = new ConcurrentHashMap<>();
    this.postorderTable = new ConcurrentHashMap<>();
    prePostOrder = 0l;

    numberOfMappedNodes.set(0);

    if (this.getDocument() == null || this.getDocument().getDocumentGraph() == null) {
        throw new PepperModuleException(this, "Cannot map sDocumentGraph, because sDocumentGraph is null.");
    }

    {//start traversion of documentStructure

        try {

            if (this.getDocument().getDocumentGraph().getNodes() != null) {
                this.numberOfDocumentNodes = this.getDocument().getDocumentGraph().getNodes().size();
            }

            /**
             * traverse by SpanningRelations: DOCUMENT_STRUCTURE_CR
             * DominanceRelations: DOCUMENT_STRUCTURE_DR PointingRelations:
             * DOCUMENT_STRUCTURE_PR
             *
             * DominanceRelations Subcomponents: DOCUMENT_STRUCTURE_DR_SUB
             * PointingRelations Subcomponents: DOCUMENT_STRUCTURE_PR_SUB
             *
             * Dominance relations may consist of different subcomponents since
             * there are "edge" and "secedge" types
             *
             * Since every root node has its own component, the pre and post order
             * needs to be 0 for the root node. You need to handle this.
             */
            List<? extends SNode> sRelationRoots;
            Multimap<String, SNode> subComponentRoots;
            //        Map<String, List<SNode>> subComponentRoots;

            Map<SToken, Long> token2Index = calculateToken2Index(getDocument().getDocumentGraph());

            // START Step 1: map SOrderRelation
            subComponentRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SORDER_RELATION);
            if (subComponentRoots != null) {
                if (subComponentRoots.size() > 0) {
                    for (Entry<String, SNode> entry : subComponentRoots.entries()) {
                        SRelation2ANNISMapper sOrderRelationMapper = new SOrderRelation2ANNISMapper(
                                getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);

                        String traversionType = entry.getKey();
                        if (SaltUtil.SALT_NULL_VALUE.equals(traversionType)) {
                            traversionType = "default_seg";
                        }
                        sOrderRelationMapper.setTraversionSType(traversionType);
                        sOrderRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(entry.getKey()),
                                SALT_TYPE.SORDER_RELATION, null);

                    }
                }
            }
            // END Step 1: map SOrderRelation

            // also map the timeline (by creating a virtual tokenization if necessary)
            STimelineRelation2ANNISMapper timelineMapper = new STimelineRelation2ANNISMapper(getIdManager(),
                    getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank, tw_edgeAnno,
                    tw_component, this, mergeTextsWithTimeline);
            timelineMapper.run();

            // START Step 2: map SText
            if (idManager.hasVirtualTokenization()) {
                Long sDocID;
                Long textId = 0l;
                String sDocumentElementId = this.getDocument().getId();

                if (sDocumentElementId == null) {
                    throw new PepperModuleException(this,
                            "SId Id of the document '" + this.getDocument().getName() + "' is NULL!");
                }
                sDocID = this.idManager.getNewCorpusTabId(sDocumentElementId);
                String textName = "sText0";
                String textContent = Strings.repeat(" ", idManager.getNumberOfVirtualToken());
                ArrayList<String> tuple = new ArrayList<>();
                tuple.add(sDocID.toString());
                tuple.add(textId.toString());
                tuple.add(textName);
                tuple.add(textContent);

                long transactionId = tw_text.beginTA();
                try {
                    tw_text.addTuple(transactionId, tuple);
                    tw_text.commitTA(transactionId);

                } catch (FileNotFoundException e) {
                    tw_text.abortTA(transactionId);
                    throw new PepperModuleException(this,
                            "Could not write to the node.tab, exception was" + e.getMessage());
                }
            } else {
                this.mapSText();
            }

            ExecutorService exec = null;
            if (mapRelationsInParallel) {
                exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
            }

            subComponentRoots = getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SPOINTING_RELATION);
            if (subComponentRoots != null) {
                //System.out.println("The Pointing relation graphs have "+ subComponentRoots.size() + " STypes.");
                if (subComponentRoots.size() > 0) {

                    for (String key : subComponentRoots.keySet()) {
                        //System.out.println("Count of PR roots for key "+key+" : "+subComponentRoots.get(key).size());
                        //System.out.println("Mapping PointingRelation subcomponents with sType: "+key);
                        SRelation2ANNISMapper sPointingSubRelationMapper = new SPointingRelation2ANNISMapper(
                                getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);
                        sPointingSubRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(key),
                                SALT_TYPE.SPOINTING_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_PR);
                        sPointingSubRelationMapper.setTraversionSType(key);
                        if (exec != null) {
                            exec.execute(sPointingSubRelationMapper);
                        } else {
                            sPointingSubRelationMapper.run();
                        }
                    }
                } else {
                    //System.out.println("No PointingRelation components found (null map)");
                }
            } else {
                //System.out.println("No PointingRelation components found (empty map)");
            }
            // END Step 2: map SPointingRelations

            // START Step 3: map SDominanceRelations
            sRelationRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelation(SALT_TYPE.SDOMINANCE_RELATION);
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper sDominanceRelationMapper = new SDominanceRelation2ANNISMapper(
                            getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno,
                            tw_rank, tw_edgeAnno, tw_component, this);
                    sDominanceRelationMapper.mapSRelations2ANNIS(sRelationRoots, SALT_TYPE.SDOMINANCE_RELATION,
                            TRAVERSION_TYPE.DOCUMENT_STRUCTURE_DR);
                    if (exec != null) {
                        exec.execute(sDominanceRelationMapper);
                    } else {
                        sDominanceRelationMapper.run();
                    }
                }
            }
            // END Step 3: map SDominanceRelations

            // START Step 3.1 : map the subComponents of the SDominanceRelations
            subComponentRoots = getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SDOMINANCE_RELATION);
            if (subComponentRoots != null) {
                //System.out.println("The Dominance relation graphs have "+ subComponentRoots.size() + " STypes.");
                if (subComponentRoots.size() > 0) {

                    Set<String> domComponentTypeNames = subComponentRoots.keySet();

                    // only output the named relation types if the user has not chosen
                    // to exclude them, or if there are two or more named types
                    if (!((ANNISExporterProperties) this.getProperties()).getExcludeSingleDomType()
                            || domComponentTypeNames.size() >= 2) {
                        for (String key : domComponentTypeNames) {

                            if (!SaltUtil.SALT_NULL_VALUE.equals(key)) {

                                SRelation2ANNISMapper sDominanceSubRelationMapper = new SDominanceRelation2ANNISMapper(
                                        getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                        tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);
                                sDominanceSubRelationMapper.setTraversionSType(key);
                                sDominanceSubRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(key),
                                        SALT_TYPE.SDOMINANCE_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_DR);
                                if (exec != null) {
                                    exec.execute(sDominanceSubRelationMapper);
                                } else {
                                    sDominanceSubRelationMapper.run();
                                }
                            }
                        }
                    }
                } else {
                    //System.out.println("No DominanceRelation subcomponents found (null map)");
                }
            } else {
                //System.out.println("No DominanceRelation subcomponents found (empty map)");
            }
            // END Step 3.1 : map the subComponents of the SDominanceRelations

            // START Step 4: map SSpanningrelations
            sRelationRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelation(SALT_TYPE.SSPANNING_RELATION);
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper spanningRelationMapper = new SSpanningRelation2ANNISMapper(
                            getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno,
                            tw_rank, tw_edgeAnno, tw_component, this);
                    spanningRelationMapper.mapSRelations2ANNIS(sRelationRoots, SALT_TYPE.SSPANNING_RELATION,
                            TRAVERSION_TYPE.DOCUMENT_STRUCTURE_CR);
                    if (exec != null) {
                        exec.execute(spanningRelationMapper);
                    } else {
                        spanningRelationMapper.run();
                    }
                }
            }
            // END Step 4: map SSpanningrelations

            // START Step 5: map SMedialRelations
            sRelationRoots = this.getDocument().getDocumentGraph().getTokens();
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper audioRelationMapper = new Audio2ANNISMapper(getIdManager(),
                            getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank,
                            tw_edgeAnno, tw_component, this);
                    audioRelationMapper.mapSRelations2ANNIS(sRelationRoots,
                            SALT_TYPE.STIME_OVERLAPPING_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_AUDIO);
                    if (exec != null) {
                        exec.execute(audioRelationMapper);
                    } else {
                        audioRelationMapper.run();
                    }
                }
            }
            // END Step 5: map SMedialRelations

            if (exec != null) {
                exec.shutdown();
                while (!exec.awaitTermination(60, TimeUnit.SECONDS)) {
                    // wait to finish
                }
            }

            // START Step 6: map all SToken which were not mapped, yet
            SRelation2ANNISMapper mapper = new SSpanningRelation2ANNISMapper(getIdManager(),
                    getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank, tw_edgeAnno,
                    tw_component, this);
            mapper.beginTransaction();
            for (SNode node : getDocument().getDocumentGraph().getTokens()) {
                if (this.idManager.getVirtualisedSpanId(node.getId()) == null) {
                    mapper.mapSNode(node);
                }
            }
            mapper.commitTransaction();
            // END Step 6: map all SToken which were not mapped, yet

        } catch (PepperModuleException e) {
            throw new PepperModuleException(this,
                    "Some error occurs while traversing document structure graph.", e);
        } catch (InterruptedException e) {
            throw new PepperModuleException(this,
                    "Some error occurs while traversing document structure graph.", e);
        }
    } //start traversion of corpus structure

    mergeLocalStatsIntoGlobal();

    setProgress(1.0);
    return DOCUMENT_STATUS.COMPLETED;
}
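
The mapper above uses both iteration styles that Multimap offers: the SOrderRelation step walks entries() (one pass per key-value pair, fetching the key's full root collection each time), while the pointing and dominance steps walk keySet() and fetch each key's collection once with get(key). A small sketch of the difference with invented data:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Map;

static void iterationStyles() {
    Multimap<String, String> roots = HashMultimap.create();
    roots.put("edge", "n1");
    roots.put("edge", "n2");
    roots.put("secedge", "n3");

    // entries(): visits ("edge","n1"), ("edge","n2"), ("secedge","n3") -- one pass per pair.
    for (Map.Entry<String, String> e : roots.entries()) {
        System.out.println(e.getKey() + " -> " + e.getValue());
    }

    // keySet() + get(): visits "edge" once with [n1, n2], then "secedge" once with [n3].
    for (String key : roots.keySet()) {
        System.out.println(key + " -> " + roots.get(key));
    }
}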