Example usage for com.google.common.collect ListMultimap get

Introduction

On this page you can find example usages of com.google.common.collect ListMultimap get.

Prototype

@Override
List<V> get(@Nullable K key);

Document

Because the values for a given key may have duplicates and follow the insertion ordering, this method returns a List, instead of the java.util.Collection specified in the Multimap interface.
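
A minimal, self-contained sketch of those semantics (not taken from any of the projects below): get never returns null, the returned List keeps duplicates in insertion order, and it is a live view backed by the multimap, so changes to the list write through.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.List;

public class ListMultimapGetDemo {
    public static void main(String[] args) {
        ListMultimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 10); // duplicate values are kept
        scores.put("bob", 7);

        List<Integer> alice = scores.get("alice");  // [10, 10], in insertion order
        List<Integer> carol = scores.get("carol");  // empty list, never null

        // The list is a view: mutating it writes through to the multimap.
        alice.remove(0);
        System.out.println(scores.get("alice"));    // [10]
        System.out.println(carol.isEmpty());        // true
    }
}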

Usage

From source file:org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.java

/** Sends counter updates to Dataflow backend. */
private void sendWorkerUpdatesToDataflowService(CounterSet deltaCounters, CounterSet cumulativeCounters)
        throws IOException {

    List<CounterUpdate> counterUpdates = new ArrayList<>(128);

    if (publishCounters) {
        stageInfoMap.values().forEach(s -> counterUpdates.addAll(s.extractCounterUpdates()));
        counterUpdates
                .addAll(cumulativeCounters.extractUpdates(false, DataflowCounterUpdateExtractor.INSTANCE));
        counterUpdates
                .addAll(deltaCounters.extractModifiedDeltaUpdates(DataflowCounterUpdateExtractor.INSTANCE));
    }

    // Handle duplicate counters from different stages. Store all the counters in a multi-map and
    // send the counters that appear multiple times in separate RPCs. Same logical counter could
    // appear in multiple stages if a step runs in multiple stages (as with flatten-unzipped stages)
    // especially if the counter definition does not set execution_step_name.
    ListMultimap<Object, CounterUpdate> counterMultimap = MultimapBuilder.hashKeys(counterUpdates.size())
            .linkedListValues().build();
    boolean hasDuplicates = false;

    for (CounterUpdate c : counterUpdates) {
        Object key = getCounterUpdateKey(c);
        if (counterMultimap.containsKey(key)) {
            hasDuplicates = true;
        }
        counterMultimap.put(key, c);
    }

    // Clears counterUpdates and enqueues unique counters from counterMultimap. If a counter
    // appears more than once, one of them is extracted leaving the remaining in the map.
    Runnable extractUniqueCounters = () -> {
        counterUpdates.clear();
        for (Iterator<Object> iter = counterMultimap.keySet().iterator(); iter.hasNext();) {
            List<CounterUpdate> counters = counterMultimap.get(iter.next());
            counterUpdates.add(counters.get(0));
            if (counters.size() == 1) {
                // There is a single value. Remove the entry through the iterator.
                iter.remove();
            } else {
                // Otherwise remove the first value.
                counters.remove(0);
            }
        }
    };

    if (hasDuplicates) {
        extractUniqueCounters.run();
    } else { // Common case: no duplicates. We can just send counterUpdates, empty the multimap.
        counterMultimap.clear();
    }

    List<Status> errors;
    synchronized (pendingFailuresToReport) {
        errors = new ArrayList<>(pendingFailuresToReport.size());
        for (String stackTrace : pendingFailuresToReport) {
            errors.add(new Status().setCode(2) // rpc.Code.UNKNOWN
                    .setMessage(stackTrace));
        }
        pendingFailuresToReport.clear(); // Best effort only, no need to wait till successfully sent.
    }

    WorkItemStatus workItemStatus = new WorkItemStatus().setWorkItemId(WINDMILL_COUNTER_UPDATE_WORK_ID)
            .setErrors(errors).setCounterUpdates(counterUpdates);
    workUnitClient.reportWorkItemStatus(workItemStatus);

    // Send any counters appearing more than once in subsequent RPCs:
    while (!counterMultimap.isEmpty()) {
        extractUniqueCounters.run();
        workUnitClient.reportWorkItemStatus(new WorkItemStatus().setWorkItemId(WINDMILL_COUNTER_UPDATE_WORK_ID)
                .setCounterUpdates(counterUpdates));
    }
}
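
The extraction loop above works because ListMultimap.get returns a live view: counters.remove(0) deletes that value from counterMultimap itself, so repeated passes eventually drain the map. A standalone sketch of that peel-one-value-per-key pattern, using illustrative names rather than the Dataflow types used above:

import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class PeelOnePerKeyDemo {
    public static void main(String[] args) {
        ListMultimap<String, String> byKey = MultimapBuilder.hashKeys().linkedListValues().build();
        byKey.put("a", "a1");
        byKey.put("a", "a2");
        byKey.put("b", "b1");

        while (!byKey.isEmpty()) {
            List<String> batch = new ArrayList<>();
            for (Iterator<String> iter = byKey.keySet().iterator(); iter.hasNext();) {
                List<String> values = byKey.get(iter.next()); // live view
                batch.add(values.get(0));
                if (values.size() == 1) {
                    iter.remove();     // last value: drop the whole key
                } else {
                    values.remove(0);  // shrink the view, and the multimap with it
                }
            }
            System.out.println("batch: " + batch); // e.g. [a1, b1], then [a2]
        }
    }
}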

From source file:com.google.cloud.genomics.cba.GGAnnotateVariants.java

@org.apache.beam.sdk.transforms.DoFn.ProcessElement
public void processElement(DoFn<StreamVariantsRequest, KV<String, String>>.ProcessContext c) throws Exception {

    Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth);

    StreamVariantsRequest request = StreamVariantsRequest.newBuilder(c.element()).addAllCallSetIds(callSetIds)
            .build();

    if (canonicalizeRefName(request.getReferenceName()).equals("M") && supportChrM == false) {
        LOG.info("There is no information about Chr M in the provided AnnotationSet!");
        return;
    }

    Iterator<StreamVariantsResponse> streamVariantIter = VariantStreamIterator.enforceShardBoundary(auth,
            request, ShardBoundary.Requirement.STRICT, VARIANT_FIELDS);

    if (!streamVariantIter.hasNext()) {
        LOG.info("region has no variants, skipping");
        return;
    }

    Stopwatch stopwatch = Stopwatch.createStarted();
    int varCount = 0;

    ListMultimap<Range<Long>, Annotation> variantAnnotationSetList = null;
    if (this.variantAnnotationSetIds != null)
        variantAnnotationSetList = retrieveVariantAnnotations(genomics, request);

    IntervalTree<Annotation> transcripts = null;
    if (this.transcriptSetIds != null)
        transcripts = retrieveTranscripts(genomics, request);

    while (streamVariantIter.hasNext()) {
        Iterable<Variant> varIter;
        if (onlySNP)
            varIter = FluentIterable.from(streamVariantIter.next().getVariantsList())
                    .filter(VariantUtils.IS_SNP);
        else
            varIter = FluentIterable.from(streamVariantIter.next().getVariantsList());

        for (Variant variant : varIter) {
            Range<Long> pos = Range.closedOpen(variant.getStart(), variant.getEnd());

            // This variable helps to keep track of alignment
            String VCFOutput = "";

            // Keep track of Empty VCF records
            boolean EmptyVCF = false;

            // Variant Annotation Section
            if (variantAnnotationSetList != null) {

                // Sort the list of matched annotations
                SortedSet<String> VariantAnnotationKeys = new TreeSet<String>(VariantColInfo.keySet());

                // Retrieve a list of matched variant annotations
                List<Annotation> listMatchedAnnotations = variantAnnotationSetList.get(pos);

                // Visit overlapped annotations in order, and the matches in
                // order (First convert to VCF format, and then add it to
                // VCFOutput);
                int index = 0;
                for (String key : VariantAnnotationKeys) {
                    // The following variables help to put a semicolon
                    // between multiple matches from the same annotationSet
                    // e.g., allele_freq1;allele_freq2;...;allele_freqn;
                    boolean SemiColon = false;

                    for (Annotation match : listMatchedAnnotations) {
                        if (match.getAnnotationSetId().compareTo(key) == 0) {
                            // if (match.getVariant().getAlternateBases() !=
                            // null
                            // && variant.getAlternateBasesList() != null)
                            {
                                // check if Variant's alternate bases are
                                // the same as the matched annotation's
                                // alternate bases

                                if (compareAlternateBases(match.getVariant().getAlternateBases(),
                                        variant.getAlternateBasesList(), variant.getReferenceBases())) {

                                    EmptyVCF = true;

                                    if (DEBUG)
                                        LOG.info("MATCHED: variant: (" + variant.getStart() + ", Annotation: "
                                                + match.getStart() + ") ");

                                    if (!SemiColon) {
                                        VCFOutput += createVCFFormat(variant, match);
                                        SemiColon = true;
                                        // Activate it for the next matched
                                        // element

                                        // TESTING
                                        VCFOutput += "ALT:" + match.getVariant().getAlternateBases() + "\t";
                                    } else {
                                        VCFOutput += ";" + createVCFFormat(variant, match);

                                        // TESTING
                                        VCFOutput += "ALT:" + match.getVariant().getAlternateBases() + "\t";
                                    }
                                }
                            }
                        }
                    }
                    index++;
                    /*
                     * formatTabs function helps to keep track of alignment
                     * in the VCF format (e.g., if there is no match for
                     * Variant X in AnnotationSet Y then add spaces equals
                     * to the number of AnnotationSet Y's columns in the VCF
                     * file)
                     */
                    if (VCFOutput.isEmpty()
                            && (VariantAnnotationKeys.size() > index || TranscriptColInfo.size() > 0)) {
                        VCFOutput += formatTabs(VariantColInfo.get(key));
                    }
                } // end of keys
                if (!EmptyVCF)
                    VCFOutput = "";
            } // End of Variant Annotation

            // Transcript Annotation Section
            if (transcripts != null) {

                // Find all the overlapped matches and create an interval
                // tree
                Iterator<Node<Annotation>> transcriptIter = transcripts
                        .overlappers(pos.lowerEndpoint().intValue(), pos.upperEndpoint().intValue() - 1); // Inclusive.

                if (transcriptIter != null) {
                    // Materialize the overlapping annotations so they can be re-scanned
                    // for each annotationSet key (an Iterator cannot be rewound).
                    List<Annotation> overlappedTranscripts = new ArrayList<Annotation>();
                    while (transcriptIter.hasNext()) {
                        overlappedTranscripts.add(transcriptIter.next().getValue());
                    }
                    // Sort the list of matched annotations
                    SortedSet<String> transcriptKeys = new TreeSet<String>(TranscriptColInfo.keySet());
                    int index = 0;
                    // Check annotations in order, and in the case of match
                    // convert the matches to VCF format
                    for (String key : transcriptKeys) {
                        boolean SemiColon = false;
                        for (Annotation transcript : overlappedTranscripts) {
                            if (transcript.getAnnotationSetId().compareTo(key) == 0) {
                                if (!SemiColon) {
                                    VCFOutput += createVCFFormat(variant, transcript);
                                    SemiColon = true;
                                } else
                                    VCFOutput += ";" + createVCFFormat(variant, transcript);
                            }
                        }
                        index++;

                        if (VCFOutput.isEmpty() && transcriptKeys.size() > index) {
                            VCFOutput += formatTabs(TranscriptColInfo.get(key));
                        }
                    }
                }
            } // End of Transcripts

            String varintALTs = "";
            for (int index = 0; index < variant.getAlternateBasesCount(); index++) {
                if (index > 0)
                    varintALTs += ",";
                varintALTs += variant.getAlternateBases(index);
            }

            // The following section helps to add genotypes
            /*
             * String VariantGenotype=""; List<VariantCall> Genotypes =
             * variant.getCallsList();
             * 
             * for(String CId: callSetIds){ for(VariantCall VC:Genotypes){
             * if(VC.getCallSetId().equals(CId)){
             * 
             * List<Integer> GentotypeList = VC.getGenotypeList(); for(int
             * index=0; index < GentotypeList.size(); index++){ int Genotype
             * = GentotypeList.get(index);
             * 
             * if(index>0) VariantGenotype += "/";
             * 
             * VariantGenotype += Genotype; } } } VariantGenotype += "\t"; }
             */
            // Map<String, ListValue> VariantInfoMap = variant.getInfo();
            /*
             * String VariantInfo=""; List<VariantCall> VariantCall =
             * variant.getCallsList(); for (Iterator<VariantCall> iter =
             * VariantCall.iterator(); iter.hasNext(); ) { VariantCall
             * element = iter.next(); Map<String, ListValue> VariantCallInfo
             * = element.getInfo(); for (Map.Entry<String, ListValue> entry
             * : VariantCallInfo.entrySet()) { VariantInfo +=entry.getKey()
             * + ":" +
             * entry.getValue().getValuesList().get(0).getStringValue() +
             * ";"; } }
             * 
             * 
             * 
             * /* for (Map.Entry<String, ListValue> entry :
             * VariantInfoMap.entrySet()) { //System.out.println("Key = " +
             * entry.getKey() + ", Value = " + entry.getValue());
             * VariantInfo += entry.getKey() + ":" + entry.getValue() + ";";
             * }
             */

            /*
             * Emit the information in the form of <Key, Value> Print out
             * the variant w/ or w/o any matched annotations Key: (ChromId,
             * Start, End) Value:(variant's <referenceName start end
             * referenceBases alternateBases quality>, + The content of
             * "VCFOutput" OR Annotation's fields
             */
            if (this.BigQuery) {
                if (!VCFOutput.isEmpty()) {
                    c.output(KV.of(
                            variant.getReferenceName() + ";" + Long.toString(variant.getStart()) + ";"
                                    + Long.toString(variant.getEnd()),
                            // Value
                            VCFOutput));
                }
            } else {
                if (!VCFOutput.isEmpty()) {
                    c.output(KV.of(
                            variant.getReferenceName() + ";" + Long.toString(variant.getStart()) + ";"
                                    + Long.toString(variant.getEnd()),
                            // Value
                            variant.getReferenceName()
                                    // <-- increment by 1 => convert to 1-based
                                    // -->
                                    + "\t" + (variant.getStart() + 1) + "\t" + variant.getEnd() + "\t"
                                    + variant.getReferenceBases() + "\t" + varintALTs
                                    // + "\t" + VariantInfo
                                    // + "\t" + variant.getQuality()
                                    // + "\t" + VariantGenotype
                                    + "\t" + VCFOutput));
                }
            }

            varCount++;
            if (varCount % 1e3 == 0) {
                LOG.info(String.format("read %d variants (%.2f / s)", varCount,
                        (double) varCount / stopwatch.elapsed(TimeUnit.SECONDS)));
            }
        }

    }

    LOG.info("finished reading " + varCount + " variants in " + stopwatch);
}

From source file:org.opentripplanner.routing.edgetype.factory.InterliningTrip.java

/**
 * Identify interlined trips (where a physical vehicle continues on to another logical trip)
 * and update the TripPatterns accordingly. This must be called after all the pattern edges and vertices
 * are already created, because it creates interline dwell edges between existing pattern arrive/depart vertices.
 */
private void interline(Collection<TripPattern> tripPatterns) {

    /* Record which Pattern each interlined TripTimes belongs to. */
    Map<TripTimes, TripPattern> patternForTripTimes = Maps.newHashMap();

    /* TripTimes grouped by the block ID and service ID of their trips. Must be a ListMultimap to allow sorting. */
    ListMultimap<BlockIdAndServiceId, TripTimes> tripTimesForBlock = ArrayListMultimap.create();

    LOG.info("Finding interlining trips based on block IDs.");
    for (TripPattern pattern : tripPatterns) {
        Timetable timetable = pattern.scheduledTimetable;
        /* TODO: Block semantics seem undefined for frequency trips, so skip them? */
        for (TripTimes tripTimes : timetable.tripTimes) {
            Trip trip = tripTimes.trip;
            if (!Strings.isNullOrEmpty(trip.getBlockId())) {
                tripTimesForBlock.put(new BlockIdAndServiceId(trip), tripTimes);
                // For space efficiency, only record times that are part of a block.
                patternForTripTimes.put(tripTimes, pattern);
            }
        }
    }

    /* Associate pairs of TripPatterns with lists of trips that continue from one pattern to the other. */
    Multimap<P2<TripPattern>, P2<Trip>> interlines = ArrayListMultimap.create();

    /*
      Sort trips within each block by first departure time, then iterate over trips in this block and service,
      linking them. Has no effect on single-trip blocks.
     */
    SERVICE_BLOCK: for (BlockIdAndServiceId block : tripTimesForBlock.keySet()) {
        List<TripTimes> blockTripTimes = tripTimesForBlock.get(block);
        Collections.sort(blockTripTimes);
        TripTimes prev = null;
        for (TripTimes curr : blockTripTimes) {
            if (prev != null) {
                if (prev.getDepartureTime(prev.getNumStops() - 1) > curr.getArrivalTime(0)) {
                    LOG.error(
                            "Trip times within block {} on service {} are not increasing after trip {}.",
                            block.blockId, block.serviceId, prev.trip.getId());
                    continue SERVICE_BLOCK;
                }
                TripPattern prevPattern = patternForTripTimes.get(prev);
                TripPattern currPattern = patternForTripTimes.get(curr);
                interlines.put(new P2<TripPattern>(prevPattern, currPattern),
                        new P2<Trip>(prev.trip, curr.trip));
            }
            prev = curr;
        }
    }

    /*
      Create the PatternInterlineDwell edges linking together TripPatterns.
      All the pattern vertices and edges must already have been created.
     */
    for (P2<TripPattern> patterns : interlines.keySet()) {
        TripPattern prevPattern = patterns.first;
        TripPattern nextPattern = patterns.second;
        // This is a single (uni-directional) edge which may be traversed forward and backward.
        PatternInterlineDwell edge = new PatternInterlineDwell(prevPattern, nextPattern);
        for (P2<Trip> trips : interlines.get(patterns)) {
            edge.add(trips.first, trips.second);
        }
    }
    LOG.info("Done finding interlining trips and creating the corresponding edges.");
}
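
Note that Collections.sort(blockTripTimes) above reorders the multimap's own value list, because ListMultimap.get returns a modifiable view; that is exactly why the comment insists on a ListMultimap rather than a plain Multimap. A small sketch of that group-then-sort-in-place idiom, using made-up data instead of OTP's TripTimes:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.Arrays;
import java.util.Collections;

public class SortValuesInPlaceDemo {
    public static void main(String[] args) {
        // Group departure times (in minutes) by a block id, then sort each block in place.
        ListMultimap<String, Integer> departuresByBlock = ArrayListMultimap.create();
        departuresByBlock.putAll("block-1", Arrays.asList(540, 510, 525));
        departuresByBlock.putAll("block-2", Arrays.asList(600, 590));

        for (String block : departuresByBlock.keySet()) {
            // get() returns a live List view, so sorting it reorders the multimap's values.
            Collections.sort(departuresByBlock.get(block));
        }

        System.out.println(departuresByBlock);
        // e.g. {block-1=[510, 525, 540], block-2=[590, 600]} (key order not guaranteed)
    }
}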

From source file:org.nmdp.ngs.tools.FilterConsensus.java

@Override
public Integer call() throws Exception {
    PrintWriter writer = null;
    try {
        writer = writer(outputFile);

        // map of region alleles from BED-formatted genomic regions file keyed by "exon" as integer
        Map<Integer, Allele> exons = readBedFile(bedFile);

        // todo:  refactor this block into separate static methods
        // map of overlapping alignment alleles from BAM-formatted consensus sequence file keyed by "exon" as integer
        ListMultimap<Integer, Allele> regions = ArrayListMultimap.create();

        for (SAMRecord record : new SAMFileReader(bamFile)) {
            List<Edit> edits = cigarToEditList(record);

            String referenceName = record.getReferenceName();
            int start = record.getAlignmentStart();
            int end = record.getAlignmentEnd();
            Locus alignment = new Locus(referenceName, start, end);
            SymbolList sequence = DNATools.createDNA(record.getReadString());

            for (Edit edit : edits) {
                sequence.edit(edit);
            }

            String name = record.getReadName();
            Allele contig = Allele.builder().withContig(referenceName).withStart(start).withEnd(end)
                    .withSequence(sequence).build();

            for (Map.Entry<Integer, Allele> entry : exons.entrySet()) {
                int index = entry.getKey();
                Allele exon = entry.getValue();

                String chr = exon.getContig();
                int min = exon.getMin();
                int max = exon.getMax();
                String range = chr + ":" + min + "-" + max;

                if (alignment.overlaps(exon)) {
                    Allele xover = exon.doubleCrossover(contig);

                    Allele clipped = xover.leftHardClip("-").rightHardClip("-");
                    clipped.setName(">" + name + "|gene=" + gene + "|exon=" + index + "|location=" + range + "|"
                            + (max - min));
                    regions.put(index, clipped);

                    int offset = 0;
                    for (Edit edit : edits) {
                        if (edit.replacement.equals(SymbolList.EMPTY_LIST) && exon.contains(edit.pos)) {
                            exon.sequence.edit(new Edit(edit.pos + offset, 0, edit.replacement));
                            offset += edit.length;
                        }
                    }
                }
            }
        }

        // todo: improve this data structure
        Map<String, ListMultimap<Integer, Allele>> contigs = new HashMap<String, ListMultimap<Integer, Allele>>();

        for (int index : regions.keySet()) {
            for (Allele allele : regions.get(index)) {
                int sequenceLength = allele.sequence.seqString().length();
                List<String> fields = Splitter.on("|").splitToList(allele.getName());

                Double locusLength = Double.parseDouble(fields.get(fields.size() - 1));

                if (sequenceLength / locusLength >= minimumBreadth) {
                    if (!contigs.containsKey(fields.get(0))) {
                        contigs.put(fields.get(0), ArrayListMultimap.<Integer, Allele>create());
                    }
                    contigs.get(fields.get(0)).put(index, allele);

                    if (!cdna) {
                        writer.println(allele.getName() + "|" + sequenceLength);
                        writer.println(allele.sequence.seqString().toUpperCase());
                    }
                }
            }
        }

        if (cdna) {
            Map<String, String> cdnas = new HashMap<String, String>();
            for (String contig : contigs.keySet()) {
                StringBuilder sb = new StringBuilder();

                for (int index : contigs.get(contig).keySet()) {
                    List<Allele> list = contigs.get(contig).get(index);
                    Collections.sort(list, new Comparator<Allele>() {
                        @Override
                        public int compare(final Allele first, final Allele second) {
                            return first.sequence.seqString().length() - second.sequence.seqString().length();
                        }
                    });
                    Allele best = list.get(0);
                    sb.append(best.sequence.seqString());
                }

                String cdnaSequence = sb.toString();
                if (removeGaps) {
                    cdnaSequence = cdnaSequence.replaceAll("-", "");
                }

                // todo:  use strand from genomic region file
                if (!(gene.equals("HLA-A") || gene.equals("HLA-DPB1"))) {
                    cdnaSequence = DNATools.reverseComplement(DNATools.createDNA(cdnaSequence)).seqString();
                }

                cdnas.put(contig, cdnaSequence.toUpperCase());
            }

            List<Map.Entry<String, String>> list = new ArrayList<Map.Entry<String, String>>(cdnas.entrySet());
            Collections.sort(list, new Comparator<Map.Entry<String, String>>() {
                @Override
                public int compare(final Map.Entry<String, String> first,
                        final Map.Entry<String, String> second) {
                    return second.getValue().length() - first.getValue().length();
                }
            });

            for (Map.Entry<String, String> entry : list.subList(0,
                    expectedPloidy > list.size() ? list.size() : expectedPloidy)) {
                writer.println(entry.getKey() + "\n" + entry.getValue());
            }
        }

        return 0;
    } finally {
        try {
            writer.close();
        } catch (Exception e) {
            // empty
        }
    }
}

From source file:com.android.tools.idea.rendering.ResourceFolderRepository.java

private void rescanImmediately(@NonNull final PsiFile psiFile, final @NonNull ResourceFolderType folderType) {
    PsiFile file = psiFile;
    if (folderType == VALUES) {
        // For unit test tracking purposes only
        //noinspection AssignmentToStaticFieldFromInstanceMethod
        ourFullRescans++;

        // First delete out the previous items
        PsiResourceFile resourceFile = myResourceFiles.get(file);
        boolean removed = false;
        if (resourceFile != null) {
            for (ResourceItem item : resourceFile) {
                boolean removeFromFile = false; // Will throw away file
                removed |= removeItems(resourceFile, item.getType(), item.getName(), removeFromFile);
            }

            myResourceFiles.remove(file);
        }

        file = ensureValid(file);
        boolean added = false;
        if (file != null) {
            // Add items for this file
            PsiDirectory parent = file.getParent();
            assert parent != null; // since we have a folder type
            String dirName = parent.getName();
            PsiDirectory fileParent = psiFile.getParent();
            if (fileParent != null) {
                FolderConfiguration folderConfiguration = FolderConfiguration
                        .getConfigForFolder(fileParent.getName());
                if (folderConfiguration != null) {
                    added = scanValueFile(getQualifiers(dirName), file, folderConfiguration);
                }
            }
        }

        if (added || removed) {
            // TODO: Consider doing a deeper diff of the changes to the resource items
            // to determine if the removed and added items actually differ
            myGeneration++;
            invalidateItemCaches();
        }
    } else {
        PsiResourceFile resourceFile = myResourceFiles.get(file);
        if (resourceFile != null) {
            // Already seen this file; no need to do anything unless it's a layout or
            // menu file; in that case we may need to update the id's
            if (folderType == LAYOUT || folderType == MENU) {
                // For unit test tracking purposes only
                //noinspection AssignmentToStaticFieldFromInstanceMethod
                ourFullRescans++;

                // We've already seen this resource, so no change in the ResourceItem for the
                // file itself (e.g. @layout/foo from layout-land/foo.xml). However, we may have
                // to update the id's:
                Set<String> idsBefore = Sets.newHashSet();
                Set<String> idsAfter = Sets.newHashSet();
                ListMultimap<String, ResourceItem> map = myItems.get(ResourceType.ID);
                if (map != null) {
                    List<ResourceItem> idItems = Lists.newArrayList();
                    for (ResourceItem item : resourceFile) {
                        if (item.getType() == ResourceType.ID) {
                            idsBefore.add(item.getName());
                            idItems.add(item);
                        }
                    }
                    for (String id : idsBefore) {
                        // Note that ResourceFile has a flat map (not a multimap) so it doesn't
                        // record all items (unlike the myItems map) so we need to remove the map
                        // items manually, can't just do map.remove(item.getName(), item)
                        List<ResourceItem> mapItems = map.get(id);
                        if (mapItems != null && !mapItems.isEmpty()) {
                            List<ResourceItem> toDelete = Lists.newArrayListWithExpectedSize(mapItems.size());
                            for (ResourceItem mapItem : mapItems) {
                                if (mapItem.getSource() == resourceFile) {
                                    toDelete.add(mapItem);
                                }
                            }
                            for (ResourceItem delete : toDelete) {
                                map.remove(delete.getName(), delete);
                            }
                        }
                    }
                    resourceFile.removeItems(idItems);
                }

                // Add items for this file
                List<ResourceItem> idItems = Lists.newArrayList();
                file = ensureValid(file);
                if (file != null) {
                    addIds(idItems, file);
                }
                if (!idItems.isEmpty()) {
                    resourceFile.addItems(idItems);
                    for (ResourceItem item : idItems) {
                        idsAfter.add(item.getName());
                    }
                }

                if (!idsBefore.equals(idsAfter)) {
                    myGeneration++;
                }
                scanDataBinding(resourceFile, myGeneration);
                // Identities may have changed even if the ids are the same, so update maps
                invalidateItemCaches(ResourceType.ID);
            }
        } else {
            // For unit test tracking purposes only
            //noinspection AssignmentToStaticFieldFromInstanceMethod
            ourFullRescans++;

            PsiDirectory parent = file.getParent();
            assert parent != null; // since we have a folder type
            String dirName = parent.getName();

            List<ResourceType> resourceTypes = FolderTypeRelationship.getRelatedResourceTypes(folderType);
            assert resourceTypes.size() >= 1 : folderType;
            ResourceType type = resourceTypes.get(0);

            boolean idGenerating = resourceTypes.size() > 1;
            assert !idGenerating || resourceTypes.size() == 2 && resourceTypes.get(1) == ResourceType.ID;

            ListMultimap<String, ResourceItem> map = myItems.get(type);
            if (map == null) {
                map = ArrayListMultimap.create();
                myItems.put(type, map);
            }

            file = ensureValid(file);
            if (file != null) {
                PsiDirectory fileParent = psiFile.getParent();
                if (fileParent != null) {
                    FolderConfiguration folderConfiguration = FolderConfiguration
                            .getConfigForFolder(fileParent.getName());
                    if (folderConfiguration != null) {
                        scanFileResourceFile(getQualifiers(dirName), folderType, folderConfiguration, type,
                                idGenerating, map, file);
                    }
                }
                myGeneration++;
                invalidateItemCaches();
            }
        }
    }
}

From source file:loci.formats.in.SlidebookReader.java

@Override
protected void initFile(String id) throws FormatException, IOException {
    super.initFile(id);
    in = new RandomAccessInputStream(id);
    isSpool = checkSuffix(id, "spl");
    if (isSpool) {
        metadataInPlanes = new HashMap<Integer, Integer>();
    }

    LOGGER.info("Finding offsets to pixel data");

    // Slidebook files appear to be comprised of four types of blocks:
    // variable length pixel data blocks, 512 byte metadata blocks,
    // 128 byte metadata blocks, and variable length metadata blocks.
    //
    // Fixed-length metadata blocks begin with a 2 byte identifier,
    // e.g. 'i' or 'h'.
    // Following this are two unknown bytes (usually 256), then a 2 byte
    // endianness identifier - II or MM, for little or big endian, respectively.
    // Presumably these blocks contain useful information, but for the most
    // part we aren't sure what it is or how to extract it.
    //
    // Variable length metadata blocks begin with 0xffff and are
    // (as far as I know) always between two fixed-length metadata blocks.
    // These appear to be a relatively new addition to the format - they are
    // only present in files received on/after March 30, 2008.
    //
    // Each pixel data block corresponds to one series.
    // The first 'i' metadata block after each pixel data block contains
    // the width and height of the planes in that block - this can (and does)
    // vary between blocks.
    //
    // Z, C, and T sizes are computed heuristically based on the number of
    // metadata blocks of a specific type.

    in.skipBytes(4);
    core.get(0).littleEndian = in.read() == 0x49;
    in.order(isLittleEndian());

    metadataOffsets = new ArrayList<Long>();
    pixelOffsets = new ArrayList<Long>();
    pixelLengths = new ArrayList<Long>();
    ndFilters = new ArrayList<Double>();
    imageDescriptions = new HashMap<Integer, String>();

    in.seek(0);

    // gather offsets to metadata and pixel data blocks

    while (in.getFilePointer() < in.length() - 8) {
        LOGGER.debug("Looking for block at {}", in.getFilePointer());
        in.skipBytes(4);
        int checkOne = in.read();
        int checkTwo = in.read();
        if ((checkOne == 'I' && checkTwo == 'I') || (checkOne == 'M' && checkTwo == 'M')) {
            LOGGER.debug("Found metadata offset: {}", (in.getFilePointer() - 6));
            metadataOffsets.add(in.getFilePointer() - 6);
            in.skipBytes(in.readShort() - 8);
        } else if (checkOne == -1 && checkTwo == -1) {
            boolean foundBlock = false;
            byte[] block = new byte[8192];
            in.read(block);
            while (!foundBlock) {
                for (int i = 0; i < block.length - 2; i++) {
                    if ((block[i] == 'M' && block[i + 1] == 'M') || (block[i] == 'I' && block[i + 1] == 'I')) {
                        foundBlock = true;
                        in.seek(in.getFilePointer() - block.length + i - 2);
                        LOGGER.debug("Found metadata offset: {}", (in.getFilePointer() - 2));
                        metadataOffsets.add(in.getFilePointer() - 2);
                        in.skipBytes(in.readShort() - 5);
                        break;
                    }
                }
                if (!foundBlock) {
                    block[0] = block[block.length - 2];
                    block[1] = block[block.length - 1];
                    in.read(block, 2, block.length - 2);
                }
            }

        } else {
            String s = null;
            long fp = in.getFilePointer() - 6;
            in.seek(fp);
            int len = in.read();
            if (len > 0 && len <= 32) {
                s = in.readString(len);
            }

            if (s != null && s.indexOf("Annotation") != -1) {
                if (s.equals("CTimelapseAnnotation")) {
                    in.skipBytes(41);
                    if (in.read() == 0)
                        in.skipBytes(10);
                    else
                        in.seek(in.getFilePointer() - 1);
                } else if (s.equals("CIntensityBarAnnotation")) {
                    in.skipBytes(56);
                    int n = in.read();
                    while (n == 0 || n < 6 || n > 0x80)
                        n = in.read();
                    in.seek(in.getFilePointer() - 1);
                } else if (s.equals("CCubeAnnotation")) {
                    in.skipBytes(66);
                    int n = in.read();
                    if (n != 0)
                        in.seek(in.getFilePointer() - 1);
                } else if (s.equals("CScaleBarAnnotation")) {
                    in.skipBytes(38);
                    int extra = in.read();
                    if (extra <= 16)
                        in.skipBytes(3 + extra);
                    else
                        in.skipBytes(2);
                }
            } else if (s != null && s.indexOf("Decon") != -1) {
                in.seek(fp);
                while (in.read() != ']')
                    ;
            } else {
                if ((fp % 2) == 1)
                    fp -= 2;
                in.seek(fp);

                // make sure there isn't another block nearby

                String checkString = in.readString(64);
                if (checkString.indexOf("II") != -1 || checkString.indexOf("MM") != -1) {
                    int index = checkString.indexOf("II");
                    if (index == -1)
                        index = checkString.indexOf("MM");
                    in.seek(fp + index - 4);
                    continue;
                } else
                    in.seek(fp);

                LOGGER.debug("Found pixel offset at {}", fp);
                pixelOffsets.add(fp);
                try {
                    byte[] buf = new byte[8192];
                    boolean found = false;
                    int n = in.read(buf);

                    while (!found && in.getFilePointer() < in.length()) {
                        for (int i = 0; i < n - 6; i++) {
                            if ((buf[i + 4] == 'I' && buf[i + 5] == 'I')
                                    || (buf[i + 4] == 'M' && buf[i + 5] == 'M')) {
                                if (((buf[i] == 'h' || buf[i] == 'i') && buf[i + 1] == 0)
                                        || (buf[i] == 0 && (buf[i + 1] == 'h' || buf[i + 1] == 'i'))) {
                                    found = true;
                                    in.seek(in.getFilePointer() - n + i - 20);
                                    if (buf[i] == 'i' || buf[i + 1] == 'i') {
                                        pixelOffsets.remove(pixelOffsets.size() - 1);
                                    }
                                    break;
                                } else if (((buf[i] == 'j' || buf[i] == 'k' || buf[i] == 'n')
                                        && buf[i + 1] == 0)
                                        || (buf[i] == 0 && (buf[i + 1] == 'j' || buf[i + 1] == 'k'
                                                || buf[i + 1] == 'n'))
                                        || (buf[i] == 'o' && buf[i + 1] == 'n')) {
                                    found = true;
                                    pixelOffsets.remove(pixelOffsets.size() - 1);
                                    in.seek(in.getFilePointer() - n + i - 20);
                                    break;
                                }
                            }
                        }
                        if (!found) {
                            byte[] tmp = buf;
                            buf = new byte[8192];
                            System.arraycopy(tmp, tmp.length - 20, buf, 0, 20);
                            n = in.read(buf, 20, buf.length - 20);
                        }
                    }

                    if (in.getFilePointer() <= in.length()) {
                        if (pixelOffsets.size() > pixelLengths.size()) {
                            long length = in.getFilePointer() - fp;
                            if (((length / 2) % 2) == 1) {
                                pixelOffsets.set(pixelOffsets.size() - 1, fp + 2);
                                length -= 2;
                            }
                            if (length >= 1024) {
                                pixelLengths.add(length);
                            } else
                                pixelOffsets.remove(pixelOffsets.size() - 1);
                        }
                    } else
                        pixelOffsets.remove(pixelOffsets.size() - 1);
                } catch (EOFException e) {
                    pixelOffsets.remove(pixelOffsets.size() - 1);
                }
            }
        }
    }

    final List<Long> orderedSeries = new ArrayList<Long>();
    final ListMultimap<Long, Integer> uniqueSeries = ArrayListMultimap.create();

    for (int i = 0; i < pixelOffsets.size(); i++) {
        long length = pixelLengths.get(i).longValue();
        long offset = pixelOffsets.get(i).longValue();

        int padding = isSpool ? 0 : 7;

        if (length + offset + padding > in.length()) {
            pixelOffsets.remove(i);
            pixelLengths.remove(i);
            i--;
        } else {
            final List<Integer> v = uniqueSeries.get(length);
            if (v.isEmpty()) {
                orderedSeries.add(length);
            }
            uniqueSeries.put(length, i);
        }
    }

    if (pixelOffsets.size() > 1) {
        boolean little = isLittleEndian();

        int seriesCount = 0;
        for (final Long key : orderedSeries) {
            final List<Integer> pixelIndexes = uniqueSeries.get(key);
            int nBlocks = pixelIndexes.size();
            if (nBlocks == 0) {
                nBlocks++;
            }
            seriesCount += nBlocks;
        }

        core.clear();
        for (int i = 0; i < seriesCount; i++) {
            CoreMetadata ms = new CoreMetadata();
            core.add(ms);
            ms.littleEndian = little;
        }
    }

    LOGGER.info("Determining dimensions");

    // determine total number of pixel bytes

    final Map<Integer, Float> pixelSize = new HashMap<Integer, Float>();
    final Map<Integer, String> objectives = new HashMap<Integer, String>();
    final Map<Integer, Integer> magnifications = new HashMap<Integer, Integer>();
    final List<Double> pixelSizeZ = new ArrayList<Double>();
    final List<Integer> exposureTimes = new ArrayList<Integer>();

    long pixelBytes = 0;
    for (int i = 0; i < pixelLengths.size(); i++) {
        pixelBytes += pixelLengths.get(i).longValue();
    }

    String[] imageNames = new String[getSeriesCount()];
    final List<String> channelNames = new ArrayList<String>();
    int nextName = 0;

    int[] sizeX = new int[pixelOffsets.size()];
    int[] sizeY = new int[pixelOffsets.size()];
    int[] sizeZ = new int[pixelOffsets.size()];
    int[] sizeC = new int[pixelOffsets.size()];

    int[] divValues = new int[pixelOffsets.size()];

    // try to find the width and height
    int iCount = 0;
    int hCount = 0;
    int uCount = 0;
    int prevSeries = -1;
    int prevSeriesU = -1;
    int nextChannel = 0;
    for (int i = 0; i < metadataOffsets.size(); i++) {
        long off = metadataOffsets.get(i).longValue();
        if (isSpool && off == 0) {
            off = 276;
        }
        in.seek(off);
        long next = i == metadataOffsets.size() - 1 ? in.length() : metadataOffsets.get(i + 1).longValue();
        int totalBlocks = (int) ((next - off) / 128);

        // if there are more than 100 blocks, we probably found a pixel block
        // by accident (but we'll check the first block anyway)
        //if (totalBlocks > 100) totalBlocks = 100;
        for (int q = 0; q < totalBlocks; q++) {
            if (withinPixels(off + q * 128)) {
                continue;
            }
            in.seek(off + (long) q * 128);
            char n = (char) in.readShort();
            while (n == 0 && in.getFilePointer() < off + (q + 1) * 128) {
                n = (char) in.readShort();
            }
            if (in.getFilePointer() >= in.length() - 2)
                break;
            if (n == 'i') {
                iCount++;
                in.skipBytes(70);
                int expTime = in.readInt();
                if (expTime > 0) {
                    exposureTimes.add(expTime);
                }
                in.skipBytes(20);
                final Double size = (double) in.readFloat();
                if (isGreaterThanEpsilon(size)) {
                    pixelSizeZ.add(size);
                } else {
                    pixelSizeZ.add(null);
                }
                in.seek(in.getFilePointer() - 20);

                for (int j = 0; j < pixelOffsets.size(); j++) {
                    long end = j == pixelOffsets.size() - 1 ? in.length() : pixelOffsets.get(j + 1).longValue();
                    if (in.getFilePointer() < end) {
                        if (sizeX[j] == 0) {
                            int x = in.readShort();
                            int y = in.readShort();
                            if (x != 0 && y != 0) {
                                sizeX[j] = x;
                                sizeY[j] = y;
                                int checkX = in.readShort();
                                int checkY = in.readShort();
                                int div = in.readShort();
                                if (checkX == checkY) {
                                    divValues[j] = div;
                                    sizeX[j] /= (div == 0 ? 1 : div);
                                    div = in.readShort();
                                    sizeY[j] /= (div == 0 ? 1 : div);
                                }
                            } else
                                in.skipBytes(8);
                        }
                        if (prevSeries != j) {
                            iCount = 1;
                        }
                        prevSeries = j;
                        sizeC[j] = iCount;
                        break;
                    }
                }
            } else if (n == 'u') {
                uCount++;
                for (int j = 0; j < getSeriesCount(); j++) {
                    long end = j == getSeriesCount() - 1 ? in.length() : pixelOffsets.get(j + 1).longValue();
                    if (in.getFilePointer() < end) {
                        if (prevSeriesU != j) {
                            uCount = 1;
                        }
                        prevSeriesU = j;
                        sizeZ[j] = uCount;
                        break;
                    }
                }
            } else if (n == 'h')
                hCount++;
            else if (n == 'j') {
                in.skipBytes(2);
                String check = in.readString(2);
                if (check.equals("II") || check.equals("MM")) {
                    long pointer = in.getFilePointer();
                    // this block should contain an image name
                    in.skipBytes(10);
                    if (nextName < imageNames.length) {
                        String name = readCString().trim();
                        if (name.length() > 0) {
                            imageNames[nextName++] = name;
                        }
                    }

                    long fp = in.getFilePointer();
                    if ((in.getFilePointer() % 2) == 1)
                        in.skipBytes(1);
                    while (in.readShort() == 0)
                        ;
                    if (in.readShort() == 0) {
                        in.skipBytes(4);
                    } else {
                        in.skipBytes(16);
                    }
                    long diff = in.getFilePointer() - fp;
                    if (diff > 123 && (fp % 2) == 0 && diff != 142 && diff != 143 && diff != 130) {
                        in.seek(fp + 123);
                    }

                    int x = in.readInt();
                    int y = in.readInt();

                    if (x > 0x8000 || y > 0x8000) {
                        in.seek(in.getFilePointer() - 7);
                        x = in.readInt();
                        y = in.readInt();
                    } else if (x == 0 || y == 0) {
                        in.seek(in.getFilePointer() - 27);
                        x = in.readInt();
                        y = in.readInt();
                    }

                    int div = in.readShort();
                    x /= (div == 0 || div > 0x100 ? 1 : div);
                    div = in.readShort();
                    y /= (div == 0 || div > 0x100 ? 1 : div);

                    if (x > 0x10000 || y > 0x10000) {
                        in.seek(in.getFilePointer() - 11);
                        x = in.readInt();
                        y = in.readInt();
                        div = in.readShort();
                        x /= (div == 0 ? 1 : div);
                        div = in.readShort();
                        y /= (div == 0 ? 1 : div);

                        if (x > 0x10000 || y > 0x10000) {
                            in.skipBytes(2);

                            x = in.readInt();
                            y = in.readInt();
                            div = in.readShort();
                            x /= (div == 0 ? 1 : div);
                            div = in.readShort();
                            y /= (div == 0 ? 1 : div);
                        }
                    }

                    if (nextName >= 1 && x > 16 && (x < sizeX[nextName - 1] || sizeX[nextName - 1] == 0)
                            && y > 16 && (y < sizeY[nextName - 1] || sizeY[nextName - 1] == 0)) {
                        sizeX[nextName - 1] = x;
                        sizeY[nextName - 1] = y;
                        adjust = false;
                    }

                    in.seek(pointer + 214);
                    int validBits = in.readShort();
                    if (nextName >= 1 && core.get(nextName - 1).bitsPerPixel == 0 && validBits <= 16
                            && validBits > 0) {
                        core.get(nextName - 1).bitsPerPixel = validBits;
                    }
                }
            } else if (n == 'm') {
                // this block should contain a channel name
                if (in.getFilePointer() > pixelOffsets.get(0).longValue() || isSpool) {
                    in.skipBytes(14);
                    String name = readCString().trim();
                    if (name.length() > 1) {
                        channelNames.add(name);
                    }
                }
            } else if (n == 'd') {
                // objective info and pixel size X/Y
                in.skipBytes(6);
                long fp = in.getFilePointer();
                while (in.read() == 0)
                    ;
                in.seek(in.getFilePointer() - 1);
                long nSkipped = in.getFilePointer() - fp;
                if (nSkipped < 8) {
                    in.skipBytes((int) (8 - nSkipped));
                }
                String objective = readCString().trim();
                in.seek(fp + 144);
                float pixSize = in.readFloat();
                int magnification = in.readShort();

                int mult = 1;
                if (pixelSize.size() < divValues.length) {
                    mult = divValues[pixelSize.size()];
                }
                float v = pixSize * mult;
                if (isGreaterThanEpsilon(v)) {
                    pixelSize.put(nextName - 1, v);
                    objectives.put(nextName - 1, objective);
                    magnifications.put(nextName - 1, magnification);
                }
            } else if (n == 'e') {
                in.skipBytes(174);
                ndFilters.add((double) in.readFloat());
                in.skipBytes(40);
                if (nextName >= 0 && nextName < getSeriesCount()) {
                    setSeries(nextName);
                    addSeriesMetaList("channel intensification", in.readShort());
                }
            } else if (n == 'k') {
                in.skipBytes(14);
                if (nextName > 0)
                    setSeries(nextName - 1);
                addSeriesMeta("Mag. changer", readCString());
            } else if (n == 'n') {
                long fp1 = in.getFilePointer();
                in.seek(in.getFilePointer() - 3);
                while (in.read() != 0) {
                    in.seek(in.getFilePointer() - 2);
                }
                long fp2 = in.getFilePointer();
                int len = in.read() - 1;

                int currentSeries = 0;
                for (int j = 0; j < pixelOffsets.size(); j++) {
                    long end = j == pixelOffsets.size() - 1 ? in.length() : pixelOffsets.get(j + 1).longValue();
                    if (in.getFilePointer() < end) {
                        currentSeries = j;
                        break;
                    }
                }

                if (len > 0 && fp1 - fp2 != 2) {
                    if (fp2 < fp1) {
                        in.seek(in.getFilePointer() - 1);
                        String descr = readCString();
                        descr = descr.substring(0, descr.length() - 2);
                        if (!descr.endsWith("Annotatio")) {
                            imageDescriptions.put(currentSeries, descr.trim());
                        }
                    } else {
                        imageDescriptions.put(currentSeries, in.readString(len).trim());
                    }
                }
            } else if (isSpool) {
                // spool files don't necessarily have block identifiers
                for (int j = 0; j < pixelOffsets.size(); j++) {
                    long end = j == pixelOffsets.size() - 1 ? in.length() : pixelOffsets.get(j + 1).longValue();
                    if (in.getFilePointer() < end) {
                        in.skipBytes(14);
                        int check = in.readShort();
                        int x = in.readShort();
                        int y = in.readShort();
                        if (check == 0 && x > 16 && y > 16) {
                            sizeX[j] = x;
                            sizeY[j] = y;
                        }
                        adjust = false;
                        break;
                    }
                }
            }
        }
    }

    // TODO: extend the name matching to include "* Timepoint *"
    String currentName = imageNames[0];
    ArrayList<CoreMetadata> realCore = new ArrayList<CoreMetadata>();
    int t = 1;
    boolean noFlattening = currentName != null && currentName.equals("Untitled");
    for (int i = 1; i < getSeriesCount(); i++) {
        if (imageNames[i] == null || !imageNames[i].equals(currentName) || noFlattening
                || (i == 1 && (sizeX[i - 1] != sizeX[i] || sizeY[i - 1] != sizeY[i] || sizeC[i - 1] != sizeC[i]
                        || sizeZ[i - 1] != sizeZ[i]))) {
            currentName = imageNames[i];
            CoreMetadata nextCore = core.get(i - 1);
            nextCore.sizeT = t;
            realCore.add(nextCore);
            if (t == 1) {
                noFlattening = true;
            }
            t = 1;
            if (i == 1) {
                noFlattening = true;
            }
        } else {
            t++;
        }
    }
    core.get(getSeriesCount() - 1).sizeT = t;
    realCore.add(core.get(getSeriesCount() - 1));
    boolean flattened = false;
    if (core.size() != realCore.size() && !noFlattening) {
        flattened = true;
        core = realCore;
        orderedSeries.clear();
        uniqueSeries.clear();
        int nextIndex = 0;
        for (int i = 0; i < core.size(); i++) {
            long thisSeries = (long) i;
            orderedSeries.add(thisSeries);
            uniqueSeries.put(thisSeries, nextIndex);

            long length = pixelLengths.get(nextIndex);
            length *= core.get(i).sizeT;
            pixelLengths.set(i, length);

            nextIndex += core.get(i).sizeT;
        }
    }

    planeOffset = new long[getSeriesCount()][];

    boolean divByTwo = false;
    boolean divZByTwo = false;

    int nextPixelIndex = 0;
    int nextBlock = 0;
    int nextOffsetIndex = 0;

    for (int i = 0; i < getSeriesCount(); i++) {
        setSeries(i);
        CoreMetadata ms = core.get(i);

        List<Integer> pixelIndexes = uniqueSeries.get(orderedSeries.get(nextPixelIndex));
        int nBlocks = pixelIndexes.size();
        if (nextBlock >= nBlocks) {
            nextPixelIndex++;
            nextBlock = 0;
            pixelIndexes = uniqueSeries.get(orderedSeries.get(nextPixelIndex));
            nBlocks = pixelIndexes.size();
        } else {
            nextBlock++;
        }
        int index = pixelIndexes.size() == getSeriesCount() ? pixelIndexes.get(0) : i;

        long pixels = pixelLengths.get(index).longValue() / 2;
        boolean x = true;

        ms.sizeX = sizeX[index];
        ms.sizeY = sizeY[index];
        ms.sizeC = sizeC[index];
        ms.sizeZ = sizeZ[index];

        if (getSizeC() > 64) {
            // dimensions are probably incorrect
            ms.sizeC = 1;
            ms.sizeZ = 1;
            ms.sizeX /= 2;
            ms.sizeY /= 2;
        }

        boolean isMontage = false;
        if (i > 1 && ((imageNames[i] != null && imageNames[i].startsWith("Montage")) || getSizeC() >= 32)) {
            ms.sizeC = core.get(1).sizeC;
            ms.sizeZ = core.get(1).sizeZ;
            isMontage = true;
        }

        boolean cGreater = ms.sizeC > ms.sizeZ;

        if (isSpool) {
            if (ms.sizeC == 0) {
                ms.sizeC = channelNames.size();
            }
        }

        if (ms.sizeZ % nBlocks == 0 && nBlocks != getSizeC()) {
            int z = ms.sizeZ / nBlocks;
            if (z <= nBlocks) {
                ms.sizeZ = z;
            }
        }

        if (divByTwo)
            ms.sizeX /= 2;

        if (divZByTwo && ms.sizeC > 1) {
            ms.sizeZ = (int) ((pixels / (ms.sizeX * ms.sizeY)) / 2);
            ms.sizeC = 2;
        }

        if (getSizeC() == 0)
            ms.sizeC = 1;
        if (getSizeZ() == 0)
            ms.sizeZ = 1;

        long plane = pixels / (getSizeC() * getSizeZ());
        if (getSizeT() > 0) {
            plane /= getSizeT();
        }

        if (getSizeX() * getSizeY() == pixels) {
            if (getSizeC() == 2 && (getSizeX() % 2 == 0) && (getSizeY() % 2 == 0)) {
                if (getSizeC() != getSizeZ()) {
                    ms.sizeX /= 2;
                    divByTwo = true;
                } else {
                    divZByTwo = true;
                    ms.sizeC = 1;
                }
            } else {
                ms.sizeC = 1;
            }
            ms.sizeZ = 1;
        } else if (getSizeX() * getSizeY() * getSizeZ() == pixels) {
            if (getSizeC() == 2 && getSizeC() != getSizeZ() && (getSizeX() % 2 == 0) && (getSizeY() % 2 == 0)
                    && (i == 0 || core.get(i - 1).sizeC > 1)) {
                ms.sizeX /= 2;
                divByTwo = true;
            } else {
                ms.sizeC = 1;
                ms.sizeZ = (int) (pixels / (getSizeX() * getSizeY()));
            }
        } else if (getSizeX() * getSizeY() * getSizeC() == pixels) {
            ms.sizeC = (int) (pixels / (getSizeX() * getSizeY()));
            ms.sizeZ = 1;
        } else if ((getSizeX() / 2) * (getSizeY() / 2) * getSizeZ() == pixels) {
            ms.sizeX /= 2;
            ms.sizeY /= 2;
        } else if ((getSizeX() / 2) * (getSizeY() / 2) * getSizeC() * getSizeZ() * getSizeT() == pixels) {
            ms.sizeX /= 2;
            ms.sizeY /= 2;
        } else {
            boolean validSizes = true;
            try {
                DataTools.safeMultiply32(getSizeX(), getSizeY());
            } catch (IllegalArgumentException e) {
                validSizes = false;
            }
            if (getSizeX() == 0 || getSizeY() == 0 || !validSizes) {
                ms.sizeX = sizeX[index] / 256;
                ms.sizeY = sizeY[index] / 256;
            }
            long p = pixels / (getSizeX() * getSizeY());
            if (pixels == p * getSizeX() * getSizeY()) {
                if (p != getSizeC() * getSizeZ()) {
                    if (getSizeC() > 1 && core.get(i).sizeZ >= (p / (getSizeC() - 1)) && p >= getSizeC() - 1
                            && p > 2) {
                        core.get(i).sizeC--;
                        core.get(i).sizeZ = (int) (p / getSizeC());
                    } else if (p % getSizeC() != 0) {
                        core.get(i).sizeC = 1;
                        core.get(i).sizeZ = (int) p;
                    } else if (ms.sizeZ == p + 1) {
                        ms.sizeC = 1;
                        ms.sizeZ = 1;
                        ms.sizeT = (int) p;
                    } else if (getSizeC() > 1 && ms.sizeZ == (p / (getSizeC() - 1)) + 1) {
                        ms.sizeC--;
                        ms.sizeZ = 1;
                        ms.sizeT = (int) (p / getSizeC());
                    } else {
                        if (p > getSizeZ() && (p / getSizeZ() < getSizeZ() - 1)) {
                            ms.sizeT = (int) (p / getSizeC());
                            ms.sizeZ = 1;
                        } else if (pixels % getSizeX() == 0 && pixels % getSizeY() == 0) {
                            while (getSizeX() * getSizeY() > plane) {
                                ms.sizeX /= 2;
                                ms.sizeY /= 2;
                            }
                            int originalX = getSizeX();
                            while (getSizeX() * getSizeY() < plane) {
                                ms.sizeX += originalX;
                                ms.sizeY = (int) (plane / getSizeX());
                            }
                            int newX = getSizeX() + originalX;
                            if (newX * (plane / newX) == plane && !flattened) {
                                ms.sizeX = newX;
                                ms.sizeY = (int) (plane / newX);
                            }
                        } else if (!adjust) {
                            ms.sizeZ = (int) (p / getSizeC());
                        } else if (isMontage) {
                            pixels /= getSizeC();
                            while (pixels != getSizeX() * getSizeY() || (getSizeY() / getSizeX() > 2)) {
                                ms.sizeX += 16;
                                ms.sizeY = (int) (pixels / getSizeX());
                            }
                        }
                    }
                }
            } else if (isSpool) {
                ms.sizeZ = (int) (p / getSizeC());
            } else if (p == 0) {
                adjust = true;
                if (getSizeC() > 1) {
                    if (getSizeC() == 3) {
                        ms.sizeC = 2;
                    } else {
                        ms.sizeC = 1;
                    }
                }
            } else {
                if (ms.sizeC > 1 && p <= ms.sizeC) {
                    int z = getSizeZ();
                    ms.sizeZ = 1;
                    ms.sizeC = (int) p;
                    ms.sizeT = 1;

                    if (isMontage && pixels == getSizeX() * (pixels / getSizeX())) {
                        pixels /= getSizeC();
                        while (pixels != getSizeX() * getSizeY()) {
                            ms.sizeX -= 16;
                            ms.sizeY = (int) (pixels / getSizeX());
                        }
                    } else if (!isMontage) {
                        ms.sizeZ = z;
                        adjust = true;
                    }
                } else if (isMontage) {
                    pixels /= (getSizeC() * getSizeZ());
                    int originalX = getSizeX();
                    int originalY = getSizeY();
                    boolean xGreater = getSizeX() > getSizeY();
                    while (getSizeX() * getSizeY() != 0 && (pixels % (getSizeX() * getSizeY()) != 0
                            || ((double) getSizeY() / getSizeX() > 2))) {
                        ms.sizeX += originalX;
                        ms.sizeY = (int) (pixels / getSizeX());
                        if (!xGreater && getSizeX() >= getSizeY()) {
                            break;
                        }
                    }
                    if (getSizeX() * getSizeY() == 0) {
                        if (pixels != getSizeX() * getSizeY()) {
                            pixels *= getSizeC() * getSizeZ();
                            ms.sizeX = originalX;
                            ms.sizeY = originalY;
                            isMontage = false;
                        }
                    }
                    if (pixels % (originalX - (originalX / 4)) == 0) {
                        int newX = originalX - (originalX / 4);
                        int newY = (int) (pixels / newX);
                        if (newX * newY == pixels) {
                            ms.sizeX = newX;
                            ms.sizeY = newY;
                            isMontage = true;
                            adjust = false;
                        }
                    }
                } else if (p != getSizeZ() * getSizeC()) {
                    if (pixels % getSizeX() == 0 && pixels % getSizeY() == 0) {
                        while (getSizeX() * getSizeY() > plane) {
                            ms.sizeX /= 2;
                            ms.sizeY /= 2;
                        }
                    } else {
                        ms.sizeZ = 1;
                        ms.sizeC = 1;
                        ms.sizeT = (int) p;
                    }
                }
            }
        }

        if (getSizeC() == 0) {
            ms.sizeC = 1;
        }
        if (getSizeZ() == 0) {
            ms.sizeZ = 1;
        }

        int div = getSizeC() * getSizeZ();
        if (getSizeT() > 0) {
            div *= getSizeT();
        }
        if (div > 1) {
            plane = pixels / div;
        }

        long diff = 2 * (pixels - (getSizeX() * getSizeY() * div));
        if ((pixelLengths.get(index).longValue() % 2) == 1) {
            diff++;
        }

        if (Math.abs(diff) > plane / 2) {
            diff = 0;
        }

        if (adjust && diff == 0) {
            double ratio = (double) getSizeX() / getSizeY();
            boolean widthGreater = getSizeX() > getSizeY();
            while (getSizeX() * getSizeY() > plane) {
                if (x)
                    ms.sizeX /= 2;
                else
                    ms.sizeY /= 2;
                x = !x;
            }
            if (getSizeX() * getSizeY() != plane) {
                while (ratio - ((double) getSizeX() / getSizeY()) >= 0.01) {
                    boolean first = true;
                    while (first || getSizeX() * getSizeY() < plane
                            || (getSizeX() < getSizeY() && widthGreater)) {
                        if (first) {
                            first = false;
                        }
                        ms.sizeX++;
                        ms.sizeY = (int) (plane / getSizeX());
                    }
                }
            }
        }

        int nPlanes = getSizeZ() * getSizeC();
        ms.sizeT = (int) (pixels / (getSizeX() * getSizeY() * nPlanes));
        while (getSizeX() * getSizeY() * nPlanes * getSizeT() > pixels) {
            ms.sizeT--;
        }
        if (getSizeT() == 0)
            ms.sizeT = 1;

        if (cGreater && getSizeC() == 1 && getSizeZ() > 1) {
            ms.sizeC = getSizeZ();
            ms.sizeZ = 1;
        }

        ms.imageCount = nPlanes * getSizeT();
        ms.pixelType = FormatTools.UINT16;
        ms.dimensionOrder = nBlocks > 1 ? "XYZCT" : "XYZTC";
        ms.indexed = false;
        ms.falseColor = false;
        ms.metadataComplete = true;

        planeOffset[i] = new long[getImageCount()];
        int nextImage = 0;
        Integer pixelIndex = i;
        long offset = pixelOffsets.get(pixelIndex);
        int planeSize = getSizeX() * getSizeY() * 2;

        if (diff < planeSize) {
            offset += diff;
        } else {
            offset += (diff % planeSize);
        }

        long length = pixelLengths.get(pixelIndex);
        int planes = (int) (length / planeSize);
        if (planes > ms.imageCount) {
            planes = ms.imageCount;
        }

        for (int p = 0; p < planes; p++, nextImage++) {
            int[] zct = getZCTCoords(p);
            if (flattened && zct[0] == 0 && zct[1] == 0) {
                offset = pixelOffsets.get(nextOffsetIndex++);

                if (zct[2] > 0 && planeOffset[i][nextImage - 1] % 2 != offset % 2
                        && (offset - planeOffset[i][nextImage - 1] > 3 * getSizeX() * getSizeY())
                        && diff == 0) {
                    diff = 31;
                }
                if (diff < planeSize) {
                    offset += diff;
                } else {
                    offset += (diff % planeSize);
                }

                planeOffset[i][nextImage] = offset;
            } else if (flattened && zct[0] == 0) {
                int idx = getIndex(0, 0, zct[2]);
                planeOffset[i][nextImage] = planeOffset[i][idx] + zct[1] * planeSize;
            } else if (flattened) {
                planeOffset[i][nextImage] = planeOffset[i][nextImage - 1] + planeSize;
            } else if (nextImage < planeOffset[i].length) {
                planeOffset[i][nextImage] = offset + p * planeSize;
            }
        }
    }
    setSeries(0);

    if (pixelSizeZ.size() > 0) {
        int seriesIndex = 0;
        for (int q = 0; q < getSeriesCount(); q++) {
            CoreMetadata msq = core.get(q);
            int inc = msq.sizeC * msq.sizeT;
            if (seriesIndex + inc > pixelSizeZ.size()) {
                int z = msq.sizeT;
                msq.sizeT = msq.sizeZ;
                msq.sizeZ = z;
                inc = msq.sizeC * msq.sizeT;
            }
            seriesIndex += inc;
        }
    }

    MetadataStore store = makeFilterMetadata();
    MetadataTools.populatePixels(store, this, true);

    // populate Image data

    for (int i = 0; i < getSeriesCount(); i++) {
        if (imageNames[i] != null)
            store.setImageName(imageNames[i], i);
    }

    if (getMetadataOptions().getMetadataLevel() != MetadataLevel.MINIMUM) {
        for (int i = 0; i < getSeriesCount(); i++) {
            if (imageDescriptions.containsKey(i)) {
                store.setImageDescription(imageDescriptions.get(i), i);
            } else {
                store.setImageDescription("", i);
            }
        }

        // link Instrument and Image
        String instrumentID = MetadataTools.createLSID("Instrument", 0);
        store.setInstrumentID(instrumentID, 0);
        for (int i = 0; i < getSeriesCount(); i++) {
            store.setImageInstrumentRef(instrumentID, i);
        }

        int index = 0;

        // populate Objective data
        int objectiveIndex = 0;
        for (int i = 0; i < getSeriesCount(); i++) {
            String objective = objectives.get(i);
            if (objective != null) {
                store.setObjectiveModel(objective, 0, objectiveIndex);
                store.setObjectiveCorrection(getCorrection("Other"), 0, objectiveIndex);
                store.setObjectiveImmersion(getImmersion("Other"), 0, objectiveIndex);
                if (magnifications != null && magnifications.get(i) > 0) {
                    store.setObjectiveNominalMagnification(magnifications.get(i).doubleValue(), 0,
                            objectiveIndex);
                }

                // link Objective to Image
                String objectiveID = MetadataTools.createLSID("Objective", 0, objectiveIndex);
                store.setObjectiveID(objectiveID, 0, objectiveIndex);
                if (i < getSeriesCount()) {
                    store.setObjectiveSettingsID(objectiveID, i);
                }

                objectiveIndex++;
            }
        }

        // populate Dimensions data

        int exposureIndex = exposureTimes.size() - channelNames.size();
        if (exposureIndex >= 1) {
            exposureIndex++;
        }

        for (int i = 0; i < getSeriesCount(); i++) {
            setSeries(i);
            if (pixelSize.get(i) != null) {
                final Double size = pixelSize.get(i).doubleValue();
                Length x = FormatTools.getPhysicalSizeX(size);
                Length y = FormatTools.getPhysicalSizeY(size);
                if (x != null) {
                    store.setPixelsPhysicalSizeX(x, i);
                }
                if (y != null) {
                    store.setPixelsPhysicalSizeY(y, i);
                }
            }
            int idx = 0;
            for (int q = 0; q < i; q++) {
                idx += core.get(q).sizeC * core.get(q).sizeT;
            }

            if (idx < pixelSizeZ.size() && pixelSizeZ.get(idx) != null) {
                Length z = FormatTools.getPhysicalSizeZ(pixelSizeZ.get(idx));
                if (z != null) {
                    store.setPixelsPhysicalSizeZ(z, i);
                }
            }

            for (int plane = 0; plane < getImageCount(); plane++) {
                int c = getZCTCoords(plane)[1];
                if (exposureIndex + c < exposureTimes.size() && exposureIndex + c >= 0
                        && exposureTimes.get(exposureIndex + c) != null) {
                    store.setPlaneExposureTime(
                            new Time(exposureTimes.get(exposureIndex + c).doubleValue(), UNITS.S), i, plane);
                }
            }
            exposureIndex += getSizeC();
        }
        setSeries(0);

        // populate LogicalChannel data

        for (int i = 0; i < getSeriesCount(); i++) {
            setSeries(i);
            for (int c = 0; c < getSizeC(); c++) {
                if (index < channelNames.size() && channelNames.get(index) != null) {
                    store.setChannelName(channelNames.get(index), i, c);
                    addSeriesMetaList("channel", channelNames.get(index));
                }
                if (index < ndFilters.size() && ndFilters.get(index) != null) {
                    store.setChannelNDFilter(ndFilters.get(index), i, c);
                    addSeriesMeta("channel " + c + " Neutral density", ndFilters.get(index));
                }
                index++;
            }
        }
        setSeries(0);
    }
}
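
The reader above keys its uniqueSeries multimap by series identifier, so uniqueSeries.get(...) returns every pixel-block index recorded for that series in insertion order. The following minimal sketch illustrates that idiom in isolation; the class and variable names are illustrative and not taken from the reader:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.List;

public class SeriesBlockIndexExample {
    public static void main(String[] args) {
        // Map each series id to the pixel-block indexes that belong to it.
        ListMultimap<Long, Integer> uniqueSeries = ArrayListMultimap.create();
        uniqueSeries.put(0L, 0);
        uniqueSeries.put(0L, 1);
        uniqueSeries.put(1L, 2);

        // get() returns a List that preserves insertion order and duplicates.
        List<Integer> blocksForSeries0 = uniqueSeries.get(0L);
        System.out.println(blocksForSeries0); // [0, 1]

        // The returned list is a live view: writes through it update the multimap.
        blocksForSeries0.add(3);
        System.out.println(uniqueSeries.get(0L)); // [0, 1, 3]
    }
}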

From source file:com.android.tools.idea.res.ResourceFolderRepository.java

@VisibleForTesting
boolean equalFilesItems(ResourceFolderRepository other) {
    File myResourceDirFile = VfsUtilCore.virtualToIoFile(myResourceDir);
    File otherResourceDir = VfsUtilCore.virtualToIoFile(other.myResourceDir);
    if (!FileUtil.filesEqual(myResourceDirFile, otherResourceDir)) {
        return false;
    }

    if (myResourceFiles.size() != other.myResourceFiles.size()) {
        return false;
    }
    for (Map.Entry<VirtualFile, ResourceFile> fileEntry : myResourceFiles.entrySet()) {
        ResourceFile otherResFile = other.myResourceFiles.get(fileEntry.getKey());
        if (otherResFile == null) {
            return false;
        }
        if (!FileUtil.filesEqual(fileEntry.getValue().getFile(), otherResFile.getFile())) {
            return false;
        }
    }

    if (myItems.size() != other.myItems.size()) {
        return false;
    }
    for (Map.Entry<ResourceType, ListMultimap<String, ResourceItem>> entry : myItems.entrySet()) {
        ListMultimap<String, ResourceItem> ownEntries = entry.getValue();
        ListMultimap<String, ResourceItem> otherEntries = other.myItems.get(entry.getKey());
        if (otherEntries == null || otherEntries.size() != ownEntries.size()) {
            return false;
        }
        for (Map.Entry<String, ResourceItem> itemEntry : ownEntries.entries()) {
            // Guava multimaps return an empty list (not null) for a missing key; the null check below is defensive.
            List<ResourceItem> otherItemsList = otherEntries.get(itemEntry.getKey());
            if (otherItemsList == null) {
                return false;
            }
            final ResourceItem item = itemEntry.getValue();
            if (!ContainerUtil.exists(otherItemsList, new Condition<ResourceItem>() {
                @Override
                public boolean value(ResourceItem resourceItem) {
                    // Use #compareTo instead of #equals because #equals compares pointers of mSource.
                    if (resourceItem.compareTo(item) != 0) {
                        return false;
                    }
                    // #compareTo doesn't check the ResourceValue. At least check that getValue is equivalent (getRawXmlText may be different).
                    // Skip ID type resources, where the ResourceValues are not important and where blob writing doesn't preserve the value.
                    if (item.getType() != ResourceType.ID) {
                        ResourceValue resValue = item.getResourceValue(false);
                        ResourceValue otherResValue = resourceItem.getResourceValue(false);
                        if (resValue == null || otherResValue == null) {
                            if (resValue != otherResValue) {
                                return false;
                            }
                        } else {
                            String resValueStr = resValue.getValue();
                            String otherResValueStr = otherResValue.getValue();
                            if (resValueStr == null || otherResValueStr == null) {
                                if (resValueStr != otherResValueStr) {
                                    return false;
                                }
                            } else {
                                if (!resValueStr.equals(otherResValueStr)) {
                                    return false;
                                }
                            }
                        }
                    }
                    // We can only compareValueWith (compare equivalence of XML nodes) for VALUE items.
                    // For others, the XML node may be different before and after serialization.
                    ResourceFile source = item.getSource();
                    ResourceFile otherSource = resourceItem.getSource();
                    if (source != null && otherSource != null) {
                        ResourceFolderType ownFolderType = ResourceHelper.getFolderType(source);
                        ResourceFolderType otherFolderType = ResourceHelper.getFolderType(otherSource);
                        if (otherFolderType != ownFolderType) {
                            return false;
                        }
                        if (otherFolderType == VALUES) {
                            return resourceItem.compareValueWith(item);
                        }
                    }
                    return true;
                }
            })) {
                return false;
            }
        }
    }

    // Only compare the keys.
    return myDataBindingResourceFiles.keySet().equals(other.myDataBindingResourceFiles.keySet());
}
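
The comparison above looks up otherEntries.get(itemEntry.getKey()) for each of its own entries. Below is a compact, self-contained sketch of that key-by-key comparison, with plain String values standing in for ResourceItem; the names are illustrative only:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.List;
import java.util.Map;

public class MultimapComparisonExample {
    // True if every entry of 'own' has a matching value under the same key in 'other'.
    static <K, V> boolean containsAllEntries(ListMultimap<K, V> own, ListMultimap<K, V> other) {
        for (Map.Entry<K, V> entry : own.entries()) {
            // get() yields an empty list for an absent key, so no null check is required here.
            List<V> candidates = other.get(entry.getKey());
            if (!candidates.contains(entry.getValue())) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        ListMultimap<String, String> mine = ArrayListMultimap.create();
        ListMultimap<String, String> theirs = ArrayListMultimap.create();
        mine.put("app_name", "values/strings.xml");
        theirs.put("app_name", "values/strings.xml");
        theirs.put("app_name", "values-v21/strings.xml");
        System.out.println(containsAllEntries(mine, theirs)); // true
        System.out.println(containsAllEntries(theirs, mine)); // false
    }
}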

From source file:org.janusgraph.graphdb.database.StandardJanusGraph.java

public ModificationSummary prepareCommit(final Collection<InternalRelation> addedRelations,
        final Collection<InternalRelation> deletedRelations, final Predicate<InternalRelation> filter,
        final BackendTransaction mutator, final StandardJanusGraphTx tx, final boolean acquireLocks)
        throws BackendException {

    ListMultimap<Long, InternalRelation> mutations = ArrayListMultimap.create();
    ListMultimap<InternalVertex, InternalRelation> mutatedProperties = ArrayListMultimap.create();
    List<IndexSerializer.IndexUpdate> indexUpdates = Lists.newArrayList();
    //1) Collect deleted edges and their index updates and acquire edge locks
    for (InternalRelation del : Iterables.filter(deletedRelations, filter)) {
        Preconditions.checkArgument(del.isRemoved());
        for (int pos = 0; pos < del.getLen(); pos++) {
            InternalVertex vertex = del.getVertex(pos);
            if (pos == 0 || !del.isLoop()) {
                if (del.isProperty())
                    mutatedProperties.put(vertex, del);
                mutations.put(vertex.longId(), del);
            }
            if (acquireLock(del, pos, acquireLocks)) {
                Entry entry = edgeSerializer.writeRelation(del, pos, tx);
                mutator.acquireEdgeLock(idManager.getKey(vertex.longId()), entry);
            }
        }
        indexUpdates.addAll(indexSerializer.getIndexUpdates(del));
    }

    //2) Collect added edges and their index updates and acquire edge locks
    for (InternalRelation add : Iterables.filter(addedRelations, filter)) {
        Preconditions.checkArgument(add.isNew());

        for (int pos = 0; pos < add.getLen(); pos++) {
            InternalVertex vertex = add.getVertex(pos);
            if (pos == 0 || !add.isLoop()) {
                if (add.isProperty())
                    mutatedProperties.put(vertex, add);
                mutations.put(vertex.longId(), add);
            }
            if (!vertex.isNew() && acquireLock(add, pos, acquireLocks)) {
                Entry entry = edgeSerializer.writeRelation(add, pos, tx);
                mutator.acquireEdgeLock(idManager.getKey(vertex.longId()), entry.getColumn());
            }
        }
        indexUpdates.addAll(indexSerializer.getIndexUpdates(add));
    }

    //3) Collect all index update for vertices
    for (InternalVertex v : mutatedProperties.keySet()) {
        indexUpdates.addAll(indexSerializer.getIndexUpdates(v, mutatedProperties.get(v)));
    }
    //4) Acquire index locks (deletions first)
    for (IndexSerializer.IndexUpdate update : indexUpdates) {
        if (!update.isCompositeIndex() || !update.isDeletion())
            continue;
        CompositeIndexType iIndex = (CompositeIndexType) update.getIndex();
        if (acquireLock(iIndex, acquireLocks)) {
            mutator.acquireIndexLock((StaticBuffer) update.getKey(), (Entry) update.getEntry());
        }
    }
    for (IndexSerializer.IndexUpdate update : indexUpdates) {
        if (!update.isCompositeIndex() || !update.isAddition())
            continue;
        CompositeIndexType iIndex = (CompositeIndexType) update.getIndex();
        if (acquireLock(iIndex, acquireLocks)) {
            mutator.acquireIndexLock((StaticBuffer) update.getKey(), ((Entry) update.getEntry()).getColumn());
        }
    }

    //5) Add relation mutations
    for (Long vertexid : mutations.keySet()) {
        Preconditions.checkArgument(vertexid > 0, "Vertex has no id: %s", vertexid);
        // get() collects every relation mutation queued against this vertex id.
        List<InternalRelation> edges = mutations.get(vertexid);
        List<Entry> additions = new ArrayList<Entry>(edges.size());
        List<Entry> deletions = new ArrayList<Entry>(Math.max(10, edges.size() / 10));
        for (InternalRelation edge : edges) {
            InternalRelationType baseType = (InternalRelationType) edge.getType();
            assert baseType.getBaseType() == null;

            for (InternalRelationType type : baseType.getRelationIndexes()) {
                if (type.getStatus() == SchemaStatus.DISABLED)
                    continue;
                for (int pos = 0; pos < edge.getArity(); pos++) {
                    if (!type.isUnidirected(Direction.BOTH)
                            && !type.isUnidirected(EdgeDirection.fromPosition(pos)))
                        continue; //Directionality is not covered
                    if (edge.getVertex(pos).longId() == vertexid) {
                        StaticArrayEntry entry = edgeSerializer.writeRelation(edge, type, pos, tx);
                        if (edge.isRemoved()) {
                            deletions.add(entry);
                        } else {
                            Preconditions.checkArgument(edge.isNew());
                            int ttl = getTTL(edge);
                            if (ttl > 0) {
                                entry.setMetaData(EntryMetaData.TTL, ttl);
                            }
                            additions.add(entry);
                        }
                    }
                }
            }
        }

        StaticBuffer vertexKey = idManager.getKey(vertexid);
        mutator.mutateEdges(vertexKey, additions, deletions);
    }

    //6) Add index updates
    boolean has2iMods = false;
    for (IndexSerializer.IndexUpdate indexUpdate : indexUpdates) {
        assert indexUpdate.isAddition() || indexUpdate.isDeletion();
        if (indexUpdate.isCompositeIndex()) {
            IndexSerializer.IndexUpdate<StaticBuffer, Entry> update = indexUpdate;
            if (update.isAddition())
                mutator.mutateIndex(update.getKey(), Lists.newArrayList(update.getEntry()),
                        KCVSCache.NO_DELETIONS);
            else
                mutator.mutateIndex(update.getKey(), KeyColumnValueStore.NO_ADDITIONS,
                        Lists.newArrayList(update.getEntry()));
        } else {
            IndexSerializer.IndexUpdate<String, IndexEntry> update = indexUpdate;
            has2iMods = true;
            IndexTransaction itx = mutator.getIndexTransaction(update.getIndex().getBackingIndexName());
            String indexStore = ((MixedIndexType) update.getIndex()).getStoreName();
            if (update.isAddition())
                itx.add(indexStore, update.getKey(), update.getEntry(), update.getElement().isNew());
            else
                itx.delete(indexStore, update.getKey(), update.getEntry().field, update.getEntry().value,
                        update.getElement().isRemoved());
        }
    }
    return new ModificationSummary(!mutations.isEmpty(), has2iMods);
}
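
Step 5 above groups relation mutations by vertex id and then drains them with mutations.get(vertexid). A small sketch of that group-then-drain pattern, using plain Strings in place of InternalRelation (all names are illustrative):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.List;

public class MutationsByVertexExample {
    public static void main(String[] args) {
        // Queue each pending change under the id of the vertex it touches.
        ListMultimap<Long, String> mutations = ArrayListMultimap.create();
        mutations.put(1L, "add edge: knows");
        mutations.put(1L, "delete edge: likes");
        mutations.put(2L, "add property: name");

        // One backend write per vertex: get() returns everything queued for that id.
        for (Long vertexId : mutations.keySet()) {
            List<String> changes = mutations.get(vertexId);
            System.out.println("vertex " + vertexId + " -> " + changes);
        }
    }
}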

From source file:com.streamsets.pipeline.stage.processor.lookup.ForceLookupProcessor.java

private void processRetrieve(Batch batch, SingleLaneBatchMaker batchMaker) throws StageException {
    Iterator<Record> it = batch.getRecords();

    if (!it.hasNext()) {
        emptyBatch(batchMaker);
        return;
    }

    // New metadata cache for each batch
    recordCreator.buildMetadataCacheFromFieldList(partnerConnection, conf.retrieveFields);

    // Could be more than one record with the same value in the lookup
    // field, so we have to build a multimap
    ListMultimap<String, Record> recordsToRetrieve = LinkedListMultimap.create();

    // Iterate through records - three cases
    // * no ID field => use default field values
    // * ID in cache => use cached field values
    // * otherwise   => add ID to list for retrieval
    while (it.hasNext()) {
        Record record = it.next();
        Field idField = record.get(conf.idField);
        String id = (idField != null) ? idField.getValueAsString() : null;
        if (Strings.isNullOrEmpty(id)) {
            switch (conf.missingValuesBehavior) {
            case SEND_TO_ERROR:
                LOG.error(Errors.FORCE_35.getMessage());
                errorRecordHandler.onError(new OnRecordErrorException(record, Errors.FORCE_35));
                break;
            case PASS_RECORD_ON:
                setFieldsInRecord(record, getDefaultFields());
                break;
            default:
                throw new IllegalStateException(
                        "Unknown missing value behavior: " + conf.missingValuesBehavior);
            }
        } else {
            Optional<List<Map<String, Field>>> entry = cache.getIfPresent(id);

            if (entry != null && entry.isPresent()) {
                // Salesforce record id is unique, so we'll always have just one entry in the list
                setFieldsInRecord(record, entry.get().get(0));
            } else {
                recordsToRetrieve.put(id, record);
            }
        }
    }

    Set<Record> badRecords = new HashSet<>();
    if (!recordsToRetrieve.isEmpty()) {
        String fieldList = ("*".equals(conf.retrieveFields.trim())) ? recordCreator.expandWildcard()
                : conf.retrieveFields;
        String[] idArray = recordsToRetrieve.keySet().toArray(new String[0]);

        // Split batch into 'chunks'
        int start = 0;
        while (start < idArray.length) {
            int end = start + Math.min(MAX_OBJECT_IDS, idArray.length - start);
            String[] ids = Arrays.copyOfRange(idArray, start, end);
            try {
                SObject[] sObjects = partnerConnection.retrieve(fieldList, conf.sObjectType, ids);

                for (SObject sObject : sObjects) {
                    String id = sObject.getId();
                    Map<String, Field> fieldMap = recordCreator.addFields(sObject, columnsToTypes);
                    for (Record record : recordsToRetrieve.get(id)) {
                        setFieldsInRecord(record, fieldMap);
                    }
                    cache.put(id, Optional.of(ImmutableList.of(fieldMap)));
                }
            } catch (InvalidIdFault e) {
                // exceptionMessage has form "malformed id 0013600001NnbAdOnE"
                String badId = e.getExceptionMessage().split(" ")[2];
                LOG.error("Bad Salesforce ID: {}", badId);
                switch (getContext().getOnErrorRecord()) {
                case DISCARD:
                    // Need to discard whole chunk!
                    addRecordsToSet(ids, recordsToRetrieve, badRecords);
                    break;
                case TO_ERROR:
                    // Need to send the entire chunk to error - none of them were processed!
                    sendChunkToError(ids, recordsToRetrieve, getContext(), e);
                    addRecordsToSet(ids, recordsToRetrieve, badRecords);
                    break;
                case STOP_PIPELINE:
                    Record badRecord = recordsToRetrieve.get(badId).get(0);
                    throw new OnRecordErrorException(badRecord, Errors.FORCE_29, badId, e);
                default:
                    throw new IllegalStateException(Utils.format("It should never happen. OnError '{}'",
                            getContext().getOnErrorRecord(), e));
                }
            } catch (InvalidFieldFault e) {
                switch (getContext().getOnErrorRecord()) {
                case DISCARD:
                    // Need to discard whole chunk!
                    addRecordsToSet(ids, recordsToRetrieve, badRecords);
                    break;
                case TO_ERROR:
                    // Need to send the entire chunk to error - none of them were processed!
                    sendChunkToError(ids, recordsToRetrieve, getContext(), e);
                    addRecordsToSet(ids, recordsToRetrieve, badRecords);
                    break;
                case STOP_PIPELINE:
                    throw new StageException(Errors.FORCE_30, e.getExceptionMessage(), e);
                default:
                    throw new IllegalStateException(Utils.format("It should never happen. OnError '{}'",
                            getContext().getOnErrorRecord(), e));
                }
            } catch (ConnectionException e) {
                throw new StageException(Errors.FORCE_28, e.getMessage(), e);
            }
            start = end;
        }
    }

    it = batch.getRecords();
    while (it.hasNext()) {
        Record record = it.next();
        if (!badRecords.contains(record)) {
            batchMaker.addRecord(record);
        }
    }
}
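
Because several records can share one lookup id, the processor buffers them in a LinkedListMultimap and later applies the fetched fields to every record returned by recordsToRetrieve.get(id). A stripped-down sketch of that buffering pattern (ids and record labels are made up for illustration):

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.List;

public class LookupBufferExample {
    public static void main(String[] args) {
        // Buffer records by the value of their lookup field; duplicate ids are expected.
        ListMultimap<String, String> recordsToRetrieve = LinkedListMultimap.create();
        recordsToRetrieve.put("001A", "record-1");
        recordsToRetrieve.put("001A", "record-2");
        recordsToRetrieve.put("001B", "record-3");

        // After the remote fetch, update every record buffered under the fetched id.
        String fetchedId = "001A";
        List<String> waiting = recordsToRetrieve.get(fetchedId);
        for (String record : waiting) {
            System.out.println("apply fetched fields for " + fetchedId + " to " + record);
        }
    }
}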

From source file:hudson.plugins.parameterizedtrigger.TriggerBuilder.java

@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
        throws InterruptedException, IOException {
    EnvVars env = build.getEnvironment(listener);
    env.overrideAll(build.getBuildVariables());

    boolean buildStepResult = true;

    try {
        for (BlockableBuildTriggerConfig config : configs) {
            ListMultimap<AbstractProject, Future<AbstractBuild>> futures = config.perform2(build, launcher,
                    listener);
            // Only contains resolved projects
            List<AbstractProject> projectList = config
                    .getProjectList(build.getRootBuild().getProject().getParent(), env);

            // Get the actual defined projects
            StringTokenizer tokenizer = new StringTokenizer(config.getProjects(), ",");

            if (tokenizer.countTokens() == 0) {
                throw new AbortException("Build aborted. No projects to trigger. Check your configuration!");
            } else if (tokenizer.countTokens() != projectList.size()) {

                int nbrOfResolved = tokenizer.countTokens() - projectList.size();

                // Identify the unresolved project(s)
                Set<String> unsolvedProjectNames = new TreeSet<String>();
                while (tokenizer.hasMoreTokens()) {
                    unsolvedProjectNames.add(tokenizer.nextToken().trim());
                }
                for (AbstractProject project : projectList) {
                    unsolvedProjectNames.remove(project.getFullName());
                }

                // Present the undefined project(s) in error message
                StringBuffer missingProject = new StringBuffer();
                for (String projectName : unsolvedProjectNames) {
                    missingProject.append(" > ");
                    missingProject.append(projectName);
                    missingProject.append("\n");
                }

                throw new AbortException("Build aborted. Can't trigger undefined projects. " + nbrOfResolved
                        + " of the below project(s) can't be resolved:\n" + missingProject.toString()
                        + "Check your configuration!");
            } else {
                //handle non-blocking configs
                if (futures.isEmpty()) {
                    listener.getLogger().println("Triggering projects: " + getProjectListAsString(projectList));
                    for (AbstractProject p : projectList) {
                        BuildInfoExporterAction.addBuildInfoExporterAction(build, p.getFullName());
                    }
                    continue;
                }
                //handle blocking configs
                for (AbstractProject p : projectList) {
                    //handle non-buildable projects
                    if (!p.isBuildable()) {
                        listener.getLogger().println("Skipping "
                                + HyperlinkNote.encodeTo('/' + p.getUrl(), p.getFullDisplayName())
                                + ". The project is either disabled or the configuration has not been saved yet.");
                        continue;
                    }
                    for (Future<AbstractBuild> future : futures.get(p)) {
                        try {
                            listener.getLogger().println("Waiting for the completion of "
                                    + HyperlinkNote.encodeTo('/' + p.getUrl(), p.getFullDisplayName()));
                            AbstractBuild b = future.get();
                            listener.getLogger()
                                    .println(HyperlinkNote.encodeTo('/' + b.getUrl(), b.getFullDisplayName())
                                            + " completed. Result was " + b.getResult());
                            BuildInfoExporterAction.addBuildInfoExporterAction(build,
                                    b.getProject().getFullName(), b.getNumber(), b.getResult());

                            if (buildStepResult && config.getBlock().mapBuildStepResult(b.getResult())) {
                                build.setResult(config.getBlock().mapBuildResult(b.getResult()));
                            } else {
                                buildStepResult = false;
                            }
                        } catch (CancellationException x) {
                            throw new AbortException(p.getFullDisplayName() + " aborted.");
                        }
                    }
                }
            }
        }
    } catch (ExecutionException e) {
        throw new IOException2(e); // can't happen, I think.
    }

    return buildStepResult;
}
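
The trigger above stores the scheduled builds per project in a ListMultimap and then waits on each entry of futures.get(p). A self-contained sketch of that fan-out/wait pattern, with Strings and already-completed futures standing in for Jenkins projects and builds (names are illustrative):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public class TriggeredBuildsExample {
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // A triggered project may have several queued builds, so a List multimap fits.
        ListMultimap<String, Future<String>> futures = ArrayListMultimap.create();
        futures.put("downstream-a", CompletableFuture.completedFuture("downstream-a #12: SUCCESS"));
        futures.put("downstream-a", CompletableFuture.completedFuture("downstream-a #13: SUCCESS"));

        // get() returns every queued build for the project, in scheduling order.
        for (Future<String> build : futures.get("downstream-a")) {
            System.out.println("Completed: " + build.get());
        }
    }
}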