Usage examples for com.google.common.collect.Lists#newLinkedList, collected from open-source projects.
@GwtCompatible(serializable = true) public static <E> LinkedList<E> newLinkedList()
From source file:com.bj58.oceanus.core.context.TransactionContext.java
// Initializes empty per-transaction bookkeeping. Insertion-ordered maps are used for the
// enlisted connections and statements (LinkedHashMap preserves enlistment order), and a
// list collects the SQL seen within the transaction.
// NOTE(review): field types are declared elsewhere in the class — presumed Map/Map/List.
public TransactionContext() {
    connsInTransaction = Maps.newLinkedHashMap();
    stmtsInTransaction = Maps.newLinkedHashMap();
    sqlsInTransaction = Lists.newLinkedList();
}
From source file:com.amazon.janusgraph.diskstorage.dynamodb.iterator.SingleRowScanInterpreter.java
/**
 * Builds one single-key record iterator per scanned item that yields at least one entry.
 * Items whose record iterator is empty are skipped entirely.
 */
@Override
public List<SingleKeyRecordIterator> buildRecordIterators(ScanContext scanContext) {
    final List<SingleKeyRecordIterator> result = Lists.newLinkedList();
    for (final Map<String, AttributeValue> row : scanContext.getScanResult().getItems()) {
        final StaticBuffer hashKey = new KeyBuilder(row).build(Constants.JANUSGRAPH_HASH_KEY);
        final RecordIterator<Entry> entries = createRecordIterator(row);
        if (!entries.hasNext()) {
            // nothing to iterate for this row; do not emit an empty iterator
            continue;
        }
        result.add(new SingleKeyRecordIterator(hashKey, entries));
    }
    return result;
}
From source file:exec.validate_evaluation.microcommits.MicroCommitGenerationRunner.java
private List<MicroCommit> createCommits(List<Usage> qh) { List<MicroCommit> commits = Lists.newLinkedList(); for (int i = 1; i < qh.size(); i++) { // TODO fix/test int first = Math.max(0, i - MAX_HISTORY_LENGTH); for (int j = first; j < i; j++) { Usage end = qh.get(i);// w w w.j a va 2s. co m Usage start = qh.get(j); commits.add(MicroCommit.create(start, end)); } } return commits; }
From source file:com.rapid7.diskstorage.dynamodb.iterator.SingleRowScanInterpreter.java
@Override public List<SingleKeyRecordIterator> buildRecordIterators(ScanContext scanContext) { final List<SingleKeyRecordIterator> recordIterators = Lists.newLinkedList(); for (Map<String, AttributeValue> item : scanContext.getScanResult().getItems()) { final StaticBuffer hashKey = new KeyBuilder(item).build(Constants.TITAN_HASH_KEY); final RecordIterator<Entry> recordIterator = createRecordIterator(item); if (recordIterator.hasNext()) { recordIterators.add(new SingleKeyRecordIterator(hashKey, recordIterator)); }//from ww w. j a v a2 s .c o m } return recordIterators; }
From source file:rapture.dsl.serfun.SplitterHose.java
// Constructs a splitter with one input bound at slot (0, 0) and 'fan' outputs.
@SuppressWarnings("unchecked")
public SplitterHose(HoseArg input, int fan) {
    super(1, fan); // one input, 'fan' outputs
    this.bind(input, 0, 0);
    this.fan = fan;
    // Grow the output-key name list ("out0", "out1", ...) until it covers 'fan' entries.
    // NOTE(review): OUTKEYS appears to be a shared static list — growing it here mutates
    // state visible to every SplitterHose instance; confirm that is intentional.
    for (int i = OUTKEYS.size(); i < fan; i++) {
        OUTKEYS.add("out" + i);
    }
    // subList is a *view*, not a copy: outKeys tracks the first 'fan' names of OUTKEYS.
    outKeys = OUTKEYS.subList(0, fan);
    // One backlog queue per output lane. Generic array creation is illegal in Java,
    // hence the raw Queue[] and the @SuppressWarnings("unchecked") above.
    backlog = new Queue[fan];
    for (int i = 0; i < fan; i++) {
        backlog[i] = Lists.newLinkedList();
    }
}
From source file:au.id.wolfe.tribs.repository.impl.WorkLogRepositoryImpl.java
/**
 * Returns the ids of all work log entries whose start date lies in the half-open
 * interval [startDate, endDate).
 *
 * @param startDate inclusive lower bound on the worklog start date
 * @param endDate exclusive upper bound on the worklog start date
 * @return the matching worklog ids, possibly empty
 */
@SuppressWarnings("unchecked")
public List<Long> getWorkLogIdListForPeriod(Timestamp startDate, Timestamp endDate) {
    logger.info("getWorkLogIdListForPeriod startDate " + startDate + ", endDate " + endDate);
    // fix: ArrayList instead of Lists.newLinkedList — the list is append-only here, and
    // this matches the ArrayList already used for 'expressions' below
    List<Long> worklogIdList = new ArrayList<Long>();
    List<EntityCondition> expressions = new ArrayList<EntityCondition>();
    expressions.add(new EntityExpr("startdate", EntityOperator.GREATER_THAN_EQUAL_TO, startDate));
    expressions.add(new EntityExpr("startdate", EntityOperator.LESS_THAN, endDate));
    EntityCondition condition = new EntityConditionList(expressions, EntityOperator.AND);
    // Query only the "id" column; no ordering is requested.
    for (GenericValue value : genericDelegator.findByCondition(OfBizWorklogStore.WORKLOG_ENTITY,
            condition, EasyList.build("id"), EasyList.build())) {
        worklogIdList.add(value.getLong("id"));
    }
    return worklogIdList;
}
From source file:clocker.docker.networking.entity.sdn.weave.WeaveRouterSshDriver.java
/**
 * Installs the Weave router script: downloads it from the resolved targets to the
 * weave command path, marks it executable, and runs the assembled install script.
 */
@Override
public void install() {
    final List<String> script = Lists.newLinkedList(
            BashCommands.commandsToDownloadUrlsAs(resolver.getTargets(), getWeaveCommand()));
    script.add("chmod 755 " + getWeaveCommand());
    newScript(INSTALLING).body.append(script).execute();
}
From source file:org.apache.impala.infra.tableflattener.SchemaFlattener.java
/**
 * Flattens a record schema into a {@link FlattenedSchema} named after the source schema.
 *
 * @param srcSchema the schema to flatten; must be of type RECORD
 * @return the flattened dataset
 */
public FlattenedSchema flatten(Schema srcSchema) {
    // Only record schemas can be flattened.
    Preconditions.checkState(srcSchema.getType() == Type.RECORD);
    final FlattenedSchema flattened = new FlattenedSchema(srcSchema.getName());
    final LinkedList<Field> flatFields = Lists.newLinkedList();
    addRecordFields(srcSchema, flattened, flatFields, "");
    finishCreatingDataset(flatFields, flattened);
    return flattened;
}
From source file:org.nmdp.ngs.align.Genewise.java
/** * Return the exons predicted from the alignment of the specified amino acid HMM file in HMMER2 format against * the specified genomic DNA sequence file in FASTA format. * * @param aminoAcidHmm2File amino acid HMM file in HMMER2 format, must not be null * @param genomicDnaFastaFile genomic DNA sequence file in FASTA format, must not be null * @return zero or more exons predicted from the alignment of the specified amino acid HMM file in HMMER2 format against * the specified genomic DNA sequence file in FASTA format * @throws IOException if an I/O error occurs *///from w ww . ja v a2 s . co m public static Iterable<GenewiseExon> genewiseExons(final File aminoAcidHmm2File, final File genomicDnaFastaFile) throws IOException { checkNotNull(aminoAcidHmm2File); checkNotNull(genomicDnaFastaFile); File genewiseResult = File.createTempFile("genewise", ".txt"); ProcessBuilder genewise = new ProcessBuilder("genewise", "-hmmer", "-tfor", "-genes", "-nosplice_gtag", aminoAcidHmm2File.getPath(), genomicDnaFastaFile.getPath()); genewise.redirectErrorStream(true); genewise.redirectOutput(ProcessBuilder.Redirect.to(genewiseResult)); Process genewiseProcess = genewise.start(); try { genewiseProcess.waitFor(); } catch (InterruptedException e) { // ignore } int lineNumber = 0; BufferedReader reader = null; List<GenewiseExon> exons = Lists.newLinkedList(); try { reader = new BufferedReader(new FileReader(genewiseResult)); while (reader.ready()) { String line = reader.readLine(); if (line == null) { break; } if (line.startsWith(" Exon")) { List<String> tokens = SPLITTER.splitToList(line); if (tokens.size() < 5) { throw new IOException( "invalid genewise genes format at line number " + lineNumber + ", line " + line); } try { long start = Long.parseLong(tokens.get(1)); long end = Long.parseLong(tokens.get(2)); if (start > end) { throw new IOException( "invalid genewise exon at line number " + lineNumber + ", start > end"); } int phase = Integer.parseInt(tokens.get(4)); exons.add(new 
GenewiseExon(start, end, phase)); } catch (NumberFormatException e) { throw new IOException("invalid genewise exon at line number " + lineNumber + ", caught " + e.getMessage()); } } lineNumber++; } } finally { try { reader.close(); } catch (Exception e) { // empty } try { genewiseResult.delete(); } catch (Exception e) { // empty } } return ImmutableList.copyOf(exons); }
From source file:terasort.processors.PartitionProcessor.java
@Override public void run() throws Exception { UnorderedKVReader reader = (UnorderedKVReader) (getInputs().get(TeraSort.SAMPLER_VERTEX).getReader()); List<Text> splitPoints = Lists.newLinkedList(); while (reader.next()) { Text partitionData = (Text) reader.getCurrentKey(); splitPoints.add(partitionData);/*from w ww .j a va 2 s . c om*/ } trie = buildTrie(splitPoints.toArray(new Text[splitPoints.size()]), 0, splitPoints.size(), new Text(), 2); UnorderedKVReader dataReader = (UnorderedKVReader) (getInputs().get(TeraSort.SCAN_VERTEX).getReader()); KeyValuesWriter writer = (KeyValuesWriter) getOutputs().get(TeraSort.SINK_VERTEX).getWriter(); while (dataReader.next()) { Object key = dataReader.getCurrentKey(); Object val = dataReader.getCurrentValue(); writer.write(key, val); } }