Usage examples for `com.google.common.collect.ArrayListMultimap.create()`
public static <K, V> ArrayListMultimap<K, V> create()
From source file:org.apache.storm.metric.FakeMetricConsumer.java
/**
 * Records each expanded data point into the in-memory metrics buffer, keyed by
 * (source component id, metric name) and bucketed per source task id.
 *
 * <p>All access to {@code buffer} is synchronized on the buffer itself, matching
 * the locking convention of the surrounding class.
 *
 * @param taskInfo   metadata about the task that emitted the data points
 * @param dataPoints the raw data points to record; complex points are flattened
 *                   via {@code expandComplexDataPoint}
 */
@Override
public void handleDataPoints(TaskInfo taskInfo, Collection<DataPoint> dataPoints) {
    synchronized (buffer) {
        for (DataPoint dp : dataPoints) {
            for (Map.Entry<String, Object> entry : expandComplexDataPoint(dp).entrySet()) {
                String metricName = entry.getKey();
                Multimap<Integer, Object> taskIdToBucket = buffer.get(taskInfo.srcComponentId, metricName);
                // Create the bucket lazily; it only needs to be inserted into the
                // buffer once, not re-put on every data point.
                if (taskIdToBucket == null) {
                    taskIdToBucket = ArrayListMultimap.create();
                    buffer.put(taskInfo.srcComponentId, metricName, taskIdToBucket);
                }
                // Multimap.put appends to the per-task value list, which is exactly
                // what both branches of the original null-check did.
                taskIdToBucket.put(taskInfo.srcTaskId, entry.getValue());
            }
        }
    }
}
From source file:org.apache.ctakes.temporal.ae.feature.CheckSpecialWordRelationExtractor.java
/**
 * Loads the special-word lookup table from the bundled resource at
 * {@code LOOKUP_PATH}. Each line is expected to be "&lt;word&gt;,&lt;type&gt;";
 * entries are stored in the {@code specialWd} multimap.
 *
 * <p>On I/O failure the extractor is left with whatever entries were read so
 * far and the error is reported on stderr (best-effort initialization,
 * preserved from the original behavior).
 *
 * @throws IllegalArgumentException if a line does not contain exactly one comma
 */
public CheckSpecialWordRelationExtractor() {
    this.specialWd = ArrayListMultimap.create();
    URL url = TimeWordsExtractor.class.getResource(LOOKUP_PATH);
    try {
        for (String line : Resources.readLines(url, Charsets.US_ASCII)) {
            String[] wordAndType = line.split(",");
            if (wordAndType.length != 2) {
                throw new IllegalArgumentException("Expected '<word>,<type>', found: " + line);
            }
            this.specialWd.put(wordAndType[0], wordAndType[1]);
        }
    } catch (IOException e) {
        // Best-effort: report the failure (including the cause, which the
        // original message silently dropped) without aborting construction.
        System.err.println("TimeLexicon resource initialization error: " + e);
    }
}
From source file:suneido.SuRules.java
/**
 * Creates an empty rule engine: no dependencies recorded, nothing invalidated,
 * and no rules attached yet.
 */
public SuRules() {
    // Forward and reverse dependency tracking both start out empty.
    this.dependencies = ArrayListMultimap.create();
    this.usedBy = HashMultimap.create();
    // No fields have been invalidated and no rules attached at construction time.
    this.invalid = Sets.newHashSet();
    this.attachedRules = Maps.newHashMap();
}
From source file:de.bund.bfr.knime.pmmlite.core.CombineUtils.java
/**
 * Combines a list of secondary models (grouped by the primary model they were
 * fitted against) into tertiary models.
 *
 * <p>Step 1 groups the inputs: for each distinct primary-model formula id it
 * records the primary model, the mean of each primary parameter over all of the
 * secondary model's data sets, the secondary models themselves, and the time
 * series data. Step 3 then builds one {@code TertiaryModel} per primary model
 * by merging variable ranges, parameter values, and renamed secondary
 * assignments, and delegating formula composition to the
 * {@code combine(formula, secFormulas, assignments, secondaryRenamings)}
 * overload. (The "step 2" comment below describes work that is not implemented
 * in this method — NOTE(review): confirm whether constant models for
 * unparameterized secondaries are created elsewhere.)
 *
 * @param dataModels the secondary models to combine
 * @return one tertiary model per distinct primary model formula id
 * @throws UnitException propagated from the formula-level combine
 */
public static List<TertiaryModel> combine(List<SecondaryModel> dataModels) throws UnitException {
    // 1. find secondary models and data for each primary model
    Map<String, PrimaryModel> primModelMap = new LinkedHashMap<>();
    Map<String, Map<String, Double>> paramMeanMap = new LinkedHashMap<>();
    ListMultimap<String, SecondaryModel> secModelMap = ArrayListMultimap.create();
    ListMultimap<String, TimeSeries> dataMap = ArrayListMultimap.create();

    for (SecondaryModel dataModel : dataModels) {
        // The primary model is taken from the first data entry; its formula id
        // is the grouping key for everything below.
        PrimaryModel primModel = dataModel.getData().get(0);
        String id = primModel.getFormula().getId();

        if (!primModelMap.containsKey(id)) {
            primModelMap.put(id, primModel);

            // Collect every fitted value of each primary parameter across all
            // data sets, then average them per parameter.
            ListMultimap<String, Double> paramValues = ArrayListMultimap.create();

            for (PrimaryModel primData : dataModel.getData()) {
                dataMap.put(id, primData.getData());

                for (Parameter param : primModel.getFormula().getParams()) {
                    ParameterValue value = primData.getParamValues().get(param.getName());

                    if (value != null && value.getValue() != null) {
                        paramValues.put(param.getName(), value.getValue());
                    }
                }
            }

            Map<String, Double> paramMeans = new LinkedHashMap<>();

            for (Map.Entry<String, Collection<Double>> entry : paramValues.asMap().entrySet()) {
                paramMeans.put(entry.getKey(), DoubleMath.mean(Doubles.toArray(entry.getValue())));
            }

            paramMeanMap.put(id, paramMeans);
        }

        secModelMap.put(id, dataModel);
        // A parameter that is modeled by a secondary model must not keep its
        // averaged constant value.
        paramMeanMap.get(id)
                .remove(dataModel.getAssignments().get(dataModel.getFormula().getDepVar().getName()));
    }

    // 2. if secondary does not exist for a parameter create constant model
    // 3. call combine for each primary models with its secondary models
    List<TertiaryModel> tertiaryModels = new ArrayList<>();

    for (Map.Entry<String, PrimaryModel> entry : primModelMap.entrySet()) {
        String id = entry.getKey();
        PrimaryModel primModel = entry.getValue();
        List<SecondaryModel> secModels = secModelMap.get(id);
        List<Model> allModels = new ArrayList<>();

        allModels.add(primModel);
        allModels.addAll(secModels);

        // Id is the concatenation of all component ids; name joins the
        // secondary model names with underscores.
        TertiaryModel tertModel = ModelsFactory.eINSTANCE.createTertiaryModel();

        tertModel.setId(Joiner.on("").join(PmmUtils.getIds(allModels)));
        tertModel.setName(Joiner.on("_").join(PmmUtils.getNames(secModels)));

        // Deep-copy the primary model's variable ranges and parameter values
        // so the tertiary model does not alias the originals.
        for (Map.Entry<String, VariableRange> range : primModel.getVariableRanges().entrySet()) {
            tertModel.getVariableRanges().put(range.getKey(), EcoreUtil.copy(range.getValue()));
        }

        for (Map.Entry<String, ParameterValue> value : primModel.getParamValues().entrySet()) {
            tertModel.getParamValues().put(value.getKey(), EcoreUtil.copy(value.getValue()));
        }

        // Parameters that a secondary model predicts lose their fixed value.
        for (SecondaryModel secModel : secModels) {
            String depVarAssignment = secModel.getAssignments()
                    .get(secModel.getFormula().getDepVar().getName());

            tertModel.getParamValues().remove(depVarAssignment);
        }

        List<SecondaryModelFormula> secFormulas = new ArrayList<>();
        Map<String, String> assignments = new LinkedHashMap<>();
        Map<String, Renamings> secondaryRenamings = new LinkedHashMap<>();

        for (SecondaryModel secModel : secModels) {
            SecondaryModelFormula secFormula = secModel.getFormula();
            String depVarAssignment = secModel.getAssignments().get(secFormula.getDepVar().getName());
            Renamings renamings = ModelsFactory.eINSTANCE.createRenamings();

            // Every non-dependent-variable assignment of the secondary model is
            // recorded as a renaming and mapped onto itself in the tertiary model.
            for (Map.Entry<String, String> assignment : secModel.getAssignments().entrySet()) {
                if (!assignment.getKey().equals(secFormula.getDepVar().getName())) {
                    renamings.getMap().put(assignment.getKey(), assignment.getValue());
                    tertModel.getAssignments().put(assignment.getValue(), assignment.getValue());
                }
            }

            secFormulas.add(secFormula);
            assignments.put(depVarAssignment, secFormula.getId());
            secondaryRenamings.put(depVarAssignment, renamings);

            // Carry over the secondary model's independent-variable ranges
            // under their renamed identifiers.
            for (Variable var : secFormula.getIndepVars()) {
                VariableRange range = secModel.getVariableRanges()
                        .get(secModel.getAssignments().get(var.getName()));

                addIndep(tertModel, renamings.getMap().get(var.getName()), EcoreUtil.copy(range));
            }

            for (Map.Entry<String, ParameterValue> value : secModel.getParamValues().entrySet()) {
                addParam(tertModel, value.getKey(), EcoreUtil.copy(value.getValue()));
            }
        }

        // Primary parameters without a secondary model get their averaged
        // constant value computed in step 1.
        for (Parameter param : primModel.getFormula().getParams()) {
            if (!assignments.containsKey(param.getName())) {
                ParameterValue value = ModelsFactory.eINSTANCE.createParameterValue();

                value.setValue(paramMeanMap.get(primModel.getFormula().getId()).get(param.getName()));
                tertModel.getParamValues().put(param.getName(), value);
            }
        }

        // Fit statistics from the component models do not apply to the merged
        // model, so clear them on every parameter value.
        for (ParameterValue value : tertModel.getParamValues().values()) {
            value.setError(null);
            value.setP(null);
            value.setT(null);
            value.getCorrelations().clear();
        }

        tertModel.setFormula(combine(primModel.getFormula(), secFormulas, assignments, secondaryRenamings));
        tertModel.getAssignments().putAll(primModel.getAssignments());
        tertModel.getData().addAll(dataMap.get(id));
        tertiaryModels.add(tertModel);
    }

    return tertiaryModels;
}
From source file:de.ii.xtraplatform.feature.provider.pgis.SqlFeatureCreator.java
/**
 * Discards all per-feature state so this creator can be reused for the next
 * feature: the value container is rebuilt from an empty map and multiplicity
 * tracking starts over.
 */
public void reset() {
    // Rebuild the value container with no preset values.
    this.valueContainer = inserts.getValueContainer(ImmutableMap.of());
    // Start multiplicity bookkeeping from scratch.
    this.multiplicities = ArrayListMultimap.create();
}
From source file:org.shaf.core.process.handle.EmulatorMemoryMapHandler.java
@Override public Object run() throws ProcessException { try {// www . j a v a 2s. c o m ListMultimap<Object, Object> buffer = ArrayListMultimap.create(); try (@SuppressWarnings("unchecked") RecordReader<Object, Object> reader = RecordReaderFactory .<Object, Object, Object, Object, Object, Object>createRecordReader( (Class<? extends DistributedProcess<Object, Object, Object, Object, Object, Object>>) super.cls, super.job.getConfiguration());) { buffer.putAll(super.doMapping(reader)); } catch (Exception exc) { throw new Exception("Fialed to emulate the map process.", exc); } try (@SuppressWarnings("unchecked") RecordWriter<Object, Object> writer = RecordWriterFactory.createRecordWriter( (Class<? extends DistributedProcess<Object, Object, Object, Object, Object, Object>>) super.cls, super.job.getConfiguration());) { super.doReducing(buffer, writer); } catch (Exception exc) { throw new Exception("Fialed to emulate the reduce process.", exc); } return null; } catch (Exception exc) { throw new ProcessExecException("Failed to emulate the map-reduce process in memory-mapping mode.", exc); } }
From source file:org.codeqinvest.codechanges.scm.svn.DefaultSvnRevisionsRetriever.java
/**
 * {@inheritDoc}
 */
@Override
@Cacheable("svnRevisions")
public DailyRevisions retrieveRevisions(ScmConnectionSettings connectionSettings, LocalDate day)
        throws SVNException {
    log.info("Retrieve revisions on day {} for {}", day, connectionSettings);

    // Resolve the revision numbers bounding the requested calendar day.
    final SVNRepository svnRepository = SvnRepositoryFactory.create(connectionSettings);
    final LocalDateTime dayStart = day.toDateTimeAtStartOfDay().toLocalDateTime();
    final long firstRevision = svnRepository.getDatedRevision(dayStart.toDate());
    final long lastRevision = svnRepository.getDatedRevision(dayStart.withTime(23, 59, 59, 999).toDate());

    // Collect every changed path of every log entry in the day's revision range.
    final Multimap<String, SvnFileRevision> fileRevisions = ArrayListMultimap.create();
    svnRepository.log(null, firstRevision, lastRevision, true, true, new ISVNLogEntryHandler() {
        @Override
        public void handleLogEntry(SVNLogEntry logEntry) throws SVNException {
            for (SVNLogEntryPath changedPath : logEntry.getChangedPaths().values()) {
                // Copied/moved files point back at their copy source; plain
                // edits use the path itself as the "old" path.
                final String oldPath = changedPath.getCopyPath() != null ? changedPath.getCopyPath()
                        : changedPath.getPath();
                fileRevisions.put(changedPath.getPath(),
                        new SvnFileRevision(logEntry.getRevision(), oldPath, changedPath.getPath()));
            }
        }
    });

    log.info("Found {} changes for day {} with connection {}", fileRevisions.values().size(), day,
            connectionSettings);
    return new DailyRevisions(day, fileRevisions);
}
From source file:org.eclipse.viatra.addon.viewers.runtime.specifications.ItemQuerySpecificationDescriptor.java
/**
 * Builds a descriptor for an @Item-annotated query specification, reading the
 * source parameter, label, and hierarchy policy from the annotation.
 *
 * @throws ViatraQueryRuntimeException
 */
public ItemQuerySpecificationDescriptor(IQuerySpecification<?> specification, PAnnotation annotation) {
    super(specification, getTraceSource(specification, annotation), ArrayListMultimap.create(),
            Collections.emptyMap());

    // The source is the name of the parameter the item annotation points at;
    // a missing value is a processing error.
    source = annotation.getFirstValue(SOURCE_PARAMETER_NAME, ParameterReference.class)
            .orElseThrow(() -> new QueryProcessingException("Invalid item value", specification))
            .getName();

    // Optional label; empty string when absent.
    label = annotation.getFirstValue(LABEL_PARAMETER_NAME, String.class).orElse("");

    // Hierarchy policy defaults to ALWAYS; annotation values are matched
    // case-insensitively against the enum constants.
    policy = annotation.getFirstValue(HIERARCHY_PARAMETER_NAME, String.class)
            .map(name -> HierarchyPolicy.valueOf(name.toUpperCase())).orElse(HierarchyPolicy.ALWAYS);

    formatAnnotation = specification.getFirstAnnotationByName(FormatParser.ANNOTATION_ID).orElse(null);
}
From source file:edu.umd.cs.psl.model.formula.traversal.FormulaEventAnalysis.java
/**
 * Analyzes a conjunctive formula, partitioning its atoms into "necessary"
 * atoms (those whose predicate defaults to 0.0) and "one-of" atoms, recording
 * predicate dependencies, and deriving the query (or single conjunction) used
 * to ground the formula.
 *
 * <p>NOTE(review): the cast {@code (Conjunction) formula} assumes every input
 * formula is a conjunction — confirm callers guarantee this.
 *
 * @param f the formula to analyze; assumed to flatten to a conjunction
 */
public FormulaEventAnalysis(Formula f) {
    formula = f;
    dependence = ArrayListMultimap.create();
    //FormulaTraverser.traverse(formula, new FormulaAnalyser());
    queries = new HashSet<Formula>();
    Conjunction c = ((Conjunction) formula).flatten();
    Vector<Formula> necessary = new Vector<Formula>(c.getNoFormulas());
    Vector<Formula> oneOf = new Vector<Formula>(c.getNoFormulas());
    Atom a;
    // Anchor-variable detection: only the first negated unary atom can supply
    // the anchor, and only in Anchor query-join mode.
    int negatedPredCount = 0;
    String anchorVar = null;
    for (int i = 0; i < c.getNoFormulas(); i++) {
        if (c.get(i) instanceof Atom) {
            a = (Atom) c.get(i);
            if (a.getPredicate().getNumberOfValues() == 1) {
                // Default value 0.0 marks the atom as necessary (and a
                // dependency of its predicate); otherwise it is one-of.
                if (a.getPredicate().getDefaultValues()[0] == 0.0) {
                    necessary.add(a);
                    dependence.put(a.getPredicate(), a);
                } else {
                    oneOf.add(a);
                }
            } else {
                oneOf.add(a);
            }
        } else if (c.get(i) instanceof Negation) {
            a = (Atom) ((Negation) c.get(i)).getFormula();
            negatedPredCount++;
            if (a.getArity() == 1 && negatedPredCount == 1
                    && Formula2SQL.queryJoinMode == QueryJoinMode.Anchor)
                anchorVar = a.getArguments()[0].toString();
            if (a.getPredicate().getNumberOfValues() == 1) {
                // Negated atoms with a non-zero default land in one-of; zero
                // defaults are intentionally ignored here.
                if (a.getPredicate().getDefaultValues()[0] != 0.0) {
                    oneOf.add(a);
                }
            }
        }
    }
    if (necessary.size() == 1) {
        // A single necessary atom is itself the query.
        queries.add(necessary.get(0));
    } else {
        // Otherwise all necessary atoms are conjoined into one query,
        // inheriting the conjunction type and (in Anchor/avg mode) the anchor.
        //if (oneOf.isEmpty()) {
        {
            Conjunction conj = new Conjunction((Formula[]) necessary.toArray(new Formula[necessary.size()]));
            conj.conjType = c.conjType;
            if (Formula2SQL.queryJoinMode == QueryJoinMode.Anchor && anchorVar != null
                    && conj.conjType == ConjunctionTypes.avg)
                conj.anchorVar = anchorVar;
            queries.add(conj);
        }
        //}
        //else {
        //    for (Formula formula : oneOf) {
        //        queries.add(new Conjunction(new Conjunction((Formula[]) necessary.toArray(new Formula[necessary.size()])), formula));
        //    }
        //}
    }
}
From source file:no.kantega.publishing.jobs.alerts.RevisionContentAlertJob.java
@Scheduled(cron = "${jobs.revision.trigger}") @DisableOnServertype(ServerType.SLAVE)/* w w w . j a v a2 s . c o m*/ public void revisionContentAlert() { try { log.debug("Looking for content revision in " + daysBeforeWarning + " days"); Multimap<String, Content> users = ArrayListMultimap.create(); // Create query to find all docs with revision ContentQuery query = new ContentQuery(); Date fromDate = new Date(); query.setRevisionDateFrom(fromDate); Calendar calendar = new GregorianCalendar(); calendar.add(Calendar.DATE, daysBeforeWarning); query.setRevisionDateTo(calendar.getTime()); query.setSortOrder(new SortOrder(ContentProperty.TITLE, false)); List<Content> contentList = contentAO.getContentList(query, false); // Insert docs into hashmap for (Content content : contentList) { String userId = content.getOwnerPerson(); if (isNotBlank(userId)) { users.put(userId, content); } } // Iterate through users for (String userId : users.keySet()) { Collection<Content> userContentList = users.get(userId); // Lookup user with userid SecurityRealm realm = SecurityRealmFactory.getInstance(); User ownerPerson = realm.lookupUser(userId); // Send message using listeners if (ownerPerson != null) { log.info("Sending alert to user " + ownerPerson.getId() + " - " + userContentList.size() + " docs for revision"); for (ContentAlertListener listener : listeners) { listener.sendContentAlert(ownerPerson, new ArrayList<>(userContentList)); } } else { log.debug("Skipping alert, user unknown " + userId + " - " + userContentList.size() + " docs for revision"); } } } catch (SystemException e) { log.error("Error when seding revision alert", e); } }