List of usage examples for org.apache.commons.lang3.tuple Pair getKey
@Override public final L getKey()
Gets the key from this pair.
This method implements the Map.Entry interface, returning the left element as the key.
From source file:ch.uzh.phys.ecn.oboma.agents.model.Agent.java
public int getPreferredTimeOfStay(String pNodeId) { for (Pair<String, Integer> item : this.mRoute) { if (item.getKey().equals(pNodeId)) { return item.getValue(); }//from w w w .ja v a2 s .co m } return 1; }
From source file:com.streamsets.pipeline.hbase.api.common.processor.HBaseStore.java
public Optional<String> get(Pair<String, HBaseColumn> key) throws Exception { if (key.getKey().isEmpty()) { return Optional.absent(); }//from w ww .j a v a 2 s. c o m Get g = new Get(Bytes.toBytes(key.getKey())); if (key.getValue().getCf() != null && key.getValue().getQualifier() != null) { g.addColumn(key.getValue().getCf(), key.getValue().getQualifier()); } if (key.getValue().getTimestamp() > 0) { g.setTimeStamp(key.getValue().getTimestamp()); } Result result = hBaseProcessor.get(g); String value = getValue(key.getValue(), result); return Optional.fromNullable(value); }
From source file:com.acmutv.ontoqa.benchmark.advanced.QuestionA01Test.java
/** * Tests the question-answering with parsing. * @throws QuestionException when the question is malformed. * @throws OntoqaFatalException when the question cannot be processed due to some fatal errors. *///from w ww . j a v a 2 s .co m @Test public void test_nlp() throws Exception { final Grammar grammar = Common.getGrammar(); final Ontology ontology = Common.getOntology(); final Pair<Query, Answer> result = CoreController.process(QUESTION, grammar, ontology); final Query query = result.getKey(); final Answer answer = result.getValue(); LOGGER.info("Query: {}", query); LOGGER.info("Answer: {}", answer); Assert.assertTrue(QUERY.equals(query.toString()) || QUERY_bis.equals(query.toString())); Assert.assertEquals(ANSWER, answer); }
From source file:com.microsoft.tooling.msservices.serviceexplorer.azure.storagearm.StorageModule.java
@Override protected void refresh(@NotNull EventStateHandle eventState) throws AzureCmdException { removeAllChildNodes();// w w w . ja v a 2s . c om AzureManager azureManager = AzureManagerImpl.getManager(getProject()); // load all Storage Accounts List<Subscription> subscriptionList = azureManager.getSubscriptionList(); List<Pair<String, String>> failedSubscriptions = new ArrayList<>(); for (Subscription subscription : subscriptionList) { try { List<ArmStorageAccount> storageAccounts = AzureArmManagerImpl.getManager(getProject()) .getStorageAccounts(subscription.getId()); if (eventState.isEventTriggered()) { return; } for (StorageAccount sm : storageAccounts) { String type = sm.getType(); if (type.equals(StorageAccountTypes.STANDARD_GRS) || type.equals(StorageAccountTypes.STANDARD_LRS) || type.equals(StorageAccountTypes.STANDARD_RAGRS) || type.equals(StorageAccountTypes.STANDARD_ZRS)) { addChildNode(new StorageNode(this, subscription.getId(), sm)); } } } catch (Exception ex) { failedSubscriptions.add(new ImmutablePair<>(subscription.getName(), ex.getMessage())); continue; } } // load External Accounts for (ClientStorageAccount clientStorageAccount : ExternalStorageHelper.getList(getProject())) { ClientStorageAccount storageAccount = StorageClientSDKManagerImpl.getManager() .getStorageAccount(clientStorageAccount.getConnectionString()); if (eventState.isEventTriggered()) { return; } // addChildNode(new ExternalStorageNode(this, storageAccount)); } if (!failedSubscriptions.isEmpty()) { StringBuilder errorMessage = new StringBuilder( "An error occurred when trying to load Storage Accounts for the subscriptions:\n\n"); for (Pair error : failedSubscriptions) { errorMessage.append(error.getKey()).append(": ").append(error.getValue()).append("\n"); } DefaultLoader.getUIHelper().logError( "An error occurred when trying to load Storage Accounts\n\n" + errorMessage.toString(), null); } }
From source file:com.streamsets.pipeline.stage.processor.hbase.HBaseStore.java
public Optional<String> get(Pair<String, HBaseColumn> key) throws Exception { if (key.getKey().isEmpty()) { return Optional.absent(); }//from www . j a v a2s .c o m Get g = new Get(Bytes.toBytes(key.getKey())); if (key.getValue().getCf() != null && key.getValue().getQualifier() != null) { g.addColumn(key.getValue().getCf(), key.getValue().getQualifier()); } if (key.getValue().getTimestamp() > 0) { g.setTimeStamp(key.getValue().getTimestamp()); } Result result = hTable.get(g); String value = getValue(key.getValue(), result); return Optional.fromNullable(value); }
From source file:com.linkedin.pinot.server.api.resources.MmapDebugResource.java
/**
 * Lists all current off-heap allocations with their contexts and sizes.
 *
 * @return a single-entry map ("allocations" -> list of allocation infos)
 * @throws ResourceException declared for the REST layer; not thrown here
 */
@GET
@Path("memory/offheap")
@ApiOperation(value = "View current off-heap allocations", notes = "Lists all off-heap allocations and their associated sizes")
@ApiResponses(value = { @ApiResponse(code = 200, message = "Success") })
@Produces(MediaType.APPLICATION_JSON)
public Map<String, List<AllocationInfo>> getOffHeapSizes() throws ResourceException {
    List<AllocationInfo> allocations = new ArrayList<>();

    List<Pair<MmapUtils.AllocationContext, Integer>> allocationsMap = MmapUtils.getAllocationsAndSizes();
    for (Pair<MmapUtils.AllocationContext, Integer> allocation : allocationsMap) {
        AllocationInfo info = new AllocationInfo();
        info.context = allocation.getKey().getContext();
        // NOTE(review): `type` is assigned the same getContext() value as
        // `context` above — this looks like a copy-paste slip; confirm whether
        // an allocation-type accessor was intended instead.
        info.type = allocation.getKey().getContext();
        info.size = allocation.getValue();
        allocations.add(info);
    }

    Map<String, List<AllocationInfo>> allocationMap = new HashMap<>();
    allocationMap.put("allocations", allocations);
    return allocationMap;
}
From source file:io.confluent.kafka.connect.source.io.processing.csv.SchemaConfigTest.java
/**
 * Verifies that {@code SchemaConfig.parserConfigs} derives both the key schema
 * (id only) and the full value schema from the mock configuration.
 */
@Test
public void schema() {
    SpoolDirectoryConfig config = new SpoolDirectoryConfig(Data.settings(Files.createTempDir()));
    final SchemaConfig input = Data.schemaConfig();

    // Expected value schema: every column of the mock data set.
    final Schema expectedValueSchema = SchemaBuilder.struct().name("io.confluent.kafka.connect.source.MockData")
            .field("id", Schema.INT32_SCHEMA).field("first_name", Schema.STRING_SCHEMA)
            .field("last_name", Schema.STRING_SCHEMA).field("email", Schema.STRING_SCHEMA)
            .field("gender", Schema.STRING_SCHEMA).field("ip_address", Schema.STRING_SCHEMA)
            .field("last_login", Timestamp.builder().optional())
            .field("account_balance", Decimal.builder(10).optional()).field("country", Schema.STRING_SCHEMA)
            .field("favorite_color", Schema.OPTIONAL_STRING_SCHEMA).build();

    // Expected key schema: just the id field.
    final Schema expectedKeySchema = SchemaBuilder.struct()
            .name("io.confluent.kafka.connect.source.MockDataKey").field("id", Schema.INT32_SCHEMA).build();

    // The returned pair is (key parser config, value parser config).
    final Pair<SchemaConfig.ParserConfig, SchemaConfig.ParserConfig> actual = input.parserConfigs(config);
    System.out.println(actual.getKey());
    System.out.println(actual.getValue());

    assertSchema(expectedKeySchema, actual.getKey().structSchema);
    assertSchema(expectedValueSchema, actual.getValue().structSchema);
}
From source file:com.astamuse.asta4d.util.i18n.MappedParamI18nMessageHelper.java
/**
 * Flattens an array of key/value pairs into a map.
 *
 * <p>Keys are stringified via {@code toString()}; when the same key appears
 * more than once, the last occurrence wins.
 *
 * @param params pairs to convert; keys must be non-null (a null key throws
 *               {@link NullPointerException})
 * @return a mutable map of stringified keys to their pair values
 */
@SuppressWarnings("rawtypes")
private Map<String, Object> pairToMap(Pair[] params) {
    // Presize so the common case never rehashes.
    Map<String, Object> map = new HashMap<>(params.length * 2);
    for (Pair pair : params) {
        map.put(pair.getKey().toString(), pair.getValue());
    }
    return map;
}
From source file:com.galenframework.speclang2.reader.specs.SpecImageProcessor.java
@Override public Spec process(StringCharReader reader, String contextPath) { List<Pair<String, String>> parameters = Expectations.commaSeparatedRepeatedKeyValues().read(reader); SpecImage spec = new SpecImage(); spec.setImagePaths(new LinkedList<String>()); spec.setStretch(false);//from w ww . ja v a 2 s. co m spec.setErrorRate(GalenConfig.getConfig().getImageSpecDefaultErrorRate()); spec.setTolerance(GalenConfig.getConfig().getImageSpecDefaultTolerance()); for (Pair<String, String> parameter : parameters) { if ("file".equals(parameter.getKey())) { if (contextPath != null) { spec.getImagePaths().add(contextPath + File.separator + parameter.getValue()); } else { spec.getImagePaths().add(parameter.getValue()); } } else if ("error".equals(parameter.getKey())) { spec.setErrorRate(SpecImage.ErrorRate.fromString(parameter.getValue())); } else if ("tolerance".equals(parameter.getKey())) { spec.setTolerance(parseIntegerParameter("tolerance", parameter.getValue())); } else if ("analyze-offset".equals(parameter.getKey())) { spec.setAnalyzeOffset(parseIntegerParameter("analyze-offset", parameter.getValue())); } else if ("stretch".equals(parameter.getKey())) { spec.setStretch(true); } else if ("area".equals(parameter.getKey())) { spec.setSelectedArea(parseRect(parameter.getValue())); } else if ("filter".equals(parameter.getKey())) { ImageFilter filter = parseImageFilter(parameter.getValue()); spec.getOriginalFilters().add(filter); spec.getSampleFilters().add(filter); } else if ("filter-a".equals(parameter.getKey())) { ImageFilter filter = parseImageFilter(parameter.getValue()); spec.getOriginalFilters().add(filter); } else if ("filter-b".equals(parameter.getKey())) { ImageFilter filter = parseImageFilter(parameter.getValue()); spec.getSampleFilters().add(filter); } else if ("map-filter".equals(parameter.getKey())) { ImageFilter filter = parseImageFilter(parameter.getValue()); spec.getMapFilters().add(filter); } else if ("crop-if-outside".equals(parameter.getKey())) { 
spec.setCropIfOutside(true); } else { throw new SyntaxException("Unknown parameter: " + parameter.getKey()); } } if (spec.getImagePaths() == null || spec.getImagePaths().size() == 0) { throw new SyntaxException("There are no images defined"); } return spec; }
From source file:io.confluent.kafka.connect.source.io.processing.csv.SchemaConfigTest.java
/**
 * Verifies that when the config declares no key fields, the key parser config
 * carries a null struct schema while the value schema is still fully derived.
 */
@Test
public void schemaNoKeys() {
    SpoolDirectoryConfig config = new SpoolDirectoryConfig(Data.settings(Files.createTempDir()));
    final SchemaConfig input = Data.schemaConfig();
    // No key fields configured for this scenario.
    input.keys.clear();

    // Expected value schema: every column of the mock data set.
    final Schema expectedValueSchema = SchemaBuilder.struct().name("io.confluent.kafka.connect.source.MockData")
            .field("id", Schema.INT32_SCHEMA).field("first_name", Schema.STRING_SCHEMA)
            .field("last_name", Schema.STRING_SCHEMA).field("email", Schema.STRING_SCHEMA)
            .field("gender", Schema.STRING_SCHEMA).field("ip_address", Schema.STRING_SCHEMA)
            .field("last_login", Timestamp.builder().optional())
            .field("account_balance", Decimal.builder(10).optional()).field("country", Schema.STRING_SCHEMA)
            .field("favorite_color", Schema.OPTIONAL_STRING_SCHEMA).build();

    // The returned pair is (key parser config, value parser config).
    final Pair<SchemaConfig.ParserConfig, SchemaConfig.ParserConfig> actual = input.parserConfigs(config);
    System.out.println(actual.getKey());
    System.out.println(actual.getValue());

    Assert.assertNull(actual.getKey().structSchema);
    assertSchema(expectedValueSchema, actual.getValue().structSchema);
}