Example usage for org.apache.commons.lang3.tuple Pair getKey

List of usage examples for org.apache.commons.lang3.tuple Pair getKey

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple.Pair.getKey().

Prototype

@Override
public final L getKey() 

Document

Gets the key from this pair.

This method implements the Map.Entry interface, returning the left element as the key.
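
Before the full-length usage examples, here is a minimal, self-contained sketch (not taken from the sources below; the class name PairGetKeyExample is chosen only for illustration) showing that getKey() returns the left element and that a Pair can be used directly as a Map.Entry:

import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairGetKeyExample {
    public static void main(String[] args) {
        // Pair.of(...) creates an immutable pair; the left element doubles as the key.
        Pair<String, Integer> pair = Pair.of("answer", 42);

        System.out.println(pair.getKey());   // "answer" -- same as pair.getLeft()
        System.out.println(pair.getValue()); // 42       -- same as pair.getRight()

        // Because Pair implements Map.Entry, it can be passed wherever an entry is expected.
        Map.Entry<String, Integer> entry = pair;
        System.out.println(entry.getKey() + " = " + entry.getValue());
    }
}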

Usage

From source file:candr.yoclip.DefaultParserHelpFactoryTest.java

@Test
public void testGetOptionPropertyDescriptions() {

    final ParserHelpFactory<PropertiesTestCase> testCase = new DefaultParserHelpFactory<PropertiesTestCase>();
    final ParserOptions<PropertiesTestCase> parserOptions = new ParserOptionsFactory<PropertiesTestCase>(
            PropertiesTestCase.class).create();

    ParserOption<PropertiesTestCase> optionParameter = parserOptions.get("D");
    List<Pair<String, String>> descriptions = testCase.getOptionPropertyDescriptions(optionParameter);

    assertThat("size", descriptions.size(), is(3));

    Pair<String, String> propertyHelp = descriptions.get(0);
    assertThat("property help 0 synopsis", propertyHelp.getKey(), is("one"));
    assertThat("property help 0 details", propertyHelp.getValue(),
            is(new StrBuilder().append("Property one.").append(Options.LINE_BREAK).append(Options.LINE_BREAK)
                    .append("Has explicit line breaks.").toString()));

    propertyHelp = descriptions.get(1);
    assertThat("property help 1 synopsis", propertyHelp.getKey(), is("two"));
    assertThat("property help 1 details", propertyHelp.getValue(), is("Details for property two."));

    propertyHelp = descriptions.get(2);
    assertThat("property help 2 synopsis", propertyHelp.getKey(), is("key"));
    assertThat("property help 2 details", propertyHelp.getValue(),
            is("An option property value for properties."));
}

From source file:com.streamsets.pipeline.stage.processor.hbase.HBaseLookupProcessor.java

private void doBatchLookup(Batch batch, BatchMaker batchMaker) throws StageException {
    Iterator<Record> records = batch.getRecords();
    final Set<Pair<String, HBaseColumn>> keys = getKeyColumnListMap(batch);

    try {
        Map<Pair<String, HBaseColumn>, Optional<String>> values = hbaseConnectionHelper.getUGI().doAs(
                (PrivilegedExceptionAction<ImmutableMap<Pair<String, HBaseColumn>, Optional<String>>>) () -> cache
                        .getAll(keys));
        Record record;
        while (records.hasNext()) {
            record = records.next();
            for (HBaseLookupParameterConfig parameter : conf.lookups) {
                Pair<String, HBaseColumn> key = getKey(record, parameter);

                if (key != null && !key.getKey().trim().isEmpty()) {
                    Optional<String> value = values.get(key);
                    updateRecord(record, parameter, key, value);
                } else {
                    handleEmptyKey(record, key);
                }
            }
            batchMaker.addRecord(record);
        }
    } catch (ELEvalException | JSONException e1) {
        records = batch.getRecords();
        while (records.hasNext()) {
            Record record = records.next();
            LOG.error(Errors.HBASE_38.getMessage(), e1.toString(), e1);
            errorRecordHandler.onError(new OnRecordErrorException(record, Errors.HBASE_38, e1.toString()));
        }
    } catch (IOException | InterruptedException | UndeclaredThrowableException e2) {
        AbstractHBaseConnectionHelper.handleHBaseException(e2, records, errorRecordHandler);
    }
}

From source file:com.streamsets.pipeline.stage.processor.hbase.HBaseLookupProcessor.java

private void doRecordLookup(Batch batch, BatchMaker batchMaker) throws StageException {
    Iterator<Record> records;
    records = batch.getRecords();
    Record record;
    while (records.hasNext()) {
        record = records.next();
        ELVars elVars = getContext().createELVars();
        RecordEL.setRecordInContext(elVars, record);
        try {
            for (HBaseLookupParameterConfig parameter : conf.lookups) {
                final Pair<String, HBaseColumn> key = getKey(record, parameter);

                if (key != null && !key.getKey().trim().isEmpty()) {
                    Optional<String> value = hbaseConnectionHelper.getUGI()
                            .doAs((PrivilegedExceptionAction<Optional<String>>) () -> cache.getUnchecked(key));
                    updateRecord(record, parameter, key, value);
                } else {
                    handleEmptyKey(record, key);
                }
            }
        } catch (ELEvalException | JSONException e1) {
            LOG.error(Errors.HBASE_38.getMessage(), e1.toString(), e1);
            errorRecordHandler.onError(new OnRecordErrorException(record, Errors.HBASE_38, e1.toString()));
        } catch (IOException | InterruptedException | UncheckedExecutionException e) {
            AbstractHBaseConnectionHelper.handleHBaseException(e, ImmutableList.of(record).iterator(),
                    errorRecordHandler);
        }

        batchMaker.addRecord(record);
    }
}

From source file:com.galenframework.speclang2.reader.pagespec.PageSectionProcessor.java

private void processObjectLevelRule(ObjectSpecs objectSpecs, StructNode sourceNode) throws IOException {
    String ruleText = sourceNode.getName().substring(1).trim();
    Pair<PageRule, Map<String, String>> rule = findAndProcessRule(ruleText, sourceNode);

    pageSpecHandler.setGlobalVariable("objectName", objectSpecs.getObjectName(), sourceNode);

    List<StructNode> specNodes = rule.getKey().apply(pageSpecHandler, ruleText, objectSpecs.getObjectName(),
            rule.getValue());

    SpecGroup specGroup = new SpecGroup();
    specGroup.setName(ruleText);
    objectSpecs.addSpecGroup(specGroup);

    for (StructNode specNode : specNodes) {
        specGroup.addSpec(
                pageSpecHandler.getSpecReader().read(specNode.getName(), pageSpecHandler.getContextPath()));
    }
}

From source file:io.confluent.kafka.connect.source.io.processing.csv.CSVRecordProcessor.java

@Override
public void configure(SpoolDirectoryConfig config, InputStream inputStream, FileMetadata fileMetadata)
        throws IOException {
    this.config = config;

    if (log.isDebugEnabled()) {
        log.debug("Configuring CSVParser...");
    }

    DateTypeParser timestampDateConverter = new DateTypeParser(this.config.parserTimestampTimezone(),
            this.config.parserTimestampDateFormats());
    this.parser.registerTypeParser(Timestamp.SCHEMA, timestampDateConverter);

    this.csvParser = this.config.createCSVParserBuilder().build();
    this.streamReader = new InputStreamReader(inputStream, this.config.charset());
    this.csvReader = this.config.createCSVReaderBuilder(this.streamReader, csvParser).build();

    String[] fieldNames;

    if (this.config.firstRowAsHeader()) {
        if (log.isDebugEnabled()) {
            log.debug("Reading the first line ");
        }
        fieldNames = this.csvReader.readNext();
        if (log.isDebugEnabled()) {
            log.debug("FieldMapping names for the file are {}", Joiner.on(", ").join(fieldNames));
        }
    } else {
        fieldNames = null;
    }

    if (this.config.schemaFromHeader()) {
        Preconditions.checkState(this.config.firstRowAsHeader(),
                "If the %s is set to true, then %s must be set to true as well.",
                SpoolDirectoryConfig.CSV_SCHEMA_FROM_HEADER_KEYS_CONF,
                SpoolDirectoryConfig.CSV_FIRST_ROW_AS_HEADER_CONF);

        SchemaConfig schemaConfig = new SchemaConfig();

        for (int i = 0; i < fieldNames.length; i++) {
            FieldConfig fieldConfig = FieldConfig.create(Schema.OPTIONAL_STRING_SCHEMA);
            fieldConfig.name = fieldNames[i];
            fieldConfig.index = i;
            schemaConfig.fields.add(fieldConfig);
        }
        schemaConfig.keys = this.config.schemaFromHeaderKeys();
        schemaConfig.name = this.config.schemaName();
        Preconditions.checkNotNull(schemaConfig.name,
                "%s must be configured when generating the schema from the header row.",
                SpoolDirectoryConfig.CSV_SCHEMA_NAME_CONF);
        Preconditions.checkState(!schemaConfig.name.isEmpty(),
                "%s must be configured when generating the schema from the header row.",
                SpoolDirectoryConfig.CSV_SCHEMA_NAME_CONF);
        this.schemaConfig = schemaConfig;
    } else {
        this.schemaConfig = this.config.schemaConfig();

        if (this.config.firstRowAsHeader()) {
            Map<String, FieldConfig> map = new LinkedHashMap<>();
            for (FieldConfig field : this.schemaConfig.fields) {
                String mapKey = this.config.caseSensitiveFieldNames() ? field.name : field.name.toLowerCase();
                Preconditions.checkState(!map.containsKey(mapKey),
                        "Schema already has a field with name '%s' defined.", field.name);
                map.put(mapKey, field);
            }

            int fieldIndex = 0;
            for (String fieldName : fieldNames) {
                String mapKey = this.config.caseSensitiveFieldNames() ? fieldName : fieldName.toLowerCase();
                FieldConfig field = map.get(mapKey);

                if (null == field) {
                    if (log.isDebugEnabled()) {
                        log.debug("FieldMapping '{}' was not found in schema. Skipping.", fieldName);
                    }
                    continue;
                }
                field.index = fieldIndex;
                fieldIndex++;
            }

        } else {
            if (log.isDebugEnabled()) {
                log.debug("Laying out fields in the order they are in the schema.");
            }

            for (int i = 0; i < this.schemaConfig.fields.size(); i++) {
                FieldConfig field = this.schemaConfig.fields.get(i);
                field.index = i;
                if (log.isDebugEnabled()) {
                    log.debug("FieldMapping {} index {}.", field.name, field.index);
                }
            }
        }

    }

    Pair<SchemaConfig.ParserConfig, SchemaConfig.ParserConfig> parserConfigs = this.schemaConfig
            .parserConfigs(this.config);

    this.keyParserConfig = parserConfigs.getKey();
    this.valueParserConfig = parserConfigs.getValue();

    this.fileMetadata = fileMetadata;
}

From source file:alfio.manager.system.DataMigratorIntegrationTest.java

@Test
public void testUpdateGender() {
    List<TicketCategoryModification> categories = Collections.singletonList(new TicketCategoryModification(null,
            "default", AVAILABLE_SEATS, new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN, false, "",
            false, null, null, null, null, null));
    Pair<Event, String> eventUsername = initEvent(categories);
    Event event = eventUsername.getKey();
    try {
        TicketReservationModification trm = new TicketReservationModification();
        trm.setAmount(2);
        trm.setTicketCategoryId(eventManager.loadTicketCategories(event).get(0).getId());
        TicketReservationWithOptionalCodeModification r = new TicketReservationWithOptionalCodeModification(trm,
                Optional.empty());
        Date expiration = DateUtils.addDays(new Date(), 1);
        String reservationId = ticketReservationManager.createTicketReservation(event,
                Collections.singletonList(r), Collections.emptyList(), expiration, Optional.empty(),
                Optional.empty(), Locale.ENGLISH, false);
        ticketReservationManager.confirm("TOKEN", null, event, reservationId, "email@email.ch",
                new CustomerName("Full Name", "Full", "Name", event), Locale.ENGLISH, null,
                new TotalPrice(1000, 10, 0, 0), Optional.empty(), Optional.of(PaymentProxy.ON_SITE), false,
                null, null, null);
        List<Ticket> tickets = ticketRepository.findTicketsInReservation(reservationId);
        UpdateTicketOwnerForm first = new UpdateTicketOwnerForm();
        first.setEmail("email@email.ch");
        //first.setTShirtSize("SMALL");
        //first.setGender("F");
        first.setFirstName("Full");
        first.setLastName("Name");
        UpdateTicketOwnerForm second = new UpdateTicketOwnerForm();
        //second.setTShirtSize("SMALL-F");
        second.setEmail("email@email.ch");
        second.setFirstName("Full");
        second.setLastName("Name");
        PartialTicketPDFGenerator generator = TemplateProcessor.buildPartialPDFTicket(Locale.ITALIAN, event,
                ticketReservationManager.findById(reservationId).get(),
                ticketCategoryRepository.getByIdAndActive(tickets.get(0).getCategoryId(), event.getId()),
                organizationRepository.getById(event.getOrganizationId()), templateManager, fileUploadManager,
                "");
        ticketReservationManager.updateTicketOwner(tickets.get(0), Locale.ITALIAN, event, first, (t) -> "",
                (t) -> "", Optional.empty());
        ticketReservationManager.updateTicketOwner(tickets.get(1), Locale.ITALIAN, event, second, (t) -> "",
                (t) -> "", Optional.empty());
        //FIXME
        //dataMigrator.fillTicketsGender();
        //ticketRepository.findTicketsInReservation(reservationId).forEach(t -> assertEquals("F", t.getGender()));
    } finally {
        eventManager.deleteEvent(event.getId(), eventUsername.getValue());
    }
}

From source file:com.galenframework.speclang2.pagespec.PageSectionProcessor.java

private void processObjectLevelRule(ObjectSpecs objectSpecs, StructNode sourceNode) throws IOException {
    String ruleText = sourceNode.getName().substring(1).trim();
    Pair<PageRule, Map<String, String>> rule = findAndProcessRule(ruleText, sourceNode);

    try {
        pageSpecHandler.setGlobalVariable("objectName", objectSpecs.getObjectName(), sourceNode);

        List<StructNode> specNodes = rule.getKey().apply(pageSpecHandler, ruleText, objectSpecs.getObjectName(),
                rule.getValue(), sourceNode.getChildNodes());

        SpecGroup specGroup = new SpecGroup();
        specGroup.setName(ruleText);
        objectSpecs.addSpecGroup(specGroup);

        for (StructNode specNode : specNodes) {
            specGroup.addSpec(
                    pageSpecHandler.getSpecReader().read(specNode.getName(), pageSpecHandler.getContextPath()));
        }
    } catch (Exception ex) {
        throw new SyntaxException(sourceNode, "Error processing rule: " + ruleText, ex);
    }
}

From source file:com.snaplogic.snaps.lunex.BaseService.java

private StringBuilder getJsonSlice(Pair<String, ExpressionProperty> paramPair, Document document) {
    String key = paramPair.getKey();
    StringBuilder jsonSlice = new StringBuilder();
    if (((HashMap) document.get()).containsKey(paramPair.getLeft())) {
        if (REQ_BODY_PARAM_INFO.get(key) == 1) {
            jsonSlice.append(QUOTE).append(key).append(QUOTE).append(COLON)
                    .append(paramPair.getRight().eval(document)).append(COMMA);
        } else {
            jsonSlice.append(QUOTE).append(key).append(QUOTE).append(COLON).append(QUOTE)
                    .append(paramPair.getRight().eval(document)).append(QUOTE).append(COMMA);
        }
    }
    return jsonSlice;
}

From source file:com.act.lcms.db.model.StandardWell.java

public List<StandardWell> insertFromPlateComposition(DB db, PlateCompositionParser parser, Plate p)
        throws SQLException, IOException {
    Map<Pair<String, String>, String> msids = parser.getCompositionTables().get("chemical");
    List<Pair<String, String>> sortedCoordinates = new ArrayList<>(msids.keySet());
    Collections.sort(sortedCoordinates, new Comparator<Pair<String, String>>() {
        // TODO: parse the values of these pairs as we read them so we don't need this silly comparator.
        @Override
        public int compare(Pair<String, String> o1, Pair<String, String> o2) {
            if (o1.getKey().equals(o2.getKey())) {
                return Integer.valueOf(Integer.parseInt(o1.getValue()))
                        .compareTo(Integer.parseInt(o2.getValue()));
            }
            return o1.getKey().compareTo(o2.getKey());
        }
    });

    List<StandardWell> results = new ArrayList<>();
    for (Pair<String, String> coords : sortedCoordinates) {
        String chemical = parser.getCompositionTables().get("chemical").get(coords);
        if (chemical == null || chemical.isEmpty()) {
            continue;
        }
        Map<Pair<String, String>, String> mediaMap = parser.getCompositionTables().get("media");
        if (mediaMap == null) {
            mediaMap = parser.getCompositionTables().get("solvent");
        }
        String media = mediaMap != null ? mediaMap.get(coords) : null;
        Map<Pair<String, String>, String> notesMap = parser.getCompositionTables().get("note");
        String note = notesMap != null ? notesMap.get(coords) : null;
        Pair<Integer, Integer> index = parser.getCoordinatesToIndices().get(coords);
        Map<Pair<String, String>, String> concentrationsMap = parser.getCompositionTables()
                .get("concentration");
        Double concentration = concentrationsMap != null ? Double.parseDouble(concentrationsMap.get(coords))
                : null;
        StandardWell s = INSTANCE.insert(db, p.getId(), index.getLeft(), index.getRight(), chemical, media,
                note, concentration);

        results.add(s);
    }

    return results;
}

From source file:com.kantenkugel.kanzebot.api.command.CommandGroup.java

@Override
public boolean handlePrivate(PrivateChannel channel, User author, Message fullMessage, String args,
        Object[] customArgs) {
    if (args.length() == 0)
        return false;
    String[] split = args.split("\\s+", 2);
    Pair<Command, ArgParser> sub = subCommands.get(split[0]);
    if (sub != null) {
        if (sub.getValue() != null) {
            ArgParser.ParserResult parserResult = sub.getValue().parseArgs(channel.getJDA(), null, args);
            if (parserResult.getError() != null) {
                MessageUtil.sendMessage(channel,
                        parserResult.getError() + "\nUsage:\n" + sub.getKey().getUsage());
                return true;
            }
            customArgs = parserResult.getArgs();
        }
        return sub.getKey().handlePrivate(channel, author, fullMessage, args, customArgs);
    } else {
        return false;
    }
}