Example usage for org.apache.commons.lang3.tuple.ImmutablePair: the ImmutablePair(L, R) constructor

Introduction

This page collects example usages of the ImmutablePair(L, R) constructor from org.apache.commons.lang3.tuple.

Prototype

public ImmutablePair(final L left, final R right) 

Document

Create a new pair instance.
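
A minimal sketch of the prototype above: the constructor pairs two values of independent types, and the static factory ImmutablePair.of(left, right) builds an equal instance.

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class ImmutablePairDemo {
    public static void main(String[] args) {
        // Type arguments are inferred from the constructor arguments.
        Pair<String, Integer> pair = new ImmutablePair<>("answer", 42);

        System.out.println(pair.getLeft());  // answer
        System.out.println(pair.getRight()); // 42

        // The static factory builds an equal instance; Pair equality is value-based.
        System.out.println(pair.equals(ImmutablePair.of("answer", 42))); // true
    }
}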

Usage

From source file: dao.LineageDAO.java

public static ObjectNode getFlowLineage(String application, String project, Long flowId) {
    ObjectNode resultNode = Json.newObject();
    List<LineageNode> nodes = new ArrayList<LineageNode>();
    List<LineageEdge> edges = new ArrayList<LineageEdge>();
    String flowName = null;

    Map<Long, Integer> addedJobNodes = new HashMap<Long, Integer>();
    Map<Pair<String, String>, Integer> addedDataNodes = new HashMap<Pair<String, String>, Integer>();

    if (StringUtils.isBlank(application) || StringUtils.isBlank(project) || (flowId <= 0)) {
        resultNode.set("nodes", Json.toJson(nodes));
        resultNode.set("links", Json.toJson(edges));
        return resultNode;
    }

    String applicationName = application.replace(".", " ");

    int appID = 0;
    try {
        appID = getJdbcTemplate().queryForObject(GET_APP_ID, new Object[] { applicationName }, Integer.class);
    } catch (EmptyResultDataAccessException e) {
        Logger.error("getFlowLineage get application id failed, application name = " + application);
        Logger.error("Exception = " + e.getMessage());
    }

    Map<Long, List<LineageNode>> nodeHash = new HashMap<Long, List<LineageNode>>();
    Map<String, List<LineageNode>> partitionedNodeHash = new HashMap<String, List<LineageNode>>();

    if (appID != 0) {
        try {
            flowName = getJdbcTemplate().queryForObject(GET_FLOW_NAME, new Object[] { appID, flowId },
                    String.class);
        } catch (EmptyResultDataAccessException e) {
            Logger.error("getFlowLineage get flow name failed, application name = " + application + " flowId "
                    + Long.toString(flowId));
            Logger.error("Exception = " + e.getMessage());
        }

        Long flowExecId = 0L;
        try {
            flowExecId = getJdbcTemplate().queryForObject(GET_LATEST_FLOW_EXEC_ID,
                    new Object[] { appID, flowId }, Long.class);
        } catch (EmptyResultDataAccessException e) {
            Logger.error("getFlowLineage get flow execution id failed, application name = " + application
                    + " flowId " + Long.toString(flowExecId));
            Logger.error("Exception = " + e.getMessage());
        }
        List<Map<String, Object>> rows = getJdbcTemplate().queryForList(GET_FLOW_DATA_LINEAGE, appID,
                flowExecId);
        if (rows != null) {
            for (Map row : rows) {
                Long jobExecId = ((BigInteger) row.get("job_exec_id")).longValue();
                LineageNode node = new LineageNode();
                node.abstracted_path = (String) row.get("abstracted_object_name");
                node.source_target_type = (String) row.get("source_target_type");
                node.exec_id = jobExecId;
                Object recordCountObject = row.get("record_count");
                if (recordCountObject != null) {
                    node.record_count = ((BigInteger) recordCountObject).longValue();
                }

                node.application_id = (int) row.get("app_id");
                node.cluster = (String) row.get("app_code");
                node.partition_type = (String) row.get("partition_type");
                node.operation = (String) row.get("operation");
                node.partition_start = (String) row.get("partition_start");
                node.partition_end = (String) row.get("partition_end");
                node.full_object_name = (String) row.get("full_object_name");
                node.job_start_time = DateFormat.format(row.get("start_time").toString());
                node.job_end_time = DateFormat.format(row.get("end_time").toString());
                node.storage_type = ((String) row.get("storage_type")).toLowerCase();
                node.node_type = "data";
                node._sort_list = new ArrayList<String>();
                node._sort_list.add("cluster");
                node._sort_list.add("abstracted_path");
                node._sort_list.add("storage_type");
                node._sort_list.add("partition_type");
                node._sort_list.add("partition_start");
                node._sort_list.add("partition_end");
                node._sort_list.add("source_target_type");
                List<LineageNode> nodeList = nodeHash.get(jobExecId);
                if (nodeList != null) {
                    nodeList.add(node);
                } else {
                    nodeList = new ArrayList<LineageNode>();
                    nodeList.add(node);
                    nodeHash.put(jobExecId, nodeList);
                }
            }
        }

        List<LineageNode> jobNodes = new ArrayList<LineageNode>();
        List<Map<String, Object>> jobRows = getJdbcTemplate().queryForList(GET_FLOW_JOB, appID, flowExecId, 30);
        int index = 0;
        int edgeIndex = 0;
        Map<Long, LineageNode> jobNodeMap = new HashMap<Long, LineageNode>();
        List<Pair<Integer, Integer>> addedEdges = new ArrayList<Pair<Integer, Integer>>();
        if (jobRows != null) {
            for (Map row : jobRows) {
                Long jobExecId = ((BigInteger) row.get("job_exec_id")).longValue();
                LineageNode node = new LineageNode();
                node._sort_list = new ArrayList<String>();
                node.node_type = "script";
                node.job_type = (String) row.get("job_type");
                node.cluster = (String) row.get("app_code");
                node.job_path = (String) row.get("job_path");
                node.job_name = (String) row.get("job_name");
                node.pre_jobs = (String) row.get("pre_jobs");
                node.post_jobs = (String) row.get("post_jobs");
                node.job_id = (Long) row.get("job_id");
                node.job_start_time = DateFormat.format(row.get("start_time").toString());
                node.job_end_time = DateFormat.format(row.get("end_time").toString());
                node.exec_id = jobExecId;
                node._sort_list.add("cluster");
                node._sort_list.add("job_path");
                node._sort_list.add("job_name");
                node._sort_list.add("job_type");
                node._sort_list.add("job_start_time");
                node._sort_list.add("job_end_time");
                Integer id = addedJobNodes.get(jobExecId);
                if (id == null) {
                    node.id = index++;
                    nodes.add(node);
                    jobNodeMap.put(node.job_id, node);
                    jobNodes.add(node);
                    addedJobNodes.put(jobExecId, node.id);
                } else {
                    node.id = id;
                }

                String sourceType = (String) row.get("source_target_type");
                if (sourceType.equalsIgnoreCase("target")) {
                    List<LineageNode> sourceNodeList = nodeHash.get(jobExecId);
                    if (sourceNodeList != null && sourceNodeList.size() > 0) {
                        for (LineageNode sourceNode : sourceNodeList) {
                            if (sourceNode.source_target_type.equalsIgnoreCase("source")) {
                                Pair<String, String> matchedSourcePair = new ImmutablePair<>(sourceNode.abstracted_path,
                                        sourceNode.partition_end);
                                Integer nodeId = addedDataNodes.get(matchedSourcePair);
                                if (nodeId == null) {
                                    List<LineageNode> nodeList = partitionedNodeHash
                                            .get(sourceNode.abstracted_path);
                                    if (StringUtils.isBlank(sourceNode.partition_end)) {
                                        Boolean bFound = false;
                                        if (nodeList != null) {
                                            for (LineageNode n : nodeList) {
                                                if (StringUtils.isNotBlank(n.partition_end) && n.partition_end
                                                        .compareTo(sourceNode.job_start_time) < 0) {
                                                    sourceNode.id = n.id;
                                                    bFound = true;
                                                    break;
                                                }
                                            }
                                        }
                                        if (!bFound) {
                                            sourceNode.id = index++;
                                            nodes.add(sourceNode);
                                            Pair<String, String> sourcePair = new ImmutablePair<>(sourceNode.abstracted_path,
                                                    sourceNode.partition_end);
                                            addedDataNodes.put(sourcePair, sourceNode.id);
                                        }
                                    } else {
                                        if (nodeList == null) {
                                            nodeList = new ArrayList<LineageNode>();
                                        }
                                        nodeList.add(sourceNode);
                                        partitionedNodeHash.put(sourceNode.abstracted_path, nodeList);
                                        sourceNode.id = index++;
                                        nodes.add(sourceNode);
                                        Pair<String, String> sourcePair = new ImmutablePair<>(sourceNode.abstracted_path,
                                                sourceNode.partition_end);
                                        addedDataNodes.put(sourcePair, sourceNode.id);
                                    }
                                } else {
                                    sourceNode.id = nodeId;
                                }
                                LineageEdge edge = new LineageEdge();
                                edge.id = edgeIndex++;
                                edge.source = sourceNode.id;
                                edge.target = node.id;
                                if (StringUtils.isNotBlank(sourceNode.operation)) {
                                    edge.label = sourceNode.operation;
                                } else {
                                    edge.label = "load";
                                }
                                edge.chain = "data";
                                edges.add(edge);
                            }
                        }
                    }
                } else if (sourceType.equalsIgnoreCase("source")) {

                    List<LineageNode> targetNodeList = nodeHash.get(jobExecId);
                    if (targetNodeList != null && targetNodeList.size() > 0) {
                        for (LineageNode targetNode : targetNodeList) {
                            if (targetNode.source_target_type.equalsIgnoreCase("target")) {
                                Pair<String, String> matchedTargetPair = new ImmutablePair<>(targetNode.abstracted_path,
                                        targetNode.partition_end);
                                Integer nodeId = addedDataNodes.get(matchedTargetPair);
                                if (nodeId == null) {
                                    List<LineageNode> nodeList = partitionedNodeHash
                                            .get(targetNode.abstracted_path);
                                    if (StringUtils.isBlank(targetNode.partition_end)) {
                                        Boolean bFound = false;
                                        if (nodeList != null) {
                                            for (LineageNode n : nodeList) {
                                                if (StringUtils.isNotBlank(n.partition_end) && n.partition_end
                                                        .compareTo(targetNode.job_start_time) < 0) {
                                                    targetNode.id = n.id;
                                                    bFound = true;
                                                    break;
                                                }
                                            }
                                        }
                                        if (!bFound) {
                                            targetNode.id = index++;
                                            nodes.add(targetNode);
                                            Pair<String, String> targetPair = new ImmutablePair<>(targetNode.abstracted_path,
                                                    targetNode.partition_end);
                                            addedDataNodes.put(targetPair, targetNode.id);
                                        }
                                    } else {
                                        if (nodeList == null) {
                                            nodeList = new ArrayList<LineageNode>();
                                        }
                                        nodeList.add(targetNode);
                                        partitionedNodeHash.put(targetNode.abstracted_path, nodeList);
                                        targetNode.id = index++;
                                        nodes.add(targetNode);
                                        Pair<String, String> targetPair = new ImmutablePair<>(targetNode.abstracted_path,
                                                targetNode.partition_end);
                                        addedDataNodes.put(targetPair, targetNode.id);
                                    }
                                } else {
                                    targetNode.id = nodeId;
                                }
                                LineageEdge edge = new LineageEdge();
                                edge.id = edgeIndex++;
                                edge.source = node.id;
                                edge.target = targetNode.id;
                                if (StringUtils.isNotBlank(targetNode.operation)) {
                                    edge.label = targetNode.operation;
                                } else {
                                    edge.label = "load";
                                }
                                edge.chain = "data";
                                edges.add(edge);
                            }
                        }
                    }
                }
            }
            for (LineageNode node : jobNodes) {
                Long jobId = node.job_id;
                if (StringUtils.isNotBlank(node.pre_jobs)) {
                    String[] prevJobIds = node.pre_jobs.split(",");
                    if (prevJobIds != null) {
                        for (String jobIdString : prevJobIds) {
                            if (StringUtils.isNotBlank(jobIdString)) {
                                Long id = Long.parseLong(jobIdString);
                                LineageNode sourceNode = jobNodeMap.get(id);
                                if (sourceNode != null) {
                                    Pair<Integer, Integer> pair = new ImmutablePair<>(sourceNode.id, node.id);
                                    if (!addedEdges.contains(pair)) {
                                        LineageEdge edge = new LineageEdge();
                                        edge.id = edgeIndex++;
                                        edge.source = sourceNode.id;
                                        edge.target = node.id;
                                        edge.label = "";
                                        edge.type = "job";
                                        edges.add(edge);
                                        addedEdges.add(pair);
                                    }
                                }
                            }
                        }
                    }
                }

                if (StringUtils.isNotBlank(node.post_jobs)) {
                    String[] postJobIds = node.post_jobs.split(",");
                    if (postJobIds != null) {
                        for (String jobIdString : postJobIds) {
                            if (StringUtils.isNotBlank(jobIdString)) {
                                Long id = Long.parseLong(jobIdString);
                                LineageNode targetNode = jobNodeMap.get(id);
                                if (targetNode != null) {
                                    Pair<Integer, Integer> pair = new ImmutablePair<>(node.id, targetNode.id);
                                    if (!addedEdges.contains(pair)) {
                                        LineageEdge edge = new LineageEdge();
                                        edge.id = edgeIndex++;
                                        edge.source = node.id;
                                        edge.target = targetNode.id;
                                        edge.label = "";
                                        edge.type = "job";
                                        edges.add(edge);
                                        addedEdges.add(pair);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    resultNode.set("nodes", Json.toJson(nodes));
    resultNode.set("links", Json.toJson(edges));
    resultNode.put("flowName", flowName);
    return resultNode;
}
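
The example above depends on ImmutablePair's value-based equals and hashCode: two separately constructed pairs with equal components act as the same key in maps such as addedDataNodes. A stripped-down sketch of that pattern (the path and partition values are illustrative, not taken from LineageDAO):

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class PairKeyDemo {
    public static void main(String[] args) {
        Map<Pair<String, String>, Integer> addedDataNodes = new HashMap<Pair<String, String>, Integer>();

        // Register a data node under its (path, partition) key.
        addedDataNodes.put(new ImmutablePair<>("/data/events", "2016-01-01"), 7);

        // A freshly constructed pair with equal components finds the same entry.
        Integer nodeId = addedDataNodes.get(new ImmutablePair<>("/data/events", "2016-01-01"));
        System.out.println(nodeId); // 7
    }
}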

From source file: cgeo.geocaching.connector.gc.GCParser.java

public static ImmutablePair<StatusCode, String> postLog(final String geocode, final String cacheid,
        final String[] viewstates, final LogType logType, final int year, final int month, final int day,
        final String log, final List<TrackableLog> trackables) {
    if (GCLogin.isEmpty(viewstates)) {
        Log.e("GCParser.postLog: No viewstate given");
        return new ImmutablePair<StatusCode, String>(StatusCode.LOG_POST_ERROR, "");
    }

    if (StringUtils.isBlank(log)) {
        Log.e("GCParser.postLog: No log text given");
        return new ImmutablePair<StatusCode, String>(StatusCode.NO_LOG_TEXT, "");
    }

    final String logInfo = log.replace("\n", "\r\n").trim(); // Windows EOL; trim leading and trailing whitespace

    Log.i("Trying to post log for cache #" + cacheid + " - action: " + logType + "; date: " + year + "." + month
            + "." + day + ", log: " + logInfo + "; trackables: "
            + (trackables != null ? trackables.size() : "0"));

    final Parameters params = new Parameters("__EVENTTARGET", "", "__EVENTARGUMENT", "", "__LASTFOCUS", "",
            "ctl00$ContentBody$LogBookPanel1$ddLogType", Integer.toString(logType.id),
            "ctl00$ContentBody$LogBookPanel1$uxDateVisited",
            GCLogin.getCustomGcDateFormat().format(new GregorianCalendar(year, month - 1, day).getTime()),
            "ctl00$ContentBody$LogBookPanel1$uxDateVisited$Month", Integer.toString(month),
            "ctl00$ContentBody$LogBookPanel1$uxDateVisited$Day", Integer.toString(day),
            "ctl00$ContentBody$LogBookPanel1$uxDateVisited$Year", Integer.toString(year),
            "ctl00$ContentBody$LogBookPanel1$DateTimeLogged",
            String.format("%02d", month) + "/" + String.format("%02d", day) + "/" + String.format("%04d", year),
            "ctl00$ContentBody$LogBookPanel1$DateTimeLogged$Month", Integer.toString(month),
            "ctl00$ContentBody$LogBookPanel1$DateTimeLogged$Day", Integer.toString(day),
            "ctl00$ContentBody$LogBookPanel1$DateTimeLogged$Year", Integer.toString(year),
            "ctl00$ContentBody$LogBookPanel1$LogButton", "Submit Log Entry",
            "ctl00$ContentBody$LogBookPanel1$uxLogInfo", logInfo,
            "ctl00$ContentBody$LogBookPanel1$btnSubmitLog", "Submit Log Entry",
            "ctl00$ContentBody$LogBookPanel1$uxLogCreationSource", "Old",
            "ctl00$ContentBody$uxVistOtherListingGC", "");
    GCLogin.putViewstates(params, viewstates);
    if (trackables != null && !trackables.isEmpty()) { // we have some trackables to process
        final StringBuilder hdnSelected = new StringBuilder();

        for (final TrackableLog tb : trackables) {
            if (tb.action != LogTypeTrackable.DO_NOTHING) {
                hdnSelected.append(Integer.toString(tb.id));
                hdnSelected.append(tb.action.action);
                hdnSelected.append(',');
            }
        }

        params.put("ctl00$ContentBody$LogBookPanel1$uxTrackables$hdnSelectedActions", hdnSelected.toString(), // selected trackables
                "ctl00$ContentBody$LogBookPanel1$uxTrackables$hdnCurrentFilter", "");
    }

    final String uri = new Uri.Builder().scheme("http").authority("www.geocaching.com").path("/seek/log.aspx")
            .encodedQuery("ID=" + cacheid).build().toString();
    String page = GCLogin.getInstance().postRequestLogged(uri, params);
    if (!GCLogin.getInstance().getLoginStatus(page)) {
        Log.e("GCParser.postLog: Cannot log in geocaching");
        return new ImmutablePair<StatusCode, String>(StatusCode.NOT_LOGGED_IN, "");
    }

    // maintenance and archived logs need to be confirmed

    final MatcherWrapper matcher = new MatcherWrapper(GCConstants.PATTERN_MAINTENANCE, page);

    try {
        if (matcher.find() && matcher.groupCount() > 0) {
            final String[] viewstatesConfirm = GCLogin.getViewstates(page);

            if (GCLogin.isEmpty(viewstatesConfirm)) {
                Log.e("GCParser.postLog: No viewstate for confirm log");
                return new ImmutablePair<StatusCode, String>(StatusCode.LOG_POST_ERROR, "");
            }

            params.clear();
            GCLogin.putViewstates(params, viewstatesConfirm);
            params.put("__EVENTTARGET", "");
            params.put("__EVENTARGUMENT", "");
            params.put("__LASTFOCUS", "");
            params.put("ctl00$ContentBody$LogBookPanel1$btnConfirm", "Yes");
            params.put("ctl00$ContentBody$LogBookPanel1$uxLogInfo", logInfo);
            params.put("ctl00$ContentBody$uxVistOtherListingGC", "");
            if (trackables != null && !trackables.isEmpty()) { // we have some trackables to process
                final StringBuilder hdnSelected = new StringBuilder();

                for (final TrackableLog tb : trackables) {
                    final String action = Integer.toString(tb.id) + tb.action.action;
                    final StringBuilder paramText = new StringBuilder(
                            "ctl00$ContentBody$LogBookPanel1$uxTrackables$repTravelBugs$ctl");

                    if (tb.ctl < 10) {
                        paramText.append('0');
                    }
                    paramText.append(tb.ctl).append("$ddlAction");
                    params.put(paramText.toString(), action);
                    if (tb.action != LogTypeTrackable.DO_NOTHING) {
                        hdnSelected.append(action);
                        hdnSelected.append(',');
                    }
                }

                params.put("ctl00$ContentBody$LogBookPanel1$uxTrackables$hdnSelectedActions",
                        hdnSelected.toString()); // selected trackables
                params.put("ctl00$ContentBody$LogBookPanel1$uxTrackables$hdnCurrentFilter", "");
            }

            page = Network.getResponseData(Network.postRequest(uri, params));
        }
    } catch (final RuntimeException e) {
        Log.e("GCParser.postLog.confim", e);
    }

    try {

        final MatcherWrapper matcherOk = new MatcherWrapper(GCConstants.PATTERN_OK1, page);
        if (matcherOk.find()) {
            Log.i("Log successfully posted to cache #" + cacheid);

            if (geocode != null) {
                DataStore.saveVisitDate(geocode);
            }

            GCLogin.getInstance().getLoginStatus(page);
            // the log-successful page still contains the old value
            if (GCLogin.getInstance().getActualCachesFound() >= 0) {
                GCLogin.getInstance().setActualCachesFound(GCLogin.getInstance().getActualCachesFound() + 1);
            }

            final String logID = TextUtils.getMatch(page, GCConstants.PATTERN_LOG_IMAGE_UPLOAD, "");

            return new ImmutablePair<StatusCode, String>(StatusCode.NO_ERROR, logID);
        }
    } catch (final Exception e) {
        Log.e("GCParser.postLog.check", e);
    }

    Log.e("GCParser.postLog: Failed to post log because of unknown error");
    return new ImmutablePair<StatusCode, String>(StatusCode.LOG_POST_ERROR, "");
}
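
postLog uses ImmutablePair<StatusCode, String> as a lightweight result type: the left component carries the status, the right the posted log's ID (or an empty string on failure). A hypothetical caller, not part of GCParser, might unpack it like this, with arguments mirroring postLog's own parameters:

// Hypothetical caller: unpack the (status, logId) result of postLog.
final ImmutablePair<StatusCode, String> result = GCParser.postLog(geocode, cacheid, viewstates,
        logType, year, month, day, log, trackables);
if (result.getLeft() == StatusCode.NO_ERROR) {
    final String logId = result.getRight();
    Log.i("Posted log " + logId);
} else {
    Log.e("Posting failed with status " + result.getLeft());
}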

From source file: com.linkedin.pinot.integration.tests.BaseClusterIntegrationTest.java

public static Future<Map<File, File>> buildSegmentsFromAvro(final List<File> avroFiles, Executor executor,
        int baseSegmentIndex, final File baseDirectory, final File segmentTarDir, final String tableName,
        final boolean createStarTreeIndex, final com.linkedin.pinot.common.data.Schema inputPinotSchema) {
    int segmentCount = avroFiles.size();
    LOGGER.info("Building " + segmentCount + " segments in parallel");
    List<ListenableFutureTask<Pair<File, File>>> futureTasks = new ArrayList<ListenableFutureTask<Pair<File, File>>>();

    for (int i = 1; i <= segmentCount; ++i) {
        final int segmentIndex = i - 1;
        final int segmentNumber = i + baseSegmentIndex;

        final ListenableFutureTask<Pair<File, File>> buildSegmentFutureTask = ListenableFutureTask
                .<Pair<File, File>>create(new Callable<Pair<File, File>>() {
                    @Override
                    public Pair<File, File> call() throws Exception {
                        try {
                            // Build segment
                            LOGGER.info("Starting to build segment " + segmentNumber);
                            File outputDir = new File(baseDirectory, "segment-" + segmentNumber);
                            final File inputAvroFile = avroFiles.get(segmentIndex);
                            final SegmentGeneratorConfig genConfig = SegmentTestUtils
                                    .getSegmentGenSpecWithSchemAndProjectedColumns(inputAvroFile, outputDir,
                                            TimeUnit.DAYS, tableName, inputPinotSchema);

                            if (inputPinotSchema != null) {
                                genConfig.setSchema(inputPinotSchema);
                            }

                            // jfim: We add a space and a special character as a regression test for PINOT-3296:
                            // segments with spaces in their filenames don't work properly
                            genConfig.setSegmentNamePostfix(Integer.toString(segmentNumber) + " %");
                            genConfig.setEnableStarTreeIndex(createStarTreeIndex);

                            // Enable off heap star tree format in the integration test.
                            StarTreeIndexSpec starTreeIndexSpec = null;
                            if (createStarTreeIndex) {
                                starTreeIndexSpec = new StarTreeIndexSpec();
                                starTreeIndexSpec.setEnableOffHeapFormat(true);
                            }
                            genConfig.setStarTreeIndexSpec(starTreeIndexSpec);

                            final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
                            driver.init(genConfig);
                            driver.build();

                            // Tar segment
                            String segmentName = outputDir.list()[0];
                            final String tarGzPath = TarGzCompressionUtils.createTarGzOfDirectory(
                                    outputDir.getAbsolutePath() + "/" + segmentName,
                                    new File(segmentTarDir, segmentName).getAbsolutePath());
                            LOGGER.info("Completed segment " + segmentNumber + " : " + segmentName
                                    + " from file " + inputAvroFile.getName());
                            return new ImmutablePair<File, File>(inputAvroFile, new File(tarGzPath));
                        } catch (Exception e) {
                            LOGGER.error("Exception while building segment input: {} output {} ",
                                    avroFiles.get(segmentIndex), "segment-" + segmentNumber);
                            throw new RuntimeException(e);
                        }
                    }
                });

        futureTasks.add(buildSegmentFutureTask);
        executor.execute(buildSegmentFutureTask);
    }

    ListenableFuture<List<Pair<File, File>>> pairListFuture = Futures.allAsList(futureTasks);
    return Futures.transform(pairListFuture, new AsyncFunction<List<Pair<File, File>>, Map<File, File>>() {
        @Override
        public ListenableFuture<Map<File, File>> apply(List<Pair<File, File>> input) throws Exception {
            Map<File, File> avroToSegmentMap = new HashMap<File, File>();
            for (Pair<File, File> avroToSegmentPair : input) {
                avroToSegmentMap.put(avroToSegmentPair.getLeft(), avroToSegmentPair.getRight());
            }
            return Futures.immediateFuture(avroToSegmentMap);
        }
    });
}
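
The Futures.transform step above is just a pairs-to-map fold. In isolation, with plain strings standing in for the File objects, the step looks like this (a standalone sketch, not Pinot code):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class PairsToMapDemo {
    public static void main(String[] args) {
        List<Pair<String, String>> avroToSegmentPairs = Arrays.<Pair<String, String>>asList(
                new ImmutablePair<String, String>("part-0.avro", "segment-0.tar.gz"),
                new ImmutablePair<String, String>("part-1.avro", "segment-1.tar.gz"));

        // Fold the (left, right) pairs into a map, as the transform above does with files.
        Map<String, String> avroToSegmentMap = new HashMap<String, String>();
        for (Pair<String, String> pair : avroToSegmentPairs) {
            avroToSegmentMap.put(pair.getLeft(), pair.getRight());
        }
        System.out.println(avroToSegmentMap);
    }
}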

From source file: cgeo.geocaching.connector.gc.GCParser.java

/**
 * Upload an image to a log that has already been posted
 *
 * @param logId
 *            the ID of the log to upload the image to. Found on page returned when log is uploaded
 * @param caption
 *            of the image; max 50 chars
 * @param description
 *            of the image; max 250 chars
 * @param imageUri
 *            the URI for the image to be uploaded
 * @return status code to indicate success or failure
 */
public static ImmutablePair<StatusCode, String> uploadLogImage(final String logId, final String caption,
        final String description, final Uri imageUri) {
    final String uri = new Uri.Builder().scheme("http").authority("www.geocaching.com")
            .path("/seek/upload.aspx").encodedQuery("LID=" + logId).build().toString();

    final String page = GCLogin.getInstance().getRequestLogged(uri, null);
    if (StringUtils.isBlank(page)) {
        Log.e("GCParser.uploadLogImage: No data from server");
        return new ImmutablePair<StatusCode, String>(StatusCode.UNKNOWN_ERROR, null);
    }
    assert page != null;

    final String[] viewstates = GCLogin.getViewstates(page);

    final Parameters uploadParams = new Parameters("__EVENTTARGET", "", "__EVENTARGUMENT", "",
            "ctl00$ContentBody$ImageUploadControl1$uxFileCaption", caption,
            "ctl00$ContentBody$ImageUploadControl1$uxFileDesc", description,
            "ctl00$ContentBody$ImageUploadControl1$uxUpload", "Upload");
    GCLogin.putViewstates(uploadParams, viewstates);

    final File image = new File(imageUri.getPath());
    final String response = Network.getResponseData(Network.postRequest(uri, uploadParams,
            "ctl00$ContentBody$ImageUploadControl1$uxFileUpload", "image/jpeg", image));

    final MatcherWrapper matcherUrl = new MatcherWrapper(GCConstants.PATTERN_IMAGE_UPLOAD_URL, response);

    if (matcherUrl.find()) {
        Log.i("Logimage successfully uploaded.");
        final String uploadedImageUrl = matcherUrl.group(1);
        return ImmutablePair.of(StatusCode.NO_ERROR, uploadedImageUrl);
    }
    Log.e("GCParser.uploadLogIMage: Failed to upload image because of unknown error");

    return ImmutablePair.of(StatusCode.LOGIMAGE_POST_ERROR, null);
}
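
Note that the error paths above return pairs whose right component is null; ImmutablePair accepts null on either side, so (status, null) is a legal result. A quick standalone check, with a String standing in for StatusCode:

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class NullComponentDemo {
    public static void main(String[] args) {
        // ImmutablePair permits null components, which the error paths above rely on.
        Pair<String, String> errorResult = new ImmutablePair<String, String>("LOGIMAGE_POST_ERROR", null);
        System.out.println(errorResult.getRight() == null); // true
    }
}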

From source file: com.intuit.wasabi.assignment.impl.AssignmentsImpl.java

/**
 * This method first creates an assignment object for NEW_ASSIGNMENT and then makes the corresponding entries in the database.
 *
 * @param experiment
 * @param userID
 * @param context
 * @param selectBucket
 * @param bucketList
 * @param date
 * @param segmentationProfile
 * @return the new assignment object, which is also persisted to the database.
 */
protected Assignment generateAssignment(Experiment experiment, User.ID userID, Context context,
        boolean selectBucket, BucketList bucketList, Date date, SegmentationProfile segmentationProfile) {
    Assignment result = createAssignmentObject(experiment, userID, context, selectBucket, bucketList, date,
            segmentationProfile);
    if (result.getStatus().equals(Assignment.Status.NEW_ASSIGNMENT)) {
        assignmentsRepository.assignUsersInBatch(newArrayList(new ImmutablePair<>(experiment, result)), date);
    }
    return result;
}
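
generateAssignment wraps its single (experiment, assignment) pair in a one-element list so it can reuse the batch repository call. The shape of that call, with placeholder strings instead of Wasabi types, is sketched below:

import java.util.Collections;
import java.util.List;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class SingletonBatchDemo {
    // Placeholder for a batch API such as assignUsersInBatch.
    static void assignInBatch(List<Pair<String, String>> assignments) {
        for (Pair<String, String> assignment : assignments) {
            System.out.println(assignment.getLeft() + " -> " + assignment.getRight());
        }
    }

    public static void main(String[] args) {
        // A single pair reuses the batch entry point via a one-element list.
        assignInBatch(Collections.<Pair<String, String>>singletonList(
                new ImmutablePair<String, String>("experiment-1", "new-assignment")));
    }
}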

From source file: com.epam.catgenome.manager.FeatureIndexManagerTest.java

private void checkDuplicates(List<VcfIndexEntry> entryList) {
    Map<Pair<Integer, Integer>, FeatureIndexEntry> duplicateMap = new HashMap<>();
    entryList.forEach(e -> {
        Pair<Integer, Integer> indexPair = new ImmutablePair<>(e.getStartIndex(), e.getEndIndex());
        Assert.assertFalse(String.format("Found duplicate: %d, %d", e.getStartIndex(), e.getEndIndex()),
                duplicateMap.containsKey(indexPair));
        duplicateMap.put(indexPair, e);
    });
}
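
The same pair-keyed bookkeeping generalizes to any (start, end) intervals. A generic variant, a hypothetical helper rather than anything in FeatureIndexManagerTest, might look like:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.ToIntFunction;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class DuplicateIntervalCheck {
    // Hypothetical generic variant of checkDuplicates: fails fast on a repeated (start, end) key.
    static <T> void assertNoDuplicateIntervals(List<T> items, ToIntFunction<T> start, ToIntFunction<T> end) {
        Map<Pair<Integer, Integer>, T> seen = new HashMap<Pair<Integer, Integer>, T>();
        for (T item : items) {
            Pair<Integer, Integer> key = new ImmutablePair<>(start.applyAsInt(item), end.applyAsInt(item));
            T previous = seen.putIfAbsent(key, item);
            if (previous != null) {
                throw new AssertionError("Found duplicate interval: " + key);
            }
        }
    }
}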

From source file: com.intuit.wasabi.assignment.impl.AssignmentsImplTest.java

@Test
public void doBatchAssignmentsMixAssignmentTest() throws IOException {
    // Input
    Application.Name appName = Application.Name.valueOf("Test");
    User.ID user = User.ID.valueOf("testUser");
    Context context = Context.valueOf("TEST");
    SegmentationProfile segmentationProfile = mock(SegmentationProfile.class);
    HttpHeaders headers = mock(HttpHeaders.class);
    Calendar date1 = Calendar.getInstance();
    date1.add(Calendar.DAY_OF_MONTH, -1);
    Calendar date2 = Calendar.getInstance();
    date2.add(Calendar.DAY_OF_MONTH, 10);

    Experiment exp1 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp1Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    Experiment exp2 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp2Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    ExperimentBatch experimentBatch = ExperimentBatch.newInstance()
            .withLabels(newHashSet(exp1.getLabel(), exp2.getLabel())).build();
    List<Experiment> expList = newArrayList(exp1, exp2);

    Map<Experiment.ID, Experiment> expMap = newHashMap();
    expMap.put(exp1.getID(), exp1);
    expMap.put(exp2.getID(), exp2);

    PrioritizedExperimentList pExpList = new PrioritizedExperimentList();
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp1, 1).build());
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp2, 2).build());
    Optional<PrioritizedExperimentList> prioritizedExperimentListOptional = Optional.of(pExpList);

    BucketList bucketList1 = new BucketList();
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("red")).withAllocationPercent(0.5).build());
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("blue")).withAllocationPercent(0.5).build());
    BucketList bucketList2 = new BucketList();
    bucketList2.addBucket(Bucket.newInstance(exp2.getID(), Bucket.Label.valueOf("yellow"))
            .withAllocationPercent(1.0).build());

    List<Experiment.ID> exclusionList = newArrayList();

    //Mock dependent interactions
    when(metadataCache.getExperimentById(exp1.getID())).thenReturn(Optional.of(exp1));
    when(metadataCache.getExperimentById(exp2.getID())).thenReturn(Optional.of(exp2));
    when(metadataCache.getExperimentsByAppName(appName)).thenReturn(expList);
    when(metadataCache.getPrioritizedExperimentListMap(appName)).thenReturn(prioritizedExperimentListOptional);
    when(metadataCache.getBucketList(exp1.getID())).thenReturn(bucketList1);
    when(metadataCache.getBucketList(exp2.getID())).thenReturn(bucketList2);
    when(metadataCache.getExclusionList(exp1.getID())).thenReturn(exclusionList);
    when(metadataCache.getExclusionList(exp2.getID())).thenReturn(exclusionList);

    List<Pair<Experiment, String>> existingAssignments = newArrayList(new ImmutablePair<>(exp2, "yellow"));
    Mockito.when(assignmentsRepository.getAssignments(user, appName, context, expMap))
            .thenReturn(existingAssignments);

    // This is the real call to the method under test
    List<Assignment> resultAssignments = assignmentsImpl.doBatchAssignments(user, appName, context, true, false,
            null, experimentBatch);

    //Verify result
    assertThat(resultAssignments.size(), is(2));
    assertThat(resultAssignments.get(0).getBucketLabel().toString(), anyOf(is("red"), is("blue")));
    assertThat(resultAssignments.get(0).getStatus().toString(),
            is(Assignment.Status.NEW_ASSIGNMENT.toString()));
    assertThat(resultAssignments.get(1).getBucketLabel().toString(), is("yellow"));
    assertThat(resultAssignments.get(1).getStatus().toString(),
            is(Assignment.Status.EXISTING_ASSIGNMENT.toString()));

}

From source file: com.intuit.wasabi.assignment.impl.AssignmentsImplTest.java

@Test
public void doPageAssignmentsTest() throws IOException {
    // Input
    Application.Name appName = Application.Name.valueOf("Test");
    Page.Name pageName = Page.Name.valueOf("TestPage1");
    User.ID user = User.ID.valueOf("testUser");
    Context context = Context.valueOf("TEST");
    SegmentationProfile segmentationProfile = mock(SegmentationProfile.class);
    HttpHeaders headers = mock(HttpHeaders.class);
    Calendar date1 = Calendar.getInstance();
    date1.add(Calendar.DAY_OF_MONTH, -1);
    Calendar date2 = Calendar.getInstance();
    date2.add(Calendar.DAY_OF_MONTH, 10);

    Experiment exp1 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp1Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    Experiment exp2 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp2Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    List<PageExperiment> pageExperiments = newArrayList();
    pageExperiments.add(PageExperiment.withAttributes(exp1.getID(), exp1.getLabel(), true).build());
    pageExperiments.add(PageExperiment.withAttributes(exp2.getID(), exp2.getLabel(), true).build());

    ExperimentBatch experimentBatch = ExperimentBatch.newInstance()
            .withLabels(newHashSet(exp1.getLabel(), exp2.getLabel())).build();
    List<Experiment> expList = newArrayList(exp1, exp2);

    Map<Experiment.ID, Experiment> expMap = newHashMap();
    expMap.put(exp1.getID(), exp1);
    expMap.put(exp2.getID(), exp2);

    PrioritizedExperimentList pExpList = new PrioritizedExperimentList();
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp1, 1).build());
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp2, 2).build());
    Optional<PrioritizedExperimentList> prioritizedExperimentListOptional = Optional.of(pExpList);

    BucketList bucketList1 = new BucketList();
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("red")).withAllocationPercent(0.5).build());
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("blue")).withAllocationPercent(0.5).build());
    BucketList bucketList2 = new BucketList();
    bucketList2.addBucket(Bucket.newInstance(exp2.getID(), Bucket.Label.valueOf("yellow"))
            .withAllocationPercent(1.0).build());

    List<Experiment.ID> exclusionList = newArrayList();

    //Mock dependent interactions
    when(metadataCache.getPageExperiments(appName, pageName)).thenReturn(pageExperiments);
    when(metadataCache.getExperimentById(exp1.getID())).thenReturn(Optional.of(exp1));
    when(metadataCache.getExperimentById(exp2.getID())).thenReturn(Optional.of(exp2));
    when(metadataCache.getExperimentsByAppName(appName)).thenReturn(expList);
    when(metadataCache.getPrioritizedExperimentListMap(appName)).thenReturn(prioritizedExperimentListOptional);
    when(metadataCache.getBucketList(exp1.getID())).thenReturn(bucketList1);
    when(metadataCache.getBucketList(exp2.getID())).thenReturn(bucketList2);
    when(metadataCache.getExclusionList(exp1.getID())).thenReturn(exclusionList);
    when(metadataCache.getExclusionList(exp2.getID())).thenReturn(exclusionList);

    List<Pair<Experiment, String>> existingAssignments = newArrayList(new ImmutablePair<>(exp2, "yellow"));
    Mockito.when(assignmentsRepository.getAssignments(user, appName, context, expMap))
            .thenReturn(existingAssignments);

    // This is the real call to the method under test
    List<Assignment> resultAssignments = assignmentsImpl.doPageAssignments(appName, pageName, user, context,
            true, false, headers, segmentationProfile);

    //Verify result
    assertThat(resultAssignments.size(), is(2));
    assertThat(resultAssignments.get(0).getBucketLabel().toString(), anyOf(is("red"), is("blue")));
    assertThat(resultAssignments.get(0).getStatus().toString(),
            is(Assignment.Status.NEW_ASSIGNMENT.toString()));
    assertThat(resultAssignments.get(1).getBucketLabel().toString(), is("yellow"));
    assertThat(resultAssignments.get(1).getStatus().toString(),
            is(Assignment.Status.EXISTING_ASSIGNMENT.toString()));
}

From source file: com.intuit.wasabi.assignment.impl.AssignmentsImplTest.java

@Test
public void getExistingAssignmentsTest() {
    // Input
    Application.Name appName = Application.Name.valueOf("Test");
    Page.Name pageName = Page.Name.valueOf("TestPage1");
    User.ID user = User.ID.valueOf("testUser");
    Context context = Context.valueOf("TEST");
    SegmentationProfile segmentationProfile = mock(SegmentationProfile.class);
    HttpHeaders headers = mock(HttpHeaders.class);
    Calendar date1 = Calendar.getInstance();
    date1.add(Calendar.DAY_OF_MONTH, -1);
    Calendar date2 = Calendar.getInstance();
    date2.add(Calendar.DAY_OF_MONTH, 10);

    Experiment exp1 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp1Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    Experiment exp2 = Experiment.withID(Experiment.ID.newInstance()).withApplicationName(appName)
            .withLabel(Experiment.Label.valueOf("exp2Label")).withStartTime(date1.getTime())
            .withEndTime(date2.getTime()).withSamplingPercent(1.0).withState(Experiment.State.RUNNING)
            .withIsPersonalizationEnabled(false).build();

    List<PageExperiment> pageExperiments = newArrayList();
    pageExperiments.add(PageExperiment.withAttributes(exp1.getID(), exp1.getLabel(), true).build());
    pageExperiments.add(PageExperiment.withAttributes(exp2.getID(), exp2.getLabel(), true).build());

    ExperimentBatch experimentBatch = ExperimentBatch.newInstance()
            .withLabels(newHashSet(exp1.getLabel(), exp2.getLabel())).build();
    List<Experiment> expList = newArrayList(exp1, exp2);

    Map<Experiment.ID, Experiment> expMap = newHashMap();
    expMap.put(exp1.getID(), exp1);
    expMap.put(exp2.getID(), exp2);

    PrioritizedExperimentList pExpList = new PrioritizedExperimentList();
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp1, 1).build());
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(exp2, 2).build());
    Optional<PrioritizedExperimentList> prioritizedExperimentListOptional = Optional.of(pExpList);

    BucketList bucketList1 = new BucketList();
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("red")).withAllocationPercent(0.5).build());
    bucketList1.addBucket(
            Bucket.newInstance(exp1.getID(), Bucket.Label.valueOf("blue")).withAllocationPercent(0.5).build());
    BucketList bucketList2 = new BucketList();
    bucketList2.addBucket(Bucket.newInstance(exp2.getID(), Bucket.Label.valueOf("yellow"))
            .withAllocationPercent(1.0).build());

    List<Experiment.ID> exclusionList = newArrayList();

    //Mock dependent interactions
    when(metadataCache.getPageExperiments(appName, pageName)).thenReturn(pageExperiments);
    when(metadataCache.getExperimentById(exp1.getID())).thenReturn(Optional.of(exp1));
    when(metadataCache.getExperimentById(exp2.getID())).thenReturn(Optional.of(exp2));
    when(metadataCache.getExperimentsByAppName(appName)).thenReturn(expList);
    when(metadataCache.getPrioritizedExperimentListMap(appName)).thenReturn(prioritizedExperimentListOptional);
    when(metadataCache.getBucketList(exp1.getID())).thenReturn(bucketList1);
    when(metadataCache.getBucketList(exp2.getID())).thenReturn(bucketList2);
    when(metadataCache.getExclusionList(exp1.getID())).thenReturn(exclusionList);
    when(metadataCache.getExclusionList(exp2.getID())).thenReturn(exclusionList);

    List<Pair<Experiment, String>> existingAssignments = newArrayList(new ImmutablePair<>(exp2, "yellow"));
    Mockito.when(assignmentsRepository.getAssignments(user, appName, context, expMap))
            .thenReturn(existingAssignments);

    // This is the real call to the method under test
    Assignment resultAssignment1 = assignmentsImpl.getExistingAssignment(user, appName, exp1.getLabel(),
            context);
    Assignment resultAssignment2 = assignmentsImpl.getExistingAssignment(user, appName, exp2.getLabel(),
            context);

    //Verify result
    assertNull(resultAssignment1);
    assertNotNull(resultAssignment2);
    assertThat(resultAssignment2.getStatus(), is(Assignment.Status.EXISTING_ASSIGNMENT));

}

From source file: eu.bittrade.libs.steemj.SteemJ.java

/**
 * Get the private and public key of a given type for the given
 * <code>account</code>.
 * 
 * @param account
 *            The account name to generate the keys for.
 * @param role
 *            The key type that should be generated.
 * @param steemPassword
 *            The password of the <code>account</code> valid for the Steem
 *            blockchain.
 * @return The requested key pair.
 */
public static ImmutablePair<PublicKey, String> getPrivateKeyFromPassword(AccountName account,
        PrivateKeyType role, String steemPassword) {
    String seed = account.getName() + role.name().toLowerCase() + steemPassword;
    ECKey keyPair = ECKey.fromPrivate(Sha256Hash.hash(seed.getBytes(), 0, seed.length()));

    return new ImmutablePair<>(new PublicKey(keyPair), SteemJUtils.privateKeyToWIF(keyPair));
}
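
A caller consumes the returned pair by position: the left component is the derived public key, the right the matching private key in wallet import format (WIF). A hypothetical caller, with a made-up account name and password:

// Hypothetical caller: derive and unpack the (public key, WIF private key) pair.
ImmutablePair<PublicKey, String> keyPair = SteemJ.getPrivateKeyFromPassword(
        new AccountName("dez1337"), PrivateKeyType.POSTING, "mySteemPassword");

PublicKey publicKey = keyPair.getLeft();   // safe to share
String privateKeyWif = keyPair.getRight(); // keep secret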