Example usage for org.apache.commons.lang3.tuple Pair getValue

List of usage examples for org.apache.commons.lang3.tuple Pair getValue

Introduction

In this page you can find the example usage for org.apache.commons.lang3.tuple Pair getValue.

Prototype

@Override
public R getValue() 

Source Link

Document

Gets the value from this pair.

This method implements the Map.Entry interface returning the right element as the value.

Usage

From source file:com.uber.hoodie.utilities.deltastreamer.HoodieDeltaStreamer.java

/**
 * Runs one ingestion round: resolves the checkpoint left by the previous commit,
 * pulls new data from the source, converts it to Hoodie records, and commits the write.
 *
 * @throws Exception if checkpoint resolution, ingestion, or the commit fails
 */
private void sync() throws Exception {

    // Resolve the checkpoint recorded by the last successful round, if the dataset exists.
    Optional<String> resumeCheckpointStr = Optional.empty();
    if (!commitTimelineOpt.isPresent()) {
        // First run against this path: bootstrap the dataset layout.
        Properties properties = new Properties();
        properties.put(HoodieWriteConfig.TABLE_NAME, cfg.targetTableName);
        HoodieTableMetaClient.initializePathAsHoodieDataset(FSUtils.getFs(), cfg.targetBasePath, properties);
    } else {
        Optional<HoodieInstant> lastCommit = commitTimelineOpt.get().lastInstant();
        if (lastCommit.isPresent()) {
            HoodieCommitMetadata commitMetadata = HoodieCommitMetadata
                    .fromBytes(commitTimelineOpt.get().getInstantDetails(lastCommit.get()).get());
            String previousCheckpoint = commitMetadata.getMetadata(CHECKPOINT_KEY);
            if (previousCheckpoint == null) {
                // A commit without our checkpoint marker means this table was not built by this tool.
                throw new HoodieDeltaStreamerException(
                        "Unable to find previous checkpoint. Please double check if this table "
                                + "was indeed built via delta streamer ");
            }
            resumeCheckpointStr = Optional.of(previousCheckpoint);
        }
    }
    log.info("Checkpoint to resume from : " + resumeCheckpointStr);

    // Pull new input since the checkpoint; bail out early when the source has nothing.
    Pair<Optional<JavaRDD<GenericRecord>>, String> dataAndCheckpoint = source.fetchNewData(resumeCheckpointStr,
            cfg.maxInputBytes);
    if (!dataAndCheckpoint.getKey().isPresent()) {
        log.info("No new data, nothing to commit.. ");
        return;
    }

    // Wrap each Avro record into a HoodieRecord carrying its key and ordering payload.
    JavaRDD<HoodieRecord> records = dataAndCheckpoint.getKey().get().map(gr -> {
        HoodieRecordPayload payload = UtilHelpers.createPayload(cfg.payloadClassName, gr,
                (Comparable) gr.get(cfg.sourceOrderingField));
        return new HoodieRecord<>(keyGenerator.getKey(gr), payload);
    });

    // Start a commit and perform the configured write operation.
    HoodieWriteConfig hoodieCfg = getHoodieClientConfig(cfg.hoodieClientProps);
    HoodieWriteClient client = new HoodieWriteClient<>(jssc, hoodieCfg);
    String commitTime = client.startCommit();
    log.info("Starting commit  : " + commitTime);

    JavaRDD<WriteStatus> writeStatusRDD;
    if (cfg.operation == Operation.INSERT) {
        writeStatusRDD = client.insert(records, commitTime);
    } else if (cfg.operation == Operation.UPSERT) {
        writeStatusRDD = client.upsert(records, commitTime);
    } else {
        throw new HoodieDeltaStreamerException("Unknown operation :" + cfg.operation);
    }

    // Persist the new checkpoint alongside the commit metadata.
    // TODO(vc): Support better error handlers later on
    HashMap<String, String> checkpointCommitMetadata = new HashMap<>();
    checkpointCommitMetadata.put(CHECKPOINT_KEY, dataAndCheckpoint.getValue());

    boolean success = client.commit(commitTime, writeStatusRDD, Optional.of(checkpointCommitMetadata));
    if (success) {
        log.info("Commit " + commitTime + " successful!");
        // TODO(vc): Kick off hive sync from here.
    } else {
        log.info("Commit " + commitTime + " failed!");
    }
    client.close();
}

From source file:io.swagger.api.impl.ToolsApiServiceImpl.java

/**
 * @param registryId   registry id/*ww  w  .  j a v a  2 s .c  om*/
 * @param versionId    git reference
 * @param type         type of file
 * @param relativePath if null, return the primary descriptor, if not null, return a specific file
 * @param unwrap       unwrap the file and present the descriptor sans wrapper model
 * @return a specific file wrapped in a response
 */
private Response getFileByToolVersionID(String registryId, String versionId, SourceFile.FileType type,
        String relativePath, boolean unwrap) {
    // if a version is provided, get that version, otherwise return the newest
    ParsedRegistryID parsedID = new ParsedRegistryID(registryId);
    try {
        versionId = URLDecoder.decode(versionId, StandardCharsets.UTF_8.displayName());
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    }
    Entry entry = getEntry(parsedID);

    // check whether this is registered
    if (!entry.getIsPublished()) {
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }

    final Pair<io.swagger.model.Tool, Table<String, SourceFile.FileType, Object>> toolTablePair = convertContainer2Tool(
            entry);
    String finalVersionId = versionId;
    if (toolTablePair == null || toolTablePair.getKey().getVersions() == null) {
        return Response.status(Response.Status.NOT_FOUND).build();
    }
    io.swagger.model.Tool convertedTool = toolTablePair.getKey();
    final Optional<ToolVersion> first = convertedTool.getVersions().stream()
            .filter(toolVersion -> toolVersion.getName().equalsIgnoreCase(finalVersionId)).findFirst();

    Optional<? extends Version> oldFirst;
    if (entry instanceof Tool) {
        Tool toolEntry = (Tool) entry;
        oldFirst = toolEntry.getVersions().stream()
                .filter(toolVersion -> toolVersion.getName().equalsIgnoreCase(finalVersionId)).findFirst();
    } else {
        Workflow workflowEntry = (Workflow) entry;
        oldFirst = workflowEntry.getVersions().stream()
                .filter(toolVersion -> toolVersion.getName().equalsIgnoreCase(finalVersionId)).findFirst();
    }

    final Table<String, SourceFile.FileType, Object> table = toolTablePair.getValue();
    if (first.isPresent() && oldFirst.isPresent()) {
        final ToolVersion toolVersion = first.get();
        final String toolVersionName = toolVersion.getName();
        if (type == DOCKERFILE) {
            final ToolDockerfile dockerfile = (ToolDockerfile) table.get(toolVersionName,
                    SourceFile.FileType.DOCKERFILE);
            return Response.status(Response.Status.OK)
                    .type(unwrap ? MediaType.TEXT_PLAIN : MediaType.APPLICATION_JSON)
                    .entity(unwrap ? dockerfile.getDockerfile() : dockerfile).build();
        } else {
            if (relativePath == null) {
                if ((type == DOCKSTORE_WDL)
                        && (((ToolDescriptor) table.get(toolVersionName, SourceFile.FileType.DOCKSTORE_WDL))
                                .getType() == ToolDescriptor.TypeEnum.WDL)) {
                    final ToolDescriptor descriptor = (ToolDescriptor) table.get(toolVersionName,
                            SourceFile.FileType.DOCKSTORE_WDL);
                    return Response.status(Response.Status.OK)
                            .entity(unwrap ? descriptor.getDescriptor() : descriptor).build();
                } else if (type == DOCKSTORE_CWL
                        && (((ToolDescriptor) table.get(toolVersionName, SourceFile.FileType.DOCKSTORE_CWL))
                                .getType() == ToolDescriptor.TypeEnum.CWL)) {
                    final ToolDescriptor descriptor = (ToolDescriptor) table.get(toolVersionName,
                            SourceFile.FileType.DOCKSTORE_CWL);
                    return Response.status(Response.Status.OK)
                            .type(unwrap ? MediaType.TEXT_PLAIN : MediaType.APPLICATION_JSON)
                            .entity(unwrap ? descriptor.getDescriptor() : descriptor).build();
                }
                return Response.status(Response.Status.NOT_FOUND).build();
            } else {
                final Set<SourceFile> sourceFiles = oldFirst.get().getSourceFiles();
                final Optional<SourceFile> first1 = sourceFiles.stream()
                        .filter(file -> file.getPath().equalsIgnoreCase(relativePath)).findFirst();
                if (first1.isPresent()) {
                    final SourceFile entity = first1.get();
                    return Response.status(Response.Status.OK)
                            .type(unwrap ? MediaType.TEXT_PLAIN : MediaType.APPLICATION_JSON)
                            .entity(unwrap ? entity.getContent() : entity).build();
                }
            }
        }
    }

    return Response.status(Response.Status.NOT_FOUND).build();
}

From source file:alfio.manager.EventManagerIntegrationTest.java

/**
 * Verifies that shrinking a bounded category frees seats which a subsequently
 * added unbounded category can absorb via the waiting-queue distribution.
 */
@Test
public void testAddUnboundedCategoryShrinkBoundedCategory() {
    // Start with a single bounded category that owns every seat.
    List<TicketCategoryModification> categories = Collections.singletonList(new TicketCategoryModification(null,
            "default", AVAILABLE_SEATS, new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN, false, "",
            true, null, null, null, null, null));
    Pair<Event, String> eventAndUser = initEvent(categories, organizationRepository, userManager, eventManager,
            eventRepository);
    Event event = eventAndUser.getKey();
    String username = eventAndUser.getValue();

    // Shrink the original category by two seats, freeing them up.
    int categoryId = ticketCategoryRepository.findAllTicketCategories(event.getId()).get(0).getId();
    TicketCategoryModification shrink = new TicketCategoryModification(categoryId, "default",
            AVAILABLE_SEATS - 2, new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN, false, "",
            true, null, null, null, null, null);
    eventManager.updateCategory(categoryId, event.getId(), shrink, username);

    // Now add an unbounded category that can pick up the freed seats.
    TicketCategoryModification unbounded = new TicketCategoryModification(null, "default", 10,
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN, false, "",
            false, null, null, null, null, null);
    eventManager.insertCategory(event.getId(), unbounded, username);

    waitingQueueSubscriptionProcessor.distributeAvailableSeats(event);
    List<Ticket> freeTickets = ticketRepository.findFreeByEventId(event.getId());
    assertNotNull(freeTickets);
    assertFalse(freeTickets.isEmpty());
    assertEquals(AVAILABLE_SEATS, freeTickets.size());
    assertEquals(18, freeTickets.stream()
            .filter(t -> t.getCategoryId() != null && t.getCategoryId() == categoryId).count());
    assertEquals(2, freeTickets.stream().filter(t -> t.getCategoryId() == null).count());
}

From source file:com.act.lcms.db.analysis.ChemicalToMapOfMetlinIonsToIntensityTimeValues.java

/**
 * This function plots the positive ion and negative control ions for a given metlin ion mass per plot.
 * @param searchMz - The mz value which is used for finding spectra.
 * @param plottingDirectory - The directory where the plots will live.
 * @param positiveChemical - The positive chemical is used to make sure it is placed at the top of the spectra plot.
 * @param standardWells - The wells whose ids uniquely identify this pos/neg combination in the file names.
 * @return This function returns a map of ion to absolute paths where the plot lives.
 * @throws IOException if writing a plot file fails
 */
public Map<String, String> plotPositiveAndNegativeControlsForEachMetlinIon(Pair<String, Double> searchMz,
        String plottingDirectory, String positiveChemical, List<StandardWell> standardWells)
        throws IOException {
    Map<String, String> ionToPlotPath = new HashMap<>();
    Map<String, Double> perChemicalMaxIntensity = new HashMap<>();
    WriteAndPlotMS1Results plotter = new WriteAndPlotMS1Results();

    // Order the chemicals so the positive chemical ends up at the top of each spectra plot.
    ArrayList<String> plotOrder = new ArrayList<>(this.peakData.keySet().size());
    for (String chemical : peakData.keySet()) {
        if (chemical.equals(positiveChemical)) {
            plotOrder.add(0, chemical);
        } else {
            plotOrder.add(chemical);
        }
    }

    // Well ids joined into a path fragment that uniquely identifies the pos/neg wells for the chemical.
    StringBuilder wellIdFragment = new StringBuilder();
    for (StandardWell well : standardWells) {
        wellIdFragment.append(well.getId()).append("-");
    }

    for (String ion : this.peakData.get(searchMz.getLeft()).keySet()) {
        LinkedHashMap<String, List<XZ>> spectraByChemical = new LinkedHashMap<>();
        Map<String, Double> massByChemical = new HashMap<>();
        Double globalMaxIntensity = 0.0d;

        // Collect each chemical's trace for this ion and track per-chemical and global peak maxima.
        for (String chemical : plotOrder) {
            List<XZ> ionValues = this.peakData.get(chemical).get(ion);
            spectraByChemical.put(chemical, ionValues);
            Double chemicalMax = findPeakMaxIntensity(ionValues);
            globalMaxIntensity = Math.max(globalMaxIntensity, chemicalMax);
            perChemicalMaxIntensity.put(chemical, chemicalMax);
            massByChemical.put(chemical, searchMz.getValue());
        }

        String relativePath = searchMz.getLeft() + "_" + wellIdFragment.toString() + "_" + ion;
        String absolutePathWithoutExtension = new File(plottingDirectory, relativePath).getAbsolutePath();

        plotter.plotSpectra(spectraByChemical, globalMaxIntensity, perChemicalMaxIntensity, massByChemical,
                absolutePathWithoutExtension, this.FMT, false, false);
        ionToPlotPath.put(ion, relativePath + "." + this.FMT);
    }

    return ionToPlotPath;
}

From source file:com.samsung.sjs.backend.IRCBackend.java

/**
 * Lowers the whole IR program into a single C compilation unit.
 *
 * <p>Emission order matters here: includes, the extern "C" prologue, vtable maps
 * exported to other runtime modules, a placeholder compound statement for vtable
 * arrays (backpatched at the end), forward declarations, FFI extern declarations,
 * module-load-hook externs, property-offset #defines, a placeholder for string
 * literal declarations, the translated top-level statements, and finally the
 * vtable arrays and type-tag declarations collected during translation.
 *
 * @return the generated C compilation unit
 */
public CompilationUnit compile() {
    CompilationUnit ccode = new CompilationUnit();
    ccode.addStatement(new IncludeDirective("runtime.h"));
    ccode.addStatement(new IncludeDirective("ffi.h"));
    ccode.addStatement(new IncludeDirective("globals.h"));
    ccode.addStatement(new IncludeDirective("map.h"));

    if (options.eflEnabled()) {
        ccode.addStatement(new IncludeDirective("Elementary.h"));
    }

    ccode.exportString("#ifdef __cplusplus");
    ccode.exportString("extern \"C\" {");
    ccode.exportString("#endif // __cplusplus");
    if (options.isGuestRuntime()) {
        ccode.exportString("extern int __sjs_main(int);");
    }

    // Process vtables exported to other runtime modules (e.g., for console)
    for (Map.Entry<String, List<String>> table_req : ffi.getTablesToGenerate()) {
        ccode.addStatement(generateObjectMap(table_req.getKey(), table_req.getValue()));
        ccode.exportIndirectionMap(table_req.getKey());
    }

    // Placeholder; populated at the end once all vtables have been observed.
    com.samsung.sjs.backend.asts.c.CompoundStatement vtables = new com.samsung.sjs.backend.asts.c.CompoundStatement();
    ccode.addStatement(vtables);

    ccode.addStatement(new IncludeDirective("array.h"));
    BackPatchDeclarations bpd = new BackPatchDeclarations();
    ccode.addStatement(bpd);

    // insert extern declarations for FFI entities
    for (Map.Entry<String, FFILinkage.LinkEntry> extern : ffi.entrySet()) {
        Type t = toplevel.get(extern.getKey());
        if (t == null) {
            System.err.println("BAD: FFI linkage declaration for [" + extern.getKey()
                    + "], but toplevel has no type for it");
            // BUGFIX: skip this entry — the original fell through and dereferenced the
            // null type on the next line, guaranteeing a NullPointerException.
            continue;
        }
        if (t.isIntersectionType()) {
            continue;
            // TODO: Implement runtime representation for intersection of multiple types
        }
        // NOTE(review): ct is unused in the boxed branch below, but convert() may record
        // observed C types for later typedef generation, so it is invoked unconditionally
        // — confirm before hoisting it into the else branch.
        CType ct = getTypeConverter().convert(t);
        if (extern.getValue().boxed) {
            ccode.addStatement(new com.samsung.sjs.backend.asts.c.ExpressionStatement(
                    new InlineCCode("extern value_t* " + extern.getKey())));
        } else {
            ccode.addStatement(new com.samsung.sjs.backend.asts.c.ExpressionStatement(
                    new InlineCCode("extern " + ct.toSource() + " " + extern.getKey())));
        }
    }

    // extern decls for module load hooks
    for (String hook : modsys.getModuleLoadCalls()) {
        ccode.addStatement(new com.samsung.sjs.backend.asts.c.ExpressionStatement(
                new InlineCCode("extern value_t " + hook + "()")));
    }

    // This emits field #defines in the C code and header
    exportPropertyOffsets(ccode, field_codes);
    // Need to include interop *after* generating the property lookup table
    if (options.interopEnabled()) {
        ccode.addStatement(new IncludeDirective("interop.h"));
    }

    // Make space for string literal decls; filled in after the main translation pass.
    com.samsung.sjs.backend.asts.c.CompoundStatement strlits = new com.samsung.sjs.backend.asts.c.CompoundStatement();
    ccode.addStatement(strlits);
    this.string_literal_decls = strlits;

    // Translate each top-level IR node; function declarations are also pre-declared
    // via bpd so forward references resolve.
    for (IRNode n : program) {
        CNode result = n.accept(this);
        if (debug) {
            System.err.println("Converting [" + n.toSource(0) + "]");
            System.err.println(">>> [" + result.toSource(0) + "]");
        }
        if (result instanceof FunctionDeclaration) {
            bpd.preDeclare((FunctionDeclaration) result);
        }
        ccode.addStatement((com.samsung.sjs.backend.asts.c.Statement) result);
    }
    // Now that we've observed all anonymous C types, we can backtrack to generate some typedefs

    ccode.exportString("#ifdef __cplusplus");
    ccode.exportString("} // extern C");
    ccode.exportString("#endif // __cplusplus");

    // Backpatch the vtable placeholder with one int array per observed vtable.
    for (Map.Entry<Integer, Set<Pair<int[], Integer>>> entry : vtables_by_hash.entrySet()) {
        for (Pair<int[], Integer> vt_and_id : entry.getValue()) {
            int[] vt = vt_and_id.getKey();
            int i = vt_and_id.getValue();
            CArrayLiteral arr = new CArrayLiteral();
            for (int x = 0; x < vt.length; x++) {
                arr.addElement(new com.samsung.sjs.backend.asts.c.IntLiteral(vt[x]));
            }
            // TODO: Refactor so we're not doing this hideous "int <name>[n] = " gen here
            com.samsung.sjs.backend.asts.c.VariableDeclaration vd =
                    new com.samsung.sjs.backend.asts.c.VariableDeclaration(false, new CInteger());
            vd.addVariable(new Variable("__vtable_id_" + i + "[]"), arr);
            vtables.addStatement(vd);
        }
    }

    // Flush the declarations collected by the type-tag subsystem into the
    // string-literal placeholder, in dependency order.
    for (String s : tts.getForwardDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }
    for (String s : tts.getPropArrayDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }
    for (String s : tts.getFieldArrayDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }
    for (String s : tts.getArgArrayDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }
    for (String s : tts.getCodeDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }
    for (String s : tts.getTagDecls()) {
        strlits.addExpressionStatement(new InlineCCode(s));
    }

    return ccode;
}

From source file:de.tudarmstadt.tk.statistics.report.ReportGenerator.java

public String createPlainReport() {
    // Set locale to English globally to make reports independent of the
    // machine thei're created on, e.g. use "." as decimal points on any
    // machine/*from   ww w .j a  v a 2s.  c  om*/
    Locale.setDefault(Locale.ENGLISH);

    StringBuilder report = new StringBuilder();

    //
    // Evaluation Overview
    //
    report.append("###\n");
    report.append("Evaluation Overview\n");
    report.append("###\n\n");

    int nModels = evalResults.getSampleData().getModelMetadata().size();
    ArrayList<String> measures = evalResults.getMeasures();
    String ref = "tbl:models";

    // Separate training/testing datasets
    List<String> trainingDataList = new ArrayList<String>();
    List<String> testingDataList = new ArrayList<String>();
    List<Pair<String, String>> datasets = evalResults.getSampleData().getDatasetNames();
    Iterator<Pair<String, String>> itp = datasets.iterator();
    while (itp.hasNext()) {
        Pair<String, String> trainTest = itp.next();
        trainingDataList.add(trainTest.getKey());
        if (trainTest.getValue() != null) {
            testingDataList.add(trainTest.getValue());
        }
    }
    Set<String> trainingDataSet = new HashSet<String>(trainingDataList);
    Set<String> testingDataSet = new HashSet<String>(testingDataList);

    String pipelineDescription = null;
    String sampleOrigin = "per CV";

    ReportTypes pipelineType = this.evalResults.getSampleData().getPipelineType();
    switch (pipelineType) {
    // One-domain n-fold CV (ReportData=per Fold)
    case CV:
        pipelineDescription = String.format("%d-fold cross validation",
                evalResults.getSampleData().getnFolds());
        sampleOrigin = "per fold ";
        break;
    case MULTIPLE_CV:
        pipelineDescription = String.format("%dx%s repeated cross validation",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds());
        break;
    case CV_DATASET_LVL:
        pipelineDescription = String.format("%d-fold cross validation over %d datasets",
                evalResults.getSampleData().getnFolds(), trainingDataSet.size());
        break;
    case MULTIPLE_CV_DATASET_LVL:
        pipelineDescription = String.format("%dx%s repeated cross validation over %d datasets",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds(),
                trainingDataSet.size());
        sampleOrigin = "per dataset";
        break;
    case TRAIN_TEST_DATASET_LVL:
        // In the train/test scenario, the number of datasets only includes
        // distinct ones
        Set<String> allDataSets = new HashSet<String>(testingDataSet);
        allDataSets.addAll(trainingDataSet);
        pipelineDescription = String.format("Train/Test over %d datasets", allDataSets.size());
        sampleOrigin = "per dataset";
        break;
    default:
        pipelineDescription = "!unknown pipeline type!";
        sampleOrigin = "!unknown pipeline type!";
        break;
    }

    boolean isBaselineEvaluation = evalResults.isBaselineEvaluation();
    report.append(String.format("The system performed a %s for the following %d models. \n",
            pipelineDescription, nModels));
    if (isBaselineEvaluation) {
        report.append(String.format("The models were compared against the first baseline model. \n",
                pipelineDescription, nModels));
    } else {
        report.append(
                String.format("The models were compared against each other. \n", pipelineDescription, nModels));
    }

    ArrayList<Pair<String, String>> modelMetadata = evalResults.getSampleData().getModelMetadata();
    for (int modelIndex = 0; modelIndex < modelMetadata.size(); modelIndex++) {
        String[] algorithm = modelMetadata.get(modelIndex).getKey().split("\\.");
        String modelAlgorithm = algorithm[algorithm.length - 1];
        String modelFeatureSet = modelMetadata.get(modelIndex).getValue();
        report.append(String.format("M%d: %s; %s\n", modelIndex, modelAlgorithm, modelFeatureSet));
    }

    // List test/training datasets. Consider the case when these sets are
    // different.
    if (testingDataSet.isEmpty()) {
        if (trainingDataSet.size() == 1) {
            report.append(
                    String.format("\nThe models were evaluated on the dataset %s. ", trainingDataList.get(0)));
        } else {
            report.append(String.format("\nThe models were evaluated on the datasets %s. ",
                    this.createEnumeration(trainingDataList)));
        }
    } else {
        if (trainingDataSet.size() == 1 && testingDataSet.size() == 1) {
            report.append(
                    String.format("\nThe models were trained on the dataset %s and tested on the dataset %s. ",
                            trainingDataList.get(0), testingDataList.get(0)));
        } else if (trainingDataSet.size() > 1 && testingDataSet.size() == 1) {
            report.append(String.format(
                    "\nThe models were trained on the datasets %s and tested on the dataset %s. ",
                    this.createEnumeration(new ArrayList<String>(trainingDataSet)), testingDataList.get(0)));
        } else if (trainingDataSet.size() == 1 && testingDataSet.size() > 1) {
            report.append(String.format(
                    "\nThe models were trained on the dataset %s and tested on the datasets %s. ",
                    trainingDataList.get(0), this.createEnumeration(new ArrayList<String>(testingDataSet))));
        } else {
            report.append(String.format(
                    "\nThe models were trained on the datasets %s and tested on the datasets %s. ",
                    this.createEnumeration(new ArrayList<String>(trainingDataSet)),
                    this.createEnumeration(new ArrayList<String>(testingDataSet))));
        }
    }
    report.append(String.format("Their performance was assessed with the %s", createEnumeration(measures)));

    //
    // Results (for each measure separately)
    //
    report.append("\n\n###\n"); // All previous floats must be placed before
    // this point
    report.append("Results\n");
    report.append("###");

    for (int i = 0; i < measures.size(); i++) {

        // Continue for McNemar contingency matrix
        String measure = measures.get(i);
        if (!evalResults.getSampleData().getSamples().containsKey(measure)) {
            continue;
        }

        // Samples
        report.append("\n\n#\n");
        report.append(String.format("Evaluation for %s. \n", measure));
        report.append("#\n\n");

        report.append("Samples: \n");

        ArrayList<ArrayList<Double>> models = evalResults.getSampleData().getSamples().get(measure);
        for (int modelId = 0; i < models.size(); i++) {
            ArrayList<Double> samples = models.get(modelId);
            report.append(String.format("C%d: ", modelId));
            for (int j = 0; j < samples.size(); j++) {
                report.append(String.format("%.3f;", samples.get(j)));
            }
            report.append("\n");
        }
        report.append("\n");

        // Test results
        for (String testType : new String[] { "Parametric", "Non-Parametric" }) {
            report.append(String.format("%s Testing\n", testType));

            Pair<String, AbstractTestResult> result = null;
            if (testType.equals("Parametric")) {
                result = evalResults.getParametricTestResults().get(measure);
            } else {
                result = evalResults.getNonParametricTestResults().get(measure);
            }

            // Use pretty-print method descriptor if specified
            String method = result.getKey();
            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
            }

            TestResult r = (TestResult) result.getValue();
            report.append(String.format("The system compared the %d models using the %s. ", nModels, method));

            if (r != null && !Double.isNaN(r.getpValue())) {

                // A priori test: assumptions
                boolean assumptionViolated = false;
                Iterator<String> it = r.getAssumptions().keySet().iterator();
                while (it.hasNext()) {
                    String assumption = it.next();
                    TestResult at = (TestResult) r.getAssumptions().get(assumption);
                    if (at == null) {
                        report.append(String.format("Testing for %s failed. ", assumption));
                        assumptionViolated = true;
                        continue;
                    }
                    if (Double.isNaN(at.getpValue())) {
                        report.append(
                                String.format("Testing for %s using %s failed. ", assumption, at.getMethod()));
                        assumptionViolated = true;
                        continue;
                    }
                    double ap = at.getpValue();

                    if (ap <= this.significance_low) {
                        assumptionViolated = true;
                    }

                    // Verbalize result according to p value
                    Pair<String, Double> verbalizedP = verbalizeP(ap, true);

                    report.append(String.format("%s %s violation of %s (p=%f, alpha=%f). ", at.getMethod(),
                            verbalizedP.getKey(), assumption, ap, verbalizedP.getValue()));

                }

                if (assumptionViolated) {
                    report.append(
                            "Given that the assumptions are violated, the following test may be corrupted. ");
                }

                // A Priori test results
                Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
                report.append(String.format(
                        "The %s %s differences between the performances of the models (p=%f, alpha=%f).\n\n",
                        method, verbalizedP.getKey(), r.getpValue(), verbalizedP.getValue()));

                // Post-hoc test for >2 models (pairwise comparisons)
                if (evalResults.getSampleData().getModelMetadata().size() > 2) {

                    Pair<String, AbstractTestResult> postHocResult = null;
                    HashMap<Integer, TreeSet<Integer>> postHocOrdering = null;
                    if (testType.equals("Parametric")) {
                        postHocResult = evalResults.getParametricPostHocTestResults().get(measure);
                        postHocOrdering = evalResults.getParameticPostHocOrdering().get(measure);
                    } else {
                        postHocResult = evalResults.getNonParametricPostHocTestResults().get(measure);
                        postHocOrdering = evalResults.getNonParameticPostHocOrdering().get(measure);
                    }
                    method = postHocResult.getKey();
                    if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                        method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
                    }

                    PairwiseTestResult rPostHoc = (PairwiseTestResult) postHocResult.getValue();
                    report.append(String.format("The system performed the %s post-hoc. ", method));

                    if (rPostHoc == null) {
                        report.append("The test failed. ");
                        continue;
                    }

                    // Assumptions
                    boolean assumptionsViolated = false;
                    it = rPostHoc.getAssumptions().keySet().iterator();
                    while (it.hasNext()) {
                        String assumption = it.next();
                        PairwiseTestResult at = (PairwiseTestResult) rPostHoc.getAssumptions().get(assumption);
                        if (at == null) {
                            report.append(String.format("Testing for %s failed. ", assumption));
                            assumptionsViolated = true;
                            continue;
                        }

                        report.append(String.format("\nTesting for %s using %s returned p-values:\n%s",
                                assumption, at.getMethod(), this.pairwiseResultsToString(at.getpValue())));

                        // Create table with pairwise p-values for
                        // assumption testing
                        double[][] ap = at.getpValue();
                        double max = getMax(ap);
                        double min = getMin(ap);
                        verbalizedP = verbalizeP(min, true);
                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format("%s partly %s violation of %s (alpha=%.2f).\n",
                                    at.getMethod(), verbalizedP.getKey(), assumption, verbalizedP.getValue()));
                        } else {
                            report.append(String.format("%s %s violation of %s (alpha=%.2f).\n", at.getMethod(),
                                    verbalizedP.getKey(), assumption, verbalizedP.getValue()));
                        }

                        if (min <= this.significance_low) {
                            assumptionsViolated = true;
                        }

                    }

                    if (assumptionViolated) {
                        report.append(
                                "Given that the assumptions are violated, the following test may be corrupted. ");
                    }

                    // Result
                    double[][] ap = rPostHoc.getpValue();
                    report.append(
                            String.format("P-values:\n%s", this.pairwiseResultsToString(rPostHoc.getpValue())));

                    // Already fetch pairwise adjustments here in order to
                    // determine choice of words
                    double max = getMax(ap);
                    double min = getMin(ap);
                    verbalizedP = verbalizeP(min, false);
                    ArrayList<StatsConfigConstants.CORRECTION_VALUES> adjustments = new ArrayList<StatsConfigConstants.CORRECTION_VALUES>(
                            rPostHoc.getpValueCorrections().keySet());
                    String adjustWord = "";
                    if (adjustments.size() > 0) {
                        adjustWord = " for non-adjusted p-values";
                    }
                    if ((max > significance_low && min <= significance_low)
                            || (max > significance_medium && min <= significance_medium)
                            || (max > significance_high && min <= significance_high)) {
                        // partly significant to degree as specified by
                        // verbalized p-value
                        report.append(String.format(
                                "The %s partly %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    } else {
                        report.append(String.format(
                                "The %s %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    }

                    // Determine ordering of models
                    String ordering = getModelOrderingRepresentation(postHocOrdering);
                    report.append(ordering);
                    report.append("\n\n");

                    // Pairwise adjustments
                    if (adjustments.size() > 0) {
                        double[] minAdjustments = new double[adjustments.size()];
                        double[] maxAdjustments = new double[adjustments.size()];
                        for (int j = 0; j < adjustments.size(); j++) {
                            StatsConfigConstants.CORRECTION_VALUES adjustmentMethod = adjustments.get(j);
                            double[][] correctedP = rPostHoc.getpValueCorrections().get(adjustmentMethod);
                            String am = adjustmentMethod.name();
                            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(am)) {
                                am = StatsConfigConstants.PRETTY_PRINT_METHODS.get(am);
                            }
                            report.append(String.format("\nAdjusted p-values according to %s:\n%s", am,
                                    this.pairwiseResultsToString(correctedP)));

                            minAdjustments[j] = getMin(correctedP);
                            maxAdjustments[j] = getMax(correctedP);
                        }

                        min = getMin(minAdjustments);
                        max = getMax(maxAdjustments);
                        verbalizedP = verbalizeP(min, false);

                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format(
                                    "It partly %s differences for adjusted p-values (alpha=%.2f$).\n\n ",
                                    verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        } else {
                            report.append(
                                    String.format("It %s differences for adjusted p-values (alpha=%.2f$).\n\n ",
                                            verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        }
                    }
                }
            } else {
                report.append(String.format("The %s failed.", method));
            }
        }
    }

    //
    // Contingency table and McNemar results if this test was performed
    //
    if (evalResults.getNonParametricTest().equals("McNemar")) {
        String measure = "Contingency Table";
        String testType = "Non-Parametric";
        report.append("\n\n#\n");
        report.append("Evaluation for Contingency Table\n");
        report.append("#\n\n");

        int[][] contingencyMatrix = evalResults.getSampleData().getContingencyMatrix();
        if (evalResults.getSampleData().getPipelineType() == ReportTypes.MULTIPLE_CV) {
            report.append(String.format(
                    "Contingency table drawn from the %s and the %d models. The correctly and incorrectly classified instances per fold were averaged over all repetitions:\n%s\n",
                    pipelineDescription, nModels, this.contingencyMatrixToString(contingencyMatrix)));
        } else {
            report.append(String.format("Contingency table drawn from the %s and the %d models:\n%s\n",
                    pipelineDescription, nModels, this.contingencyMatrixToString(contingencyMatrix)));
        }

        // Test results
        report.append(String.format("%s Testing\n", testType));
        report.append(String.format("The system compared the %d models using the McNemar test. ", nModels));
        Pair<String, AbstractTestResult> result = evalResults.getNonParametricTestResults().get(measure);

        TestResult r = (TestResult) result.getValue();
        if (r != null && !Double.isNaN(r.getpValue())) {
            StringBuilder parameters = new StringBuilder();
            Iterator<String> it = r.getParameter().keySet().iterator();
            while (it.hasNext()) {
                String parameter = it.next();
                double value = r.getParameter().get(parameter);
                parameters.append(String.format("%s=%.3f, ", parameter, value));
            }

            // Verbalize result according to p value
            Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
            report.append(String.format(
                    "The test %s differences between the performances of the models (%sp=%.3f, alpha=%.2f).\\\\ \n",
                    verbalizedP.getKey(), parameters.toString(), r.getpValue(), verbalizedP.getValue()));

        } else {
            report.append("The test failed.\n");
        }
    }

    return report.toString();
}

From source file:de.tudarmstadt.tk.statistics.report.ReportGenerator.java

/**
 * Creates a report of the statistical evaluation in the Latex format. The
 * results of the statistical evaluation are taken from the
 * {@link EvaluationResults} object stored in this instance's
 * {@code evalResults} field.
 *
 * @param outputFolder
 *            the folder where the report will be written later, used to
 *            store related images etc.
 * @return a String representing the report of the statistical evaluation
 *         in Latex format
 */
public String createLatexReport(File outputFolder) {
    // Set locale to English globally to make reports independent of the
    // machine they're created on, e.g. use "." as decimal points on any
    // machine
    Locale.setDefault(Locale.ENGLISH);
    StringBuilder report = new StringBuilder();
    Statistics stats = Statistics.getInstance(true);
    HashMap<String, String> methodsSummary = new HashMap<String, String>();
    HashMap<String, HashMap<String, List<String>>> testSummary = new HashMap<String, HashMap<String, List<String>>>();
    ArrayList<String[]> figures = new ArrayList<String[]>();
    testSummary.put("Parametric", new HashMap<String, List<String>>());
    testSummary.put("Non-Parametric", new HashMap<String, List<String>>());
    String outputFolderPath = "";
    if (outputFolder != null) {
        outputFolderPath = outputFolder.getAbsolutePath();
    }

    //
    // Header
    //
    // Packages
    report.append("\\documentclass[a4paper,12pt]{article}\n");
    report.append("\\usepackage[english]{babel}\n");
    report.append("\\usepackage[utf8]{inputenc}\n");
    report.append("\\usepackage{graphicx}\n");
    report.append("\\usepackage{titlesec}\n");
    report.append("\\usepackage{caption}\n");
    report.append("\\usepackage{subcaption}\n");
    report.append("\\usepackage{adjustbox}\n");
    report.append("\\usepackage{placeins}\n");
    report.append("\\usepackage{longtable}\n");
    report.append("\\usepackage{morefloats}\n");
    // Title definition
    report.append("\\titleformat*{\\section}{\\large\\bfseries}\n");
    report.append("\\titleformat*{\\subsection}{\\normalsize\\bfseries}\n");
    report.append("\\titleformat*{\\subsubsection}{\\vspace{-0.3cm}\\normalsize\\bfseries}\n");
    report.append("\\title{Statistical Evaluation Report}\n");
    report.append("\\date{\\vspace{-10ex}}\n");
    report.append("\\begin{document}\n");
    report.append("\\maketitle\n");

    //
    // Evaluation Overview
    //
    report.append("\\section{Evaluation Overview}");

    int nModels = evalResults.getSampleData().getModelMetadata().size();
    ArrayList<String> measures = evalResults.getMeasures();
    int nSamples = evalResults.getSampleData().getSamples().get(measures.get(0)).get(0).size();
    String ref = "tbl:models";

    // Separate training/testing datasets
    List<String> trainingDataList = new ArrayList<String>();
    List<String> testingDataList = new ArrayList<String>();
    List<Pair<String, String>> datasets = evalResults.getSampleData().getDatasetNames();
    Iterator<Pair<String, String>> itp = datasets.iterator();
    while (itp.hasNext()) {
        Pair<String, String> trainTest = itp.next();
        trainingDataList.add(trainTest.getKey());
        if (trainTest.getValue() != null) {
            testingDataList.add(trainTest.getValue());
        }
    }
    Set<String> trainingDataSet = new HashSet<String>(trainingDataList);
    Set<String> testingDataSet = new HashSet<String>(testingDataList);

    String pipelineDescription = null;
    String sampleOrigin = "per CV";

    ReportTypes pipelineType = this.evalResults.getSampleData().getPipelineType();
    switch (pipelineType) {
    // One-domain n-fold CV (ReportData=per Fold)
    case CV:
        pipelineDescription = String.format("%d-fold cross validation",
                evalResults.getSampleData().getnFolds());
        sampleOrigin = "per fold ";
        break;
    case MULTIPLE_CV:
        pipelineDescription = String.format("%dx%s repeated cross validation",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds());
        break;
    case CV_DATASET_LVL:
        pipelineDescription = String.format("%d-fold cross validation over %d datasets",
                evalResults.getSampleData().getnFolds(), trainingDataSet.size());
        break;
    case MULTIPLE_CV_DATASET_LVL:
        pipelineDescription = String.format("%dx%s repeated cross validation over %d datasets",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds(),
                trainingDataSet.size());
        sampleOrigin = "per dataset";
        break;
    case TRAIN_TEST_DATASET_LVL:
        // In the train/test scenario, the number of datasets only includes
        // distinct ones
        Set<String> allDataSets = new HashSet<String>(testingDataSet);
        allDataSets.addAll(trainingDataSet);
        pipelineDescription = String.format("Train/Test over %d datasets", allDataSets.size());
        sampleOrigin = "per dataset";
        break;
    default:
        pipelineDescription = "!unknown pipeline type!";
        sampleOrigin = "!unknown pipeline type!";
        break;
    }

    boolean isBaselineEvaluation = evalResults.isBaselineEvaluation();
    report.append(String.format("The system performed a %s for the %d models in Tbl \\ref{%s}. ",
            pipelineDescription, nModels, ref));
    if (isBaselineEvaluation) {
        report.append(String.format("The models were compared against the first baseline model. \n",
                pipelineDescription, nModels, ref));
    } else {
        report.append(String.format("The models were compared against each other. \n", pipelineDescription,
                nModels, ref));
    }

    String[][] values = new String[nModels][3];
    for (int r = 0; r < nModels; r++) {
        values[r][0] = String.format("M%d", r);
        // Remove package prefix for algorithms, e.g. shorten "trees.J48" to "J48".
        String[] algorithm = evalResults.getSampleData().getModelMetadata().get(r).getKey().split("\\.");
        values[r][1] = escapeLatexCharacters(algorithm[algorithm.length - 1]);
        values[r][2] = escapeLatexCharacters(evalResults.getSampleData().getModelMetadata().get(r).getValue());
    }

    String table = createLatexTable("Evaluated models with classifier algorithm and feature sets", ref,
            new String[] { "Index", "Algorithm", "Feature Set" }, "|l|l|p{11cm}|", values);
    report.append(table);

    // List test/training datasets. Consider the case when these sets are
    // different.
    if (testingDataSet.isEmpty()) {
        if (trainingDataSet.size() == 1) {
            report.append(
                    String.format("The models were evaluated on the dataset %s. ", trainingDataList.get(0)));
        } else {
            report.append(String.format("The models were evaluated on the datasets %s. ",
                    this.createEnumeration(trainingDataList)));
        }
    } else {
        if (trainingDataSet.size() == 1 && testingDataSet.size() == 1) {
            report.append(
                    String.format("The models were trained on the dataset %s and tested on the dataset %s. ",
                            trainingDataList.get(0), testingDataList.get(0)));
        } else if (trainingDataSet.size() > 1 && testingDataSet.size() == 1) {
            report.append(String.format(
                    "The models were trained on the datasets %s and tested on the dataset %s. ",
                    this.createEnumeration(new ArrayList<String>(trainingDataSet)), testingDataList.get(0)));
        } else if (trainingDataSet.size() == 1 && testingDataSet.size() > 1) {
            report.append(String.format(
                    "The models were trained on the dataset %s and tested on the datasets %s. ",
                    trainingDataList.get(0), this.createEnumeration(new ArrayList<String>(testingDataSet))));
        } else {
            report.append(
                    String.format("The models were trained on the datasets %s and tested on the datasets %s. ",
                            this.createEnumeration(new ArrayList<String>(trainingDataSet)),
                            this.createEnumeration(new ArrayList<String>(testingDataSet))));
        }
    }
    report.append(String.format("Their performance was assessed with the %s", createEnumeration(measures)));
    report.append(
            ". In the analysis, the models thus represent levels of the independent variable, while the performance measures are dependent variables.\n");

    //
    // Results (for each measure separately)
    //
    report.append("\\FloatBarrier\n"); // All previous floats must be placed
    // before this point
    report.append("\\section{Results}\n");
    report.append(String.format(
            "Throughout the report, p-values are annotated if they are significant. While {\\footnotesize *} indicates low significance ($p<\\alpha=%.2f$), the annotations {\\footnotesize **} and {\\footnotesize ***} represent medium ($p<\\alpha=%.2f$) and high significance ($p<\\alpha=%.2f$).",
            significance_low, significance_medium, significance_high));

    for (int i = 0; i < measures.size(); i++) {
        /*
         * Create table with samples for the current performance measure If
         * samples are drawn over multiple datasets, transpose table
         */
        String measure = measures.get(i);
        if (!evalResults.getSampleData().getSamples().containsKey(measure)) {
            continue;
        }
        ArrayList<ArrayList<Double>> measureSamples = evalResults.getSampleData().getSamples().get(measure);
        ArrayList<Double> averageMeasureSamples = evalResults.getSampleData().getSamplesAverage().get(measure);

        report.append("\\FloatBarrier\n");
        report.append(String.format("\\subsection{%s}\n", measure));
        ref = String.format("tbl:%s", measure.replaceAll("\\s", ""));
        report.append(String.format(
                "The %s samples drawn from the %s and the %d models are presented in Tbl. \\ref{%s}.\n",
                measure, pipelineDescription, nModels, ref));

        // Plot Box-Whisker-Diagram of samples for the current measure and add the figure to the appendix
        // Use the min/max sample value as indicators for the box-plots limits
        String filename = String.format("boxPlot%s", measure.replaceAll("\\s", ""));
        String path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
        String pathR = this.fixSlashes(path);
        String figRef = String.format("fig:boxPlot%s", measure.replaceAll("\\s", ""));
        String caption = String.format("Box-Whisker-Plot of %s samples. Red dots indicate means.", measure);
        double[][] samples = new double[nModels][];
        double minSample = Double.MAX_VALUE;
        double maxSample = Double.MIN_VALUE;
        for (int k = 0; k < nModels; k++) {
            ArrayList<Double> s = measureSamples.get(k);
            samples[k] = new double[s.size()];
            for (int j = 0; j < s.size(); j++) {
                samples[k][j] = s.get(j);
                if (minSample > s.get(j)) {
                    minSample = s.get(j);
                }
                if (maxSample < s.get(j)) {
                    maxSample = s.get(j);
                }
            }
        }
        double sampleRange = maxSample - minSample;
        int lowerLimit = (int) Math.floor(minSample - sampleRange * 0.1);
        int upperLimit = (int) Math.ceil(maxSample + sampleRange * 0.1);
        boolean successful = stats.plotBoxWhisker(samples, lowerLimit, upperLimit, pathR, measure);
        if (successful) {
            figures.add(new String[] { figRef, caption, filename });
            report.append(
                    String.format("See Fig. \\ref{%s} for a Box-Whisker plot of these samples. ", figRef));
        }

        caption = String.format("Samples of the %s drawn from the %s and the %d models", measure,
                pipelineDescription, nModels);
        switch (pipelineType) {
        case CV:
        case MULTIPLE_CV:
            values = new String[nModels + 1][nSamples + 2];
            for (int r = 0; r <= nModels; r++) {
                // First line of table = Fold indices
                if (r == 0) {
                    values[r][0] = "";
                    values[r][nSamples + 1] = "";
                    for (int f = 1; f <= nSamples; f++) {
                        values[r][f] = Integer.toString(f);
                    }
                    // Next lines with model indices, samples per fold and
                    // average measure over all samples
                } else {
                    values[r][0] = String.format("M%d", (r - 1));
                    //values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1) * 100);
                    values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1));
                    ArrayList<Double> s = measureSamples.get(r - 1);
                    for (int j = 0; j < s.size(); j++) {
                        //values[r][j + 1] = String.format("%.2f", s.get(j) * 100);
                        values[r][j + 1] = String.format("%.2f", s.get(j));
                    }
                }
            }
            if (values.length > 58) {
                table = createLatexLongTable(caption, ref,
                        new String[] { "Classifier",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nSamples, measure, sampleOrigin),
                                "Average" },
                        String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values);
            } else {
                table = createLatexTable(caption, ref,
                        new String[] { "Classifier",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nSamples, measure, sampleOrigin),
                                "Average" },
                        String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values);
            }
            break;

        case CV_DATASET_LVL:
        case MULTIPLE_CV_DATASET_LVL:
        case TRAIN_TEST_DATASET_LVL:
            values = new String[nSamples + 2][nModels + 1];
            // double[][] valuesNumeric = new double[nSamples][nModels];
            for (int r = 0; r <= nSamples + 1; r++) {
                // First line of table = Model indices
                if (r == 0) {
                    values[r][0] = "";
                    for (int j = 0; j < nModels; j++) {
                        values[r][j + 1] = String.format("M%d", (j));
                    }
                    // Last line of table = average sums
                } else if (r == nSamples + 1) {
                    values[r][0] = "Average";
                    for (int j = 0; j < nModels; j++) {
                        //values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j) * 100);
                        values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j));
                    }
                    // Next lines with model indices, samples per fold and
                    // average measure over all samples
                } else {
                    // Only print both train- and test set if there is more
                    // than one training set
                    Pair<String, String> trainTest = evalResults.getSampleData().getDatasetNames().get(r - 1);
                    if (pipelineType == ReportTypes.TRAIN_TEST_DATASET_LVL) {
                        if (trainingDataSet.size() > 1) {
                            values[r][0] = String.format("%s-%s", trainTest.getKey(), trainTest.getValue());
                        } else {
                            values[r][0] = trainTest.getValue();
                        }
                    } else {
                        values[r][0] = trainTest.getKey();
                    }
                    for (int j = 0; j < nModels; j++) {
                        ArrayList<Double> s = measureSamples.get(j);
                        //values[r][j + 1] = String.format("%.2f", s.get(r - 1) * 100);
                        values[r][j + 1] = String.format("%.2f", s.get(r - 1));
                    }
                }
            }
            if (values.length > 58) {
                table = createLatexLongTable(caption, ref,
                        new String[] { "Dataset",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure,
                                        sampleOrigin) },
                        String.format("|%s", StringUtils.repeat("l|", nModels + 1)), values);
            } else {
                table = createLatexTable(caption, ref,
                        new String[] { "Dataset",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure,
                                        sampleOrigin) },
                        String.format("|%s", StringUtils.repeat("l|", nModels + 1)), values);
            }
            break;
        }
        report.append(table);

        //
        // Results - First parametric tests, then non-parametric (2
        // iterations)
        // Print results for alls non-parametric tests except McNemar.
        // McNemar is not based on the same performance measures but on a
        // contingency matrix, which is
        // printed in a separate section.
        for (String testType : new String[] { "Parametric", "Non-Parametric" }) {
            report.append(String.format("\\subsubsection{%s Testing}", testType));

            Pair<String, AbstractTestResult> result = null;
            if (testType.equals("Parametric")) {
                result = evalResults.getParametricTestResults().get(measure);
            } else {
                result = evalResults.getNonParametricTestResults().get(measure);
            }

            // Use pretty-print method descriptor if specified
            String method = result.getKey();
            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
            }
            methodsSummary.put(testType, method);

            TestResult r = (TestResult) result.getValue();
            report.append(
                    String.format("The system compared the %d models using the \\emph{%s}. ", nModels, method));

            if (r != null && !Double.isNaN(r.getpValue())) {

                // A priori test: assumptions
                boolean assumptionViolated = false;
                Iterator<String> it = r.getAssumptions().keySet().iterator();
                while (it.hasNext()) {
                    String assumption = it.next();

                    TestResult at = (TestResult) r.getAssumptions().get(assumption);
                    if (at == null) {
                        report.append(String.format("Testing for %s failed. ", assumption));
                        assumptionViolated = true;
                        continue;
                    }
                    if (Double.isNaN(at.getpValue())) {
                        report.append(
                                String.format("Testing for %s using %s failed. ", assumption, at.getMethod()));
                        assumptionViolated = true;
                        continue;
                    }
                    double ap = at.getpValue();

                    if (ap <= this.significance_low) {
                        assumptionViolated = true;
                    }

                    // Verbalize result according to p value
                    Pair<String, Double> verbalizedP = verbalizeP(ap, true);

                    String testResultRepresentation = getTestResultRepresentation(at, verbalizedP.getValue());
                    report.append(String.format("%s %s violation of %s (%s). ", at.getMethod(),
                            verbalizedP.getKey(), assumption, testResultRepresentation));

                }

                // Create QQ-Normal diagram to support the analysis of a
                // normality assumption
                if (result.getKey().equals("DependentT") && samples.length == 2) {
                    filename = String.format("qqNormPlot%s", measure.replaceAll("\\s", ""));
                    path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
                    pathR = this.fixSlashes(path);
                    figRef = String.format("fig:qqNormPlot%s", measure.replaceAll("\\s", ""));
                    caption = String.format("QQ-Normal plot of pairwise differences between %s samples.",
                            measure);
                    double[] differences = new double[samples[0].length];
                    for (int j = 0; j < samples[0].length; j++) {
                        differences[j] = samples[0][j] - samples[1][j];
                    }
                    successful = stats.plotQQNorm(differences, "M0-M1", measure, pathR);
                    if (successful) {
                        figures.add(new String[] { figRef, caption, filename });
                        report.append(String.format("See Fig. \\ref{%s} for a QQ-Normal plot of the samples. ",
                                figRef));
                    }
                }

                if (assumptionViolated) {
                    report.append(
                            "Given that the assumptions are violated, the following test may be corrupted. ");
                }

                // A Priori test results
                // Verbalize result according to p value
                Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
                String testResultRepresentation = getTestResultRepresentation(r, verbalizedP.getValue());
                report.append(String.format(
                        "The %s %s differences between the performances of the models (%s).\\\\ \n\n ", method,
                        verbalizedP.getKey(), testResultRepresentation));

                // Store result for summary
                if (testSummary.get(testType).containsKey(verbalizedP.getKey())) {
                    testSummary.get(testType).get(verbalizedP.getKey()).add(measure);
                } else {
                    ArrayList<String> list = new ArrayList<String>();
                    list.add(measure);
                    testSummary.get(testType).put(verbalizedP.getKey(), list);
                }

                // Post-hoc test for >2 models (pairwise comparisons)
                if (evalResults.getSampleData().getModelMetadata().size() > 2) {

                    Pair<String, AbstractTestResult> postHocResult = null;
                    if (testType.equals("Parametric")) {
                        postHocResult = evalResults.getParametricPostHocTestResults().get(measure);
                    } else {
                        postHocResult = evalResults.getNonParametricPostHocTestResults().get(measure);
                    }
                    method = postHocResult.getKey();
                    if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                        method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
                    }
                    methodsSummary.put(String.format("%sPostHoc", testType), method);

                    PairwiseTestResult rPostHoc = (PairwiseTestResult) postHocResult.getValue();
                    report.append(String.format("The system performed the \\emph{%s} post-hoc. ", method));

                    if (rPostHoc == null) {
                        report.append("The test failed. ");
                        continue;
                    }

                    // Assumptions
                    boolean assumptionsViolated = false;
                    it = rPostHoc.getAssumptions().keySet().iterator();
                    while (it.hasNext()) {
                        String assumption = it.next();
                        PairwiseTestResult at = (PairwiseTestResult) rPostHoc.getAssumptions().get(assumption);
                        if (at == null) {
                            report.append(String.format("Testing for %s failed. ", assumption));
                            assumptionsViolated = true;
                            continue;
                        }

                        // Create table with pairwise p-values for
                        // assumption testing
                        double[][] ap = at.getpValue();
                        Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first
                        // element
                        // is
                        // header,
                        // second
                        // are
                        // values
                        caption = String.format("P-values from the %s for %s", at.getMethod(), measure);
                        ref = String.format("tbl:%s%s", at.getMethod().replaceAll("\\s", ""),
                                measure.replaceAll("\\s", ""));
                        table = createLatexTable(caption, ref, tableData.getKey(),
                                String.format("|%s", StringUtils.repeat("l|", nModels)), tableData.getValue());

                        double max = getMax(ap);
                        double min = getMin(ap);
                        verbalizedP = verbalizeP(min, true);
                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format(
                                    "%s partly %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n",
                                    at.getMethod(), verbalizedP.getKey(), assumption, verbalizedP.getValue(),
                                    ref));
                        } else {
                            report.append(String.format(
                                    "%s %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n", at.getMethod(),
                                    verbalizedP.getKey(), assumption, verbalizedP.getValue(), ref));
                        }
                        report.append(table);

                        if (min <= this.significance_low) {
                            assumptionsViolated = true;
                        }

                    }

                    if (assumptionViolated) {
                        report.append(
                                "Given that the assumptions are violated, the following test may be corrupted. ");
                    }

                    // Result
                    double[][] ap = rPostHoc.getpValue();
                    Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first
                    // element
                    // is
                    // header,
                    // second
                    // are
                    // values
                    caption = String.format("P-values from the %s for %s", method, measure);
                    ref = String.format("tbl:%s%s", method.replaceAll("\\s", ""),
                            measure.replaceAll("\\s", ""));
                    String formatting = null;
                    if (!isBaselineEvaluation) {
                        formatting = String.format("|%s", StringUtils.repeat("l|", nModels));
                    } else {
                        formatting = String.format("|l|l|");
                    }
                    String tablePNonAdjusted = createLatexTable(caption, ref, tableData.getKey(), formatting,
                            tableData.getValue());

                    // Already fetch pairwise adjustments here in order to
                    // determine choice of words
                    double max = getMax(ap);
                    double min = getMin(ap);
                    verbalizedP = verbalizeP(min, false);
                    ArrayList<StatsConfigConstants.CORRECTION_VALUES> adjustments = new ArrayList<StatsConfigConstants.CORRECTION_VALUES>(
                            rPostHoc.getpValueCorrections().keySet());
                    String adjustWord = "";
                    if (adjustments.size() > 0) {
                        adjustWord = " for non-adjusted p-values";
                    }
                    if ((max > significance_low && min <= significance_low)
                            || (max > significance_medium && min <= significance_medium)
                            || (max > significance_high && min <= significance_high)) {
                        // partly significant to degree as specified by
                        // verbalized p-value
                        report.append(String.format(
                                "The %s partly %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    } else {
                        report.append(String.format(
                                "The %s %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    }

                    // Determine ordering of models
                    HashMap<Integer, TreeSet<Integer>> postHocOrdering = null;
                    int[][] orderingEdgeList = null;
                    if (testType.equals("Parametric")) {
                        postHocOrdering = evalResults.getParameticPostHocOrdering().get(measure);
                        orderingEdgeList = evalResults.getParameticPostHocEdgelist().get(measure);
                    } else {
                        postHocOrdering = evalResults.getNonParameticPostHocOrdering().get(measure);
                        orderingEdgeList = evalResults.getNonParameticPostHocEdgelist().get(measure);
                    }
                    String ordering = getModelOrderingRepresentation(postHocOrdering);
                    report.append(ordering);

                    // Print graphs of ordering for the current measure and
                    // add the figure to the appendix
                    filename = String.format("graphOrdering%s%s", measure.replaceAll("\\s", ""), testType);
                    path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
                    pathR = this.fixSlashes(path);
                    figRef = String.format("fig:graphOrdering%s%s", measure.replaceAll("\\s", ""), testType);
                    caption = String.format(
                            "Directed graph of significant differences for %s, as indicated by the %s post-hoc test.",
                            measure, testType.toLowerCase());
                    // int nodes[] = new int[nModels];
                    // for(int j=0; j<nModels;j++){nodes[j]=j;};
                    successful = stats.plotGraph(orderingEdgeList, nModels, pathR);
                    if (successful) {
                        figures.add(new String[] { figRef, caption, filename });
                        report.append(String.format("The ordering is visualized in Fig. \\ref{%s}. ", figRef));
                    }

                    // Pairwise adjustments
                    String tablePAdjusted = null;
                    if (adjustments.size() > 0) {
                        String[] subcaption = new String[adjustments.size()];
                        String[] header = null;
                        String[][][] overallValues = new String[adjustments.size()][][];
                        double[] minAdjustments = new double[adjustments.size()];
                        double[] maxAdjustments = new double[adjustments.size()];
                        for (int j = 0; j < adjustments.size(); j++) {
                            StatsConfigConstants.CORRECTION_VALUES adjustmentMethod = adjustments.get(j);
                            subcaption[j] = adjustmentMethod.name();
                            double[][] correctedP = rPostHoc.getpValueCorrections().get(adjustmentMethod);
                            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(adjustmentMethod)) {
                                subcaption[j] = StatsConfigConstants.PRETTY_PRINT_METHODS.get(adjustmentMethod);
                            }
                            tableData = getPValueStringArray(correctedP, isBaselineEvaluation);
                            header = tableData.getKey();
                            overallValues[j] = tableData.getValue();
                            minAdjustments[j] = getMin(correctedP);
                            maxAdjustments[j] = getMax(correctedP);
                        }

                        caption = String.format("Adjusted p-values from the %s for %s", method, measure);
                        ref = String.format("tbl:%s%sAdjusted", method.replaceAll("\\s", ""),
                                measure.replaceAll("\\s", ""));
                        formatting = null;
                        if (!isBaselineEvaluation) {
                            formatting = String.format("|%s", StringUtils.repeat("l|", nModels));
                        } else {
                            formatting = String.format("|l|l|");
                        }
                        tablePAdjusted = createLatexSubTable(caption, subcaption, ref, header, formatting,
                                overallValues);

                        min = getMin(minAdjustments);
                        max = getMax(maxAdjustments);
                        verbalizedP = verbalizeP(min, false);

                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format(
                                    "It partly %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n\n ",
                                    verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        } else {
                            report.append(String.format(
                                    "It %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n\n ",
                                    verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        }
                    }

                    report.append(tablePNonAdjusted);
                    if (tablePAdjusted != null) {
                        report.append(tablePAdjusted);
                    }

                }
            } else {
                report.append(String.format("The %s failed.", method));
            }
        }

    }

    //
    // Contingency table and McNemar results if this test was performed
    //
    if (evalResults.getNonParametricTest().equals("McNemar")) {
        String measure = "Contingency Table";
        String testType = "Non-Parametric";
        report.append("\\FloatBarrier\n");
        report.append("\\subsection{Contingency Table}\n");

        String caption = String
                .format("Contingency table with correctly and incorrectly classified folds for %s", measure);
        if (evalResults.getSampleData().getPipelineType() == ReportTypes.MULTIPLE_CV) {
            report.append(String.format(
                    "The contingency table drawn from the %s and the %d models is listed in Tbl. \\ref{%s}. The correctly and incorrectly classified instances per fold were averaged over all repetitions. \n",
                    pipelineDescription, nModels, ref));
            caption = String.format(
                    "Averaged contingency table with correctly and incorrectly classified folds for %s",
                    measure);
        } else {
            report.append(String.format(
                    "The contingency table drawn from the %s and the %d models is listed in Tbl. \\ref{%s}.\n",
                    pipelineDescription, nModels, ref));
        }

        int[][] contingencyMatrix = evalResults.getSampleData().getContingencyMatrix();
        ref = "tbl:ContingencyMatrix";
        values = new String[][] { { "Wrong", "", "" }, { "Correct", "", "" } };
        values[0][1] = String.valueOf(contingencyMatrix[0][0]);
        values[0][2] = String.valueOf(contingencyMatrix[0][1]);
        values[1][1] = String.valueOf(contingencyMatrix[1][0]);
        values[1][2] = String.valueOf(contingencyMatrix[1][1]);

        table = createLatexTable(caption, ref, new String[] { "M0/M1", "Wrong", "Correct" }, "|l|l|l|", values);
        report.append(table);

        // Test results
        report.append(String.format("\\subsubsection{%s Testing}", testType));
        report.append(
                String.format("The system compared the %d models using the \\emph{McNemar test}. ", nModels));
        Pair<String, AbstractTestResult> result = evalResults.getNonParametricTestResults().get(measure);

        // Use pretty-print method descriptor if specified
        String method = result.getKey();
        if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
            method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
        }
        methodsSummary.put(testType, method);

        TestResult r = (TestResult) result.getValue();
        if (r != null && !Double.isNaN(r.getpValue())) {
            StringBuilder parameters = new StringBuilder();
            Iterator<String> it = r.getParameter().keySet().iterator();
            while (it.hasNext()) {
                String parameter = it.next();
                double value = r.getParameter().get(parameter);
                parameters.append(String.format("%s=%.3f, ", parameter, value));
            }

            // Verbalize result according to p value
            Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
            report.append(String.format(
                    "The test %s differences between the performances of the models ($%sp=%.3f, \\alpha=%.2f$).\\\\ \n",
                    verbalizedP.getKey(), parameters.toString(), r.getpValue(), verbalizedP.getValue()));
            // Store result for summary
            if (testSummary.get(testType).containsKey(verbalizedP.getKey())) {
                testSummary.get(testType).get(verbalizedP.getKey()).add(measure);
            } else {
                ArrayList<String> list = new ArrayList<String>();
                list.add(measure);
                testSummary.get(testType).put(verbalizedP.getKey(), list);
            }

        } else {
            report.append("The test failed.\\\\ \n");
        }
    }

    //
    // Summary of results
    //
    report.append("\\FloatBarrier\n");
    report.append("\\section{Summary}\n");
    for (String testType : new String[] { "Parametric", "Non-Parametric" }) {
        String prefix = "";

        if (nModels == 2) {
            report.append(
                    String.format("The system performed %s testing of the %d models using a %s. The test ",
                            testType.toLowerCase(), nModels, methodsSummary.get(testType)));
            prefix = "It";
        } else {
            String postHocTesting = String.format("%sPostHoc", testType);
            report.append(String.format(
                    "The system performed %s testing of the %d models using a %s and a %s post-hoc. The tests ",
                    testType.toLowerCase(), nModels, methodsSummary.get(testType),
                    methodsSummary.get(postHocTesting)));
            prefix = "They";
        }

        // If all tests failed, there're no results to summarize.
        HashMap<String, List<String>> summary = testSummary.get(testType);
        if (summary.keySet().size() == 0) {
            report.append("failed. ");
            continue;
        }

        Iterator<String> it = summary.keySet().iterator();
        boolean usePrefix = false;
        while (it.hasNext()) {
            String pVerbalization = it.next();
            List<String> affectedMeasures = summary.get(pVerbalization);
            if (!usePrefix) {
                report.append(String.format("%s differences in performance for the %s. ", pVerbalization,
                        createEnumeration(affectedMeasures)));
            } else {
                report.append(String.format("%s %s differences in performance for the %s. ", prefix,
                        pVerbalization, createEnumeration(affectedMeasures)));
            }
            usePrefix = true;
        }
        report.append("\\\\ \n\n");

    }

    //
    // Appendix
    //
    // Add all figures
    report.append("\\FloatBarrier\n");
    report.append("\\section{Appendix}\n");
    for (int i = 0; i < figures.size(); i++) {
        ref = figures.get(i)[0];
        String caption = figures.get(i)[1];
        String filename = figures.get(i)[2];
        report.append("\\begin{figure}\n");
        report.append("\\centering\n");
        report.append(String.format("\\includegraphics[width=1\\linewidth]{%s}\n", filename));
        report.append(String.format("\\caption{%s}\n", caption));
        report.append(String.format("\\label{%s}\n", ref));
        report.append("\\end{figure}\n\n");
    }

    // Close document
    report.append("\\end{document}");
    return report.toString();

}

From source file:alfio.manager.system.DataMigratorIntegrationTest.java

@Test
public void testUpdateGender() {
    // Single category with the standard number of seats; dates are "now" since
    // this test never relies on the sale window.
    List<TicketCategoryModification> categories = Collections.singletonList(new TicketCategoryModification(null,
            "default", AVAILABLE_SEATS, new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN, false, "",
            false, null, null, null, null, null));
    Pair<Event, String> eventUsername = initEvent(categories);
    Event event = eventUsername.getKey();
    try {
        // Reserve two tickets in the event's first category and confirm the
        // reservation with an on-site payment.
        TicketReservationModification trm = new TicketReservationModification();
        trm.setAmount(2);
        trm.setTicketCategoryId(eventManager.loadTicketCategories(event).get(0).getId());
        TicketReservationWithOptionalCodeModification r = new TicketReservationWithOptionalCodeModification(trm,
                Optional.empty());
        Date expiration = DateUtils.addDays(new Date(), 1);
        String reservationId = ticketReservationManager.createTicketReservation(event,
                Collections.singletonList(r), Collections.emptyList(), expiration, Optional.empty(),
                Optional.empty(), Locale.ENGLISH, false);
        ticketReservationManager.confirm("TOKEN", null, event, reservationId, "email@email.ch",
                new CustomerName("Full Name", "Full", "Name", event), Locale.ENGLISH, null,
                new TotalPrice(1000, 10, 0, 0), Optional.empty(), Optional.of(PaymentProxy.ON_SITE), false,
                null, null, null);
        List<Ticket> tickets = ticketRepository.findTicketsInReservation(reservationId);
        // Owner data for each of the two reserved tickets. The gender/t-shirt
        // fields are commented out pending the migration work tracked by the
        // FIXME below.
        UpdateTicketOwnerForm first = new UpdateTicketOwnerForm();
        first.setEmail("email@email.ch");
        //first.setTShirtSize("SMALL");
        //first.setGender("F");
        first.setFirstName("Full");
        first.setLastName("Name");
        UpdateTicketOwnerForm second = new UpdateTicketOwnerForm();
        //second.setTShirtSize("SMALL-F");
        second.setEmail("email@email.ch");
        second.setFirstName("Full");
        second.setLastName("Name");
        // Build the partial PDF ticket for its side effects (repository
        // lookups); the returned generator itself is not used by this test,
        // so no local variable is kept for it.
        TemplateProcessor.buildPartialPDFTicket(Locale.ITALIAN, event,
                ticketReservationManager.findById(reservationId).get(),
                ticketCategoryRepository.getByIdAndActive(tickets.get(0).getCategoryId(), event.getId()),
                organizationRepository.getById(event.getOrganizationId()), templateManager, fileUploadManager,
                "");
        ticketReservationManager.updateTicketOwner(tickets.get(0), Locale.ITALIAN, event, first, (t) -> "",
                (t) -> "", Optional.empty());
        ticketReservationManager.updateTicketOwner(tickets.get(1), Locale.ITALIAN, event, second, (t) -> "",
                (t) -> "", Optional.empty());
        //FIXME
        //dataMigrator.fillTicketsGender();
        //ticketRepository.findTicketsInReservation(reservationId).forEach(t -> assertEquals("F", t.getGender()));
    } finally {
        // Always clean up the event created by initEvent.
        eventManager.deleteEvent(event.getId(), eventUsername.getValue());
    }
}

From source file:alfio.manager.TicketReservationManagerIntegrationTest.java

@Test
public void testTicketSelection() {
    // One bounded category (AVAILABLE_SEATS) and one unbounded category (10 seats).
    List<TicketCategoryModification> categories = Arrays.asList(
            new TicketCategoryModification(null, "default", AVAILABLE_SEATS,
                    new DateTimeModification(LocalDate.now(), LocalTime.now()),
                    new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN,
                    false, "", false, null, null, null, null, null),
            new TicketCategoryModification(null, "default", 10,
                    new DateTimeModification(LocalDate.now(), LocalTime.now()),
                    new DateTimeModification(LocalDate.now(), LocalTime.now()), DESCRIPTION, BigDecimal.TEN,
                    false, "", true, null, null, null, null, null));
    Pair<Event, String> eventAndUsername = initEvent(categories, organizationRepository, userManager,
            eventManager, eventRepository);
    Event event = eventAndUsername.getKey();

    TicketCategory bounded = ticketCategoryRepository.findByEventId(event.getId()).stream()
            .filter(TicketCategory::isBounded).findFirst().orElseThrow(IllegalStateException::new);
    TicketCategory unbounded = ticketCategoryRepository.findByEventId(event.getId()).stream()
            .filter(t -> !t.isBounded()).findFirst().orElseThrow(IllegalStateException::new);

    // Nothing has been modified yet in either category.
    assertEquals(0, eventStatisticsManager.loadModifiedTickets(event.getId(), bounded.getId(), 0, null).size());
    assertEquals(Integer.valueOf(0),
            eventStatisticsManager.countModifiedTicket(event.getId(), bounded.getId(), null));
    assertEquals(0,
            eventStatisticsManager.loadModifiedTickets(event.getId(), unbounded.getId(), 0, null).size());

    // Reserve 10 bounded + 9 unbounded tickets (19 in total).
    TicketReservationModification tr = new TicketReservationModification();
    tr.setAmount(10);
    tr.setTicketCategoryId(bounded.getId());

    TicketReservationModification tr2 = new TicketReservationModification();
    tr2.setAmount(9);
    tr2.setTicketCategoryId(unbounded.getId());

    TicketReservationWithOptionalCodeModification mod = new TicketReservationWithOptionalCodeModification(tr,
            Optional.empty());
    TicketReservationWithOptionalCodeModification mod2 = new TicketReservationWithOptionalCodeModification(tr2,
            Optional.empty());
    String reservationId = ticketReservationManager.createTicketReservation(event, Arrays.asList(mod, mod2),
            Collections.emptyList(), DateUtils.addDays(new Date(), 1), Optional.empty(), Optional.empty(),
            Locale.ENGLISH, false);

    List<TicketReservation> reservations = ticketReservationManager
            .findAllReservationsInEvent(event.getId(), 0, null, null).getKey();
    assertEquals(1, reservations.size());
    assertEquals(reservationId, reservations.get(0).getId());

    List<Ticket> pendingTickets = ticketRepository
            .findPendingTicketsInCategories(Arrays.asList(bounded.getId(), unbounded.getId()));
    assertEquals(19, pendingTickets.size());
    pendingTickets.forEach(t -> assertEquals(1000, t.getFinalPriceCts()));
    // Exactly one ticket is left free, and it must not be assigned to a category.
    List<Ticket> tickets = ticketRepository.findFreeByEventId(event.getId());
    assertEquals(1, tickets.size());
    assertTrue(tickets.stream().allMatch(t -> t.getCategoryId() == null));

    TotalPrice totalPrice = ticketReservationManager.totalReservationCostWithVAT(reservationId);

    assertEquals(0, ticketReservationManager.getPendingPayments(event).size());

    // Confirm with the OFFLINE proxy: the reservation waits in OFFLINE_PAYMENT
    // status until the payment is validated.
    PaymentResult confirm = ticketReservationManager.confirm(null, null, event, reservationId,
            "email@example.com", new CustomerName("full name", "full", "name", event), Locale.ENGLISH,
            "billing address", totalPrice, Optional.empty(), Optional.of(PaymentProxy.OFFLINE), false, null,
            null, null);

    assertTrue(confirm.isSuccessful());

    assertEquals(TicketReservation.TicketReservationStatus.OFFLINE_PAYMENT,
            ticketReservationManager.findById(reservationId).get().getStatus());

    assertEquals(1, ticketReservationManager.getPendingPayments(event).size());

    Date now = new Date();
    Date from = DateUtils.addDays(now, -1);
    Date to = DateUtils.addDays(now, 1);

    // No sold tickets before the offline payment is validated...
    assertTrue(ticketReservationRepository.getSoldStatistic(event.getId(), from, to).isEmpty());
    ticketReservationManager.validateAndConfirmOfflinePayment(reservationId, event, new BigDecimal("190.00"),
            eventAndUsername.getValue());

    // ...and all 19 reserved tickets count as sold afterwards.
    assertEquals(19,
            ticketReservationRepository.getSoldStatistic(event.getId(), from, to).get(0).getTicketSoldCount());

    assertEquals(10,
            eventStatisticsManager.loadModifiedTickets(event.getId(), bounded.getId(), 0, null).size());
    assertEquals(Integer.valueOf(10),
            eventStatisticsManager.countModifiedTicket(event.getId(), bounded.getId(), null));
    assertEquals(9,
            eventStatisticsManager.loadModifiedTickets(event.getId(), unbounded.getId(), 0, null).size());
    assertEquals(Integer.valueOf(9),
            eventStatisticsManager.countModifiedTicket(event.getId(), unbounded.getId(), null));

    assertEquals(TicketReservation.TicketReservationStatus.COMPLETE,
            ticketReservationManager.findById(reservationId).get().getStatus());

    //-------------------
    // A second reservation confirmed as OFFLINE can still be deleted before
    // its payment is validated.

    TicketReservationModification trForDelete = new TicketReservationModification();
    trForDelete.setAmount(1);
    trForDelete.setTicketCategoryId(unbounded.getId());
    TicketReservationWithOptionalCodeModification modForDelete = new TicketReservationWithOptionalCodeModification(
            trForDelete, Optional.empty());
    String reservationId2 = ticketReservationManager.createTicketReservation(event,
            Collections.singletonList(modForDelete), Collections.emptyList(), DateUtils.addDays(new Date(), 1),
            Optional.empty(), Optional.empty(), Locale.ENGLISH, false);

    ticketReservationManager.confirm(null, null, event, reservationId2, "email@example.com",
            new CustomerName("full name", "full", "name", event), Locale.ENGLISH, "billing address", totalPrice,
            Optional.empty(), Optional.of(PaymentProxy.OFFLINE), false, null, null, null);

    assertTrue(ticketReservationManager.findById(reservationId2).isPresent());

    ticketReservationManager.deleteOfflinePayment(event, reservationId2, false);

    Assert.assertFalse(ticketReservationManager.findById(reservationId2).isPresent());
}

From source file:forge.game.card.Card.java

/**
 * Builds the human-readable reminder text for a "can't be blocked by ..." style
 * keyword. The text after the keyword's first space is a comma-separated list of
 * type expressions (OR-ed together); within each expression, predicates joined
 * by '.' or '+' are AND-ed (e.g. {@code "... Creature.Black+flying,Zombie"}).
 *
 * @param keyword the raw keyword string; everything after the first space is parsed
 * @return a phrase of the form {@code "by <clauses>."} or, when every predicate is
 *         negated, {@code "except by <clauses>."}
 */
private static String getTextForKwCantBeBlockedByType(final String keyword) {
    // Tracks whether ALL predicates (across every expression) are negated;
    // it is AND-ed below, so a single positive predicate flips it to false.
    boolean negative = true;
    final List<String> subs = Lists.newArrayList(TextUtil.split(keyword.split(" ", 2)[1], ','));
    final List<List<String>> subsAnd = Lists.newArrayList();
    final List<String> orClauses = new ArrayList<>();
    for (final String expession : subs) {
        // "[.+]" is a regex character class: split each expression on '.' or '+'.
        final List<String> parts = Lists.newArrayList(expession.split("[.+]"));
        for (int p = 0; p < parts.size(); p++) {
            final String part = parts.get(p);
            if (part.equalsIgnoreCase("creature")) {
                // "creature" is implicit in the generated text; drop it and
                // decrement p so the element shifted into this slot is re-checked.
                parts.remove(p--);
                continue;
            }
            // based on the supposition that each expression has at least 1 predicate except 'creature'
            negative &= part.contains("non") || part.contains("without");
        }
        subsAnd.add(parts);
    }

    // All-negated predicates are phrased as an exception ("except by ...");
    // otherwise as a plain restriction ("by ...").
    final boolean allNegative = negative;
    final String byClause = allNegative ? "except by " : "by ";

    // Renders one postponed "with ..." adjective. The "*NO* " prefix is emitted
    // when the adjective's polarity (key) matches the overall negation mode.
    // NOTE(review): the literal "*NO* " marker is presumably substituted or
    // styled downstream — confirm against the callers of this method.
    final Function<Pair<Boolean, String>, String> withToString = new Function<Pair<Boolean, String>, String>() {
        @Override
        public String apply(Pair<Boolean, String> inp) {
            boolean useNon = inp.getKey() == allNegative;
            return (useNon ? "*NO* " : "") + inp.getRight();
        }
    };

    // Build one English clause per AND-group; the clauses are OR-joined at the end.
    for (final List<String> andOperands : subsAnd) {
        // Adjectives placed before the noun ("black creatures") vs. after it
        // ("creatures with flying"); each carries its polarity as the pair's key.
        final List<Pair<Boolean, String>> prependedAdjectives = Lists.newArrayList();
        final List<Pair<Boolean, String>> postponedAdjectives = Lists.newArrayList();
        String creatures = null;

        for (String part : andOperands) {
            boolean positive = true;
            if (part.startsWith("non")) {
                // Strip the "non" prefix and remember the negated polarity.
                part = part.substring(3);
                positive = false;
            }
            if (part.startsWith("with")) {
                // "with<X>" / "without<X>" -> postponed adjective X; substring
                // offset skips "with" (4 chars) or "without" (7 chars).
                positive = !part.startsWith("without");
                postponedAdjectives.add(Pair.of(positive, part.substring(positive ? 4 : 7)));
            } else if (part.startsWith("powerLEX")) {// Kraken of the Straits
                // Special-cased dynamic power comparison (hard-coded card text).
                postponedAdjectives.add(Pair.of(true, "power less than the number of islands you control"));
            } else if (part.startsWith("power")) {
                // "power<op><N>" -> e.g. "power less than 3"; the two characters
                // after "power" encode the comparison operator.
                int kwLength = 5;
                String opName = Expressions.operatorName(part.substring(kwLength, kwLength + 2));
                String operand = part.substring(kwLength + 2);
                postponedAdjectives.add(Pair.of(true, "power" + opName + operand));
            } else if (CardType.isACreatureType(part)) {
                if (creatures != null && CardType.isACreatureType(creatures)) { // e.g. Kor Castigator
                    // Two bare creature types merge into one compound plural.
                    // NOTE(review): concatenated without a separator — confirm
                    // this produces the intended card text.
                    creatures = StringUtils.capitalize(Lang.getPlural(part)) + creatures;
                } else {
                    // First creature type, or appending to a non-type tail:
                    // join additional types with " or ".
                    creatures = StringUtils.capitalize(Lang.getPlural(part))
                            + (creatures == null ? "" : " or " + creatures);
                }
            } else {
                // Anything unrecognized is treated as a prepended adjective
                // (e.g. a color or supertype), lower-cased for mid-sentence use.
                prependedAdjectives.add(Pair.of(positive, part.toLowerCase()));
            }
        }

        StringBuilder sbShort = new StringBuilder();
        if (allNegative) {
            // Exception phrasing: list each prepended adjective with its own
            // noun, joined by "and/or"; matching-polarity adjectives get "non-".
            boolean isFirst = true;
            for (Pair<Boolean, String> pre : prependedAdjectives) {
                if (isFirst)
                    isFirst = false;
                else
                    sbShort.append(" and/or ");

                boolean useNon = pre.getKey() == allNegative;
                if (useNon)
                    sbShort.append("non-");
                sbShort.append(pre.getValue()).append(" ").append(creatures == null ? "creatures" : creatures);
            }
            if (prependedAdjectives.isEmpty())
                sbShort.append(creatures == null ? "creatures" : creatures);

            if (!postponedAdjectives.isEmpty()) {
                if (!prependedAdjectives.isEmpty()) {
                    // A generic noun is needed before "with ..." when adjectives
                    // already consumed the specific noun above.
                    sbShort.append(" and/or creatures");
                }

                sbShort.append(" with ");
                // Negated mode joins the "with" adjectives with "or".
                sbShort.append(
                        Lang.joinHomogenous(postponedAdjectives, withToString, allNegative ? "or" : "and"));
            }

        } else {
            // Plain phrasing: all prepended adjectives precede a single noun.
            for (Pair<Boolean, String> pre : prependedAdjectives) {
                boolean useNon = pre.getKey() == allNegative;
                if (useNon)
                    sbShort.append("non-");
                sbShort.append(pre.getValue()).append(" ");
            }
            sbShort.append(creatures == null ? "creatures" : creatures);

            if (!postponedAdjectives.isEmpty()) {
                sbShort.append(" with ");
                // Positive mode joins the "with" adjectives with "and".
                sbShort.append(
                        Lang.joinHomogenous(postponedAdjectives, withToString, allNegative ? "or" : "and"));
            }

        }
        orClauses.add(sbShort.toString());
    }
    // Alternative clauses are OR-joined into the final sentence.
    return byClause + StringUtils.join(orClauses, " or ") + ".";
}