Example usage for com.google.common.io Closer close

Introduction

On this page you can find example usages of com.google.common.io.Closer.close().

Prototype

@Override
public void close() throws IOException 

Document

Closes all Closeable instances that have been added to this Closer.
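
Per the Guava documentation, the intended idiom is to register each resource with the Closer, route any failure through Closer#rethrow so the original exception stays the primary one, and call close() in a finally block. A minimal sketch of that idiom follows; the CloserExample class and the in.txt/out.txt paths are placeholders, not part of the Guava API.

import com.google.common.io.Closer;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class CloserExample {

    // Copies the first line of in.txt to out.txt, closing both resources via Closer.
    public static void copyFirstLine() throws IOException {
        Closer closer = Closer.create();
        try {
            // register() returns its argument, so resources can be registered inline;
            // Closer.close() will later close them even if a later statement throws.
            BufferedReader reader = closer.register(new BufferedReader(new FileReader("in.txt")));
            Writer writer = closer.register(new FileWriter("out.txt"));
            String line = reader.readLine();
            if (line != null) {
                writer.write(line);
            }
        } catch (Throwable t) {
            // rethrow() records t as the primary exception, so exceptions thrown while
            // closing are suppressed rather than masking it.
            throw closer.rethrow(t);
        } finally {
            // Closes all registered Closeables, in reverse registration order.
            closer.close();
        }
    }
}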

Usage

From source file:com.facebook.buck.android.RealExopackageDevice.java

@Override
public void installFile(final String agentCommand, final int port, final Path targetDevicePath,
        final Path source) throws Exception {
    Preconditions.checkArgument(source.isAbsolute());
    Preconditions.checkArgument(targetDevicePath.isAbsolute());
    Closer closer = Closer.create();
    CollectingOutputReceiver receiver = new CollectingOutputReceiver() {

        private boolean startedPayload = false;
        private boolean wrotePayload = false;
        @Nullable
        private OutputStream outToDevice;

        @Override
        public void addOutput(byte[] data, int offset, int length) {
            super.addOutput(data, offset, length);
            try {
                if (!startedPayload && getOutput().length() >= AgentUtil.TEXT_SECRET_KEY_SIZE) {
                    LOG.verbose("Got key: %s", getOutput().split("[\\r\\n]", 1)[0]);
                    startedPayload = true;
                    Socket clientSocket = new Socket("localhost", port);
                    closer.register(clientSocket);
                    LOG.verbose("Connected");
                    outToDevice = clientSocket.getOutputStream();
                    closer.register(outToDevice);
                    // Need to wait for client to acknowledge that we've connected.
                }
                if (outToDevice == null) {
                    throw new NullPointerException();
                }
                if (!wrotePayload && getOutput().contains("z1")) {
                    if (outToDevice == null) {
                        throw new NullPointerException("outToDevice was null when protocol says it cannot be");
                    }
                    LOG.verbose("Got z1");
                    wrotePayload = true;
                    outToDevice.write(getOutput().substring(0, AgentUtil.TEXT_SECRET_KEY_SIZE).getBytes());
                    LOG.verbose("Wrote key");
                    com.google.common.io.Files.asByteSource(source.toFile()).copyTo(outToDevice);
                    outToDevice.flush();
                    LOG.verbose("Wrote file");
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    };

    String targetFileName = targetDevicePath.toString();
    String command = "umask 022 && " + agentCommand + "receive-file " + port + " " + Files.size(source) + " "
            + targetFileName + " ; echo -n :$?";
    LOG.debug("Executing %s", command);

    // If we fail to execute the command, stash the exception.  My experience during development
    // has been that the exception from checkReceiverOutput is more actionable.
    Exception shellException = null;
    try {
        device.executeShellCommand(command, receiver);
    } catch (Exception e) {
        shellException = e;
    }

    // Close the client socket, if we opened it.
    closer.close();

    try {
        AdbHelper.checkReceiverOutput(command, receiver);
    } catch (Exception e) {
        if (shellException != null) {
            e.addSuppressed(shellException);
        }
        throw e;
    }

    if (shellException != null) {
        throw shellException;
    }

    // The standard Java libraries on Android always create new files un-readable by other users.
    // We use the shell user or root to create these files, so we need to explicitly set the mode
    // to allow the app to read them.  Ideally, the agent would do this automatically, but
    // there's no easy way to do this in Java.  We can drop this if we drop support for the
    // Java agent.
    AdbHelper.executeCommandWithErrorChecking(device, "chmod 644 " + targetFileName);
}

From source file:com.facebook.buck.android.exopackage.RealExopackageDevice.java

@Override
public void installFile(final Path targetDevicePath, final Path source) throws Exception {
    Preconditions.checkArgument(source.isAbsolute());
    Preconditions.checkArgument(targetDevicePath.isAbsolute());
    Closer closer = Closer.create();
    CollectingOutputReceiver receiver = new CollectingOutputReceiver() {

        private boolean startedPayload = false;
        private boolean wrotePayload = false;
        @Nullable
        private OutputStream outToDevice;

        @Override
        public void addOutput(byte[] data, int offset, int length) {
            super.addOutput(data, offset, length);
            try {
                if (!startedPayload && getOutput().length() >= AgentUtil.TEXT_SECRET_KEY_SIZE) {
                    LOG.verbose("Got key: %s", getOutput().split("[\\r\\n]", 1)[0]);
                    startedPayload = true;
                    Socket clientSocket = new Socket("localhost", agentPort);
                    closer.register(clientSocket);
                    LOG.verbose("Connected");
                    outToDevice = clientSocket.getOutputStream();
                    closer.register(outToDevice);
                    // Need to wait for client to acknowledge that we've connected.
                }
                if (outToDevice == null) {
                    throw new NullPointerException();
                }
                if (!wrotePayload && getOutput().contains("z1")) {
                    if (outToDevice == null) {
                        throw new NullPointerException("outToDevice was null when protocol says it cannot be");
                    }
                    LOG.verbose("Got z1");
                    wrotePayload = true;
                    outToDevice.write(getOutput().substring(0, AgentUtil.TEXT_SECRET_KEY_SIZE).getBytes());
                    LOG.verbose("Wrote key");
                    com.google.common.io.Files.asByteSource(source.toFile()).copyTo(outToDevice);
                    outToDevice.flush();
                    LOG.verbose("Wrote file");
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    };

    String targetFileName = targetDevicePath.toString();
    String command = "umask 022 && " + agent.get().getAgentCommand() + "receive-file " + agentPort + " "
            + Files.size(source) + " " + targetFileName + " ; echo -n :$?";
    LOG.debug("Executing %s", command);

    // If we fail to execute the command, stash the exception.  My experience during development
    // has been that the exception from checkReceiverOutput is more actionable.
    Exception shellException = null;
    try {
        device.executeShellCommand(command, receiver);
    } catch (Exception e) {
        shellException = e;
    }

    // Close the client socket, if we opened it.
    closer.close();

    try {
        AdbHelper.checkReceiverOutput(command, receiver);
    } catch (Exception e) {
        if (shellException != null) {
            e.addSuppressed(shellException);
        }
        throw e;
    }

    if (shellException != null) {
        throw shellException;
    }

    // The standard Java libraries on Android always create new files un-readable by other users.
    // We use the shell user or root to create these files, so we need to explicitly set the mode
    // to allow the app to read them.  Ideally, the agent would do this automatically, but
    // there's no easy way to do this in Java.  We can drop this if we drop support for the
    // Java agent.
    AdbHelper.executeCommandWithErrorChecking(device, "chmod 644 " + targetFileName);
}

From source file:org.gbif.occurrence.download.file.OccurrenceFileWriterJob.java

/**
 * Executes the job.query and creates a data file that will contain the records from job.from to job.to positions.
 */
@Override
public Result call() throws IOException {
    // Creates a closer
    Closer closer = Closer.create();

    // Calculates the amount of output records
    final int nrOfOutputRecords = fileJob.getTo() - fileJob.getFrom();
    Map<UUID, Long> datasetUsages = Maps.newHashMap();

    // Creates a search request instance using the search request that comes in the fileJob
    SolrQuery solrQuery = createSolrQuery(fileJob.getQuery());

    try {
        ICsvMapWriter intCsvWriter = closer.register(
                new CsvMapWriter(new FileWriterWithEncoding(fileJob.getInterpretedDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        ICsvMapWriter verbCsvWriter = closer.register(
                new CsvMapWriter(new FileWriterWithEncoding(fileJob.getVerbatimDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        ICsvBeanWriter multimediaCsvWriter = closer.register(
                new CsvBeanWriter(new FileWriterWithEncoding(fileJob.getMultimediaDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        int recordCount = 0;
        while (recordCount < nrOfOutputRecords) {
            solrQuery.setStart(fileJob.getFrom() + recordCount);
            // Limit can't be greater than the maximum number of records assigned to this job
            solrQuery
                    .setRows(recordCount + LIMIT > nrOfOutputRecords ? nrOfOutputRecords - recordCount : LIMIT);
            final QueryResponse response = solrServer.query(solrQuery);
            for (Iterator<SolrDocument> itResults = response.getResults().iterator(); itResults
                    .hasNext(); recordCount++) {
                final Integer occKey = (Integer) itResults.next()
                        .getFieldValue(OccurrenceSolrField.KEY.getFieldName());
                // Writes the occurrence record obtained from HBase as Map<String,Object>.
                org.apache.hadoop.hbase.client.Result result = occurrenceMapReader.get(occKey);
                Map<String, String> occurrenceRecordMap = OccurrenceMapReader.buildOccurrenceMap(result);
                Map<String, String> verbOccurrenceRecordMap = OccurrenceMapReader
                        .buildVerbatimOccurrenceMap(result);
                if (occurrenceRecordMap != null) {
                    incrementDatasetUsage(datasetUsages, occurrenceRecordMap);
                    intCsvWriter.write(occurrenceRecordMap, INT_COLUMNS);
                    verbCsvWriter.write(verbOccurrenceRecordMap, VERB_COLUMNS);
                    writeMediaObjects(multimediaCsvWriter, result, occKey);
                } else {
                    LOG.error(String.format("Occurrence id %s not found!", occKey));
                }
            }
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        closer.close();
        // Unlock the assigned lock.
        lock.unlock();
        LOG.info("Lock released, job detail: {} ", fileJob.toString());
    }
    return new Result(fileJob, datasetUsages);
}

From source file:gobblin.data.management.conversion.hive.validation.ValidationJob.java

/***
 * Execute Hive queries using {@link HiveJdbcConnector} and validate results.
 * @param queries Queries to execute.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", justification = "Temporary fix")
private List<Long> getValidationOutputFromHive(List<String> queries) throws IOException {

    if (null == queries || queries.size() == 0) {
        log.warn("No queries specified to be executed");
        return Collections.emptyList();
    }

    List<Long> rowCounts = Lists.newArrayList();
    Closer closer = Closer.create();

    try {
        HiveJdbcConnector hiveJdbcConnector = closer.register(HiveJdbcConnector.newConnectorWithProps(props));
        for (String query : queries) {
            String hiveOutput = "hiveConversionValidationOutput_" + UUID.randomUUID().toString();
            Path hiveTempDir = new Path("/tmp" + Path.SEPARATOR + hiveOutput);
            query = "INSERT OVERWRITE DIRECTORY '" + hiveTempDir + "' " + query;
            log.info("Executing query: " + query);
            try {
                if (this.hiveSettings.size() > 0) {
                    hiveJdbcConnector
                            .executeStatements(this.hiveSettings.toArray(new String[this.hiveSettings.size()]));
                }
                hiveJdbcConnector.executeStatements("SET hive.exec.compress.output=false",
                        "SET hive.auto.convert.join=false", query);
                FileStatus[] fileStatusList = this.fs.listStatus(hiveTempDir);
                List<FileStatus> files = new ArrayList<>();
                for (FileStatus fileStatus : fileStatusList) {
                    if (fileStatus.isFile()) {
                        files.add(fileStatus);
                    }
                }
                if (files.size() > 1) {
                    log.warn("Found more than one output file. Should have been one.");
                } else if (files.size() == 0) {
                    log.warn("Found no output file. Should have been one.");
                } else {
                    String theString = IOUtils.toString(
                            new InputStreamReader(this.fs.open(files.get(0).getPath()), Charsets.UTF_8));
                    log.info("Found row count: " + theString.trim());
                    if (StringUtils.isBlank(theString.trim())) {
                        rowCounts.add(0L);
                    } else {
                        try {
                            rowCounts.add(Long.parseLong(theString.trim()));
                        } catch (NumberFormatException e) {
                            throw new RuntimeException("Could not parse Hive output: " + theString.trim(), e);
                        }
                    }
                }
            } finally {
                if (this.fs.exists(hiveTempDir)) {
                    log.debug("Deleting temp dir: " + hiveTempDir);
                    this.fs.delete(hiveTempDir, true);
                }
            }
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            closer.close();
        } catch (Exception e) {
            log.warn("Could not close HiveJdbcConnector", e);
        }
    }

    return rowCounts;
}

From source file:org.apache.gobblin.data.management.conversion.hive.validation.ValidationJob.java

/***
 * Execute Hive queries using {@link HiveJdbcConnector} and validate results.
 * @param queries Queries to execute.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", justification = "Temporary fix")
private List<Long> getValidationOutputFromHive(List<String> queries) throws IOException {

    if (null == queries || queries.size() == 0) {
        log.warn("No queries specified to be executed");
        return Collections.emptyList();
    }

    List<Long> rowCounts = Lists.newArrayList();
    Closer closer = Closer.create();

    try {
        HiveJdbcConnector hiveJdbcConnector = closer.register(HiveJdbcConnector.newConnectorWithProps(props));
        for (String query : queries) {
            String hiveOutput = "hiveConversionValidationOutput_" + UUID.randomUUID().toString();
            Path hiveTempDir = new Path("/tmp" + Path.SEPARATOR + hiveOutput);
            query = "INSERT OVERWRITE DIRECTORY '" + hiveTempDir + "' " + query;
            log.info("Executing query: " + query);
            try {
                if (this.hiveSettings.size() > 0) {
                    hiveJdbcConnector
                            .executeStatements(this.hiveSettings.toArray(new String[this.hiveSettings.size()]));
                }
                hiveJdbcConnector.executeStatements("SET hive.exec.compress.output=false",
                        "SET hive.auto.convert.join=false", query);
                FileStatus[] fileStatusList = this.fs.listStatus(hiveTempDir);
                List<FileStatus> files = new ArrayList<>();
                for (FileStatus fileStatus : fileStatusList) {
                    if (fileStatus.isFile()) {
                        files.add(fileStatus);
                    }
                }
                if (files.size() > 1) {
                    log.warn("Found more than one output file. Should have been one.");
                } else if (files.size() == 0) {
                    log.warn("Found no output file. Should have been one.");
                } else {
                    String theString = IOUtils.toString(
                            new InputStreamReader(this.fs.open(files.get(0).getPath()), Charsets.UTF_8));
                    log.info("Found row count: " + theString.trim());
                    if (StringUtils.isBlank(theString.trim())) {
                        rowCounts.add(0L);
                    } else {
                        try {
                            rowCounts.add(Long.parseLong(theString.trim()));
                        } catch (NumberFormatException e) {
                            throw new RuntimeException("Could not parse Hive output: " + theString.trim(), e);
                        }
                    }
                }
            } finally {
                if (this.fs.exists(hiveTempDir)) {
                    log.debug("Deleting temp dir: " + hiveTempDir);
                    this.fs.delete(hiveTempDir, true);
                }
            }
        }
    } catch (SQLException e) {
        log.warn("Execution failed for query set " + queries.toString(), e);
    } finally {
        try {
            closer.close();
        } catch (Exception e) {
            log.warn("Could not close HiveJdbcConnector", e);
        }
    }

    return rowCounts;
}

From source file:org.apache.jackrabbit.oak.run.CheckpointsCommand.java

@Override
public void execute(String... args) throws Exception {
    OptionParser parser = new OptionParser();
    OptionSpec segmentTar = parser.accepts("segment-tar", "Use oak-segment-tar instead of oak-segment");
    OptionSet options = parser.parse(args);

    if (options.nonOptionArguments().isEmpty()) {
        System.out.println(
                "usage: checkpoints {<path>|<mongo-uri>} [list|rm-all|rm-unreferenced|rm <checkpoint>] [--segment-tar]");
        System.exit(1);
    }

    boolean success = false;
    Checkpoints cps;
    Closer closer = Closer.create();
    try {
        String op = "list";
        if (options.nonOptionArguments().size() >= 2) {
            op = options.nonOptionArguments().get(1).toString();
            if (!"list".equals(op) && !"rm-all".equals(op) && !"rm-unreferenced".equals(op)
                    && !"rm".equals(op)) {
                failWith("Unknown command.");
            }
        }

        String connection = options.nonOptionArguments().get(0).toString();
        if (connection.startsWith(MongoURI.MONGODB_PREFIX)) {
            MongoClientURI uri = new MongoClientURI(connection);
            MongoClient client = new MongoClient(uri);
            final DocumentNodeStore store = new DocumentMK.Builder().setMongoDB(client.getDB(uri.getDatabase()))
                    .getNodeStore();
            closer.register(Utils.asCloseable(store));
            cps = Checkpoints.onDocumentMK(store);
        } else if (options.has(segmentTar)) {
            cps = Checkpoints.onSegmentTar(new File(connection), closer);
        } else {
            cps = Checkpoints.onSegment(new File(connection), closer);
        }

        System.out.println("Checkpoints " + connection);
        if ("list".equals(op)) {
            int cnt = 0;
            for (Checkpoints.CP cp : cps.list()) {
                System.out.printf("- %s created %s expires %s%n", cp.id, new Timestamp(cp.created),
                        new Timestamp(cp.expires));
                cnt++;
            }
            System.out.println("Found " + cnt + " checkpoints");
        } else if ("rm-all".equals(op)) {
            long time = System.currentTimeMillis();
            long cnt = cps.removeAll();
            time = System.currentTimeMillis() - time;
            if (cnt != -1) {
                System.out.println("Removed " + cnt + " checkpoints in " + time + "ms.");
            } else {
                failWith("Failed to remove all checkpoints.");
            }
        } else if ("rm-unreferenced".equals(op)) {
            long time = System.currentTimeMillis();
            long cnt = cps.removeUnreferenced();
            time = System.currentTimeMillis() - time;
            if (cnt != -1) {
                System.out.println("Removed " + cnt + " checkpoints in " + time + "ms.");
            } else {
                failWith("Failed to remove unreferenced checkpoints.");
            }
        } else if ("rm".equals(op)) {
            if (options.nonOptionArguments().size() < 3) {
                failWith("Missing checkpoint id");
            } else {
                String cp = options.nonOptionArguments().get(2).toString();
                long time = System.currentTimeMillis();
                int cnt = cps.remove(cp);
                time = System.currentTimeMillis() - time;
                if (cnt != 0) {
                    if (cnt == 1) {
                        System.out.println("Removed checkpoint " + cp + " in " + time + "ms.");
                    } else {
                        failWith("Failed to remove checkpoint " + cp);
                    }
                } else {
                    failWith("Checkpoint '" + cp + "' not found.");
                }
            }
        }
        success = true;
    } catch (Throwable t) {
        System.err.println(t.getMessage());
    } finally {
        closer.close();
    }
    if (!success) {
        System.exit(1);
    }
}

From source file:org.grouplens.lenskit.eval.traintest.TrainTestEvalJob.java

@SuppressWarnings("PMD.AvoidCatchingThrowable")
private void runEvaluation() throws IOException, RecommenderBuildException {
    Closer closer = Closer.create();
    try {
        TableWriter userTable = userOutputSupplier.get();
        if (userTable != null) {
            closer.register(userTable);
        }
        TableWriter predictTable = predictOutputSupplier.get();
        if (predictTable != null) {
            closer.register(predictTable);
        }

        List<Object> outputRow = Lists.newArrayList();

        ExecutionInfo execInfo = buildExecInfo();

        logger.info("Building {}", algorithm.getName());
        StopWatch buildTimer = new StopWatch();
        buildTimer.start();
        RecommenderInstance rec = algorithm.makeTestableRecommender(data, snapshot, execInfo);
        buildTimer.stop();
        logger.info("Built {} in {}", algorithm.getName(), buildTimer);

        logger.info("Measuring {}", algorithm.getName());
        for (ModelMetric metric : modelMetrics) {
            outputRow.addAll(metric.measureAlgorithm(algorithm, data, rec.getRecommender()));
        }

        logger.info("Testing {}", algorithm.getName());
        StopWatch testTimer = new StopWatch();
        testTimer.start();
        List<TestUserMetricAccumulator> evalAccums = new ArrayList<TestUserMetricAccumulator>(
                evaluators.size());

        List<Object> userRow = new ArrayList<Object>();

        UserEventDAO testUsers = data.getTestData().getUserEventDAO();
        for (TestUserMetric eval : evaluators) {
            TestUserMetricAccumulator accum = eval.makeAccumulator(algorithm, data);
            evalAccums.add(accum);
        }

        Cursor<UserHistory<Event>> userProfiles = closer.register(testUsers.streamEventsByUser());
        for (UserHistory<Event> p : userProfiles) {
            assert userRow.isEmpty();
            userRow.add(p.getUserId());

            long uid = p.getUserId();
            LongSet testItems = p.itemSet();

            Supplier<SparseVector> preds = new PredictionSupplier(rec, uid, testItems);
            Supplier<List<ScoredId>> recs = new RecommendationSupplier(rec, uid, testItems);
            Supplier<UserHistory<Event>> hist = new HistorySupplier(rec.getUserEventDAO(), uid);
            Supplier<UserHistory<Event>> testHist = Suppliers.ofInstance(p);

            TestUser test = new TestUser(uid, hist, testHist, preds, recs);

            for (TestUserMetricAccumulator accum : evalAccums) {
                Object[] ures = accum.evaluate(test);
                if (ures != null) {
                    userRow.addAll(Arrays.asList(ures));
                }
            }
            if (userTable != null) {
                try {
                    userTable.writeRow(userRow);
                } catch (IOException e) {
                    throw new RuntimeException("error writing user row", e);
                }
            }
            userRow.clear();

            if (predictTable != null) {
                writePredictions(predictTable, uid, RatingVectorUserHistorySummarizer.makeRatingVector(p),
                        test.getPredictions());
            }
        }
        testTimer.stop();
        logger.info("Tested {} in {}", algorithm.getName(), testTimer);

        writeOutput(buildTimer, testTimer, outputRow, evalAccums);
    } catch (Throwable th) {
        throw closer.rethrow(th, RecommenderBuildException.class);
    } finally {
        closer.close();
    }
}

From source file:org.grouplens.lenskit.eval.traintest.TrainTestJob.java

@SuppressWarnings("PMD.AvoidCatchingThrowable")
private void runEvaluation() throws IOException, RecommenderBuildException {
    EventBus bus = task.getProject().getEventBus();
    bus.post(JobEvents.started(this));
    Closer closer = Closer.create();
    try {
        outputs = task.getOutputs().getPrefixed(algorithmInfo, dataSet);
        TableWriter userResults = outputs.getUserWriter();
        List<Object> outputRow = Lists.newArrayList();

        logger.info("Building {} on {}", algorithmInfo, dataSet);
        StopWatch buildTimer = new StopWatch();
        buildTimer.start();
        buildRecommender();
        buildTimer.stop();
        logger.info("Built {} in {}", algorithmInfo.getName(), buildTimer);

        logger.info("Measuring {} on {}", algorithmInfo.getName(), dataSet.getName());

        StopWatch testTimer = new StopWatch();
        testTimer.start();
        List<Object> userRow = Lists.newArrayList();

        List<MetricWithAccumulator<?>> accumulators = Lists.newArrayList();

        for (Metric<?> eval : outputs.getMetrics()) {
            accumulators.add(makeMetricAccumulator(eval));
        }

        LongSet testUsers = dataSet.getTestData().getUserDAO().getUserIds();
        final NumberFormat pctFormat = NumberFormat.getPercentInstance();
        pctFormat.setMaximumFractionDigits(2);
        pctFormat.setMinimumFractionDigits(2);
        final int nusers = testUsers.size();
        logger.info("Testing {} on {} ({} users)", algorithmInfo, dataSet, nusers);
        int ndone = 0;
        for (LongIterator iter = testUsers.iterator(); iter.hasNext();) {
            if (Thread.interrupted()) {
                throw new InterruptedException("eval job interrupted");
            }
            long uid = iter.nextLong();
            userRow.add(uid);
            userRow.add(null); // placeholder for the per-user time
            assert userRow.size() == 2;

            Stopwatch userTimer = Stopwatch.createStarted();
            TestUser test = getUserResults(uid);

            userRow.add(test.getTrainHistory().size());
            userRow.add(test.getTestHistory().size());

            for (MetricWithAccumulator<?> accum : accumulators) {
                List<Object> ures = accum.measureUser(test);
                if (ures != null) {
                    userRow.addAll(ures);
                }
            }
            userTimer.stop();
            userRow.set(1, userTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
            if (userResults != null) {
                try {
                    userResults.writeRow(userRow);
                } catch (IOException e) {
                    throw new RuntimeException("error writing user row", e);
                }
            }
            userRow.clear();

            ndone += 1;
            if (ndone % 100 == 0) {
                testTimer.split();
                double time = testTimer.getSplitTime();
                double tpu = time / ndone;
                double tleft = (nusers - ndone) * tpu;
                logger.info("tested {} of {} users ({}), ETA {}", ndone, nusers,
                        pctFormat.format(((double) ndone) / nusers),
                        DurationFormatUtils.formatDurationHMS((long) tleft));
            }
        }
        testTimer.stop();
        logger.info("Tested {} in {}", algorithmInfo.getName(), testTimer);

        writeMetricValues(buildTimer, testTimer, outputRow, accumulators);
        bus.post(JobEvents.finished(this));
    } catch (Throwable th) {
        bus.post(JobEvents.failed(this, th));
        throw closer.rethrow(th, RecommenderBuildException.class);
    } finally {
        try {
            cleanup();
        } finally {
            outputs = null;
            closer.close();
        }
    }
}
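
The two LensKit examples above use the two-argument overload Closer#rethrow(Throwable, Class), which lets a checked exception declared by the enclosing method (RecommenderBuildException in those jobs) propagate unchanged instead of being wrapped. Below is a minimal sketch of that pattern; the CloserRethrowExample class, the BuildException type, and the file handling are hypothetical stand-ins, not code from the projects above.

import com.google.common.io.Closer;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloserRethrowExample {

    // Hypothetical checked exception standing in for something like RecommenderBuildException.
    static class BuildException extends Exception {
        BuildException(String message) {
            super(message);
        }
    }

    // Reads the first byte of the given file; may fail with either IOException or BuildException.
    static int readFirstByte(String path) throws IOException, BuildException {
        Closer closer = Closer.create();
        try {
            InputStream in = closer.register(new FileInputStream(path));
            int b = in.read();
            if (b < 0) {
                throw new BuildException("file is empty: " + path);
            }
            return b;
        } catch (Throwable t) {
            // rethrow(t, BuildException.class) rethrows t as-is if it is an IOException,
            // a BuildException, a RuntimeException, or an Error; anything else is wrapped
            // in a RuntimeException. It also marks t as the primary exception for close().
            throw closer.rethrow(t, BuildException.class);
        } finally {
            closer.close();
        }
    }
}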

From source file:org.apache.jackrabbit.oak.run.DataStoreCheckCommand.java

@Override
public void execute(String... args) throws Exception {
    OptionParser parser = new OptionParser();
    parser.allowsUnrecognizedOptions();

    String helpStr = "datastorecheck [--id] [--ref] [--consistency] [--store <path>|<mongo_uri>] "
            + "[--s3ds <s3ds_config>|--fds <fds_config>] [--dump <path>]";

    Closer closer = Closer.create();
    try {
        // Options for operations requested
        OptionSpecBuilder idOp = parser.accepts("id", "Get ids");
        OptionSpecBuilder refOp = parser.accepts("ref", "Get references");
        OptionSpecBuilder consistencyOp = parser.accepts("consistency", "Check consistency");

        // Node Store - needed for --ref, --consistency
        ArgumentAcceptingOptionSpec<String> store = parser.accepts("store", "Node Store")
                .requiredIf(refOp, consistencyOp).withRequiredArg().ofType(String.class);
        // Optional argument to specify the dump path
        ArgumentAcceptingOptionSpec<String> dump = parser.accepts("dump", "Dump Path").withRequiredArg()
                .ofType(String.class);
        OptionSpec segmentTar = parser.accepts("segment-tar", "Use oak-segment-tar instead of oak-segment");

        OptionSpec<?> help = parser.acceptsAll(asList("h", "?", "help"), "show help").forHelp();

        // Required rules (any one of --id, --ref, --consistency)
        idOp.requiredUnless(refOp, consistencyOp);
        refOp.requiredUnless(idOp, consistencyOp);
        consistencyOp.requiredUnless(idOp, refOp);

        OptionSet options = null;
        try {
            options = parser.parse(args);
        } catch (Exception e) {
            System.err.println(e);
            parser.printHelpOn(System.err);
            return;
        }

        if (options.has(help)) {
            parser.printHelpOn(System.out);
            return;
        }

        String dumpPath = JAVA_IO_TMPDIR.value();
        if (options.has(dump)) {
            dumpPath = options.valueOf(dump);
        }

        GarbageCollectableBlobStore blobStore = null;
        BlobReferenceRetriever marker = null;
        if (options.has(store)) {
            String source = options.valueOf(store);
            if (source.startsWith(MongoURI.MONGODB_PREFIX)) {
                MongoClientURI uri = new MongoClientURI(source);
                MongoClient client = new MongoClient(uri);
                DocumentNodeStore nodeStore = new DocumentMK.Builder()
                        .setMongoDB(client.getDB(uri.getDatabase())).getNodeStore();
                closer.register(Utils.asCloseable(nodeStore));
                blobStore = (GarbageCollectableBlobStore) nodeStore.getBlobStore();
                marker = new DocumentBlobReferenceRetriever(nodeStore);
            } else if (options.has(segmentTar)) {
                marker = SegmentTarUtils.newBlobReferenceRetriever(source, closer);
            } else {
                FileStore fileStore = openFileStore(source);
                closer.register(Utils.asCloseable(fileStore));
                marker = new SegmentBlobReferenceRetriever(fileStore.getTracker());
            }
        }

        // Initialize S3/FileDataStore if configured
        GarbageCollectableBlobStore dataStore = Utils.bootstrapDataStore(args, closer);
        if (dataStore != null) {
            blobStore = dataStore;
        }

        // blob store still not initialized means configuration not supported
        if (blobStore == null) {
            System.err.println("Operation not defined for SegmentNodeStore without external datastore");
            parser.printHelpOn(System.err);
            return;
        }

        FileRegister register = new FileRegister(options);
        closer.register(register);

        if (options.has(idOp) || options.has(consistencyOp)) {
            retrieveBlobIds(blobStore, register.createFile(idOp, dumpPath));
        }

        if (options.has(refOp) || options.has(consistencyOp)) {
            retrieveBlobReferences(blobStore, marker, register.createFile(refOp, dumpPath));
        }

        if (options.has(consistencyOp)) {
            checkConsistency(register.get(idOp), register.get(refOp),
                    register.createFile(consistencyOp, dumpPath));
        }
    } catch (Throwable t) {
        t.printStackTrace();
    } finally {
        closer.close();
    }
}

From source file:io.druid.segment.IndexMergerV9.java

@Override
protected File makeIndexFiles(final List<IndexableAdapter> adapters, final AggregatorFactory[] metricAggs,
        final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions,
        final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn, final IndexSpec indexSpec)
        throws IOException {
    progress.start();
    progress.progress();

    List<Metadata> metadataList = Lists.transform(adapters, new Function<IndexableAdapter, Metadata>() {
        @Override
        public Metadata apply(IndexableAdapter input) {
            return input.getMetadata();
        }
    });

    Metadata segmentMetadata = null;
    if (metricAggs != null) {
        AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
        for (int i = 0; i < metricAggs.length; i++) {
            combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
        }
        segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
    } else {
        segmentMetadata = Metadata.merge(metadataList, null);
    }

    Closer closer = Closer.create();
    final IOPeon ioPeon = new TmpFileIOPeon(false);
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            ioPeon.cleanup();
        }
    });
    final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
    final File v9TmpDir = new File(outDir, "v9-tmp");
    v9TmpDir.mkdirs();
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            FileUtils.deleteDirectory(v9TmpDir);
        }
    });
    log.info("Start making v9 index files, outDir:%s", outDir);
    try {
        long startTime = System.currentTimeMillis();
        ByteStreams.write(Ints.toByteArray(IndexIO.V9_VERSION),
                Files.newOutputStreamSupplier(new File(outDir, "version.bin")));
        log.info("Completed version.bin in %,d millis.", System.currentTimeMillis() - startTime);

        progress.progress();
        final Map<String, ValueType> metricsValueTypes = Maps
                .newTreeMap(Ordering.<String>natural().nullsFirst());
        final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
        final List<ColumnCapabilitiesImpl> dimCapabilities = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        mergeCapabilities(adapters, mergedDimensions, metricsValueTypes, metricTypeNames, dimCapabilities);

        final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
        final List<DimensionMerger> mergers = new ArrayList<>();
        for (int i = 0; i < mergedDimensions.size(); i++) {
            mergers.add(handlers[i].makeMerger(indexSpec, v9TmpDir, ioPeon, dimCapabilities.get(i), progress));
        }

        /************* Setup Dim Conversions **************/
        progress.progress();
        startTime = System.currentTimeMillis();
        final ArrayList<Map<String, IntBuffer>> dimConversions = Lists
                .newArrayListWithCapacity(adapters.size());
        final ArrayList<Boolean> dimensionSkipFlag = Lists.newArrayListWithCapacity(mergedDimensions.size());
        final ArrayList<Boolean> convertMissingDimsFlags = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        writeDimValueAndSetupDimConversion(adapters, progress, mergedDimensions, mergers);
        log.info("Completed dim conversions in %,d millis.", System.currentTimeMillis() - startTime);

        /************* Walk through data sets, merge them, and write merged columns *************/
        progress.progress();
        final Iterable<Rowboat> theRows = makeRowIterable(adapters, mergedDimensions, mergedMetrics,
                rowMergerFn, dimCapabilities, handlers, mergers);
        final LongColumnSerializer timeWriter = setupTimeWriter(ioPeon, indexSpec);
        final ArrayList<GenericColumnSerializer> metWriters = setupMetricsWriters(ioPeon, mergedMetrics,
                metricsValueTypes, metricTypeNames, indexSpec);
        final List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(adapters.size());

        mergeIndexesAndWriteColumns(adapters, progress, theRows, timeWriter, metWriters, rowNumConversions,
                mergers);

        /************ Create Inverted Indexes and Finalize Build Columns *************/
        final String section = "build inverted index and columns";
        progress.startSection(section);
        makeTimeColumn(v9Smoosher, progress, timeWriter);
        makeMetricsColumns(v9Smoosher, progress, mergedMetrics, metricsValueTypes, metricTypeNames, metWriters);

        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerV9 merger = (DimensionMergerV9) mergers.get(i);
            merger.writeIndexes(rowNumConversions, closer);
            if (merger.canSkip()) {
                continue;
            }
            ColumnDescriptor columnDesc = merger.makeColumnDescriptor();
            makeColumn(v9Smoosher, mergedDimensions.get(i), columnDesc);
        }

        progress.stopSection(section);

        /************* Make index.drd & metadata.drd files **************/
        progress.progress();
        makeIndexBinary(v9Smoosher, adapters, outDir, mergedDimensions, mergedMetrics, progress, indexSpec,
                mergers);
        makeMetadataBinary(v9Smoosher, progress, segmentMetadata);

        v9Smoosher.close();
        progress.stop();

        return outDir;
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}