List of usage examples for com.google.common.io Closeables close
public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException
From source file: org.apache.mahout.classifier.sgd.SimpleCsvExamples.java
/**
 * Benchmark driver with three modes selected by args[0]:
 *   --generate n file  : writes n random Line records to the given file.
 *   --parse file       : reads lines with a BufferedReader, summarizes fields.
 *   --fast file        : reads with the custom FastLineReader, summarizes fields.
 * In the two read modes it prints the per-field means, then always prints
 * the elapsed wall-clock time.
 */
public static void main(String[] args) throws IOException {
    FeatureVectorEncoder[] encoder = new FeatureVectorEncoder[FIELDS];
    for (int i = 0; i < FIELDS; i++) {
        // NOTE(review): "v" + 1 yields the constant name "v1" for EVERY field,
        // so all encoders share one name; the unused loop index suggests
        // "v" + i was intended — confirm before changing (it alters hashing).
        encoder[i] = new ConstantValueEncoder("v" + 1);
    }
    // One online quantile/mean summarizer per CSV field.
    OnlineSummarizer[] s = new OnlineSummarizer[FIELDS];
    for (int i = 0; i < FIELDS; i++) {
        s[i] = new OnlineSummarizer();
    }
    long t0 = System.currentTimeMillis();
    Vector v = new DenseVector(1000);
    if ("--generate".equals(args[0])) {
        // Write args[1] random lines to the file named by args[2].
        PrintWriter out = new PrintWriter(
                new OutputStreamWriter(new FileOutputStream(new File(args[2])), Charsets.UTF_8));
        try {
            int n = Integer.parseInt(args[1]);
            for (int i = 0; i < n; i++) {
                Line x = Line.generate();
                out.println(x);
            }
        } finally {
            // swallowIOException=false: a failed close of an output stream
            // may mean lost data, so let it propagate.
            Closeables.close(out, false);
        }
    } else if ("--parse".equals(args[0])) {
        // Baseline: BufferedReader + String-based Line parsing.
        BufferedReader in = Files.newReader(new File(args[1]), Charsets.UTF_8);
        try {
            String line = in.readLine();
            while (line != null) {
                v.assign(0);
                Line x = new Line(line);
                for (int i = 0; i < FIELDS; i++) {
                    s[i].add(x.getDouble(i));
                    encoder[i].addToVector(x.get(i), v);
                }
                line = in.readLine();
            }
        } finally {
            // Input stream: a close failure loses nothing, so swallow it.
            Closeables.close(in, true);
        }
        String separator = "";
        for (int i = 0; i < FIELDS; i++) {
            System.out.printf("%s%.3f", separator, s[i].getMean());
            separator = ",";
        }
    } else if ("--fast".equals(args[0])) {
        // Fast path: byte-oriented FastLineReader, avoiding String churn.
        FastLineReader in = new FastLineReader(new FileInputStream(args[1]));
        try {
            FastLine line = in.read();
            while (line != null) {
                v.assign(0);
                for (int i = 0; i < FIELDS; i++) {
                    double z = line.getDouble(i);
                    s[i].add(z);
                    // null name: value-only encoding overload.
                    encoder[i].addToVector((byte[]) null, z, v);
                }
                line = in.read();
            }
        } finally {
            Closeables.close(in, true);
        }
        String separator = "";
        for (int i = 0; i < FIELDS; i++) {
            System.out.printf("%s%.3f", separator, s[i].getMean());
            separator = ",";
        }
    }
    System.out.printf("\nElapsed time = %.3f%n", (System.currentTimeMillis() - t0) / 1000.0);
}
From source file: com.netflix.suro.SuroServer.java
public static void main(String[] args) throws IOException { final AtomicReference<Injector> injector = new AtomicReference<Injector>(); try {//from w ww . java2 s .c om // Parse the command line Options options = createOptions(); final CommandLine line = new BasicParser().parse(options, args); // Load the properties file final Properties properties = new Properties(); if (line.hasOption('p')) { properties.load(new FileInputStream(line.getOptionValue('p'))); } // Bind all command line options to the properties with prefix "SuroServer." for (Option opt : line.getOptions()) { String name = opt.getOpt(); String value = line.getOptionValue(name); String propName = PROP_PREFIX + opt.getArgName(); if (propName.equals(DynamicPropertyRoutingMapConfigurator.ROUTING_MAP_PROPERTY)) { properties.setProperty(DynamicPropertyRoutingMapConfigurator.ROUTING_MAP_PROPERTY, FileUtils.readFileToString(new File(value))); } else if (propName.equals(DynamicPropertySinkConfigurator.SINK_PROPERTY)) { properties.setProperty(DynamicPropertySinkConfigurator.SINK_PROPERTY, FileUtils.readFileToString(new File(value))); } else if (propName.equals(DynamicPropertyInputConfigurator.INPUT_CONFIG_PROPERTY)) { properties.setProperty(DynamicPropertyInputConfigurator.INPUT_CONFIG_PROPERTY, FileUtils.readFileToString(new File(value))); } else { properties.setProperty(propName, value); } } create(injector, properties); injector.get().getInstance(LifecycleManager.class).start(); Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { Closeables.close(injector.get().getInstance(LifecycleManager.class), true); } catch (IOException e) { // do nothing because Closeables.close will swallow IOException } } }); waitForShutdown(getControlPort(properties)); } catch (Throwable e) { System.err.println("SuroServer startup failed: " + e.getMessage()); System.exit(-1); } finally { Closeables.close(injector.get().getInstance(LifecycleManager.class), true); } }
From source file: com.infinities.skyport.Main.java
public static void main(String[] args) { Main main = null;// w ww. ja v a 2 s . c o m try { main = new Main(args); logger.trace("initialize skyport"); main.initialize(); } catch (Throwable e) { logger.error("Encounter error when initialize skyport.", e); try { if (main != null) { Closeables.close(main, true); } } catch (IOException e1) { logger.error("Encounter error when close skyport.", e1); } } }
From source file: org.hmahout.example.NetflixDatasetConverter.java
public static void main(String[] args) throws IOException { if (args.length != 4) { System.err.println("Usage: NetflixDatasetConverter /path/to/training_set/ /path/to/qualifying.txt " + "/path/to/judging.txt /path/to/destination"); return;/*from w w w . j a v a2 s .c o m*/ } String trainingDataDir = args[0]; String qualifyingTxt = args[1]; String judgingTxt = args[2]; Path outputPath = new Path(args[3]); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(outputPath.toUri(), conf); log.info("Creating training set at {}/trainingSet/ratings.tsv ...", outputPath); BufferedWriter writer = null; try { FSDataOutputStream outputStream = fs.create(new Path(outputPath, "trainingSet/ratings.tsv")); writer = new BufferedWriter(new OutputStreamWriter(outputStream, Charsets.UTF_8)); int ratingsProcessed = 0; for (File movieRatings : new File(trainingDataDir).listFiles()) { FileLineIterator lines = null; try { lines = new FileLineIterator(movieRatings); boolean firstLineRead = false; String movieID = null; while (lines.hasNext()) { String line = lines.next(); if (firstLineRead) { String[] tokens = SEPARATOR.split(line); String userID = tokens[0]; String rating = tokens[1]; writer.write(userID + TAB + movieID + TAB + rating + NEWLINE); ratingsProcessed++; if (ratingsProcessed % 1000000 == 0) { log.info("{} ratings processed...", ratingsProcessed); } } else { movieID = line.replaceAll(MOVIE_DENOTER, ""); firstLineRead = true; } } } finally { Closeables.close(lines, true); } } log.info("{} ratings processed. 
done.", ratingsProcessed); } finally { Closeables.close(writer, false); } log.info("Reading probes..."); List<Preference> probes = Lists.newArrayListWithExpectedSize(2817131); long currentMovieID = -1; for (String line : new FileLineIterable(new File(qualifyingTxt))) { if (line.contains(MOVIE_DENOTER)) { currentMovieID = Long.parseLong(line.replaceAll(MOVIE_DENOTER, "")); } else { long userID = Long.parseLong(SEPARATOR.split(line)[0]); probes.add(new GenericPreference(userID, currentMovieID, 0)); } } log.info("{} probes read...", probes.size()); log.info("Reading ratings, creating probe set at {}/probeSet/ratings.tsv ...", outputPath); writer = null; try { FSDataOutputStream outputStream = fs.create(new Path(outputPath, "probeSet/ratings.tsv")); writer = new BufferedWriter(new OutputStreamWriter(outputStream, Charsets.UTF_8)); int ratingsProcessed = 0; for (String line : new FileLineIterable(new File(judgingTxt))) { if (line.contains(MOVIE_DENOTER)) { currentMovieID = Long.parseLong(line.replaceAll(MOVIE_DENOTER, "")); } else { float rating = Float.parseFloat(SEPARATOR.split(line)[0]); Preference pref = probes.get(ratingsProcessed); Preconditions.checkState(pref.getItemID() == currentMovieID); ratingsProcessed++; writer.write(pref.getUserID() + TAB + pref.getItemID() + TAB + rating + NEWLINE); if (ratingsProcessed % 1000000 == 0) { log.info("{} ratings processed...", ratingsProcessed); } } } log.info("{} ratings processed. done.", ratingsProcessed); } finally { Closeables.close(writer, false); } }
From source file: org.apache.mahout.classifier.mlp.RunMultilayerPerceptron.java
public static void main(String[] args) throws Exception { Parameters parameters = new Parameters(); if (parseArgs(args, parameters)) { log.info("Load model from {}.", parameters.modelFilePathStr); MultilayerPerceptron mlp = new MultilayerPerceptron(parameters.modelFilePathStr); log.info("Topology of MLP: {}.", Arrays.toString(mlp.getLayerSizeList().toArray())); // validate the data log.info("Read the data..."); Path inputFilePath = new Path(parameters.inputFilePathStr); FileSystem inputFS = inputFilePath.getFileSystem(new Configuration()); if (!inputFS.exists(inputFilePath)) { log.error("Input file '{}' does not exists!", parameters.inputFilePathStr); mlp.close();/*from ww w . j a va 2 s. c o m*/ return; } Path outputFilePath = new Path(parameters.outputFilePathStr); FileSystem outputFS = inputFilePath.getFileSystem(new Configuration()); if (outputFS.exists(outputFilePath)) { log.error("Output file '{}' already exists!", parameters.outputFilePathStr); mlp.close(); return; } if (!parameters.inputFileFormat.equals("csv")) { log.error("Currently only supports for csv format."); mlp.close(); return; // current only supports csv format } log.info("Read from column {} to column {}.", parameters.columnStart, parameters.columnEnd); BufferedWriter writer = null; BufferedReader reader = null; try { writer = new BufferedWriter(new OutputStreamWriter(outputFS.create(outputFilePath))); reader = new BufferedReader(new InputStreamReader(inputFS.open(inputFilePath))); String line; if (parameters.skipHeader) { reader.readLine(); } while ((line = reader.readLine()) != null) { String[] tokens = CSVUtils.parseLine(line); double[] features = new double[Math.min(parameters.columnEnd, tokens.length) - parameters.columnStart + 1]; for (int i = parameters.columnStart, j = 0; i < Math.min(parameters.columnEnd + 1, tokens.length); ++i, ++j) { features[j] = Double.parseDouble(tokens[i]); } Vector featureVec = new DenseVector(features); Vector res = mlp.getOutput(featureVec); int 
mostProbablyLabelIndex = res.maxValueIndex(); writer.write(String.valueOf(mostProbablyLabelIndex)); } mlp.close(); log.info("Labeling finished."); } finally { Closeables.close(reader, true); Closeables.close(writer, true); } } }
From source file: org.apache.mahout.cf.taste.example.kddcup.track1.svd.Track1SVDRunner.java
/**
 * KDD-Cup track-1 runner: factorizes the training preferences with SGD,
 * reports RMSE over the validation file, then writes one converted byte
 * estimate per test preference to the result file.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.err.println("Necessary arguments: <kddDataFileDirectory> <resultFile>");
        return;
    }
    File dataFileDirectory = new File(args[0]);
    if (!dataFileDirectory.exists() || !dataFileDirectory.isDirectory()) {
        throw new IllegalArgumentException("Bad data file directory: " + dataFileDirectory);
    }
    File resultFile = new File(args[1]);

    /* the knobs to turn */
    int numFeatures = 20;
    int numIterations = 5;
    double learningRate = 0.0001;
    double preventOverfitting = 0.002;
    double randomNoise = 0.0001;

    KDDCupFactorizablePreferences factorizablePreferences = new KDDCupFactorizablePreferences(
            KDDCupDataModel.getTrainingFile(dataFileDirectory));

    Factorizer sgdFactorizer = new ParallelArraysSGDFactorizer(factorizablePreferences, numFeatures,
            numIterations, learningRate, preventOverfitting, randomNoise);

    Factorization factorization = sgdFactorizer.factorize();

    // Pass 1: accumulate squared error over the held-out validation pairs.
    log.info("Estimating validation preferences...");
    int prefsProcessed = 0;
    RunningAverage average = new FullRunningAverage();
    for (Pair<PreferenceArray, long[]> validationPair : new DataFileIterable(
            KDDCupDataModel.getValidationFile(dataFileDirectory))) {
        for (Preference validationPref : validationPair.getFirst()) {
            // Estimates are clamped to the data set's preference range.
            double estimate = estimatePreference(factorization, validationPref.getUserID(),
                    validationPref.getItemID(), factorizablePreferences.getMinPreference(),
                    factorizablePreferences.getMaxPreference());
            double error = validationPref.getValue() - estimate;
            average.addDatum(error * error);
            prefsProcessed++;
            if (prefsProcessed % 100000 == 0) {
                log.info("Computed {} estimations", prefsProcessed);
            }
        }
    }
    log.info("Computed {} estimations, done.", prefsProcessed);
    double rmse = Math.sqrt(average.getAverage());
    log.info("RMSE {}", rmse);

    // Pass 2: write one converted byte per test preference to the result file.
    log.info("Estimating test preferences...");
    OutputStream out = null;
    try {
        out = new BufferedOutputStream(new FileOutputStream(resultFile));
        for (Pair<PreferenceArray, long[]> testPair : new DataFileIterable(
                KDDCupDataModel.getTestFile(dataFileDirectory))) {
            for (Preference testPref : testPair.getFirst()) {
                double estimate = estimatePreference(factorization, testPref.getUserID(), testPref.getItemID(),
                        factorizablePreferences.getMinPreference(), factorizablePreferences.getMaxPreference());
                byte result = EstimateConverter.convert(estimate, testPref.getUserID(), testPref.getItemID());
                out.write(result);
            }
        }
    } finally {
        // swallowIOException=false: failing to close the result file may
        // mean unflushed estimates, so propagate the error.
        Closeables.close(out, false);
    }
    log.info("wrote estimates to {}, done.", resultFile.getAbsolutePath());
}
From source file: org.apache.mahout.classifier.sequencelearning.hmm.BaumWelchTrainer.java
/**
 * CLI for Baum-Welch HMM training: reads integer observations from the input
 * file, trains a randomly initialized HMM until convergence (epsilon) or the
 * iteration cap, serializes the trained model to the output file, and prints
 * the initial-probability vector plus transition and emission matrices.
 */
public static void main(String[] args) throws IOException {
    DefaultOptionBuilder optionBuilder = new DefaultOptionBuilder();
    ArgumentBuilder argumentBuilder = new ArgumentBuilder();
    Option inputOption = DefaultOptionCreator.inputOption().create();
    Option outputOption = DefaultOptionCreator.outputOption().create();
    Option stateNumberOption = optionBuilder.withLongName("nrOfHiddenStates")
            .withDescription("Number of hidden states").withShortName("nh")
            .withArgument(argumentBuilder.withMaximum(1).withMinimum(1).withName("number").create())
            .withRequired(true).create();
    Option observedStateNumberOption = optionBuilder.withLongName("nrOfObservedStates")
            .withDescription("Number of observed states").withShortName("no")
            .withArgument(argumentBuilder.withMaximum(1).withMinimum(1).withName("number").create())
            .withRequired(true).create();
    Option epsilonOption = optionBuilder.withLongName("epsilon").withDescription("Convergence threshold")
            .withShortName("e")
            .withArgument(argumentBuilder.withMaximum(1).withMinimum(1).withName("number").create())
            .withRequired(true).create();
    Option iterationsOption = optionBuilder.withLongName("max-iterations")
            .withDescription("Maximum iterations number").withShortName("m")
            .withArgument(argumentBuilder.withMaximum(1).withMinimum(1).withName("number").create())
            .withRequired(true).create();
    Group optionGroup = new GroupBuilder().withOption(inputOption).withOption(outputOption)
            .withOption(stateNumberOption).withOption(observedStateNumberOption).withOption(epsilonOption)
            .withOption(iterationsOption).withName("Options").create();
    try {
        Parser parser = new Parser();
        parser.setGroup(optionGroup);
        CommandLine commandLine = parser.parse(args);
        String input = (String) commandLine.getValue(inputOption);
        String output = (String) commandLine.getValue(outputOption);
        int nrOfHiddenStates = Integer.parseInt((String) commandLine.getValue(stateNumberOption));
        int nrOfObservedStates = Integer.parseInt((String) commandLine.getValue(observedStateNumberOption));
        double epsilon = Double.parseDouble((String) commandLine.getValue(epsilonOption));
        int maxIterations = Integer.parseInt((String) commandLine.getValue(iterationsOption));

        // constructing random-generated HMM, seeded from the current time
        HmmModel model = new HmmModel(nrOfHiddenStates, nrOfObservedStates, new Date().getTime());
        List<Integer> observations = Lists.newArrayList();

        // reading whitespace-separated integer observations from the input file
        Scanner scanner = new Scanner(new FileInputStream(input), "UTF-8");
        try {
            while (scanner.hasNextInt()) {
                observations.add(scanner.nextInt());
            }
        } finally {
            scanner.close();
        }
        int[] observationsArray = new int[observations.size()];
        for (int i = 0; i < observations.size(); ++i) {
            observationsArray[i] = observations.get(i);
        }

        // training
        HmmModel trainedModel = HmmTrainer.trainBaumWelch(model, observationsArray, epsilon, maxIterations,
                true);

        // serializing trained model
        DataOutputStream stream = new DataOutputStream(new FileOutputStream(output));
        try {
            LossyHmmSerializer.serialize(trainedModel, stream);
        } finally {
            // propagate close failures: an unflushed model file is useless
            Closeables.close(stream, false);
        }

        // printing trained model: header row of state indices, then values
        System.out.println("Initial probabilities: ");
        for (int i = 0; i < trainedModel.getNrOfHiddenStates(); ++i) {
            System.out.print(i + " ");
        }
        System.out.println();
        for (int i = 0; i < trainedModel.getNrOfHiddenStates(); ++i) {
            System.out.print(trainedModel.getInitialProbabilities().get(i) + " ");
        }
        System.out.println();

        System.out.println("Transition matrix:");
        System.out.print(" ");
        for (int i = 0; i < trainedModel.getNrOfHiddenStates(); ++i) {
            System.out.print(i + " ");
        }
        System.out.println();
        // one row per hidden state: row label followed by transition probs
        for (int i = 0; i < trainedModel.getNrOfHiddenStates(); ++i) {
            System.out.print(i + " ");
            for (int j = 0; j < trainedModel.getNrOfHiddenStates(); ++j) {
                System.out.print(trainedModel.getTransitionMatrix().get(i, j) + " ");
            }
            System.out.println();
        }
        System.out.println("Emission matrix: ");
        System.out.print(" ");
        for (int i = 0; i < trainedModel.getNrOfOutputStates(); ++i) {
            System.out.print(i + " ");
        }
        System.out.println();
        // rows: hidden states; columns: output (observed) states
        for (int i = 0; i < trainedModel.getNrOfHiddenStates(); ++i) {
            System.out.print(i + " ");
            for (int j = 0; j < trainedModel.getNrOfOutputStates(); ++j) {
                System.out.print(trainedModel.getEmissionMatrix().get(i, j) + " ");
            }
            System.out.println();
        }
    } catch (OptionException e) {
        // bad/missing CLI arguments: show usage instead of a stack trace
        CommandLineUtil.printHelp(optionGroup);
    }
}
From source file: org.apache.mahout.classifier.mlp.TrainMultilayerPerceptron.java
/**
 * Trains a multilayer perceptron from a CSV file (last column = label),
 * either incrementally updating an existing model or building a fresh
 * topology from the configured layer sizes, then writes the model back
 * to its model path.
 */
public static void main(String[] args) throws Exception {
    Parameters parameters = new Parameters();
    if (parseArgs(args, parameters)) {
        log.info("Validate model...");
        // check whether the model already exists
        Path modelPath = new Path(parameters.modelFilePath);
        FileSystem modelFs = modelPath.getFileSystem(new Configuration());
        MultilayerPerceptron mlp;
        if (modelFs.exists(modelPath) && parameters.updateModel) {
            // incrementally update existing model
            log.info("Build model from existing model...");
            mlp = new MultilayerPerceptron(parameters.modelFilePath);
        } else {
            if (modelFs.exists(modelPath)) {
                modelFs.delete(modelPath, true); // delete the existing file
            }
            log.info("Build model from scratch...");
            mlp = new MultilayerPerceptron();
            // last layer is the output layer (second argument true)
            for (int i = 0; i < parameters.layerSizeList.size(); ++i) {
                if (i != parameters.layerSizeList.size() - 1) {
                    mlp.addLayer(parameters.layerSizeList.get(i), false, parameters.squashingFunctionName);
                } else {
                    mlp.addLayer(parameters.layerSizeList.get(i), true, parameters.squashingFunctionName);
                }
                // NOTE(review): these setter calls are loop-invariant and are
                // re-executed once per layer; presumably harmless overwrites,
                // but they look like they belong outside the loop — confirm.
                mlp.setCostFunction("Minus_Squared");
                mlp.setLearningRate(parameters.learningRate).setMomentumWeight(parameters.momemtumWeight)
                        .setRegularizationWeight(parameters.regularizationWeight);
            }
            mlp.setModelPath(parameters.modelFilePath);
        }

        // set the parameters (again, for the existing-model branch)
        mlp.setLearningRate(parameters.learningRate).setMomentumWeight(parameters.momemtumWeight)
                .setRegularizationWeight(parameters.regularizationWeight);

        // train by the training data
        Path trainingDataPath = new Path(parameters.inputFilePath);
        FileSystem dataFs = trainingDataPath.getFileSystem(new Configuration());
        Preconditions.checkArgument(dataFs.exists(trainingDataPath), "Training dataset %s cannot be found!",
                parameters.inputFilePath);

        log.info("Read data and train model...");
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new InputStreamReader(dataFs.open(trainingDataPath)));
            String line;

            // read training data line by line
            if (parameters.skipHeader) {
                reader.readLine();
            }

            int labelDimension = parameters.labelsIndex.size();
            while ((line = reader.readLine()) != null) {
                String[] token = line.split(",");
                String label = token[token.length - 1];
                int labelIndex = parameters.labelsIndex.get(label);

                // instance layout: features first, then a one-hot label block
                double[] instances = new double[token.length - 1 + labelDimension];
                for (int i = 0; i < token.length - 1; ++i) {
                    instances[i] = Double.parseDouble(token[i]);
                }
                for (int i = 0; i < labelDimension; ++i) {
                    instances[token.length - 1 + i] = 0;
                }
                // set the corresponding dimension
                instances[token.length - 1 + labelIndex] = 1;

                // NOTE(review): viewPart(0, instances.length) spans the whole
                // vector, so it appears to be a no-op view — confirm intent.
                Vector instance = new DenseVector(instances).viewPart(0, instances.length);
                mlp.trainOnline(instance);
            }

            // write model back
            log.info("Write trained model to {}", parameters.modelFilePath);
            mlp.writeModelToFile();
            mlp.close();
        } finally {
            Closeables.close(reader, true);
        }
    }
}
From source file: com.boundary.zoocreeper.Backup.java
public static void main(String[] args) throws IOException, InterruptedException, KeeperException { BackupOptions options = new BackupOptions(); CmdLineParser parser = new CmdLineParser(options); try {//from w w w. ja v a2 s . co m parser.parseArgument(args); if (options.help) { usage(parser, 0); } } catch (CmdLineException e) { if (!options.help) { System.err.println(e.getLocalizedMessage()); } usage(parser, options.help ? 0 : 1); } if (options.verbose) { LoggingUtils.enableDebugLogging(Backup.class.getPackage().getName()); } Backup backup = new Backup(options); OutputStream os; if ("-".equals(options.outputFile)) { os = System.out; } else { os = new BufferedOutputStream(new FileOutputStream(options.outputFile)); } try { if (options.compress) { os = new GZIPOutputStream(os); } backup.backup(os); } finally { os.flush(); Closeables.close(os, true); } }
From source file: eu.interedition.collatex.cli.Engine.java
public static void main(String... args) { final Engine engine = new Engine(); try {//from w w w . ja va 2 s. co m final CommandLine commandLine = new GnuParser().parse(OPTIONS, args); if (commandLine.hasOption("h")) { engine.help(); return; } engine.configure(commandLine).read().collate().write(); } catch (ParseException e) { engine.error("Error while parsing command line arguments", e); engine.log("\n").help(); } catch (IllegalArgumentException e) { engine.error("Illegal argument", e); } catch (IOException e) { engine.error("I/O error", e); } catch (SAXException e) { engine.error("XML error", e); } catch (XPathExpressionException e) { engine.error("XPath error", e); } catch (ScriptException | PluginScript.PluginScriptExecutionException e) { engine.error("Script error", e); } finally { try { Closeables.close(engine, false); } catch (IOException e) { } } }