List of usage examples for com.google.common.io.Resources#copy
public static void copy(URL from, OutputStream to) throws IOException
From source file:com.ning.killbill.generators.ruby.JRubyPluginGenerator.java
private void generateStaticClass(final String className, final File outputDir) throws GeneratorException { OutputStream out = null;// w w w .j av a 2 s.c o m try { final String resourceName = createFileName(className, true); final URL classUrl = Resources.getResource(resourceName); final File output = new File(outputDir, resourceName); out = new FileOutputStream(output); Resources.copy(classUrl, out); } catch (IOException e) { throw new GeneratorException("Failed to generate file " + className, e); } finally { if (out != null) { try { out.close(); } catch (IOException ignore) { } } } }
From source file:dk.dma.epd.common.prototype.Bootstrap.java
protected void unpackFolderToAppHome(String folder) throws IOException { ApplicationContext context = new ClassPathXmlApplicationContext(); // we do not support recursive folders Resource[] xmlResources = context.getResources("classpath:/" + folder + "/*.*"); Path f = home.resolve(folder); if (!Files.exists(f)) { Files.createDirectories(f); }/*from www . j av a 2 s . c o m*/ for (Resource r : xmlResources) { Path destination = f.resolve(r.getFilename()); if (!Files.exists(destination)) { Resources.copy(r.getURL(), Files.newOutputStream(destination)); } } }
From source file:org.fusesource.process.manager.support.JarInstaller.java
/**
 * Downloads the artifact at {@code url} into a freshly created temporary
 * {@code .jar} file.
 *
 * @param url location of the artifact to fetch
 * @return temporary file containing a copy of the artifact's bytes
 * @throws IOException if the temporary file cannot be created
 */
private File getArtifactFile(URL url) throws IOException {
    File tmpFile = File.createTempFile("artifact", ".jar");
    try (FileOutputStream fos = new FileOutputStream(tmpFile)) {
        Resources.copy(url, fos);
    } catch (Exception ex) {
        // Bug fix: the original called Throwables.propagate(ex) WITHOUT 'throw',
        // so as far as the compiler is concerned the method fell through and
        // returned a partially written file. 'throw' makes the failure explicit.
        // (try-with-resources also replaces the deprecated Closeables.closeQuietly.)
        throw Throwables.propagate(ex);
    }
    return tmpFile;
}
From source file:com.jejking.hh.nord.corpus.DrucksachenHtmlFetcher.java
/**
 * Schedules tasks to fetch all the specified URLs leaving a random duration between the
 * execution of each task (at least 2, at most 10 seconds apart), then shuts down the
 * executor and blocks until all scheduled downloads have finished (or the estimated
 * total delay plus 60 seconds has elapsed).
 *
 * @param urlsToFetch      URLs to download; ones already present on disk are skipped
 * @param storageDirectory directory where each URL is stored as a gzipped file
 */
public void fetchUrls(final ImmutableList<URL> urlsToFetch, final Path storageDirectory) {
    Observable<Runnable> tasks = Observable.from(urlsToFetch).filter(new Func1<URL, Boolean>() {

        @Override
        public Boolean call(URL url) {
            String encodedUrl = fileNameFromUrl(url) + ".gz";
            Path filePath = storageDirectory.resolve(encodedUrl);
            // retain only URLs for which we have no record yet so as not to download them twice
            return Files.notExists(filePath, LinkOption.NOFOLLOW_LINKS);
        }
    }).map(new Func1<URL, Runnable>() {

        @Override
        public Runnable call(final URL url) {
            // Each URL becomes a deferred download task; the actual network I/O
            // happens later on the scheduled executor, not here.
            return new Runnable() {

                @Override
                public void run() {
                    try {
                        File target = storageDirectory.resolve(fileNameFromUrl(url) + ".gz").toFile();
                        // try-with-resources closes the whole gzip/buffered/file stream chain
                        try (GzipCompressorOutputStream outputStream = new GzipCompressorOutputStream(
                                new BufferedOutputStream(new FileOutputStream(target)))) {
                            Resources.copy(url, outputStream);
                            System.out.println("Copied " + url + " to " + target);
                        }
                    } catch (IOException e) {
                        // NOTE(review): failures are only printed; the URL will be
                        // retried on the next invocation since no file was recorded.
                        e.printStackTrace();
                    }
                }
            };
        }; // NOTE(review): stray ';' after the method body — legal in Java, but unnecessary
    });
    // Subscription is synchronous here (plain Observable.from), so all scheduling
    // below completes before the println/shutdown code runs.
    tasks.subscribe(new Action1<Runnable>() {
        Random random = new Random();
        long cumulativeDelayInSeconds = 0;
        int count = 0;

        @Override
        public void call(Runnable runnable) {
            count++;
            DrucksachenHtmlFetcher.this.scheduledExecutorService.schedule(runnable, cumulativeDelayInSeconds,
                    TimeUnit.SECONDS);
            // at least two seconds, at most 10
            cumulativeDelayInSeconds = cumulativeDelayInSeconds + 2 + random.nextInt(9);
            // single-element arrays act as mutable holders visible to the enclosing instance
            DrucksachenHtmlFetcher.this.totalDelayHolder[0] = cumulativeDelayInSeconds;
            DrucksachenHtmlFetcher.this.actualCount[0] = count;
        }
    });
    System.out.println("Scheduled " + actualCount[0] + " tasks");
    System.out.println("Estimated duration " + totalDelayHolder[0] + " seconds");
    try {
        // shutdown() stops new submissions but lets already-scheduled tasks run
        this.scheduledExecutorService.shutdown();
        // + 60 to allow task to finish comfortably...
        boolean finishedOK = this.scheduledExecutorService.awaitTermination(this.totalDelayHolder[0] + 60,
                TimeUnit.SECONDS);
        if (finishedOK) {
            System.out.println("Finished all tasks. Scheduled executor service shutdown.");
        } else {
            System.out.println("Executor service shutdown, but not all tasks completed.");
        }
    } catch (InterruptedException e) {
        // NOTE(review): interrupt status is not restored here — consider
        // Thread.currentThread().interrupt(); left unchanged to preserve behavior.
        e.printStackTrace();
    }
}
From source file:com.gnapse.metric.app.Main.java
/** * Extracts the resource file with the given name to the application folder, if the target file * does not exist./* ww w .j ava 2 s .com*/ * * @param resourceName the name of the resource file to copy to the application folder * @return a reference to the resulting file * @throws IOException if an I/O error occurs while performing the copy */ private static File copyResource(String resourceName) throws IOException { final File destFile = new File(String.format("%s/%s", APP_FOLDER, resourceName)); if (!destFile.exists()) { Files.createParentDirs(destFile); final OutputStream out = new FileOutputStream(destFile); try { Resources.copy(Resources.getResource(resourceName), out); } finally { out.close(); } } return destFile; }
From source file:com.kheafield.kenlm.NativeLibrary.java
/**
 * Copies the resource at {@code url} into a temporary file (named after the original,
 * deleted on JVM exit) and returns a URL pointing at that file.
 *
 * @param url resource to extract
 * @return file: URL of the extracted temporary copy
 * @throws IOException if the temporary file cannot be created or written
 */
private static URL extractResource(URL url) throws IOException {
    String path = url.getFile();
    String name = com.google.common.io.Files.getNameWithoutExtension(path);
    String extension = com.google.common.io.Files.getFileExtension(path);
    File tmpFile = Files.createTempFile(name + "-", "." + extension).toFile();
    tmpFile.deleteOnExit();
    // Resources.copy does not close the sink; the original leaked the stream
    // returned by Files.newOutputStream. Close it deterministically.
    try (java.io.OutputStream out = Files.newOutputStream(tmpFile.toPath())) {
        Resources.copy(url, out);
    }
    return tmpFile.toURI().toURL();
}
From source file:dk.dma.epd.common.prototype.Bootstrap.java
/**
 * Copies the classpath resource {@code /filename} into the application home,
 * unless a file of that name already exists there.
 *
 * @param filename resource name, resolved both on the classpath and under {@code home}
 * @throws IOException if the copy fails
 */
protected void unpackToAppHome(String filename) throws IOException {
    Path destination = home.resolve(filename);
    if (!Files.exists(destination)) {
        URL url = getClass().getResource("/" + filename);
        if (url == null) {
            throw new Error("Missing file src/resources/" + filename);
        }
        // Resources.copy does not close the sink; the original leaked the
        // output stream. try-with-resources closes it on every path.
        try (java.io.OutputStream out = Files.newOutputStream(destination)) {
            Resources.copy(url, out);
        }
    }
}
From source file:com.ning.killbill.generators.ClientLibraryBaseGenerator.java
/**
 * Writes the license resource (named by {@code getLicense()}) into {@code output}.
 *
 * @param output file to (over)write with the license text
 * @throws GeneratorException if the license resource is missing or cannot be written
 */
protected void writeLicense(final File output) throws GeneratorException {
    try {
        final URL licenseUrl = Resources.getResource(getLicense());
        // Bug fix: the original never closed this stream (no finally, no
        // try-with-resources) — a definite file-handle leak per call.
        try (OutputStream out = new FileOutputStream(output)) {
            Resources.copy(licenseUrl, out);
        }
    } catch (IllegalArgumentException e) {
        throw new GeneratorException("Cannot find license file " + getLicense(), e);
    } catch (IOException e) {
        throw new GeneratorException("Failed to write license file " + getLicense(), e);
    }
}
From source file:org.apache.sentry.tests.e2e.hiveserver.HiveServerFactory.java
/**
 * Builds a fully configured {@link HiveServer} of the requested {@code type}:
 * fills in default properties (warehouse dir, metastore URL, authz settings,
 * port, hadoop binary), materializes the policy file, writes hive-site.xml and
 * the access-site.xml it points at into {@code confDir}, and wires HiveConf to
 * those files via reflection before instantiating the server.
 *
 * @param type       which HiveServer2 flavor to create
 * @param properties mutable configuration map; missing keys are populated with defaults
 * @param baseDir    base working directory (warehouse/metastore live beneath it)
 * @param confDir    directory receiving the generated hive-site.xml / access-site.xml
 * @param logDir     log directory (used only by the external server type)
 * @param policyFile authorization policy file; created from a bundled resource if absent
 * @param fileSystem target filesystem; a DistributedFileSystem changes the warehouse default
 * @throws Exception on any configuration or I/O failure
 */
private static HiveServer create(HiveServer2Type type, Map<String, String> properties, File baseDir,
        File confDir, File logDir, File policyFile, FileSystem fileSystem) throws Exception {
    // Default the warehouse location: on HDFS use <dfsUri>/data, locally use baseDir/warehouse.
    if (!properties.containsKey(WAREHOUSE_DIR)) {
        LOGGER.error("fileSystem " + fileSystem.getClass().getSimpleName());
        if (fileSystem instanceof DistributedFileSystem) {
            @SuppressWarnings("static-access")
            String dfsUri = fileSystem.getDefaultUri(fileSystem.getConf()).toString();
            LOGGER.error("dfsUri " + dfsUri);
            properties.put(WAREHOUSE_DIR, dfsUri + "/data");
        } else {
            properties.put(WAREHOUSE_DIR, new File(baseDir, "warehouse").getPath());
        }
    }
    // Embedded Derby metastore by default, created on first use.
    if (!properties.containsKey(METASTORE_CONNECTION_URL)) {
        properties.put(METASTORE_CONNECTION_URL,
                String.format("jdbc:derby:;databaseName=%s;create=true", new File(baseDir, "metastore").getPath()));
    }
    // Seed the policy file from the bundled resource if the caller did not provide one.
    if (policyFile.exists()) {
        LOGGER.info("Policy file " + policyFile + " exists");
    } else {
        LOGGER.info("Creating policy file " + policyFile);
        // NOTE(review): 'to' is not closed if Resources.copy throws — consider
        // try-with-resources; left unchanged here.
        FileOutputStream to = new FileOutputStream(policyFile);
        Resources.copy(Resources.getResource(AUTHZ_PROVIDER_FILENAME), to);
        to.close();
    }
    // Remaining defaults are applied only when the caller has not set them.
    if (!properties.containsKey(ACCESS_TESTING_MODE)) {
        properties.put(ACCESS_TESTING_MODE, "true");
    }
    if (!properties.containsKey(AUTHZ_PROVIDER_RESOURCE)) {
        properties.put(AUTHZ_PROVIDER_RESOURCE, policyFile.getPath());
    }
    if (!properties.containsKey(AUTHZ_PROVIDER)) {
        properties.put(AUTHZ_PROVIDER, LocalGroupResourceAuthorizationProvider.class.getName());
    }
    if (!properties.containsKey(AUTHZ_SERVER_NAME)) {
        properties.put(AUTHZ_SERVER_NAME, DEFAULT_AUTHZ_SERVER_NAME);
    }
    if (!properties.containsKey(HS2_PORT)) {
        properties.put(HS2_PORT, String.valueOf(findPort()));
    }
    if (!properties.containsKey(SUPPORT_CONCURRENCY)) {
        properties.put(SUPPORT_CONCURRENCY, "false");
    }
    if (!properties.containsKey(HADOOPBIN)) {
        properties.put(HADOOPBIN, "./target/hadoop/bin/hadoop");
    }
    String hadoopBinPath = properties.get(HADOOPBIN);
    Assert.assertNotNull(hadoopBinPath, "Hadoop Bin");
    File hadoopBin = new File(hadoopBinPath);
    if (!hadoopBin.isFile()) {
        // NOTE(review): message is missing a space before "is invalid" — cosmetic,
        // left byte-identical here.
        Assert.fail("Path to hadoop bin " + hadoopBin.getPath() + "is invalid. "
                + "Perhaps you missed the download-hadoop profile.");
    }
    /*
     * This hack, setting the hiveSiteURL field removes a previous hack involving
     * setting of system properties for each property. Although both are hacks,
     * I prefer this hack because once the system properties are set they can
     * affect later tests unless those tests clear them. This hack allows for
     * a clean switch to a new set of defaults when a new HiveConf object is created.
     */
    Reflection.staticField("hiveSiteURL").ofType(URL.class).in(HiveConf.class).set(null);
    HiveConf hiveConf = new HiveConf();
    HiveAuthzConf authzConf = new HiveAuthzConf(Resources.getResource("sentry-site.xml"));
    // Mirror every property into both configurations.
    for (Map.Entry<String, String> entry : properties.entrySet()) {
        LOGGER.info(entry.getKey() + " => " + entry.getValue());
        hiveConf.set(entry.getKey(), entry.getValue());
        authzConf.set(entry.getKey(), entry.getValue());
    }
    File hiveSite = new File(confDir, "hive-site.xml");
    File accessSite = new File(confDir, HiveAuthzConf.AUTHZ_SITE_FILE);
    // NOTE(review): these streams are not closed if writeXml throws; left as-is.
    OutputStream out = new FileOutputStream(accessSite);
    authzConf.writeXml(out);
    out.close();
    // points hive-site.xml at access-site.xml
    hiveConf.set(HiveAuthzConf.HIVE_ACCESS_CONF_URL, accessSite.toURI().toURL().toExternalForm());
    if (!properties.containsKey(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname)) {
        hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
                "org.apache.sentry.binding.hive.HiveAuthzBindingSessionHook");
    }
    out = new FileOutputStream(hiveSite);
    hiveConf.writeXml(out);
    out.close();
    // Point future HiveConf instances at the hive-site.xml just written.
    Reflection.staticField("hiveSiteURL").ofType(URL.class).in(HiveConf.class).set(hiveSite.toURI().toURL());
    switch (type) {
    case EmbeddedHiveServer2:
        LOGGER.info("Creating EmbeddedHiveServer");
        return new EmbeddedHiveServer();
    case InternalHiveServer2:
        LOGGER.info("Creating InternalHiveServer");
        return new InternalHiveServer(hiveConf);
    case ExternalHiveServer2:
        LOGGER.info("Creating ExternalHiveServer");
        return new ExternalHiveServer(hiveConf, confDir, logDir);
    case UnmanagedHiveServer2:
        LOGGER.info("Creating UnmanagedHiveServer");
        return new UnmanagedHiveServer(hiveConf);
    default:
        throw new UnsupportedOperationException(type.name());
    }
}
From source file:com.streamsets.datacollector.antennadoctor.storage.AntennaDoctorStorage.java
/**
 * Initializes the rule repository: ensures the repository directory and the
 * bundled rule database exist on disk, optionally starts either the override-file
 * polling thread or the periodic remote-update thread (mutually exclusive,
 * override wins), and finally loads the rules into the delegate.
 * On I/O failure the task aborts after logging — no threads are started.
 */
@Override
protected void initTask() {
    LOG.info("Repository location: {}", repositoryDirectory);
    try {
        // Make sure that we have our own directory to operate in
        if (!Files.exists(repositoryDirectory)) {
            Files.createDirectories(repositoryDirectory);
        }
        // Seed the rule database from the bundled classpath resource on first run.
        Path store = repositoryDirectory.resolve(AntennaDoctorConstants.FILE_DATABASE);
        if (!Files.exists(store)) {
            try (OutputStream stream = Files.newOutputStream(store)) {
                Resources.copy(
                        Resources.getResource(AntennaDoctorStorage.class, AntennaDoctorConstants.FILE_DATABASE),
                        stream);
            }
        }
    } catch (IOException e) {
        LOG.error("Cant initialize repository: {}", e.getMessage(), e);
        return;
    }
    // Schedule override runnable if allowed in configuration
    if (configuration.get(AntennaDoctorConstants.CONF_OVERRIDE_ENABLE,
            AntennaDoctorConstants.DEFAULT_OVERRIDE_ENABLE)) {
        LOG.info("Enabling polling of {} to override the rule database", AntennaDoctorConstants.FILE_OVERRIDE);
        this.executorService = Executors.newSingleThreadExecutor();
        this.overrideRunnable = new OverrideFileRunnable();
        this.future = executorService.submit(this.overrideRunnable);
    }
    // Remote repo handling — skipped entirely when the override poller is active,
    // since both would otherwise race to replace executorService/future.
    if (configuration.get(AntennaDoctorConstants.CONF_UPDATE_ENABLE,
            AntennaDoctorConstants.DEFAULT_UPDATE_ENABLE)) {
        if (overrideRunnable != null) {
            LOG.info("Using override, not starting update thread.");
        } else {
            this.executorService = Executors.newSingleThreadScheduledExecutor();
            this.updateRunnable = new UpdateRunnable();
            // Periodic update: initial delay and period come from configuration, in minutes.
            this.future = ((ScheduledExecutorService) executorService).scheduleAtFixedRate(updateRunnable,
                    configuration.get(AntennaDoctorConstants.CONF_UPDATE_DELAY,
                            AntennaDoctorConstants.DEFAULT_UPDATE_DELAY),
                    configuration.get(AntennaDoctorConstants.CONF_UPDATE_PERIOD,
                            AntennaDoctorConstants.DEFAULT_UPDATE_PERIOD),
                    TimeUnit.MINUTES);
        }
    }
    // And finally load rules
    delegate.loadNewRules(loadRules());
}