Usage examples for org.apache.commons.io FileUtils#copyFileToDirectory, which copies a file into a destination directory:
public static void copyFileToDirectory(File srcFile, File destDir) throws IOException
From source file:com.globalsight.everest.workflowmanager.WorkflowManagerLocal.java
/** * Process the files if the source file is with XLZ file format * /*from w w w . j a v a 2 s. c o m*/ * @param p_wf * @author Vincent Yan, 2011/01/27 */ private void processXLZFiles(Workflow p_wf) { if (p_wf == null || p_wf.getAllTargetPages().size() == 0) return; TargetPage tp = null; String externalId = ""; String tmp = "", tmpFile = ""; String sourceFilename = "", targetFilename = ""; String sourceDir = "", targetDir = ""; File sourceFile = null, targetFile = null; File sourcePath = null, targetPath = null; ArrayList<String> xlzFiles = new ArrayList<String>(); try { Vector targetPages = p_wf.getAllTargetPages(); String baseDir = AmbFileStoragePathUtils.getCxeDocDirPath().concat(File.separator); Job job = p_wf.getJob(); String companyId = String.valueOf(job.getCompanyId()); String companyName = CompanyWrapper.getCompanyNameById(companyId); if ("1".equals(CompanyWrapper.getCurrentCompanyId()) && !"1".equals(job.getCompanyId())) { baseDir += companyName + File.separator; } for (int i = 0; i < targetPages.size(); i++) { tp = (TargetPage) targetPages.get(i); externalId = tp.getSourcePage().getExternalPageId(); if (externalId.toLowerCase().endsWith(".xlf") || externalId.toLowerCase().endsWith(".xliff")) { tmp = externalId.substring(0, externalId.lastIndexOf(File.separator)); sourceFilename = baseDir + tmp + ".xlz"; sourceFile = new File(sourceFilename); if (sourceFile.exists() && sourceFile.isFile()) { // source file is with xlz file format targetDir = baseDir + tp.getExportSubDir() + tmp.substring(tmp.indexOf(File.separator)); if (!xlzFiles.contains(targetDir)) xlzFiles.add(targetDir); // Get exported target path targetPath = new File(targetDir); // Get source path sourceDir = baseDir + tmp; sourcePath = new File(sourceDir); // Copy all files extracted from xlz file from source // path to exported target path // Because xliff files can be exported by GS // auotmatically, then ignore them and // just copy the others file to target path File[] files = 
sourcePath.listFiles(); for (File f : files) { if (f.isDirectory()) continue; tmpFile = f.getAbsolutePath().toLowerCase(); if (tmpFile.endsWith(".xlf") || tmpFile.endsWith(".xliff")) continue; FileUtils.copyFileToDirectory(f, targetPath); } } } // Verify if the exported file is generated targetFilename = baseDir + tp.getExportSubDir() + File.separator; targetFilename += externalId.substring(externalId.indexOf(File.separator) + 1); targetFile = new File(targetFilename); while (!targetFile.exists()) { Thread.sleep(1000); } } // Generate exported XLZ file and remove temporary folders for (int i = 0; i < xlzFiles.size(); i++) { targetDir = xlzFiles.get(i); targetPath = new File(targetDir); ZipIt.addEntriesToZipFile(new File(targetDir + ".xlz"), targetPath.listFiles(), true, ""); } } catch (Exception e) { s_logger.error("Error in WorkflowManagerLocal.processXLZFiles. " + e.toString()); } }
From source file:com.atlassian.jira.webtests.JIRAWebTest.java
/**
 * Stages the given file into the "import" directory under the JIRA home.
 * File names already copied (tracked in {@code copiedFiles}) are skipped,
 * so repeated calls for the same name are no-ops.
 */
protected void copyFileToJiraImportDirectory(File file) {
    final String name = file.getName();
    if (copiedFiles.contains(name)) {
        return; // already staged by an earlier call
    }
    final File jiraImportDirectory = new File(administration.getJiraHomeDirectory(), "import");
    try {
        FileUtils.copyFileToDirectory(file, jiraImportDirectory);
        copiedFiles.add(name);
    } catch (IOException e) {
        throw new RuntimeException("Could not copy file " + file.getAbsolutePath()
                + " to the import directory in jira home " + jiraImportDirectory, e);
    }
}
From source file:de.main.sessioncreator.DesktopApplication1View.java
private void moveto(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_moveto if (reviewbtnMove.isEnabled() && reviewbtntopMove.isEnabled()) { String approvedDir = fileHelper.getApprovedDir(); String session = reviewSessionsTabp.getTitleAt(1).substring(10); try {// w w w . j a v a 2 s .co m File file = new File(session); // Destination directory File dir = new File(approvedDir); // Move file to new directory // file.renameTo(new File(dir, file.getName())); FileUtils.copyFileToDirectory(file, dir); file.delete(); JOptionPane.showMessageDialog(null, "File moved to " + approvedDir); } catch (Exception e) { JOptionPane.showMessageDialog(null, "Failed moving file " + e); } } }
From source file:org.accesointeligente.server.robots.ResponseChecker.java
/**
 * Creates a persistent copy of the given attachment: a new Attachment row is
 * saved first so its generated id can name the destination directory, the
 * backing file is copied there, and name/type/url are then filled in from the
 * original and persisted.
 *
 * @param attachment the attachment to clone
 * @return the new attachment, or null if the file copy failed (the freshly
 *         created row is deleted again in that case)
 */
private Attachment cloneAttachment(Attachment attachment) {
    org.hibernate.Session hibernate = HibernateUtil.getSession();
    hibernate.beginTransaction();
    Attachment newAttachment = new Attachment();
    // Save first to obtain the generated id used as the directory name.
    hibernate.save(newAttachment);
    hibernate.getTransaction().commit();
    File oldFile = new File(ApplicationProperties.getProperty("attachment.directory")
            + attachment.getId().toString() + "/" + attachment.getName());
    File newDirectory = new File(
            ApplicationProperties.getProperty("attachment.directory") + newAttachment.getId().toString());
    try {
        // Fix: mkdir()'s result was previously ignored; a failed mkdir now
        // surfaces here instead of as a confusing copy error.
        if (!newDirectory.mkdir() && !newDirectory.isDirectory()) {
            throw new RuntimeException("Could not create directory " + newDirectory.getAbsolutePath());
        }
        FileUtils.copyFileToDirectory(oldFile, newDirectory);
    } catch (Exception e) {
        // Roll back the half-finished clone. Fix: the original code deleted
        // the *source* attachment here; the orphaned new row is what must be
        // removed.
        hibernate = HibernateUtil.getSession();
        hibernate.beginTransaction();
        hibernate.delete(newAttachment);
        hibernate.getTransaction().commit();
        logger.error("Error saving " + newDirectory.getAbsolutePath() + "/" + attachment.getName(), e);
        return null;
    }
    String baseUrl = ApplicationProperties.getProperty("attachment.baseurl") + newAttachment.getId().toString();
    newAttachment.setName(attachment.getName());
    newAttachment.setType(attachment.getType());
    newAttachment.setUrl(baseUrl + "/" + newAttachment.getName());
    hibernate = HibernateUtil.getSession();
    hibernate.beginTransaction();
    hibernate.update(newAttachment);
    hibernate.getTransaction().commit();
    return newAttachment;
}
From source file:org.alex73.osm.validators.vioski.Export.java
/**
 * Builds the "vioski" (villages) review data set: reads the settlements CSV
 * and region definitions, groups settlement entries by district, collects OSM
 * "place" nodes, and writes a JavaScript data file plus the static viewer
 * files into {@code <out.dir>/vioski}.
 *
 * @param args unused
 * @throws Exception on any I/O or parsing failure (tool-style main)
 */
public static void main(String[] args) throws Exception {
    RehijonyLoad.load(Env.readProperty("dav") + "/Rehijony.xml");
    osm = new Belarus();
    String dav = Env.readProperty("dav") + "/Nazvy_nasielenych_punktau.csv";
    List<Miesta> daviednik = new CSV('\t').readCSV(dav, Miesta.class);
    // District name -> settlement entries; trailing space keeps plain names
    // distinct from the "<...>" markers.
    Map<String, List<Mdav>> rajony = new TreeMap<>();
    for (Miesta m : daviednik) {
        String r = m.rajon.startsWith("<") ? m.rajon : m.rajon + " ";
        List<Mdav> list = rajony.get(r);
        if (list == null) {
            list = new ArrayList<>();
            rajony.put(r, list);
        }
        Mdav mm = new Mdav();
        mm.osmID = m.osmID;
        mm.ss = m.sielsaviet;
        mm.why = m.osmComment;
        mm.nameBe = m.nazvaNoStress;
        mm.nameRu = m.ras;
        mm.varyjantBe = m.varyjantyBel;
        mm.varyjantRu = m.rasUsedAsOld;
        list.add(mm);
    }
    placeTag = osm.getTagsPack().getTagCode("place");
    // Collect every populated-place node (islands/islets excluded).
    osm.byTag("place",
            o -> o.isNode() && !o.getTag(placeTag).equals("island") && !o.getTag(placeTag).equals("islet"),
            o -> processNode((IOsmNode) o));
    String outDir = Env.readProperty("out.dir");
    File foutDir = new File(outDir + "/vioski");
    foutDir.mkdirs();
    // District name (be) -> OSM object name, for the region selector.
    Map<String, String> padzielo = new TreeMap<>();
    for (Voblasc v : RehijonyLoad.kraina.getVoblasc()) {
        for (Rajon r : v.getRajon()) {
            padzielo.put(r.getNameBe(), osm.getObject(r.getOsmID()).getTag("name", osm));
        }
    }
    ObjectMapper om = new ObjectMapper();
    String o = "var data={};\n";
    o += "data.dav=" + om.writeValueAsString(rajony) + "\n";
    o += "data.map=" + om.writeValueAsString(map) + "\n";
    o += "data.padziel=" + om.writeValueAsString(padzielo) + "\n";
    // Fix: write with an explicit charset -- the charset-less overload uses
    // the platform default encoding, which corrupts the Belarusian text on
    // non-UTF-8 systems.
    FileUtils.writeStringToFile(new File(outDir + "/vioski/data.js"), o, "UTF-8");
    FileUtils.copyFileToDirectory(new File("vioski/control.js"), foutDir);
    FileUtils.copyFileToDirectory(new File("vioski/vioski.html"), foutDir);
}
From source file:org.apache.accumulo.test.AuditMessageIT.java
/**
 * Verifies that table export/import operations (export, import, bulk import,
 * offline/online) performed by a privileged user each produce exactly one
 * audit message.
 */
@Test
public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException,
        TableExistsException, TableNotFoundException, IOException, InterruptedException {

    // Create the audit user with full system privileges.
    conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD));
    conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM);
    conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths);
    grantEverySystemPriv(conn, AUDIT_USER_1);

    // Connect as Audit User and do a bunch of stuff.
    // Start testing activities here
    auditConnector = getCluster().getConnector(AUDIT_USER_1, new PasswordToken(PASSWORD));
    auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME);

    // Insert some play data
    BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig());
    Mutation m = new Mutation("myRow");
    m.put("cf1", "cq1", "v1");
    m.put("cf1", "cq2", "v3");
    bw.addMutation(m);
    bw.close();

    // Prepare to export the table (tables must be offline to export).
    File exportDir = new File(getCluster().getConfig().getDir().toString() + "/export");
    auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME);
    auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString());

    // We've exported the table metadata to the MiniAccumuloCluster root dir.
    // Grab the .rf file path to re-import it.
    File distCpTxt = new File(exportDir.toString() + "/distcp.txt");
    File importFile = null;
    LineIterator it = FileUtils.lineIterator(distCpTxt, UTF_8.name());

    // Just grab the first rf file, it will do for now.
    String filePrefix = "file:";
    try {
        while (it.hasNext() && importFile == null) {
            String line = it.nextLine();
            if (line.matches(".*\\.rf")) {
                // Strip the "file:" URI prefix to get a filesystem path.
                importFile = new File(line.replaceFirst(filePrefix, ""));
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    FileUtils.copyFileToDirectory(importFile, exportDir);
    auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString());

    // Now do a Directory (bulk) import of the same data.
    auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME);
    File failDir = new File(exportDir + "/tmp");
    assertTrue(failDir.mkdirs() || failDir.isDirectory());
    auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(),
            failDir.toString(), false);
    auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME);

    // Stop testing activities here

    ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits");

    // Exactly one audit record is expected for each privileged operation
    // performed above: create, offline, export, import, create, bulk import,
    // online.
    assertEquals(1, findAuditMessage(auditMessages, String
            .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME))
            .size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "offlineTable",
                    OLD_TEST_TABLE_NAME)).size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME,
                    exportDir.toString())).size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE, NEW_TEST_TABLE_NAME,
                    filePrefix + exportDir.toString())).size());
    assertEquals(1, findAuditMessage(auditMessages, String
            .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME))
            .size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME,
                    filePrefix + exportDir.toString(), filePrefix + failDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE,
                            "onlineTable", OLD_TEST_TABLE_NAME)).size());
}
From source file:org.apache.accumulo.test.AuditMessageTest.java
/**
 * Verifies that table export/import operations (export, import, bulk import,
 * offline/online) performed by a privileged user each produce exactly one
 * audit message.
 */
@Test(timeout = 60 * 1000)
public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException,
        TableExistsException, TableNotFoundException, IOException, InterruptedException {

    // Create the audit user with full system privileges.
    conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD));
    conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM);
    conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths);
    grantEverySystemPriv(conn, AUDIT_USER_1);

    // Connect as Audit User and do a bunch of stuff.
    // Start testing activities here
    auditConnector = accumulo.getConnector(AUDIT_USER_1, PASSWORD);
    auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME);

    // Insert some play data
    BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig());
    Mutation m = new Mutation("myRow");
    m.put("cf1", "cq1", "v1");
    m.put("cf1", "cq2", "v3");
    bw.addMutation(m);
    bw.close();

    // Prepare to export the table (tables must be offline to export).
    File exportDir = new File(accumulo.getConfig().getDir().toString() + "/export");
    auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME);
    auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString());

    // We've exported the table metadata to the MiniAccumuloCluster root dir.
    // Grab the .rf file path to re-import it.
    File distCpTxt = new File(exportDir.toString() + "/distcp.txt");
    File importFile = null;
    LineIterator it = FileUtils.lineIterator(distCpTxt, Constants.UTF8.name());

    // Just grab the first rf file, it will do for now.
    String filePrefix = "file:";
    try {
        while (it.hasNext() && importFile == null) {
            String line = it.nextLine();
            if (line.matches(".*\\.rf")) {
                // Strip the "file:" URI prefix to get a filesystem path.
                importFile = new File(line.replaceFirst(filePrefix, ""));
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    FileUtils.copyFileToDirectory(importFile, exportDir);
    auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString());

    // Now do a Directory (bulk) import of the same data.
    auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME);
    File failDir = new File(exportDir + "/tmp");
    // NOTE(review): mkdirs() result is ignored here; a failed mkdirs would
    // only surface later as an importDirectory error.
    failDir.mkdirs();
    auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(),
            failDir.toString(), false);
    auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME);

    // Stop testing activities here

    ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits");

    // Exactly one audit record is expected for each privileged operation
    // performed above: create, offline, export, import, create, bulk import,
    // online.
    assertEquals(1, findAuditMessage(auditMessages, String
            .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME))
            .size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "offlineTable",
                    OLD_TEST_TABLE_NAME)).size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME,
                    exportDir.toString())).size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE, NEW_TEST_TABLE_NAME,
                    exportDir.toString())).size());
    assertEquals(1, findAuditMessage(auditMessages, String
            .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME))
            .size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME,
                    filePrefix + exportDir.toString(), filePrefix + failDir.toString())).size());
    assertEquals(1, findAuditMessage(auditMessages,
            String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "onlineTable",
                    OLD_TEST_TABLE_NAME)).size());
}
From source file:org.apache.airavata.gfac.hadoop.handler.HadoopDeploymentHandler.java
private File getWhirrConfigurationFile(HostDescription hostDescription, File workingDirectory) throws GFacHandlerException, IOException { HadoopHostType hadoopHostDesc = (HadoopHostType) hostDescription; if (hadoopHostDesc.isSetWhirrConfiguration()) { HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration(); if (whirrConfig.isSetConfigurationFile()) { File whirrConfigFile = new File(whirrConfig.getConfigurationFile()); if (!whirrConfigFile.exists()) { throw new GFacHandlerException("Specified whirr configuration file doesn't exists."); }/*from w ww. j a va2 s . c o m*/ FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory); return new File(workingDirectory, whirrConfigFile.getName()); } else if (whirrConfig.isSetConfiguration()) { Properties whirrConfigProps = whirrConfigurationsToProperties(whirrConfig.getConfiguration()); File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config"); whirrConfigProps.store(new FileOutputStream(whirrConfigFile), null); return whirrConfigFile; } } throw new GFacHandlerException("Cannot find Whirr configurations. Whirr configuration " + "is required if you don't have already running Hadoop deployment."); }
From source file:org.apache.archiva.converter.artifact.LegacyToDefaultConverterTest.java
private void copyDirectoryStructure(File sourceDirectory, File destinationDirectory) throws IOException { if (!sourceDirectory.exists()) { throw new IOException("Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")."); }// ww w . j a va2 s .c om File[] files = sourceDirectory.listFiles(); String sourcePath = sourceDirectory.getAbsolutePath(); for (int i = 0; i < files.length; i++) { File file = files[i]; String dest = file.getAbsolutePath(); dest = dest.substring(sourcePath.length() + 1); File destination = new File(destinationDirectory, dest); if (file.isFile()) { destination = destination.getParentFile(); FileUtils.copyFileToDirectory(file, destination); } else if (file.isDirectory()) { if (!".svn".equals(file.getName())) { if (!destination.exists() && !destination.mkdirs()) { throw new IOException( "Could not create destination directory '" + destination.getAbsolutePath() + "'."); } copyDirectoryStructure(file, destination); } } else { throw new IOException("Unknown file type: " + file.getAbsolutePath()); } } }
From source file:org.apache.archiva.policies.ChecksumPolicyTest.java
private File createTestableFiles(String md5State, String sha1State) throws Exception { File sourceDir = getTestFile("src/test/resources/checksums/"); File destDir = getTestFile("target/checksum-tests/" + name.getMethodName() + "/"); FileUtils.copyFileToDirectory(new File(sourceDir, "artifact.jar"), destDir); if (md5State != null) { File md5File = new File(sourceDir, "artifact.jar.md5-" + md5State); assertTrue("Testable file exists: " + md5File.getName() + ":", md5File.exists() && md5File.isFile()); File destFile = new File(destDir, "artifact.jar.md5"); FileUtils.copyFile(md5File, destFile); }/*from w ww .j a v a 2 s . c o m*/ if (sha1State != null) { File sha1File = new File(sourceDir, "artifact.jar.sha1-" + sha1State); assertTrue("Testable file exists: " + sha1File.getName() + ":", sha1File.exists() && sha1File.isFile()); File destFile = new File(destDir, "artifact.jar.sha1"); FileUtils.copyFile(sha1File, destFile); } File localFile = new File(destDir, "artifact.jar"); return localFile; }