List of usage examples for org.apache.commons.io.FileUtils.moveFile(File, File)
public static void moveFile(File srcFile, File destFile) throws IOException
From source file:io.hops.hopsworks.api.certs.CertSigningService.java
private CsrDTO signCSR(String hostId, String commandId, String csr, boolean rotation, boolean isAppCertificate) throws AppException { try {/*from www . j a v a2s .c o m*/ // If there is a certificate already for that host, rename it to .TO_BE_REVOKED.COMMAND_ID // When AgentResource has received a successful response for the key rotation, revoke and delete it if (rotation) { File certFile = Paths.get(settings.getIntermediateCaDir(), "certs", hostId + CertificatesMgmService.CERTIFICATE_SUFFIX).toFile(); if (certFile.exists()) { File destination = Paths .get(settings.getIntermediateCaDir(), "certs", hostId + serviceCertificateRotationTimer.getToBeRevokedSuffix(commandId)) .toFile(); try { FileUtils.moveFile(certFile, destination); } catch (FileExistsException ex) { FileUtils.deleteQuietly(destination); FileUtils.moveFile(certFile, destination); } } } String agentCert = opensslOperations.signCertificateRequest(csr, true, true, isAppCertificate); File caCertFile = Paths.get(settings.getIntermediateCaDir(), "certs", "ca-chain.cert.pem").toFile(); String caCert = Files.toString(caCertFile, Charset.defaultCharset()); return new CsrDTO(caCert, agentCert, settings.getHadoopVersionedDir()); } catch (IOException ex) { String errorMsg = "Error while signing CSR for host " + hostId + " Reason: " + ex.getMessage(); logger.log(Level.SEVERE, errorMsg, ex); throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), errorMsg); } }
From source file:com.sdl.dxa.modules.degrees51.api.Degrees51DataProvider.java
private boolean updateLiteFileInternal() { File liteFile = new File(liteFileLocation); try {//from www .j av a 2s.c om File temp = new File(liteFileLocation + UUID.randomUUID()); FileUtils.copyURLToFile(new URL(degrees51DataLiteUrl), temp, fileLiteUpdateTimeoutMinutes * 60 * 1000 / 2, fileLiteUpdateTimeoutMinutes * 60 * 1000); try { liteFileWrite.acquire(); if (!deleteDataFile(liteFileLocation)) { throw new IOException("Could not delete Lite file, (access denied?)"); } FileUtils.moveFile(temp, liteFile); } finally { liteFileWrite.release(); } log.info("51degrees lite file is updated"); getAndSetNextUpdate(liteFileLocation); memorize(fileDelaysByNames, liteFileLocation, now().plusMinutes(fileUpdateReattemptDelayMinutes)); } catch (IOException | InterruptedException e) { log.error("Exception while updating the 51degrees lite file, deleting", e); FileUtils.deleteQuietly(liteFile); return false; } return true; }
From source file:com.linkedin.pinot.core.segment.index.loader.SegmentPreProcessor.java
/**
 * Builds (or rebuilds) the inverted index for one column of a segment.
 *
 * A marker file ({@code <column>_inv.inprogress}) guards against using a
 * half-written index: if the marker is absent and an index exists, the
 * existing index is trusted; if the marker exists, any existing index is
 * treated as stale and rebuilt from the forward index.
 *
 * @param segmentWriter  writer over the segment directory being loaded
 * @param columnMetadata metadata (cardinality, doc count, field spec) for the column
 * @throws IOException on any failure reading the forward index or writing the new index
 * @throws IllegalStateException when a stale index exists but the writer cannot remove it
 */
private void createInvertedIndex(SegmentDirectory.Writer segmentWriter, ColumnMetadata columnMetadata)
        throws IOException {
    String segmentName = segmentWriter.toString();
    String column = columnMetadata.getColumnName();
    File inProgress = new File(segmentWriter.toSegmentDirectory().getPath().toFile(), column + "_inv.inprogress");

    // Reuse the existing inverted index only if the in-progress marker file does not exist.
    if (!inProgress.exists() && segmentWriter.hasIndexFor(column, ColumnIndexType.INVERTED_INDEX)) {
        LOGGER.info("Found inverted index for segment: {}, colummn {}, loading it", segmentName, column);
        return;
    }

    // Create the marker file before touching anything, so a crash mid-build is detectable.
    FileUtils.touch(inProgress);

    if (segmentWriter.hasIndexFor(column, ColumnIndexType.INVERTED_INDEX)) {
        // Marker was present, so whatever index exists may be half-written: discard it.
        LOGGER.info("Deleting stale inverted index for segment: {}, column: {}", segmentName, column);
        segmentWriter.removeIndex(column, ColumnIndexType.INVERTED_INDEX);
    }

    LOGGER.info("Creating new inverted index for segment: {}, column: {}", segmentName, column);
    // Build the bitmap inverted index off-heap from the column's forward index.
    OffHeapBitmapInvertedIndexCreator creator = new OffHeapBitmapInvertedIndexCreator(indexDir,
            columnMetadata.getCardinality(), columnMetadata.getTotalDocs(),
            columnMetadata.getTotalNumberOfEntries(), columnMetadata.toFieldSpec());
    DataFileReader fwdIndex = getForwardIndexReader(columnMetadata, segmentWriter);
    if (!columnMetadata.isSingleValue()) {
        // Multi-value column: feed each doc's dictionary-id array into the creator.
        // NOTE(review): this loop bounds on metadata.getTotalDocs() while the
        // single-value branch uses columnMetadata.getTotalDocs() — presumably the
        // same value; confirm against the enclosing class.
        SingleColumnMultiValueReader mvFwdIndex = (SingleColumnMultiValueReader) fwdIndex;
        int[] dictIds = new int[columnMetadata.getMaxNumberOfMultiValues()];
        for (int i = 0; i < metadata.getTotalDocs(); i++) {
            int len = mvFwdIndex.getIntArray(i, dictIds);
            creator.add(i, dictIds, len);
        }
    } else {
        // Single-value column: one dictionary id per doc.
        FixedBitSingleValueReader svFwdIndex = (FixedBitSingleValueReader) fwdIndex;
        for (int i = 0; i < columnMetadata.getTotalDocs(); i++) {
            creator.add(i, svFwdIndex.getInt(i));
        }
    }
    creator.seal();
    File invertedIndexFile = creator.getInvertedIndexFile();

    // Inverted index creation does not know the size upfront, so we create it in a
    // temporary file and then move it to the main file. For v1/v2 format this is
    // overkill, but it is required to avoid corruption of the v3 format.
    File tempFile = new File(invertedIndexFile + ".temp");
    if (tempFile.exists()) {
        FileUtils.deleteQuietly(tempFile);
    }
    FileUtils.moveFile(invertedIndexFile, tempFile);
    PinotDataBuffer newIndexBuffer = null;
    try {
        if (segmentWriter.hasIndexFor(column, ColumnIndexType.INVERTED_INDEX)) {
            PinotDataBuffer tempBuffer = segmentWriter.getIndexFor(column, ColumnIndexType.INVERTED_INDEX);
            // Almost always we will have a matching size since segment data is
            // immutable, but it's good to double check.
            if (tempBuffer.size() == tempFile.length()) {
                newIndexBuffer = tempBuffer;
            } else {
                if (segmentWriter.isIndexRemovalSupported()) {
                    segmentWriter.removeIndex(column, ColumnIndexType.INVERTED_INDEX);
                    newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.INVERTED_INDEX,
                            (int) tempFile.length());
                } else {
                    // Size mismatch and no removal support: abandon this copy of the
                    // segment so the caller re-downloads a fresh one.
                    LOGGER.error(
                            "Segment: {} already has inverted index that can not be removed. Throwing exception to discard and download segment",
                            segmentWriter);
                    throw new IllegalStateException("Inverted Index exists and can not be removed for segment: "
                            + segmentWriter + ". Throwing exception to download fresh segment");
                }
            }
        } else {
            // There was no index earlier; allocate a buffer sized to the new index.
            newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.INVERTED_INDEX,
                    (int) tempFile.length());
        }
        newIndexBuffer.readFrom(tempFile);
    } finally {
        if (newIndexBuffer != null) {
            newIndexBuffer.close();
        }
    }

    // Build succeeded: remove the marker and the temporary index file.
    FileUtils.deleteQuietly(inProgress);
    FileUtils.deleteQuietly(tempFile);
    LOGGER.info("Created inverted index for segment: {}, colummn {}", segmentName, column);
}
From source file:it.drwolf.ridire.session.JobCleaner.java
/**
 * Cleans a batch of crawled text files by shipping them over SFTP to a local
 * Perl cleaner script, running the script per file over SSH, fetching the
 * results back, and re-tagging the cleaned files.
 *
 * Failures are logged (stack trace) and the method returns without rethrowing —
 * apparently a deliberate best-effort design.
 *
 * @param arcFiles ARC file paths; only the directory part of each is used to
 *                 locate the per-digest text resource
 * @param digests  one digest per ARC file; {@code <digest>.txt} names the
 *                 resource file (lists are assumed parallel and equal-length —
 *                 TODO confirm with callers)
 */
public void clean(List<String> arcFiles, List<String> digests) {
    try {
        List<String> origFiles = new ArrayList<String>();
        List<String> endFiles = new ArrayList<String>();
        // Phase 1: locally pre-clean each resource into a sibling ".2" file and
        // compute the remote path it will occupy in the cleaner's working dir.
        for (int i = 0; i < arcFiles.size(); i++) {
            String origFile = FilenameUtils.getFullPath(arcFiles.get(i)).concat(JobMapperMonitor.RESOURCESDIR)
                    .concat(digests.get(i).concat(".txt"));
            File cleanedFile = new File(origFile + ".2");
            origFiles.add(cleanedFile.getAbsolutePath());
            // Duplicate the original file as plain cleaned text.
            String cleanedText = this.ridirePlainTextCleaner.getCleanText(new File(origFile));
            FileUtils.writeStringToFile(cleanedFile, cleanedText, "UTF-8");
            String endFile = this.cleanerPath.concat(System.getProperty("file.separator")).concat(digests.get(i))
                    .concat(".txt.2");
            endFiles.add(endFile);
        }
        // Phase 2: open an SSH session to localhost as the perl user and upload
        // the ".2" files into the cleaner's working directory.
        JSch jSch = new JSch();
        com.jcraft.jsch.Session session = jSch.getSession(this.perlUser, "127.0.0.1");
        java.util.Properties config = new java.util.Properties();
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);
        session.setPassword(this.perlPw);
        session.connect();
        Channel channel = session.openChannel("sftp");
        channel.connect();
        ChannelSftp c = (ChannelSftp) channel;
        int mode = ChannelSftp.OVERWRITE;
        for (int i = 0; i < origFiles.size(); i++) {
            c.put(origFiles.get(i), endFiles.get(i), mode);
        }
        c.disconnect();
        String command = null;
        // Phase 3: run cleaner.pl once per uploaded file, draining stdout and
        // polling until the exec channel closes.
        for (String endFile : endFiles) {
            command = "perl " + this.cleanerPath.concat(System.getProperty("file.separator")) + "cleaner.pl ";
            channel = session.openChannel("exec");
            ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
            ((ChannelExec) channel).setErrStream(errorStream);
            command += endFile;
            ((ChannelExec) channel).setCommand(command);
            channel.setInputStream(null);
            InputStream inputStream = channel.getInputStream();
            channel.connect();
            byte[] tmp = new byte[1024];
            while (true) {
                // Drain any pending output; data content is discarded.
                while (inputStream.available() > 0) {
                    int i = inputStream.read(tmp, 0, 1024);
                    if (i < 0) {
                        break;
                    }
                }
                if (channel.isClosed()) {
                    break;
                }
                try {
                    Thread.sleep(200);
                } catch (Exception ee) {
                    // Best-effort poll delay; interruption intentionally ignored here.
                }
            }
        }
        channel.disconnect();
        // Phase 4: download each "<endFile>.tmp" result into a local temp file.
        channel = session.openChannel("sftp");
        channel.connect();
        c = (ChannelSftp) channel;
        List<File> newFiles = new ArrayList<File>();
        for (String endFile : endFiles) {
            File newFile = File.createTempFile("cleanedFile", null);
            newFiles.add(newFile);
            c.get(endFile + ".tmp", newFile.getAbsolutePath());
        }
        c.disconnect();
        // Phase 5: remove the intermediates from the remote working directory.
        channel = session.openChannel("exec");
        command = "rm " + this.cleanerPath.concat(System.getProperty("file.separator")) + "*.2 "
                + this.cleanerPath.concat(System.getProperty("file.separator")) + "*.2.tmp";
        ((ChannelExec) channel).setCommand(command);
        channel.setInputStream(null);
        channel.connect();
        channel.disconnect();
        session.disconnect();
        // Phase 6: replace each local ".2" file with its cleaned counterpart,
        // then re-tag every replaced file.
        for (int i = 0; i < origFiles.size(); i++) {
            File origF = new File(origFiles.get(i));
            FileUtils.deleteQuietly(origF);
            FileUtils.moveFile(newFiles.get(i), origF);
        }
        for (int i = 0; i < origFiles.size(); i++) {
            this.ridireReTagger.retagFile(new File(origFiles.get(i)));
        }
    } catch (JSchException e1) {
        // NOTE(review): failures only print a stack trace; the batch is silently
        // abandoned mid-way. Consider logging via the project logger.
        e1.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (SftpException e) {
        e.printStackTrace();
    }
}
From source file:es.uvigo.ei.sing.adops.operations.running.codeml.CodeMLDefaultProcessManager.java
/**
 * Moves the fixed-name files CodeML writes into the current working directory
 * ("rub", "2NG.dN", "2NG.dS", "2NG.t", "lnf", "rst", "rst1") to the locations
 * reported by the given output descriptor.
 *
 * @param output descriptor providing the destination for each CodeML file
 * @throws OperationException if any of the moves fails
 */
@Override
public void moveOutputFiles(CodeMLOutput output) throws OperationException {
    try {
        relocate("rub", output.getRubFile());
        relocate("2NG.dN", output.get2ngdnFile());
        relocate("2NG.dS", output.get2ngdsFile());
        relocate("2NG.t", output.get2ngtFile());
        relocate("lnf", output.getLnfFile());
        relocate("rst", output.getRstFile());
        relocate("rst1", output.getRst1File());
    } catch (IOException e) {
        throw new OperationException(null, "Error moving CodeML output files", e);
    }
}

/** Moves one CodeML working-directory file (by its fixed name) to the given target. */
private void relocate(String workingFileName, File target) throws IOException {
    FileUtils.moveFile(new File(workingFileName), target);
}
From source file:meme.singularsyntax.mojo.JavaflowEnhanceMojo.java
private void enhanceClassFiles(String outputDir, File backupDir, List<String> classFileNames) throws MojoExecutionException { Log log = getLog();//from www. j ava 2 s . c o m ResourceTransformer transformer = new AsmClassTransformer(); for (String classFileName : classFileNames) { try { File source = new File(outputDir, classFileName); File destination = new File(String.format(CLASSFILE_REWRITE_TEMPLATE, source.getAbsolutePath())); File backupClassFile = new File(backupDir, classFileName); if (backupClassFile.exists() && (source.lastModified() <= backupClassFile.lastModified())) { log.info(source + " is up to date"); continue; } log.info(String.format("Enhancing class file bytecode for Javaflow: %s", source)); RewritingUtils.rewriteClassFile(source, transformer, destination); if (backupClassFile.exists()) { log.debug(String.format("Backup for original class file %s already exists - removing it", backupClassFile)); backupClassFile.delete(); } log.debug(String.format("Renaming original class file from %s to %s", source, backupClassFile)); FileUtils.moveFile(source, backupClassFile); log.debug(String.format("Renaming rewritten class file from %s to %s", destination, source)); FileUtils.moveFile(destination, source); backupClassFile.setLastModified(source.lastModified()); } catch (IOException e) { throw new MojoExecutionException(e.getMessage()); } } }
From source file:com.datafibers.kafka.connect.FileGenericSourceTask.java
@Override public List<SourceRecord> poll() throws InterruptedException { if (!inProgressPaths.isEmpty()) { try {// www. j av a 2 s. co m Path currentPath = inProgressPaths.remove(0); processedPaths.add(currentPath); filename = currentPath.getFileName().toString(); fileInProcessing = FileUtils.getFile(currentPath.toString() + FILENAME_EXT_PROCESSING); fileProcessed = FileUtils.getFile(currentPath.toString() + FILENAME_EXT_PROCESSED); FileUtils.moveFile(FileUtils.getFile(currentPath.toString()), fileInProcessing); stream = new FileInputStream(fileInProcessing); Map<String, Object> offset = context.offsetStorageReader() .offset(Collections.singletonMap(FILENAME_FIELD, filename)); if (offset != null && !overwrite) { log.info("Found previous offset, will not process {}", filename); return null; } else streamOffset = 0L; reader = new BufferedReader(new InputStreamReader(stream)); log.info("Opened {} for reading", filename); } catch (IOException e) { throw new ConnectException(String.format("Unable to open file %", filename), e); } } else { log.warn("********* Waiting for file that meets the glob criteria! 
*********"); synchronized (this) { this.wait(interval); findMatch(); } return null; } ArrayList<SourceRecord> records = new ArrayList<SourceRecord>(); //StringBuilder fileContent = new StringBuilder(); try { final BufferedReader readerCopy; synchronized (this) { readerCopy = reader; } if (readerCopy == null) return null; int nread = 0; while (readerCopy.ready()) { nread = readerCopy.read(buffer, offset, buffer.length - offset); log.trace("Read {} bytes from {}", nread, filename); if (nread > 0) { offset += nread; if (offset == buffer.length) { char[] newbuf = new char[buffer.length * 2]; System.arraycopy(buffer, 0, newbuf, 0, buffer.length); buffer = newbuf; } String line; do { line = extractLine(); if (line != null) { line = line.trim(); log.trace("Read a line from {}", filename); if (records == null) records = new ArrayList<>(); /* records.add(new SourceRecord(offsetKey(filename), offsetValue(streamOffset), topic, dataSchema, structDecodingRoute(line, filename)));*/ if (schemaValidate) { records.add(new SourceRecord(offsetKey(filename), offsetValue(streamOffset), topic, dataSchema, structDecodingRoute(line, filename))); } else { log.info("STRING SCHEMA Processing"); records.add(new SourceRecord(offsetKey(filename), offsetValue(streamOffset), topic, Schema.STRING_SCHEMA, line)); } } new ArrayList<SourceRecord>(); } while (line != null); } } // Finish processing and rename as processed. FileUtils.moveFile(fileInProcessing, fileProcessed); if (nread <= 0) synchronized (this) { this.wait(1000); } return records; } catch (IOException e) { throw new ConnectException(String.format("Unable to read file %", filename), e); } }
From source file:de.thb.ue.backend.service.EvaluationService.java
/**
 * Closes an evaluation: aggregates its votes, generates the Excel report,
 * renumbers and zips the collected images into {@code result.zip}, marks the
 * evaluation closed and removes its participants. An evaluation without votes
 * is deleted outright. The QR-code PDF is removed in either case.
 *
 * @param evaluationUID unique identifier of the evaluation to close
 * @throws AggregatedAnswerException declared by the interface; the aggregation
 *         path here may surface it — TODO confirm which call actually throws it
 */
@Override
public void close(String evaluationUID) throws AggregatedAnswerException {
    Evaluation evaluation = evaluationRepo.findByUID(evaluationUID);
    List<AggregatedMCAnswer> aggregatedMCAnswers;
    try {
        // Working directory for this evaluation, under the configured base path
        // (or the current directory when no base path is configured).
        File workingDirectory = new File(
                (workingDirectoryPath.isEmpty() ? "" : (workingDirectoryPath + File.separatorChar)) + evaluationUID);
        File qrCodesFile = new File(workingDirectory, "qrcodes.pdf");
        List<Vote> votes = evaluation.getVotes();
        if (votes != null && !votes.isEmpty()) {
            aggregatedMCAnswers = aggregatedMCAnswerService.aggregate(votes,
                    evaluation.getQuestionRevision().getName());
            List<String> tutors = evaluation.getTutors().stream()
                    .map(tutor -> tutor.getName() + " " + tutor.getFamilyName()).collect(Collectors.toList());
            List<String> mcQuestions = evaluation.getQuestionRevision().getMcQuestions().stream()
                    .map(MCQuestion::getText).collect(Collectors.toList());
            List<String> textualQuestions = evaluation.getQuestionRevision().getQuestions().stream()
                    .map(Question::getText).collect(Collectors.toList());
            // Generate the Excel summary file into the working directory.
            new EvaluationExcelFileGenerator(evaluationUID, aggregatedMCAnswers, tutors, mcQuestions,
                    textualQuestions, evaluation.getVotes(), evaluation.getSubject().getName(),
                    evaluation.getSemesterType(), evaluation.getDateOfEvaluation(), evaluation.getStudentsAll(),
                    evaluation.getStudentsVoted()).generateExcelFile();
            try {
                File imageFolder = new File(workingDirectory, "images");
                String[] images = imageFolder.list();
                // Renumber images relative to the first vote's id, so the zip
                // starts at 1 regardless of absolute database ids.
                int offset = votes.get(0).getId() - 1;
                if (images != null && images.length > 0) {
                    for (String image : images) {
                        // Assumes names like "<voteId>.zip": length-4 strips the
                        // 4-char ".zip" suffix — TODO confirm with the writer side.
                        int name = Integer.parseInt(image.substring(0, image.length() - 4));
                        String newName = name - offset + "";
                        File tmpFile = new File(imageFolder, image);
                        // NOTE(review): despite the "images" name, only .zip
                        // entries are moved up into the working directory.
                        if (!tmpFile.isDirectory() && FilenameUtils.getExtension(image).equals("zip")) {
                            FileUtils.moveFile(tmpFile, new File(workingDirectory, newName + ".zip"));
                        }
                    }
                    // delete() only succeeds when the folder is now empty.
                    if (imageFolder.exists()) {
                        imageFolder.delete();
                    }
                }
                ZipHelper.folderToZipFile(workingDirectory, new File(workingDirectory, "result.zip"));
            } catch (IOException e) {
                // Zipping is best-effort: the evaluation is still closed below.
                log.error("Error while zipping results for: " + evaluationUID);
            }
            evaluation.setClosed(true);
            evaluationRepo.save(evaluation);
            participantService.deleteByEvaluation(evaluation);
        } else {
            // No votes: nothing to report — drop participants and the evaluation.
            participantService.deleteByEvaluation(evaluation);
            evaluationRepo.delete(evaluation);
        }
        if (qrCodesFile.exists()) {
            qrCodesFile.delete();
        }
    } catch (DBEntryDoesNotExistException e) {
        log.error("Evaluation not closed: " + e.getMessage());
    }
}
From source file:au.com.addstar.objects.Plugin.java
/**
 * Adds a {@code spigot.ver} entry (containing the given version string) to the
 * latest plugin jar.
 *
 * The jar is rebuilt into a sibling file named
 * {@code <date>-<ver>-s.jar}, the {@code spigot.ver} entry is written first,
 * all entries of the old jar are copied over, and the rebuilt jar is then
 * moved back onto the original {@code latestFile} path. A jar that already
 * contains a {@code spigot.ver} entry is left untouched.
 *
 * @param ver the Spigot version string to embed
 */
public void addSpigotVer(String ver) {
    if (latestFile == null)
        return;
    File newFile = new File(latestFile.getParentFile(),
            SpigotUpdater.getFormat().format(Calendar.getInstance().getTime()) + "-" + ver + "-s.jar");
    File spigotFile = new File(latestFile.getParentFile(), "spigot.ver");
    if (spigotFile.exists())
        FileUtils.deleteQuietly(spigotFile);
    // BUG FIX: JarFile/JarOutputStream were leaked on the early return and on
    // exceptions; try-with-resources closes them on every path.
    try (JarFile oldjar = new JarFile(latestFile)) {
        if (oldjar.getEntry("spigot.ver") != null)
            return; // version entry already present — nothing to do
        // Write the version string to a scratch file that becomes the jar entry.
        try (Writer wr = new FileWriter(spigotFile); BufferedWriter writer = new BufferedWriter(wr)) {
            writer.write(ver);
            writer.newLine();
        }
        try (JarOutputStream tempJarOutputStream = new JarOutputStream(new FileOutputStream(newFile))) {
            // spigot.ver goes in first...
            try (FileInputStream stream = new FileInputStream(spigotFile)) {
                byte[] buffer = new byte[1024];
                int bytesRead;
                tempJarOutputStream.putNextEntry(new JarEntry(spigotFile.getName()));
                while ((bytesRead = stream.read(buffer)) != -1) {
                    tempJarOutputStream.write(buffer, 0, bytesRead);
                }
            }
            // ...then every entry of the original jar is copied across unchanged.
            Enumeration<JarEntry> jarEntries = oldjar.entries();
            while (jarEntries.hasMoreElements()) {
                JarEntry entry = jarEntries.nextElement();
                try (InputStream entryInputStream = oldjar.getInputStream(entry)) {
                    tempJarOutputStream.putNextEntry(entry);
                    byte[] buffer = new byte[1024];
                    int bytesRead;
                    while ((bytesRead = entryInputStream.read(buffer)) != -1) {
                        tempJarOutputStream.write(buffer, 0, bytesRead);
                    }
                }
            }
        }
        FileUtils.deleteQuietly(latestFile);
        FileUtils.deleteQuietly(spigotFile);
        FileUtils.moveFile(newFile, latestFile);
        // BUG FIX: previously this assigned latestFile = newFile, but newFile was
        // just moved onto latestFile's path, so the field ended up pointing at a
        // file that no longer exists. The rebuilt jar now lives at latestFile's
        // original path, so the field is left unchanged.
    } catch (IOException e) {
        // Best-effort, matching the original behavior (ZipException is an
        // IOException and was handled identically).
        e.printStackTrace();
    }
}
From source file:com.thoughtworks.go.plugin.infra.monitor.DefaultExternalPluginJarLocationMonitorTest.java
@Test void shouldNotifyRemoveEventBeforeAddEventInCaseOfFileRename() throws Exception { monitor.addPluginJarChangeListener(changeListener); monitor.start();//from ww w . j a v a2 s .c o m copyPluginToThePluginDirectory(pluginExternalDir, "descriptor-aware-test-external-plugin-1.jar"); waitUntilNextRun(monitor); PluginFileDetails orgExternalFile = pluginFileDetails(pluginExternalDir, "descriptor-aware-test-external-plugin-1.jar", false); verify(changeListener).pluginJarAdded(orgExternalFile); PluginFileDetails newExternalFile = pluginFileDetails(pluginExternalDir, "descriptor-aware-test-external-plugin-1-new.jar", false); FileUtils.moveFile(orgExternalFile.file(), newExternalFile.file()); waitUntilNextRun(monitor); InOrder inOrder = inOrder(changeListener); inOrder.verify(changeListener).pluginJarRemoved(orgExternalFile); inOrder.verify(changeListener).pluginJarAdded(newExternalFile); verifyNoMoreInteractions(changeListener); }