List of usage examples for org.apache.hadoop.fs FileSystem rename
public abstract boolean rename(Path src, Path dst) throws IOException;
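Note that rename reports most failures through its boolean return value rather than by throwing, so callers should check the result. A minimal sketch of checked usage (the method name and /tmp paths below are hypothetical, not from any example on this page):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public static void checkedRename() throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    Path src = new Path("/tmp/source.txt");  // hypothetical source path
    Path dst = new Path("/tmp/renamed.txt"); // hypothetical destination path
    // rename signals failure (e.g. a missing source) by returning false
    if (!fs.rename(src, dst)) {
        throw new IOException("rename failed: " + src + " -> " + dst);
    }
}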
From source file:dz.lab.hdfs.BadRename.java
/**
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    Path source = new Path("/does/not/exist/file.txt");
    Path nonExistentPath = new Path("/does/not/exist/file1.txt");
    boolean result = fs.rename(source, nonExistentPath);
    System.out.println("Rename: " + result);
}
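As the class name hints, renaming a source that does not exist is not an error here: rename simply returns false, so the printed result is the only indication that nothing was moved.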
From source file:edu.nyu.vida.data_polygamy.utils.FrameworkUtils.java
License:BSD License
public static void renameFile(String from, String to, Configuration conf, boolean s3) throws IOException {
    if (s3) {
        Path pathFrom = new Path(from);
        Path pathTo = new Path(to);
        FileSystem fs = FileSystem.get(pathFrom.toUri(), conf);
        fs.rename(pathFrom, pathTo);
        fs.close();
    } else {
        FileSystem hdfs = FileSystem.get(new Configuration());
        Path pathFrom = new Path(hdfs.getHomeDirectory() + "/" + from);
        Path pathTo = new Path(hdfs.getHomeDirectory() + "/" + to);
        hdfs.rename(pathFrom, pathTo);
    }
}
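Two quirks worth noting in the S3 branch: the boolean result of rename is discarded, and fs.close() closes an instance that may come from Hadoop's shared FileSystem cache, which can affect other users of the same URI unless caching is disabled (via the fs.&lt;scheme&gt;.impl.disable.cache setting).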
From source file:edu.uci.ics.pregelix.dataflow.HDFSFileWriteOperatorDescriptor.java
License:Apache License
@SuppressWarnings("rawtypes") @Override//ww w.ja va 2s .c o m public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) throws HyracksDataException { return new AbstractUnaryInputSinkOperatorNodePushable() { private RecordDescriptor rd0; private FrameDeserializer frameDeserializer; private Configuration conf; private VertexWriter vertexWriter; private TaskAttemptContext context; private String TEMP_DIR = "_temporary"; private ClassLoader ctxCL; private ContextFactory ctxFactory = new ContextFactory(); @Override public void open() throws HyracksDataException { rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0) : inputRdFactory.createRecordDescriptor(); frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0); ctxCL = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); conf = confFactory.createConfiguration(); VertexOutputFormat outputFormat = BspUtils.createVertexOutputFormat(conf); context = ctxFactory.createContext(conf, partition); try { vertexWriter = outputFormat.createVertexWriter(context); } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } @SuppressWarnings("unchecked") @Override public void nextFrame(ByteBuffer frame) throws HyracksDataException { frameDeserializer.reset(frame); try { while (!frameDeserializer.done()) { Object[] tuple = frameDeserializer.deserializeRecord(); Vertex value = (Vertex) tuple[1]; vertexWriter.writeVertex(value); } } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } @Override public void fail() throws HyracksDataException { Thread.currentThread().setContextClassLoader(ctxCL); } @Override public void close() throws HyracksDataException { try { vertexWriter.close(context); moveFilesToFinalPath(); } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } private void moveFilesToFinalPath() throws HyracksDataException { try { JobContext job = ctxFactory.createJobContext(conf); Path outputPath = FileOutputFormat.getOutputPath(job); FileSystem dfs = FileSystem.get(conf); Path filePath = new Path(outputPath, "part-" + new Integer(partition).toString()); FileStatus[] results = findPartitionPaths(outputPath, dfs); if (results.length >= 1) { /** * for Hadoop-0.20.2 */ renameFile(dfs, filePath, results); } else { /** * for Hadoop-0.23.1 */ int jobId = job.getJobID().getId(); outputPath = new Path( outputPath.toString() + File.separator + TEMP_DIR + File.separator + jobId); results = findPartitionPaths(outputPath, dfs); renameFile(dfs, filePath, results); } } catch (IOException e) { throw new HyracksDataException(e); } finally { Thread.currentThread().setContextClassLoader(ctxCL); } } private FileStatus[] findPartitionPaths(Path outputPath, FileSystem dfs) throws FileNotFoundException, IOException { FileStatus[] tempPaths = dfs.listStatus(outputPath, new PathFilter() { @Override public boolean accept(Path dir) { return dir.getName().endsWith(TEMP_DIR); } }); Path tempDir = tempPaths[0].getPath(); FileStatus[] results = dfs.listStatus(tempDir, new PathFilter() { @Override public boolean accept(Path dir) { return dir.getName().indexOf(context.getTaskAttemptID().toString()) >= 0; } }); return 
results; } private void renameFile(FileSystem dfs, Path filePath, FileStatus[] results) throws IOException, HyracksDataException, FileNotFoundException { Path srcDir = results[0].getPath(); if (!dfs.exists(srcDir)) throw new HyracksDataException("file " + srcDir.toString() + " does not exist!"); FileStatus[] srcFiles = dfs.listStatus(srcDir); Path srcFile = srcFiles[0].getPath(); dfs.delete(filePath, true); dfs.rename(srcFile, filePath); } }; }
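The private renameFile helper shows the usual output-commit recipe: locate the task's files under the framework's _temporary directory, delete any stale part file at the destination, and rename the task output to its final part-&lt;partition&gt; name. The return values of delete and rename are unchecked, so a silent rename failure would go unnoticed.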
From source file:edu.uci.ics.pregelix.dataflow.VertexFileWriteOperatorDescriptor.java
License:Apache License
@SuppressWarnings("rawtypes") @Override/*from w w w . j a v a 2 s . c o m*/ public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) throws HyracksDataException { return new AbstractUnaryInputSinkOperatorNodePushable() { private RecordDescriptor rd0; private FrameDeserializer frameDeserializer; private Configuration conf; private VertexWriter vertexWriter; private TaskAttemptContext context; private String TEMP_DIR = "_temporary"; private ClassLoader ctxCL; private ContextFactory ctxFactory = new ContextFactory(); @Override public void open() throws HyracksDataException { rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0) : inputRdFactory.createRecordDescriptor(ctx); frameDeserializer = new FrameDeserializer(rd0); ctxCL = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); conf = confFactory.createConfiguration(ctx); VertexOutputFormat outputFormat = BspUtils.createVertexOutputFormat(conf); context = ctxFactory.createContext(conf, partition); context.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader()); try { if (preHookFactory != null) { preHookFactory.createRuntimeHook().configure(ctx); } vertexWriter = outputFormat.createVertexWriter(context); } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } @SuppressWarnings("unchecked") @Override public void nextFrame(ByteBuffer frame) throws HyracksDataException { frameDeserializer.reset(frame); try { while (!frameDeserializer.done()) { Object[] tuple = frameDeserializer.deserializeRecord(); Vertex value = (Vertex) tuple[1]; vertexWriter.writeVertex(value); } } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } @Override public void fail() throws HyracksDataException { Thread.currentThread().setContextClassLoader(ctxCL); } @Override public void close() throws HyracksDataException { try { vertexWriter.close(context); moveFilesToFinalPath(); } catch (InterruptedException e) { throw new HyracksDataException(e); } catch (IOException e) { throw new HyracksDataException(e); } } private void moveFilesToFinalPath() throws HyracksDataException { try { JobContext job = ctxFactory.createJobContext(conf); Path outputPath = FileOutputFormat.getOutputPath(job); FileSystem dfs = FileSystem.get(conf); Path filePath = new Path(outputPath, "part-" + new Integer(partition).toString()); FileStatus[] results = findPartitionPaths(outputPath, dfs); if (results.length >= 1) { /** * for Hadoop-0.20.2 */ renameFile(dfs, filePath, results); } else { /** * for Hadoop-0.23.1 */ int jobId = job.getJobID().getId(); outputPath = new Path( outputPath.toString() + File.separator + TEMP_DIR + File.separator + jobId); results = findPartitionPaths(outputPath, dfs); renameFile(dfs, filePath, results); } } catch (IOException e) { throw new HyracksDataException(e); } finally { Thread.currentThread().setContextClassLoader(ctxCL); } } private FileStatus[] findPartitionPaths(Path outputPath, FileSystem dfs) throws FileNotFoundException, IOException { FileStatus[] tempPaths = dfs.listStatus(outputPath, new PathFilter() { @Override public boolean accept(Path dir) { return dir.getName().endsWith(TEMP_DIR) && dir.getName().indexOf(".crc") < 0; } }); Path tempDir = 
tempPaths[0].getPath(); FileStatus[] results = dfs.listStatus(tempDir, new PathFilter() { @Override public boolean accept(Path dir) { return dir.getName().indexOf(context.getTaskAttemptID().toString()) >= 0 && dir.getName().indexOf(".crc") < 0; } }); return results; } private void renameFile(FileSystem dfs, Path filePath, FileStatus[] results) throws IOException, HyracksDataException, FileNotFoundException { Path srcDir = results[0].getPath(); if (!dfs.exists(srcDir)) { throw new HyracksDataException("file " + srcDir.toString() + " does not exist!"); } FileStatus[] srcFiles = dfs.listStatus(srcDir); Path srcFile = srcFiles[0].getPath(); dfs.delete(filePath, true); dfs.rename(srcFile, filePath); } }; }
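This operator repeats the pattern of HDFSFileWriteOperatorDescriptor above, additionally filtering out .crc checksum files when scanning the temporary directories and wiring the Hyracks joblet classloader into the task context.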
From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java
License:Open Source License
/**
 * Creates a full spatio-temporal hierarchy for a source folder
 * @throws ParseException
 * @throws InterruptedException
 */
public static void directoryIndexer(final OperationsParams params)
        throws IOException, ParseException, InterruptedException {
    Path inputDir = params.getInputPath();
    FileSystem sourceFs = inputDir.getFileSystem(params);
    final Path sourceDir = inputDir.makeQualified(sourceFs);
    Path destDir = params.getOutputPath();
    final FileSystem destFs = destDir.getFileSystem(params);
    TimeRange timeRange = params.get("time") != null ? new TimeRange(params.get("time")) : null;

    // Create daily indexes that do not exist
    final Path dailyIndexDir = new Path(destDir, "daily");
    FileStatus[] matchingDays = timeRange == null ? sourceFs.listStatus(inputDir)
            : sourceFs.listStatus(inputDir, timeRange);
    final Vector<Path> sourceFiles = new Vector<Path>();
    for (FileStatus matchingDay : matchingDays) {
        for (FileStatus matchingTile : sourceFs.listStatus(matchingDay.getPath())) {
            sourceFiles.add(matchingTile.getPath());
        }
    }
    // Shuffle the array for better load balancing across threads
    Collections.shuffle(sourceFiles);
    final String datasetName = params.get("dataset");
    Parallel.forEach(sourceFiles.size(), new RunnableRange<Object>() {
        @Override
        public Object run(int i1, int i2) {
            LOG.info("Worker [" + i1 + "," + i2 + ") started");
            for (int i = i1; i < i2; i++) {
                Path sourceFile = sourceFiles.get(i);
                try {
                    Path relativeSourceFile = makeRelative(sourceDir, sourceFile);
                    Path destFilePath = new Path(dailyIndexDir, relativeSourceFile);
                    if (!destFs.exists(destFilePath)) {
                        LOG.info("Worker [" + i1 + "," + i2 + ") indexing: " + sourceFile.getName());
                        Path tmpFile;
                        do {
                            tmpFile = new Path((int) (Math.random() * 1000000) + ".tmp");
                        } while (destFs.exists(tmpFile));
                        tmpFile = tmpFile.makeQualified(destFs);
                        if (datasetName == null)
                            throw new RuntimeException(
                                    "Please provide the name of dataset you would like to index");
                        AggregateQuadTree.build(params, sourceFile, datasetName, tmpFile);
                        synchronized (destFs) {
                            Path destDir = destFilePath.getParent();
                            if (!destFs.exists(destDir))
                                destFs.mkdirs(destDir);
                        }
                        destFs.rename(tmpFile, destFilePath);
                    }
                } catch (IOException e) {
                    throw new RuntimeException("Error building an index for " + sourceFile, e);
                }
            }
            LOG.info("Worker [" + i1 + "," + i2 + ") finished");
            return null;
        }
    });
    LOG.info("Done generating daily indexes");

    // Merge daily indexes into monthly indexes
    Path monthlyIndexDir = new Path(destDir, "monthly");
    final SimpleDateFormat dayFormat = new SimpleDateFormat("yyyy.MM.dd");
    final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy.MM");
    mergeIndexes(destFs, dailyIndexDir, monthlyIndexDir, dayFormat, monthFormat, params);
    LOG.info("Done generating monthly indexes");

    // Merge monthly indexes into yearly indexes
    Path yearlyIndexDir = new Path(destDir, "yearly");
    final SimpleDateFormat yearFormat = new SimpleDateFormat("yyyy");
    mergeIndexes(destFs, monthlyIndexDir, yearlyIndexDir, monthFormat, yearFormat, params);
    LOG.info("Done generating yearly indexes");
}
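Each worker builds its index into a randomly named .tmp file and only renames it into place once the build succeeds, so a crashed worker leaves at most a stray temp file rather than a half-written index. The same build-then-rename pattern reappears in mergeIndexes below.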
From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java
License:Open Source License
/**
 * Merges a set of indexes into larger indexes
 * @param fs
 * @param srcIndexDir
 * @param dstIndexDir
 * @param srcFormat
 * @param dstFormat
 * @param params
 * @throws IOException
 * @throws ParseException
 * @throws InterruptedException
 */
private static void mergeIndexes(final FileSystem fs, Path srcIndexDir, Path dstIndexDir,
        SimpleDateFormat srcFormat, SimpleDateFormat dstFormat, final OperationsParams params)
        throws IOException, ParseException, InterruptedException {
    TimeRange timeRange = params.get("time") != null ? new TimeRange(params.get("time")) : null;
    final FileStatus[] sourceIndexes = timeRange == null ? fs.listStatus(srcIndexDir)
            : fs.listStatus(srcIndexDir, timeRange);
    Arrays.sort(sourceIndexes); // Alphabetical sort acts as sort-by-date here

    // Scan the source indexes and merge each consecutive run belonging to the same unit
    int i1 = 0;
    while (i1 < sourceIndexes.length) {
        final String indexToCreate = dstFormat.format(srcFormat.parse(sourceIndexes[i1].getPath().getName()));
        int i2 = i1 + 1;
        // Keep scanning as long as the source index belongs to the same dest index
        while (i2 < sourceIndexes.length && dstFormat
                .format(srcFormat.parse(sourceIndexes[i2].getPath().getName())).equals(indexToCreate))
            i2++;

        // Merge all source indexes in the range [i1, i2) into one dest index
        // Copy i1, i2 to other variables as final to be accessible from threads
        final int firstIndex = i1;
        final int lastIndex = i2;
        final Path destIndex = new Path(dstIndexDir, indexToCreate);

        // For each tile, merge all values in all source indexes
        /* A regular expression to catch the tile identifier of a MODIS grid cell */
        final Pattern MODISTileID = Pattern.compile("^.*(h\\d\\dv\\d\\d).*$");
        final FileStatus[] tilesInFirstDay = fs.listStatus(sourceIndexes[i1].getPath());
        // Shuffle the array for better load balancing across threads
        Random rand = new Random();
        for (int i = 0; i < tilesInFirstDay.length - 1; i++) {
            // Swap the entry at i with any following entry
            int j = i + rand.nextInt(tilesInFirstDay.length - i - 1);
            FileStatus temp = tilesInFirstDay[i];
            tilesInFirstDay[i] = tilesInFirstDay[j];
            tilesInFirstDay[j] = temp;
        }
        Parallel.forEach(tilesInFirstDay.length, new RunnableRange<Object>() {
            @Override
            public Object run(int i_file1, int i_file2) {
                for (int i_file = i_file1; i_file < i_file2; i_file++) {
                    try {
                        FileStatus tileInFirstDay = tilesInFirstDay[i_file];

                        // Extract tile ID
                        Matcher matcher = MODISTileID.matcher(tileInFirstDay.getPath().getName());
                        if (!matcher.matches()) {
                            LOG.warn("Cannot extract tile id from file " + tileInFirstDay.getPath());
                            continue;
                        }
                        final String tileID = matcher.group(1);
                        Path destIndexFile = new Path(destIndex, tileID);

                        PathFilter tileFilter = new PathFilter() {
                            @Override
                            public boolean accept(Path path) {
                                return path.getName().contains(tileID);
                            }
                        };

                        // Find matching tiles in all source indexes to merge
                        Vector<Path> filesToMerge = new Vector<Path>(lastIndex - firstIndex);
                        filesToMerge.add(tileInFirstDay.getPath());
                        for (int iDailyIndex = firstIndex + 1; iDailyIndex < lastIndex; iDailyIndex++) {
                            FileStatus[] matchedTileFile = fs.listStatus(sourceIndexes[iDailyIndex].getPath(),
                                    tileFilter);
                            if (matchedTileFile.length == 0)
                                LOG.warn("Could not find tile " + tileID + " in dir "
                                        + sourceIndexes[iDailyIndex].getPath());
                            else if (matchedTileFile.length == 1)
                                filesToMerge.add(matchedTileFile[0].getPath());
                        }

                        if (fs.exists(destIndexFile)) {
                            // Destination file already exists
                            // Check the date of the destination and source files to see
                            // whether it needs to be updated or not
                            long destTimestamp = fs.getFileStatus(destIndexFile).getModificationTime();
                            boolean needsUpdate = false;
                            for (Path fileToMerge : filesToMerge) {
                                long sourceTimestamp = fs.getFileStatus(fileToMerge).getModificationTime();
                                if (sourceTimestamp > destTimestamp) {
                                    needsUpdate = true;
                                    break;
                                }
                            }
                            if (!needsUpdate)
                                continue;
                            else
                                LOG.info("Updating file " + destIndexFile.getName());
                        }

                        // Do the merge
                        Path tmpFile;
                        do {
                            tmpFile = new Path((int) (Math.random() * 1000000) + ".tmp");
                        } while (fs.exists(tmpFile));
                        tmpFile = tmpFile.makeQualified(fs);
                        LOG.info("Merging tile " + tileID + " into file " + destIndexFile);
                        AggregateQuadTree.merge(params, filesToMerge.toArray(new Path[filesToMerge.size()]),
                                tmpFile);
                        synchronized (fs) {
                            Path destDir = destIndexFile.getParent();
                            if (!fs.exists(destDir))
                                fs.mkdirs(destDir);
                        }
                        fs.rename(tmpFile, destIndexFile);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                return null;
            }
        });
        i1 = i2;
    }
}
From source file:edu.umn.cs.spatialHadoop.nasa.MakeHDFVideo.java
License:Open Source License
/**
 * @param args
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    if (!params.checkInputOutput()) {
        System.exit(1);
    }
    //Path input = params.getPaths()[0];
    Path output = params.getPaths()[1];
    boolean recoverHoles = params.is("recoverholes");
    boolean addDate = params.is("adddate");

    Vector<String> vargs = new Vector<String>(Arrays.asList(args));
    Rectangle plotRange = (Rectangle) params.getShape("rect");
    if (plotRange != null && recoverHoles) {
        // Extend the plot range to improve the quality of RecoverHoles
        for (int i = 0; i < vargs.size();) {
            if (vargs.get(i).startsWith("rect:") || vargs.get(i).startsWith("mbr:")
                    || vargs.get(i).startsWith("width:") || vargs.get(i).startsWith("height:")) {
                vargs.remove(i);
            } else {
                i++;
            }
        }
        double w = plotRange.getWidth();
        double h = plotRange.getHeight();
        plotRange = plotRange.buffer(w / 2, h / 2);
        int new_width = params.getInt("width", 1000) * 2;
        int new_height = params.getInt("height", 1000) * 2;
        vargs.add(plotRange.toText(new Text("rect:")).toString());
        vargs.add("width:" + new_width);
        vargs.add("height:" + new_height);
    }

    // 1- Call HDF plot to generate all images
    HDFPlot.main(vargs.toArray(new String[vargs.size()]));

    // 2- Call RecoverHoles to recover holes (surprise)
    if (recoverHoles) {
        RecoverHoles.recoverInterpolationDir(output);
        if (plotRange != null) {
            // Need to crop all images to restore original selection
            cropImages(output, (Rectangle) params.getShape("rect"), plotRange);
        }
    }
    if (addDate) {
        RecoverHoles.addDate(output);
    }

    FileSystem outFs = output.getFileSystem(params);
    FileStatus[] generatedImages = outFs.listStatus(output, new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().toLowerCase().endsWith(".png");
        }
    });
    if (generatedImages.length == 0) {
        Log.warn("No generated images");
        System.exit(1);
    }

    InputStream inStream = outFs.open(generatedImages[0].getPath());
    BufferedImage firstImage = ImageIO.read(inStream);
    inStream.close();
    int imageWidth = firstImage.getWidth();
    int imageHeight = firstImage.getHeight();

    String scaleRangeStr = params.get("scale-range");
    if (scaleRangeStr != null) {
        String[] parts = scaleRangeStr.split("\\.\\.");
        MinMax scaleRange = new MinMax();
        scaleRange.minValue = Integer.parseInt(parts[0]);
        scaleRange.maxValue = Integer.parseInt(parts[1]);
        HDFPlot2.drawScale(new Path(output, "scale.png"), scaleRange, 64, imageHeight);
    }

    InputStream logoInputStream = MakeHDFVideo.class.getResourceAsStream("/gistic_logo.png");
    OutputStream logoOutputStream = outFs.create(new Path(output, "gistic_logo.png"));
    byte[] buffer = new byte[4096];
    int size = 0;
    while ((size = logoInputStream.read(buffer)) > 0) {
        logoOutputStream.write(buffer, 0, size);
    }
    logoOutputStream.close();

    // Rename files to be ready to use with ffmpeg
    FileStatus[] all_images = outFs.listStatus(output, new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().matches("\\d+\\.\\d+\\.\\d+\\.png");
        }
    });
    Arrays.sort(all_images, new Comparator<FileStatus>() {
        @Override
        public int compare(FileStatus f1, FileStatus f2) {
            return f1.getPath().getName().compareTo(f2.getPath().getName());
        }
    });
    int day = 1;
    for (FileStatus image : all_images) {
        String newFileName = String.format("day_%03d.png", day++);
        outFs.rename(image.getPath(), new Path(output, newFileName));
    }

    // Plot the overlay image
    Path overlay = params.get("overlay") == null ? null : new Path(params.get("overlay"));
    if (overlay != null) {
        // Draw an overlay image
        OperationsParams plotParams = new OperationsParams(params);
        // Keep all arguments except input and output which change for each call
        // to Plot or PlotPyramid
        plotParams.clearAllPaths();
        Path overlayOutput = new Path(output, "overlay.png");
        plotParams.setClass("shape", OSMPolygon.class, Shape.class);
        GeometricPlot.plot(new Path[] { overlay }, overlayOutput, plotParams);
    }

    String video_command;
    if (overlay != null) {
        video_command = "avconv -r 4 -i day_%3d.png " + "-vf \"movie=gistic_logo.png [watermark]; "
                + "movie=overlay.png [ways]; " + "movie=scale.png [scale]; " + "[in] crop="
                + plotRange.getWidth() + ":" + plotRange.getHeight() + "[in]; " + "[ways] crop="
                + plotRange.getWidth() + ":" + plotRange.getHeight() + "[ways]; "
                + "[in][watermark] overlay=main_w-overlay_w-10:10 [mid]; "
                + "[mid][ways] overlay=0:0 [mid2]; " + "[mid2] pad=iw+64:ih [mid3]; "
                + "[mid3][scale] overlay=main_w-overlay_w:0 [out]\" " + "-r 4 -pix_fmt yuv420p output.mp4 ";
    } else {
        video_command = "avconv -r 4 -i day_%3d.png -vf " + "\"movie=gistic_logo.png [watermark]; "
                + "movie=scale.png [scale]; " + "[in][watermark] overlay=main_w-overlay_w-10:10 [mid]; "
                + "[mid] pad=iw+64:ih [mid2]; " + "[mid2][scale] overlay=main_w-overlay_w:0 [out]\" "
                + "-r 4 -pix_fmt yuv420p output.mp4 ";
    }
    PrintStream video_script = new PrintStream(outFs.create(new Path(output, "make_video.sh")));
    video_script.println(video_command);
    video_script.close();
}
From source file:edu.umn.cs.spatialHadoop.nasa.MultiHDFPlot.java
License:Open Source License
private static void createVideo(FileSystem outFs, Path output, boolean addLogo) throws IOException {
    // Rename all generated files to be day_%3d.png
    // Rename files to be ready to use with ffmpeg
    FileStatus[] all_images = outFs.listStatus(output, new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().matches("\\d+\\.\\d+\\.\\d+\\.png");
        }
    });
    Arrays.sort(all_images, new Comparator<FileStatus>() {
        @Override
        public int compare(FileStatus f1, FileStatus f2) {
            return f1.getPath().getName().compareTo(f2.getPath().getName());
        }
    });
    int day = 1;
    for (FileStatus image : all_images) {
        String newFileName = String.format("day_%03d.png", day++);
        outFs.rename(image.getPath(), new Path(output, newFileName));
    }

    String videoCommand;
    if (addLogo) {
        // Puts frames together into a video
        videoCommand = "avconv -r 4 -i day_%3d.png -vf " + "\"movie=gistic_logo.png [watermark]; "
                + "movie=scale.png [scale]; " + "[in][watermark] overlay=main_w-overlay_w-10:10 [mid]; "
                + "[mid] pad=iw+64:ih [mid2]; " + "[mid2][scale] overlay=main_w-overlay_w:0 [out]\" "
                + "-r 4 -pix_fmt yuv420p output.mp4 ";
    } else {
        videoCommand = "avconv -r 4 -i day_%3d.png -vf \"" + "movie=scale.png [scale]; "
                + "[in] pad=iw+64:ih [mid2]; " + "[mid2][scale] overlay=main_w-overlay_w:0 [out]\" "
                + "-r 4 -pix_fmt yuv420p output.mp4 ";
    }
    System.out.println("Run the following command to generate the video");
    System.out.println(videoCommand);
}
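Unlike MakeHDFVideo above, which writes the avconv command into a make_video.sh script, this variant only prints the command for the user to run by hand; the rename loop maps the sorted date-named frames onto sequential day_NNN.png names for the avconv input pattern.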
From source file:eu.larkc.iris.indexing.DistributedFileSystemManager.java
License:Apache License
public void savePredicateConfig() {
    try {
        FileSystem fs = FileSystem.get(configuration.hadoopConfiguration);
        String predicatesConfigFileTemp = getPredicatesConfigFilePath(configuration.project);
        Path predicatesConfigFileTempPath = new Path(predicatesConfigFileTemp + "_");
        FSDataOutputStream predicatesConfigOutputStream = null;
        if (fs.exists(predicatesConfigFileTempPath)) {
            logger.error("path " + predicatesConfigFileTemp + " exists already!");
            throw new RuntimeException("path " + predicatesConfigFileTemp + " exists already!");
        }
        predicatesConfigOutputStream = fs.create(predicatesConfigFileTempPath);
        for (PredicateData predicateData : predicatesConfig) {
            predicateData.write(predicatesConfigOutputStream);
        }
        predicatesConfigOutputStream.close();
        fs.delete(new Path(predicatesConfigFileTemp), true);
        fs.rename(predicatesConfigFileTempPath, new Path(predicatesConfigFileTemp));
    } catch (IOException e) {
        logger.error("io exception", e);
        throw new RuntimeException("io exception", e);
    }
}
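This is the write-then-rename replacement pattern: the new config is written next to the live file under a '_'-suffixed name, the old file is deleted, and the temp file is renamed over the original path, keeping the window in which no config file exists as short as possible. The return values of delete and rename are ignored here.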
From source file:eu.stratosphere.hadoopcompatibility.FileOutputCommitterWrapper.java
License:Apache License
private void moveTaskOutputs(JobConf conf, TaskAttemptID taskAttemptID, FileSystem fs, Path jobOutputDir,
        Path taskOutput) throws IOException {
    if (fs.isFile(taskOutput)) {
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput,
                getTempTaskOutputPath(conf, taskAttemptID));
        if (!fs.rename(taskOutput, finalOutputPath)) {
            if (!fs.delete(finalOutputPath, true)) {
                throw new IOException("Failed to delete earlier output of task: " + taskAttemptID);
            }
            if (!fs.rename(taskOutput, finalOutputPath)) {
                throw new IOException("Failed to save output of task: " + taskAttemptID);
            }
        }
        LOG.debug("Moved " + taskOutput + " to " + finalOutputPath);
    } else if (fs.getFileStatus(taskOutput).isDir()) {
        FileStatus[] paths = fs.listStatus(taskOutput);
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput,
                getTempTaskOutputPath(conf, taskAttemptID));
        fs.mkdirs(finalOutputPath);
        if (paths != null) {
            for (FileStatus path : paths) {
                moveTaskOutputs(conf, taskAttemptID, fs, jobOutputDir, path.getPath());
            }
        }
    }
}
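If the first rename fails, typically because an earlier attempt already wrote the destination, the committer deletes the destination and retries the rename once; directories are not renamed but recreated at the destination, with their contents moved recursively. This appears to mirror the commit logic of Hadoop's own FileOutputCommitter, which this wrapper class adapts for Stratosphere's Hadoop compatibility layer.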