List of usage examples for org.apache.commons.io FilenameUtils removeExtension
public static String removeExtension(String filename)
From source file:importToNewTable.createDataJob.java
/**
 * Imports a VCF data file into a newly created HBase table via a MapReduce bulk load.
 *
 * The table is named after the input file without its extension. The job writes
 * HFiles to the fixed staging directory /tmp/HVCF/output and then bulk-loads them.
 *
 * @param filePath         path to the input VCF file (also the MapReduce input path)
 * @param selectedFamilies column families selected for import
 * @param keys             row-key column names passed through to the mapper
 * @throws Exception if table creation, the MapReduce job, or the bulk load fails
 */
public void importData(String filePath, String[] selectedFamilies, String[] keys) throws Exception {
    // Use the family class to derive column names from the input file.
    family familyName = new family();
    String[] column = familyName.columnfamily(filePath);
    // The input file's base name (extension stripped) becomes the HBase table name.
    File f = new File(filePath);
    String fname = f.getName();
    String vcfName = FilenameUtils.removeExtension(fname);
    createTable ht = new createTable();
    ht.table(vcfName, column, selectedFamilies);
    // Configure the MapReduce job; column/key/family lists travel to the mapper via conf.
    Configuration conf = new Configuration();
    conf.setStrings("column", column);
    conf.setStrings("keys", keys);
    conf.setStrings("selectedFamilies", selectedFamilies);
    conf.set("tableName", vcfName);
    Job job = Job.getInstance(conf, "VCF");
    job.setJarByClass(importToNewTable.createDataJob.class);
    job.setMapperClass(importToNewTable.createDataMapper.class);
    job.setReducerClass(org.apache.hadoop.hbase.mapreduce.PutSortReducer.class);
    // Map output is (row key, Put) so PutSortReducer can produce sorted HFiles.
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    job.setMapOutputValueClass(Put.class);
    job.setOutputFormatClass(HFileOutputFormat.class);
    // Input is the VCF file; output is a staging DIRECTORY that must not pre-exist,
    // so any leftover staging data from a previous run is deleted first.
    FileInputFormat.setInputPaths(job, new Path(filePath));
    File temp = new File("/tmp/HVCF/output");
    this.deleteDir(temp);
    FileOutputFormat.setOutputPath(job, new Path("/tmp/HVCF/output"));
    Configuration hbconf = HBaseConfiguration.create();
    // NOTE(review): this HTable is never closed; consider closing it after the bulk load.
    HTable table = new HTable(hbconf, vcfName);
    HFileOutputFormat.configureIncrementalLoad(job, table);
    if (!job.waitForCompletion(true)) {
        // Job failed: skip the bulk load and return without raising.
        return;
    }
    // Bulk-load the generated HFiles into the HBase table.
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(hbconf);
    loader.doBulkLoad(new Path("/tmp/HVCF/output"), table);
}
From source file:de.tsystems.mms.apm.performancesignature.util.PerfSigUIUtils.java
/**
 * Strips the extension (the text after the final {@code '.'}) from a file name.
 * Thin wrapper around {@link FilenameUtils#removeExtension(String)}.
 *
 * @param fileName the file name to process; may be {@code null}
 * @return the file name without its extension, or {@code null} if {@code fileName} was {@code null}
 */
public static String removeExtension(final String fileName) {
    final String baseName = FilenameUtils.removeExtension(fileName);
    return baseName;
}
From source file:asciidoc.maven.plugin.tools.ZipHelper.java
public File unzipEntry(File _zipFile, String _name) { File directory = null;//from www . j a v a 2 s. co m try { if (this.log.isDebugEnabled()) this.log.debug("zip file: " + _zipFile.getAbsolutePath()); ZipEntry zipEntry = null; String zipEntryName = null; ZipFile jarZipFile = new ZipFile(_zipFile); Enumeration<? extends ZipEntry> e = jarZipFile.entries(); while (e.hasMoreElements()) { zipEntry = (ZipEntry) e.nextElement(); zipEntryName = zipEntry.getName(); if (zipEntryName.startsWith(_name) && zipEntryName.endsWith(".zip")) { if (this.log.isInfoEnabled()) this.log.info("Found in " + zipEntryName); directory = new File(_zipFile.getParent(), FilenameUtils.removeExtension(zipEntryName)); break; } } if (directory != null && !directory.exists()) { unzipEntry(jarZipFile, zipEntry, _zipFile.getParentFile()); File asciiDocArchive = new File(_zipFile.getParent(), zipEntryName); unzipArchive(asciiDocArchive, _zipFile.getParentFile()); asciiDocArchive.deleteOnExit(); } } catch (ZipException ze) { this.log.error(ze.getMessage(), ze); } catch (IOException ioe) { this.log.error(ioe.getMessage(), ioe); } return directory; }
From source file:com.mapr.ocr.text.ImageToText.java
/**
 * Renders each page of a PDF at 300 DPI and runs Tesseract OCR (English) over
 * the page images, concatenating the recognized text.
 *
 * Fixes: the original returned from inside {@code finally} (an anti-pattern
 * that can suppress exceptions), reported errors twice via
 * {@code printStackTrace} plus the logger, and passed a redundant end index to
 * {@code substring}.
 *
 * @param fileName path to the PDF file to OCR
 * @return the OCR'd text of all pages, or an empty string if processing failed
 *         (failures are logged and recorded in the error table)
 */
public static String processPDF(String fileName) {
    File imageFile = new File(fileName);
    StringBuilder resultText = new StringBuilder();
    PDFDocument pdfDocument = new PDFDocument();
    try {
        pdfDocument.load(imageFile);
        SimpleRenderer simpleRenderer = new SimpleRenderer();
        simpleRenderer.setResolution(300);
        List<Image> images = simpleRenderer.render(pdfDocument);
        ITesseract tesseract = new Tesseract();
        tesseract.setLanguage("eng");
        for (Image x : images) {
            resultText.append(tesseract.doOCR((BufferedImage) x));
        }
    } catch (Exception e) {
        // Log once, with the stack trace attached, instead of printStackTrace + log.
        LOGGER.log(Level.SEVERE, "Exception processing PDF file " + fileName, e);
        // Row key is the file's simple name without its extension.
        String rowKey = FilenameUtils.removeExtension(fileName.substring(fileName.lastIndexOf("/") + 1));
        populateDataInMapRDB(config, errorTable, rowKey, cf, "error", e.toString());
        populateDataInMapRDB(config, errorTable, rowKey, cf, "filepath", fileName);
    }
    // Partial text accumulated before a failure is still returned.
    return resultText.toString();
}
From source file:com.splunk.shuttl.archiver.importexport.csv.CsvBucketCreatorTest.java
/** Verifies a bucket built from a CSV file is named after the file minus its ".csv" extension. */
@Test(groups = { "fast-unit" })
public void _givenCsvFileAndBucket_bucketNamedAsCsvFileWithoutTheCsvExtension() {
    Bucket createdBucket = csvBucketCreator.createBucketWithCsvFile(csvFile, bucket);
    String expectedName = FilenameUtils.removeExtension(csvFile.getName());
    assertEquals(createdBucket.getName(), expectedName);
}
From source file:com.github.thesmartenergy.gb.entities.Queries.java
@PostConstruct public void initialize() { try {// ww w . j ava2s. c o m String dir = context.getRealPath("/WEB-INF/classes/"); LOG.info(dir); LOG.info(context.getRealPath("/WEB-INF/classes")); LOG.info(context.getClassLoader().getResource("/").toString()); LOG.info(context.getClassLoader().getResource("/").getPath()); LOG.info(context.getClassLoader().getResource("/").toURI().toString()); File ontoDir = new File(dir + "/query/"); for (File queryFile : ontoDir.listFiles()) { String queryFileName = queryFile.getName(); String queryName = FilenameUtils.removeExtension(queryFileName); queries.put(base + queryFileName, queryFile); queries.put(base + queryName, queryFile); } } catch (Exception ex) { LOG.log(Level.SEVERE, "error while initializing app ", ex); throw new RuntimeException("error while initializing app ", ex); } }
From source file:de.uni_siegen.wineme.come_in.thumbnailer.util.TemporaryFilesManager.java
/** * Create a new, read-only temporary file. * /*from ww w . j a v a 2 s.c om*/ * @param file Original file that you need a copy of * @param newExtension The extension that the new file should have * @return File (read-only) * @throws IOException */ public File createTempfileCopy(File file, String newExtension) throws IOException { File destFile = files.get(file); if (destFile == null) { destFile = File.createTempFile("temp", "." + newExtension); createNewCopy(file, destFile); destFile.setWritable(false, false); } else { String newFilename = FilenameUtils.removeExtension(destFile.getAbsolutePath()) + "." + newExtension; File newFile = new File(newFilename); boolean renameSucces = destFile.renameTo(newFile); if (!renameSucces) { createNewCopy(file, newFile); } files.put(file, newFile); destFile = newFile; } return destFile; }
From source file:com.itemanalysis.jmetrik.file.JmetrikOutputWriter.java
public void saveJmetrikFile(File outputFile, Outputter outputter) throws IOException { ArrayList<VariableAttributes> variables = outputter.getColumnAttributes(); LinkedHashMap<VariableName, VariableAttributes> variableAttributeMap = new LinkedHashMap<VariableName, VariableAttributes>(); for (VariableAttributes v : variables) { variableAttributeMap.put(v.getName(), v); }//from www . j av a2 s.c o m //Ensure that file is a jMetrik file. String fname = FilenameUtils.removeExtension(outputFile.getAbsolutePath()); outputFile = new File(fname + ".jmetrik"); JmetrikFileWriter writer = new JmetrikFileWriter(outputFile, variableAttributeMap); try { writer.openConnection(); writer.writeHeader(outputter.getNumberOfOutputRows()); Iterator<Object[][]> iter = outputter.iterator(); Object[][] outputChunk = null; while (iter.hasNext()) { outputChunk = iter.next(); for (int i = 0; i < outputChunk.length; i++) { for (int j = 0; j < outputChunk[0].length; j++) { VariableAttributes v = variables.get(j); if (outputChunk[i][j] != null) { writer.writeValue(v.getName(), outputChunk[i][j].toString()); } } writer.updateRow(); } } } catch (IOException ex) { throw ex; } finally { writer.close(); } }
From source file:com.github.thesmartenergy.gb.query.QueryResource.java
@GET @Produces("application/sparql-generate") @Path("{id}") public Response getAsTurtle(@PathParam("id") String id) { try {// www . ja v a2 s . c om String requestedUri = queries.getBase() + id; System.out.println(requestedUri); if (!queries.getQueries().containsKey(requestedUri)) { return Response.status(Response.Status.NOT_FOUND).build(); } File queryFile = queries.getQueries().get(requestedUri); Response.ResponseBuilder res = Response.ok(IOUtils.toString(new FileInputStream(queryFile)), "application/sparql-generate"); res.header("Content-Disposition", "filename= " + FilenameUtils.removeExtension(id) + "." + "rqg;"); return res.build(); } catch (Exception ex) { Logger.getLogger(QueryResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); } }
From source file:ch.unibas.fittingwizard.infrastructure.RealFittabMarkerScript.java
@Override public FittabScriptOutput execute(FittabScriptInput input) { String moleculeName = getMoleculeName(input.getCubeFile()); File specificMoleculeDir = new File(moleculesDir, moleculeName); runner.setWorkingDir(specificMoleculeDir); List<String> args = Arrays.asList("-cube", input.getCubeFile().getAbsoluteFile().toString(), "-vdw", input.getVdwFile().getAbsoluteFile().toString(), "-pun", input.getLpunFile().getAbsoluteFile().toString()); File mtpFile = new File(specificMoleculeDir, FilenameUtils.removeExtension(input.getCubeFile().getName()) + MtpFittabExtension); runner.exec(fitTabMarkerScriptFile, args); if (!mtpFile.exists()) { throw new ScriptExecutionException("FittabMarker script did not create " + FilenameUtils.normalize(mtpFile.getAbsolutePath()) + " output file."); }//ww w.j a v a 2s.co m return new FittabScriptOutput(mtpFile); }