Usage examples for `org.apache.commons.io.FilenameUtils.getExtension(String filename)`, collected from open-source projects.
public static String getExtension(String filename)
From source file:com.torresbueno.RSAEncryptionDecryptionUtil.java
/**
 * Checks whether the given path points to a certificate file.
 * Recognized extensions: .cer, .csr (case-sensitive, matching the original behavior).
 *
 * @param path the file path to inspect
 * @return true if the path has a certificate extension
 */
private boolean isCertificate(String path) {
    // Extract the extension once instead of calling getExtension() twice.
    String extension = FilenameUtils.getExtension(path);
    return "cer".equals(extension) || "csr".equals(extension);
}
From source file:com.heliosdecompiler.helios.tasks.AddFilesTask.java
private void handleFile(File file) { System.out.println("Handling file " + file); String extension = FilenameUtils.getExtension(file.getName()).toLowerCase(); File fileToLoad = file;//from w ww . jav a 2 s.c o m if (extension.equals("apk")) { try { if (Settings.APKTOOL.get().asBoolean()) { File decodedResources = File.createTempFile("apktoolout", ".apk"); decodedResources.deleteOnExit(); APKTool.decodeResources(file, decodedResources); fileToLoad = decodedResources; } } catch (final Exception e) { ExceptionHandler.handle(e); } } if (extension.equals("apk") || extension.equals("dex")) { try { if (Settings.APK_CONVERSION.get().asString().equals(Converter.ENJARIFY.getId())) { File transformedResources = File.createTempFile("enjarifyout", ".jar"); transformedResources.deleteOnExit(); Converter.ENJARIFY.convert(fileToLoad, transformedResources); fileToLoad = transformedResources; } else if (Settings.APK_CONVERSION.get().asString().equals(Converter.DEX2JAR.getId())) { File transformedResources = File.createTempFile("dex2jarout", ".jar"); transformedResources.deleteOnExit(); Converter.DEX2JAR.convert(fileToLoad, transformedResources); fileToLoad = transformedResources; } } catch (final Exception e) { ExceptionHandler.handle(e); } } try { Helios.loadFile(fileToLoad); } catch (final Exception e) { ExceptionHandler.handle(e); } }
From source file:net.mitnet.tools.pdf.book.openoffice.reports.OpenOfficeReportBuilder.java
public void buildReport(File templateFile, File dataFile, File outputFile) throws Exception { Object templateData = null;//from w w w . j av a 2 s.c o m String dataFileExtension = FilenameUtils.getExtension(dataFile.getName()); if (dataFileExtension.equals("xml")) { NodeModel nodeModel = NodeModel.parse(dataFile); templateData = nodeModel; } else if (dataFileExtension.equals("properties")) { Properties properties = new Properties(); properties.load(new FileInputStream(dataFile)); templateData = properties; } else { String msg = "Template data file must be 'xml' or 'properties'; unsupported type: " + dataFileExtension; System.err.println(msg); throw new Exception(msg); } buildReport(templateFile, templateData, outputFile); }
From source file:aldenjava.opticalmapping.data.DataFormat.java
/**
 * Resolves a DataFormat either from an explicit format code or, when the
 * code is -1, from the file extension of the given path.
 *
 * @param path the file path whose extension is used when {@code format == -1}
 * @param format the explicit format code, or -1 to infer from the extension
 * @return the matching DataFormat
 * @throws InvalidFileFormatException if the explicit format code is unknown
 */
public static final DataFormat lookup(String path, int format) {
    // -1 means "not specified": fall back to the file extension.
    if (format == -1) {
        return lookupfileext(FilenameUtils.getExtension(path));
    }
    if (!lookupmap.containsKey(format)) {
        throw new InvalidFileFormatException();
    }
    return lookupmap.get(format);
}
From source file:de.yaio.services.metaextract.server.controller.MetaExtractFacade.java
/** * extract metadata from the inputStream depending on the extension of fileName * * @param input content to extract the metadata from * @param fileName extension to get extension and mimetype to support extraction * @param lang language-key to support extraction * @return extracted metadata from the different extractors * @throws IOException possible errors while reading and copying tmpFiles * @throws ExtractorException possible errors while running extractor *///from ww w . ja va 2 s . c o m public ExtractedMetaData extractMetaData(final InputStream input, final String fileName, final String lang) throws IOException, ExtractorException { List<Extractor> extractors = new ArrayList<>(); extractors.add(extractor1); extractors.add(extractor2); File tmpFile = File.createTempFile("metaextractor", "." + FilenameUtils.getExtension(fileName)); tmpFile.deleteOnExit(); Files.copy(input, tmpFile.toPath(), java.nio.file.StandardCopyOption.REPLACE_EXISTING); ExtractedMetaData extractedMetaData = new ExtractedMetaData(); for (Extractor extractor : extractors) { try { ExtractedMetaDataVersion extractedMetaDataVersion = extractor.extractMetaData(tmpFile, lang); String content = extractedMetaDataVersion.getContent(); if (StringUtils.isNotBlank(content)) { extractedMetaData.getVersions().put(extractor.getClass().toString(), extractedMetaDataVersion); } } catch (Exception ex) { ex.printStackTrace(); } } LOGGER.info( "done extract metadat for file:" + fileName + " with lang:" + lang + " to " + extractedMetaData); return extractedMetaData; }
From source file:fr.pilato.elasticsearch.crawler.fs.tika.TikaDocParser.java
public static void generate(FsSettings fsSettings, InputStream inputStream, String filename, Doc doc, MessageDigest messageDigest, long filesize) throws IOException { logger.trace("Generating document [{}]", filename); // Extracting content with Tika // See #38: https://github.com/dadoonet/fscrawler/issues/38 int indexedChars = 100000; if (fsSettings.getFs().getIndexedChars() != null) { if (fsSettings.getFs().getIndexedChars().percentage()) { indexedChars = (int) Math.round(filesize * fsSettings.getFs().getIndexedChars().asDouble()); logger.trace("using percentage [{}] to define indexed chars: [{}]", fsSettings.getFs().getIndexedChars(), indexedChars); } else {/*from w ww . j a va 2 s . c om*/ indexedChars = (int) fsSettings.getFs().getIndexedChars().value(); logger.trace("indexed chars [{}]", indexedChars == -1 ? "has been disabled. All text will be extracted" : indexedChars); } } Metadata metadata = new Metadata(); String parsedContent = null; if (messageDigest != null) { logger.trace("Generating hash with [{}]", messageDigest.getAlgorithm()); inputStream = new DigestInputStream(inputStream, messageDigest); } ByteArrayOutputStream bos = null; if (fsSettings.getFs().isStoreSource()) { logger.debug("Using a TeeInputStream as we need to store the source"); bos = new ByteArrayOutputStream(); inputStream = new TeeInputStream(inputStream, bos); } try { // Set the maximum length of strings returned by the parseToString method, -1 sets no limit logger.trace("Beginning Tika extraction"); parsedContent = tika().parseToString(inputStream, metadata, indexedChars); logger.trace("End of Tika extraction"); } catch (Throwable e) { logger.debug("Failed to extract [" + indexedChars + "] characters of text for [" + filename + "]", e); } // Adding what we found to the document we want to index // File doc.getFile().setContentType(metadata.get(Metadata.CONTENT_TYPE)); doc.getFile().setExtension(FilenameUtils.getExtension(filename)); // We only add `indexed_chars` if we have other 
value than default or -1 if (fsSettings.getFs().getIndexedChars() != null && fsSettings.getFs().getIndexedChars().value() != -1) { doc.getFile().setIndexedChars(indexedChars); } if (fsSettings.getFs().isAddFilesize()) { if (metadata.get(Metadata.CONTENT_LENGTH) != null) { // We try to get CONTENT_LENGTH from Tika first doc.getFile().setFilesize(Long.parseLong(metadata.get(Metadata.CONTENT_LENGTH))); } } if (messageDigest != null) { byte[] digest = messageDigest.digest(); String result = ""; // Convert to Hexa for (int i = 0; i < digest.length; i++) { result += Integer.toString((digest[i] & 0xff) + 0x100, 16).substring(1); } doc.getFile().setChecksum(result); } // File // Meta doc.getMeta().setAuthor(metadata.get(TikaCoreProperties.CREATOR)); doc.getMeta().setTitle(metadata.get(TikaCoreProperties.TITLE)); String sDate = metadata.get(TikaCoreProperties.MODIFIED); if (sDate != null) { try { LocalDateTime date = LocalDateTime.parse(sDate, DateTimeFormatter.ISO_DATE_TIME); doc.getMeta().setDate(date); } catch (DateTimeParseException e) { logger.warn("Can not parse date [{}] for [{}]. Skipping date field...", sDate, filename); } } doc.getMeta().setKeywords(commaDelimitedListToStringArray(metadata.get(TikaCoreProperties.KEYWORDS))); if (fsSettings.getFs().isRawMetadata()) { logger.trace("Listing all available metadata:"); for (String metadataName : metadata.names()) { String value = metadata.get(metadataName); // This is a logger trick which helps to generate our unit tests // You need to change test/resources/log4j2.xml fr.pilato.elasticsearch.crawler.fs.tika level to trace logger.trace(" assertThat(raw, hasEntry(\"{}\", \"{}\"));", metadataName, value); doc.getMeta().addRaw(metadataName, value); } } // Meta // Doc content doc.setContent(parsedContent); // Doc as binary attachment if (fsSettings.getFs().isStoreSource()) { doc.setAttachment(Base64.getEncoder().encodeToString(bos.toByteArray())); } logger.trace("End document generation"); // End of our document }
From source file:com.norconex.importer.ImporterLauncher.java
private static void writeResponse(ImporterResponse response, String outputPath, int depth, int index) throws IOException { if (!response.isSuccess()) { String statusLabel = "REJECTED: "; if (response.getImporterStatus().isError()) { statusLabel = " ERROR: "; }//from ww w.ja va 2 s. c o m System.out.println(statusLabel + response.getReference() + " (" + response.getImporterStatus().getDescription() + ")"); } else { ImporterDocument doc = response.getDocument(); StringBuilder path = new StringBuilder(outputPath); if (depth > 0) { int pathLength = outputPath.length(); int extLength = FilenameUtils.getExtension(outputPath).length(); if (extLength > 0) { extLength++; } String nameSuffix = "_" + depth + "-" + index; path.insert(pathLength - extLength, nameSuffix); } File docfile = new File(path.toString()); File metafile = new File(path.toString() + ".meta"); // Write document file FileOutputStream docOutStream = new FileOutputStream(docfile); CachedInputStream docInStream = doc.getContent(); FileOutputStream metaOut = null; try { IOUtils.copy(docInStream, docOutStream); IOUtils.closeQuietly(docOutStream); IOUtils.closeQuietly(docInStream); // Write metadata file metaOut = new FileOutputStream(metafile); doc.getMetadata().store(metaOut, null); System.out.println("IMPORTED: " + response.getReference()); } catch (IOException e) { System.err.println("Could not write: " + doc.getReference()); e.printStackTrace(System.err); System.err.println(); System.err.flush(); } finally { IOUtils.closeQuietly(metaOut); } } ImporterResponse[] nextedResponses = response.getNestedResponses(); for (int i = 0; i < nextedResponses.length; i++) { ImporterResponse nextedResponse = nextedResponses[i]; writeResponse(nextedResponse, outputPath, depth + 1, i + 1); } }
From source file:com.pamarin.income.controller.SuggestionCtrl.java
/**
 * Validates that the uploaded file's extension names a supported image type;
 * throws a UserException otherwise.
 */
private void validateImageType() {
    LOG.debug("file name --> {}", file.getFileName());
    // Lower-case so the extension check is case-insensitive.
    String extension = FilenameUtils.getExtension(file.getFileName()).toLowerCase();
    if (isImage(extension)) {
        return;
    }
    throw new UserException(" .png, .jpg, .jpeg ");
}
From source file:io.gravitee.repository.couchbase.management.AbstractCouchbaseDBTest.java
@Before public void setup() throws Exception { LOG.info("Setup of Couchbase Cluster for Integration Test"); final File file = new File(AbstractCouchbaseDBTest.class.getResource(getTestCasesPath()).toURI()); File[] collectionsDumps = file.listFiles(pathname -> pathname.isFile() && JSON_EXTENSION.equalsIgnoreCase(FilenameUtils.getExtension(pathname.toString()))); //template.queryN1QL( N1qlQuery.simple(Index.dropPrimaryIndex(bucket.name()))); template.queryN1QL(N1qlQuery.simple(Index.createPrimaryIndex().on(bucket.name()))); LOG.debug("Flushing bucket ..."); bucket.bucketManager().flush();//from w w w . j ava 2 s .c om importJsonFiles(collectionsDumps); //workaround to avoid select before inserts commits ... Thread.sleep(1000L); }
From source file:com.qq.tars.web.controller.patch.UploadController.java
@RequestMapping(value = "server/api/upload_patch_package", produces = "application/json") @ResponseBody//from w w w . j a va 2 s. com public ServerPatchView upload(@Application @RequestParam String application, @ServerName @RequestParam("module_name") String moduleName, HttpServletRequest request, ModelMap modelMap) throws Exception { String comment = StringUtils.trimToEmpty(request.getParameter("comment")); String uploadTgzBasePath = systemConfigService.getUploadTgzPath(); CommonsMultipartResolver multipartResolver = new CommonsMultipartResolver( request.getSession().getServletContext()); if (multipartResolver.isMultipart(request)) { MultipartHttpServletRequest multiRequest = (MultipartHttpServletRequest) request; Iterator<String> it = multiRequest.getFileNames(); if (it.hasNext()) { MultipartFile file = multiRequest.getFile(it.next()); String originalName = file.getOriginalFilename(); String extension = FilenameUtils.getExtension(originalName); String temporary = uploadTgzBasePath + "/" + UUID.randomUUID() + "." + extension; IOUtils.copy(file.getInputStream(), new FileOutputStream(temporary)); String packageType = "suse"; // war? if (temporary.endsWith(".war")) { temporary = patchService.war2tgz(temporary, moduleName); } // ? String updateTgzPath = uploadTgzBasePath + "/" + application + "/" + moduleName; // ???? String uploadTgzName = application + "." + moduleName + "_" + packageType + "_" + System.currentTimeMillis() + ".tgz"; // ?? 
String uploadTgzFullPath = updateTgzPath + "/" + uploadTgzName; log.info("temporary path={}, upload path={}", temporary, uploadTgzFullPath); File uploadPathDir = new File(updateTgzPath); if (!uploadPathDir.exists()) { if (!uploadPathDir.mkdirs()) { throw new IOException( String.format("mkdirs error, path=%s", uploadPathDir.getCanonicalPath())); } } FileUtils.moveFile(new File(temporary), new File(uploadTgzFullPath)); return mapper.map(patchService.addServerPatch(application, moduleName, uploadTgzFullPath, comment), ServerPatchView.class); } } throw new Exception("???"); }