Example usage for java.nio.file Path getFileName

Introduction

On this page you can find usage examples for java.nio.file Path getFileName.

Prototype

Path getFileName();

Document

Returns the name of the file or directory denoted by this path as a Path object. The file name is the farthest element from the root in the directory hierarchy; for a path with zero elements (such as a root path) the method returns null.
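
A minimal sketch of typical use (the path literals here are illustrative and not taken from the examples below):

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetFileNameSketch {
    public static void main(String[] args) {
        // The file name is the last (farthest-from-root) element of the path.
        Path path = Paths.get("/var/log/app/server.log"); // illustrative path
        Path name = path.getFileName();
        System.out.println(name); // server.log

        // Callers usually convert the result to a String, e.g. for suffix checks.
        System.out.println(name.toString().endsWith(".log")); // true

        // A path with zero elements, such as a root, has no file name.
        System.out.println(Paths.get("/").getFileName()); // null
    }
}

All of the examples below follow the same pattern: getFileName().toString() to obtain the last path element as a String.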

Usage

From source file:org.apache.flink.tests.util.FlinkDistribution.java

public Stream<String> searchAllLogs(Pattern pattern, Function<Matcher, String> matchProcessor)
        throws IOException {
    final List<String> matches = new ArrayList<>(2);

    try (Stream<Path> logFilesStream = Files.list(log)) {
        final Iterator<Path> logFiles = logFilesStream.iterator();
        while (logFiles.hasNext()) {
            final Path logFile = logFiles.next();
            if (!logFile.getFileName().toString().endsWith(".log")) {
                // ignore logs for previous runs that have a number suffix
                continue;
            }
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(logFile.toFile()), StandardCharsets.UTF_8))) {
                String line;
                while ((line = br.readLine()) != null) {
                    Matcher matcher = pattern.matcher(line);
                    if (matcher.matches()) {
                        matches.add(matchProcessor.apply(matcher));
                    }
                }
            }
        }
    }
    return matches.stream();
}

From source file:com.qwazr.extractor.ParserInterface.java

/**
 * Read a document and fill the resultBuilder.
 *
 * @param parameters    The optional parameters of the parser
 * @param filePath      the path of the document file to parse
 * @param extension     an optional extension of the file
 * @param mimeType      an optional mime type of the file
 * @param resultBuilder the result builder to fill
 * @throws Exception if any error occurs
 */
default void parseContent(final MultivaluedMap<String, String> parameters, final Path filePath,
        String extension, final String mimeType, final ParserResultBuilder resultBuilder) throws Exception {
    if (extension == null)
        extension = FilenameUtils.getExtension(filePath.getFileName().toString());
    try (final InputStream in = Files.newInputStream(filePath);
            final BufferedInputStream bIn = new BufferedInputStream(in)) {
        parseContent(parameters, bIn, extension, mimeType, resultBuilder);
    }
}

From source file:org.trustedanalytics.h2oscoringengine.publisher.steps.AppBitsUploadingStep.java

private ByteArrayResource prepareData(Path dataPath) throws IOException {
    return new ByteArrayResource(Files.readAllBytes(dataPath)) {
        @Override
        public String getFilename() {
            return dataPath.getFileName().toString();
        }
    };
}

From source file:org.apache.archiva.indexer.maven.merger.DefaultIndexMerger.java

@Override
public IndexingContext buildMergedIndex(IndexMergerRequest indexMergerRequest) throws IndexMergerException {
    String groupId = indexMergerRequest.getGroupId();

    if (runningGroups.contains(groupId)) {
        log.info("skip build merge remote indexes for id: '{}' as already running", groupId);
        return null;
    }

    runningGroups.add(groupId);

    StopWatch stopWatch = new StopWatch();
    stopWatch.reset();
    stopWatch.start();

    Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();

    String tempRepoId = mergedIndexDirectory.getFileName().toString();

    try {
        Path indexLocation = mergedIndexDirectory.resolve(indexMergerRequest.getMergedIndexPath());

        List<IndexingContext> members = indexMergerRequest.getRepositoriesIds().stream()
                .map(id -> repositoryRegistry.getRepository(id))
                .filter(repo -> repo.getType().equals(RepositoryType.MAVEN)).map(repo -> {
                    try {
                        return repo.getIndexingContext().getBaseContext(IndexingContext.class);
                    } catch (UnsupportedBaseContextException e) {
                        // ignore repositories whose base context is unsupported
                        return null;
                    }
                }).filter(Objects::nonNull).collect(Collectors.toList());
        ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
        IndexingContext mergedCtx = indexer.createMergedIndexingContext(tempRepoId, tempRepoId,
                mergedIndexDirectory.toFile(), indexLocation.toFile(), true, memberProvider);
        mergedCtx.optimize();

        if (indexMergerRequest.isPackIndex()) {
            IndexPackingRequest request = new IndexPackingRequest(mergedCtx, //
                    mergedCtx.acquireIndexSearcher().getIndexReader(), //
                    indexLocation.toFile());
            indexPacker.packIndex(request);
        }

        if (indexMergerRequest.isTemporary()) {
            temporaryGroupIndexes.add(new TemporaryGroupIndex(mergedIndexDirectory, tempRepoId, groupId,
                    indexMergerRequest.getMergedIndexTtl()));
            temporaryContextes.add(mergedCtx);
        }
        stopWatch.stop();
        log.info("merged index for repos {} in {} s", indexMergerRequest.getRepositoriesIds(),
                stopWatch.getTime());
        return mergedCtx;
    } catch (IOException e) {
        throw new IndexMergerException(e.getMessage(), e);
    } finally {
        runningGroups.remove(groupId);
    }
}

From source file:com.sastix.cms.server.utils.FileService.java

private String saveFile(String relativePath, byte[] resourceBinary) {
    Path path = Paths.get(volume + relativePath);
    try {
        Files.write(path, resourceBinary, StandardOpenOption.CREATE_NEW);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return path.getFileName().toString();
}

From source file:com.github.zhanhb.ckfinder.connector.plugins.WatermarkProcessor.java

@Override
public void onFileUploadComplete(FileUploadEvent event) {
    try {
        final Path originalFile = event.getFile();
        final WatermarkPosition position = new WatermarkPosition(settings.getMarginBottom(),
                settings.getMarginRight());
        String format = FileUtils.getExtension(originalFile.getFileName().toString());
        format = format != null ? format.toLowerCase() : null;
        BufferedImage watermark = getWatermarkImage(settings);
        if (watermark != null) {
            BufferedImage image;
            try (InputStream in = Files.newInputStream(originalFile)) {
                image = ImageIO.read(in);
            }
            try (OutputStream out = Files.newOutputStream(originalFile)) {
                Thumbnails.of(image).watermark(position, watermark, settings.getTransparency()).scale(1)
                        .outputQuality(settings.getQuality()).outputFormat(format).toOutputStream(out);
            }
        }
    } catch (Exception ex) {
        // only log error if watermark is not created
        log.error("", ex);
    }
}

From source file:neembuu.uploader.zip.generator.NUZipFileGenerator.java

private void handleClassEntry(Path pathInZip, final Class c, FileSystem fs, Path uploadersDirectory)
        throws IOException {
    Path classLocationOnDisk = uploadersDirectory.resolve(pathInZip.toString());
    try (DirectoryStream<Path> ds = Files.newDirectoryStream(classLocationOnDisk,
            new DirectoryStream.Filter<Path>() {
                @Override
                public boolean accept(Path entry) throws IOException {
                    String fn = entry.getFileName().toString();
                    String cn = c.getSimpleName();
                    return fn.equals(cn + ".class") || fn.startsWith(cn + "$");
                }
            })) {
        for (Path p : ds) {
            byte[] b = Files.readAllBytes(p);
            Files.write(pathInZip.resolve(p.getFileName().toString()), b);
        }
    }

    // Say we want to zip SomeClass.class: then we also need to zip SomeClass$1.class.
    // That is, inner classes and anonymous inner classes must go into the zip as well.

From source file:au.org.ands.vocabs.toolkit.provider.transform.GetMetadataTransformProvider.java

/**
 * Parse the files harvested from PoolParty and extract the
 * metadata.
 * @param pPprojectId The PoolParty project id.
 * @return The results of the metadata extraction.
 */
public final HashMap<String, Object> extractMetadata(final String pPprojectId) {
    Path dir = Paths.get(ToolkitFileUtils.getMetadataOutputPath(pPprojectId));
    HashMap<String, Object> results = new HashMap<String, Object>();
    ConceptHandler conceptHandler = new ConceptHandler();
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
        for (Path entry : stream) {
            conceptHandler.setSource(entry.getFileName().toString());
            RDFFormat format = Rio.getParserFormatForFileName(entry.toString());
            RDFParser rdfParser = Rio.createParser(format);
            rdfParser.setRDFHandler(conceptHandler);
            try (FileInputStream is = new FileInputStream(entry.toString())) {
                logger.debug("Reading RDF:" + entry.toString());
                rdfParser.parse(is, entry.toString());
            }
        }
    } catch (DirectoryIteratorException | IOException | RDFParseException | RDFHandlerException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in extractMetadata while Parsing RDF");
        logger.error("Exception in extractMetadata while Parsing RDF:", ex);
        return results;
    }
    results.putAll(conceptHandler.getMetadata());
    results.put("concept_count", Integer.toString(conceptHandler.getCountedConcepts()));
    return results;
}

From source file:com.vaushell.superpipes.nodes.buffer.N_Buffer.java

@Override
protected void prepareImpl() throws Exception {
    // Load messages IDs
    messagesPath = getDispatcher().getDatas().resolve(getNodeID());

    Files.createDirectories(messagesPath);

    try (final DirectoryStream<Path> stream = Files.newDirectoryStream(messagesPath)) {
        for (final Path p : stream) {
            final long ID = Long.parseLong(p.getFileName().toString());
            messageIDs.add(ID);
        }
    }
}

From source file:net.sf.jabref.logic.exporter.FileSaveSession.java

@Override
public void commit(Path file) throws SaveException {
    if (file == null) {
        return;
    }
    if (backup && Files.exists(file)) {
        Path fileName = file.getFileName();
        Path backupFile = file.resolveSibling(fileName + BACKUP_EXTENSION);
        try {
            FileUtil.copyFile(file.toFile(), backupFile.toFile(), true);
        } catch (IOException ex) {
            LOGGER.error("Problem copying file", ex);
            throw SaveException.BACKUP_CREATION;
        }
    }
    try {
        if (useLockFile) {
            try {
                if (FileBasedLock.createLockFile(file)) {
                    // Oops, the lock file already existed. Try to wait it out:
                    if (!FileBasedLock.waitForFileLock(file, 10)) {
                        throw SaveException.FILE_LOCKED;
                    }
                }
            } catch (IOException ex) {
                LOGGER.error("Error when creating lock file.", ex);
            }
        }

        FileUtil.copyFile(temporaryFile.toFile(), file.toFile(), true);
    } catch (IOException ex2) {
        // If something happens here, what can we do to correct the problem? The file is corrupted, but we still
        // have a clean copy in tmp. However, we just failed to copy tmp to file, so it's not likely that
        // repeating the action will have a different result.
        // On the other hand, our temporary file should still be clean, and won't be deleted.
        throw new SaveException("Save failed while committing changes: " + ex2.getMessage(),
                Localization.lang("Save failed while committing changes: %0", ex2.getMessage()));
    } finally {
        if (useLockFile) {
            FileBasedLock.deleteLockFile(file);
        }
    }
    try {
        Files.delete(temporaryFile);
    } catch (IOException e) {
        LOGGER.warn("Cannot delete temporary file", e);
    }
}