Example usage for org.apache.commons.io FilenameUtils isExtension

List of usage examples for org.apache.commons.io FilenameUtils isExtension

Introduction

On this page you can find example usages of org.apache.commons.io FilenameUtils.isExtension.

Prototype

public static boolean isExtension(String filename, Collection<String> extensions) 

Source Link

Document

Checks whether the extension of the filename is one of those specified.

Usage

From source file:it.isislab.dmason.util.SystemManagement.Master.thrower.DMasonMaster.java

/**
 * Scans the FTP update directory and records the name of the latest worker
 * jar into {@code workerJarName}.
 *
 * @return 0 if the directory is empty (or cannot be listed),
 *         1 if it contains more than two files, or two files but no jar,
 *         2 if exactly two files are present and a jar was found
 *           ({@code workerJarName} is set in that case),
 *         -1 otherwise (e.g. exactly one file present)
 */
private int getCurWorkerJarName() {
    File file = new File(FTP_HOME + dirSeparator + UPDATE_DIR);
    File[] files = file.listFiles();

    // listFiles() returns null when the path does not exist or is not a
    // directory; treat that the same as an empty directory to avoid an NPE.
    if (files == null || files.length == 0)
        return 0;
    if (files.length > 2)
        return 1;
    if (files.length == 2) {
        for (File curfile : files) {
            if (curfile.isFile() && FilenameUtils.isExtension(curfile.getName(), "jar")) {
                workerJarName = curfile.getName();
                return 2;
            }
        }
        // two files present but neither is a jar
        return 1;
    }
    return -1;
}

From source file:nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.ReadGenoAndAsFromIndividual.java

/**
 * Produces allele-specific read counts for a single individual: loads a
 * genotype dataset (tabixed VCF or TriTyper), matches the bam file's base
 * name to a sample index via the coupling file, then writes one
 * tab-separated line per biallelic SNP to {@code outputLocation}.
 *
 * @param loc_of_bam1       path to the sample's bam file; must exist
 * @param genotype_loc      either a ".gz" VCF (with a ".tbi" index next to
 *                          it) or a TriTyper directory containing
 *                          GenotypeMatrix.dat
 * @param coupling_location file coupling sample names to genotype individuals
 * @param outputLocation    output text file, written as UTF-8
 * @param snpLocation       optional file listing SNP ids to restrict the
 *                          analysis to; pass "" to analyze all SNPs
 * @throws IOException on read/write failures
 * @throws Exception   propagated from downstream helpers
 */
public static void readGenoAndAsFromIndividual(String loc_of_bam1, String genotype_loc,
        String coupling_location, String outputLocation, String snpLocation) throws IOException, Exception {

    if (GlobalVariables.verbosity >= 1) {
        //Print ASREADS header
        System.out.println("---- Starting ASREADS for the following settings: ----");
        System.out.println("\t input bam:         " + loc_of_bam1);
        System.out.println("\t genotype location: " + genotype_loc);
        System.out.println("\t coupling file:     " + coupling_location);
        System.out.println("\t output location:   " + outputLocation);
        if (!snpLocation.equals("")) {
            System.out.println("\t snp Location:      " + snpLocation);
        } else {
            System.out.println("\t snp Location:      " + "NONE");
        }

        System.out.println("------------------------------------------------------");
    }

    //parse command line arguments
    String loc_of_bam;
    loc_of_bam = loc_of_bam1;
    System.out.println("Location of bam file: ");
    System.out.println(loc_of_bam);

    // fail fast when the bam file is missing
    if (!new File(loc_of_bam).exists()) {
        throw new IllegalArgumentException("ERROR! Location of bam file is not an existing file. Exitting.");
    } else {
        if (GlobalVariables.verbosity >= 10) {
            System.out.println("Location of bam file is an existing file, will continue.");
        }
    }

    RandomAccessGenotypeData TTdataSet;
    VcfGenotypeData VCFdataSet;
    HashMap<String, GeneticVariant> variantIdMap;
    String[] individual_names;

    // a tabix index is expected to sit next to the VCF, named "<vcf>.tbi"
    String tabixLoc = genotype_loc + ".tbi";

    //open vcf dataset
    // based on extension and existence of both files.
    if (FilenameUtils.isExtension(genotype_loc, "gz") && new File(tabixLoc).exists()
            && new File(genotype_loc).exists()) {
        try {
            VCFdataSet = new VcfGenotypeData(new File(genotype_loc), new File(tabixLoc), 0.99);
            variantIdMap = VCFdataSet.getVariantIdMap();
            individual_names = VCFdataSet.getSampleNames();
        } catch (IOException ex) {
            // NOTE(review): the original cause is printed but not chained
            // into the thrown exception — consider passing it along.
            System.err.println("Error reading vcf dataset: " + genotype_loc);
            throw new IllegalArgumentException();
        }

    } else if (new File(genotype_loc + "/GenotypeMatrix.dat").exists()) {
        //assuming trityper dataset based on the genotype matrix
        try {
            TTdataSet = new TriTyperGenotypeData(new File(genotype_loc));
            variantIdMap = TTdataSet.getVariantIdMap();
            individual_names = TTdataSet.getSampleNames();
        } catch (IOException ex) {
            System.err.println("Error reading trityper dataset: " + genotype_loc);
            throw new IllegalArgumentException();
        }

    } else {
        throw new IllegalDataException("could not find a Trityper or vcf file in the genotype location");
    }

    //get the variants in the variantIdMAP

    Set<String> snpNames = variantIdMap.keySet();

    ArrayList<String> SNPsToAnalyze;
    SNPsToAnalyze = new ArrayList<String>();

    //If available, read the file with rs numbers.
    if (!snpLocation.equals("")) {

        ArrayList<String> includeSNPs = UtilityMethods.readFileIntoStringArrayList(snpLocation);

        int snpsNotFound = 0;

        // keep only the requested SNPs that actually exist in the dataset
        for (String snp_to_include : includeSNPs) {
            if (snpNames.contains(snp_to_include)) {
                SNPsToAnalyze.add(snp_to_include);
            } else {
                snpsNotFound++;
            }
        }

        if (GlobalVariables.verbosity >= 1) {
            System.out.println("WARNING: Did not find " + Integer.toString(snpsNotFound) + " out of "
                    + Integer.toString(includeSNPs.size()) + " SNPs in the include file.");
        }
    } else {
        // no include file: analyze every SNP in the genotype data
        for (String snp_to_include : snpNames) {
            SNPsToAnalyze.add(snp_to_include);
        }
    }

    //String path = "/gcc/groups/lld/tmp01/projects/bamFiles/";
    //sample_map contains all the individuals that are in the sample file.
    // NOTE(review): raw HashMap — presumably String -> sample index; the
    // value is re-parsed as an int below. Confirm against
    // convert_individual_names and consider HashMap<String, Integer>.
    HashMap sample_map = convert_individual_names(individual_names, coupling_location);

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("Sample names were loaded.");
    }
    if (GlobalVariables.verbosity >= 100) {
        System.out.println(sample_map.toString());
    }

    //Twice because my files have the .MERGED.sorted.bam suffix attached to them.
    // (three getBaseName calls strip ".bam", ".sorted" and ".MERGED")
    String sample_name = FilenameUtils
            .getBaseName(FilenameUtils.getBaseName(FilenameUtils.getBaseName(loc_of_bam)));

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("sample_name: " + sample_name);
        System.out.println("sample_map:  " + sample_map.toString());
    }

    Object sample_idx = sample_map.get(sample_name);

    if (sample_idx == null) {
        throw new IllegalArgumentException("Couldn't find the filename in the sample names. Quitting.");
    }

    int sample_index = Integer.parseInt(sample_idx.toString());

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("sample_index: " + sample_index);
    }

    //bam file path and filename
    String path_and_filename = loc_of_bam;
    File sample_file = new File(path_and_filename);

    SamReader bam_file = SamReaderFactory.makeDefault().open(sample_file);

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("Initialized for reading bam file");
    }

    // NOTE(review): writer (and bam_file) are not closed if an exception is
    // thrown below — try-with-resources would guarantee cleanup.
    PrintWriter writer = new PrintWriter(outputLocation, "UTF-8");

    int i = 0;
    for (String i_snp : SNPsToAnalyze) {

        //System.out.println(i_snp);

        GeneticVariant this_variant = variantIdMap.get(i_snp);

        String chromosome = this_variant.getSequenceName();
        String position = String.valueOf(this_variant.getStartPos());

        // We only do analyses if we find a SNP and it is biallelic
        // However this is trityper data, so if we use
        // the allele count is used for the check of something. 

        //DO NOT ENTER A SEPARATED GENOMIC DATASET OTHERWISE THIS WILL BREAK.
        // NOTE(review): non-short-circuit '&' — both sides are always
        // evaluated; '&&' was probably intended (same result here).
        if (this_variant.isSnp() & this_variant.isBiallelic()) {

            String row_of_table = get_allele_specific_overlap_at_snp(this_variant, sample_index, chromosome,
                    position, bam_file);

            //commented out the phasing part.

            writer.println(chromosome + "\t" + position + "\t" + i_snp + "\t"
                    + this_variant.getVariantAlleles().getAllelesAsChars()[0] + "\t"
                    + this_variant.getVariantAlleles().getAllelesAsChars()[1] + "\t" + row_of_table + "\t"
                    + Arrays.toString(this_variant.getSampleVariants().get(sample_index).getAllelesAsChars()) //+ "\t" +
            //Boolean.toString(this_variant.getSamplePhasing().get(sample_index))
            );
        }

        i++;

        // progress report every 10k SNPs (only at high verbosity)
        if ((i % 10000 == 0) && (GlobalVariables.verbosity >= 10)) {

            System.out.println("Finished " + Integer.toString(i) + " SNPs");

        }

    }
    writer.close();
}

From source file:org.apache.gobblin.compaction.mapreduce.MRCompactorJobRunner.java

/**
 * Executes one MR compaction job for this dataset and records the outcome
 * in {@code this.status} (COMMITTED or ABORTED).
 *
 * Two modes, selected by the COMPACTION_JOB_LATE_DATA_MOVEMENT_TASK
 * property: (1) late-data movement — copies late files whose extension is
 * applicable into the (late) output path; (2) full compaction — configures,
 * submits and waits for the MR job, then publishes its output if the data
 * is complete. Any error is rethrown as an unchecked exception.
 */
@Override
public void run() {
    Configuration conf = HadoopUtils.getConfFromState(this.dataset.jobProps());

    // Turn on mapreduce output compression by default
    if (conf.get("mapreduce.output.fileoutputformat.compress") == null
            && conf.get("mapred.output.compress") == null) {
        conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
    }

    // Disable delegation token cancellation by default
    if (conf.get("mapreduce.job.complete.cancel.delegation.tokens") == null) {
        conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
    }

    try {
        DateTime compactionTimestamp = getCompactionTimestamp();
        LOG.info("MR Compaction Job Timestamp " + compactionTimestamp.getMillis());
        if (this.dataset.jobProps().getPropAsBoolean(MRCompactor.COMPACTION_JOB_LATE_DATA_MOVEMENT_TASK,
                false)) {
            // Late-data movement mode: only copy files with an applicable extension.
            List<Path> newLateFilePaths = Lists.newArrayList();
            for (String filePathString : this.dataset.jobProps()
                    .getPropAsList(MRCompactor.COMPACTION_JOB_LATE_DATA_FILES)) {
                if (FilenameUtils.isExtension(filePathString, getApplicableFileExtensions())) {
                    newLateFilePaths.add(new Path(filePathString));
                }
            }

            // Deduplicated output keeps late data in a separate "late" path.
            Path lateDataOutputPath = this.outputDeduplicated ? this.dataset.outputLatePath()
                    : this.dataset.outputPath();
            LOG.info(String.format("Copying %d late data files to %s", newLateFilePaths.size(),
                    lateDataOutputPath));
            if (this.outputDeduplicated) {
                if (!this.fs.exists(lateDataOutputPath)) {
                    if (!this.fs.mkdirs(lateDataOutputPath)) {
                        throw new RuntimeException(
                                String.format("Failed to create late data output directory: %s.",
                                        lateDataOutputPath.toString()));
                    }
                }
            }
            this.copyDataFiles(lateDataOutputPath, newLateFilePaths);
            if (this.outputDeduplicated) {
                // newly arrived late data may make the dataset eligible for recompaction
                dataset.checkIfNeedToRecompact(datasetHelper);
            }
            this.status = Status.COMMITTED;
        } else {
            // Full compaction mode: bail out early if output exists and must not be overwritten.
            if (this.fs.exists(this.dataset.outputPath()) && !canOverwriteOutputDir()) {
                LOG.warn(String.format("Output paths %s exists. Will not compact %s.",
                        this.dataset.outputPath(), this.dataset.inputPaths()));
                this.status = Status.COMMITTED;
                return;
            }
            addJars(conf);
            Job job = Job.getInstance(conf);
            this.configureJob(job);
            this.submitAndWait(job);
            if (shouldPublishData(compactionTimestamp)) {
                // remove all invalid empty files due to speculative task execution
                List<Path> goodPaths = CompactionAvroJobConfigurator.getGoodFiles(job,
                        this.dataset.outputTmpPath(), this.tmpFs);

                if (!this.recompactAllData && this.recompactFromDestPaths) {
                    // append new files without deleting output directory
                    addGoodFilesToOutputPath(goodPaths);
                    // clean up late data from outputLateDirectory, which has been set to inputPath
                    deleteFilesByPaths(this.dataset.inputPaths());
                } else {
                    moveTmpPathToOutputPath();
                    if (this.recompactFromDestPaths) {
                        deleteFilesByPaths(this.dataset.additionalInputPaths());
                    }
                }
                submitSlaEvent(job);
                LOG.info("Successfully published data for input folder " + this.dataset.inputPaths());
                this.status = Status.COMMITTED;
            } else {
                LOG.info("Data not published for input folder " + this.dataset.inputPaths()
                        + " due to incompleteness");
                this.status = Status.ABORTED;
                return;
            }
        }
        if (renameSourceDir) {
            MRCompactor.renameSourceDirAsCompactionComplete(this.fs, this.dataset);
        } else {
            this.markOutputDirAsCompleted(compactionTimestamp);
        }
        this.submitRecordsCountsEvent();
    } catch (Throwable t) {
        // surface any failure to the caller as an unchecked exception
        throw Throwables.propagate(t);
    }
}

From source file:org.apache.jena.atlas.io.IO.java

/**
 * Strips a known compression extension from the filename, if present.
 * The recognized extensions are those handled by {@link #openFileEx};
 * when none match, the filename is returned unchanged.
 */
static public String filenameNoCompression(String filename) {
    boolean hasCompressionExt = FilenameUtils.isExtension(filename, extensions);
    return hasCompressionExt ? FilenameUtils.removeExtension(filename) : filename;
}

From source file:org.apache.maven.plugin.cxx.CMakeMojo.java

/**
 * Turns a static-library file name into its CMake base name. When the name
 * carries the platform static-library extension, the extension is removed
 * and, for Maven dependencies, replaced by the ${STATIC_LIBRARY_SUFFIX}
 * placeholder; the platform prefix is likewise stripped/replaced.
 *
 * @param sName            the library file name to transform
 * @param bMavenDependency true to substitute CMake prefix/suffix placeholders
 * @return the transformed name, or "" when a non-empty platform prefix is
 *         configured but the name does not start with it
 */
protected String baseNameAsStaticLibrary(String sName, boolean bMavenDependency) {
    if (FilenameUtils.isExtension(sName, FilenameUtils.getExtension(staticLibrarySuffix))) {
        String suffixToken = bMavenDependency ? "${STATIC_LIBRARY_SUFFIX}" : "";
        String prefixToken = bMavenDependency ? "${STATIC_LIBRARY_PREFIX}" : "";
        sName = FilenameUtils.removeExtension(sName) + suffixToken;
        if (StringUtils.isEmpty(staticLibraryPrefix)) {
            // no platform prefix configured: just prepend the placeholder
            sName = prefixToken + sName;
        } else if (0 == sName.indexOf(staticLibraryPrefix)) {
            // strip the platform prefix, then prepend the placeholder
            sName = prefixToken + sName.replaceFirst(Pattern.quote(staticLibraryPrefix), "");
        } else {
            // not shaped like a static library: reject
            sName = "";
        }
    }
    return sName;
}

From source file:org.apache.maven.plugin.cxx.CMakeMojo.java

/**
 * Turns a shared-module file name into its CMake base name. When the name
 * carries the platform shared-module extension, the extension is removed
 * and, for Maven dependencies, replaced by the ${SHARED_MODULE_SUFFIX}
 * placeholder; the platform prefix is likewise stripped/replaced.
 *
 * @param sName            the module file name to transform
 * @param bMavenDependency true to substitute CMake prefix/suffix placeholders
 * @return the transformed name, or "" when a non-empty platform prefix is
 *         configured but the name does not start with it
 */
protected String baseNameAsSharedModule(String sName, boolean bMavenDependency) {
    if (FilenameUtils.isExtension(sName, FilenameUtils.getExtension(sharedModuleSuffix))) {
        String suffixToken = bMavenDependency ? "${SHARED_MODULE_SUFFIX}" : "";
        String prefixToken = bMavenDependency ? "${SHARED_MODULE_PREFIX}" : "";
        sName = FilenameUtils.removeExtension(sName) + suffixToken;
        if (StringUtils.isEmpty(sharedModulePrefix)) {
            // no platform prefix configured: just prepend the placeholder
            sName = prefixToken + sName;
        } else if (0 == sName.indexOf(sharedModulePrefix)) {
            // strip the platform prefix, then prepend the placeholder
            sName = prefixToken + sName.replaceFirst(Pattern.quote(sharedModulePrefix), "");
        } else {
            // not shaped like a shared module: reject
            sName = "";
        }
    }
    return sName;
}

From source file:org.apache.maven.plugin.cxx.CMakeMojo.java

/**
 * Turns a shared-library file name into its CMake base name. When the name
 * carries the platform shared-library extension, the extension is removed
 * and, for Maven dependencies, replaced by the ${SHARED_LIBRARY_SUFFIX}
 * placeholder; the platform prefix is likewise stripped/replaced.
 *
 * @param sName            the library file name to transform
 * @param bMavenDependency true to substitute CMake prefix/suffix placeholders
 * @return the transformed name, or "" when a non-empty platform prefix is
 *         configured but the name does not start with it
 */
protected String baseNameAsSharedLibrary(String sName, boolean bMavenDependency) {
    if (FilenameUtils.isExtension(sName, FilenameUtils.getExtension(sharedLibrarySuffix))) {
        String suffixToken = bMavenDependency ? "${SHARED_LIBRARY_SUFFIX}" : "";
        String prefixToken = bMavenDependency ? "${SHARED_LIBRARY_PREFIX}" : "";
        sName = FilenameUtils.removeExtension(sName) + suffixToken;
        if (StringUtils.isEmpty(sharedLibraryPrefix)) {
            // no platform prefix configured: just prepend the placeholder
            sName = prefixToken + sName;
        } else if (0 == sName.indexOf(sharedLibraryPrefix)) {
            // strip the platform prefix, then prepend the placeholder
            sName = prefixToken + sName.replaceFirst(Pattern.quote(sharedLibraryPrefix), "");
        } else {
            // not shaped like a shared library: reject
            sName = "";
        }
    }
    return sName;
}

From source file:org.apache.maven.plugins.scmpublish.AbstractScmPublishMojo.java

/**
 * Decides, by file extension, whether the given file's line endings should
 * be normalized. The extension set is built lazily from the built-in
 * defaults plus any user-configured extras, then cached in
 * {@code normalizeExtensions}.
 *
 * @param f file to test
 * @return true when the file's extension is in the normalization set
 * @throws IOException declared for overriding implementations
 */
protected boolean requireNormalizeNewlines(File f) throws IOException {
    if (normalizeExtensions == null) {
        // first call: assemble defaults + extras, then cache
        normalizeExtensions = new HashSet<String>(Arrays.asList(NORMALIZE_EXTENSIONS));
        if (extraNormalizeExtensions != null) {
            normalizeExtensions.addAll(Arrays.asList(extraNormalizeExtensions));
        }
    }
    String name = f.getName();
    return FilenameUtils.isExtension(name, normalizeExtensions);
}

From source file:org.batoo.jpa.parser.impl.acl.ClassloaderAnnotatedClassLocator.java

/**
 * Recursively walks {@code path}, collecting every persistent class found
 * in a {@code .class} file into {@code classes}.
 *
 * @param cl      classloader used to resolve candidate class names
 * @param classes accumulator set, also returned for chaining
 * @param root    root directory the scan started from; used to derive the
 *                package-qualified class name from the file path
 * @param path    current file or directory being visited
 * @return the {@code classes} accumulator
 */
private Set<Class<?>> findClasses(ClassLoader cl, Set<Class<?>> classes, String root, String path) {
    final File file = new File(path);

    if (file.isDirectory()) {
        ClassloaderAnnotatedClassLocator.LOG.debug("Processing directory {0}", path);

        // list() returns null on I/O errors or permission problems;
        // treat that as an empty directory instead of throwing an NPE.
        final String[] children = file.list();
        if (children != null) {
            for (final String child : children) {
                this.findClasses(cl, classes, root, path + "/" + child);
            }
        }
    } else {
        if (FilenameUtils.isExtension(path, "class")) {
            final String normalizedPath = FilenameUtils.separatorsToUnix(FilenameUtils.normalize(path));

            // path relative to root, with '/' replaced by '.' and the
            // trailing ".class" (6 chars) dropped, yields the class name
            final int rootLength = FilenameUtils.normalizeNoEndSeparator(root).length();
            String className = normalizedPath.substring(rootLength + 1).replaceAll("/", ".");
            className = StringUtils.left(className, className.length() - 6);

            final Class<?> clazz = this.isPersistentClass(cl, className);
            if (clazz != null) {
                ClassloaderAnnotatedClassLocator.LOG.debug("Found persistent class {0}", className);
                classes.add(clazz);
            }
        }
    }

    return classes;
}

From source file:org.codice.ddf.commands.catalog.ImportCommand.java

/**
 * Validates that the given path refers to an existing zip archive.
 *
 * @param importFile path of the archive to import
 * @return a {@link File} handle for the validated archive
 * @throws CatalogCommandRuntimeException when the file does not exist or
 *         does not have a "zip" extension
 */
private File initImportFile(String importFile) {
    final File archive = new File(importFile);

    if (!archive.exists()) {
        throw new CatalogCommandRuntimeException("File does not exist: " + importFile);
    }
    if (!FilenameUtils.isExtension(importFile, "zip")) {
        throw new CatalogCommandRuntimeException("File must be a zip file: " + importFile);
    }
    return archive;
}