Example usage for org.apache.commons.io FilenameUtils getFullPath

Introduction

This page lists usage examples for org.apache.commons.io.FilenameUtils.getFullPath(String).

Prototype

public static String getFullPath(String filename) 

Document

Gets the full path from a full filename, which is the prefix + path.
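
For reference, here is a minimal sketch (not taken from the projects below) showing typical return values; the inputs are made up for illustration.

import org.apache.commons.io.FilenameUtils;

public class GetFullPathDemo {
    public static void main(String[] args) {
        // the directory part including any prefix, with the trailing separator kept
        System.out.println(FilenameUtils.getFullPath("a/b/c.txt"));       // "a/b/"
        System.out.println(FilenameUtils.getFullPath("a.txt"));           // ""
        System.out.println(FilenameUtils.getFullPath("a/b/c/"));          // "a/b/c/"
        System.out.println(FilenameUtils.getFullPath("C:\\a\\b\\c.txt")); // "C:\\a\\b\\"
    }
}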

Usage

From source file:au.com.redboxresearchdata.fascinator.harvester.BaseJsonHarvester.java

protected void setUpRules() throws HarvesterException {
    try {
        String rulesConfigFilePath = harvestConfig.getString(null, "harvester", "rulesConfig");
        log.debug("Initialising Harvester, using config path:" + rulesConfigFilePath);
        rulesConfigFile = new File(rulesConfigFilePath);
        if (!rulesConfigFile.exists()) {
            errorMessage = "Rules config file not found: " + rulesConfigFilePath;
            throw new Exception(errorMessage);
        }
        rulesConfig = new JsonSimple(rulesConfigFile);
        String rulesFilePath = rulesConfig.getString("", "indexer", "script", "rules");
        log.debug("Initialising Harvester, checking if rulesFilePath exists:" + rulesFilePath);
        rulesFile = new File(rulesFilePath);
        if (!rulesFile.exists()) {
            // try again this time appending the base directory of the rules config path                  
            rulesFilePath = FilenameUtils.getFullPath(rulesConfigFilePath) + rulesFilePath;
            log.debug("Initialising Harvester, nope wasn't there, trying if this exists:" + rulesFilePath);
            rulesFile = new File(rulesFilePath);
            if (!rulesFile.exists()) {
                errorMessage = "Rules file not found '" + rulesFilePath + "', please check the set up...";
                throw new Exception(errorMessage);
            }
        }
        log.info("Using rules file path:" + rulesFilePath);
        handlingType = harvestConfig.getString(HANDLING_TYPE_OVERWRITE, "harvester", "handlingType");
        idField = harvestConfig.getString("", "harvester", "idField");
        if (idField == null) {
            throw new HarvesterException("harvester.idField is not defined in the harvest configuration.");
        }
        log.debug("idField is:" + idField);
        idPrefix = harvestConfig.getString("", "harvester", "recordIDPrefix");
        if (idPrefix == null) {
            throw new HarvesterException(
                    "harvester.recordIDPrefix is not defined in the harvest configuration.");
        }
        mainPayloadId = harvestConfig.getString(DEFAULT_PAYLOAD_ID, "harvester", "mainPayloadId");
        rulesConfigObject = updateHarvestFile(rulesConfigFile);
        rulesObject = updateHarvestFile(rulesFile);
    } catch (Exception e) {
        log.error(errorMessage);
        throw new HarvesterException(e);
    }
    isReady = true;
}
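
The pattern worth noting here: if the configured rules path does not exist as given, it is retried relative to the directory of the rules config file. A minimal sketch of that fallback, with hypothetical names, looks like this:

import java.io.File;
import org.apache.commons.io.FilenameUtils;

// Hypothetical helper: resolve rulesPath against the directory of configPath
// when it cannot be found as given.
static File resolveAgainstConfigDir(String configPath, String rulesPath) {
    File rules = new File(rulesPath);
    if (!rules.exists()) {
        rules = new File(FilenameUtils.getFullPath(configPath) + rulesPath);
    }
    return rules;
}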

From source file:it.geosolutions.geobatch.geoserver.shapefile.ShapeFileAction.java

/**
 * Publishes the incoming shapefile data (a single zip or a set of loose files) to GeoServer.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.setTask("config");
    listenerForwarder.started();

    try {
        //
        // Initializing input variables
        //
        GeoServerActionConfiguration configuration = getConfiguration();
        if (configuration == null) {
            throw new IllegalStateException("ActionConfig is null.");
        }

        // how many files do we have?
        final int inputSize = events.size();

        // Fetch the first event in the queue.
        // We may have one in these 2 cases:
        // 1) a single event for a .zip file
        // 2) a list of events for a (.shp+.dbf+.shx) collection, plus some other optional files
        final EventObject event = events.peek();

        // the name of the shapefile
        String[] shapeNames;

        // the output (to send to the geoserver) file
        File zippedFile = null;

        // upload method to use
        it.geosolutions.geobatch.geoserver.UploadMethod transferMethod = it.geosolutions.geobatch.geoserver.UploadMethod
                .valueOf(configuration.getDataTransferMethod());
        if (transferMethod == null) {
            transferMethod = it.geosolutions.geobatch.geoserver.UploadMethod.getDefault(); // default one
        }

        // list of file to send to the GeoServer
        File[] files = null;
        File tmpDirFile = null;
        Integer epsgCode = null;
        GeometryDescriptor descriptor = null;
        CoordinateReferenceSystem crs = null;

        if (inputSize == 1) {
            //
            // SINGLE FILE, is a zip or throw error
            //
            zippedFile = toFile(event);
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Testing for compressed file: " + zippedFile);

            // try to extract
            tmpDirFile = Extract.extract(zippedFile, getTempDir(), false);
            listenerForwarder.progressing(5, "File extracted");

            //if the output (Extract) file is not a dir the event was a not compressed file so
            //we have to throw and error
            if (tmpDirFile == null) {
                throw new IllegalStateException("Not valid input: we need a zip file ");
            }

            if (!tmpDirFile.isDirectory()) {
                if (!tmpDirFile.isFile()) {
                    throw new IllegalStateException("Not valid input: we need a zip file ");
                } else {
                    tmpDirFile = tmpDirFile.getParentFile();
                }
            }

            // collect extracted files
            final Collector c = new Collector(
                    FileFilterUtils.notFileFilter(FileFilterUtils.nameFileFilter(tmpDirFile.getName()))); // no filter
            final List<File> fileList = c.collect(tmpDirFile);
            files = fileList.toArray(new File[1]);

            // Check if there is at least one shp there
            shapeNames = acceptable(files);

        } else {
            //
            // Multiple separated files, let's look for the right one
            //
            if (LOGGER.isTraceEnabled())
                LOGGER.trace("Checking input collection...");

            listenerForwarder.progressing(5, "Checking input collection...");

            // collect files
            files = new File[events.size()];
            int i = 0;
            for (EventObject ev : events) {
                files[i++] = toFile(ev);
            }

            // Get tmp dir from the absolute path of the first captured file
            tmpDirFile = new File(FilenameUtils.getFullPath(files[0].getAbsolutePath()));

            // Check for shapefile names
            shapeNames = acceptable(files);

            // zip to a single file if method is not external.
            // Will use the first shapeName as the zip name.
            if (transferMethod != it.geosolutions.geobatch.geoserver.UploadMethod.EXTERNAL) {
                zippedFile = Compressor.deflate(getTempDir(), shapeNames[0], files);
                if (zippedFile == null) {
                    throw new IllegalStateException("Unable to create the zip file");
                }
            }

        }

        // check that we actually found some shapefiles
        if (shapeNames == null) {
            final String message = "Input is not a zipped file nor a valid collection of files";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new IllegalStateException(message);
        }

        // do some additional checks and look for some ausillary information
        for (String shape : shapeNames) {
            FileDataStore store = null;

            try {
                // create a shapefile datastore
                store = Utils.SHP_FACTORY.createDataStore(new File(tmpDirFile, shape + ".shp").toURI().toURL());

                // get the CRS
                crs = store.getSchema().getCoordinateReferenceSystem();
                epsgCode = crs != null ? CRS.lookupEpsgCode(crs, false) : null;

                // get the geometry
                descriptor = store.getSchema().getGeometryDescriptor();
            } finally {
                if (store != null) {
                    try {
                        store.dispose();
                    } catch (Exception e) {
                        if (LOGGER.isTraceEnabled()) {
                            LOGGER.trace(e.getLocalizedMessage(), e);
                        }
                    }
                }
            }
        }
        listenerForwarder.progressing(10, "In progress");

        GeoServerRESTReader reader = new GeoServerRESTReader(configuration.getGeoserverURL(),
                configuration.getGeoserverUID(), configuration.getGeoserverPWD());
        GeoServerRESTPublisher publisher = new GeoServerRESTPublisher(configuration.getGeoserverURL(),
                configuration.getGeoserverUID(), configuration.getGeoserverPWD());

        WorkspaceUtils.createWorkspace(reader, publisher, configuration.getDefaultNamespace(),
                configuration.getDefaultNamespaceUri());

        // TODO: check if a layer with the same name already exists in GS
        // TODO: Handle CRSs for multiple files
        // TODO: Handle styles for multiple files (see comment on #16)

        // decide CRS
        String nativeCRS = null;
        ProjectionPolicy projectionPolicy = ProjectionPolicy.NONE; // by default we do nothing
        final String defaultCRS = configuration.getCrs(); //do we have a default crs in the config
        String finalEPSGCode = defaultCRS; // this is the SRS for this shape

        // retain original CRS if the code is there
        if (epsgCode == null) {
            // we do not have a valid EPSG code in the input file, we do need one as per default
            if (finalEPSGCode == null) {
                final String message = "Input file has no CRS neither the configuration provides a default one";
                final ActionException ae = new ActionException(this, message);
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message, ae);
                listenerForwarder.failed(ae);
                throw ae;
            }

            // we do have a default, let's choose the proper CRS management
            if (crs != null) {
                // we have a WKT native crs, let's use it
                nativeCRS = crs.toWKT();
                projectionPolicy = ProjectionPolicy.REPROJECT_TO_DECLARED;
            } else {
                projectionPolicy = ProjectionPolicy.FORCE_DECLARED;
            }

        } else {
            // we do have an EPSG code for the original CRS, do nothing
            finalEPSGCode = "EPSG:" + epsgCode;
            nativeCRS = finalEPSGCode;
        }

        // check style for this geometry
        String defaultStyle = configuration.getDefaultStyle();
        if (defaultStyle == null || defaultStyle.isEmpty()) {
            final GeometryType geometryType = descriptor.getType();
            Class<?> clazz = geometryType.getBinding();
            if (clazz.isAssignableFrom(Point.class) || clazz.isAssignableFrom(MultiPoint.class)) {
                defaultStyle = Utils.DEFAULT_POINT_STYLE;
            } else if (clazz.isAssignableFrom(LineString.class)
                    || clazz.isAssignableFrom(MultiLineString.class)) {
                defaultStyle = Utils.DEFAULT_LINE_STYLE;
            } else if (clazz.isAssignableFrom(Polygon.class) || clazz.isAssignableFrom(MultiPolygon.class)) {
                defaultStyle = Utils.DEFAULT_POLYGON_STYLE;
            }
        }

        UploadMethod uMethod = null;
        switch (transferMethod) {
        case DIRECT:
            uMethod = UploadMethod.FILE;
            break;
        case EXTERNAL:
            uMethod = UploadMethod.EXTERNAL;
            break;
        default:
            throw new IllegalArgumentException(
                    "Unsupported transfer method: " + configuration.getDataTransferMethod());
        }

        // Get some common parameters
        String wsName = configuration.getDefaultNamespace();
        String dsName = configuration.getStoreName() == null ? shapeNames[0] : configuration.getStoreName();
        String lyrName = configuration.getLayerName() == null ? shapeNames[0] : configuration.getLayerName();
        String styleName = defaultStyle;

        //
        // SENDING data to GeoServer via REST protocol.
        //
        boolean success = false;

        // Either publish a single shapefile, or a collection of shapefiles
        if (shapeNames.length == 1) {
            success = publisher.publishShp(wsName, dsName, null, lyrName, uMethod, zippedFile.toURI(),
                    finalEPSGCode, nativeCRS, projectionPolicy, styleName);
        } else {
            success = publisher.publishShpCollection(wsName, dsName, zippedFile.toURI());
        }

        if (success) {
            final String message = "Shape file SUCCESFULLY sent";
            if (LOGGER.isInfoEnabled())
                LOGGER.info(message);
            listenerForwarder.progressing(90, message);
        } else {
            final String message = "Shape file FAILED to be sent";
            final ActionException ae = new ActionException(this, message);
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message, ae);
            listenerForwarder.failed(ae);
            throw ae;
        }

        // If we have shape specific config, apply now
        if (configuration instanceof GeoServerShapeActionConfiguration) {
            // Log
            if (LOGGER.isInfoEnabled())
                LOGGER.info("Configuring shape datastore connection parameters");

            // Get config
            GeoServerShapeActionConfiguration shpConfig = (GeoServerShapeActionConfiguration) configuration;

            // Get managers from geoserver-manager
            GeoServerRESTManager manager = new GeoServerRESTManager(new URL(shpConfig.getGeoserverURL()),
                    shpConfig.getGeoserverUID(), shpConfig.getGeoserverPWD());
            GeoServerRESTStoreManager dsManager = manager.getStoreManager();

            // Read config from GS
            RESTDataStore dsRead = manager.getReader().getDatastore(wsName, dsName);
            GSShapefileDatastoreEncoder dsWrite = new GSShapefileDatastoreEncoder(dsRead);

            // Update store params
            if (shpConfig.getUrl() != null)
                dsWrite.setUrl(shpConfig.getUrl());
            if (shpConfig.getCharset() != null)
                dsWrite.setCharset(shpConfig.getCharset());
            if (shpConfig.getCreateSpatialIndex() != null)
                dsWrite.setCreateSpatialIndex(shpConfig.getCreateSpatialIndex());
            if (shpConfig.getMemoryMappedBuffer() != null)
                dsWrite.setMemoryMappedBuffer(shpConfig.getMemoryMappedBuffer());
            if (shpConfig.getCacheAndReuseMemoryMaps() != null)
                dsWrite.setCacheAndReuseMemoryMaps(shpConfig.getCacheAndReuseMemoryMaps());

            // Push changes to GS
            success = dsManager.update(wsName, dsWrite);

            // Success or die
            if (success) {
                String message = "Shape datastore SUCCESFULLY configured";
                if (LOGGER.isInfoEnabled())
                    LOGGER.info(message);
                listenerForwarder.progressing(100, message);
            } else {
                String message = "Shape datastore FAILED to be configured";
                final ActionException ae = new ActionException(this, message);
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message, ae);
                listenerForwarder.failed(ae);
                throw ae;
            }
        }

        return events;

    } catch (Throwable t) {
        final ActionException ae = new ActionException(this, t.getMessage(), t);
        if (LOGGER.isErrorEnabled())
            LOGGER.error(ae.getLocalizedMessage(), ae);
        listenerForwarder.failed(ae); // fails the Action
        throw ae;
    }
}
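
In the multi-file branch above, the temporary working directory is derived from the first input file. The core idiom, with a hypothetical Unix-style path, is simply:

import java.io.File;
import org.apache.commons.io.FilenameUtils;

// Hypothetical input: obtain the directory containing a file as a File object.
File first = new File("/tmp/geobatch/run/roads.shp");
File workingDir = new File(FilenameUtils.getFullPath(first.getAbsolutePath()));
// -> /tmp/geobatch/run/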

From source file:fr.insalyon.creatis.vip.datamanager.server.business.TransferPoolBusiness.java

/**
 * @param user
 * @param remoteFile
 * @return Operation ID
 * @throws BusinessException 
 */
public String downloadFile(User user, String remoteFile) throws BusinessException {

    try {
        lfcBusiness.getModificationDate(user, remoteFile);
        GRIDAPoolClient poolClient = CoreUtil.getGRIDAPoolClient();

        String remotePath = DataManagerUtil.parseBaseDir(user, remoteFile);
        String localDirPath = serverConfiguration.getDataManagerPath() + "/downloads"
                + FilenameUtils.getFullPath(remotePath);

        return poolClient.downloadFile(remotePath, localDirPath, user.getEmail());

    } catch (DataManagerException ex) {
        logger.error(ex);
        throw new BusinessException(ex);
    } catch (GRIDAClientException ex) {
        logger.error(ex);
        throw new BusinessException(ex);
    }
}
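
Here getFullPath mirrors the remote directory layout under a local downloads root. A minimal sketch with made-up paths:

import org.apache.commons.io.FilenameUtils;

// Hypothetical values, for illustration only.
String remotePath = "/grid/users/jdoe/data/results.csv";
String localDirPath = "/var/vip/downloads" + FilenameUtils.getFullPath(remotePath);
// -> "/var/vip/downloads/grid/users/jdoe/data/"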

From source file:it.drwolf.ridire.session.JobCleaner.java

public void testScript(CrawledResource cr, String cleaningScript) {
    JSch jSch = new JSch();
    String origFile = FilenameUtils.getFullPath(cr.getArcFile()).concat(JobMapperMonitor.RESOURCESDIR)
            .concat(cr.getDigest().concat(".txt"));
    String endFile = this.cleanerPath.concat(System.getProperty("file.separator")).concat(cr.getDigest())
            .concat(".txt");
    String command = "perl " + this.cleanerPath.concat(System.getProperty("file.separator")) + "testscript.pl "
            + endFile;
    try {
        this.testBefore = this.ridirePlainTextCleaner.getCleanText(new File(origFile));
        File script = File.createTempFile("cleaner", ".pl");
        File origTemp = File.createTempFile("orig", ".temp");
        FileUtils.writeStringToFile(origTemp, this.testBefore);
        FileUtils.writeStringToFile(script, cleaningScript);
        // transfer file to process
        com.jcraft.jsch.Session session = jSch.getSession(this.perlUser, "127.0.0.1");
        java.util.Properties config = new java.util.Properties();
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);
        session.setPassword(this.perlPw);
        session.connect();
        Channel channel = session.openChannel("sftp");
        channel.connect();
        ChannelSftp c = (ChannelSftp) channel;
        int mode = ChannelSftp.OVERWRITE;
        c.put(origTemp.getAbsolutePath(), endFile, mode);
        // transfer script
        c.put(script.getAbsolutePath(),
                this.cleanerPath.concat(System.getProperty("file.separator")).concat("testscript.pl"));
        c.disconnect();
        FileUtils.deleteQuietly(script);
        // execute script
        channel = session.openChannel("exec");
        ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
        ((ChannelExec) channel).setErrStream(errorStream);
        ((ChannelExec) channel).setCommand(command);
        channel.setInputStream(null);
        InputStream inputStream = channel.getInputStream();
        channel.connect();
        byte[] tmp = new byte[1024];
        while (true) {
            while (inputStream.available() > 0) {
                int i = inputStream.read(tmp, 0, 1024);
                if (i < 0) {
                    break;
                }
            }
            if (channel.isClosed()) {
                break;
            }
            try {
                Thread.sleep(200);
            } catch (Exception ee) {
            }
        }
        this.testOutput = errorStream.toString();
        channel.disconnect();
        // get new file
        channel = session.openChannel("sftp");
        channel.connect();
        c = (ChannelSftp) channel;
        File newFile = File.createTempFile("cleanedFile", null);
        c.get(endFile + ".tmp", newFile.getAbsolutePath());
        c.disconnect();
        // delete files from working directory
        channel = session.openChannel("exec");
        command = "rm " + this.cleanerPath.concat(System.getProperty("file.separator")) + "testscript.pl "
                + endFile + " " + endFile + ".tmp";
        ((ChannelExec) channel).setCommand(command);
        channel.setInputStream(null);
        inputStream = channel.getInputStream();
        channel.connect();
        StringBuffer testScriptOutputStringBuffer = new StringBuffer();
        while (true) {
            while (inputStream.available() > 0) {
                int i = inputStream.read(tmp, 0, 1024);
                if (i < 0) {
                    break;
                }
                // decode only the bytes actually read; appending the byte[] itself
                // would append its toString() value rather than the command output
                testScriptOutputStringBuffer.append(new String(tmp, 0, i));
            }
            if (channel.isClosed()) {
                testScriptOutputStringBuffer.append("Exit status: " + channel.getExitStatus());
                break;
            }
            try {
                Thread.sleep(200);
            } catch (Exception ee) {
            }
        }
        channel.disconnect();
        session.disconnect();
        this.testAfter = FileUtils.readFileToString(newFile);
        FileUtils.deleteQuietly(newFile);
    } catch (JSchException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (SftpException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
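
The source path above combines the ARC file's directory with a resources subfolder and a digest-based name. With hypothetical values, the construction amounts to:

import org.apache.commons.io.FilenameUtils;

// Hypothetical values, for illustration only.
String arcFile = "/crawls/job-42/data.arc.gz";
String resourcesDir = "resources/";
String digest = "abc123";
String origFile = FilenameUtils.getFullPath(arcFile) + resourcesDir + digest + ".txt";
// -> "/crawls/job-42/resources/abc123.txt"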

From source file:edu.ur.ir.ir_export.service.DefaultCollectionExportService.java

/**
 * Generate an XML file with the specified collections.
 *
 * @see edu.ur.dspace.export.CollectionExporter#generateCollectionXMLFile(java.io.File, java.util.Collection)
 */
public Set<FileInfo> createXmlFile(File f, Collection<InstitutionalCollection> collections,
        boolean includeChildren) throws IOException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder;

    Set<FileInfo> allPictures = new HashSet<FileInfo>();
    String path = FilenameUtils.getPath(f.getCanonicalPath());
    if (!path.equals("")) {
        File pathOnly = new File(FilenameUtils.getFullPath(f.getCanonicalPath()));
        FileUtils.forceMkdir(pathOnly);
    }

    if (!f.exists()) {
        if (!f.createNewFile()) {
            throw new IllegalStateException("could not create file");
        }
    }

    try {
        builder = factory.newDocumentBuilder();
    } catch (ParserConfigurationException e) {
        throw new IllegalStateException(e);
    }

    DOMImplementation impl = builder.getDOMImplementation();
    DOMImplementationLS domLs = (DOMImplementationLS) impl.getFeature("LS", "3.0");
    LSSerializer serializer = domLs.createLSSerializer();
    LSOutput lsOut = domLs.createLSOutput();

    Document doc = impl.createDocument(null, "institutionalCollections", null);
    Element root = doc.getDocumentElement();

    FileOutputStream fos;
    OutputStreamWriter outputStreamWriter;
    BufferedWriter writer;

    try {
        fos = new FileOutputStream(f);

        try {
            outputStreamWriter = new OutputStreamWriter(fos, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException(e);
        }
        writer = new BufferedWriter(outputStreamWriter);
        lsOut.setCharacterStream(writer);
    } catch (FileNotFoundException e) {
        throw new IllegalStateException(e);
    }

    // create XML for the child collections
    for (InstitutionalCollection c : collections) {
        Element collection = doc.createElement("collection");

        this.addIdElement(collection, c.getId().toString(), doc);
        this.addNameElement(collection, c.getName(), doc);
        this.addDescription(collection, c.getDescription(), doc);
        this.addCopyright(collection, c.getCopyright(), doc);

        if (c.getPrimaryPicture() != null) {
            this.addPrimaryImage(collection, c.getPrimaryPicture().getFileInfo().getNameWithExtension(), doc);
            allPictures.add(c.getPrimaryPicture().getFileInfo());
        }
        Set<IrFile> pictures = c.getPictures();

        if (pictures.size() > 0) {
            Element pics = doc.createElement("pictures");
            for (IrFile irFile : pictures) {
                Element picture = doc.createElement("picture");
                this.addImage(picture, irFile.getFileInfo().getNameWithExtension(), doc);
                pics.appendChild(picture);
                allPictures.add(irFile.getFileInfo());
            }
            collection.appendChild(pics);
        }

        if (c.getLinks().size() > 0) {
            Element links = doc.createElement("links");
            for (InstitutionalCollectionLink l : c.getLinks()) {
                this.addLink(links, l, doc);
            }
            collection.appendChild(links);
        }

        if (includeChildren) {
            for (InstitutionalCollection child : c.getChildren()) {
                addChild(child, collection, doc, allPictures);
            }
        }
        root.appendChild(collection);
    }
    serializer.write(root, lsOut);

    try {
        // close the outermost writer first so buffered output is flushed before the underlying stream closes
        writer.close();
        outputStreamWriter.close();
        fos.close();
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
    return allPictures;
}
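
The getFullPath call above is used to create the output file's parent directories before writing. A minimal sketch of that step, assuming the file path may or may not include a directory component:

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;

// Hypothetical helper: make sure the parent directories of f exist.
static void ensureParentDir(File f) throws IOException {
    String path = FilenameUtils.getFullPath(f.getCanonicalPath());
    if (!path.isEmpty()) {
        FileUtils.forceMkdir(new File(path));
    }
}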

From source file:MSUmpire.LCMSPeakStructure.LCMSPeakDIAMS2.java

private void PrepareMGF_MS1Cluster(LCMSPeakMS1 ms1lcms) throws IOException {

    ArrayList<PseudoMSMSProcessing> ScanList = new ArrayList<>();
    ExecutorService executorPool = Executors.newFixedThreadPool(NoCPUs);
    for (PeakCluster ms1cluster : ms1lcms.PeakClusters) {
        final ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
        if (frags != null && DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz()
                && DIA_MZ_Range.getY() >= ms1cluster.TargetMz()) {
            //            if (DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz() && DIA_MZ_Range.getY() >= ms1cluster.TargetMz() && FragmentsClu2Cur.containsKey(ms1cluster.Index)) {
            //                ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
            ms1cluster.GroupedFragmentPeaks.addAll(frags);
            if (Last_MZ_Range == null || Last_MZ_Range.getY() < ms1cluster.TargetMz()) {
                PseudoMSMSProcessing mSMSProcessing = new PseudoMSMSProcessing(ms1cluster, parameter);
                ScanList.add(mSMSProcessing);
            }
        }
    }

    for (PseudoMSMSProcessing proc : ScanList) {
        executorPool.execute(proc);
    }
    executorPool.shutdown();

    try {
        executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Logger.getRootLogger().info("interrupted..");
    }

    String mgffile = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ1Name() + ".mgf.temp";
    String mgffile2 = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ2Name() + ".mgf.temp";
    //        FileWriter mapwriter = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q1", true);
    //        FileWriter mapwriter2 = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q2", true);

    //        FileWriter mgfWriter = new FileWriter(mgffile, true);
    //        FileWriter mgfWriter2 = new FileWriter(mgffile2, true);
    final BufferedWriter mapwriter = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q1,
            FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                    + ".ScanClusterMapping_Q1"),
            mapwriter2 = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q2,
                    FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                            + ".ScanClusterMapping_Q2"),
            mgfWriter = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q1, mgffile),
            mgfWriter2 = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q2, mgffile2);

    for (PseudoMSMSProcessing mSMSProcessing : ScanList) {
        if (MatchedFragmentMap.size() > 0) {
            mSMSProcessing.RemoveMatchedFrag(MatchedFragmentMap);
        }

        XYPointCollection Scan = mSMSProcessing.GetScan();

        if (Scan != null && Scan.PointCount() > parameter.MinFrag) {
            //                StringBuilder mgfString = new StringBuilder();

            if (mSMSProcessing.Precursorcluster.IsotopeComplete(3)) {
                final BufferedWriter mgfString = mgfWriter;
                parentDIA.Q1Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ1Name()).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter.write(parentDIA.Q1Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                //                    mgfWriter.write(mgfString.toString());
                //} else if (mSMSProcessing.Precursorcluster.IsotopeComplete(2)) {
            } else {
                final BufferedWriter mgfString = mgfWriter2;
                parentDIA.Q2Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ2Name()).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter2.write(parentDIA.Q2Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                //                    mgfWriter2.write(mgfString.toString());
            }
        }
        mSMSProcessing.Precursorcluster.GroupedFragmentPeaks.clear();
    }
    //        mgfWriter2.close();
    //        mgfWriter.close();
    //        mapwriter.close();
    //        mapwriter2.close();
}

From source file:MSUmpire.DIA.DIAPack.java

public void ReadScanNoMapping() throws FileNotFoundException, IOException {
    ScanClusterMap_Q1 = new HashMap<>();
    ScanClusterMap_Q2 = new HashMap<>();
    ScanClusterMap_Q3 = new HashMap<>();
    BufferedReader reader = new BufferedReader(new FileReader(FilenameUtils.getFullPath(Filename)
            + FilenameUtils.getBaseName(Filename) + ".ScanClusterMapping_Q1"));
    BufferedReader reader2 = new BufferedReader(new FileReader(FilenameUtils.getFullPath(Filename)
            + FilenameUtils.getBaseName(Filename) + ".ScanClusterMapping_Q2"));
    BufferedReader reader4 = new BufferedReader(new FileReader(FilenameUtils.getFullPath(Filename)
            + FilenameUtils.getBaseName(Filename) + ".ScanClusterMapping_Q3"));

    String line = "";
    int StartNo = 0;
    if (new File(FilenameUtils.getFullPath(Filename) + GetQ1Name() + ".mzXML").exists()) {
        BufferedReader mzReader = new BufferedReader(
                new FileReader(FilenameUtils.getFullPath(Filename) + GetQ1Name() + ".mzXML"));
        while ((line = mzReader.readLine()) != null) {
            if (line.contains("<scan num=")) {
                String substr = line.substring(line.indexOf("<scan num=") + 11);
                StartNo = Integer.parseInt(substr.substring(0, substr.indexOf("\"")));
                break;
            }
        }
    }

    line = reader.readLine();
    int offset = StartNo - Integer.parseInt(line.split("_")[0]);
    Integer ScanNo = Integer.parseInt(line.split("_")[0]) + offset;
    Integer ClusterIndex = Integer.parseInt(line.split("_")[1]);
    ScanClusterMap_Q1.put(ScanNo, ClusterIndex);

    while ((line = reader.readLine()) != null) {
        ScanNo = Integer.parseInt(line.split("_")[0]) + offset;
        ClusterIndex = Integer.parseInt(line.split("_")[1]);
        ScanClusterMap_Q1.put(ScanNo, ClusterIndex);
    }

    line = "";
    StartNo = 0;
    if (new File(FilenameUtils.getFullPath(Filename) + GetQ2Name() + ".mzXML").exists()) {
        BufferedReader mzReader = new BufferedReader(
                new FileReader(FilenameUtils.getFullPath(Filename) + GetQ2Name() + ".mzXML"));
        while ((line = mzReader.readLine()) != null) {
            if (line.contains("<scan num=")) {
                String substr = line.substring(line.indexOf("<scan num=") + 11);
                StartNo = Integer.parseInt(substr.substring(0, substr.indexOf("\"")));
                break;
            }
        }
    }
    line = reader2.readLine();
    offset = StartNo - Integer.parseInt(line.split("_")[0]);
    ScanNo = Integer.parseInt(line.split("_")[0]) + offset;
    ClusterIndex = Integer.parseInt(line.split("_")[1]);
    ScanClusterMap_Q2.put(ScanNo, ClusterIndex);

    while ((line = reader2.readLine()) != null) {
        ScanNo = Integer.parseInt(line.split("_")[0]) + offset;
        ClusterIndex = Integer.parseInt(line.split("_")[1]);
        ScanClusterMap_Q2.put(ScanNo, ClusterIndex);
    }

    line = "";
    StartNo = 0;
    if (new File(FilenameUtils.getFullPath(Filename) + GetQ3Name() + ".mzXML").exists()) {
        BufferedReader mzReader = new BufferedReader(
                new FileReader(FilenameUtils.getFullPath(Filename) + GetQ3Name() + ".mzXML"));
        while ((line = mzReader.readLine()) != null) {
            if (line.contains("<scan num=")) {
                String substr = line.substring(line.indexOf("<scan num=") + 11);
                StartNo = Integer.parseInt(substr.substring(0, substr.indexOf("\"")));
                break;
            }
        }
    }
    line = reader4.readLine();
    if (line != null) {
        if (line.split(";").length == 3) {
            offset = StartNo - Integer.parseInt(line.split(";")[0]);
            ScanNo = Integer.parseInt(line.split(";")[0]) + offset;
            String WindowClusterIndex = line.split(";")[1] + ";" + line.split(";")[2];
            ScanClusterMap_Q3.put(ScanNo, WindowClusterIndex);
        } else {
            String ClusterIndexS = line.split("_")[1];
            ScanNo = Integer.parseInt(line.split("_")[0]);
            ScanClusterMap_Q3.put(ScanNo, ClusterIndexS);
        }
        while ((line = reader4.readLine()) != null) {
            if (line.split(";").length == 3) {
                ScanNo = Integer.parseInt(line.split(";")[0]) + offset;
                String WindowClusterIndex = line.split(";")[1] + ";" + line.split(";")[2];
                ScanClusterMap_Q3.put(ScanNo, WindowClusterIndex);
            } else {
                ScanNo = Integer.parseInt(line.split("_")[0]);
                String ClusterIndexS = line.split("_")[1];
                ScanClusterMap_Q3.put(ScanNo, ClusterIndexS);
            }
        }
    }
    reader.close();
    reader2.close();
    reader4.close();
}

From source file:net.mach6.listeners.DependencyReportingListener.java

private void generatePngFromDotFiles() {
    if (!Option.OUTPUT.isSet("png", "all")) {
        return;
    }

    for (String dotFile : dotFiles) {
        dotFile = FilenameUtils.getFullPath(dotFile) + FilenameUtils.getName(dotFile);
        try {
            final String cmd = "/usr/local/bin/dot " + dotFile + " -Grankdir=LR -Tpng -o "
                    + StringUtils.removeEnd(dotFile, ".dot").concat(".png");

            Process p = Runtime.getRuntime().exec(cmd, null);
            p.waitFor();
        } catch (IOException | InterruptedException e) {
            LOGGER.severe("Error generating png file due to " + e.getMessage());
            throw new RuntimeException("Error generating png file", e);
        } finally {
            // Delete the dot file if it is not a requested output format
            if (!Option.OUTPUT.isSet("dot", "all")) {
                LOGGER.fine("deleting -> " + dotFile);
                FileUtils.deleteQuietly(new File(dotFile));
            }
        }
    }
}

From source file:MSUmpire.SearchResultParser.TPPResult.java

public void ReadSearchResult(LCMSID lcmsid, ArrayList<String> pepxmls, String protxml)
        throws ParserConfigurationException, IOException, SAXException, XmlPullParserException,
        ClassNotFoundException, InterruptedException {
    for (String pepxml : pepxmls) {
        LCMSID pepxmlid = new LCMSID(FilenameUtils.getFullPath(pepxml) + FilenameUtils.getBaseName(pepxml),
                DecoyTag, lcmsid.FastaPath);
        PepXMLParser pepxmlparser = new PepXMLParser(pepxmlid, pepxml, 0f);
        pepxmlid.FilterByPepDecoyFDR(DecoyTag, FDR);
        Logger.getRootLogger().info("peptide No.:" + pepxmlid.GetPepIonList().size()
                + "; Peptide level threshold: " + pepxmlid.PepProbThreshold);
        for (PepIonID pepID : pepxmlid.GetPepIonList().values()) {
            lcmsid.AddPeptideID(pepID);
        }
    }
    ProtXMLParser protxmlparser = new ProtXMLParser(lcmsid, protxml, 0f);
    lcmsid.RemoveLowLocalPWProtein(0.5f);
    //lcmsid.RemoveLowMaxIniProbProtein(0.9f);
    lcmsid.FilterByProteinDecoyFDRUsingMaxIniProb(DecoyTag, ProtFDR);
    lcmsid.LoadSequence();
    lcmsid.ReMapProPep();
    lcmsid.CreateInstanceForAllPepIon();
    Logger.getRootLogger()
            .info("Protein No.:" + lcmsid.ProteinList.size() + "; Assigned Peptide No.:"
                    + lcmsid.AssignedPepIonList.size() + "; All peptide No.:" + lcmsid.GetPepIonList().size()
                    + "; Spectrum level threshold: " + lcmsid.SpecProbThreshold + "; Peptide level threshold: "
                    + lcmsid.PepProbThreshold + "; Protein level threshold: " + lcmsid.ProteinProbThreshold);
}
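
The ID name passed to LCMSID above keeps the pepXML file's directory but drops its last extension. With a made-up path, the pattern is:

import org.apache.commons.io.FilenameUtils;

// Hypothetical path, for illustration only.
String pepxml = "/data/run1/sample.pep.xml";
String idName = FilenameUtils.getFullPath(pepxml) + FilenameUtils.getBaseName(pepxml);
// -> "/data/run1/sample.pep" (getBaseName strips only the last extension)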

From source file:de.unirostock.sems.cbarchive.web.dataholder.Archive.java

@JsonIgnore
public ArchiveEntry addArchiveEntry(String fileName, Path file, ReplaceStrategy strategy)
        throws CombineArchiveWebException, IOException {

    if (archive == null) {
        LOGGER.error("The archive was not opened");
        throw new CombineArchiveWebException("The archive was not opened");
    }

    // check for blacklisted filename
    if (Tools.isFilenameBlacklisted(fileName))
        throw new CombineArchiveWebException(
                MessageFormat.format("The filename is blacklisted. You may not add files called {0}!",
                        FilenameUtils.getName(fileName)));

    ArchiveEntry entry = null;

    if (strategy == ReplaceStrategy.RENAME || strategy == ReplaceStrategy.OVERRIDE) {

        // make sure file name is not taken yet
        if (archive.getEntry(fileName) != null && strategy == ReplaceStrategy.RENAME) {
            String altFileName = fileName;
            int i = 1;
            while (archive.getEntry(altFileName) != null) {
                i++;
                String extension = FilenameUtils.getExtension(fileName);
                String pureName = FilenameUtils.getBaseName(fileName);
                String fullPath = FilenameUtils.getFullPath(fileName);

                altFileName = fullPath + (pureName != null ? pureName : "") + "-" + String.valueOf(i)
                        + (extension != null && !extension.isEmpty() ? "." + extension : "");
            }
            fileName = altFileName;
        }

        entry = archive.addEntry(file.toFile(), fileName, Formatizer.guessFormat(file.toFile()));
        // adds the entry to the dataholder (warning: this is probably inconsistent)
        if (entry != null) {
            // entry information are gathered in the entry dataholder
            ArchiveEntryDataholder dataholder = new ArchiveEntryDataholder(entry);
            // put it into the map
            this.entries.put(dataholder.getFilePath(), dataholder);
        }
    } else if (archive.getEntry(fileName) != null && strategy == ReplaceStrategy.REPLACE) {

        ArchiveEntry oldEntry = archive.getEntry(fileName);
        entry = archive.replaceFile(file.toFile(), oldEntry);
        // adds the entry to the dataholder (warning: this is probably inconsistent)
        if (entry != null) {
            // entry information are gathered in the entry dataholder
            ArchiveEntryDataholder dataholder = new ArchiveEntryDataholder(entry);
            // put it into the map
            this.entries.put(dataholder.getFilePath(), dataholder);
        }
    }

    return entry;
}