Example usage for java.io FileNotFoundException FileNotFoundException

List of usage examples for java.io FileNotFoundException FileNotFoundException

Introduction

On this page you can find example usage of the java.io FileNotFoundException(String) constructor.

Prototype

public FileNotFoundException(String s) 

Source Link

Document

Constructs a FileNotFoundException with the specified detail message.

Usage

From source file:com.weibo.wesync.notify.xml.XMLProperties.java

/**
 * Creates a new XMLProperties object backed by the given file.
 *
 * @param file the file that properties should be read from and written to.
 * @throws IOException if an error occurs loading the properties.
 */
public XMLProperties(File file) throws IOException {
    this.file = file;
    if (!file.exists()) {
        // Attempt to recover from this error case by seeing if the
        // tmp file exists. It's possible that the rename of the
        // tmp file failed the last time Jive was running,
        // but that it exists now.
        File tempFile = new File(file.getParentFile(), file.getName() + ".tmp");
        if (tempFile.exists()) {
            // File.renameTo reports failure only via its return value; ignoring
            // it previously allowed a failed rename to surface later as a
            // misleading "must be readable" error. Fail fast instead.
            if (!tempFile.renameTo(file)) {
                throw new IOException(
                        "Could not recover XML properties file from temp file: " + tempFile.getName());
            }
        } else {
            // There isn't a possible way to recover from the file not
            // being there, so throw an error.
            throw new FileNotFoundException("XML properties file does not exist: " + file.getName());
        }
    }
    // Check read and write privs.
    if (!file.canRead()) {
        throw new IOException("XML properties file must be readable: " + file.getName());
    }
    if (!file.canWrite()) {
        throw new IOException("XML properties file must be writable: " + file.getName());
    }

    // NOTE(review): FileReader uses the platform default charset; if the XML
    // declares a different encoding this may misread it — confirm buildDoc's
    // expectations before switching to an explicit-charset reader.
    FileReader reader = new FileReader(file);
    buildDoc(reader);
}

From source file:com.pangdata.sdk.http.AbstractHttp.java

/**
 * Loads credentials and the optional service URL from pang.properties.
 * Fails fast with a PangException when a required property is missing
 * or the properties file cannot be located on the classpath.
 *
 * @param mustinvoke unused by this constructor's visible logic —
 *        presumably consumed by subclasses; TODO confirm.
 */
public AbstractHttp(boolean mustinvoke) {
    try {
        Properties props = SdkUtils.loadPangProperties();

        // Required: account user name.
        String username = (String) props.get("pang.username");
        if (username != null && username.trim().length() > 0) {
            this.username = username;
        } else {
            throw new PangException(new IllegalStateException("pang.username not found in pang.properties"));
        }
        // Required: account user key.
        String userkey = (String) props.get("pang.userkey");
        if (userkey != null && userkey.trim().length() > 0) {
            this.userkey = userkey;
        } else {
            throw new PangException(new IllegalStateException("pang.userkey not found in pang.properties"));
        }
        // Optional: service URL override.
        String url = (String) props.get("pang.url");
        if (url != null && url.trim().length() > 0) {
            this.url = url;
        }
    } catch (PangException e) {
        logger.error("Property error", e);
        throw e;
    } catch (IOException e) {
        logger.error("Could not find a pang.properties in classpath", e);
        // Preserve the original IOException as the cause instead of dropping it,
        // so the real failure is visible in stack traces.
        FileNotFoundException fnf = new FileNotFoundException("pang.properties");
        fnf.initCause(e);
        throw new PangException(fnf);
    }
}

From source file:eu.dime.ps.storage.datastore.impl.DataStoreImpl.java

/**
 * Opens a stream over the binary payload stored under the given id.
 *
 * @param id identifier of the stored object.
 * @return an InputStream over the blob's backing file.
 * @throws FileNotFoundException if no binary object exists for the id,
 *         or its backing file cannot be opened.
 */
@Override
public InputStream getBlob(final String id) throws FileNotFoundException {

    PersistentDimeObject object = getObject(id);
    // instanceof already yields false for null, so no separate null check is needed.
    if (object instanceof DimeBinary) {
        try {
            return new FileInputStream(((DimeBinary) object).getFile());
        } catch (IOException e) {
            logger.warn("Could not read File. " + id, e);
            // Keep the original IOException attached as the cause so callers
            // can see why the open failed.
            FileNotFoundException fnf = new FileNotFoundException(
                    "Could not load blob for id: " + id + " catched IOException");
            fnf.initCause(e);
            throw fnf;
        }
    } else {
        throw new FileNotFoundException("Could not find blob for id: " + id);
    }
}

From source file:eu.stratosphere.nephele.fs.file.LocalFileSystem.java

/**
 * {@inheritDoc}/*from  ww w  .  j  ava  2s  .co m*/
 */
@Override
public FileStatus getFileStatus(Path f) throws IOException {

    final File path = pathToFile(f);
    if (path.exists()) {
        return new LocalFileStatus(pathToFile(f), this);
    } else {
        throw new FileNotFoundException("File " + f + " does not exist.");
    }

}

From source file:com.asakusafw.runtime.util.cache.HadoopFileCacheRepository.java

/**
 * Resolves a remote file into its local cache path, populating or refreshing
 * the cache under a lock so concurrent callers do not clobber each other.
 *
 * @param sourcePath absolute path of the remote source file
 * @return the path of the cached copy
 * @throws FileNotFoundException if the source file does not exist
 * @throws IOException if the cache could not be prepared after all retry attempts
 * @throws InterruptedException if interrupted while waiting between retries
 */
private Path doResolve(Path sourcePath) throws IOException, InterruptedException {
    assert sourcePath.isAbsolute();
    FileSystem fs = sourcePath.getFileSystem(configuration);
    if (fs.exists(sourcePath) == false) {
        throw new FileNotFoundException(sourcePath.toString());
    }
    // The source checksum decides whether an existing cache entry is stale.
    long sourceChecksum = computeChecksum(fs, sourcePath);
    Path cachePath = computeCachePath(sourcePath);
    Path cacheChecksumPath = computeCacheChecksumPath(cachePath);

    // Remember only the first failure; later attempts likely fail the same way.
    IOException firstException = null;
    RetryObject retry = retryStrategy
            .newInstance(MessageFormat.format("preparing cache ({0} -> {1})", sourcePath, cachePath));
    do {
        try (LockObject<? super Path> lock = lockProvider.tryLock(cachePath)) {
            // TODO reduce lock scope?
            if (lock == null) {
                // Lock held by another process; wait for the next retry slot.
                continue;
            }
            if (isCached(cachePath, cacheChecksumPath, sourceChecksum)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(MessageFormat.format("cache hit: {0} -> {1}", //$NON-NLS-1$
                            sourcePath, cachePath));
                }
                // just returns cached file
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(MessageFormat.format("cache miss: {0} -> {1}", //$NON-NLS-1$
                            sourcePath, cachePath));
                }
                // Copy the source into the cache and record its checksum.
                updateCache(sourcePath, sourceChecksum, cachePath, cacheChecksumPath);
            }
            return cachePath;
        } catch (IOException e) {
            LOG.warn(MessageFormat.format("Failed to prepare cache: {0} -> {1}", sourcePath, cachePath), e);
            if (firstException == null) {
                firstException = e;
            }
        }
    } while (retry.waitForNextAttempt());
    // Retries exhausted: if no I/O error was seen, we simply never got the lock.
    if (firstException == null) {
        throw new IOException(MessageFormat.format("Failed to acquire a lock for remote cache file: {0} ({1})",
                sourcePath, cachePath));
    }
    throw firstException;
}

From source file:it.greenvulcano.configuration.BaseConfigurationManager.java

/**
 * Reads the named configuration file and returns its raw contents.
 *
 * @param name the configuration whose file should be exported
 * @return the bytes of the configuration file
 * @throws FileNotFoundException if the resolved path is missing or a directory
 * @throws IOException if the file cannot be read
 */
@Override
public byte[] export(String name) throws IOException, FileNotFoundException {

    final Path target = getConfigurationPath(name);

    // Only an existing, non-directory path is exportable; anything else is absent.
    boolean exportable = Files.exists(target) && !Files.isDirectory(target);
    if (!exportable) {
        throw new FileNotFoundException(target.toString());
    }
    return Files.readAllBytes(target);
}

From source file:com.emr.utilities.CSVLoader.java

/**
 * Parses a CSV file using the OpenCSV library and loads its rows into the
 * given database table. When the destination table is {@code person}, a row
 * is also batched into {@code person_identifier} for each record carrying a
 * UUID.
 *
 * @param csvFile {@link String} Input CSV file
 * @param tableName {@link String} Database table name to import data
 * @param truncateBeforeLoad {@link boolean} Truncate the table before inserting
 *          new records.
 * @param destinationColumns {@link String[]} Array containing the destination columns
 * @param columnsToBeMapped columns (possibly table-qualified names) whose values
 *          are run through {@code applyDataMapping} before insertion
 * @throws Exception if the connection is invalid, the file cannot be read,
 *          or any database operation fails
 */
public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad, String[] destinationColumns,
        List columnsToBeMapped) throws Exception {
    CSVReader csvReader = null;
    if (null == this.connection) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = new CSVReader(new FileReader(csvFile), this.seprator);

    } catch (Exception e) {
        String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e);
        JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace,
                "File Error", JOptionPane.ERROR_MESSAGE);
        throw new Exception("Error occured while executing file. " + stacktrace);
    }
    // The first row is expected to hold the column headers.
    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file." + "Please check the CSV file format.");
    }
    // Get indices of header columns whose values must be mapped via applyDataMapping.
    List mapColumnsIndices = new ArrayList();
    for (Object o : columnsToBeMapped) {
        String column = (String) o;
        // Strip any table qualifier ("table.column" -> "column").
        column = column.substring(column.lastIndexOf(".") + 1, column.length());
        int i;

        for (i = 0; i < headerRow.length; i++) {

            if (headerRow[i].equals(column)) {
                mapColumnsIndices.add(i);
            }
        }
    }

    // Build "?,?,...,?" — one placeholder per CSV column (trailing comma trimmed).
    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);

    // Fill the INSERT template with table name, column list and placeholders.
    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(destinationColumns, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    // Prefix reused when echoing each inserted row to the console.
    String log_query = query.substring(0, query.indexOf("VALUES("));

    String[] nextLine;
    Connection con = null;
    PreparedStatement ps = null;
    PreparedStatement ps2 = null;
    PreparedStatement reader = null;
    ResultSet rs = null;
    try {
        con = this.connection;
        con.setAutoCommit(false);
        ps = con.prepareStatement(query);

        // Local SQLite store; created on demand if absent.
        File file = new File("sqlite/db");
        if (!file.exists()) {
            file.createNewFile();
        }
        db = new SQLiteConnection(file);
        db.open(true);

        //if destination table==person, also add an entry in the table person_identifier
        //get column indices for the person_id and uuid columns
        int person_id_column_index = -1;
        int uuid_column_index = -1;
        int maxLength = 100;
        int firstname_index = -1;
        int middlename_index = -1;
        int lastname_index = -1;
        int clanname_index = -1;
        int othername_index = -1;
        if (tableName.equals("person")) {
            int i;
            ps2 = con.prepareStatement(
                    "insert ignore into person_identifier(person_id,identifier_type_id,identifier) values(?,?,?)");
            for (i = 0; i < headerRow.length; i++) {
                if (headerRow[i].equals("person_id")) {
                    person_id_column_index = i;
                }
                if (headerRow[i].equals("uuid")) {
                    uuid_column_index = i;
                }
            }
        }

        if (truncateBeforeLoad) {
            //delete data from table before loading csv
            try (Statement stmnt = con.createStatement()) {
                stmnt.execute("DELETE FROM " + tableName);
                stmnt.close();
            }
        }
        if (tableName.equals("person")) {
            // Adjust the person name columns before load; presumably aligns the
            // schema with the incoming data — TODO confirm intent.
            try (Statement stmt2 = con.createStatement()) {
                stmt2.execute(
                        "ALTER TABLE person CHANGE COLUMN first_name first_name VARCHAR(50) NULL DEFAULT NULL AFTER person_guid,CHANGE COLUMN middle_name middle_name VARCHAR(50) NULL DEFAULT NULL AFTER first_name,CHANGE COLUMN last_name last_name VARCHAR(50) NULL DEFAULT NULL AFTER middle_name;");
                stmt2.close();
            }
        }
        final int batchSize = 1000;
        int count = 0;
        Date date = null;

        while ((nextLine = csvReader.readNext()) != null) {

            if (null != nextLine) {
                int index = 1;
                int person_id = -1;
                String uuid = "";
                int identifier_type_id = 3;
                if (tableName.equals("person")) {
                    // Look up (or lazily create) the UUID identifier type id.
                    // NOTE(review): this PreparedStatement is re-created for every
                    // row but only the last instance is closed in the finally
                    // block — consider hoisting it out of the loop.
                    reader = con.prepareStatement(
                            "select identifier_type_id from identifier_type where identifier_type_name='UUID'");
                    rs = reader.executeQuery();
                    if (!rs.isBeforeFirst()) {
                        //no uuid row
                        //insert it
                        Integer numero = 0;
                        Statement stmt = con.createStatement();
                        numero = stmt.executeUpdate(
                                "insert into identifier_type(identifier_type_id,identifier_type_name) values(50,'UUID')",
                                Statement.RETURN_GENERATED_KEYS);
                        ResultSet rs2 = stmt.getGeneratedKeys();
                        if (rs2.next()) {
                            identifier_type_id = rs2.getInt(1);
                        }
                        rs2.close();
                        stmt.close();
                    } else {
                        while (rs.next()) {
                            identifier_type_id = rs.getInt("identifier_type_id");
                        }
                    }

                }
                int counter = 1;
                String temp_log = log_query + "VALUES("; //string to be logged

                for (String string : nextLine) {
                    //if current index is in the list of columns to be mapped, we apply that mapping
                    for (Object o : mapColumnsIndices) {
                        int i = (int) o;
                        if (index == (i + 1)) {
                            //apply mapping to this column
                            string = applyDataMapping(string);
                        }
                    }
                    if (tableName.equals("person")) {
                        //get person_id and uuid

                        if (index == (person_id_column_index + 1)) {
                            person_id = Integer.parseInt(string);
                        }

                        if (index == (uuid_column_index + 1)) {
                            uuid = string;
                        }

                    }
                    // Values matching "dd-MMM-yyyy[ HH:mm:ss]" are bound as SQL dates.
                    if (string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}:\\d{2}")
                            || string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4}")) {
                        java.sql.Date dt = formatDate(string);
                        temp_log = temp_log + "'" + dt.toString() + "'";
                        ps.setDate(index++, dt);
                    } else {
                        if ("".equals(string)) {
                            // Empty CSV cell becomes SQL NULL.
                            temp_log = temp_log + "''";
                            ps.setNull(index++, Types.NULL);
                        } else {
                            temp_log = temp_log + "'" + string + "'";
                            ps.setString(index++, string);
                        }

                    }
                    // Close and print the logged statement after the last column.
                    if (counter < headerRow.length) {
                        temp_log = temp_log + ",";
                    } else {
                        temp_log = temp_log + ");";
                        System.out.println(temp_log);
                    }
                    counter++;
                }
                if (tableName.equals("person")) {
                    if (!"".equals(uuid) && person_id != -1) {
                        ps2.setInt(1, person_id);
                        ps2.setInt(2, identifier_type_id);
                        ps2.setString(3, uuid);

                        ps2.addBatch();
                    }
                }

                ps.addBatch();
            }
            // Flush accumulated rows to the database every batchSize records.
            if (++count % batchSize == 0) {
                ps.executeBatch();
                if (tableName.equals("person")) {
                    ps2.executeBatch();
                }
            }
        }
        ps.executeBatch(); // insert remaining records
        if (tableName.equals("person")) {
            ps2.executeBatch();
        }

        con.commit();
    } catch (Exception e) {
        if (con != null)
            con.rollback();
        if (db != null)
            db.dispose();
        String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e);
        JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace,
                "File Error", JOptionPane.ERROR_MESSAGE);
        throw new Exception("Error occured while executing file. " + stacktrace);
    } finally {
        // NOTE(review): rs is never closed, db is not disposed on the success
        // path, and con (supplied via this.connection) is closed here —
        // confirm the caller expects its connection to be closed.
        if (null != reader)
            reader.close();
        if (null != ps)
            ps.close();
        if (null != ps2)
            ps2.close();
        if (null != con)
            con.close();

        csvReader.close();
    }
}

From source file:com.threerings.media.tile.tools.xml.XMLTileSetParser.java

/**
 * Loads all of the tilesets specified in the supplied XML tileset
 * description file and places them into the supplied map indexed
 * by tileset name. This method is not reentrant, so don't go calling
 * it from multiple threads.
 *
 * @param path a path, relative to the classpath, at which the tileset
 * definition file can be found.
 * @param tilesets the map into which the tilesets will be placed,
 * indexed by tileset name.
 */
public void loadTileSets(String path, Map<String, TileSet> tilesets) throws IOException {
    // resolve the descriptor on the classpath; a missing resource comes back null
    final InputStream input = ConfigUtil.getStream(path);
    if (input == null) {
        throw new FileNotFoundException(
                "Can't load tileset description file from classpath [path=" + path + "].");
    }

    // hand off to the stream-based overload to do the actual parsing
    loadTileSets(input, tilesets);
}

From source file:edu.isi.misd.scanner.network.modules.worker.processors.ptr.PrepToResearchProcessor.java

/**
 * Performs the Prep to Research analysis.
 *
 * @param exchange The current exchange.
 * @return The formatted response.
 * @throws Exception
 */
private ServiceResponse executeAnalysis(Exchange exchange) throws Exception {
    // Response envelope plus the payload object it will carry.
    ServiceResponse response = new ServiceResponse();
    ServiceResponseData responseData = new ServiceResponseData();

    try {
        PrepToResearchRequest request =
                (PrepToResearchRequest) exchange.getIn().getBody(PrepToResearchRequest.class);

        // locate the specified input file named by the DATASOURCE header
        String fileName = (String) exchange.getIn().getHeader(BaseConstants.DATASOURCE);
        if (fileName == null) {
            throw new FileNotFoundException("A null file name was specified");
        }
        String baseInputDir = ConfigUtils.getBaseInputDir(exchange, BaseConstants.WORKER_INPUT_DIR_PROPERTY);
        File file = new File(baseInputDir, fileName);

        // perform the analysis and attach its result to the response
        responseData.setAny(analyzeFile(request, file));
        response.setServiceResponseData(responseData);

        // write the response metadata marking the request complete
        response.setServiceResponseMetadata(MessageUtils.createServiceResponseMetadata(exchange,
                ServiceRequestStateType.COMPLETE, BaseConstants.STATUS_COMPLETE));

    } catch (Exception e) {
        // any failure is reported through the response metadata rather than rethrown
        response.setServiceResponseMetadata(MessageUtils.createServiceResponseMetadata(exchange,
                ServiceRequestStateType.ERROR,
                "Unhandled exception during Prep to Research processing. Caused by [" + e.toString() + "]"));
    }
    return response;
}

From source file:com.dianping.resource.io.util.Log4jConfigurer.java

/**
 * Initialize log4j from the given location, with the given refresh interval
 * for the config file. Assumes an XML file in case of a ".xml" file extension,
 * and a properties file otherwise.
 * <p>Log4j's watchdog thread will asynchronously check whether the timestamp
 * of the config file has changed, using the given interval between checks.
 * A refresh interval of 1000 milliseconds (one second), which allows to
 * do on-demand log level changes with immediate effect, is not unfeasible.
 * <p><b>WARNING:</b> Log4j's watchdog thread does not terminate until VM shutdown;
 * in particular, it does not terminate on LogManager shutdown. Therefore, it is
 * recommended to <i>not</i> use config file refreshing in a production J2EE
 * environment; the watchdog thread would not stop on application shutdown there.
 * @param location the location of the config file: either a "classpath:" location
 * (e.g. "classpath:myLog4j.properties"), an absolute file URL
 * (e.g. "file:C:/log4j.properties), or a plain absolute path in the file system
 * (e.g. "C:/log4j.properties")
 * @param refreshInterval interval between config file refresh checks, in milliseconds
 * @throws java.io.FileNotFoundException if the location specifies an invalid file path
 */
public static void initLogging(String location, long refreshInterval) throws FileNotFoundException {
    String resolvedLocation = SystemPropertyUtils.resolvePlaceholders(location);
    File file = ResourceUtils.getFile(resolvedLocation);
    if (!file.exists()) {
        throw new FileNotFoundException("Log4j config file [" + resolvedLocation + "] not found");
    }
    // Lower-case with a fixed locale: the default locale's rules (e.g. Turkish
    // dotless 'i') would make an upper-case ".XML" extension fail this check.
    if (resolvedLocation.toLowerCase(java.util.Locale.ENGLISH).endsWith(XML_FILE_EXTENSION)) {
        DOMConfigurator.configureAndWatch(file.getAbsolutePath(), refreshInterval);
    } else {
        PropertyConfigurator.configureAndWatch(file.getAbsolutePath(), refreshInterval);
    }
}