Example usage for the org.springframework.dao DataAccessResourceFailureException(String, Throwable) constructor

Introduction

On this page you can find example usages of the org.springframework.dao DataAccessResourceFailureException(String msg, Throwable cause) constructor.

Prototype

public DataAccessResourceFailureException(String msg, @Nullable Throwable cause) 

Document

Constructor for DataAccessResourceFailureException, taking a detail message and the root cause from the underlying data access API.
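
Before the individual usage examples below, here is a minimal, hypothetical sketch of the typical pattern: a checked, resource-level failure (here an IOException) is caught and rethrown through this constructor so that callers only deal with Spring's unchecked DataAccessException hierarchy. The ResourceLoader class and openResource method are illustrative names, not part of Spring.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.springframework.dao.DataAccessResourceFailureException;

public class ResourceLoader {

    // Translate the checked IOException into Spring's unchecked
    // DataAccessException hierarchy, keeping the original exception as the cause.
    public InputStream openResource(Path path) {
        try {
            return Files.newInputStream(path);
        } catch (IOException e) {
            throw new DataAccessResourceFailureException("Cannot open resource [" + path + "]", e);
        }
    }
}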

Usage

From source file:com.ethlo.geodata.importer.GeonamesImporter.java

@Override
public CloseableIterator<Map<String, String>> iterator() throws IOException {
    final Map<Long, Long> childToParentMap = new HashMap<>();
    final Set<Long> inHierarchy = new TreeSet<>();
    new HierarchyImporter(hierarchyFile).processFile(h -> {
        childToParentMap.put(Long.parseLong(h.get("child_id")), Long.parseLong(h.get("parent_id")));
        inHierarchy.add(Long.parseLong(h.get("child_id")));
        inHierarchy.add(Long.parseLong(h.get("parent_id")));
    });

    // Load alternate names
    final Map<Long, String> preferredNames = loadPreferredNames("EN");

    final BufferedReader reader = IoUtils.getBufferedReader(allCountriesFile);

    final AbstractIterator<Map<String, String>> actual = new AbstractIterator<Map<String, String>>() {
        @Override
        protected Map<String, String> computeNext() {
            try {
                String line;
                while ((line = reader.readLine()) != null) {
                    final Map<String, String> map = lineToMap(line, preferredNames, childToParentMap,
                            inHierarchy);
                    if (map != null) {
                        return map;
                    }
                }
                return endOfData();
            } catch (IOException exc) {
                throw new DataAccessResourceFailureException("Cannot read line from file", exc);
            }
        }
    };

    return new CloseableIterator<Map<String, String>>() {
        @Override
        public boolean hasNext() {
            return actual.hasNext();
        }

        @Override
        public Map<String, String> next() {
            return actual.next();
        }

        @Override
        public void close() {
            IOUtils.closeQuietly(reader);
        }
    };
}

From source file:org.dkpro.lab.storage.filesystem.FileSystemStorageService.java

@Override
public <T extends StreamReader> T retrieveBinary(String aContextId, String aKey, T aConsumer) {
    InputStream is = null;
    int currentTry = 1;
    IOException lastException = null;

    while (currentTry <= MAX_RETRIES) {
        try {
            is = new FileInputStream(new File(getContextFolder(aContextId, true), aKey));
            if (aKey.endsWith(".gz")) {
                is = new GZIPInputStream(is);
            }
            aConsumer.read(is);
            return aConsumer;
        } catch (IOException e) {
            // https://code.google.com/p/dkpro-lab/issues/detail?id=64
            // may be related to a concurrent access so try again after some time
            lastException = e;

            currentTry++;
            log.debug(currentTry + ". try accessing " + aKey + " in context " + aContextId);

            try {
                Thread.sleep(SLEEP_TIME);
            } catch (InterruptedException e1) {
                // we should probably abort the whole thing
                currentTry = MAX_RETRIES;
            }
        } catch (Throwable e) {
            throw new DataAccessResourceFailureException(
                    "Unable to load [" + aKey + "] from context [" + aContextId + "]", e);
        } finally {
            Util.close(is);
        }
    }

    throw new DataAccessResourceFailureException(
            "Unable to access [" + aKey + "] in context [" + aContextId + "]", lastException);
}
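
The retry-then-wrap idiom above, reduced to a self-contained, hypothetical sketch (RetryingReads is not part of dkpro-lab): the last IOException is remembered so that it can be reported as the cause once all attempts are exhausted.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.springframework.dao.DataAccessResourceFailureException;

public final class RetryingReads {

    private static final int MAX_RETRIES = 3;
    private static final long SLEEP_TIME_MS = 1000;

    // Retry the read a few times; if every attempt fails, surface the last
    // IOException as the cause of a DataAccessResourceFailureException.
    public static byte[] readWithRetry(Path file) {
        IOException lastException = null;
        for (int attempt = 1; attempt <= MAX_RETRIES; attempt++) {
            try {
                return Files.readAllBytes(file);
            } catch (IOException e) {
                lastException = e;
                try {
                    Thread.sleep(SLEEP_TIME_MS);
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
        throw new DataAccessResourceFailureException(
                "Unable to read [" + file + "] after " + MAX_RETRIES + " attempts", lastException);
    }
}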

From source file:org.grails.datastore.mapping.cassandra.engine.CassandraEntityPersister.java

private void performInsertion(Keyspace keyspace, String key, Map<String, List<SuperColumn>> insertMap,
        @SuppressWarnings("unused") KeyValueEntry nativeEntry) {
    try {
        keyspace.batchInsert(key, null, insertMap);
    } catch (HectorException e) {
        throw new DataAccessResourceFailureException("Exception occurred invoking Cassandra: " + e.getMessage(),
                e);
    }
}

From source file:org.grails.datastore.mapping.redis.util.JedisTemplate.java

public Object execute(RedisCallback<Jedis> jedisRedisCallback) {
    try {
        if (redis == null) {
            redis = getNewConnection();
            doConnect();
        } else {
            if (!redis.isConnected()) {
                try {
                    doConnect();
                } catch (JedisConnectionException e) {
                    throw new DataAccessResourceFailureException(
                            "Connection failure connecting to Redis: " + e.getMessage(), e);
                }
            }
        }

        return jedisRedisCallback.doInRedis(redis);
    } catch (IOException e) {
        throw new DataAccessResourceFailureException(
                "I/O exception thrown connecting to Redis: " + e.getMessage(), e);
    }
}
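
A reduced, hypothetical sketch of the same connect-then-translate pattern against the plain Jedis client (the RedisAccess class is illustrative and not part of the Grails Redis support; it assumes Jedis exposes isConnected(), connect() and get() as in common Jedis releases):

import org.springframework.dao.DataAccessResourceFailureException;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;

public final class RedisAccess {

    // Make sure the connection is live before use and translate connection
    // failures into Spring's DataAccessResourceFailureException.
    public static String get(Jedis redis, String key) {
        try {
            if (!redis.isConnected()) {
                redis.connect();
            }
            return redis.get(key);
        } catch (JedisConnectionException e) {
            throw new DataAccessResourceFailureException(
                    "Connection failure connecting to Redis: " + e.getMessage(), e);
        }
    }
}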

From source file:org.dkpro.lab.engine.impl.DefaultTaskContext.java

@Override
public File getFile(String aKey, AccessMode aMode) {
    StorageKey key;

    StorageService storage = getStorageService();
    Map<String, String> imports = getMetadata().getImports();

    if (storage.containsKey(getId(), aKey)) {
        // If the context contains the key, we do nothing. Locally available data always
        // supersedes imported data.
        key = new StorageKey(getId(), aKey);
    } else if (imports.containsKey(aKey)) {
        URI uri;
        try {
            uri = new URI(imports.get(aKey));
        } catch (URISyntaxException e) {
            throw new DataAccessResourceFailureException(
                    "Imported key [" + aKey + "] resolves to illegal URL [" + imports.get(aKey) + "]", e);
        }

        if ("file".equals(uri.getScheme()) && new File(uri).isFile()) {
            if (aMode == AccessMode.READONLY) {
                return new File(uri);
            } else {
                // Here we should probably just copy the imported file into the context
                throw new DataAccessResourceFailureException(
                        "READWRITE access of imported " + "files is not implemented yet.");
            }
        } else {
            key = resolve(aKey, aMode, true);
        }
    } else {
        key = resolve(aKey, aMode, true);
    }

    File file = getStorageService().locateKey(key.contextId, key.key);

    if (file.exists() && !file.isFile()) {
        throw new DataAccessResourceFailureException(
                "Key [" + aKey + "] resolves to [" + file + "] which is not a file.");
    }

    return file;
}

From source file:org.dkpro.lab.Util.java

public static <T extends StreamReader> T retrieveBinary(final File aFile, final T aConsumer) {
    InputStream is = null;
    try {
        is = new FileInputStream(aFile);
        if (aFile.getName().toLowerCase().endsWith(".gz")) {
            is = new GZIPInputStream(is);
        }
        aConsumer.read(is);
        return aConsumer;
    } catch (IOException e) {
        throw new DataAccessResourceFailureException(e.getMessage(), e);
    } finally {
        Util.close(is);
    }
}

From source file:com.jpeterson.littles3.dao.je.JeS3ObjectDao.java

public void removeS3Object(S3Object s3Object) throws DataAccessException {
    DatabaseEntry theKey;

    // Environment myDbEnvironment = null;
    Database database = null;

    try {
        theKey = new DatabaseEntry();
        s3ObjectBucketKeyBinding.objectToEntry(s3Object, theKey);

        // TODO: generalize this
        /*
         * EnvironmentConfig envConfig = new EnvironmentConfig();
         * envConfig.setAllowCreate(true); myDbEnvironment = new
         * Environment(new File( "C:/temp/StorageEngine/db"), envConfig);
         * 
         * DatabaseConfig dbConfig = new DatabaseConfig();
         * dbConfig.setAllowCreate(true); database =
         * myDbEnvironment.openDatabase(null, "sampleDatabase", dbConfig);
         */

        database = jeCentral.getDatabase(JeCentral.OBJECT_DB_NAME);

        database.delete(null, theKey);
    } catch (DatabaseException e) {
        throw new DataAccessResourceFailureException("Unable to delete a database record", e);
    } finally {
        /*
         * if (database != null) { try { database.close(); } catch
         * (DatabaseException e) { // do nothing } } database = null;
         * 
         * if (myDbEnvironment != null) { try { myDbEnvironment.close(); }
         * catch (DatabaseException e) { // do nothing } } myDbEnvironment =
         * null;
         */
    }
}

From source file:org.grails.datastore.mapping.redis.util.JedisTemplate.java

private void doAuthentication() {
    if (password != null) {
        try {
            redis.auth(password);
        } catch (Exception e) {
            throw new DataAccessResourceFailureException(
                    "I/O exception authenticating with Redis: " + e.getMessage(), e);
        }
    }
}

From source file:org.dkpro.lab.storage.filesystem.FileSystemStorageService.java

@Override
public void storeBinary(String aContextId, String aKey, StreamWriter aProducer) {
    File context = getContextFolder(aContextId, false);
    File tmpFile = new File(context, aKey + ".tmp");
    File finalFile = new File(context, aKey);

    OutputStream os = null;
    try {
        tmpFile.getParentFile().mkdirs(); // Necessary if the key addresses a sub-directory
        log.debug("Storing to: " + finalFile);
        os = new FileOutputStream(tmpFile);
        if (aKey.endsWith(".gz")) {
            os = new GZIPOutputStream(os);
        }
        aProducer.write(os);
    } catch (Exception e) {
        tmpFile.delete();
        throw new DataAccessResourceFailureException(e.getMessage(), e);
    } finally {
        Util.close(os);
    }

    // On some platforms, it is not possible to rename a file to another one which already
    // exists. So try to delete the target file before renaming.
    if (finalFile.exists()) {
        boolean deleteSuccess = finalFile.delete();
        if (!deleteSuccess) {
            throw new DataAccessResourceFailureException(
                    "Unable to delete [" + finalFile + "] in order to replace it with an updated version.");
        }
    }

    // Make sure the file is only visible under the final name after all data has been
    // written into it.
    boolean renameSuccess = tmpFile.renameTo(finalFile);
    if (!renameSuccess) {
        throw new DataAccessResourceFailureException(
                "Unable to rename [" + tmpFile + "] to [" + finalFile + "]");
    }
}
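
As a design note, the temp-file-plus-rename dance above exists because File.renameTo cannot replace an existing file on every platform. A hypothetical java.nio variant of the same publish pattern (SafePublish is not part of dkpro-lab) avoids the manual delete by using Files.move with REPLACE_EXISTING, while still wrapping any failure in a DataAccessResourceFailureException:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

import org.springframework.dao.DataAccessResourceFailureException;

public final class SafePublish {

    // Write to a temporary sibling file first, then move it over the final
    // name so the data only becomes visible once it is complete.
    public static void store(Path finalFile, byte[] data) {
        Path tmpFile = finalFile.resolveSibling(finalFile.getFileName() + ".tmp");
        try {
            if (finalFile.getParent() != null) {
                Files.createDirectories(finalFile.getParent());
            }
            try (OutputStream os = Files.newOutputStream(tmpFile)) {
                os.write(data);
            }
            Files.move(tmpFile, finalFile, StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            try {
                Files.deleteIfExists(tmpFile);
            } catch (IOException ignored) {
                // best-effort cleanup of the partial temporary file
            }
            throw new DataAccessResourceFailureException("Unable to store [" + finalFile + "]", e);
        }
    }
}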

From source file:org.grails.datastore.mapping.cassandra.engine.CassandraEntityPersister.java

private Keyspace getKeyspace() {
    final String keyspaceName = getKeyspaceName();
    try {
        return cassandraClient.getKeyspace(keyspaceName);
    } catch (HectorException e) {
        throw new DataAccessResourceFailureException("Exception occurred invoking Cassandra: " + e.getMessage(),
                e);
    }
}