Example usage for the org.springframework.dao DataAccessResourceFailureException(String, Throwable) constructor

Introduction

On this page you can find example usages of the org.springframework.dao DataAccessResourceFailureException(String, Throwable) constructor.

Prototype

public DataAccessResourceFailureException(String msg, @Nullable Throwable cause) 

Document

Constructor for DataAccessResourceFailureException, taking a detail message and an optional root cause.
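
Before the collected examples, here is a minimal sketch (not taken from any of the projects below) of how the constructor is typically called: a low-level checked exception is caught and rethrown with a descriptive message and the original cause. ResourceLoader and openResource are hypothetical names used only for illustration.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.springframework.dao.DataAccessResourceFailureException;

public class ResourceLoader {

    // Hypothetical helper: opens a resource and translates low-level I/O
    // failures into Spring's DataAccessResourceFailureException.
    public InputStream openResource(Path location) {
        try {
            return Files.newInputStream(location);
        } catch (IOException e) {
            // Wrap the original cause so callers see a consistent DAO exception
            // instead of a raw IOException.
            throw new DataAccessResourceFailureException(
                    "Unable to open resource [" + location + "]", e);
        }
    }
}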

Usage

From source file: org.dkpro.lab.engine.impl.DefaultTaskContext.java

public StorageKey resolve(String aKey, AccessMode aMode, boolean aAllowMissing) {
    StorageService storage = getStorageService();
    Map<String, String> imports = getMetadata().getImports();

    if (storage.containsKey(getId(), aKey)) {
        // If the context contains the key, we do nothing. Locally available data always
        // supersedes imported data.
        return new StorageKey(getId(), aKey);
    } else if (imports.containsKey(aKey)) {
        URI uri;
        try {
            uri = new URI(imports.get(aKey));
        } catch (URISyntaxException e) {
            throw new DataAccessResourceFailureException(
                    "Imported key [" + aKey + "] resolves to illegal URL [" + imports.get(aKey) + "]", e);
        }

        // Try resolving by ID or by type/constraints
        StorageKey key = null;
        if (CONTEXT_ID_SCHEME.equals(uri.getScheme()) || LATEST_CONTEXT_SCHEME.equals(uri.getScheme())) {
            TaskContextMetadata meta = resolve(uri);
            key = new StorageKey(meta.getId(), uri.getPath());
        }

        // If the resource is imported from another context and will be modified it has to
        // be copied into the current context. The storage may decide though not to copy
        // data at this point if it can assure a copy-on-write behavior. E.g. it may copy
        // imported storage folders now but imported stream-access (files) keys later.
        if (key != null) {
            switch (aMode) {
            case ADD_ONLY:
            case READWRITE:
                storage.copy(getId(), aKey, key, aMode);
                return new StorageKey(getId(), aKey);
            case READONLY:
                return key;
            }
        }

        // If this is an external URL, copy it to the current context and then return a location
        // in the current context.
        InputStream is = null;
        try {
            is = uri.toURL().openStream();
            storage.storeBinary(getId(), aKey, is);
            return new StorageKey(getId(), aKey);
        } catch (MalformedURLException e) {
            throw new DataAccessResourceFailureException(
                    "Imported external key [" + aKey + "] resolves to illegal URL [" + uri + "]", e);
        } catch (IOException e) {
            throw new DataAccessResourceFailureException(
                    "Unable to read data for external key [" + aKey + "] from [" + uri + "]", e);
        } finally {
            close(is);
        }
    } else if (aAllowMissing) {
        return new StorageKey(getId(), aKey);
    }

    throw new DataAccessResourceFailureException("No resource bound to key [" + aKey + "]");
}

From source file: org.guzz.web.context.spring.TransactionManagerUtils.java

/**
 * Convert the given GuzzException to an appropriate exception
 * from the <code>org.springframework.dao</code> hierarchy.
 * @param ex the GuzzException that occurred
 * @return the corresponding DataAccessException instance
 * @see GuzzAccessor#convertGuzzAccessException
 * @see GuzzTransactionManager#convertGuzzAccessException
 */
public static DataAccessException convertGuzzAccessException(GuzzException ex) {
    if (ex instanceof JDBCException) {
        JDBCException e = (JDBCException) ex;

        return new UncategorizedSQLException(e.getMessage(), e.getSQL(), e.getSQLException());
    }

    return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
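
As a rough illustration (not part of the original source), a caller might apply this converter as follows; guzzDao, someGuzzOperation, and loadSafely are hypothetical names used only to show the pattern.

public Object loadSafely(long id) {
    try {
        // Hypothetical Guzz call that may throw a GuzzException.
        return guzzDao.someGuzzOperation(id);
    } catch (GuzzException ex) {
        // Translate into Spring's DataAccessException hierarchy so callers
        // only ever deal with Spring DAO exceptions.
        throw TransactionManagerUtils.convertGuzzAccessException(ex);
    }
}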

From source file: de.tudarmstadt.ukp.csniper.webapp.search.cqp.CqpQuery.java

private Process getCQPProcess() throws DataAccessResourceFailureException {
    try {
        List<String> cmd = new ArrayList<String>();

        cmd.add(engine.getCqpExecutable().getAbsolutePath());
        cmd.add("-r");
        cmd.add(engine.getRegistryPath().getAbsolutePath());
        // run cqp as child process (-c)
        cmd.add("-c");

        if (log.isTraceEnabled()) {
            log.trace("Invoking [" + StringUtils.join(cmd, " ") + "]");
        }

        final ProcessBuilder pb = new ProcessBuilder(cmd);
        return pb.start();
    } catch (IOException e1) {
        throw new DataAccessResourceFailureException("Unable to start CQP process", e1);
    }
}

From source file: com.jpeterson.littles3.dao.je.JeS3ObjectDao.java

public String listKeys(String bucket, String prefix, String marker, String delimiter, int maxKeys)
        throws DataAccessException {
    DatabaseEntry theKey;
    DatabaseEntry theData;
    boolean truncated = false;
    List<S3Object> contents = new ArrayList<S3Object>();
    List<String> commonPrefixes = new ArrayList<String>();
    int results = 0;
    String commonPrefix = null;
    String currentPrefix;
    S3Object o = null;
    String key;
    OperationStatus operationStatus;
    int prefixLength;
    int delimiterIndex;
    boolean processed;

    if (prefix == null) {
        prefix = "";
    }
    prefixLength = prefix.length();

    if ((delimiter != null) && (delimiter.equals(""))) {
        // make delimiter null for performance reasons
        delimiter = null;
    }

    if (maxKeys > MAXIMUM_MAX_KEYS) {
        maxKeys = MAXIMUM_MAX_KEYS;
    }

    // perform query, saving results

    if (maxKeys > 0) {
        Database database = null;
        Cursor cursor = null;
        try {
            S3Object s3ObjectBucketKey;

            database = jeCentral.getDatabase(JeCentral.OBJECT_DB_NAME);
            cursor = database.openCursor(null, null);

            s3ObjectBucketKey = new S3ObjectBucketKey();

            // validate the marker, should start with prefix
            if (marker != null) {
                if (!marker.startsWith(prefix)) {
                    logger.info("marker[" + marker + "] doesn't start with prefix[" + prefix
                            + "], ignoring marker");
                    marker = null;
                }
            }

            s3ObjectBucketKey.setBucket(bucket);
            if (marker != null) {
                // try to find cursor starting with marker
                s3ObjectBucketKey.setKey(marker);
            } else {
                // try to find cursor starting with prefix
                s3ObjectBucketKey.setKey(prefix);
            }

            theKey = new DatabaseEntry();
            s3ObjectBucketKeyBinding.objectToEntry(s3ObjectBucketKey, theKey);
            theData = new DatabaseEntry();

            // get first record "closest" to the key
            if ((operationStatus = cursor.getSearchKeyRange(theKey, theData,
                    LockMode.DEFAULT)) == OperationStatus.SUCCESS) {
                s3ObjectBucketKey = (S3Object) s3ObjectBucketKeyBinding.entryToObject(theKey);

                if (bucket.equals(s3ObjectBucketKey.getBucket())) {
                    key = s3ObjectBucketKey.getKey();
                    if (logger.isTraceEnabled()) {
                        logger.trace("bucket: " + s3ObjectBucketKey.getBucket() + " key: " + key);
                    }
                    if (key.equals(marker)) {
                        // skip to next result after the marker
                        if ((operationStatus = cursor.getNext(theKey, theData,
                                LockMode.DEFAULT)) != OperationStatus.SUCCESS) {
                            key = s3ObjectBucketKey.getKey();
                        }
                    }

                    // restrict to results that begin with the prefix
                    while ((operationStatus == OperationStatus.SUCCESS) && key.startsWith(prefix)) {
                        if (!bucket.equals(s3ObjectBucketKey.getBucket())) {
                            break;
                        }

                        if (results >= maxKeys) {
                            truncated = true;
                            break;
                        }

                        // valid result
                        o = (S3Object) fileS3ObjectBinding.entryToObject(theData);

                        // is it a content or a common prefix?
                        processed = false;
                        if (delimiter != null) {
                            key = o.getKey();
                            if ((delimiterIndex = key.indexOf(delimiter, prefixLength)) != -1) {
                                // include the delimiter in the common
                                // prefix
                                currentPrefix = key.substring(0, delimiterIndex + delimiter.length());
                                if (currentPrefix.equals(commonPrefix)) {
                                    // skip common prefix
                                    processed = true;
                                } else {
                                    // new common prefix
                                    commonPrefix = currentPrefix;
                                    commonPrefixes.add(commonPrefix);
                                    ++results;
                                    processed = true;
                                }
                            }
                        }
                        if (!processed) {
                            contents.add(o);
                            ++results;
                            processed = true;
                        }

                        if ((operationStatus = cursor.getNext(theKey, theData,
                                LockMode.DEFAULT)) != OperationStatus.SUCCESS) {
                            break;
                        }

                        s3ObjectBucketKey = (S3Object) s3ObjectBucketKeyBinding.entryToObject(theKey);
                        key = s3ObjectBucketKey.getKey();
                    }
                }
            }
        } catch (DatabaseException e) {
            throw new DataAccessResourceFailureException("Unable to read database record", e);
        } finally {
            if (cursor != null) {
                try {
                    cursor.close();
                } catch (DatabaseException e) {
                    // do nothing
                }
                cursor = null;
            }
        }
    }

    // generate XML from results

    StringBuffer buffer = new StringBuffer();

    buffer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    buffer.append("<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">");
    buffer.append("<Name>").append(bucket).append("</Name>");
    if (prefix.equals("")) {
        buffer.append("<Prefix/>");
    } else {
        buffer.append("<Prefix>").append(prefix).append("</Prefix>");
    }
    if ((marker == null) || (marker.equals(""))) {
        buffer.append("<Marker/>");
    } else {
        buffer.append("<Marker>").append(marker).append("</Marker>");
    }
    buffer.append("<MaxKeys>").append(maxKeys).append("</MaxKeys>");
    if (delimiter == null) {
        buffer.append("<Delimiter/>");
    } else {
        buffer.append("<Delimiter>").append(delimiter).append("</Delimiter>");
        // next marker is last key
        if (results > 0) {
            buffer.append("<NextMarker>").append(o.getKey()).append("</NextMarker>");
        }
    }
    buffer.append("<IsTruncated>").append(truncated).append("</IsTruncated>");

    // print out contents
    for (Iterator iter = contents.iterator(); iter.hasNext();) {
        o = (S3Object) iter.next();
        buffer.append("<Contents>");
        buffer.append("<Key>").append(o.getKey()).append("</Key>");
        buffer.append("<LastModified>").append(iso8601.format(o.getLastModified())).append("</LastModified>");
        buffer.append("<ETag>\"").append(o.getETag()).append("\"</ETag>");
        buffer.append("<Size>").append(o.getContentLength()).append("</Size>");
        buffer.append("<Owner>");
        Acp acp = o.getAcp();
        CanonicalUser owner = acp.getOwner();
        buffer.append("<ID>").append(owner.getId()).append("</ID>");
        buffer.append("<DisplayName>").append(owner.getDisplayName()).append("</DisplayName>");
        buffer.append("</Owner>");
        buffer.append("<StorageClass>STANDARD</StorageClass>");
        buffer.append("</Contents>");
    }

    // print common prefixes
    for (Iterator iter = commonPrefixes.iterator(); iter.hasNext();) {
        buffer.append("<CommonPrefixes>");
        buffer.append("<Prefix>").append((String) iter.next()).append("</Prefix>");
        buffer.append("</CommonPrefixes>");
    }

    buffer.append("</ListBucketResult>");

    return buffer.toString();
}

From source file: org.grails.orm.hibernate.GrailsHibernateTemplate.java

protected Session getSession() {
    try {
        return sessionFactory.getCurrentSession();
    } catch (HibernateException ex) {
        throw new DataAccessResourceFailureException("Could not obtain current Hibernate Session", ex);
    }
}

From source file: com.ethlo.geodata.GeodataServiceImpl.java

public int loadHierarchy() {
    nodes = new HashMap<>();

    final File hierarchyFile = new File(baseDirectory, "hierarchy");
    if (!hierarchyFile.exists()) {
        return 0;
    }

    final Map<Long, Long> childToParent = new HashMap<>();
    try {
        new HierarchyImporter(hierarchyFile).processFile(r -> {
            final String featureCode = r.get("feature_code");
            if (featureCode == null || "adm".equalsIgnoreCase(featureCode)) {
                final long parentId = Long.parseLong(r.get("parent_id"));
                final long childId = Long.parseLong(r.get("child_id"));
                final Node parent = nodes.getOrDefault(parentId, new Node(parentId));
                final Node child = nodes.getOrDefault(childId, new Node(childId));
                nodes.put(parent.getId(), parent);
                nodes.put(child.getId(), child);
                childToParent.put(childId, parentId);
            }
        });
    } catch (IOException exc) {
        throw new DataAccessResourceFailureException(exc.getMessage(), exc);
    }

    // Process hierarchy after, as we do not know the order of the pairs
    childToParent.entrySet().forEach(e -> {
        final long child = e.getKey();
        final long parent = e.getValue();
        final Node childNode = nodes.get(child);
        final Node parentNode = nodes.get(parent);
        parentNode.addChild(childNode);
        childNode.setParent(parentNode);
    });

    final Set<Node> roots = new HashSet<>();
    for (Entry<Long, Node> e : nodes.entrySet()) {
        if (e.getValue().getParent() == null) {
            roots.add(e.getValue());
        }
    }

    publisher.publishEvent(new DataLoadedEvent(this, DataType.HIERARCHY, Operation.LOAD, childToParent.size(),
            childToParent.size()));

    return childToParent.size();
}

From source file: com.ethlo.geodata.GeodataServiceImpl.java

@Override
public byte[] findBoundaries(long id, @Valid View view) {
    final byte[] fullWkb = this.findBoundaries(id);

    if (fullWkb == null) {
        return null;
    }

    final WKBReader reader = new WKBReader();
    try {
        final Stopwatch stopwatch = Stopwatch.createStarted();
        final Geometry full = reader.read(fullWkb);
        Geometry simplified = GeometryUtil.simplify(full, view, qualityConstant);
        if (simplified == null) {
            return createEmptyGeometry();
        }

        final Geometry clipped = GeometryUtil.clip(
                new Envelope(view.getMinLng(), view.getMaxLng(), view.getMinLat(), view.getMaxLat()),
                simplified);
        if (clipped != null) {
            simplified = clipped;
        }

        logger.debug("locationId: {}, original points: {}, remaining points: {}, ratio: {}, elapsed: {}", id,
                full.getNumPoints(), simplified.getNumPoints(),
                full.getNumPoints() / (double) simplified.getNumPoints(), stopwatch);
        return new WKBWriter().write(simplified);
    } catch (ParseException exc) {
        throw new DataAccessResourceFailureException(exc.getMessage(), exc);
    }
}

From source file: com.ethlo.geodata.GeodataServiceImpl.java

@Override
public byte[] findBoundaries(long id, double maxTolerance) {
    final byte[] fullWkb = this.findBoundaries(id);

    if (fullWkb == null) {
        return null;
    }

    final WKBReader reader = new WKBReader();
    try {
        final Stopwatch stopwatch = Stopwatch.createStarted();
        final Geometry full = reader.read(fullWkb);
        final Geometry simplified = GeometryUtil.simplify(full, maxTolerance);
        if (simplified == null) {
            return createEmptyGeometry();
        }
        logger.debug("locationId: {}, original points: {}, remaining points: {}, ratio: {}, elapsed: {}", id,
                full.getNumPoints(), simplified.getNumPoints(),
                full.getNumPoints() / (double) simplified.getNumPoints(), stopwatch);
        return new WKBWriter().write(simplified);
    } catch (ParseException exc) {
        throw new DataAccessResourceFailureException(exc.getMessage(), exc);
    }
}

From source file: de.tudarmstadt.ukp.csniper.webapp.evaluation.EvaluationRepository.java

private String loadQuery(String aLocation) {
    InputStream is = null;
    try {
        is = ResourceUtils.resolveLocation(aLocation, null, null).openStream();
        return IOUtils.toString(is);
    } catch (IOException e) {
        throw new DataAccessResourceFailureException("Unable to load query from [" + aLocation + "]", e);
    } finally {
        IOUtils.closeQuietly(is);
    }
}

From source file: com._4dconcept.springframework.data.marklogic.core.MarklogicTemplate.java

private void doInsertContent(Content content) {
    doInSession((session) -> {
        try {
            session.insertContent(content);
        } catch (RequestException re) {
            throw new DataAccessResourceFailureException("Unable to execute request", re);
        }
    });
}