com.archivas.clienttools.arcutils.impl.adapter.Hcap2Adapter.java Source code

Java tutorial

Introduction

Here is the source code for com.archivas.clienttools.arcutils.impl.adapter.Hcap2Adapter.java

Source

// Copyright 2007 Hitachi Data Systems
// All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package com.archivas.clienttools.arcutils.impl.adapter;

import com.archivas.clienttools.arcutils.api.jobs.DeleteJob;
import com.archivas.clienttools.arcutils.config.HCPMoverConstants;
import com.archivas.clienttools.arcutils.config.HCPMoverProperties;
import com.archivas.clienttools.arcutils.profile.AbstractProfileBase;
import com.archivas.clienttools.arcutils.profile.HCAPProfile;
import com.archivas.clienttools.arcutils.profile.Hcap2Profile;
import com.archivas.clienttools.arcutils.model.*;
import com.archivas.clienttools.arcutils.utils.RFC2396Encoder;
import com.archivas.clienttools.arcutils.utils.UidGidUtil;
import com.archivas.clienttools.arcutils.utils.net.SSLCertChain;
import com.archivas.clienttools.arcutils.utils.net.SSLCertException;
import com.archivas.clienttools.arcutils.utils.net.SSLCertificateCallback;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.utils.URIUtils;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.conn.ConnectionPoolTimeoutException;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.protocol.BasicHttpContext;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;

import javax.net.ssl.SSLException;
import javax.xml.stream.XMLStreamReader;
import java.io.*;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;

/**
 * Adapter for accessing objects (Files, Directories, and their Metadata) in an archive, using the
 * HTTP protocol (or derived protocols such as HTTPS).
 *
 */
public class Hcap2Adapter extends HCAPAdapter {

    SSLCertChain sslCerts = null;
    Hcap2Profile profile;

    /**
     * Creates an adapter for an HCAP 2.x cluster, optionally reusing an existing
     * HttpClient. Reusing the client lets the CopyJob share one HttpClient (which caches
     * the connections) for an entire Job. This is dirty, because not all adapters have an
     * HTTPClient.
     *
     * @param profile
     *            profile describing the target cluster (host, port, protocol, credentials)
     * @param sslExceptionCallback
     *            callback used to decide whether to trust an unrecognized SSL certificate
     * @param httpClient
     *            client to reuse; when null a fresh one is created via init()
     * @throws StorageAdapterException
     *             if we cannot initialize
     */
    public Hcap2Adapter(Hcap2Profile profile, SSLCertificateCallback sslExceptionCallback,
            AbstractHttpClient httpClient) throws StorageAdapterException {
        super();
        this.profile = profile;
        this.httpClient = httpClient;
        // Only build a new client when the caller did not supply one to reuse.
        if (httpClient == null) {
            init(sslExceptionCallback);
        }
        this.sslExceptionCallback = sslExceptionCallback;

        // debug start: unique name for tracing this adapter instance in logs
        debugName = "hcp2adapter" + ++adapterCnt;
    }

    /**
     * Sets the profile to use for all the connections through this adapter. The profile
     * contains namespace username, password and other important information for connecting
     * to an authenticated namespace.
     *
     * Profiles of any other type are silently ignored, matching the original contract.
     *
     * @param profile
     *            - the profile used for all connections through this adapter; must be an
     *            Hcap2Profile to take effect
     */
    public void setProfile(AbstractProfileBase profile) {
        // Bug fix: the guard previously tested for the broader HCAPProfile type while
        // casting to Hcap2Profile, so any other HCAPProfile subclass (e.g. a newer HCP
        // profile) would throw ClassCastException. Guard with the exact type we cast to.
        if (profile instanceof Hcap2Profile) {
            this.profile = (Hcap2Profile) profile;
        }
    }

    /**
     * Returns the profile used for every connection made through this adapter.
     *
     * @return the Hcap2Profile backing this adapter, typed as HCAPProfile
     */
    public HCAPProfile getProfile() {
        return profile;
    }

    /**
     * Creates a directory at {@code path} on the cluster, optionally applying system
     * metadata (shred, retention, and version-specific extras) after the directory exists.
     *
     * @param path
     *            directory path to create; resolved against the profile unless it already
     *            targets the metadata mount
     * @param metadata
     *            system metadata to apply; may be null
     * @throws StorageAdapterException
     *             if the request cannot be built or the server reports a failure
     * @inheritDoc
     */
    public void mkdir(final String path, FileMetadata metadata) throws StorageAdapterException {
        HttpHost httpHost = new HttpHost(getHost(), profile.getPort(), profile.getProtocol());

        // Metadata that can be expressed as query parameters rides along on the request.
        String queryString = null;
        if (metadata != null) {
            queryString = generateQueryParameters(metadata, true);
        }

        // Constructing a uri handles the filename encoding for us here
        String uriPath = path;
        if (!path.startsWith(HttpGatewayConstants.METADATA_MOUNT_URL_DIR)) {
            uriPath = getProfile().resolvePath(uriPath);
        }

        URI uri;
        URL url;
        try {
            String urlPath = uriPath;
            if (queryString != null) {
                urlPath = urlPath + "?" + queryString;
            }
            // NOTE(review): unlike writeObjectFromStream, this URL omits profile.getPort();
            // the HttpHost above carries the port, so this appears intentional -- confirm.
            url = new URL(profile.getProtocol(), getHost(), urlPath);
            uri = url.toURI();
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URI for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new directory on the server", e);
        } catch (MalformedURLException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URL for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new directory on the server", e);
        }
        HttpMkdir request = new HttpMkdir(uri);

        // Eventually we will just return this cookie which will be passed back to the caller.
        HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
        // The adapter supports only one in-flight request; publishing the cookie under the
        // lock lets close() abort it and makes a second concurrent call fail fast.
        synchronized (savingCookieLock) {
            if (savedCookie != null) {
                throw new RuntimeException(
                        "This adapter already has a current connection to host -- cannot create two at once.");
            }
            savedCookie = cookie;
        }

        try {
            executeMethod(cookie);
            this.handleHttpResponse(cookie.getResponse(), "creating directory", path);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, "creating directory", path);
        } finally {
            // Always release the connection/cookie, success or failure.
            close();
        }

        // Now we also have to set shred, retention, and other future items
        // (these cannot ride on the mkdir request itself for 2.x directories).
        if (metadata != null && metadata.hasShred()) {
            setShredOnDirectory(path, metadata.isShred());
        }
        if (metadata != null && metadata.hasRetention()) {
            this.setRetentionOnDirectory(path, metadata.getRetention());
        }
        setAdditionalMetadataOnDirectories(path, metadata);
    }

    /**
     * Streams an object to the cluster via HTTP PUT, optionally attaching ingestion
     * metadata as query parameters.
     *
     * @param targetNode
     *            node (host) to send the PUT to
     * @param targetPath
     *            destination path; resolved against the profile unless it already targets
     *            the metadata mount
     * @param is
     *            content stream; NOT closed by this method -- caller retains ownership
     * @param ingestionMetadata
     *            metadata to apply at write time; may be null
     * @throws StorageAdapterException
     *             if the URI cannot be built or the server reports a failure
     * @inheritDoc
     */
    public void writeObjectFromStream(final String targetNode, final String targetPath, final InputStream is,
            final FileMetadata ingestionMetadata) throws StorageAdapterException {
        HttpHost httpHost = new HttpHost(targetNode, profile.getPort(), profile.getProtocol());
        String filePath = targetPath;

        if (!filePath.startsWith(HttpGatewayConstants.METADATA_MOUNT_URL_DIR)) {
            filePath = getProfile().resolvePath(filePath);
        }

        String queryString = null;
        if (ingestionMetadata != null) {
            queryString = generateQueryParameters(ingestionMetadata, false);
        }

        URI uri = null;
        try {
            uri = URIUtils.createURI(profile.getProtocol(), targetNode, profile.getPort(), filePath, queryString,
                    null);
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URI for : " + targetPath);
            throw new StorageAdapterLiteralException("Error writing object to the server", e);
        }
        HttpPut request = new HttpPut(uri);

        // -1 content length = unknown; the entity is sent with streaming/chunked transfer.
        InputStreamEntity isEntity = new InputStreamEntity(is, -1);
        request.setEntity(isEntity);

        // Use Expect: 100-continue so the server can reject the request (auth, path) before
        // we ship the body; wait at most 100ms for the interim response.
        request.getParams().setParameter(CoreProtocolPNames.USE_EXPECT_CONTINUE, Boolean.TRUE);
        request.getParams().setParameter(CoreProtocolPNames.WAIT_FOR_CONTINUE, 100);

        // Eventually we will just return this cookie which will be passed back to the caller.
        HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
        // Single in-flight request per adapter; publish under the lock (see mkdir).
        synchronized (savingCookieLock) {
            if (savedCookie != null) {
                throw new RuntimeException(
                        "This adapter already has a current connection to host -- cannot create two at once.");
            }
            savedCookie = cookie;
        }
        try {
            executeMethod(cookie);
            this.handleHttpResponse(cookie.getResponse(), "writing", targetPath);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, "writing", targetPath);
        } finally {
            close();
        }
    }

    /**
     * Not supported by this adapter; always throws {@link UnsupportedOperationException}.
     * On HCAP 2.x metadata is applied at write time via query parameters or by writing the
     * per-object metadata files -- presumably why this operation is unimplemented here.
     */
    public void setMetadata(final String targetNode, final String path, final FileMetadata metadata)
            throws StorageAdapterException {
        throw new UnsupportedOperationException();
    }

    /**
     * We have to override the parent call to this because in order to set the retention-hold
     * on an object in HCAP 3.0 we have to actually write the hold token into the object's
     * retention.txt metadata file.
     *
     * @param targetPath
     *            path of the object to place under hold
     * @param fileType
     *            type of the object, used to locate its metadata directory
     * @throws StorageAdapterException
     *             if the write to the retention file fails
     * @inheritDoc
     */
    public void setRetentionHoldToTrue(final String targetPath, final FileType fileType)
            throws StorageAdapterException {
        // Change the filePath to go to the retention file under the metadata mount.
        String filePath = getMetadataPathForObject(targetPath, fileType, true) + "/"
                + HttpGatewayConstants.RENTENTION_TXT;

        // try-with-resources replaces the manual close; writeObjectFromStream checks the
        // status returned by HCP and throws on failure, so a close()-time IOException can
        // simply be logged (ByteArrayInputStream.close() is a no-op and never throws).
        try (InputStream is = new ByteArrayInputStream(HttpGatewayConstants.RENTENTION_TXT_HOLD.getBytes())) {
            this.writeObjectFromStream(getHost(), filePath, is, null);
        } catch (IOException e) {
            LOG.log(Level.INFO, "IO Error while setting retention hold", e);
        }
    }

    /**
     * Extension hook for future adapter implementations (newer HCP versions) to apply
     * additional directory metadata after mkdir(). The 2.x base implementation applies
     * nothing.
     *
     * @param targetPath
     *            directory whose extra metadata should be applied
     * @param metadata
     *            metadata supplied to mkdir(); may be null
     */
    protected void setAdditionalMetadataOnDirectories(final String targetPath, FileMetadata metadata)
            throws StorageAdapterException {
        // do nothing
    }

    /**
     * Applies the shred flag to a directory by writing "1" or "0" into its shred.txt
     * metadata file -- the only mechanism available prior to authenticated namespaces.
     */
    private void setShredOnDirectory(final String targetPath, final boolean value) throws StorageAdapterException {
        final String shredFlag;
        if (value) {
            shredFlag = "1";
        } else {
            shredFlag = "0";
        }
        setDirectorySystemMetadata(targetPath, shredFlag, HttpGatewayConstants.SHRED_TXT);
    }

    /**
     * Applies retention to a directory by writing the retention's HCAP value into the
     * directory's retention metadata file -- the only mechanism available prior to
     * authenticated namespaces.
     */
    private void setRetentionOnDirectory(final String targetPath, final Retention retention)
            throws StorageAdapterException {
        setDirectorySystemMetadata(targetPath, retention.getHCAPValue(), HttpGatewayConstants.RENTENTION_TXT);
    }

    /**
     * Writes {@code value} into the named system-metadata file (e.g. shred.txt or the
     * retention file) under the directory's metadata settings path.
     *
     * @param targetPath
     *            directory whose metadata file is being written
     * @param value
     *            raw contents to store in the metadata file
     * @param fileUsedToSet
     *            metadata file name (one of the HttpGatewayConstants *_TXT constants)
     * @throws StorageAdapterException
     *             if the write fails on the server
     */
    protected void setDirectorySystemMetadata(final String targetPath, String value, String fileUsedToSet)
            throws StorageAdapterException {

        String filePath = getMetadataPathForObject(targetPath, FileType.DIRECTORY, true) + "/" + fileUsedToSet;

        // try-with-resources replaces the manual close boilerplate; writeObjectFromStream
        // checks the status returned by HCP and throws on failure, so a close()-time
        // IOException can simply be logged (ByteArrayInputStream.close() never throws).
        try (InputStream is = new ByteArrayInputStream(value.getBytes())) {
            this.writeObjectFromStream(getHost(), filePath, is, null);
        } catch (IOException e) {
            LOG.log(Level.FINE, "IO Error while setting directory system metadata", e);
        }
    }

    /**
     * Load Metadata with the directory list if there is less than this number of files in
     * the directory. Separate method so it can be overridden without overriding all of
     * getDirectory().
     *
     * @return the 2.6 directory-list metadata threshold from configuration
     */
    protected int getDirListMetadataThreshold() {
        final int threshold = HCPMoverProperties.DIR_LIST_METADATA_THRESHOLD_26.getAsInt();
        return threshold;
    }

    /**
     * Fetches the object's custom metadata (custom-metadata.xml) as a string.
     *
     * NOTE(review): {@code version} is not used on this code path -- confirm versioned
     * custom-metadata reads are not expected for HCAP 2.x.
     */
    public String getCustomMetadata(String filePath, final Long version, FileType fileType)
            throws StorageAdapterException {
        final String metadataPath =
                this.getMetadataPathForObject(filePath, fileType, false) + "/custom-metadata.xml";
        return getCustomMetadata(metadataPath);
    }

    /**
     * Reads and returns the contents of the custom-metadata file at {@code filePath}.
     *
     * @param filePath
     *            full metadata-mount path of the custom-metadata.xml file
     * @return the custom metadata as produced by getCustomMetadata(Reader)
     * @throws StorageAdapterLiteralException
     *             rethrown as-is, or wrapping any other StorageAdapterException
     * @throws StorageAdapterException
     *             if an IOException occurs while reading
     */
    private String getCustomMetadata(String filePath) throws StorageAdapterException {
        String metadataPath = filePath;
        InputStream inputStream = null;
        BufferedReader reader = null;
        try {
            inputStream = getInputStream(metadataPath, false);
            // NOTE(review): platform default charset is used here; if the cluster emits
            // UTF-8 an explicit charset may be needed -- confirm before changing.
            reader = new BufferedReader(new InputStreamReader(inputStream));

            // Fix: removed the dead 'reader != null' ternary -- 'new BufferedReader(...)'
            // can never yield null, so the null branch was unreachable.
            return getCustomMetadata(reader);
        } catch (StorageAdapterLiteralException sale) {
            // Literal exceptions carry a user-facing message; propagate unchanged.
            throw sale;
        } catch (StorageAdapterException e) {
            LOG.log(Level.FINE, "Unexpected Exception reading CustomMetadata", e);
            throw new StorageAdapterLiteralException("Error reading custom metadata", e);
        } catch (IOException e) {
            LOG.log(Level.FINE, "Unexpected Exception reading CustomMetadata", e);
            throw new StorageAdapterException("Error reading custom metadata", e);
        } finally {
            // Close both streams, remembering any RuntimeException so it is not lost,
            // then release the adapter's connection before rethrowing it.
            RuntimeException e = null;
            try {
                if (inputStream != null) {
                    inputStream.close();
                }
            } catch (IOException io) {
                LOG.log(Level.INFO, "Unexpected Exception closing streams after reading CustomMetadata", io);
            } catch (RuntimeException re) {
                e = re;
            }
            try {
                if (reader != null) {
                    reader.close();
                }
            } catch (IOException io) {
                LOG.log(Level.INFO, "Unexpected Exception closing streams after reading CustomMetadata", io);
            } catch (RuntimeException re) {
                e = re;
            }

            close();

            if (e != null) {
                throw e;
            }
        }
    }

    /**
     * Builds the metadata-mount path for an object. Files map directly under the metadata
     * mount; directories map into their ".directory-metadata" subtree, using "settings"
     * when writing metadata and "info" when reading it.
     *
     * @param fileURL
     *            object path, with or without a leading slash
     * @param fileType
     *            FILE, DIRECTORY, etc.
     * @param setting
     *            true when the path will be used to set metadata (directories only)
     * @return the full metadata path for the object
     */
    protected String getMetadataPathForObject(final String fileURL, FileType fileType, boolean setting) {
        // Normalize to a leading-slash path.
        String filePath = fileURL.startsWith("/") ? fileURL : "/" + fileURL;

        if (fileType.equals(FileType.DIRECTORY)) {
            // Directory paths must end in '/' before the metadata subtree is appended.
            if (!filePath.endsWith("/")) {
                filePath += "/";
            }
            String subtree = setting ? ".directory-metadata/settings" : ".directory-metadata/info";
            return HttpGatewayConstants.METADATA_MOUNT_URL_DIR + filePath + subtree;
        }

        return HttpGatewayConstants.METADATA_MOUNT_URL_DIR + filePath;
    }

    /**
     * Sets the object's custom metadata from a string, using the default annotation.
     * Delegates to the three-argument overload with a null annotation.
     *
     * @inheridoc
     */
    public void setCustomMetadata(final String fileURL, String metadata) throws StorageAdapterException {
        setCustomMetadata(fileURL, metadata, null);
    }

    /**
     * Sets the object's custom metadata from a string.
     *
     * NOTE(review): {@code annotation} is ignored on this code path -- confirm callers do
     * not expect per-annotation behavior on HCAP 2.x.
     *
     * @param fileURL
     *            object whose custom metadata is being set
     * @param metadata
     *            XML custom metadata to store
     * @param annotation
     *            unused here
     * @throws StorageAdapterException
     *             if the metadata lookup or the write fails
     */
    public void setCustomMetadata(final String fileURL, String metadata, String annotation)
            throws StorageAdapterException {
        // Cannot currently set custom metadata on a directory, so look the object up as a FILE.
        FileMetadata fileInfo = getMetadata(fileURL, null, FileType.FILE, false);

        // try-with-resources replaces the manual close (and the dead 'is != null' check).
        // We have already received a success or failure from HCP by the time close() runs,
        // so a close()-time IOException is safe to just log and move on.
        try (InputStream is = new ByteArrayInputStream(metadata.getBytes())) {
            setCustomMetadata(fileURL, is, fileInfo.getFileType());
        } catch (IOException e) {
            LOG.log(Level.INFO, "IO Error while setting custom metadata", e);
        }
    }

    /**
     * Opens a stream over the object's custom metadata using the default annotation.
     * Delegates to the four-argument overload with a null annotation.
     */
    public InputStream getCustomMetadataStream(String filePath, Long version, FileType fileType)
            throws StorageAdapterException {
        return getCustomMetadataStream(filePath, version, fileType, null);
    }

    /**
     * Opens an InputStream over the object's custom-metadata.xml. The caller owns the
     * returned stream and must close it.
     *
     * NOTE(review): {@code version} and {@code annotation} are not used on this code path
     * -- confirm that is expected for HCAP 2.x.
     */
    public InputStream getCustomMetadataStream(String filePath, Long version, FileType fileType, String annotation)
            throws StorageAdapterException {
        final String metadataPath =
                this.getMetadataPathForObject(filePath, fileType, false) + "/custom-metadata.xml";
        return getInputStream(metadataPath, false);
    }

    /**
     * Sets the object's custom metadata from a stream, using the default annotation.
     * Delegates to the three-argument overload with a null annotation.
     *
     * @inheridoc
     */
    public void setCustomMetadataStream(final String fileURL, InputStream metadata) throws StorageAdapterException {
        setCustomMetadataStream(fileURL, metadata, null);
    }

    /**
     * Sets the object's custom metadata from a stream.
     *
     * NOTE(review): {@code annotation} is ignored on this code path -- confirm callers do
     * not expect per-annotation behavior on HCAP 2.x.
     */
    public void setCustomMetadataStream(final String fileURL, InputStream metadata, String annotation)
            throws StorageAdapterException {
        // Look the object up as a FILE first to obtain its actual type for the write.
        final FileMetadata fileInfo = getMetadata(fileURL, null, FileType.FILE, false);
        setCustomMetadata(fileURL, metadata, fileInfo.getFileType());
    }

    /**
     * Writes the given stream straight into the object's custom-metadata.xml file.
     *
     * Added this because I don't want to go and have to get the metadata just to get the
     * file type. There is more work to be done here: this version of this method needs to
     * be exported as part of the StorageAdapter interface (taking an InputStream instead
     * of a string), with the call coming from a job rather than writeObjectFromStream.
     */
    protected void setCustomMetadata(final String fileURL, InputStream is, final FileType fileType)
            throws StorageAdapterException {
        final String metadataPath =
                getMetadataPathForObject(fileURL, fileType, false) + "/custom-metadata.xml";
        writeObjectFromStream(getHost(), metadataPath, is, null);
    }

    /**
     * Deletes the object's custom metadata by deleting its custom-metadata.xml file
     * directly. The delete call's custom-metadata flag is explicitly false because we are
     * targeting the metadata file itself rather than asking delete() to resolve it.
     */
    public void deleteCustomMetadata(final String path) throws StorageAdapterException {
        final String metadataPath =
                getMetadataPathForObject(path, FileType.FILE, false) + "/custom-metadata.xml";
        delete(metadataPath, false, DeleteJob.Operation.DELETE, null, false);
    }

    /**
     * Builds a fresh FileMetadata of the given type and fills it from the object's
     * core metadata via the two-argument overload.
     *
     * NOTE(review): {@code queryString} and {@code isVersion} are not used on this code
     * path -- confirm that is expected for HCAP 2.x.
     *
     * @inheritDoc
     */
    public FileMetadata getMetadata(String filePath, final String queryString, FileType fileType, boolean isVersion)
            throws StorageAdapterException {
        final FileMetadata result = new FileMetadata();
        result.setFileType(fileType);
        getMetadata(filePath, result);
        return result;
    }

    /**
     * Populates {@code metadata} from the object's core-metadata.xml: shred, hold,
     * retention date (there are no retention classes or search index in HCAP 2.6) and
     * POSIX information: ctime, mtime, atime, UID, GID, permissions.
     *
     * Most individual fields are parsed inside their own try/catch so one malformed
     * element does not prevent the rest of the metadata from loading.
     *
     * @param pathOfObjFile
     *            the file path to get metadata from
     * @param metadata
     *            already filled out with some existing metadata (or a new one in the case
     *            of the overload above); updated in place
     * @throws StorageAdapterException
     *             if the metadata XML cannot be fetched or parsed at all
     */
    public void getMetadata(final String pathOfObjFile, FileMetadata metadata) throws StorageAdapterException {
        String filePath = (pathOfObjFile.startsWith("/") ? "" : "/") + pathOfObjFile;

        // we need to do a head request here to figure out if this is a directory or a file because
        // we cannot rely on
        // this having a slash
        if (metadata.getFileType() == FileType.UNKNOWN) {
            FileType fileType = getFileType(filePath);
            metadata.setFileType(fileType);
        }

        // All system metadata is exposed as XML in core-metadata.xml under the metadata mount.
        String metadataPath = this.getMetadataPathForObject(filePath, metadata.getFileType(), false);
        metadataPath = metadataPath + "/core-metadata.xml";

        InputStream xmlInputStream = null;

        try {
            // 'false' disables DTD validation for the JDOM parse.
            SAXBuilder builder = new SAXBuilder(false);
            xmlInputStream = getInputStream(metadataPath, false);
            Document fileXML = builder.build(xmlInputStream);

            Element coreMetadata = fileXML.getRootElement();
            Namespace ns = coreMetadata.getNamespace();

            // Times from the cluster are seconds since epoch; Java Dates want milliseconds.
            String value = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_CREATION_TIME, ns).getValue();
            Date creationTime = new Date(Long.parseLong(value) * 1000);
            metadata.setCreationTime(creationTime);

            value = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_CTIME, ns).getValue();
            Date cTime = new Date(Long.parseLong(value) * 1000);
            metadata.setCtime(cTime);

            value = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_ATIME, ns).getValue();
            Date accessTime = new Date(Long.parseLong(value) * 1000);
            metadata.setAccessTime(accessTime);

            value = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_MTIME, ns).getValue();
            Date modeTime = new Date(Long.parseLong(value) * 1000);
            metadata.setModTime(modeTime);

            // UID/GID/shred are best-effort: log and continue on any parse failure.
            try {
                metadata.setUid(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_PERMS_UID, ns).getValue());
            } catch (Exception e) {
                LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
            }

            try {
                metadata.setGid(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_PERMS_GID, ns).getValue());
            } catch (Exception e) {
                LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
            }

            try {
                metadata.setShred(
                        Boolean.valueOf(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_SHRED, ns).getValue()));
            } catch (Exception e) {
                LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
            }

            // Rentention is set differently for a directory and a file
            if (metadata.getFileType().equals(FileType.DIRECTORY)) {
                try {
                    String retentionStr = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_RETENTION_FOR_DIR, ns)
                            .getValue();
                    if (retentionStr != null && retentionStr.length() > 0) {
                        Retention dirRetention = Retention.fromHcapValue(retentionStr);
                        metadata.setRetention(dirRetention);
                    }
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }
            } else {
                try {
                    Retention fileRetention = Retention.fromRetentionValue(Long.parseLong(
                            coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_RETENTION, ns).getValue()));
                    metadata.setRetention(fileRetention);
                } catch (NumberFormatException e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }
            }

            // File-only fields: size, mode, hash, DPL, hold.
            if (metadata.getFileType().equals(FileType.FILE)) {
                try {
                    metadata.setSize(Long
                            .parseLong(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_SIZE, ns).getValue()));
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }

                // Permissions arrive as an octal string (hence radix 8).
                String modeStr = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_PERMS, ns).getValue();
                int mode = Integer.parseInt(modeStr, 8);
                try {
                    metadata.setFileMode(FileMetadata.covertHCAPToUNIX(mode, metadata.getFileType()));
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }
                metadata.setHashScheme(
                        coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_HASH_SCHEME, ns).getValue());
                metadata.setHashValue(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_HASH, ns).getValue());
                try {
                    metadata.setDpl(Integer
                            .parseInt(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_DPL, ns).getValue()));
                } catch (NumberFormatException e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }
                try {
                    metadata.setRetentionHold(Boolean
                            .valueOf(coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_HOLD, ns).getValue()));
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }

            } else if (metadata.getFileType().equals(FileType.DIRECTORY)) {
                // Directories only carry a mode (also octal).
                String modeStr = coreMetadata.getChild(HttpGatewayConstants.MD_PARAM_PERMS, ns).getValue();
                int mode = Integer.parseInt(modeStr, 8);
                try {
                    metadata.setDirMode(FileMetadata.covertHCAPToUNIX(mode, metadata.getFileType()));
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Exception parsing metadata for: " + filePath, e);
                }
            }

            //
            // Now call down to children and see if there is any other data to collect
            // This is where you add support for additional metadata for future versions
            //
            getAdditionalMetadata(coreMetadata, metadata);

            // Figure out whether or not there is custom-metadata: the size element is
            // present only when custom metadata exists on the object.
            Element customMetadataSizeElem = coreMetadata
                    .getChild(HttpGatewayConstants.MD_PARAM_HAS_CUSTOM_METADATA_SIZE, ns);
            CustomMetadata customMetadata = null;
            if (customMetadataSizeElem != null) {
                customMetadata = new CustomMetadata(CustomMetadata.Form.PROFILED, filePath);
            }
            metadata.setCustomMetadata(customMetadata);

        } catch (JDOMException e) {
            LOG.log(Level.WARNING, "JDOMException parsing additional metadata for: " + filePath, e);
            throw new StorageAdapterLiteralException("Error parsing metadata for file " + filePath, e);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, "getting metadata of", pathOfObjFile);
        } finally {
            // Close the xml stream we used
            try {
                if (xmlInputStream != null) {
                    xmlInputStream.close();
                }
            } catch (Exception e) {
                // do nothing
            }

            // Close the adapter.
            try {
                close(); // Closes the HttpRequest
            } catch (Exception e) {
                // do nothing
            }
        }
    }

    /**
     * Resolves all addresses for the host portion of a URL.
     * @todo This is repeated code that is already in the HCAPProfile -- could be removed.
     */
    public static List<InetAddress> getHostAddresses(URL location) throws UnknownHostException {
        String host = location.getHost();
        return getHostAddresses(host);
    }

    /**
     * Resolves every InetAddress registered for the given host name.
     */
    public static List<InetAddress> getHostAddresses(String hostName) throws UnknownHostException {
        InetAddress[] resolved = InetAddress.getAllByName(hostName);
        return Arrays.asList(resolved);
    }

    /**
     * Resolves the cluster addresses for a profile: by DNS lookup of its hostname when the
     * profile connects by DNS, otherwise by resolving each configured IP string.
     */
    public static List<InetAddress> getHostAddresses(HCAPProfile profile) throws UnknownHostException {
        if (profile.isConnectByDns()) {
            return getHostAddresses(profile.getHostname());
        }
        List<InetAddress> addresses = new ArrayList<InetAddress>();
        for (String ipString : profile.getIpAddressList()) {
            addresses.addAll(getHostAddresses(ipString));
        }
        return addresses;
    }

    public ArcMoverFile createArcMoverFileObject(XMLStreamReader xmlr, ArcMoverDirectory caller)
            throws StorageAdapterException {
        ArcMoverFile retVal = null;
        try {
            ArcMoverDirectory rootDir = ArcMoverDirectory.getDirInstance(this.getProfile(), caller.getPath(), this);

            String fileName = xmlr.getAttributeValue(null, HttpGatewayConstants.FILE_NAME);
            try {
                // HCP sends incorrectly encoded filenames. Not all chars are encoded. Fix it.
                fileName = RFC2396Encoder.fixEncoding(fileName);
            } catch (UnsupportedEncodingException e) {
                throw new StorageAdapterException(e.getMessage(), e);
            }

            String fileTypeStr = xmlr.getAttributeValue(null, HttpGatewayConstants.FILE_TYPE);
            FileType fileType = FileType.FILE;
            if (fileTypeStr.equals(DIRECTORY)) {
                fileType = FileType.DIRECTORY;
            } else if (fileTypeStr.equals(SYMLINK)) {
                fileType = FileType.SYMLINK;
            }

            // The time provided by the cluster is in seconds since epoch. Java uses milliseconds
            // since epoch, so we must multiply by 1000
            String modifyTimeString = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_DEFAULT_PARAM_MTIME);
            String accessTimeString = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_DEFAULT_PARAM_ATIME);
            Date modifyTime = (modifyTimeString == null ? null : new Date(Long.parseLong(modifyTimeString) * 1000));
            Date accessTime = (accessTimeString == null ? null : new Date(Long.parseLong(accessTimeString) * 1000));

            long size = 0;
            int mode = 0;
            Long uid = Long.valueOf(0);
            Long gid = Long.valueOf(0);

            try {
                size = Long.parseLong(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_SIZE));
            } catch (NumberFormatException e) {
                // do nothing
            }
            try {
                mode = FileMetadata.covertHCAPToUNIX(
                        Integer.parseInt(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_PERMS)),
                        fileType);
            } catch (NumberFormatException e) {
                // do nothing
            }
            try {
                uid = UidGidUtil.validateId(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_PERMS_UID));
            } catch (NumberFormatException e) {
                // do nothing
            }
            try {
                gid = UidGidUtil.validateId(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_PERMS_GID));
            } catch (NumberFormatException e) {
                // do nothing
            }

            FileMetadata metadata = new FileMetadata(fileType, null, // creation time
                    null, // ctime
                    modifyTime, accessTime, size, fileName.startsWith("."), // hidden
                    null, // file perms
                    null, // directory perms
                    uid, gid, null, // hcap version
                    null, // dpl
                    null, // hash scheme
                    null, // hash value
                    null, // shred
                    null, // retention
                    null, // retention hold
                    null, // search index
                    null, // replicated
                    null, // acl
                    null, // owner
                    null); // custom metadata
            if (fileType.equals(FileType.DIRECTORY) && !fileName.equals(DOT)) {
                metadata.setDirMode(mode);
                retVal = ArcMoverDirectory.getDirInstance(rootDir, fileName, metadata, this);
            } else if (fileType.equals(FileType.FILE)) {
                metadata.setFileMode(mode);
                ArcMoverFile moverFile = ArcMoverFile.getFileInstance(getProfile(), rootDir, fileName, metadata);
                retVal = moverFile;
            } else if (FileType.SYMLINK == fileType) {
                retVal = ArcMoverSymlink.getSymlinkInstance(rootDir, fileName, null, this);
            }

        } catch (Exception e) {
            String msg = "Error parsing directory for: " + caller.getPath();
            LOG.log(Level.INFO, msg, e);
            IOException e2 = new IOException(msg);
            e2.initCause(e);
            throw new StorageAdapterException(e2.getMessage(), e2);
        }
        return retVal;
    }

    ////////////////////////////////////////////////// Protected Methods
    ////////////////////////////////////////////////// /////////////////////////////////////////////////////

    /**
     * Returns the file type for an object on HCP.
     *
     * There is no clean way to ask the default namespace whether an object is a directory, so we
     * probe for the object's directory metadata path: if that path exists the object is a
     * directory, otherwise we assume it is a plain file.
     *
     * @param path
     *            path of the object on the cluster
     * @return {@code FileType.DIRECTORY} if the directory metadata path exists, otherwise
     *         {@code FileType.FILE}
     */
    protected FileType getFileType(String path) throws StorageAdapterException {
        try {
            String dirMetadataPath = getMetadataPathForObject(path, FileType.DIRECTORY, false);
            if (exists(dirMetadataPath)) {
                return FileType.DIRECTORY;
            }
        } catch (StorageAdapterException e) {
            // Expected when the object is not a directory -- fall through and report FILE.
        }
        return FileType.FILE;
    }

    /**
     * Called from {@code getMetadata}. This allows children of this class to be able to collect
     * additional metadata; for example the default HCP adapter gets the search index.
     *
     * @param coreMetadata
     *            XML element holding the core metadata returned by the cluster
     * @param metadata
     *            the {@link FileMetadata} instance to fill in
     * @return true if metadata was added; this base implementation adds nothing and always
     *         returns false
     */
    protected boolean getAdditionalMetadata(final Element coreMetadata, FileMetadata metadata) {
        return false;
    }

    /**
     * Generate the Query String as a list of {@link NameValuePair}s. The query parameters can
     * contain the initial Archive Metadata values.
     *
     * We get from the file metadata: shred, hold, retention value (no search index or retention
     * classes in 2.6). POSIX data: uid, gid, perms. We always use the targets (so we don't add to
     * the string): dpl, hash, hashscheme.
     *
     * Sometimes we need to just get the POSIX values (ex. when creating a dir); in that case pass
     * in true for the second param.
     *
     * @param md
     *            metadata of the object being written
     * @param addJustPOSIX
     *            when true, only the POSIX (file/dir mode, uid, gid) parameters are generated
     * @return the URL-encoded query string, or null if no parameters apply
     */
    protected String generateQueryParameters(FileMetadata md, boolean addJustPOSIX) {
        List<NameValuePair> params = new ArrayList<NameValuePair>();

        if (md.hasFileMode()) {
            params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PERMS_FILE,
                    String.valueOf(md.getFileMode())));
        }

        if (md.hasDirMode()) {
            params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PERMS_DIR,
                    String.valueOf(md.getDirMode())));
        }

        if (md.hasUid()) {
            params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PERMS_UID,
                    String.valueOf(md.getUid())));
        }

        if (md.hasGid()) {
            params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PERMS_GID,
                    String.valueOf(md.getGid())));
        }

        if (!addJustPOSIX) {
            if (md.hasNonDefaultRetention()) {
                params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_RETENTION,
                        String.valueOf(md.getRetention().getHCAPValue())));
            }

            if (md.hasShred()) {
                // Translate the boolean shred flag into the gateway's expected literal.
                String shredValue = md.isShred() ? HttpGatewayConstants.PARAM_SHRED_TRUE
                        : HttpGatewayConstants.PARAM_SHRED_FALSE;
                params.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_SHRED, shredValue));
            }

            // Subclasses may append version-specific parameters (e.g. retention hold in 3.0+).
            addAdditionalQueryParams(md, params);
        }

        String queryString = null;
        if (!params.isEmpty()) {
            queryString = URLEncodedUtils.format(params, HCPMoverConstants.SUPPORTED_CHARSET);
            LOG.log(Level.FINE, "generateQueryParameters=" + queryString);
        }
        return queryString;
    }

    /**
     * Called from {@code generateQueryParameters}. Gives children of this class the ability to
     * add params to the list before creating the string.
     *
     * Retention hold is not set via the PUT call in HCAP 3.0 and greater so it is added here by
     * those subclasses.
     *
     * @param md
     *            metadata of the object being written
     * @param params
     *            the parameter list to append to
     * @return true if params were added; this base implementation adds nothing and always
     *         returns false
     */
    protected boolean addAdditionalQueryParams(final FileMetadata md, List<NameValuePair> params) {
        return false;
    }

    /**
     * Hook for subclasses to translate a delete operation into extra query parameters.
     * This base implementation supports no special delete operations and returns null
     * (presumably meaning "no extra parameters" to the caller -- confirm against call sites).
     *
     * @param operation
     *            the delete variant being performed
     * @param reason
     *            free-text reason supplied with the operation
     * @return query parameters for the operation, or null when none apply
     * @throws StorageAdapterLiteralException
     *             never thrown here; declared for overriding subclasses
     */
    protected List<NameValuePair> handleDeleteOperation(DeleteJob.Operation operation, String reason)
            throws StorageAdapterLiteralException {
        return null;
    }

    /**
     * @return the name of the HTTP response header carrying the cluster's error message;
     *         subclasses may override for gateway versions that use a different header.
     */
    protected String getErrorHeader() {
        return HttpGatewayConstants.HEADER_ERROR_MESSAGE_DEF;
    }

    /**
     * Helper method to trap exceptions and log useful data and debugging tips. All caught
     * exceptions are rethrown after logging.
     *
     * @param cookie
     *            carries the target host and request; on success the response and a fresh
     *            context are stored back into it
     * @throws IOException
     *             rethrown from the underlying HttpClient call after logging
     */
    protected void executeMethod(HcapAdapterCookie cookie) throws IOException {
        try {
            // NOTE(review): this context is created but never passed to httpClient.execute(),
            // so the cookie is paired with an empty context -- confirm this is intentional.
            BasicHttpContext context = new BasicHttpContext();
            HttpResponse response = httpClient.execute(cookie.getHost(), cookie.getRequest());
            cookie.setResponseAndContext(response, context);
        } catch (ConnectionPoolTimeoutException e) {
            // Pool exhaustion usually means a connection was never released back to the pool.
            LOG.log(Level.FINE,
                    "Timed out waiting for connection from pool.  This may be caused by a failure to call HttpMethod.release()",
                    e);
            throw e;
        } catch (IOException e) {
            LOG.log(Level.FINE, "Unexpected IOException", e);
            throw e;
        }
    }

    // -------------------------------------------------------------------------------------------

    /**
     * HttpClient request object for the cluster's custom "MKDIR" HTTP method, used to create
     * directories. Only the method name differs from a standard request.
     */
    private static class HttpMkdir extends HttpRequestBase {
        public final static String METHOD_NAME = "MKDIR";

        private HttpMkdir(URI uri) {
            setURI(uri);
        }

        /** @return the custom HTTP method name, {@code "MKDIR"}. */
        @Override
        public String getMethod() {
            return METHOD_NAME;
        }

        @Override
        public Object clone() throws CloneNotSupportedException {
            return super.clone();
        }
    }

    /**
     * Fetches the server's SSL certificate chain by issuing a HEAD request to the host using the
     * "getcerts" scheme.
     *
     * Only one in-flight request per adapter is allowed: the cookie is saved under
     * savingCookieLock and a second concurrent call fails fast with a RuntimeException.
     *
     * @return the collected certificate chain (sslCerts field, presumably populated as a side
     *         effect of the SSL handshake during executeMethod -- confirm in the trust manager)
     * @throws SSLCertException
     *             wrapping any SSLException raised during the handshake, carrying the partial
     *             chain collected so far
     * @throws IOException
     *             on other transport failures
     */
    public SSLCertChain getSSLCerts() throws IOException, StorageAdapterException {
        HttpHost httpHost = new HttpHost(getHost(), profile.getPort(), "getcerts");
        HttpUriRequest request = new HttpHead("/");

        // Eventually we will just return this cookie which will be passed back to the caller.
        HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
        synchronized (savingCookieLock) {
            if (savedCookie != null) {
                throw new RuntimeException(
                        "This adapter already has a current connection to host -- cannot create two at once.");
            }
            savedCookie = cookie;
        }
        try {
            executeMethod(cookie);
        } catch (SSLException e) {
            LOG.log(Level.WARNING, "Exception getting certs.  sslCerts = " + sslCerts, e);
            throw new SSLCertException(e, sslCerts);
        } finally {
            // Always release the connection and clear the saved cookie for the next caller.
            close();
        }
        LOG.finer("Returning sslCerts = " + sslCerts);

        return sslCerts;
    }

    /**
     * @return true: this adapter copies directory metadata when migrating.
     */
    public boolean copyDirMetadataWhenMigrating() {
        return true;
    }

}