com.archivas.clienttools.arcutils.impl.adapter.Hcp3AuthNamespaceAdapter.java Source code

Java tutorial

Introduction

Here is the source code for com.archivas.clienttools.arcutils.impl.adapter.Hcp3AuthNamespaceAdapter.java

Source

// Copyright 2007 Hitachi Data Systems
// All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package com.archivas.clienttools.arcutils.impl.adapter;

import com.archivas.clienttools.arcutils.api.jobs.DeleteJob;
import com.archivas.clienttools.arcutils.config.HCPMoverConstants;
import com.archivas.clienttools.arcutils.model.*;
import com.archivas.clienttools.arcutils.profile.AbstractProfileBase;
import com.archivas.clienttools.arcutils.profile.Hcp3AuthNamespaceProfile;
import com.archivas.clienttools.arcutils.utils.Base64Utils;
import com.archivas.clienttools.arcutils.utils.FileUtil;
import com.archivas.clienttools.arcutils.utils.RFC2396Encoder;
import com.archivas.clienttools.arcutils.utils.StringUtils;
import com.archivas.clienttools.arcutils.utils.net.GetCertsX509TrustManager;
import com.archivas.clienttools.arcutils.utils.net.SSLCertChain;
import com.archivas.clienttools.arcutils.utils.net.SSLCertificateCallback;
import org.apache.http.*;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.utils.URIUtils;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.conn.ConnectionPoolTimeoutException;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.protocol.BasicHttpContext;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.xml.stream.XMLStreamReader;
import java.awt.datatransfer.StringSelection;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Adapter for accessing objects (Files, Directories, and their Metadata) in an archive, using the
 * HTTP protocol (or derived protocols such as HTTPS).
 *
 */
public class Hcp3AuthNamespaceAdapter extends HCAPAdapter {

    public static Logger LOG = Logger.getLogger(Hcp3AuthNamespaceAdapter.class.getName());
    // Connection profile (host, port, protocol, namespace credentials) for this adapter.
    Hcp3AuthNamespaceProfile profile;
    // Path prefix for gateway "proc" requests, built from HttpGatewayConstants.PROC_URL.
    public static final String PROC_PATH = "/" + HttpGatewayConstants.PROC_URL + "/";

    /**
     * Uses the HttpClient provided instead of creating a new one. This lets the CopyJob reuse the
     * HttpClient (which caches the connections) for an entire Job. This is dirty, because not all
     * adapters have an HTTPClient.
     * 
     * @param profile
     *            connection profile (host, port, protocol, namespace credentials) for this adapter
     * @param sslExceptionCallback
     *            callback used when an untrusted SSL certificate must be accepted or rejected
     * @param httpClient
     *            client to reuse; when null, a fresh client is created via init()
     * @throws StorageAdapterException
     *             if we cannot initialize
     */
    public Hcp3AuthNamespaceAdapter(Hcp3AuthNamespaceProfile profile, SSLCertificateCallback sslExceptionCallback,
            AbstractHttpClient httpClient) throws StorageAdapterException {
        super();
        this.profile = profile;
        this.httpClient = httpClient;
        // Only build our own client when the caller did not supply one to share.
        if (httpClient == null) {
            init(sslExceptionCallback);
        }
        this.sslExceptionCallback = sslExceptionCallback;

        // Unique-ish name used in debug/trace output; adapterCnt is a shared counter.
        debugName = "hcap3AuthNamespaceAdapter" + ++adapterCnt;
    }

    /**
     * Installs the profile used for every subsequent connection through this adapter. The profile
     * carries the namespace username, password and other connection settings for an authenticated
     * namespace. Profiles of any other type are silently ignored and the current profile is kept.
     * 
     * @param profile
     *            the profile to use for all connections through this adapter
     */
    public void setProfile(AbstractProfileBase profile) {
        if (!(profile instanceof Hcp3AuthNamespaceProfile)) {
            // Wrong profile type for this adapter: leave the existing profile in place.
            return;
        }
        this.profile = (Hcp3AuthNamespaceProfile) profile;
    }

    /**
     * Returns the profile currently in use for all connections through this adapter.
     * 
     * @return the active Hcp3AuthNamespaceProfile
     */
    public Hcp3AuthNamespaceProfile getProfile() {
        return profile;
    }

    /**
     * Creates a directory on the namespace by issuing a PUT of {@code path} with the
     * {@code ?type=directory} query.
     * 
     * @param path
     *            directory path to create
     * @param metadata
     *            accepted for interface compatibility; not used by this implementation
     * @throws StorageAdapterException
     *             if URL construction fails or the server reports an error
     * @inheritDoc
     */
    public void mkdir(final String path, FileMetadata metadata) throws StorageAdapterException {
        HttpHost httpHost = new HttpHost(getHost(), profile.getPort(), profile.getProtocol());

        String query = "?type=directory";

        // Constructing a uri handles the filename encoding for us here
        String uriPath = getProfile().resolvePath(path);
        URL url;
        URI uri;
        try {
            String urlPath = uriPath + query;
            // NOTE(review): this URL is built without profile.getPort(), unlike the URIUtils-based
            // paths elsewhere in this class -- presumably the port from httpHost above is what the
            // request actually uses; confirm.
            url = new URL(profile.getProtocol(), getHost(), urlPath);
            uri = url.toURI();
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URI for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new directory on the server", e);
        } catch (MalformedURLException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URL for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new directory on the server", e);
        }

        HttpUriRequest request = new HttpPut(uri);
        try {

            // Eventually we will just return this cookie which will be passed back to the caller.
            HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
            // Only one in-flight request per adapter instance is allowed.
            synchronized (savingCookieLock) {
                if (savedCookie != null) {
                    throw new RuntimeException(
                            "This adapter already has a current connection to host -- cannot create two at once.");
                }
                savedCookie = cookie;
            }
            executeMethod(cookie);
            this.handleHttpResponse(cookie.getResponse(), "creating directory", path);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, "creating directory", path);
        } finally {
            // Releases the connection and clears savedCookie for the next request.
            close();
        }
    }

    /**
     * Creates a symlink at {@code path} pointing at {@code symlinkTargetPath} via a PUT with the
     * {@code ?type=symlink} query. The parent directory is created first if needed, because HCP
     * symlink creation does not create parent directories.
     *
     * @param path
     *            path of the symlink to create (trailing slash is stripped)
     * @param symlinkTargetPath
     *            target the symlink should point to
     * @throws StorageAdapterException
     *             if URL construction fails or the server reports an error
     */
    public void mkSymlink(String path, String symlinkTargetPath) throws StorageAdapterException {
        // remove any trailing / from the path
        if (path.length() > 1 && path.endsWith("/")) {
            path = path.substring(0, path.length() - 1);
        }

        // first try and create the parent directory, because HCP symlink create does not create
        // parent dirs for the symlink. If there is a conflict, ignore it; the parent dir didn't
        // need creating (it
        // could have been created by another thread operating on the dir at the same time)
        String parentPath = FileUtil.getPath(path);
        if (parentPath != null && !parentPath.equals(path) && !exists(parentPath)) {
            try {
                mkdir(parentPath, null);
            } catch (StorageAdapterException e) {
                Integer statusCode = e.getStatusCode();
                if (statusCode != null && statusCode == HttpStatus.SC_CONFLICT) {
                    // ignore this. Another thread must have created it already.
                } else {
                    throw e;
                }
            }
        }

        HttpHost httpHost = new HttpHost(getHost(), profile.getPort(), profile.getProtocol());

        String query = "?type=symlink&symlink-target=";

        // Constructing a uri handles the filename encoding for us here
        String uriPath = getProfile().resolvePath(path);
        URL url;
        URI uri;
        try {
            // NOTE(review): symlinkTargetPath is appended to the query without explicit encoding;
            // presumably URL/toURI handles what HCP needs here, but confirm for targets containing
            // '&', '#' or spaces.
            String urlPath = uriPath + query + symlinkTargetPath;
            url = new URL(profile.getProtocol(), getHost(), urlPath);
            uri = url.toURI();
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URI for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new symlink on the server", e);
        } catch (MalformedURLException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URL for : " + uriPath);
            throw new StorageAdapterLiteralException("Error making a new symlink on the server", e);
        }

        HttpUriRequest request = new HttpPut(uri);
        try {
            // Eventually we will just return this cookie which will be passed back to the caller.
            HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
            // Only one in-flight request per adapter instance is allowed.
            synchronized (savingCookieLock) {
                if (savedCookie != null) {
                    throw new RuntimeException(
                            "This adapter already has a current connection to host -- cannot create two at once.");
                }
                savedCookie = cookie;
            }
            executeMethod(cookie);
            this.handleHttpResponse(cookie.getResponse(), "creating symlink", path);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, "creating symlink", path);
        } finally {
            close();
        }
    }

    /**
     * Writes an object's content from {@code is} to {@code targetPath} on {@code targetNode}.
     *
     * @inheritDoc
     */
    public void writeObjectFromStream(final String targetNode, final String targetPath, final InputStream is,
            final FileMetadata ingestionMetadata) throws StorageAdapterException {
        // Plain object write: this is not a custom-metadata PUT.
        doWriteStream(targetNode, targetPath, is, ingestionMetadata, false);
    }

    /**
     * Same as the six-argument overload, but with no annotation name (writes either object data or
     * the default custom metadata depending on {@code writingCustomMetadata}).
     */
    public int doWriteStream(final String targetNode, final String targetPath, final InputStream is,
            final FileMetadata ingestionMetadata, final boolean writingCustomMetadata)
            throws StorageAdapterException {
        final String noAnnotation = null;
        return doWriteStream(targetNode, targetPath, is, ingestionMetadata, writingCustomMetadata, noAnnotation);
    }

    /**
     * Streams either object content or custom metadata to the server.
     *
     * @param targetNode
     *            host to write to
     * @param targetPath
     *            destination path of the object
     * @param is
     *            content to stream
     * @param ingestionMetadata
     *            metadata turned into query parameters for an object write; ignored for CM writes
     * @param writingCustomMetadata
     *            true when {@code is} carries custom metadata rather than object content
     * @param annotation
     *            annotation name for a CM write, or null for the default custom metadata
     * @return the status code reported by the underlying write
     * @throws StorageAdapterException
     *             if the write fails
     */
    public int doWriteStream(final String targetNode, final String targetPath, final InputStream is,
            final FileMetadata ingestionMetadata, final boolean writingCustomMetadata, final String annotation)
            throws StorageAdapterException {
        // Activity description used in log/error messages.
        final String activity = writingCustomMetadata ? "writing custom metadata for" : "writing";

        String queryString = null;
        if (writingCustomMetadata) {
            // This is the custom metadata we're PUTing.
            queryString = buildCMQueryString(null, annotation);
        } else if (ingestionMetadata != null) {
            queryString = generateQueryParameters(ingestionMetadata, true);
        }

        return reallyDoWriteStream(targetNode, targetPath, queryString, is, activity, null);
    }

    /**
     * Core PUT implementation shared by object writes and custom-metadata writes. Streams
     * {@code is} to {@code targetFile} on {@code targetNode}, using Expect: 100-continue so the
     * server can reject the request before the body is sent.
     *
     * @param targetNode
     *            host to write to
     * @param targetFile
     *            path of the object on the namespace
     * @param queryString
     *            optional query parameters (system or custom metadata selectors); may be null
     * @param is
     *            content to stream; length is unknown, so the entity is sent chunked
     * @param activity
     *            human-readable description used in log/error messages
     * @param contentTypeHeader
     *            optional Content-Type header; may be null
     * @return the HTTP status code of the server's response, or -1 if no response was received
     * @throws StorageAdapterException
     *             if URI construction fails or the server reports an error
     */
    public int reallyDoWriteStream(final String targetNode, String targetFile, final String queryString,
            final InputStream is, final String activity, final Header contentTypeHeader)
            throws StorageAdapterLiteralException, StorageAdapterException {
        int statusCode = -1;
        HttpHost httpHost = new HttpHost(targetNode, profile.getPort(), profile.getProtocol());
        URI uri;
        try {
            // URIUtils handles encoding of the path and query for us.
            String resolvedPath = getProfile().resolvePath(targetFile);
            uri = URIUtils.createURI(profile.getProtocol(), targetNode, profile.getPort(), resolvedPath,
                    queryString, null);
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating put URI for : " + targetFile);
            throw new StorageAdapterLiteralException("Error writing object to the server", e);
        }

        HttpPut request = new HttpPut(uri);

        if (contentTypeHeader != null) {
            request.setHeader(contentTypeHeader);
        }

        try {
            // Length -1: the stream length is unknown, so the entity is sent chunked.
            InputStreamEntity isEntity = new InputStreamEntity(is, -1);
            request.setEntity(isEntity);

            // 100-continue lets an auth/permission failure abort before the body is transmitted;
            // wait at most 100ms for the interim response.
            request.getParams().setParameter(CoreProtocolPNames.USE_EXPECT_CONTINUE, Boolean.TRUE);
            request.getParams().setParameter(CoreProtocolPNames.WAIT_FOR_CONTINUE, 100);

            // Eventually we will just return this cookie which will be passed back to the caller.
            HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
            synchronized (savingCookieLock) {
                if (savedCookie != null) {
                    throw new RuntimeException(
                            "This adapter already has a current connection to host -- cannot create two at once.");
                }
                savedCookie = cookie;
            }
            executeMethod(cookie);

            // BUGFIX: statusCode was never assigned before, so this method always returned -1.
            // Capture the real status before response handling, which may throw.
            if (cookie.getResponse() != null && cookie.getResponse().getStatusLine() != null) {
                statusCode = cookie.getResponse().getStatusLine().getStatusCode();
            }

            this.handleHttpResponse(cookie.getResponse(), activity, targetFile);
        } catch (IOException e) {
            this.handleIOExceptionFromRequest(e, activity, targetFile);
        } finally {
            close();
        }

        return statusCode;
    }

    /**
     * Updates the system metadata of the object at {@code path} by POSTing the metadata encoded as
     * query parameters.
     *
     * @param targetNode
     *            host to send the request to
     * @param path
     *            object whose metadata is updated
     * @param metadata
     *            the metadata values to apply
     * @throws StorageAdapterException
     *             if URI construction fails or the server reports an error
     */
    @Override
    public void setMetadata(final String targetNode, final String path, final FileMetadata metadata)
            throws StorageAdapterException {
        final String activity = "setting metadata";

        // All metadata travels in the query string of the POST.
        final String queryString = generateQueryParameters(metadata, false);

        final HttpHost httpHost = new HttpHost(targetNode, profile.getPort(), profile.getProtocol());
        final URI uri;
        try {
            uri = URIUtils.createURI(profile.getProtocol(), targetNode, profile.getPort(),
                    getProfile().resolvePath(path), queryString, null);
        } catch (URISyntaxException e) {
            LOG.log(Level.WARNING, "Unexpected error generating post URI for : " + path);
            throw new StorageAdapterLiteralException("Error writing metadata to the server", e);
        }

        final HttpPost request = new HttpPost(uri);
        try {
            final HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
            // Only one in-flight request per adapter instance is allowed.
            synchronized (savingCookieLock) {
                if (savedCookie != null) {
                    throw new RuntimeException(
                            "This adapter already has a current connection to host -- cannot create two at once.");
                }
                savedCookie = cookie;
            }
            executeMethod(cookie);
            handleHttpResponse(cookie.getResponse(), activity, path);
        } catch (IOException e) {
            handleIOExceptionFromRequest(e, activity, path);
        } finally {
            close();
        }
    }

    /**
     * Lists the version history of the object at {@code path}.
     *
     * @param path
     *            object path whose versions are requested
     * @return a directory view over the object's versions
     * @throws StorageAdapterException
     *             if the version listing cannot be retrieved
     */
    public ArcMoverDirectory getVersions(final String path) throws StorageAdapterException {
        return ArcMoverDirectory.getFileVersions(getProfile(), path, this);
    }

    /**
     * Writes the default custom metadata for {@code file} from an in-memory string.
     *
     * @inheritDoc
     */
    public void setCustomMetadata(final String file, String metadata) throws StorageAdapterException {
        // No annotation name: target the default custom-metadata.
        setCustomMetadata(file, metadata, null);
    }

    /**
     * Writes custom metadata (or a named annotation) for {@code file} from an in-memory string.
     *
     * @param file
     *            path of the object the metadata is attached to
     * @param metadata
     *            the metadata document to write
     * @param annotation
     *            annotation name, or null for the default custom-metadata
     * @throws StorageAdapterException
     *             if the write fails
     */
    public void setCustomMetadata(final String file, String metadata, String annotation)
            throws StorageAdapterException {
        LOG.info("Attempting to write " + ((annotation == null) ? "custom metadata" : "annotation " + annotation)
                + ".");
        // BUGFIX: use an explicit charset instead of String.getBytes(), whose result depends on
        // the platform default encoding. Custom metadata is a document HCP expects as UTF-8 --
        // TODO confirm against the stream-based write path's producers.
        doWriteStream(getHost(), file, new ByteArrayInputStream(metadata.getBytes(StandardCharsets.UTF_8)), null,
                true, annotation);
    }

    /**
     * Removes the custom metadata attached to the object at {@code path}.
     *
     * @param path
     *            object whose custom metadata should be deleted
     * @throws StorageAdapterException
     *             if the delete fails
     */
    public void deleteCustomMetadata(final String path) throws StorageAdapterException {
        LOG.info("Attempting to delete custom metadata");
        // Final flag presumably selects custom-metadata (rather than object) deletion.
        delete(path, false, DeleteJob.Operation.DELETE, null, true);
    }

    /**
     * Reads object metadata with an HTTP HEAD request and decodes it from the response headers.
     * <p>
     * The metadata we collect from the authenticated namespace is: index, shred, retention (class
     * or value) and hold. DPL and hash algorithm are collected for display reasons only -- they
     * always get the target default when objects are moved. POSIX values are not collected for the
     * authenticated namespace.
     * 
     * @param filePath
     *            path of the object or directory to inspect
     * @param queryString
     *            optional query (e.g. a version selector) appended to the path; may be null
     * @param unused
     *            ignored; the type is taken from the response headers instead
     * @param isVersion
     *            true when inspecting a specific version; affects how custom metadata is addressed
     * @return the decoded metadata, or null if the IOException handler chose not to rethrow
     * @throws StorageAdapterException
     *             if the server reports an error
     * @inheritDoc
     */
    public FileMetadata getMetadata(String filePath, final String queryString, FileType unused, boolean isVersion)
            throws StorageAdapterException {

        FileMetadata metadata = null;

        HttpHost httpHost = new HttpHost(getHost(), profile.getPort(), profile.getProtocol());
        String urlPath = filePath;

        if (queryString != null) {
            urlPath = urlPath + queryString;
        }
        // NOTE(review): unlike mkdir, the resolved path+query is handed straight to HttpHead
        // without URL construction -- presumably resolvePath yields a request-ready string;
        // confirm encoding is handled for unusual file names.
        HttpUriRequest request = new HttpHead(getProfile().resolvePath(urlPath));

        try {
            // Eventually we will just return this cookie which will be passed back to the caller.
            HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
            // Only one in-flight request per adapter instance is allowed.
            synchronized (savingCookieLock) {
                if (savedCookie != null) {
                    throw new RuntimeException(
                            "This adapter already has a current connection to host -- cannot create two at once.");
                }
                savedCookie = cookie;
            }
            executeMethod(cookie);
            this.handleHttpResponse(cookie.getResponse(), "getting metadata", filePath);

            // A symlink is reported through a link header; remember its target if present.
            Header headers[] = cookie.getResponse().getAllHeaders();
            String symlinkTarget = null;
            for (Header h : headers) {
                if (HttpGatewayConstants.HEADER_LINK_ANS.equals(h.getName())) {
                    symlinkTarget = h.getValue();
                }
            }

            // resultType indicates what is returned:
            String responseTypeStr = cookie.getResponse().getFirstHeader(HttpGatewayConstants.HEADER_OBJ_TYPE_ANS)
                    .getValue();
            FileType responseType = "object".equals(responseTypeStr) ? FileType.FILE : FileType.DIRECTORY;
            FileType fileType = responseType;
            if (symlinkTarget != null) {
                // if there is a symlink target, then the FileType is symlink. The symlink can be a
                // link to a directory or
                // a link to a file.
                fileType = FileType.SYMLINK;
            }
            if (responseType == FileType.DIRECTORY) {
                // no additional metadata for directories
                metadata = new FileMetadata(fileType);
            } else if (responseType == FileType.FILE) {
                // Decode the per-object headers. Ingest time is seconds since the epoch, so it is
                // scaled to milliseconds for java.util.Date.
                long size = Long.parseLong(
                        cookie.getResponse().getFirstHeader(HttpGatewayConstants.HEADER_OBJ_SIZE_ANS).getValue());
                long version = Long.parseLong(cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_OBJ_VERSION_ANS).getValue());
                String ingestTimeString = cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_OBJ_CREATE_TIME_ANS).getValue();
                Date ingestTime = (ingestTimeString == null ? null
                        : new Date(Long.parseLong(ingestTimeString) * 1000));
                boolean retentionHold = Boolean.parseBoolean(cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_RETENTION_HOLD_BOOLEAN_ANS).getValue());
                boolean shred = Boolean.parseBoolean(cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_SHRED_BOOLEAN_ANS).getValue());
                int dpl = Integer.parseInt(
                        cookie.getResponse().getFirstHeader(HttpGatewayConstants.HEADER_DPL_VALUE_ANS).getValue());
                boolean searchIndex = Boolean.parseBoolean(cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_INDEX_VALUE_ANS).getValue());

                // >= 4.0 value. Need to test for null
                // NOTE(review): getFirstHeader returns null for an absent header, so the NPE
                // catches below are how "header not present" is detected throughout this method.
                Boolean replicated = null;
                try {
                    replicated = Boolean.parseBoolean(cookie.getResponse()
                            .getFirstHeader(HttpGatewayConstants.HEADER_REPLICATED_CSL_ANS).getValue());
                } catch (NullPointerException e) {
                    /* We don't do anything */ }

                boolean hasCustomMetadata = Boolean.parseBoolean(cookie.getResponse()
                        .getFirstHeader(HttpGatewayConstants.HEADER_CUST_METADATA_BOOLEAN_ANS).getValue());
                CustomMetadata customMetadata = null;
                if (hasCustomMetadata) {
                    // For a versioned lookup the CM must be addressed at that version.
                    customMetadata = new CustomMetadata(CustomMetadata.Form.PROFILED, filePath,
                            isVersion ? version : null);
                }

                // Retention on the auth namespace either comes back with a class, a value or a
                // string (deletion
                // prohibited, etc.) So we have to construct the rentention object here
                // appropriately
                String retentionClass = null;
                Long retentionValue = null;
                String retentionString = null;
                try {
                    retentionClass = cookie.getResponse()
                            .getFirstHeader(HttpGatewayConstants.HEADER_RETENTION_CLASS_ANS).getValue();
                } catch (NullPointerException e) {
                    // do nothing
                }
                try {
                    retentionValue = Long.parseLong(cookie.getResponse()
                            .getFirstHeader(HttpGatewayConstants.HEADER_RETENTION_LONG_TIME_ANS).getValue());
                } catch (NullPointerException e) {
                    // do nothing
                }
                try {
                    retentionString = cookie.getResponse()
                            .getFirstHeader(HttpGatewayConstants.HEADER_RETENTION_STRING_TIME_ANS).getValue();
                } catch (NullPointerException e) {
                    // do nothing
                }
                // Map the symbolic retention strings back to their numeric HCAP values.
                if (retentionString != null && !retentionString.equals("")) {
                    if (retentionString.equals(Retention.DELETION_ALLOWED.getUIValue())) {
                        retentionValue = Long.parseLong(Retention.DELETION_ALLOWED.getHCAPValue());
                    } else if (retentionString.equals(Retention.DELETION_PROHIBITED.getUIValue())) {
                        retentionValue = Long.parseLong(Retention.DELETION_PROHIBITED.getHCAPValue());
                    } else if (retentionString.equals(Retention.INITIAL_UNSPECIFIED.getUIValue())) {
                        retentionValue = Long.parseLong(Retention.INITIAL_UNSPECIFIED.getHCAPValue());
                    }
                }
                Retention retention = Retention.fromRetentionValueAndClass(retentionValue, retentionClass);

                // Hash header value is split at the first space into scheme and digest.
                String hash = "";
                String hashScheme = "";
                String hashValue = "";
                Header hashHeader = cookie.getResponse().getFirstHeader(HttpGatewayConstants.HEADER_HASH_ANS);
                if (hashHeader != null) {
                    hash = hashHeader.getValue();
                    int hashIndex = hash.indexOf(' ');
                    if (hashIndex > 0) {
                        hashScheme = hash.substring(0, hashIndex).trim();
                        hashValue = hash.substring(hashIndex).trim();
                    }
                }

                metadata = new FileMetadata(fileType, ingestTime, null, null, null, size, null, null, null, null,
                        null, version, dpl, hashScheme, hashValue, shred, retention, retentionHold, searchIndex,
                        replicated, null, null, customMetadata);

                // NOTE(review): this assignment is repeated below for all metadata objects;
                // harmless but redundant for the FILE case.
                if (symlinkTarget != null) {
                    metadata.setSymlinkTarget(symlinkTarget);
                }
                // Hook for subclasses that decode extra headers.
                getAdditionalMetadata(cookie.getResponse(), metadata, filePath);
            }

            if (metadata != null && symlinkTarget != null) {
                metadata.setSymlinkTarget(symlinkTarget);
            }

        } catch (IOException e) {
            handleIOExceptionFromRequest(e, "getting metadata", filePath);
        } finally {
            close();
        }

        return metadata;
    }

    /**
     * Reads the object's custom metadata into a String.
     *
     * @param filePath
     *            path of the object
     * @param version
     *            specific version to read, or null for the current version
     * @param fileType
     *            object type, passed through to the stream lookup
     * @return the custom metadata text, or null if none exists (an IOException during the read is
     *         logged at FINE and treated as "no custom metadata")
     * @throws StorageAdapterException
     *             if the existence check or stream setup fails
     * @inheritDoc
     */
    public String getCustomMetadata(String filePath, final Long version, final FileType fileType)
            throws StorageAdapterException {
        BufferedReader reader = null;
        try {
            InputStream inputStream = getCustomMetadataStream(filePath, version, fileType);

            // Stream is null when the object has no custom metadata.
            if (inputStream != null) {
                // NOTE(review): InputStreamReader uses the platform default charset here --
                // confirm whether UTF-8 was intended.
                reader = new BufferedReader(new InputStreamReader(inputStream));
            }

            return (reader != null ? getCustomMetadata(reader) : null);
        } catch (IOException e) {
            // Best-effort read: an IO failure is reported as absent custom metadata.
            String errorMessage = String.format("CustomMetadata does not exist for path '%s'%s.", filePath,
                    ((version == null) ? "" : " at version " + version));
            LOG.log(Level.FINE, errorMessage, e);
            return null;
        } finally {
            // Close the reader first, then the adapter connection. A RuntimeException from
            // reader.close() is deferred so close() still runs, and is rethrown afterwards.
            RuntimeException e = null;
            try {
                if (reader != null) {
                    reader.close();
                }
            } catch (IOException io) {
                LOG.fine("Error closing steam in getCustomMetadata.  Exception=" + io);
            } catch (RuntimeException ex) {
                e = ex;
            }
            close();

            if (e != null) {
                throw e;
            }
        }
    }

    /**
     * Opens a stream over the object's default custom metadata.
     *
     * @inheritDoc
     */
    public InputStream getCustomMetadataStream(String filePath, final Long version, final FileType fileType)
            throws StorageAdapterException {
        // Delegate to the annotation-aware overload with no annotation name.
        return getCustomMetadataStream(filePath, version, fileType, null);
    }

    /**
     * Opens a stream over the object's custom metadata (optionally a named annotation).
     *
     * @param filePath
     *            path of the object
     * @param version
     *            specific version to read, or null for the current version
     * @param fileType
     *            object type (not used for addressing here)
     * @param annotation
     *            annotation name, or null for the default custom-metadata
     * @return an open stream, or null when the object has no custom metadata
     * @throws StorageAdapterException
     *             if the existence check or stream setup fails
     */
    public InputStream getCustomMetadataStream(String filePath, final Long version, final FileType fileType,
            final String annotation) throws StorageAdapterException {
        final String queryString = buildCMQueryString(version, annotation);
        // Probe first so a missing CM yields null instead of an error from the GET.
        if (!exists(filePath, queryString)) {
            return null;
        }
        return getInputStream(filePath, queryString);
    }

    /**
     * Builds the query string used to address custom metadata, e.g.
     * {@code type=custom-metadata&version=<n>}.
     *
     * @param version
     *            optional object version to address; omitted from the query when null
     * @param annotation
     *            unused here -- presumably this HCP 3.x adapter supports only the single default
     *            custom-metadata, with subclasses for newer HCP versions overriding to use it;
     *            TODO confirm
     * @return the assembled query string (never null)
     */
    protected String buildCMQueryString(final Long version, final String annotation) {
        StringBuilder sb = new StringBuilder("type=custom-metadata");
        if (version != null) {
            // Chain appends instead of concatenating a String inside append().
            sb.append("&version=").append(version);
        }
        return sb.toString();
    }

    /**
     * Writes the default custom metadata for {@code file} from a stream.
     *
     * @param file
     *            path of the object the metadata is attached to
     * @param metadata
     *            stream carrying the metadata document
     * @throws StorageAdapterException
     *             if the write fails
     */
    public void setCustomMetadataStream(final String file, InputStream metadata) throws StorageAdapterException {
        // No annotation name: target the default custom-metadata.
        setCustomMetadataStream(file, metadata, null);
    }

    /**
     * Writes custom metadata (or a named annotation) for {@code file} from a stream.
     *
     * @param file
     *            path of the object the metadata is attached to
     * @param metadata
     *            stream carrying the metadata document
     * @param annotation
     *            annotation name, or null for the default custom-metadata
     * @throws StorageAdapterException
     *             if the write fails
     */
    public void setCustomMetadataStream(final String file, InputStream metadata, String annotation)
            throws StorageAdapterException {
        final String what = (annotation == null) ? "custom metadata" : "annotation " + annotation;
        LOG.info("Attempting to write " + what + ".");
        doWriteStream(getHost(), file, metadata, null, true, annotation);
    }

    /**
     * Hook called from {@code getMetadata} after the standard headers are decoded. Subclasses for
     * newer HCP versions override this to collect additional metadata from the response; this base
     * implementation collects nothing.
     * 
     * @param response
     *            the HEAD response whose headers may carry extra metadata
     * @param metadata:
     *            data to fill
     * @param filePath
     *            path of the object the response describes
     * @return true if additional metadata was added
     */
    protected boolean getAdditionalMetadata(final HttpResponse response, FileMetadata metadata, String filePath) {
        // Base class adds nothing; subclasses override.
        return false;
    }

    /**
     * Hook for collecting additional metadata from a directory-listing XML element. Subclasses for
     * newer HCP versions override this; this base implementation collects nothing.
     * 
     * @param xmlr
     *            reader positioned on the entry whose attributes may carry extra metadata
     * @param metadata:
     *            data to fill
     * @param filePath
     *            path of the entry being decoded
     * @return true if additional metadata was added
     */
    protected boolean getAdditionalMetadata(final XMLStreamReader xmlr, FileMetadata metadata, String filePath) {
        // Base class adds nothing; subclasses override.
        return false;
    }

    /**
     * Builds the query string for a directory listing request.
     *
     * @param caller
     *            directory being listed; determines whether a version list is requested
     * @param includeDeleted
     *            whether deleted entries should be included
     * @param supportsVersioning
     *            parameters differ when versioning is enabled on the namespace; {@code ?deleted}
     *            returns a 400 on a non-versioned namespace, so no query is sent in that case
     * @return the query string to append (empty for non-versioned namespaces)
     * @throws StorageAdapterException
     */
    protected String getQueryStringForFileListIterator(ArcMoverDirectory caller, boolean includeDeleted,
            boolean supportsVersioning) throws StorageAdapterException {
        if (!supportsVersioning) {
            // Non-versioned namespaces reject the ?deleted parameter outright.
            return "";
        }
        if (!includeDeleted) {
            return "?deleted=false";
        }
        return caller.isVersionList() ? "?version=list&deleted=true" : "?deleted=true";
    }

    public ArcMoverFile createArcMoverFileObject(XMLStreamReader xmlr, ArcMoverDirectory caller)
            throws StorageAdapterException {

        ArcMoverFile retVal = null;
        try {

            // The urlName is the byte stream representation of the file name, the utf8Name is the
            // HCP's utf8 version
            // of that byte stream. Depending on the libraries installed on the client that name may
            // not translate
            // so it is best to use the urlName.
            String fileName = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_FILE_NAME);
            if (fileName == null) {
                fileName = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_FILE_UTF8_NAME);
            } else {
                try {
                    // Not all chars are encoded. Fix it.
                    fileName = RFC2396Encoder.fixEncoding(fileName);
                } catch (UnsupportedEncodingException e) {
                    throw new StorageAdapterException(e.getMessage(), e); // This should never
                                                                          // happen but just in
                                                                          // case.
                }
            }

            String fileType = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_TYPE);
            if (fileType != null) {

                String state = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_STATE);

                if (DIRECTORY.equals(fileType)) {
                    // Metadata is not available for a directory in the authenticated namespace.
                    retVal = ArcMoverDirectory.getDirInstance(caller, fileName, null, this);
                } else if (SYMLINK.equals(fileType)) {
                    String symlinkTarget = xmlr.getAttributeValue(null,
                            HttpGatewayConstants.MD_AUTH_SYMLINK_TARGET);
                    retVal = ArcMoverSymlink.getSymlinkInstance(caller, fileName, symlinkTarget, this);
                } else if (OBJECT.equals(fileType)) {
                    long size = 0;
                    long version = 0;
                    long retentionValue = 0;
                    int dpl = 0;
                    boolean shred = false;
                    boolean retentionHold = false;
                    boolean searchIndex = false;
                    boolean replicated = false;
                    boolean hasCustomMetadata = false;

                    /*
                     * We have access to all of the metadata already so we just do that to construct
                     * the FileMetadata Object rather than going through the getMetadata call below.
                     * We collect all the same data here. See @getMetadata
                     */
                    try {
                        size = Long.parseLong(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_SIZE));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        version = Long.parseLong(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_VERSION));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }

                    String ingestTimeString = xmlr.getAttributeValue(null,
                            HttpGatewayConstants.MD_AUTH_PARAM_INJEST_TIME);
                    Date ingestTime = (ingestTimeString == null ? null
                            : new Date(Long.parseLong(ingestTimeString) * 1000));

                    try {
                        size = Long
                                .parseLong(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_SIZE));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        version = Long.parseLong(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_VERSION));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        retentionValue = Long.parseLong(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_RETENTION));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        dpl = Integer
                                .parseInt(xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_DPL));
                    } catch (NumberFormatException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }

                    try {
                        shred = Boolean.parseBoolean(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_SHRED));
                    } catch (NullPointerException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        retentionHold = Boolean.parseBoolean(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_HOLD));
                    } catch (NullPointerException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    try {
                        searchIndex = Boolean.parseBoolean(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_INDEX));
                    } catch (NullPointerException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }

                    try {
                        replicated = Boolean.parseBoolean(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_PARAM_REPLICATED));
                    } catch (NullPointerException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    String retentionClass = xmlr.getAttributeValue(null,
                            HttpGatewayConstants.MD_AUTH_PARAM_RENTENTION_CLASS);
                    String hashScheme = xmlr.getAttributeValue(null,
                            HttpGatewayConstants.MD_AUTH_PARAM_HASH_SCHEME);
                    String hashValue = xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_HASH);

                    // Construct the retention object
                    Retention retention = null;
                    if (retentionClass != null && !retentionClass.equals("")) {
                        retention = Retention.fromRetentionClass(retentionClass, retentionValue);
                    } else {
                        retention = Retention.fromRetentionValue(retentionValue);
                    }

                    FileMetadata metadata = new FileMetadata(FileType.FILE, ingestTime, null, null, null, size,
                            null, null, null, null, null, version, dpl, hashScheme, hashValue, shred, retention,
                            retentionHold, searchIndex, replicated, null, null, null);
                    metadata.setIsVersion(caller.isVersionList());

                    retVal = ArcMoverFile.getFileInstance(getProfile(), caller, fileName, metadata);

                    getAdditionalMetadata(xmlr, retVal.getMetadata(), retVal.getPath());

                    try {
                        hasCustomMetadata = Boolean.parseBoolean(
                                xmlr.getAttributeValue(null, HttpGatewayConstants.MD_AUTH_PARAM_CUSTOM_METADATA));
                    } catch (NullPointerException e) {
                        LOG.log(Level.WARNING, "Exception parsing metadata for: " + fileName, e);
                    }
                    CustomMetadata customMetadata = null;
                    if (hasCustomMetadata) {
                        customMetadata = new CustomMetadata(CustomMetadata.Form.PROFILED, retVal.getPath());
                    }
                    retVal.setCustomMetadata(customMetadata);
                }

                if (retVal != null) {
                    retVal.getMetadata().setRestState(state);
                }
            }
        } catch (Throwable e) {
            String msg = "Error parsing directory for: " + caller.getPath();
            IOException e2 = new IOException(msg);
            e2.initCause(e);
            throw new StorageAdapterException(e2.getMessage(), e2);
        }

        return retVal;

    }

    /**
     * Determines whether the profile user has any permissions on the profile's namespace by
     * fetching and parsing the namespace "proc" listing.
     *
     * @return true if the namespace appears in the proc listing for this user
     * @throws StorageAdapterException
     *             if the proc data cannot be retrieved or parsed
     */
    private boolean doesNamespaceHavePermissions() throws StorageAdapterException {

        InputStream procStream = null;
        try {
            procStream = getInputStream(PROC_PATH, false);
            Document procDoc = new SAXBuilder(false).build(procStream);
            LOG.log(Level.FINEST, "procXML=" + procDoc);
            close(); // Done with that stream, close

            return doesNamespaceHavePermissions(procDoc.getRootElement(), getProfile().getNamespace());

        } catch (IOException e) {
            handleIOExceptionFromRequest(e, "accessing namespace info for", getHost()); // throws an
                                                                                        // exception
        } catch (JDOMException e) {
            String errMsg = "Exception parsing proc data for archive with profile: " + getProfile();
            LOG.log(Level.WARNING, errMsg, e);
            throw new StorageAdapterLiteralException(errMsg, e);
        } finally {
            // Close the stream, deferring any RuntimeException until after close() has run.
            RuntimeException deferred = null;
            try {
                if (procStream != null) {
                    procStream.close();
                }
            } catch (IOException io) {
                LOG.log(Level.FINE, "IOException closing during getProc", io);
            } catch (RuntimeException rte) {
                deferred = rte;
            }

            close();

            if (deferred != null) {
                throw deferred;
            }
        }
        // handleIOExceptionFromRequest() should have thrown, but if its implementation ever changes
        // and it doesn't,
        // throw
        throw new RuntimeException("Error determining if the profile has permissions to the requested namespace");
    }

    /**
     * Reports whether versioning is enabled for this profile's namespace, as read from the
     * namespace proc listing at {@code PROC_PATH}.
     *
     * @return true if the proc listing marks the namespace as versioning-enabled
     * @throws StorageAdapterException
     *             if the proc data cannot be retrieved or parsed
     */
    @Override
    public boolean isVersioningEnabled() throws StorageAdapterException {
        return isVersioningEnabled(PROC_PATH);
    }

    /**
     * Scans the parsed proc document for the given namespace. A namespace appears in the proc
     * listing only when the profile user has at least one permission on it.
     *
     * @param procElement
     *            root element of the proc XML document
     * @param namespace
     *            namespace name to look for (case-insensitive); may be null or empty
     * @return true if the namespace is present in the listing
     */
    private boolean doesNamespaceHavePermissions(Element procElement, String namespace) {
        if (namespace == null || namespace.isEmpty()) {
            return false;
        }

        for (Object child : procElement.getChildren("namespace")) {
            Element nsElement = (Element) child;
            if (namespace.equalsIgnoreCase(nsElement.getAttributeValue("name"))) {
                // the namespace is in the list, so it has at least one permission
                return true;
            }
        }
        return false;
    }

    /**
     * Fetches the proc listing at the given path and reports whether versioning is enabled for
     * this profile's namespace.
     *
     * @param procPath
     *            path of the proc resource to read
     * @return true if the namespace is marked versioning-enabled in the listing
     * @throws StorageAdapterException
     *             if the proc data cannot be retrieved or parsed
     */
    protected boolean isVersioningEnabled(String procPath) throws StorageAdapterException {

        InputStream procStream = null;
        try {
            procStream = getInputStream(procPath, false);
            Document procDoc = new SAXBuilder(false).build(procStream);
            LOG.log(Level.FINEST, "procXML=" + procDoc);
            close(); // Done with that stream, close

            return isVersioningEnabled(procDoc.getRootElement(), getProfile().getNamespace());

        } catch (IOException e) {
            handleIOExceptionFromRequest(e, "accessing namespace info for", getHost()); // throws an
                                                                                        // exception
        } catch (JDOMException e) {
            String errMsg = "Exception parsing proc data for archive with profile: " + getProfile();
            LOG.log(Level.WARNING, errMsg, e);
            throw new StorageAdapterLiteralException(errMsg, e);
        } finally {
            // Close the stream, deferring any RuntimeException until after close() has run.
            RuntimeException deferred = null;
            try {
                if (procStream != null) {
                    procStream.close();
                }
            } catch (IOException io) {
                LOG.log(Level.FINE, "IOException closing during getProc", io);
            } catch (RuntimeException rte) {
                deferred = rte;
            }

            close();

            if (deferred != null) {
                throw deferred;
            }
        }
        // handleIOExceptionFromRequest() should have thrown, but if its implementation ever changes
        // and it doesn't,
        // throw
        throw new RuntimeException("Error determining if versioning is enabled");
    }

    /**
     * Scans the parsed proc document for the given namespace and reads its
     * {@code versioningEnabled} attribute.
     *
     * @param procElement
     *            root element of the proc XML document
     * @param namespace
     *            namespace name to look for (case-insensitive); may be null or empty
     * @return the namespace's versioningEnabled flag, or false if the namespace is not listed
     */
    private boolean isVersioningEnabled(Element procElement, String namespace) {
        if (namespace == null || namespace.isEmpty()) {
            return false;
        }

        for (Object child : procElement.getChildren("namespace")) {
            Element nsElement = (Element) child;
            if (namespace.equalsIgnoreCase(nsElement.getAttributeValue("name"))) {
                return Boolean.parseBoolean(nsElement.getAttributeValue("versioningEnabled"));
            }
        }
        return false;
    }

    // ======================================= Protected Methods =======================================
    /**
     * Sets the HCP namespace authentication cookie ("hcp-ns-auth") on the request carried by the
     * given cookie, unless the profile uses anonymous access. Note: This is overwritten from its
     * parent. (The previous javadoc here described exception trapping and was copy-pasted from
     * another method.)
     *
     * @param cookie
     *            carrier of the request to decorate
     * @return the auth string that was set, or the empty string for anonymous access
     */
    protected String setAuthHeader(HcapAdapterCookie cookie) {
        String authString = StringUtils.EMPTYSTR;
        if (!profile.isAnonymousAccess()) {
            byte[] unameBytes;
            try {
                // Encode with an explicit charset: the no-arg getBytes() uses the platform
                // default, so non-ASCII user names could produce different cookies on
                // differently configured clients.
                unameBytes = profile.getUsername().getBytes("UTF-8");
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is required of every JVM; fall back to the platform default just in case.
                unameBytes = profile.getUsername().getBytes();
            }
            String uname64 = Base64Utils.encode(unameBytes);
            authString = "hcp-ns-auth=" + uname64 + ":" + profile.getPassword();

            cookie.getRequest().setHeader("Cookie", authString);
        }
        return authString;
    }

    /**
     * Executes the request carried by the cookie against the cookie's host and stores the
     * response (plus an HTTP context) back on the cookie. Sets the auth cookie header and the
     * virtual-host parameter before executing.
     *
     * @param cookie
     *            carries the request/host in and the response/context out
     * @throws IOException
     *             on transport failure; NullPointerExceptions thrown by the HTTP client are
     *             converted to NoHttpResponseException
     */
    protected void executeMethod(HcapAdapterCookie cookie) throws IOException {
        String authString = setAuthHeader(cookie);

        // Send profile.getHostHeader() as the Host header (virtual host), independent of the
        // physical host the connection targets.
        HttpHost vHost = new HttpHost(profile.getHostHeader(), cookie.getHost().getPort(),
                cookie.getHost().getSchemeName());
        cookie.getRequest().getParams().setParameter(ClientPNames.VIRTUAL_HOST, vHost);

        LOG.fine("Namespace Host: " + profile.getHostHeader());

        // Verify the auth cookie header has not been altered before execution.
        if (!profile.isAnonymousAccess() && !profile.supportsAuthorizationHeader()) {
            testHeaders(cookie, authString, false);
        }

        try {

            // NOTE(review): this context is created but never handed to httpClient.execute(), so
            // the response is paired with a context the execution did not use. Confirm whether
            // execute(host, request, context) was intended.
            BasicHttpContext context = new BasicHttpContext();
            HttpResponse response = httpClient.execute(cookie.getHost(), cookie.getRequest());
            cookie.setResponseAndContext(response, context);

            // Re-verify the auth cookie header after execution as well.
            if (!profile.isAnonymousAccess() && !profile.supportsAuthorizationHeader()) {
                testHeaders(cookie, authString, false);
            }

        } catch (ConnectionPoolTimeoutException e) {
            LOG.log(Level.WARNING,
                    "Timed out waiting for connection from pool.  This may be caused by a failure to call HttpMethod.release()",
                    e);
            throw e;
        } catch (IOException e) {
            LOG.log(Level.INFO, "Unexpected IOException in executeMethod", e);
            throw e;
        } catch (NullPointerException npe) {
            // Log the header state (printSuccess=true) to help diagnose the NPE.
            if (!profile.isAnonymousAccess() && !profile.supportsAuthorizationHeader()) {
                testHeaders(cookie, authString, true);
            }

            // We are seeing NPE's come out of the apache code dealing with RequestAddCookies
            String uri = "null";
            if (cookie.getRequest() != null && cookie.getRequest().getURI() != null) {
                uri = cookie.getRequest().getURI().getPath();
            }
            LOG.log(Level.WARNING, "NullPointerException in executeMethod.  Executing request on file: " + uri,
                    npe);

            // Convert it for now so we continue and see if we get more of these.
            throw new NoHttpResponseException("Error executing http request,  NullPointerException thrown ");
        }
    }

    /**
     * Sanity check that the "Cookie" header on the request still matches the auth string we set.
     * Logs (at SEVERE) a mismatch, an unexpected header count, or — when printSuccess is true —
     * a confirmation that the header is intact.
     *
     * @param cookie
     *            carrier of the request whose headers are inspected
     * @param savedAuthString
     *            the auth string originally placed in the Cookie header
     * @param printSuccess
     *            whether to also log when the header is valid
     */
    private void testHeaders(HcapAdapterCookie cookie, String savedAuthString, boolean printSuccess) {
        Header[] cookieHeaders = cookie.getRequest().getHeaders("Cookie");
        if (cookieHeaders.length != 1) {
            LOG.severe("The cookie header array had an unexpected length of: " + cookieHeaders.length);
            return;
        }

        String currentAuthString = cookieHeaders[0].getValue();
        if (!savedAuthString.equals(currentAuthString)) {
            LOG.severe("Cookie Changed from: " + savedAuthString + " to " + currentAuthString);
        } else if (printSuccess) {
            LOG.severe("Cookie header is valid.");
        }
    }

    /**
     * Generate the Query String as a list of {@link NameValuePair}s. The query parameters can
     * contain the initial Archive Metadata values.
     *
     * Here we set: index, shred, retention (via an HCAP formatted string). We set hold only when
     * it is true (on a new file adding unhold is a noop); if the user lacks permission the server
     * rejects the put. We do NOT set any POSIX values on the authenticated namespace, and we do
     * NOT set the hash or dpl values.
     *
     * @param md
     *            metadata whose values are turned into query parameters
     * @param isCreate
     *            passed through to subclasses via addAdditionalQueryParams
     * @return the encoded query string, or null when no parameters apply
     */
    protected String generateQueryParameters(FileMetadata md, boolean isCreate) {
        List<NameValuePair> queryParams = new ArrayList<NameValuePair>();

        if (md.hasNonDefaultRetention()) {
            // A set retention value is sent as its HCAP-formatted representation.
            queryParams.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_RETENTION,
                    String.valueOf(md.getRetention().getHCAPValue())));
        }

        if (md.hasShred()) {
            queryParams.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_SHRED,
                    String.valueOf(md.isShred())));
        }

        if (md.hasSearchIndex()) {
            queryParams.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_SEARCH_INDEX,
                    String.valueOf(md.isSearchIndex())));
        }

        // Retention hold only needs to be sent when true; the server enforces permissions.
        if (md.hasRetentionHold() && md.isRetentionHold()) {
            queryParams.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_RETENTION_HOLD,
                    String.valueOf(md.isRetentionHold())));
        }

        addAdditionalQueryParams(md, queryParams, isCreate);

        if (queryParams.isEmpty()) {
            return null;
        }

        String queryString = URLEncodedUtils.format(queryParams, HCPMoverConstants.SUPPORTED_CHARSET);
        LOG.log(Level.FINE, "generateQueryParameters=" + queryString);
        return queryString;
    }

    /**
     * Called from @generateQueryParameters. Gives children of this class the ability to add params
     * to the list before creating the string. This base implementation adds nothing.
     * 
     * @param md
     *            metadata whose values may be turned into additional query parameters
     * @param params
     *            list to append additional parameters to
     * @param isCreate
     *            flag forwarded from generateQueryParameters (presumably distinguishes creating a
     *            new object from updating one — confirm against callers)
     * @return true if params were added; always false here
     */
    protected boolean addAdditionalQueryParams(final FileMetadata md, List<NameValuePair> params,
            boolean isCreate) {
        return false;
    }

    /**
     * Builds the query parameters selecting the requested delete variant.
     *
     * @param operation
     *            the delete flavor: plain delete, PURGE, or PRIVILEGED_PURGE
     * @param reason
     *            justification string; required for a privileged purge
     * @return the name/value pairs to append to the delete request (empty for a plain delete)
     * @throws StorageAdapterLiteralException
     *             if a privileged purge is requested without a reason
     */
    protected List<NameValuePair> handleDeleteOperation(DeleteJob.Operation operation, String reason)
            throws StorageAdapterLiteralException {
        List<NameValuePair> deleteOperations = new ArrayList<NameValuePair>();
        if (operation == DeleteJob.Operation.PRIVILEGED_PURGE) {
            // Bug fix: this previously used (reason == ""), which compares reference identity
            // and only matches the interned empty-string literal, not an empty String built at
            // runtime. (Also fixed the "priviledged" typo in the error message.)
            if (reason == null || reason.isEmpty()) {
                throw new StorageAdapterLiteralException("When doing a privileged delete a reason is required.");
            }
            deleteOperations.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PRIVILEGED, "true"));
            deleteOperations.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PURGE, "true"));
            deleteOperations.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_REASON, reason));
        } else if (operation == DeleteJob.Operation.PURGE) {
            deleteOperations.add(new BasicNameValuePair(HttpGatewayConstants.PARAM_PURGE, "true"));
        }

        return deleteOperations;
    }

    /**
     * @return the name of the response header that carries the server's error message for the
     *         authenticated namespace
     */
    protected String getErrorHeader() {
        return HttpGatewayConstants.HEADER_ERROR_MESSAGE_AUTH;
    }

    /**
     * Overwritten from parent. This adapter does not fetch an SSL certificate chain here; the
     * "getcerts" scheme registered in getAdditionalHcapProtocolSchemeRegistryForHttpClient
     * handles certificate acceptance instead.
     * 
     * @return always null for this adapter
     * @throws IOException
     *             never thrown by this implementation; declared to match the parent signature
     */
    public SSLCertChain getSSLCerts() throws IOException {
        return null;
    }

    /**
     * Registers the "getcerts" scheme (port 443) whose socket factory trusts via
     * {@link GetCertsX509TrustManager} and the supplied callback, with host name verification
     * disabled.
     *
     * @param schemeRegistry
     *            registry to add the scheme to
     * @param sslExceptionCallback
     *            callback consulted by the trust manager for certificate decisions
     * @throws StorageAdapterException
     *             if the SSL context cannot be initialized
     */
    public void getAdditionalHcapProtocolSchemeRegistryForHttpClient(SchemeRegistry schemeRegistry,
            SSLCertificateCallback sslExceptionCallback) throws StorageAdapterException {
        try {
            SSLContext sslcontext = SSLContext.getInstance("TLS");
            TrustManager tm = new GetCertsX509TrustManager(getProfile(), sslExceptionCallback);
            sslcontext.init(null, new TrustManager[] { tm }, null);

            SSLSocketFactory getCertsFactory = new SSLSocketFactory(sslcontext);
            // Certificate decisions are delegated to the trust manager/callback above, so host
            // name verification is disabled for this scheme.
            getCertsFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            // Bug fix: a second "new SSLSocketFactory(sslcontext)" was previously assigned here,
            // silently discarding the hostname verifier configured on the first factory.
            Scheme getCerts = new Scheme("getcerts", getCertsFactory, 443);
            schemeRegistry.register(getCerts);

        } catch (Exception e) {
            LOG.log(Level.INFO, "Unable to initialize SSL for hcaphttps protocol!", e);
            throw new StorageAdapterException("Unable to initialize SSL for https protocol", e);
        }
    }

    /**
     * Tests the profile's connection by issuing a HEAD request against the namespace root and
     * then verifying the profile user has explicit permissions on the namespace.
     *
     * @return true if the connection test succeeded
     * @throws ConnectionTestException
     *             if the host cannot be built, the request fails, the server redirects (bad
     *             namespace access configuration), a non-OK status is returned, or the user has
     *             no permissions on the namespace
     */
    @Override
    protected boolean doTestConnection() throws ConnectionTestException {
        boolean isValid = false;

        HttpUriRequest request = null;

        HttpHost httpHost;
        try {
            httpHost = new HttpHost(getHost(), getProfile().getPort(), getProfile().getProtocol());
        } catch (StorageAdapterException e) {
            throw new ConnectionTestException(e, null, getProfile().getNamespace(), getProfile().getName());
        }

        // A HEAD on the namespace root exercises connectivity and auth without transferring data.
        String root = getProfile().resolvePath("/");
        request = new HttpHead(root);

        // Eventually we will just return this cookie which will be passed back to the caller.
        HcapAdapterCookie cookie = new HcapAdapterCookie(request, httpHost);
        synchronized (savingCookieLock) {
            // Only one in-flight connection per adapter instance is supported.
            if (savedCookie != null) {
                throw new RuntimeException(
                        "This adapter already has a current connection to host -- cannot create two at once.");
            }
            savedCookie = cookie;
        }

        Throwable cause = null;
        Integer statusCode = null;
        try {
            executeMethod(cookie);
        } catch (IOException e) {
            LOG.log(Level.WARNING, "IOException during testConnection", e);
            cause = e;
            isValid = false;
        } finally {
            close(); // always release the connection, success or failure
        }

        if (cookie.getResponse() != null) {
            statusCode = cookie.getResponse().getStatusLine().getStatusCode();
        }

        if (statusCode != null && statusCode == HttpStatus.SC_OK) {
            isValid = true;
        } else if (statusCode != null && statusCode == HttpStatus.SC_MOVED_TEMPORARILY) {
            // A redirect here is treated as a namespace access misconfiguration, not a
            // transport failure.
            throw new ConnectionTestException("Invalid namespace access configuration for namespace "
                    + getProfile().getNamespace() + " using profile " + getProfile().getName(), statusCode);
        } else if (cause != null) {
            throw new ConnectionTestException(cause, statusCode, getProfile().getNamespace(),
                    getProfile().getName());
        } else {
            throw new ConnectionTestException(statusCode, getProfile().getNamespace(), getProfile().getName());
        }

        // Even after a 200 OK, require that the user is explicitly listed for the namespace.
        try {
            if (!doesNamespaceHavePermissions()) {
                throw new ConnectionTestException(
                        "The profile user has no explicit permissions to namespace " + getProfile().getNamespace(),
                        (Integer) null);
            }
        } catch (ConnectionTestException e) {
            throw e;
        } catch (StorageAdapterException e) {
            throw new ConnectionTestException(e, null, null, null);
        } catch (Exception e) {
            throw new ConnectionTestException(e, null, null, null);
        }

        return isValid;
    }

    /**
     * @return false — directory metadata is not copied when migrating; the authenticated
     *         namespace does not expose metadata for directories (see the DIRECTORY branch of the
     *         listing parser in this class)
     */
    public boolean copyDirMetadataWhenMigrating() {
        return false;
    }

}