Example usage for com.mongodb.gridfs GridFSDBFile getInputStream

Introduction

This page collects example usages of com.mongodb.gridfs GridFSDBFile getInputStream from open-source projects.

Prototype

public InputStream getInputStream() 

Document

Returns an InputStream from which data can be read.
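Before the project examples, here is a minimal, self-contained sketch of the usual pattern: look the file up, then drain the stream it returns. The connection details and the names "test", "fs", and "report.pdf" are placeholders for illustration, not taken from any of the projects below.

import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class GetInputStreamSketch {
    public static void main(String[] args) throws Exception {
        MongoClient client = new MongoClient("localhost", 27017); // placeholder host/port
        try {
            DB db = client.getDB("test");                 // placeholder database name
            GridFS fs = new GridFS(db, "fs");             // "fs" is the default bucket
            GridFSDBFile file = fs.findOne("report.pdf"); // placeholder file name
            if (file == null) {
                throw new FileNotFoundException("No GridFS file named report.pdf");
            }
            // getInputStream() reads the file's chunks from the server as you consume it.
            InputStream in = file.getInputStream();
            OutputStream out = new FileOutputStream("report.pdf");
            try {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read);
                }
            } finally {
                out.close();
                in.close();
            }
        } finally {
            client.close();
        }
    }
}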

Usage

From source file:me.yyam.mongodbutils.MongoDbOperater.java

/**
 * Download a file stored in GridFS to the local file system.
 * @param dbName the database name
 * @param fsName the GridFS bucket name; "fs" is used when null
 * @param fsFileName the name of the file inside GridFS
 * @param fileName the local path to save to; defaults to fsFileName when blank
 * @return the downloaded local file
 * @throws FileNotFoundException if no GridFS file matches fsFileName
 * @throws IOException 
 */
public File downloadFsFile(String dbName, String fsName, String fsFileName, String fileName)
        throws FileNotFoundException, IOException {
    if (StringUtils.isBlank(fileName)) {
        fileName = fsFileName;
    }
    File saveFile = new File(fileName);
    if (saveFile.isDirectory()) {
        fileName = saveFile.getPath() + "/" + fsFileName;
    }
    DB db = mongoClient.getDB(dbName);
    if (fsName == null) {
        fsName = "fs";
    }
    GridFS fs = new GridFS(db, fsName);
    GridFSDBFile gfile = fs.findOne(fsFileName);
    if (gfile == null) {
        throw new FileNotFoundException("gridfs" + fsFileName);
    }
    InputStream input = gfile.getInputStream();
    try {
        File f = new File(fileName);
        if (!f.exists()) {
            f.createNewFile();
        }
        OutputStream output = new FileOutputStream(f);
        try {
            byte[] bytes = new byte[1024];
            int read;
            while ((read = input.read(bytes)) != -1) {
                output.write(bytes, 0, read);
            }
            output.flush();
        } finally {
            // Close the destination even if the copy fails part-way.
            output.close();
        }
        return f;
    } finally {
        input.close();
    }

}

From source file:me.yyam.mongodbutils.MongoDbOperater.java

public byte[] getFsFileBytes(String dbName, String fsName, String fsFileName)
        throws FileNotFoundException, IOException {
    DB db = mongoClient.getDB(dbName);
    if (fsName == null) {
        fsName = "fs";
    }
    GridFS fs = new GridFS(db, fsName);
    GridFSDBFile gfile = fs.findOne(fsFileName);
    if (gfile == null) {
        throw new FileNotFoundException("gridfs" + fsFileName);
    }
    InputStream input = gfile.getInputStream();
    try {
        byte[] b = new byte[(int) gfile.getLength()];
        int readCount = 0;
        while (readCount < b.length) {
            // Never request more bytes than remain in the buffer, or
            // read() throws IndexOutOfBoundsException on the last chunk.
            int count = input.read(b, readCount, Math.min(255, b.length - readCount));
            if (count <= 0) {
                break;
            }
            readCount += count;
        }
        return b;
    } finally {
        input.close();
    }

}
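A note on the example above: when the goal is simply the whole file as a byte array, the legacy driver's GridFSDBFile.writeTo(OutputStream) can replace the manual read loop. A minimal sketch, assuming the same gfile as above and a java.io.ByteArrayOutputStream:

ByteArrayOutputStream buffer = new ByteArrayOutputStream((int) gfile.getLength());
gfile.writeTo(buffer); // writeTo drains every chunk into the buffer
byte[] b = buffer.toByteArray();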

From source file:mytubermiserver.mongo.GridFileSystem.java

public InputStream receiveVideo(String fileName) throws UnknownHostException, MongoException, IOException {
    Mongo mongo = new Mongo(hostAddress, portAddress);
    DB db = mongo.getDB("MyTube");

    // create a "video" namespace
    GridFS gfsPhoto = new GridFS(db, "video");

    // get the video file by its filename; findOne returns null when the file is absent
    GridFSDBFile videoForOutput = gfsPhoto.findOne(fileName);
    if (videoForOutput == null) {
        throw new FileNotFoundException("No GridFS file named " + fileName);
    }
    // Note: the Mongo connection is left open so the returned stream stays readable.
    return videoForOutput.getInputStream();

}

From source file:net.tooan.ynpay.third.mongodb.fs.ImageUploader.java

License:Apache License

private InputStream getOriginalInputStream() {
    DBObject query = new BasicDBObject(BuguFS.FILENAME, filename);
    query.put(DIMENSION, null);
    BuguFS fs = new BuguFS(bucketName, chunkSize);
    GridFSDBFile f = fs.findOne(query);
    return f.getInputStream();
}

From source file:net.tooan.ynpay.third.mongodb.fs.UploadedFileServlet.java

License:Apache License

protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    String url = request.getRequestURI();
    int second = url.indexOf(SLASH, 1);
    url = url.substring(second);
    int last = url.lastIndexOf(SLASH);
    String filename = url.substring(last + 1);
    DBObject query = new BasicDBObject(BuguFS.FILENAME, filename);
    query.put(ImageUploader.DIMENSION, null);
    String bucketName = BuguFS.DEFAULT_BUCKET;
    int first = url.indexOf(SLASH);
    if (first != last) {
        String sub = url.substring(first + 1, last);
        String[] arr = sub.split(SLASH);
        for (int i = 0; i < arr.length; i += 2) {
            if (arr[i].equals(BuguFS.BUCKET)) {
                bucketName = arr[i + 1];
            } else {
                query.put(arr[i], arr[i + 1]);
            }
        }
    }
    //check if the bucket is allowed to access by this servlet
    if (!StringUtil.isEmpty(allowBucket) && !allowBucket.equalsIgnoreCase(bucketName)) {
        return;
    }
    if (!StringUtil.isEmpty(forbidBucket) && forbidBucket.equalsIgnoreCase(bucketName)) {
        return;
    }
    BuguFS fs = new BuguFS(bucketName);
    GridFSDBFile f = fs.findOne(query);
    if (f == null) {
        return;
    }
    OutputStream os = response.getOutputStream();
    int fileLength = (int) f.getLength();
    String ext = StringUtil.getExtention(filename);
    response.setContentType(getContentType(ext));
    String range = request.getHeader("Range");
    //normal http request, no "range" in header.
    if (StringUtil.isEmpty(range)) {
        response.setStatus(HttpServletResponse.SC_OK);
        response.setContentLength(fileLength);
        if (needCache(ext)) {
            String modifiedSince = request.getHeader("If-Modified-Since");
            DateFormat df = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
            df.setTimeZone(TimeZone.getTimeZone("GMT"));
            Date uploadDate = f.getUploadDate();
            String lastModified = df.format(uploadDate);
            if (modifiedSince != null) {
                Date modifiedDate = null;
                Date sinceDate = null;
                try {
                    modifiedDate = df.parse(lastModified);
                    sinceDate = df.parse(modifiedSince);
                } catch (ParseException ex) {
                    logger.error("Can not parse the Date", ex);
                }
                if (modifiedDate != null && sinceDate != null && modifiedDate.compareTo(sinceDate) <= 0) {
                    response.setStatus(304); //Not Modified
                    return;
                }
            }
            long maxAge = 365L * 24L * 60L * 60L; //one year, in seconds
            response.setHeader("Cache-Control", "max-age=" + maxAge);
            response.setHeader("Last-Modified", lastModified);
            response.setDateHeader("Expires", uploadDate.getTime() + maxAge * 1000L);
        } else {
            response.setHeader("Pragma", "no-cache");
            response.setHeader("Cache-Control", "no-cache");
            response.setDateHeader("Expires", 0);
        }
        f.writeTo(os);
    }
    //has "range" in header
    else {
        range = range.substring("bytes=".length());
        if (StringUtil.isEmpty(range)) {
            return;
        }
        int begin = 0;
        int end = fileLength - 1;
        String[] rangeArray = range.split("-");
        if (rangeArray.length == 1) {
            begin = Integer.parseInt(rangeArray[0]);
        } else if (rangeArray.length == 2) {
            begin = Integer.parseInt(rangeArray[0]);
            end = Integer.parseInt(rangeArray[1]);
        }
        response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT);
        int contentLength = end - begin + 1;
        response.setContentLength(contentLength);
        response.setHeader("Content-Range", "bytes " + begin + "-" + end + "/" + contentLength);
        InputStream is = f.getInputStream();
        is.skip(begin);
        int read = -1;
        int bufferSize = (int) f.getChunkSize();
        byte[] buffer = new byte[bufferSize];
        int remain = contentLength;
        int readSize = Math.min(bufferSize, remain);
        while ((read = is.read(buffer, 0, readSize)) != -1) {
            os.write(buffer, 0, read);
            remain -= read;
            if (remain <= 0) {
                break;
            }
            readSize = Math.min(bufferSize, remain);
        }
        StreamUtil.safeClose(is);
    }
    StreamUtil.safeClose(os);
}
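A note on the range branch above: per RFC 7233, the total after the slash in Content-Range is the complete resource length (fileLength), not the length of the partial response. Also, is.skip(begin) depends on the driver's stream implementation; recent versions of the legacy driver skip whole chunks without reading them, but that behavior is worth verifying for the driver version in use.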

From source file:nl.minbzk.dwr.zoeken.enricher.processor.TikaProcessor.java

License:Open Source License

/**
 * Retrieve a grid stream from GridFS.
 *
 * @param id the hex string of the stored file's ObjectId
 * @return InputStream
 */
private InputStream retrieveGridStream(final String id) {
    if (this.gridFs == null)
        this.gridFs = new GridFS(dbFactory.getDb());

    GridFSDBFile file = gridFs.find(new ObjectId(id));

    return file.getInputStream();
}

From source file:org.apache.camel.component.gridfs.GridFsConsumer.java

License:Apache License

@Override
public void run() {
    DBCursor c = null;
    java.util.Date fromDate = null;

    QueryStrategy s = endpoint.getQueryStrategy();
    boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
    boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
    boolean usesAttribute = (s == QueryStrategy.FileAttribute || s == QueryStrategy.TimeStampAndFileAttribute
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);

    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            //TODO: Logging
        }
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        fromDate = new java.util.Date();
    }
    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null)
                            .get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false,
                            file, true, false);
                }
                if (forig != null) {
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));

                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA,
                            JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        //System.out.println("Processing " + file.getFilename());
                        if (usesAttribute) {
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp) {
                            if (file.getUploadDate().compareTo(fromDate) > 0) {
                                fromDate = file.getUploadDate();
                                dateModified = true;
                            }
                        }
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            if (persistsTimestamp && dateModified) {
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}

From source file:org.apache.camel.component.gridfs.GridFsProducer.java

License:Apache License

public void process(Exchange exchange) throws Exception {
    String operation = endpoint.getOperation();
    if (operation == null) {
        operation = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_OPERATION, String.class);
    }
    if (operation == null || "create".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        Long chunkSize = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_CHUNKSIZE, Long.class);

        InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
        if (chunkSize != null && chunkSize > 0) {
            gfsFile.setChunkSize(chunkSize);
        }
        final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
        if (ct != null) {
            gfsFile.setContentType(ct);
        }
        String metaData = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_METADATA, String.class);
        if (metaData != null) {
            // Only set metadata when the header is present; parsing a null string would fail.
            gfsFile.setMetaData((DBObject) JSON.parse(metaData));
        }
        gfsFile.save();
        exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
    } else if ("remove".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        endpoint.getGridFs().remove(filename);
    } else if ("findOne".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        GridFSDBFile file = endpoint.getGridFs().findOne(filename);
        if (file != null) {
            exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
            exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
            exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
            exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
            exchange.getIn().setBody(file.getInputStream(), InputStream.class);
        } else {
            throw new FileNotFoundException("No GridFS file for " + filename);
        }
    } else if ("listAll".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
    } else if ("count".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(cursor.count(), Integer.class);
    }

}

From source file:org.apache.manifoldcf.crawler.connectors.gridfs.GridFSRepositoryConnector.java

License:Apache License

/** Process a set of documents.
* This is the method that should cause each document to be fetched, processed, and the results either added
* to the queue of documents for the current job, and/or entered into the incremental ingestion manager.
* The document specification allows this class to filter what is done based on the job.
* The connector will be connected before this method can be called.
*@param documentIdentifiers is the set of document identifiers to process.
*@param statuses are the currently-stored document versions for each document in the set of document identifiers
* passed in above.
*@param activities is the interface this method should use to queue up new document references
* and ingest documents.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one.
*/
@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
        IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
        throws ManifoldCFException, ServiceInterruption {

    for (String documentIdentifier : documentIdentifiers) {

        String versionString;
        GridFS gfs;
        GridFSDBFile document;

        getSession();
        String _id = documentIdentifier;
        gfs = new GridFS(session, bucket);
        document = gfs.findOne(new ObjectId(_id));
        if (document == null) {
            activities.deleteDocument(documentIdentifier);
            continue;
        } else {
            DBObject metadata = document.getMetaData();
            versionString = document.getMD5() + "+"
                    + (metadata != null ? Integer.toString(metadata.hashCode()) : StringUtils.EMPTY);
        }

        if (versionString.length() == 0
                || activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) {
            long startTime = System.currentTimeMillis();
            String errorCode = null;
            String errorDesc = null;
            String version = versionString;
            try {

                if (Logging.connectors.isDebugEnabled()) {
                    Logging.connectors.debug("GridFS: Processing document _id = " + _id);
                }

                DBObject metadata = document.getMetaData();
                if (metadata == null) {
                    errorCode = "NULLMETADATA";
                    errorDesc = "Excluded because document had a null Metadata";
                    Logging.connectors.warn("GridFS: Document " + _id + " has a null metadata - skipping.");
                    activities.noDocument(_id, version);
                    continue;
                }

                String urlValue = document.getMetaData().get(this.url) == null ? StringUtils.EMPTY
                        : document.getMetaData().get(this.url).toString();
                if (!StringUtils.isEmpty(urlValue)) {
                    boolean validURL;
                    try {
                        new java.net.URI(urlValue);
                        validURL = true;
                    } catch (java.net.URISyntaxException e) {
                        validURL = false;
                    }
                    if (validURL) {
                        long fileLength = document.getLength();
                        Date createdDate = document.getUploadDate();
                        String fileName = document.getFilename();
                        String mimeType = document.getContentType();

                        if (!activities.checkURLIndexable(urlValue)) {
                            Logging.connectors.warn(
                                    "GridFS: Document " + _id + " has a URL excluded by the output connector ('"
                                            + urlValue + "') - skipping.");
                            errorCode = activities.EXCLUDED_URL;
                            errorDesc = "Excluded because of URL (" + urlValue + ")";
                            activities.noDocument(_id, version);
                            continue;
                        }

                        if (!activities.checkLengthIndexable(fileLength)) {
                            Logging.connectors.warn("GridFS: Document " + _id
                                    + " has a length excluded by the output connector (" + fileLength
                                    + ") - skipping.");
                            errorCode = activities.EXCLUDED_LENGTH;
                            errorDesc = "Excluded because of length (" + fileLength + ")";
                            activities.noDocument(_id, version);
                            continue;
                        }

                        if (!activities.checkMimeTypeIndexable(mimeType)) {
                            Logging.connectors.warn("GridFS: Document " + _id
                                    + " has a mime type excluded by the output connector ('" + mimeType
                                    + "') - skipping.");
                            errorCode = activities.EXCLUDED_MIMETYPE;
                            errorDesc = "Excluded because of mime type (" + mimeType + ")";
                            activities.noDocument(_id, version);
                            continue;
                        }

                        if (!activities.checkDateIndexable(createdDate)) {
                            Logging.connectors.warn(
                                    "GridFS: Document " + _id + " has a date excluded by the output connector ("
                                            + createdDate + ") - skipping.");
                            errorCode = activities.EXCLUDED_DATE;
                            errorDesc = "Excluded because of date (" + createdDate + ")";
                            activities.noDocument(_id, version);
                            continue;
                        }

                        RepositoryDocument rd = new RepositoryDocument();
                        rd.setCreatedDate(createdDate);
                        rd.setModifiedDate(createdDate);
                        rd.setFileName(fileName);
                        rd.setMimeType(mimeType);
                        String[] aclsArray = null;
                        String[] denyAclsArray = null;
                        if (acl != null) {
                            try {
                                Object aclObject = document.getMetaData().get(acl);
                                if (aclObject != null) {
                                    List<String> acls = (List<String>) aclObject;
                                    aclsArray = acls.toArray(new String[0]);
                                }
                            } catch (ClassCastException e) {
                                // This is bad because security will fail
                                Logging.connectors.warn("GridFS: Document " + _id
                                        + " metadata ACL field doesn't contain List<String> type.");
                                errorCode = "ACLTYPE";
                                errorDesc = "Allow ACL field doesn't contain List<String> type.";
                                throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e);
                            }
                        }
                        if (denyAcl != null) {
                            try {
                                Object denyAclObject = document.getMetaData().get(denyAcl);
                                if (denyAclObject != null) {
                                    List<String> denyAcls = (List<String>) denyAclObject;
                                    denyAcls.add(GLOBAL_DENY_TOKEN);
                                    denyAclsArray = denyAcls.toArray(new String[0]);
                                }
                            } catch (ClassCastException e) {
                                // This is bad because security will fail
                                Logging.connectors.warn("GridFS: Document " + _id
                                        + " metadata DenyACL field doesn't contain List<String> type.");
                                errorCode = "ACLTYPE";
                                errorDesc = "Deny ACL field doesn't contain List<String> type.";
                                throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e);
                            }
                        }
                        rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT, aclsArray, denyAclsArray);

                        InputStream is = document.getInputStream();
                        try {
                            rd.setBinary(is, fileLength);
                            try {
                                activities.ingestDocumentWithException(_id, version, urlValue, rd);
                            } catch (IOException e) {
                                handleIOException(e);
                            }
                        } finally {
                            try {
                                is.close();
                            } catch (IOException e) {
                                handleIOException(e);
                            }
                        }
                        gfs.getDB().getMongo().getConnector().close();
                        session = null;
                        errorCode = "OK";
                    } else {
                        Logging.connectors.warn(
                                "GridFS: Document " + _id + " has a invalid URL: " + urlValue + " - skipping.");
                        errorCode = activities.BAD_URL;
                        errorDesc = "Excluded because document had illegal URL ('" + urlValue + "')";
                        activities.noDocument(_id, version);
                    }
                } else {
                    Logging.connectors.warn("GridFS: Document " + _id + " has a null URL - skipping.");
                    errorCode = activities.NULL_URL;
                    errorDesc = "Excluded because document had a null URL.";
                    activities.noDocument(_id, version);
                }
            } finally {
                if (errorCode != null) {
                    activities.recordActivity(startTime, ACTIVITY_FETCH, document.getLength(), _id, errorCode,
                            errorDesc, null);
                }
            }
        }
    }
}

From source file:org.cleaner.domain.DuplicateStrategy.java

License:Open Source License

@SuppressWarnings("unchecked")
default public void handleDuplicates(Document t, GridFS gridFS) {
    List<ObjectId> ids = (List<ObjectId>) t.get("ids");

    for (int i = 0; i < ids.size() - 1; i++) {
        GridFSDBFile findFile1 = gridFS.find(ids.get(i));
        InputStream is1 = findFile1.getInputStream();
        GridFSDBFile findFile2 = gridFS.find(ids.get(i + 1));
        InputStream is2 = findFile2.getInputStream();
        if (new DuplicateStreamChecker().isDuplicateStream(is1, is2)) {
            duplicateStrategy(findFile1, findFile2);
        } else {
            hashCollisionStrategy(findFile1, findFile2);
        }

    }
}