Example usage for java.io RandomAccessFile read

List of usage examples for java.io RandomAccessFile read

Introduction

On this page you can find an example usage for java.io RandomAccessFile read.

Prototype

public int read(byte b[], int off, int len) throws IOException 

Source Link

Document

Reads up to len bytes of data from this file into an array of bytes.

Usage

From source file:org.commoncrawl.service.crawler.CrawlLog.java

/**
 * Transfers all valid records from a local crawl-log checkpoint file to HDFS via the
 * supplied writer. Records failing their CRC check are logged and skipped by re-seeking
 * one byte past the record's sync position; EOF mid-record aborts the scan without error.
 *
 * @param crawlLogPath local crawl log file to read (opened read/write, as "rw")
 * @param writer       destination sequence-file writer for each recovered CrawlURL
 * @param checkpointId checkpoint identifier, used only in log messages
 * @throws IOException on any I/O failure other than EOF, or when the writer fails
 *                     (wrapped as URLWriterException internally, then rethrown)
 */
private static void transferLocalCheckpointLog(File crawlLogPath, HDFSCrawlURLWriter writer, long checkpointId)
        throws IOException {

    // and open the crawl log file ...
    RandomAccessFile inputStream = null;

    CRC32 crc = new CRC32();
    CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17);
    byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE];

    // save position for potential debug output.
    long lastReadPosition = 0;

    try {
        inputStream = new RandomAccessFile(crawlLogPath, "rw");
        // and a data input stream ...
        RandomAccessFile reader = inputStream;
        // seek to zero
        reader.seek(0L);

        // read the header ...
        LogFileHeader header = readLogFileHeader(reader);

        // read a crawl url from the stream...

        while (inputStream.getFilePointer() < header._fileSize) {

            if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) {

                try {
                    lastReadPosition = inputStream.getFilePointer();

                    // skip sync
                    inputStream.skipBytes(SYNC_BYTES_SIZE);

                    // read length ...
                    int urlDataLen = reader.readInt();
                    long urlDataCRC = reader.readLong();

                    // grow the buffer (in 64K increments) if the record is larger
                    if (urlDataLen > buffer.getBuffer().length) {
                        buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                    }
                    // readFully guarantees the whole record is read; plain read() may
                    // return fewer bytes, which would poison the CRC computation below
                    reader.readFully(buffer.getBuffer(), 0, urlDataLen);
                    crc.reset();
                    crc.update(buffer.getBuffer(), 0, urlDataLen);

                    long computedValue = crc.getValue();

                    // validate crc values ...
                    if (computedValue != urlDataCRC) {
                        LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:"
                                + crawlLogPath.getAbsolutePath() + " Checkpoint Id:" + checkpointId
                                + " FilePosition:" + lastReadPosition);
                        // resync: advance one byte past the bad record's sync position
                        inputStream.seek(lastReadPosition + 1);
                    } else {
                        // allocate a crawl url data structure
                        CrawlURL url = new CrawlURL();
                        DataInputStream bufferReader = new DataInputStream(
                                new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                        // populate it from the (in memory) data stream
                        url.readFields(bufferReader);
                        try {
                            // and write out appropriate sequence file entries ...
                            writer.writeCrawlURLItem(new Text(url.getUrl()), url);
                        } catch (IOException e) {
                            LOG.error("Failed to write CrawlURL to SequenceFileWriter with Exception:"
                                    + CCStringUtils.stringifyException(e));
                            throw new URLWriterException();
                        }
                    }
                } catch (URLWriterException e) {
                    // writer failures are fatal - propagate to caller
                    LOG.error("Caught URLRewriter Exception! - Throwing to outer layer!");
                    throw e;
                } catch (Exception e) {
                    // anything else (bad record, truncated data) is skipped; the next
                    // loop iteration resynchronizes on the sync bytes
                    LOG.error("Ignoring Error Processing CrawlLog Entry at Position:" + lastReadPosition
                            + " Exception:" + CCStringUtils.stringifyException(e));
                }
            } else {
                break;
            }
        }
    } catch (EOFException e) {
        // truncated file - log and return what was transferred so far
        LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath()
                + " Checkpoint Id:" + checkpointId + " FilePosition:" + lastReadPosition);
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        throw e;
    } finally {
        if (inputStream != null)
            inputStream.close();
    }
}

From source file:org.openmeetings.servlet.outputhandler.BackupExport.java

/**
 * Serves a full-system backup as a ZIP download to an authenticated admin user.
 * Builds the backup under {@code <webapp>/upload/backup/backup_<timestamp>}, streams the
 * resulting ZIP to the response, then deletes the backup file and working directory.
 *
 * @param httpServletRequest  request; reads "sid", "includeFileOption", "moduleName"
 * @param httpServletResponse response; receives the ZIP as an attachment
 * @param servletCtx          servlet context, used to resolve the webapp root path
 * @throws ServletException per servlet contract
 * @throws IOException      per servlet contract
 */
public void service(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse,
        ServletContext servletCtx) throws ServletException, IOException {

    String sid = httpServletRequest.getParameter("sid");
    if (sid == null) {
        sid = "default";
    }
    log.debug("sid: " + sid);

    Long users_id = sessionManagement.checkSession(sid);
    Long user_level = userManagement.getUserLevelByID(users_id);

    log.debug("users_id: " + users_id);
    log.debug("user_level: " + user_level);

    if (authLevelManagement.checkAdminLevel(user_level)) {

        // files are included unless the caller explicitly opted out
        String includeFileOption = httpServletRequest.getParameter("includeFileOption");
        boolean includeFiles = includeFileOption == null || "yes".equals(includeFileOption);

        String moduleName = httpServletRequest.getParameter("moduleName");
        if (moduleName == null) {
            moduleName = "moduleName";
        }
        log.debug("moduleName: " + moduleName);

        if (moduleName.equals("backup")) {

            /*
             * ##################### Create Base Folder structure
             */

            String current_dir = servletCtx.getRealPath("/");
            File working_dir = new File(new File(current_dir, OpenmeetingsVariables.UPLOAD_DIR), "backup");

            if (!working_dir.exists()) {
                working_dir.mkdir();
            }

            String dateString = "backup_" + CalendarPatterns.getTimeForStreamId(new Date());

            File backup_dir = new File(working_dir, dateString);
            String requestedFile = dateString + ".zip";
            File backupFile = new File(backup_dir, requestedFile);

            String full_path = backupFile.getAbsolutePath();
            try {
                performExport(full_path, backup_dir, includeFiles, current_dir);

                RandomAccessFile rf = new RandomAccessFile(full_path, "r");
                // try/finally ensures the file handle is released even if the
                // response stream fails mid-transfer (previously it leaked)
                try {
                    httpServletResponse.reset();
                    httpServletResponse.resetBuffer();
                    httpServletResponse.setContentType("APPLICATION/OCTET-STREAM");
                    httpServletResponse.setHeader("Content-Disposition",
                            "attachment; filename=\"" + requestedFile + "\"");
                    httpServletResponse.setHeader("Content-Length", "" + rf.length());

                    OutputStream out = httpServletResponse.getOutputStream();

                    byte[] buffer = new byte[1024];
                    int readed = -1;

                    while ((readed = rf.read(buffer, 0, buffer.length)) > -1) {
                        out.write(buffer, 0, readed);
                    }

                    out.flush();
                    out.close();
                } finally {
                    rf.close();
                }
            } catch (Exception er) {
                log.error("Error exporting: ", er);
            }

            // best-effort cleanup of the generated artifacts
            if (backupFile.exists()) {
                backupFile.delete();
            }

            deleteDirectory(backup_dir);

        }
    } else {
        log.debug("ERROR LangExport: not authorized FileDownload " + (new Date()));
    }
}

From source file:org.commoncrawl.service.crawler.CrawlLog.java

/**
 * Diagnostic walk of a local crawl log: validates each record's CRC and prints a summary
 * line for every 10,000th valid record to stdout. CRC mismatches are logged and skipped
 * by re-seeking one byte past the record's sync position.
 *
 * @param crawlLogPath local crawl log file to scan (opened read/write, as "rw")
 * @param startOffset  optional byte offset to pre-seek to before scanning (0 = header end)
 * @throws IOException on any I/O failure other than EOF
 */
public static void walkCrawlLogFile(File crawlLogPath, long startOffset) throws IOException {

    // and open the crawl log file ...
    RandomAccessFile inputStream = null;

    CRC32 crc = new CRC32();
    CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17);
    byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE];

    // save position for potential debug output.
    long lastReadPosition = 0;

    try {
        inputStream = new RandomAccessFile(crawlLogPath, "rw");

        // and a data input stream ...
        RandomAccessFile reader = inputStream;
        // seek to zero
        reader.seek(0L);

        // read the header ...
        LogFileHeader header = readLogFileHeader(reader);

        System.out.println("Header ItemCount:" + header._itemCount + " FileSize:" + header._fileSize);

        if (startOffset != 0L) {
            System.out.println("Preseeking to:" + startOffset);
            reader.seek(startOffset);
        }

        Configuration conf = new Configuration();

        // read a crawl url from the stream...

        long recordCount = 0;
        while (inputStream.getFilePointer() < header._fileSize) {

            if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) {

                lastReadPosition = inputStream.getFilePointer();

                // skip sync
                inputStream.skipBytes(SYNC_BYTES_SIZE);

                // read length ...
                int urlDataLen = reader.readInt();
                long urlDataCRC = reader.readLong();

                // grow the buffer (in 64K increments) if the record is larger
                if (urlDataLen > buffer.getBuffer().length) {
                    buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                }
                // readFully guarantees the whole record is read; plain read() may
                // return fewer bytes, which would poison the CRC computation below
                reader.readFully(buffer.getBuffer(), 0, urlDataLen);
                crc.reset();
                crc.update(buffer.getBuffer(), 0, urlDataLen);

                long computedValue = crc.getValue();

                // validate crc values ...
                if (computedValue != urlDataCRC) {
                    LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:"
                            + crawlLogPath.getAbsolutePath() + " FilePosition:" + lastReadPosition);
                    // resync: advance one byte past the bad record's sync position
                    inputStream.seek(lastReadPosition + 1);
                } else {
                    // only decode and print every 10,000th record to keep output manageable
                    if (recordCount++ % 10000 == 0) {
                        // allocate a crawl url data structure
                        CrawlURL url = new CrawlURL();
                        DataInputStream bufferReader = new DataInputStream(
                                new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                        // populate it from the (in memory) data stream
                        url.readFields(bufferReader);

                        System.out.println("Record:" + recordCount + " At:" + lastReadPosition + " URL:"
                                + url.getUrl() + " BuffSize:" + urlDataLen + " ContentLen:"
                                + url.getContentRaw().getCount() + " LastModified:"
                                + new Date(url.getLastAttemptTime()).toString());
                    }
                }
            } else {
                break;
            }
        }
    } catch (EOFException e) {
        // truncated file - log and stop the walk
        LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath()
                + " FilePosition:" + lastReadPosition);
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        throw e;
    } finally {
        if (inputStream != null)
            inputStream.close();
    }
}

From source file:ar.com.qbe.siniestros.model.utils.MimeMagic.MagicMatcher.java

/**
 * test to see if this match or any submatches match
 *
 * @param f the file that should be used to test the match
 * @param onlyMimeMatch DOCUMENT ME!//from   w w w.  java2s  .  c om
 *
 * @return the deepest magic match object that matched
 *
 * @throws IOException DOCUMENT ME!
 * @throws UnsupportedTypeException DOCUMENT ME!
 */
/**
 * Tests whether this matcher (and, unless onlyMimeMatch is set, its submatchers) matches
 * the given file. Reads the number of bytes required by the match type starting at the
 * match offset, runs the internal test, and recursively collects matching submatches.
 *
 * @param f             the file that should be used to test the match
 * @param onlyMimeMatch if true, skip submatcher evaluation
 * @return the deepest magic match object that matched, or null if no match
 * @throws IOException              if the file cannot be read fully
 * @throws UnsupportedTypeException if the match type is not recognized
 */
public MagicMatch test(File f, boolean onlyMimeMatch) throws IOException, UnsupportedTypeException {
    log.debug("test(File)");

    int offset = match.getOffset();
    String description = match.getDescription();
    String type = match.getType();
    String mimeType = match.getMimeType();

    log.debug("test(File): testing '" + f.getName() + "' for '" + description + "'");

    log.debug("test(File): \n=== BEGIN MATCH INFO ==");
    log.debug(match.print());
    log.debug("test(File): \n=== END MATCH INFO ====\n");

    RandomAccessFile file = null;
    file = new RandomAccessFile(f, "r");

    try {
        int length = 0;

        // NOTE(review): "short" maps to 4 bytes and "long" to 8 here, which does not
        // match the usual 2/4-byte magic(5) sizes - confirm against testInternal()
        if (type.equals("byte")) {
            length = 1;
        } else if (type.equals("short") || type.equals("leshort") || type.equals("beshort")) {
            length = 4;
        } else if (type.equals("long") || type.equals("lelong") || type.equals("belong")) {
            length = 8;
        } else if (type.equals("string")) {
            length = match.getTest().capacity();
        } else if (type.equals("regex")) {

            // a zero match length means "to end of file"
            final int matchLength = match.getLength();
            length = (matchLength == 0) ? (int) file.length() - offset : matchLength;

            if (length < 0) {
                length = 0;
            }
        } else if (type.equals("detector")) {
            length = (int) file.length() - offset;

            if (length < 0) {
                length = 0;
            }
        } else {
            throw new UnsupportedTypeException("unsupported test type '" + type + "'");
        }

        // we know this match won't work since there isn't enough data for the test
        if (length > (file.length() - offset)) {
            return null;
        }

        byte[] buf = new byte[length];
        file.seek(offset);

        int bytesRead = 0;
        int size = 0;
        boolean done = false;

        while (!done) {
            // read into buf at the current fill position; the previous code always
            // read at offset 0, so a short read overwrote bytes already read
            size = file.read(buf, bytesRead, length - bytesRead);

            if (size == -1) {
                throw new IOException("reached end of file before all bytes were read");
            }

            bytesRead += size;

            if (bytesRead == length) {
                done = true;
            }
        }

        log.debug("test(File): stream size is '" + buf.length + "'");

        MagicMatch match = null;
        MagicMatch submatch = null;

        if (testInternal(buf)) {
            // set the top level match to this one
            try {
                match = getMatch() != null ? (MagicMatch) getMatch().clone() : null;
            } catch (CloneNotSupportedException e) {
                // noop
            }

            log.debug("test(File): testing matched '" + description + "'");

            // set the data on this match
            if ((onlyMimeMatch == false) && (subMatchers != null) && (subMatchers.size() > 0)) {
                log.debug(
                        "test(File): testing " + subMatchers.size() + " submatches for '" + description + "'");

                for (int i = 0; i < subMatchers.size(); i++) {
                    log.debug("test(File): testing submatch " + i);

                    MagicMatcher m = (MagicMatcher) subMatchers.get(i);

                    if ((submatch = m.test(f, false)) != null) {
                        log.debug("test(File): submatch " + i + " matched with '" + submatch.getDescription()
                                + "'");
                        match.addSubMatch(submatch);
                    } else {
                        log.debug("test(File): submatch " + i + " doesn't match");
                    }
                }
            }
        }

        return match;
    } finally {
        // close quietly - a close failure must not mask the real result/exception
        try {
            file.close();
        } catch (Exception ignored) {
        }
    }
}

From source file:org.commoncrawl.service.crawler.CrawlList.java

/**
 * Reads up to desiredReadAmount crawl targets from the domain's on-disk log file,
 * appending them to targetsOut, then rewrites the log header to advance the read cursor
 * (or reset both cursors when the log is drained).
 *
 * @param domain            the crawl list the targets belong to
 * @param logFileName       the per-domain log file to read from
 * @param desiredReadAmount maximum number of targets to read
 * @param targetsOut        list that receives the decoded targets
 * @return the number of targets actually read (0 if the file does not exist)
 * @throws IOException on read failure or if a record fails its CRC check
 */
private static int readTargetsFromLogFile(CrawlList domain, File logFileName, int desiredReadAmount,
        IntrusiveList<CrawlTarget> targetsOut) throws IOException {

    int itemsRead = 0;

    if (logFileName.exists()) {

        RandomAccessFile file = new RandomAccessFile(logFileName, "rw");

        LogFileHeader header = new LogFileHeader();

        try {

            long headerOffset = readLogFileHeader(file, header);

            // seek to the current read position recorded in the header
            if (header._readPos != 0)
                file.seek(header._readPos);

            int itemsToRead = Math.min(desiredReadAmount, header._itemCount);

            PersistentCrawlTarget persistentTarget = new PersistentCrawlTarget();
            CRC32 crc = new CRC32();
            CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 16);

            for (int i = 0; i < itemsToRead; ++i) {
                // read length ... 
                int urlDataLen = file.readInt();
                long urlDataCRC = file.readLong();

                buffer.reset();

                // grow the buffer (in 64K increments) if the record is larger
                if (urlDataLen > buffer.getBuffer().length) {
                    buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                }
                // readFully guarantees the whole record is read; plain read() may
                // return fewer bytes, which would poison the CRC computation below
                file.readFully(buffer.getBuffer(), 0, urlDataLen);
                crc.reset();
                crc.update(buffer.getBuffer(), 0, urlDataLen);

                long computedValue = crc.getValue();

                // validate crc values ... 
                if (computedValue != urlDataCRC) {
                    throw new IOException("Crawl Target Log File Corrupt");
                } else {
                    //populate a persistentTarget from the (in memory) data stream
                    DataInputStream bufferReader = new DataInputStream(
                            new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));

                    persistentTarget.clear();
                    persistentTarget.readFields(bufferReader);

                    //populate a new crawl target structure ... 
                    CrawlTarget newTarget = new CrawlTarget(domain, persistentTarget);

                    targetsOut.addTail(newTarget);
                }
            }

            itemsRead = itemsToRead;

            // now update header ... 
            header._itemCount -= itemsRead;
            // now if item count is non zero ... 
            if (header._itemCount != 0) {
                // set read cursor to next record location 
                header._readPos = file.getFilePointer();
            }
            // otherwise ... 
            else {
                // reset both cursors ... 
                header._readPos = 0;
                header._writePos = 0;
            }

            // now write out header anew ... 
            writeLogFileHeader(file, header);
        } finally {
            if (file != null) {
                file.close();
            }
        }
    }
    return itemsRead;
}

From source file:org.commoncrawl.service.listcrawler.CacheManager.java

/**
 * Checkpoints the local transaction log: renames the active log to a checkpoint file,
 * writes a new active log containing the updated header plus all un-flushed data, loads
 * the freshly-built HDFS index files, and asynchronously purges the flushed fingerprints
 * from the in-memory map. Holds ALL log-access semaphore permits for the duration, so
 * callers are fully excluded. On IOException the checkpoint is rolled back by restoring
 * the old active log.
 *
 * @param bytesToRemove    number of data bytes (past the header) being dropped from the log
 * @param itemsToRemove    number of items being dropped from the log
 * @param flushedTupleList fingerprint/offset tuples to purge from the in-memory map
 * @param tempFileTriples  index/data file pairs produced by the flush, to be registered
 */
private final void flushLocalLog(final long bytesToRemove, final int itemsToRemove,
        final List<FingerprintAndOffsetTuple> flushedTupleList,
        final ArrayList<IndexDataFileTriple> tempFileTriples) {

    LOG.info("Acquiring Log Access Semaphores");
    // first boost this thread's priority ... 
    int originalThreadPriority = Thread.currentThread().getPriority();
    Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
    // next acquire all permits to the local access log ... block until we get there ... 
    getLocalLogAccessSemaphore().acquireUninterruptibly(LOG_ACCESS_SEMAPHORE_COUNT);
    // now that we have all the semaphores we need, reduce the thread's priority to normal
    Thread.currentThread().setPriority(originalThreadPriority);
    LOG.info("Acquired ALL Log Access Semaphores");

    long timeStart = System.currentTimeMillis();

    // now we have exclusive access to the local transaction log ... 
    File activeLogFilePath = getActiveLogFilePath();
    File checkpointLogFilePath = getCheckpointLogFilePath();
    try {
        // delete checkpoint file if it existed ... 
        checkpointLogFilePath.delete();
        // now rename activelog to checkpoint path 
        activeLogFilePath.renameTo(checkpointLogFilePath);

        long logFileConsolidationStartTime = System.currentTimeMillis();
        // now trap for exceptions in case something fails 
        try {
            // fix up the header ... 
            _header._fileSize -= bytesToRemove;
            _header._itemCount -= itemsToRemove;

            // open a old file and new file 
            RandomAccessFile newFile = new RandomAccessFile(activeLogFilePath, "rw");
            RandomAccessFile oldFile = new RandomAccessFile(checkpointLogFilePath, "r");

            LOG.info("Opened new and old files. New Header FileSize is:" + _header._fileSize + " ItemCount:"
                    + _header._itemCount);
            try {
                // write out header ...
                long bytesRemainingInLogFile = _header._fileSize;

                LOG.info("Writing Header to New File. Bytes Remaining for Data are:" + bytesRemainingInLogFile);
                // write header to new file ... 
                _header.writeHeader(newFile);
                // decrement bytes available ... 
                bytesRemainingInLogFile -= LocalLogFileHeader.SIZE;

                if (bytesRemainingInLogFile != 0) {
                    // 16MB transfer buffer
                    byte transferBuffer[] = new byte[(1 << 20) * 16];
                    LOG.info("Seeking old file past flushed data (pos:" + LocalLogFileHeader.SIZE
                            + bytesToRemove + ")");
                    // seek past old data ... 
                    oldFile.seek(LocalLogFileHeader.SIZE + bytesToRemove);
                    // and copy across remaining data 
                    while (bytesRemainingInLogFile != 0) {
                        int bytesToReadWriteThisIteration = Math.min((int) bytesRemainingInLogFile,
                                transferBuffer.length);
                        // readFully: a plain read() can return short, which would have
                        // copied stale buffer bytes into the new log; a short file now
                        // raises EOFException and triggers the rollback path below
                        oldFile.readFully(transferBuffer, 0, bytesToReadWriteThisIteration);
                        newFile.write(transferBuffer, 0, bytesToReadWriteThisIteration);
                        LOG.info("Copied " + bytesToReadWriteThisIteration + " from Old to New");
                        bytesRemainingInLogFile -= bytesToReadWriteThisIteration;
                    }
                }
            } finally {
                if (newFile != null) {
                    newFile.close();
                }
                if (oldFile != null) {
                    oldFile.close();
                }
            }
            // if we reached here then checkpoint was successfull ... 
            LOG.info("Checkpoint - Log Consolidation Successfull! TOOK:"
                    + (System.currentTimeMillis() - logFileConsolidationStartTime));

            LOG.info("Loading Index Files");
            for (IndexDataFileTriple triple : tempFileTriples) {
                LOG.info("Loading Index File:" + triple._localIndexFilePath);
                final HDFSFileIndex fileIndex = new HDFSFileIndex(_remoteFileSystem, triple._localIndexFilePath,
                        triple._dataFilePath);
                LOG.info("Loaded Index File");
                // update hdfs index list ... 
                synchronized (CacheManager.this) {
                    LOG.info("Adding HDFS Index to list");
                    _hdfsIndexList.addElement(fileIndex);
                }
            }

            // create a semaphore to wait on 
            final Semaphore semaphore = new Semaphore(0);

            LOG.info("Scheduling Async Event");
            // now we need to schedule an async call to main thread to update data structures safely ... 
            _eventLoop.setTimer(new Timer(0, false, new Timer.Callback() {

                @Override
                public void timerFired(Timer timer) {
                    LOG.info("Cleaning Map");

                    synchronized (CacheManager.this) {
                        // walk tuples 
                        for (FingerprintAndOffsetTuple tuple : flushedTupleList) {
                            //TODO: HACK!
                            // remove from collection ... 
                            _fingerprintToLocalLogPos.removeAll(tuple._fingerprint);
                        }
                    }
                    LOG.info("Increment Offset Info");
                    // finally increment locallog offset by bytes removed ... 
                    _localLogStartOffset += bytesToRemove;

                    LOG.info("Releasing Wait Semaphore");
                    //release wait sempahore 
                    semaphore.release();
                }
            }));

            LOG.info("Waiting for Async Event to Complete");
            //wait for async operation to complete ...
            semaphore.acquireUninterruptibly();

            LOG.info("Async Event to Completed");
        } catch (IOException e) {
            LOG.error("Checkpoint Failed with Exception:" + CCStringUtils.stringifyException(e));
            // delete new file ... 
            activeLogFilePath.delete();
            // and rename checkpoint file to active file ... 
            checkpointLogFilePath.renameTo(activeLogFilePath);
        }
    } finally {
        LOG.info("Releasing ALL Log Access Semaphores. HELD FOR:" + (System.currentTimeMillis() - timeStart));
        getLocalLogAccessSemaphore().release(LOG_ACCESS_SEMAPHORE_COUNT);
    }
}

From source file:org.openmeetings.servlet.outputhandler.DownloadHandler.java

/**
 * Serves file downloads (room documents, SWF presentations, recordings,
 * profile pictures and chat avatars) to authenticated users.
 *
 * <p>The request must carry a session id ({@code sid}); the session is
 * resolved to a user whose level must be &gt; 0, otherwise the download is
 * refused. The target directory is derived from the {@code moduleName},
 * {@code room_id}, {@code parentPath} and (for the profile/chat modules)
 * {@code remoteUserid} parameters. If the requested file is missing or
 * unreadable, a module-specific default placeholder is served instead. A
 * canonical-path comparison prevents serving files from outside the webapp
 * folder (path traversal). All failures are logged and swallowed so the
 * servlet never propagates an error page to the client.
 *
 * @param httpServletRequest  incoming download request
 * @param httpServletResponse response the file content is streamed to
 * @throws ServletException declared by the servlet API; not thrown directly here
 * @throws IOException      on I/O failures not captured by the outer catch
 */
@Override
protected void service(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse)
        throws ServletException, IOException {

    try {
        // Spring beans may not be wired yet (e.g. during startup); bail out quietly.
        if (getUserManagement() == null || getSessionManagement() == null) {
            return;
        }

        httpServletRequest.setCharacterEncoding("UTF-8");

        log.debug("\nquery = " + httpServletRequest.getQueryString());
        log.debug("\n\nfileName = " + httpServletRequest.getParameter("fileName"));
        log.debug("\n\nparentPath = " + httpServletRequest.getParameter("parentPath"));

        String sid = httpServletRequest.getParameter("sid");
        if (sid == null) {
            sid = "default";
        }
        log.debug("sid: " + sid);

        Long users_id = getSessionManagement().checkSession(sid);
        Long user_level = getUserManagement().getUserLevelByID(users_id);

        if (user_level != null && user_level > 0) {
            String room_id = httpServletRequest.getParameter("room_id");
            if (room_id == null) {
                room_id = "default";
            }

            String moduleName = httpServletRequest.getParameter("moduleName");
            if (moduleName == null) {
                moduleName = "nomodule";
            }

            String parentPath = httpServletRequest.getParameter("parentPath");
            if (parentPath == null) {
                parentPath = "nomodule";
            }

            String requestedFile = httpServletRequest.getParameter("fileName");
            if (requestedFile == null) {
                requestedFile = "";
            }

            String fileExplorerItemIdParam = httpServletRequest.getParameter("fileExplorerItemId");
            Long fileExplorerItemId = null;
            if (fileExplorerItemIdParam != null) {
                fileExplorerItemId = Long.parseLong(fileExplorerItemIdParam);
            }

            // make a complete name out of domain(organisation) + roomname
            String roomName = room_id;
            // trim whitespaces cause it is a directory name
            roomName = StringUtils.deleteWhitespace(roomName);

            // Get the current User-Directory
            String current_dir = getServletContext().getRealPath("/");

            String working_dir = current_dir + OpenmeetingsVariables.UPLOAD_DIR + File.separatorChar;

            // Add the per-module folder; some modules also rewrite requestedFile
            // to a server-side generated name (profile/chat images).
            if (moduleName.equals("lzRecorderApp")) {
                working_dir = current_dir + OpenmeetingsVariables.STREAMS_DIR + File.separatorChar + "hibernate"
                        + File.separatorChar;
            } else if (moduleName.equals("videoconf1")) {
                if (parentPath.length() != 0) {
                    if (parentPath.equals("/")) {
                        working_dir = working_dir + roomName + File.separatorChar;
                    } else {
                        working_dir = working_dir + roomName + File.separatorChar + parentPath
                                + File.separatorChar;
                    }
                } else {
                    working_dir = current_dir + roomName + File.separatorChar;
                }
            } else if (moduleName.equals("userprofile")) {
                working_dir += "profiles" + File.separatorChar;
                logNonExistentFolder(working_dir);

                working_dir += ScopeApplicationAdapter.profilesPrefix + users_id + File.separatorChar;
                logNonExistentFolder(working_dir);
            } else if (moduleName.equals("remoteuserprofile")) {
                working_dir += "profiles" + File.separatorChar;
                logNonExistentFolder(working_dir);

                String remoteUser_id = httpServletRequest.getParameter("remoteUserid");
                if (remoteUser_id == null) {
                    remoteUser_id = "0";
                }

                working_dir += ScopeApplicationAdapter.profilesPrefix + remoteUser_id + File.separatorChar;
                logNonExistentFolder(working_dir);
            } else if (moduleName.equals("remoteuserprofilebig")) {
                working_dir += "profiles" + File.separatorChar;
                logNonExistentFolder(working_dir);

                String remoteUser_id = httpServletRequest.getParameter("remoteUserid");
                if (remoteUser_id == null) {
                    remoteUser_id = "0";
                }

                working_dir += ScopeApplicationAdapter.profilesPrefix + remoteUser_id + File.separatorChar;
                logNonExistentFolder(working_dir);

                requestedFile = this.getBigProfileUserName(working_dir);

            } else if (moduleName.equals("chat")) {
                working_dir += "profiles" + File.separatorChar;
                logNonExistentFolder(working_dir);

                String remoteUser_id = httpServletRequest.getParameter("remoteUserid");
                if (remoteUser_id == null) {
                    remoteUser_id = "0";
                }

                working_dir += ScopeApplicationAdapter.profilesPrefix + remoteUser_id + File.separatorChar;
                logNonExistentFolder(working_dir);

                requestedFile = this.getChatUserName(working_dir);
            } else {
                working_dir = working_dir + roomName + File.separatorChar;
            }

            if (!moduleName.equals("nomodule")) {

                log.debug("requestedFile: " + requestedFile + " current_dir: " + working_dir);

                String full_path = working_dir + requestedFile;

                File f = new File(full_path);

                // If the File does not exist or is not readable show/load a
                // place-holder picture
                if (!f.exists() || !f.canRead()) {
                    // BUG FIX: check existence first. For a nonexistent file
                    // canRead() is also false, so the original logged
                    // "not readable" for missing files and the
                    // "does not exist" branch was unreachable.
                    if (!f.exists()) {
                        log.debug(
                                "LOG DownloadHandler: The request file does not exist / has already been deleted");
                    } else {
                        log.debug("LOG DownloadHandler: The request file is not readable");
                    }
                    log.debug("LOG ERROR requestedFile: " + requestedFile);
                    // replace the path with the default picture/document

                    if (requestedFile.endsWith(".jpg")) {
                        log.debug("LOG endsWith d.jpg");

                        log.debug("LOG moduleName: " + moduleName);

                        requestedFile = DownloadHandler.defaultImageName;
                        if (moduleName.equals("remoteuserprofile")) {
                            requestedFile = DownloadHandler.defaultProfileImageName;
                        } else if (moduleName.equals("remoteuserprofilebig")) {
                            requestedFile = DownloadHandler.defaultProfileImageNameBig;
                        } else if (moduleName.equals("userprofile")) {
                            requestedFile = DownloadHandler.defaultProfileImageName;
                        } else if (moduleName.equals("chat")) {
                            requestedFile = DownloadHandler.defaultChatImageName;
                        }
                        // request for an image
                        full_path = current_dir + "default" + File.separatorChar + requestedFile;
                    } else if (requestedFile.endsWith(".swf")) {
                        requestedFile = DownloadHandler.defaultSWFName;
                        // request for a SWFPresentation
                        full_path = current_dir + "default" + File.separatorChar
                                + DownloadHandler.defaultSWFName;
                    } else {
                        // Any document, must be a download request
                        // OR a Moodle Loggedin User
                        requestedFile = DownloadHandler.defaultImageName;
                        full_path = current_dir + "default" + File.separatorChar
                                + DownloadHandler.defaultImageName;
                    }
                }

                log.debug("full_path: " + full_path);

                File f2 = new File(full_path);
                if (!f2.exists() || !f2.canRead()) {
                    // BUG FIX: the original logged the identical
                    // "does not exist" message in both branches.
                    if (!f2.exists()) {
                        log.debug(
                                "DownloadHandler: The request DEFAULT-file does not exist / has already been deleted");
                    } else {
                        log.debug("DownloadHandler: The request DEFAULT-file is not readable");
                    }
                    // no file to handle abort processing
                    return;
                }
                // Requested file is outside OM webapp folder
                File curDirFile = new File(current_dir);
                if (!f2.getCanonicalPath().startsWith(curDirFile.getCanonicalPath())) {
                    throw new Exception("Invalid file requested: f2.cp == " + f2.getCanonicalPath()
                            + "; curDir.cp == " + curDirFile.getCanonicalPath());
                }

                // Get file and handle download. try-with-resources guarantees
                // the file handle is released even if streaming fails
                // (the original leaked rf on any exception after open).
                try (RandomAccessFile rf = new RandomAccessFile(full_path, "r")) {

                    // Default type - Explorer, Chrome and others
                    int browserType = 0;

                    // Firefox and Opera need the RFC 5987 filename*= form below
                    if (httpServletRequest.getHeader("User-Agent") != null) {
                        if ((httpServletRequest.getHeader("User-Agent").contains("Firefox"))
                                || (httpServletRequest.getHeader("User-Agent").contains("Opera"))) {
                            browserType = 1;
                        }
                    }

                    log.debug("Detected browser type:" + browserType);

                    httpServletResponse.reset();
                    httpServletResponse.resetBuffer();
                    OutputStream out = httpServletResponse.getOutputStream();

                    if (requestedFile.endsWith(".swf")) {
                        // trigger download to SWF => THIS is a workaround for
                        // Flash Player 10, FP 10 does not seem
                        // to accept SWF-Downloads with the Content-Disposition
                        // in the Header
                        httpServletResponse.setContentType("application/x-shockwave-flash");
                        httpServletResponse.setHeader("Content-Length", "" + rf.length());
                    } else {
                        httpServletResponse.setContentType("APPLICATION/OCTET-STREAM");

                        String fileNameResult = requestedFile;
                        if (fileExplorerItemId != null && fileExplorerItemId > 0) {
                            FileExplorerItem fileExplorerItem = getFileExplorerItemDaoImpl()
                                    .getFileExplorerItemsById(fileExplorerItemId);
                            if (fileExplorerItem != null) {
                                // use the stored display name but keep the
                                // on-disk file's 4-char extension (e.g. ".jpg")
                                fileNameResult = fileExplorerItem.getFileName().substring(0,
                                        fileExplorerItem.getFileName().length() - 4)
                                        + fileNameResult.substring(fileNameResult.length() - 4,
                                                fileNameResult.length());
                            }
                        }

                        if (browserType == 0) {
                            httpServletResponse.setHeader("Content-Disposition",
                                    "attachment; filename=" + java.net.URLEncoder.encode(fileNameResult, "UTF-8"));
                        } else {
                            httpServletResponse.setHeader("Content-Disposition", "attachment; filename*=UTF-8'en'"
                                    + java.net.URLEncoder.encode(fileNameResult, "UTF-8"));
                        }

                        httpServletResponse.setHeader("Content-Length", "" + rf.length());
                    }

                    byte[] buffer = new byte[1024];
                    int readed = -1;

                    while ((readed = rf.read(buffer, 0, buffer.length)) > -1) {
                        out.write(buffer, 0, readed);
                    }

                    out.flush();
                    out.close();
                }
            }
        } else {
            System.out.println("ERROR DownloadHandler: not authorized FileDownload " + (new Date()));
        }

    } catch (Exception er) {
        log.error("Error downloading: ", er);
        // er.printStackTrace();
    }
}