Example usage for java.io ObjectInputStream close

Introduction

This page lists usage examples for the java.io.ObjectInputStream method close().

Prototype

public void close() throws IOException 

Document

Closes the input stream.
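Calling close() releases the resources held by the stream and also closes the underlying input stream, so a wrapped FileInputStream does not need a separate close. Most of the examples below call close() in a finally block; on Java 7 and later, try-with-resources gives the same guarantee with less code. A minimal sketch (the file name "data.ser" is a placeholder):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

public class ObjectInputStreamCloseExample {

    public static Object readOneObject(String path) throws IOException, ClassNotFoundException {
        // try-with-resources calls close() automatically, even if readObject() throws
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(path))) {
            return in.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        // "data.ser" is a placeholder path assumed to hold one serialized object
        System.out.println(readOneObject("data.ser"));
    }
}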

Usage

From source file:be.fgov.kszbcss.rhq.websphere.config.cache.ConfigQueryCache.java

public void start(int numThreads) {
    synchronized (cache) {
        if (threads != null || stopping) {
            // start has already been called before
            throw new IllegalStateException();
        }
        if (persistentFile.exists()) {
            if (log.isDebugEnabled()) {
                log.debug("Reading persistent cache " + persistentFile);
            }
            try {
                ObjectInputStream in = new ObjectInputStream(new FileInputStream(persistentFile));
                try {
                    for (int i = in.readInt(); i > 0; i--) {
                        ConfigQueryCacheEntry<?> entry = (ConfigQueryCacheEntry<?>) in.readObject();
                        cache.put(entry.query, entry);
                    }
                } finally {
                    in.close();
                }
            } catch (IOException ex) {
                log.error("Failed to read persistent cache data", ex);
            } catch (ClassNotFoundException ex) {
                log.error("Unexpected exception", ex);
            }
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Starting " + numThreads + " worker threads");
    }
    threads = new Thread[numThreads];
    for (int i = 0; i < numThreads; i++) {
        Thread thread = new Thread(this, name + "-query-" + (i + 1));
        threads[i] = thread;
        thread.start();
    }
    timer = new Timer();
    timer.schedule(new TimerTask() {
        @Override
        public void run() {
            persist();
        }
    }, 5 * 60 * 1000);
    // TODO: need another timer that removes entries that are no longer used!
}

From source file:com.atlassian.jira.action.admin.OfbizImportHandler.java

private Object deserialize(final String attr) {
    final ByteArrayInputStream bytes = new ByteArrayInputStream(Base64.decodeBase64(attr));
    try {
        final ObjectInputStream is = new ObjectInputStream(bytes);
        final Object obj = is.readObject();
        is.close();
        return obj;
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
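Note that the helper above closes the stream only on the success path; if readObject() throws, close() is never reached (harmless for an in-memory ByteArrayInputStream, but a leak for file or socket streams). A sketch of the same helper with the close moved into try-with-resources (Base64 is the commons-codec class already used in the excerpt):

private Object deserialize(final String attr) {
    // try-with-resources guarantees close() even when readObject() fails
    try (ObjectInputStream is = new ObjectInputStream(
            new ByteArrayInputStream(Base64.decodeBase64(attr)))) {
        return is.readObject();
    } catch (ClassNotFoundException | IOException e) {
        throw new RuntimeException(e);
    }
}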

From source file:com.ibm.jaggr.service.impl.deps.DepTree.java

/**
 * Object constructor. Attempts to de-serialize the cached dependency lists
 * from disk and then validates the dependency lists based on last-modified
 * dates, looking for any new or removed files. If the cached dependency
 * list data cannot be de-serialized, new lists are constructed. Once the
 * dependency lists have been validated, the list data is serialized back
 * out to disk.
 * 
 * @param paths
 *            Collection of URIs which specify the target resources
 *            to be scanned for javascript files.
 * @param aggregator
 *            The servlet instance for this object
 * @param stamp
 *            timestamp associated with external override/customization 
 *            resources that are checked on every server restart
 * @param clean
 *            If true, then the dependency lists are generated from scratch
 *            rather than by de-serializing and then validating the cached
 *            dependency lists.
 * @param validateDeps
 *            If true, then validate existing cached dependencies using
 *            file last-modified times.
 * @throws IOException
 */
public DepTree(Collection<URI> paths, IAggregator aggregator, long stamp, boolean clean, boolean validateDeps)
        throws IOException {
    this.stamp = stamp;
    IConfig config = aggregator.getConfig();
    rawConfig = config.toString();

    File cacheDir = new File(aggregator.getWorkingDirectory(), DEPCACHE_DIRNAME);
    File cacheFile = new File(cacheDir, CACHE_FILE);

    /*
     * The de-serialized dependency map. If we have a cached dependency map,
     * then it will be validated against the last-modified dates of the
     * current files and only the files that have changed will need to be
     * re-parsed to update the dependency lists.
     */
    DepTree cached = null;

    if (!clean) {
        // If we're not starting clean, try to de-serialize the map from
        // cache
        try {
            ObjectInputStream is = new ObjectInputStream(new FileInputStream(cacheFile));
            try {
                cached = (DepTree) is.readObject();
            } finally {
                try {
                    is.close();
                } catch (Exception ignore) {
                }
            }
        } catch (FileNotFoundException e) {
            /*
             * Not an error. Just means that the cache file hasn't been
             * written yet or else it's been deleted.
             */
            if (log.isLoggable(Level.INFO))
                log.log(Level.INFO, Messages.DepTree_1);
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
        }
    }

    // If the cacheBust config param has changed, then do a clean build
    // of the dependencies.
    if (cached != null) {
        if (stamp == 0) {
            // no init stamp provided.  Preserve the cached one.
            stamp = cached.stamp;
        }
        if (stamp > cached.stamp) {
            // init stamp has been updated.  Validate dependencies.
            validateDeps = true;
        }
        cacheBust = aggregator.getOptions().getCacheBust();
        if (!StringUtils.equals(cacheBust, cached.cacheBust)) {
            if (log.isLoggable(Level.INFO)) {
                log.info(Messages.DepTree_2);
            }
            cached = null;
        }
    }

    /*
     * If we de-serialized a previously saved dependency map, then go with
     * that.
     */
    if (cached != null && rawConfig.equals(cached.rawConfig) && !validateDeps && !clean) {
        depMap = cached.depMap;
        return;
    }

    // Initialize the dependency map
    depMap = new ConcurrentHashMap<URI, DepTreeNode>();

    // This can take a while, so print something to the console
    String msg = MessageFormat.format(Messages.DepTree_3, new Object[] { aggregator.getName() });

    ConsoleService cs = new ConsoleService();
    cs.println(msg);

    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
    // Make sure that all the paths are unique and orthogonal
    paths = DepUtils.removeRedundantPaths(paths);

    /*
     * Create the thread pools, one for the tree builders and one for the
     * parsers. Since a tree builder thread will wait for all the outstanding
     * parser threads started by that builder to complete, we need to use two
     * independent thread pools to guard against the possibility of deadlock
     * caused by all the threads in the pool being consumed by tree builders
     * and leaving none available to service the parsers.
     */
    final ThreadGroup treeBuilderTG = new ThreadGroup(TREEBUILDER_TGNAME),
            parserTG = new ThreadGroup(JSPARSER_TGNAME);
    ExecutorService treeBuilderExc = Executors.newFixedThreadPool(10, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(treeBuilderTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { treeBuilderTG.getName(), treeBuilderTG.activeCount() }));
        }
    }), parserExc = Executors.newFixedThreadPool(20, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(parserTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { parserTG.getName(), parserTG.activeCount() }));
        }
    });

    // Counter to keep track of number of tree builder threads started
    AtomicInteger treeBuilderCount = new AtomicInteger(0);

    // The completion services for the thread pools
    final CompletionService<URI> parserCs = new ExecutorCompletionService<URI>(parserExc);
    CompletionService<DepTreeBuilder.Result> treeBuilderCs = new ExecutorCompletionService<DepTreeBuilder.Result>(
            treeBuilderExc);

    // Start the tree builder threads to process the paths
    for (final URI path : paths) {
        /*
         * Create or get from cache the root node for this path and
         * add it to the new map.
         */
        DepTreeNode root = new DepTreeNode(PathUtil.getModuleName(path));
        DepTreeNode cachedNode = null;
        if (cached != null) {
            cachedNode = cached.depMap.get(path);
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_4, new Object[] { path }));
            }
        } else {
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_5, new Object[] { path }));
            }
        }
        depMap.put(path, root);

        treeBuilderCount.incrementAndGet();
        treeBuilderCs.submit(new DepTreeBuilder(aggregator, parserCs, path, root, cachedNode));
    }

    // List of parser exceptions
    LinkedList<Exception> parserExceptions = new LinkedList<Exception>();

    /*
     * Pull the completed tree builder tasks from the completion queue until
     * all the paths have been processed
     */
    while (treeBuilderCount.decrementAndGet() >= 0) {
        try {
            DepTreeBuilder.Result result = treeBuilderCs.take().get();
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_6,
                        new Object[] { result.parseCount, result.dirName }));
            }
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
            parserExceptions.add(e);
        }
    }

    // shutdown the thread pools now that we're done with them
    parserExc.shutdown();
    treeBuilderExc.shutdown();

    // If parser exceptions occurred, then rethrow the first one 
    if (parserExceptions.size() > 0) {
        throw new RuntimeException(parserExceptions.get(0));
    }

    // Prune dead nodes (nodes with no children or dependency lists)
    for (Map.Entry<URI, DepTreeNode> entry : depMap.entrySet()) {
        entry.getValue().prune();
    }

    /*
     * Make sure the cache directory exists before we try to serialize the
     * dependency map.
     */
    if (!cacheDir.exists())
        if (!cacheDir.mkdirs()) {
            throw new IOException(
                    MessageFormat.format(Messages.DepTree_0, new Object[] { cacheDir.getAbsolutePath() }));
        }

    // Serialize the map to the cache directory
    ObjectOutputStream os;
    os = new ObjectOutputStream(new FileOutputStream(cacheFile));
    try {
        os.writeObject(this);
    } finally {
        try {
            os.close();
        } catch (Exception ignore) {
        }
    }
    msg = MessageFormat.format(Messages.DepTree_7, new Object[] { aggregator.getName() });

    // Output that we're done.
    cs.println(msg);
    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
}

From source file:com.sec.ose.osi.ui.cache.UICache.java

@SuppressWarnings("unchecked")
private void load() {
    File file = new File(CACHE_FILE_NAME);
    if (file.exists() == false) {
        return;
    }

    log.debug("load cache from cache file");

    ObjectInputStream ois = null;
    try {
        ois = new ObjectInputStream(new FileInputStream(file)); // IO, FileNotFound

        this.mMap = (HashMap<Integer, UIEntity>) ois.readObject(); // ClassNotFound

    } catch (FileNotFoundException e) {
        log.warn(e);
    } catch (IOException e) {
        log.warn(e);
        file.delete();
    } catch (ClassNotFoundException e) {
        log.warn(e);
        file.delete();
    } catch (Exception e) {
        log.warn(e);
        file.delete();
    } finally {

        if (ois != null) {
            try {
                ois.close();
            } catch (IOException e) {
                log.warn(e);
            }
        }
        ois = null;
    }
}

From source file:com.mvdb.etl.dao.impl.JdbcGenericDAO.java

private Metadata readMetadata(String objectName, File snapshotDirectory)
        throws IOException, ClassNotFoundException {
    Metadata metadata = new Metadata();
    FileInputStream fis = null;
    ObjectInputStream ois = null;
    try {
        String structFileName = "schema-" + objectName + ".dat";
        File structFile = new File(snapshotDirectory, structFileName);
        fis = new FileInputStream(structFile);
        ois = new ObjectInputStream(fis);
        metadata = (Metadata) ois.readObject();
        return metadata;
    } finally {
        if (fis != null) {
            fis.close();
        }
        if (ois != null) {
            ois.close();
        }
    }

}

From source file:com.aurel.track.exchange.excel.ExcelImportAction.java

/**
 * Execute the import from Excel.
 * @return
 */
public String excelImport() {
    String excelMappingsDirectory = AttachBL.getExcelImportDirBase() + personID;
    Workbook workbook = ExcelFieldMatchBL.loadWorkbook(excelMappingsDirectory, fileName);
    Set<Integer> lastSavedIdentifierFieldIDIsSet = null;
    Map<String, Integer> columNameToFieldIDMap = null;
    try {
        File file = new File(excelMappingsDirectory, mappingFileName);
        FileInputStream fileInputStream = new FileInputStream(file);
        ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream);
        columNameToFieldIDMap = (Map<String, Integer>) objectInputStream.readObject();
        lastSavedIdentifierFieldIDIsSet = (Set<Integer>) objectInputStream.readObject();
        objectInputStream.close();
    } catch (FileNotFoundException e) {
        LOGGER.warn("Creating the input stream for mapping failed with " + e.getMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    } catch (IOException e) {
        LOGGER.warn("Saving the mapping failed with " + e.getMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    } catch (ClassNotFoundException e) {
        LOGGER.warn("Class not found for  the mapping " + e.getMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    }

    if (workbook == null) {
        JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageJSON(
                ImportJSON.ERROR_CODES.ERROR_MESSAGE, getText("admin.actions.importTp.err.uploadAgain"), true));
        return null;
    }
    if (columNameToFieldIDMap == null) {
        //for example the sheet contains no columns at all
        columNameToFieldIDMap = new HashMap<String, Integer>();
    }
    try {
        Map<Integer, String> columnIndexToColumNameMap = ExcelFieldMatchBL.getFirstRowHeaders(workbook,
                selectedSheet);
        Map<Integer, Integer> columnIndexToFieldIDMap = ExcelImportBL
                .getColumnIndexToFieldID(columNameToFieldIDMap, columnIndexToColumNameMap);
        Map<Integer, Integer> fieldIDToColumnIndexMap = ExcelImportBL.reverseMap(columnIndexToFieldIDMap);
        List<ErrorData> errorDataList = ExcelImportBL.validateRequiredColumns(workbook, selectedSheet,
                fieldIDToColumnIndexMap, lastSavedIdentifierFieldIDIsSet, invalidValueHandlingMap,
                defaultValuesMap, locale);
        if (!errorDataList.isEmpty()) {
            //required columns are missing: do not disable the Finish button and do not delete
            //the file because it may be solved by stepping back and forth in the wizard
            JSONUtility.encodeJSON(servletResponse,
                    ImportJSON.importErrorMessageListJSON(
                            ErrorHandlerJSONAdapter.handleErrorList(errorDataList, locale),
                            ImportJSON.ERROR_CODES.ERROR_MESSAGES, false));
            return null;
        } else {
            //delete the file for this case because it
            //either results in an error which should be resolved in the Excel file
            //consequently a new upload cannot be avoided before re-import
            //or everything is fine and in this case no new import is needed
            //with any other return a
            //grid errors
            Map<Integer, SortedMap<Integer, SortedMap<String, ErrorData>>> gridErrorsMap = new HashMap<Integer, SortedMap<Integer, SortedMap<String, ErrorData>>>();
            //row errors
            Map<Integer, SortedSet<Integer>> rowErrorsMap = new HashMap<Integer, SortedSet<Integer>>();
            Map<Integer, SortedSet<Integer>> requiredFieldErrorsMap = new HashMap<Integer, SortedSet<Integer>>();
            Map<Integer, Map<Integer, List<Integer>>> rowNoToPseudoFieldsOriginal = new HashMap<Integer, Map<Integer, List<Integer>>>();
            Map<Integer, Map<Integer, List<Integer>>> rowNoToPseudoFieldsExcel = new HashMap<Integer, Map<Integer, List<Integer>>>();
            Map<Integer, Integer> rowToParentRow = new HashMap<Integer, Integer>();
            SortedMap<Integer, TWorkItemBean> workItemBeansMap = ExcelImportBL.getAndValidateGridData(workbook,
                    selectedSheet, personID, locale, columnIndexToFieldIDMap, fieldIDToColumnIndexMap,
                    lastSavedIdentifierFieldIDIsSet, defaultValuesMap, invalidValueHandlingMap,
                    rowNoToPseudoFieldsOriginal, rowNoToPseudoFieldsExcel, gridErrorsMap, rowErrorsMap,
                    requiredFieldErrorsMap, rowToParentRow);
            Collection<TWorkItemBean> workItemBeans = workItemBeansMap.values();
            if (gridErrorsMap.isEmpty() && rowErrorsMap.isEmpty() && requiredFieldErrorsMap.isEmpty()) {
                List<Integer> alreadyExistingRows = ExcelImportBL.getExistingWorkItemRows(workItemBeans);
                //already existing rows with the same synopsis, project, issueType and release scheduled
                //(independently of the identifierFieldIDs) to avoid importing the same new issues more than once
                //(only the not found i.e. new issues are tested)
                if (!alreadyExistingRows.isEmpty()) {
                    JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageJSON(
                            ImportJSON.ERROR_CODES.ERROR_MESSAGE,
                            LocalizeUtil.getParametrizedString("admin.actions.importExcel.err.existingRows",
                                    new String[] { MergeUtil.getMergedString(alreadyExistingRows, ", ") },
                                    locale),
                            true));
                    return null;
                } else {
                    Set<Integer> presentFieldIDs = ExcelImportBL.getPresentFields(columNameToFieldIDMap);
                    presentFieldIDs.addAll(FieldsManagerRT.getRequiredSystemFieldsList());
                    //the explicit change of this field is not allowed
                    presentFieldIDs.remove(SystemFields.LASTMODIFIEDDATE);
                    presentFieldIDs.remove(SystemFields.CREATEDATE);
                    Map<Integer, Map<Integer, Map<Integer, TFieldConfigBean>>> projectsIssueTypesFieldConfigsMap = FieldRuntimeBL
                            .getFieldConfigsForWorkItemBeans(workItemBeans, presentFieldIDs, locale);
                    Map<Integer, Map<Integer, Map<String, Object>>> projectsIssueTypesFieldSettingsMap = FieldRuntimeBL
                            .getFieldSettingsForFieldConfigs(projectsIssueTypesFieldConfigsMap);
                    Map<Integer, WorkItemContext> existingIssueContextsMap = FieldsManagerRT
                            .createImportContext(workItemBeans, presentFieldIDs,
                                    projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap, null,
                                    null, personID, locale);
                    SortedMap<Integer, List<ErrorData>> validationErrorsMap = FieldsManagerRT.validateWorkItems(
                            workItemBeans, presentFieldIDs, existingIssueContextsMap,
                            projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap, personID,
                            locale);
                    //validation errors: either grid (workItem and field) or row (workItem) level errors. There is a chance to resolve the problems
                    //without modifying the excel file: for ex. by setting further/other default values
                    if (!validationErrorsMap.isEmpty()) {
                        List<String> rowErrors = ExcelImportBL.renderRowErrors(validationErrorsMap,
                                fieldIDToColumnIndexMap, locale);
                        JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageListJSON(rowErrors,
                                ImportJSON.ERROR_CODES.ERROR_MESSAGES, false));
                        return null;
                    } else {
                        if (overwriteMap == null) {
                            overwriteMap = new HashMap<String, Boolean>();
                        }
                        SortedMap<Integer, SortedMap<Integer, Map<Integer, Object>>> confictsMap = ExcelImportBL
                                .conflictResolutionWorkItems(workItemBeans, presentFieldIDs,
                                        existingIssueContextsMap, projectsIssueTypesFieldConfigsMap,
                                        columnIndexToColumNameMap, fieldIDToColumnIndexMap, personID, locale,
                                        overwriteMap);
                        if (confictsMap != null && !confictsMap.isEmpty()) {
                            //render conflicts
                            //do not disable Finish and do not delete the file instead resolve the conflicts and import again
                            JSONUtility.encodeJSON(servletResponse,
                                    ExcelImportJSON.getExcelConflictsJSON(confictsMap, locale, false));
                            return null;
                        } else {
                            //no conflicts or conflict handling is set (overwriteMap was submitted)
                            List<ErrorData> errorsList = new ArrayList<ErrorData>();
                            ImportCounts importCounts = FieldsManagerRT.saveWorkItems(workItemBeansMap,
                                    presentFieldIDs, existingIssueContextsMap,
                                    projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap,
                                    rowNoToPseudoFieldsOriginal, rowNoToPseudoFieldsExcel, rowToParentRow,
                                    personID, locale, errorsList);
                            if (!errorsList.isEmpty()) {
                                JSONUtility.encodeJSON(servletResponse,
                                        ImportJSON.importErrorMessageListJSON(
                                                ErrorHandlerJSONAdapter.handleErrorList(errorsList, locale),
                                                ImportJSON.ERROR_CODES.ERROR_MESSAGES, true));
                                return null;
                            }
                            JSONUtility.encodeJSON(servletResponse,
                                    ImportJSON.importMessageJSON(true,
                                            LocalizeUtil.getParametrizedString(
                                                    "admin.actions.importExcel.message.importResult",
                                                    new String[] {
                                                            Integer.valueOf(importCounts.getNoOfCreatedIssues())
                                                                    .toString(),
                                                            Integer.valueOf(importCounts.getNoOfUpdatedIssues())
                                                                    .toString() },
                                                    locale),
                                            true, locale));
                            //successful import, delete the file
                            File file = new File(excelMappingsDirectory, fileName);
                            file.delete();
                            return null;
                        }
                    }
                }
            } else {
                //grid or row errors
                Map<Integer, List<String>> gridErrorsForJsonMap = null;
                if (!gridErrorsMap.isEmpty()) {
                    gridErrorsForJsonMap = ExcelImportBL.getGridErrorsForJsonMap(gridErrorsMap, locale);
                }
                Map<String, String> rowErrorsForJsonMap = null;
                if (!rowErrorsMap.isEmpty()) {
                    rowErrorsForJsonMap = ExcelImportBL.getRowErrorsForJsonMap(rowErrorsMap);
                }
                List<String> requiredFieldErrorsList = null;
                if (!requiredFieldErrorsMap.isEmpty()) {
                    requiredFieldErrorsList = ExcelImportBL
                            .getMissingRequiredFieldErrorsForJsonMap(requiredFieldErrorsMap, locale);
                }
                JSONUtility.encodeJSON(servletResponse, ExcelImportJSON.getExcelWrongGridValuesJSON(
                        gridErrorsForJsonMap, rowErrorsForJsonMap, requiredFieldErrorsList, locale, true));
            }
        }
    } catch (Exception e) {
        addActionError(getText("admin.actions.importTp.err.failed"));
        LOGGER.error(ExceptionUtils.getStackTrace(e));
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importErrorMessageJSON(ImportJSON.ERROR_CODES.ERROR_MESSAGE, LocalizeUtil
                        .getLocalizedTextFromApplicationResources("admin.actions.importTp.err.failed", locale),
                        true));
    }
    //delete the uploaded excel file
    return null;
}

From source file:com.ibm.jaggr.service.impl.cache.CacheManagerImpl.java

/**
 * Starts up the cache. Attempts to de-serialize a previously serialized
 * cache from disk and starts the periodic serializer task.
 *
 * @param aggregator
 *            the aggregator instance this cache manager belongs to
 * @param stamp
 *            a time stamp used to determine if the cache should be cleared.
 *            The cache should be cleared if the time stamp is later than
 *            the one associated with the cached resources.
 * @throws IOException
 */
public CacheManagerImpl(IAggregator aggregator, long stamp) throws IOException {

    _directory = new File(aggregator.getWorkingDirectory(), CACHEDIR_NAME);
    _aggregator = aggregator;
    // Make sure the cache directory exists
    if (!_directory.exists()) {
        if (!_directory.mkdirs()) {
            throw new IOException(MessageFormat.format(Messages.CacheManagerImpl_0,
                    new Object[] { _directory.getAbsoluteFile() }));
        }
    }
    // Attempt to de-serialize the cache from disk
    CacheImpl cache = null;
    try {
        File file = new File(_directory, CACHE_META_FILENAME);
        ObjectInputStream is = new ObjectInputStream(new FileInputStream(file));
        try {
            cache = (CacheImpl) is.readObject();
        } finally {
            try {
                is.close();
            } catch (Exception ignore) {
            }
        }
    } catch (FileNotFoundException e) {
        if (log.isLoggable(Level.INFO))
            log.log(Level.INFO, Messages.CacheManagerImpl_1);
    } catch (InvalidClassException e) {
        if (log.isLoggable(Level.INFO))
            log.log(Level.INFO, Messages.CacheManagerImpl_2);
        // one or more of the serializable classes has changed.  Delete the stale
        // cache files
    } catch (Exception e) {
        if (log.isLoggable(Level.SEVERE))
            log.log(Level.SEVERE, e.getMessage(), e);
    }
    if (cache != null) {
        _control = (CacheControl) cache.getControlObj();
    }
    if (_control != null) {
        // stamp == 0 means no overrides.  Need to check for this explicitly
        // in case the overrides directory has been removed.
        if (stamp == 0 && _control.initStamp == 0 || stamp != 0 && stamp <= _control.initStamp) {
            // Use AggregatorProxy so that getCacheManager will return non-null
            // if called from within setAggregator.  Need to do this because
            // IAggregator.getCacheManager() is unable to return this object
            // since it is still being constructed.
            cache.setAggregator(AggregatorProxy.newInstance(_aggregator, this));
            _cache.set(cache);
        }
    } else {
        _control = new CacheControl();
        _control.initStamp = stamp;
    }

    // Start up the periodic serializer task.  Serializes the cache every 10 minutes.
    // This is done so that we can recover from an unexpected shutdown
    aggregator.getExecutors().getScheduledExecutor().scheduleAtFixedRate(new Runnable() {
        public void run() {
            try {
                File file = new File(_directory, CACHE_META_FILENAME);
                // Synchronize on the cache object to keep the scheduled cache sync thread and
                // the thread processing servlet destroy from colliding.
                synchronized (cacheSerializerSyncObj) {
                    ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(file));
                    try {
                        os.writeObject(_cache.get());
                    } finally {
                        try {
                            os.close();
                        } catch (Exception ignore) {
                        }
                    }
                }
            } catch (Exception e) {
                if (log.isLoggable(Level.SEVERE))
                    log.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }, 10, 10, TimeUnit.MINUTES);

    Properties dict;
    BundleContext bundleContext = aggregator.getBundleContext();
    if (bundleContext != null) {
        // Register listeners
        dict = new Properties();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _shutdownListener = bundleContext.registerService(IShutdownListener.class.getName(), this, dict);

        dict = new Properties();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _configUpdateListener = bundleContext.registerService(IConfigListener.class.getName(), this, dict);

        dict = new Properties();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _depsUpdateListener = bundleContext.registerService(IDependenciesListener.class.getName(), this, dict);

        dict = new Properties();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _optionsUpdateListener = bundleContext.registerService(IOptionsListener.class.getName(), this, dict);
        optionsUpdated(aggregator.getOptions(), 1);
        configLoaded(aggregator.getConfig(), 1);
        dependenciesLoaded(aggregator.getDependencies(), 1);
    }

    // Now invoke the listeners for objects that have already been initialized
    IOptions options = _aggregator.getOptions();
    if (options != null) {
        optionsUpdated(options, 1);
    }
    IConfig config = _aggregator.getConfig();
    if (config != null) {
        configLoaded(config, 1);
    }

    IDependencies deps = _aggregator.getDependencies();
    if (deps != null) {
        dependenciesLoaded(deps, 1);
    }
}

From source file:de.javakaffee.web.msm.serializer.javolution.AaltoTranscoderTest.java

private StandardSession javaRoundtrip(final StandardSession session,
        final MemcachedBackupSessionManager manager) throws IOException, ClassNotFoundException {

    final long start1 = System.nanoTime();
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    final ObjectOutputStream oos = new ObjectOutputStream(bos);
    session.writeObjectData(oos);
    oos.close();
    bos.close();
    System.out.println("java-ser took " + (System.nanoTime() - start1) / 1000);

    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    final ObjectInputStream ois = new ObjectInputStream(bis);
    final StandardSession readSession = manager.createEmptySession();
    readSession.readObjectData(ois);
    ois.close();
    bis.close();

    return readSession;
}

From source file:jfs.sync.encryption.JFSEncryptedFile.java

/**
 * @see JFSFile#getLength()
 */
@Override
public final long getLength() {
    if (fileInfo.getSize() < 0) {
        try {
            // TODO: move this to storage layer?
            InputStream fis = fileProducer.getInputStream(getRelativePath());
            ObjectInputStream ois = new ObjectInputStream(fis);
            JFSEncryptedStream.readMarker(ois);
            fileInfo.setSize(JFSEncryptedStream.readLength(ois));
            if (log.isDebugEnabled()) {
                log.debug("getLength(" + getRelativePath() + ") detected plain text length "
                        + fileInfo.getSize());
            } // if
            ois.close();
        } catch (Exception e) {
            // TODO: what to do now?!?!?!
            log.error("getLength() could not detect plain text length for " + getPath(), e);
        } // try/catch
    } // if
    return fileInfo.getSize();
}

From source file:org.apache.lucene.replicator.http.HttpClientBase.java

protected void throwKnownError(HttpResponse response, StatusLine statusLine) throws IOException {
    ObjectInputStream in = null;
    try {
        in = new ObjectInputStream(response.getEntity().getContent());
    } catch (Throwable t) {
        // the response stream is not an exception - could be an error in servlet.init().
        throw new RuntimeException("Unknown error: " + statusLine, t);
    }

    Throwable t;
    try {
        t = (Throwable) in.readObject();
    } catch (Throwable th) {
        throw new RuntimeException("Failed to read exception object: " + statusLine, th);
    } finally {
        in.close();
    }
    IOUtils.reThrow(t);
}