Example usage for java.util.logging Logger removeHandler

Introduction

This page collects example usages of java.util.logging.Logger#removeHandler from open-source projects.

Prototype

public void removeHandler(Handler handler) throws SecurityException 

Document

Remove a log Handler.
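
Before the project examples below, a minimal sketch of the typical pattern (hypothetical logger name): attach a handler, log through it, then detach and close it when done.

// Minimal sketch: attach a temporary handler, then detach it when finished.
// (imports: java.util.logging.ConsoleHandler, Handler, Logger)
Logger logger = Logger.getLogger("com.example.demo"); // hypothetical name
Handler handler = new ConsoleHandler();
logger.addHandler(handler);
try {
    logger.info("logged while the handler is attached");
} finally {
    logger.removeHandler(handler); // later records no longer reach the handler
    handler.close();               // release any resources the handler holds
}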

Usage

From source file:com.cyberway.issue.crawler.framework.CrawlController.java

/**
 * Close all log files and remove handlers from loggers.
 */
public void closeLogFiles() {
    for (Iterator i = fileHandlers.keySet().iterator(); i.hasNext();) {
        Logger l = (Logger) i.next();
        GenerationFileHandler gfh = (GenerationFileHandler) fileHandlers.get(l);
        gfh.close();
        l.removeHandler(gfh);
    }
}
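
With generics, the same cleanup can be written over entrySet, avoiding the redundant map lookup per key (a sketch, assuming fileHandlers is a Map<Logger, GenerationFileHandler>):

// Sketch: equivalent cleanup using generics and entrySet.
for (Map.Entry<Logger, GenerationFileHandler> e : fileHandlers.entrySet()) {
    e.getValue().close();
    e.getKey().removeHandler(e.getValue());
}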

From source file:com.cyberway.issue.crawler.framework.CrawlController.java

protected void rotateLogFiles(String generationSuffix) throws IOException {
    if (this.state != PAUSED && this.state != CHECKPOINTING) {
        throw new IllegalStateException("Pause crawl before requesting " + "log rotation.");
    }
    for (Iterator i = fileHandlers.keySet().iterator(); i.hasNext();) {
        Logger l = (Logger) i.next();
        GenerationFileHandler gfh = (GenerationFileHandler) fileHandlers.get(l);
        GenerationFileHandler newGfh = gfh.rotate(generationSuffix, CURRENT_LOG_SUFFIX);
        if (gfh.shouldManifest()) {
            addToManifest((String) newGfh.getFilenameSeries().get(1), MANIFEST_LOG_FILE,
                    newGfh.shouldManifest());
        }
        l.removeHandler(gfh);
        l.addHandler(newGfh);
        fileHandlers.put(l, newGfh);
    }
}
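
Note the ordering: the old handler is detached before its replacement is attached, so a record logged in between would be dropped rather than duplicated; that is safe here only because the crawl is required to be paused. Swapping the two calls inverts the trade-off (a sketch):

// Sketch: attach the new handler first so no record is dropped during the
// swap; a record logged in the overlap window goes to both handlers instead.
l.addHandler(newGfh);
l.removeHandler(gfh);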

From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java

public void exportStudyFilesToLegacySystem(String lastUpdateTime, String authority) {
    // Get the list of studies updated since the last update time
    // (yesterday by default) and export them to the legacy VDC system.

    Logger logger = null;

    String exportLogDirStr = System.getProperty("vdc.export.log.dir");
    if (exportLogDirStr == null) {
        System.out.println("Missing system property: vdc.export.log.dir.  Please add to JVM options");
        return;
    }
    File exportLogDir = new File(exportLogDirStr);
    if (!exportLogDir.exists()) {
        exportLogDir.mkdir();
    }

    logger = Logger.getLogger("edu.harvard.iq.dvn.core.web.servlet.VDCExportServlet");

    // Every time the export runs, we want to write to a separate log file (handler),
    // so if the export has run previously, remove the previous handlers.
    // Iterate over a snapshot of the handlers: removing while re-reading
    // logger.getHandlers() by index would skip entries as the array shrinks
    // (getHandlers() never returns null, so no null check is needed).
    for (Handler h : logger.getHandlers()) {
        logger.removeHandler(h);
    }

    SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd");
    FileHandler handler = null;
    try {
        handler = new FileHandler(
                exportLogDirStr + File.separator + "export_" + formatter.format(new Date()) + ".log");
    } catch (IOException e) {
        throw new EJBException(e);
    }

    // Add handler to the desired logger
    logger.addHandler(handler);

    logger.info("Begin Exporting Studies");
    int studyCount = 0;
    int deletedStudyCount = 0;
    try {

        /* THIS IS LEGACY CODE AND SHOULD BE DELETED
        // For all studies that have been deleted in the dataverse since last export, remove study directory in VDC
                
        String query = "SELECT s from DeletedStudy s where s.authority = '" + authority + "' ";
        List deletedStudies = em.createQuery(query).getResultList();
        for (Iterator it = deletedStudies.iterator(); it.hasNext();) {
        DeletedStudy deletedStudy = (DeletedStudy) it.next();
                
        logger.info("Deleting study " + deletedStudy.getGlobalId());
        Study study = em.find(Study.class, deletedStudy.getId());
        File legacyStudyDir = new File(FileUtil.getLegacyFileDir() + File.separatorChar + study.getAuthority() + File.separatorChar + study.getStudyId());
                
        // Remove files in the directory, then delete the directory.
        File[] studyFiles = legacyStudyDir.listFiles();
        if (studyFiles != null) {
            for (int i = 0; i < studyFiles.length; i++) {
                studyFiles[i].delete();
            }
        }
        legacyStudyDir.delete();
        deletedStudyCount++;
                
        em.remove(deletedStudy);
        }
        */

        // Do export of all studies updated at "lastUpdateTime"

        if (authority == null) {
            authority = vdcNetworkService.find().getAuthority();
        }
        String beginTime = null;
        String endTime = null;
        if (lastUpdateTime == null) {
            Calendar cal = Calendar.getInstance();
            cal.add(Calendar.DAY_OF_YEAR, -1);
            beginTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); // Use yesterday as default value
            cal.add(Calendar.DAY_OF_YEAR, 1);
            endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime());
        } else {
            beginTime = lastUpdateTime;
            Date date = new SimpleDateFormat("yyyy-MM-dd").parse(lastUpdateTime);
            Calendar cal = Calendar.getInstance();
            cal.setTime(date);
            cal.add(Calendar.DAY_OF_YEAR, 1);
            endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime());
        }
        String query = "SELECT s from Study s where s.authority = '" + authority + "' ";
        query += " and s.lastUpdateTime >'" + beginTime + "'";
        //    query+=" and s.lastUpdateTime <'" +endTime+"'";
        query += " order by s.studyId";
        List updatedStudies = em.createQuery(query).getResultList();

        for (Iterator it = updatedStudies.iterator(); it.hasNext();) {
            Study study = (Study) it.next();
            logger.info("Exporting study " + study.getStudyId());

            exportStudyToLegacySystem(study, authority);
            studyCount++;

        }
    } catch (Exception e) {
        logger.severe(e.getMessage());

        String stackTrace = "StackTrace: \n";
        logger.severe("Exception caused by: " + e + "\n");
        StackTraceElement[] ste = e.getStackTrace();
        for (int m = 0; m < ste.length; m++) {
            stackTrace += ste[m].toString() + "\n";
        }
        logger.severe(stackTrace);
    }

    logger.info("End export, " + studyCount + " studies successfully exported, " + deletedStudyCount
            + " studies deleted.");
}
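
One caveat in this method: the FileHandler attached for the run is never detached or closed before the method returns (and the handlers removed at the top are likewise never closed), so each export leaves its log file and .lck lock file open until a later run removes the handler. A try/finally teardown avoids that (a sketch, reusing the method's own names):

// Sketch: guarantee the per-run handler is detached and closed.
logger.addHandler(handler);
try {
    // ... export work ...
} finally {
    logger.removeHandler(handler);
    handler.close();
}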

From source file:org.jenkinsci.remoting.protocol.ProtocolStackTest.java

@Test
public void initSequence() throws IOException {
    Logger logger = Logger.getLogger(ProtocolStack.class.getName());
    CapturingHandler handler = new CapturingHandler();
    assertThat(logger.isLoggable(Level.FINEST), is(false));
    Level oldLevel = logger.getLevel();
    logger.addHandler(handler);
    try {
        logger.setLevel(Level.FINEST);
        assertThat(logger.isLoggable(Level.FINEST), is(true));
        final AtomicInteger state = new AtomicInteger();
        ProtocolStack.on(new NetworkLayer(selector) {

            @Override
            protected void write(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void start() throws IOException {
                state.compareAndSet(0, 1);
            }

            @Override
            public void doCloseSend() throws IOException {

            }

            @Override
            public void doCloseRecv() {

            }

            @Override
            public boolean isSendOpen() {
                return true;
            }

        }).filter(new FilterLayer() {
            @Override
            public void start() throws IOException {
                state.compareAndSet(1, 2);
            }

            @Override
            public void onRecv(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doSend(@NonNull ByteBuffer data) throws IOException {

            }

        }).filter(new FilterLayer() {
            @Override
            public void start() throws IOException {
                state.compareAndSet(2, 3);
            }

            @Override
            public void onRecv(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doSend(@NonNull ByteBuffer data) throws IOException {

            }

        }).named("initSeq").build(new ApplicationLayer<Void>() {
            @Override
            public Void get() {
                return null;
            }

            @Override
            public void onRead(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void start() throws IOException {
                state.compareAndSet(3, 4);
            }

            @Override
            public void onReadClosed(IOException cause) throws IOException {

            }

            @Override
            public boolean isReadOpen() {
                return true;
            }

        });
        assertThat("Init in sequence", state.get(), is(4));
        assertThat(handler.logRecords,
                contains(
                        allOf(hasProperty("message", is("[{0}] Initializing")),
                                hasProperty("parameters", is(new Object[] { "initSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Starting")),
                                hasProperty("parameters", is(new Object[] { "initSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Started")),
                                hasProperty("parameters", is(new Object[] { "initSeq" })))));
    } finally {
        logger.removeHandler(handler);
        logger.setLevel(oldLevel);
    }
}
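
The CapturingHandler used by these tests is not part of the listing; a minimal reconstruction (hypothetical, not the Jenkins source) is a Handler that simply collects every published LogRecord into a list the assertions can inspect:

// Hypothetical sketch of a handler that records what was logged.
// (imports: java.util.logging.Handler, java.util.logging.LogRecord,
//  java.util.List, java.util.concurrent.CopyOnWriteArrayList)
static class CapturingHandler extends Handler {
    final List<LogRecord> logRecords = new CopyOnWriteArrayList<>();

    @Override
    public void publish(LogRecord record) {
        logRecords.add(record);
    }

    @Override
    public void flush() {
    }

    @Override
    public void close() throws SecurityException {
    }
}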

From source file:org.quickserver.net.server.QuickServer.java

/**
 * Sets the console log handler level.
 * @since 1.2
 */
public void setConsoleLoggingLevel(Level level) {
    Logger rlogger = Logger.getLogger("");
    Handler[] handlers = rlogger.getHandlers();

    boolean isConsole = true;
    try {
        if (System.console() == null) {
            isConsole = false;
        }
    } catch (Throwable e) {
        //ignore
    }

    for (int index = 0; index < handlers.length; index++) {
        if (ConsoleHandler.class.isInstance(handlers[index])) {
            if (isConsole == false && level != Level.OFF) {
                System.out.println("QuickServer: You do not have a console.. so turning console logger off..");
                level = Level.OFF;
            }

            if (level == Level.OFF) {
                logger.info("QuickServer: Removing console handler.. ");
                rlogger.removeHandler(handlers[index]);

                handlers[index].setLevel(level);
                handlers[index].close();
            } else {
                handlers[index].setLevel(level);
            }
        }
    }
    if (level == Level.SEVERE)
        consoleLoggingLevel = "SEVERE";
    else if (level == Level.WARNING)
        consoleLoggingLevel = "WARNING";
    else if (level == Level.INFO)
        consoleLoggingLevel = "INFO";
    else if (level == Level.CONFIG)
        consoleLoggingLevel = "CONFIG";
    else if (level == Level.FINE)
        consoleLoggingLevel = "FINE";
    else if (level == Level.FINER)
        consoleLoggingLevel = "FINER";
    else if (level == Level.FINEST)
        consoleLoggingLevel = "FINEST";
    else if (level == Level.OFF)
        consoleLoggingLevel = "OFF";
    else
        consoleLoggingLevel = "UNKNOWN";

    logger.log(Level.FINE, "Set to {0}", level);
}
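
A side note on the if/else chain: for the standard levels, Level.getName() already returns the same strings ("SEVERE", "WARNING", and so on), so the mapping could collapse to a single call; the chain is only needed if nonstandard levels must still map to "UNKNOWN" (a sketch):

// Sketch: standard levels stringify to their own names.
consoleLoggingLevel = level.getName();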

From source file:org.jenkinsci.remoting.protocol.ProtocolStackTest.java

@Test
public void initSequenceFailure() throws IOException {
    Logger logger = Logger.getLogger(ProtocolStack.class.getName());
    CapturingHandler handler = new CapturingHandler();
    assertThat(logger.isLoggable(Level.FINEST), is(false));
    Level oldLevel = logger.getLevel();
    logger.addHandler(handler);
    try {
        logger.setLevel(Level.FINEST);
        assertThat(logger.isLoggable(Level.FINEST), is(true));
        final AtomicInteger state = new AtomicInteger();
        try {
            ProtocolStack.on(new NetworkLayer(selector) {

                @Override
                protected void write(@NonNull ByteBuffer data) throws IOException {

                }

                @Override
                public void start() throws IOException {
                    state.compareAndSet(0, 1);
                }

                @Override
                public void doCloseSend() throws IOException {

                }

                @Override
                public void doCloseRecv() {

                }

                @Override
                public boolean isSendOpen() {
                    return true;
                }

            }).filter(new FilterLayer() {
                @Override
                public void start() throws IOException {
                    state.compareAndSet(1, 2);
                    throw new IOException("boom");
                }

                @Override
                public void onRecv(@NonNull ByteBuffer data) throws IOException {

                }

                @Override
                public void doSend(@NonNull ByteBuffer data) throws IOException {

                }

            }).filter(new FilterLayer() {
                @Override
                public void start() throws IOException {
                    state.set(-2);
                }

                @Override
                public void onRecv(@NonNull ByteBuffer data) throws IOException {

                }

                @Override
                public void doSend(@NonNull ByteBuffer data) throws IOException {

                }

                @Override
                public void onRecvClosed(IOException cause) throws IOException {
                    state.compareAndSet(2, 3);
                    super.onRecvClosed(cause);
                }
            }).named("initSeq").build(new ApplicationLayer<Void>() {
                @Override
                public Void get() {
                    return null;
                }

                @Override
                public void onRead(@NonNull ByteBuffer data) throws IOException {

                }

                @Override
                public void start() throws IOException {
                    state.set(-3);
                }

                @Override
                public void onReadClosed(IOException cause) throws IOException {
                    state.compareAndSet(3, 4);
                }

                @Override
                public boolean isReadOpen() {
                    return true;
                }

            });
            fail("Expecting IOException");
        } catch (IOException e) {
            assertThat(e.getMessage(), is("boom"));
        }
        assertThat(handler.logRecords,
                contains(
                        allOf(hasProperty("message", is("[{0}] Initializing")),
                                hasProperty("parameters", is(new Object[] { "initSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Starting")),
                                hasProperty("parameters", is(new Object[] { "initSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Start failure")),
                                hasProperty("parameters", is(new Object[] { "initSeq" })),
                                hasProperty("thrown", hasProperty("message", is("boom"))))));
        assertThat("Init in sequence", state.get(), is(4));
    } finally {
        logger.removeHandler(handler);
        logger.setLevel(oldLevel);
    }
}

From source file:org.jenkinsci.remoting.protocol.ProtocolStackTest.java

@Test
public void stackCloseSequence() throws IOException {
    Logger logger = Logger.getLogger(ProtocolStack.class.getName());
    CapturingHandler handler = new CapturingHandler();
    assertThat(logger.isLoggable(Level.FINEST), is(false));
    Level oldLevel = logger.getLevel();
    logger.addHandler(handler);
    try {
        logger.setLevel(Level.FINEST);
        assertThat(logger.isLoggable(Level.FINEST), is(true));

        final AtomicInteger state = new AtomicInteger();
        ProtocolStack.on(new NetworkLayer(selector) {

            @Override
            public void start() throws IOException {
            }

            @Override
            protected void write(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doCloseRecv() {
                state.compareAndSet(3, 4);
                onRecvClosed();

            }

            @Override
            public void doCloseSend() throws IOException {
                state.compareAndSet(2, 3);
                doCloseRecv();
            }

            @Override
            public boolean isSendOpen() {
                return true;
            }

        }).filter(new FilterLayer() {
            @Override
            public void start() throws IOException {
            }

            @Override
            public void onRecv(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doSend(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doCloseSend() throws IOException {
                state.compareAndSet(1, 2);
                super.doCloseSend();
            }

            @Override
            public void onRecvClosed(IOException cause) throws IOException {
                state.compareAndSet(4, 5);
                super.onRecvClosed(cause);
            }
        }).filter(new FilterLayer() {
            @Override
            public void start() throws IOException {
            }

            @Override
            public void onRecv(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doSend(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public void doCloseSend() throws IOException {
                state.compareAndSet(0, 1);
                super.doCloseSend();
            }

            @Override
            public void onRecvClosed(IOException cause) throws IOException {
                state.compareAndSet(5, 6);
                super.onRecvClosed(cause);
            }
        }).named("closeSeq").build(new ApplicationLayer<Void>() {
            @Override
            public boolean isReadOpen() {
                return true;
            }

            @Override
            public void onRead(@NonNull ByteBuffer data) throws IOException {

            }

            @Override
            public Void get() {
                return null;
            }

            @Override
            public void start() throws IOException {
            }

            @Override
            public void onReadClosed(IOException cause) throws IOException {
                state.compareAndSet(6, 7);
            }

        }).close();
        assertThat("Close in sequence", state.get(), is(7));
        assertThat(handler.logRecords,
                contains(
                        allOf(hasProperty("message", is("[{0}] Initializing")),
                                hasProperty("parameters", is(new Object[] { "closeSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Starting")),
                                hasProperty("parameters", is(new Object[] { "closeSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Started")),
                                hasProperty("parameters", is(new Object[] { "closeSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Closing")),
                                hasProperty("parameters", is(new Object[] { "closeSeq" }))),
                        allOf(hasProperty("message", is("[{0}] Closed")),
                                hasProperty("parameters", is(new Object[] { "closeSeq" })))));
    } finally {
        logger.removeHandler(handler);
        logger.setLevel(oldLevel);
    }
}
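
All three tests repeat the same attach/configure/try/finally/detach boilerplate around the logger. With JUnit 4 that setup can be factored into an ExternalResource rule (a sketch, not part of the original test class):

// Sketch: a JUnit 4 rule that installs a CapturingHandler at FINEST for each
// test and restores the logger's handler and level afterwards.
public static class LoggerRule extends org.junit.rules.ExternalResource {
    final Logger logger = Logger.getLogger(ProtocolStack.class.getName());
    final CapturingHandler handler = new CapturingHandler();
    private Level oldLevel;

    @Override
    protected void before() {
        oldLevel = logger.getLevel();
        logger.addHandler(handler);
        logger.setLevel(Level.FINEST);
    }

    @Override
    protected void after() {
        logger.removeHandler(handler);
        logger.setLevel(oldLevel);
    }
}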

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

/**
 * Run a harvest for an individual harvesting Dataverse
 * @param dataverseRequest
 * @param harvestingClientId
 * @throws IOException
 */
public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId) throws IOException {
    HarvestingClient harvestingClientConfig = harvestingClientService.find(harvestingClientId);

    if (harvestingClientConfig == null) {
        throw new IOException("No such harvesting client: id=" + harvestingClientId);
    }

    Dataverse harvestingDataverse = harvestingClientConfig.getDataverse();

    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean."
            + harvestingDataverse.getAlias() + logTimestamp);
    String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_"
            + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.setUseParentHandlers(false);
    hdLogger.addHandler(fileHandler);

    PrintWriter importCleanupLog = new PrintWriter(new FileWriter(
            "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));

    List<Long> harvestedDatasetIds = null;

    List<Long> harvestedDatasetIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    List<String> deletedIdentifiers = new ArrayList<String>();

    Date harvestStartTime = new Date();

    try {
        boolean harvestingNow = harvestingClientConfig.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName()
                    + " is currently being harvested.");

        } else {
            harvestingClientService.resetHarvestInProgress(harvestingClientId);
            harvestingClientService.setHarvestInProgress(harvestingClientId, harvestStartTime);

            if (harvestingClientConfig.isOai()) {
                harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger,
                        importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers,
                        harvestedDatasetIdsThisBatch);

            } else {
                throw new IOException("Unsupported harvest type");
            }
            harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(),
                    harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size());
            hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl()
                    + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix());
            hdLogger.log(Level.INFO,
                    "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: "
                            + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size());

            // now index all the datasets we have harvested - created, modified or deleted:
            /* (TODO: may not be needed at all. In Dataverse4, we may be able to get away with the normal 
            reindexing after every import. See the rest of the comments about batch indexing throughout 
            this service bean)
            if (this.processedSizeThisBatch > 0) {
                hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                if (this.harvestedDatasetIdsThisBatch != null) {
                    hdLogger.log(Level.INFO, this.harvestedDatasetIdsThisBatch.size()+" studies in the batch");
                }
                hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
            } else {
                hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
            }
             */
        }
        //mailService.sendHarvestNotification(...getSystemEmail(), harvestingDataverse.getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(), harvestedDatasetIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl()
                + ",format=" + harvestingClientConfig.getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // TODO: 
        // even though this harvesting run failed, we may have had successfully 
        // processed some number of datasets, by the time the exception was thrown. 
        // We should record that number too. And the number of the datasets that
        // had failed, that we may have counted.  -- L.A. 4.4
        harvestingClientService.setHarvestFailure(harvestingClientId, new Date());

    } finally {
        harvestingClientService.resetHarvestInProgress(harvestingClientId);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
        importCleanupLog.close();
    }
}
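
This method shows the full per-run logging idiom: a uniquely named logger, setUseParentHandlers(false) to keep records away from the parent handlers, a FileHandler attached up front, and the handler closed and removed in finally. Condensed (a sketch with hypothetical timestamp and logFileName values):

// Sketch of per-run file logging (hypothetical timestamp/logFileName).
Logger runLogger = Logger.getLogger("com.example.harvest.run." + timestamp);
FileHandler fh = new FileHandler(logFileName); // throws IOException
runLogger.setUseParentHandlers(false); // don't echo to the root console
runLogger.addHandler(fh);
try {
    runLogger.info("run started");
    // ... do the work ...
} finally {
    runLogger.removeHandler(fh);
    fh.close();
}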

From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java

/**
 * Harvest an individual Dataverse.
 * @param dataverseId
 */
public void doHarvesting(Long dataverseId) throws IOException {
    HarvestingDataverse dataverse = em.find(HarvestingDataverse.class, dataverseId);
    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean."
            + dataverse.getVdc().getAlias() + logTimestamp);
    String logFileName = FileUtil.getImportFileDir() + File.separator + "harvest_"
            + dataverse.getVdc().getAlias() + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.addHandler(fileHandler);
    List<Long> harvestedStudyIds = null;

    this.processedSizeThisBatch = 0;
    this.harvestedStudyIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    try {
        boolean harvestingNow = dataverse.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + dataverse.getVdc().getName()
                    + " is currently being harvested.");

        } else {
            harvestingDataverseService.resetHarvestingStatus(dataverse.getId());
            String until = null; // If we don't set until date, we will get all the changes since the last harvest.
            String from = null;
            Date lastSuccessfulHarvestTime = dataverse.getLastSuccessfulHarvestTime();
            if (lastSuccessfulHarvestTime != null) {
                from = formatter.format(lastSuccessfulHarvestTime);
            }
            if (dataverse.isOai() || dataverse.isNesstar()) {
                harvestingDataverseService.setHarvestingNow(dataverse.getId(), true);
                Date currentTime = new Date();
                harvestingDataverseService.setLastHarvestTime(dataverse.getId(), currentTime);

                hdLogger.log(Level.INFO,
                        "BEGIN HARVEST..., oaiUrl=" + dataverse.getServerUrl() + ",set="
                                + dataverse.getHarvestingSet() + ", metadataPrefix="
                                + dataverse.getHarvestFormatType().getMetadataPrefix() + ", from=" + from
                                + ", until=" + until);

                if (dataverse.isOai()) {
                    harvestedStudyIds = harvestOAI(dataverse, hdLogger, from, until, harvestErrorOccurred,
                            failedIdentifiers);

                } else {
                    harvestedStudyIds = harvestNesstar(dataverse, hdLogger, harvestErrorOccurred,
                            failedIdentifiers);
                }
                harvestingDataverseService.setHarvestSuccess(dataverse.getId(), currentTime,
                        harvestedStudyIds.size(), failedIdentifiers.size());
                hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + dataverse.getServerUrl()
                        + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());

                if (harvestedStudyIds.size() > 0) {
                    harvestingDataverseService.setHarvestSuccessNotEmpty(dataverse.getId(), currentTime,
                            harvestedStudyIds.size(), failedIdentifiers.size());
                    hdLogger.log(Level.INFO, "COMPLETED HARVEST with results");
                }
                // now index all studies (need to modify for update)
                if (this.processedSizeThisBatch > 0) {
                    hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                    if (this.harvestedStudyIdsThisBatch != null) {
                        hdLogger.log(Level.INFO,
                                this.harvestedStudyIdsThisBatch.size() + " studies in the batch");
                    }
                    hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                    indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                    hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
                } else {
                    hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
                }
            } else {
                harvestErrorOccurred.setValue(true);
                // harvestedStudyIds is still null on this path; report zero.
                harvestingDataverseService.setHarvestFailure(dataverse.getId(), 0, failedIdentifiers.size());

                hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Unknown harvest type.");
            }
        }
        mailService.sendHarvestNotification(vdcNetworkService.find().getSystemEmail(),
                dataverse.getVdc().getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(),
                harvestedStudyIds == null ? 0 : harvestedStudyIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + dataverse.getServerUrl() + ",format="
                + dataverse.getHarvestFormatType().getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // harvestedStudyIds may still be null if the failure happened early.
        harvestingDataverseService.setHarvestFailure(dataverse.getId(),
                harvestedStudyIds == null ? 0 : harvestedStudyIds.size(), failedIdentifiers.size());

    } finally {
        harvestingDataverseService.setHarvestingNow(dataverse.getId(), false);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
    }
}

From source file:edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java

public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;

    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId =  ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();

        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {

            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger
                    .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            // List the directory once instead of re-listing on every iteration.
            for (File studyDir : batchDir.listFiles()) {
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (File file : studyDir.listFiles()) {
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {

                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }

                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }

                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }

                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);

        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        //   importBatchDir = "";
    }

    return null;
}