Example usage for java.util.logging Logger info

List of usage examples for java.util.logging Logger info

Introduction

On this page you can find example usage for java.util.logging Logger info.

Prototype

public void info(Supplier<String> msgSupplier) 

Document

Log an INFO message, which is only to be constructed if the logging level is such that the message will actually be logged.
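
Note that the examples below all pass an already-built String to info. The Supplier overload in the prototype only builds the message if INFO is actually loggable, which avoids the cost of constructing the string when the level is higher. A minimal, self-contained sketch of that variant (the class and helper names are illustrative, not taken from the sources below):

import java.util.logging.Level;
import java.util.logging.Logger;

public class LazyInfoExample {

    private static final Logger LOGGER = Logger.getLogger(LazyInfoExample.class.getName());

    public static void main(String[] args) {
        // With the level raised to WARNING, INFO is not loggable, so the lambda
        // below is never invoked and buildSummary() never runs.
        LOGGER.setLevel(Level.WARNING);
        LOGGER.info(() -> "Harvest summary: " + buildSummary());

        // With the level back at INFO, the Supplier is evaluated and the message is logged.
        LOGGER.setLevel(Level.INFO);
        LOGGER.info(() -> "Harvest summary: " + buildSummary());
    }

    private static String buildSummary() {
        // Stand-in for an expensive computation (string building, queries, etc.).
        return "42 studies processed";
    }
}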

Usage

From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java

@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public ResumptionTokenType harvestFromIdentifiers(Logger hdLogger, ResumptionTokenType resumptionToken,
        HarvestingDataverse dataverse, String from, String until, List<Long> harvestedStudyIds,
        List<String> failedIdentifiers, MutableBoolean harvestErrorOccurred) throws java.io.IOException,
        ParserConfigurationException, SAXException, TransformerException, JAXBException {
    String encodedSet = dataverse.getHarvestingSet() == null ? null
            : URLEncoder.encode(dataverse.getHarvestingSet(), "UTF-8");
    ListIdentifiers listIdentifiers = null;

    if (resumptionToken == null) {
        listIdentifiers = new ListIdentifiers(dataverse.getServerUrl(), from, until, encodedSet,
                URLEncoder.encode(dataverse.getHarvestFormatType().getMetadataPrefix(), "UTF-8"));
    } else {
        hdLogger.log(Level.INFO, "harvestFromIdentifiers(), resumptionToken=" + resumptionToken.getValue());
        listIdentifiers = new ListIdentifiers(dataverse.getServerUrl(), resumptionToken.getValue());
    }

    Document doc = listIdentifiers.getDocument();

    //       JAXBContext jc = JAXBContext.newInstance("edu.harvard.hmdc.vdcnet.jaxb.oai");
    //       Unmarshaller unmarshaller = jc.createUnmarshaller();
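    // Note: 'unmarshaller' is a JAXB Unmarshaller presumably held as a field of this bean
    // and initialized elsewhere; the commented-out lines above show the equivalent local setup.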
    JAXBElement unmarshalObj = (JAXBElement) unmarshaller.unmarshal(doc);
    OAIPMHtype oaiObj = (OAIPMHtype) unmarshalObj.getValue();

    if (oaiObj.getError() != null && oaiObj.getError().size() > 0) {
        if (oaiObj.getError().get(0).getCode().equals(OAIPMHerrorcodeType.NO_RECORDS_MATCH)) {
            hdLogger.info("ListIdentifiers returned NO_RECORDS_MATCH - no studies found to be harvested.");
        } else {
            handleOAIError(hdLogger, oaiObj,
                    "calling listIdentifiers, oaiServer= " + dataverse.getServerUrl() + ",from=" + from
                            + ",until=" + until + ",encodedSet=" + encodedSet + ",format="
                            + dataverse.getHarvestFormatType().getMetadataPrefix());
            throw new EJBException("Received OAI Error response calling ListIdentifiers");
        }
    } else {
        ListIdentifiersType listIdentifiersType = oaiObj.getListIdentifiers();
        if (listIdentifiersType != null) {
            resumptionToken = listIdentifiersType.getResumptionToken();
            for (Iterator it = listIdentifiersType.getHeader().iterator(); it.hasNext();) {
                HeaderType header = (HeaderType) it.next();
                MutableBoolean getRecordErrorOccurred = new MutableBoolean(false);
                Long studyId = getRecord(hdLogger, dataverse, header.getIdentifier(),
                        dataverse.getHarvestFormatType().getMetadataPrefix(), getRecordErrorOccurred);
                if (studyId != null) {
                    harvestedStudyIds.add(studyId);
                }
                if (getRecordErrorOccurred.booleanValue() == true) {
                    failedIdentifiers.add(header.getIdentifier());
                }

            }

        }
    }
    String logMsg = "Returning from harvestFromIdentifiers";

    if (resumptionToken == null) {
        logMsg += " resumptionToken is null";
    } else if (!StringUtil.isEmpty(resumptionToken.getValue())) {
        logMsg += " resumptionToken is " + resumptionToken.getValue();
    } else {
        // Some OAI servers return an empty resumptionToken element when all
        // the identifiers have been sent, so we need to check for this and
        // treat it as if resumptionToken is null.
        logMsg += " resumptionToken is empty, setting return value to null.";
        resumptionToken = null;
    }
    hdLogger.info(logMsg);
    return resumptionToken;
}

From source file:edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java

public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;

    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId =  ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();

        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {

            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger
                    .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            for (int i = 0; i < batchDir.listFiles().length; i++) {
                File studyDir = batchDir.listFiles()[i];
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (int j = 0; j < studyDir.listFiles().length; j++) {
                        File file = studyDir.listFiles()[j];
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {

                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }

                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }

                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }

                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);

        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        //   importBatchDir = "";
    }

    return null;
}

From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java

public void exportStudies(List<Long> studyIds, String exportFormat) {
    String logTimestamp = exportLogFormatter.format(new Date());
    Logger exportLogger = Logger
            .getLogger("edu.harvard.iq.dvn.core.study.StudyServiceBean.export." + logTimestamp);
    List<Long> harvestedStudyIds = new ArrayList<Long>();
    try {

        exportLogger.addHandler(new FileHandler(
                FileUtil.getExportFileDir() + File.separator + "export_" + logTimestamp + ".log"));
    } catch (IOException e) {

        logger.severe("Exception adding log file handler " + FileUtil.getExportFileDir() + File.separator
                + "export_" + logTimestamp + ".log");
        return;
    }
    try {
        int exportCount = 0;
        exportLogger.info("Begin exporting studies, number of possible studies to export: " + studyIds.size());
        for (Long studyId : studyIds) {
            Study study = em.find(Study.class, studyId);
            if (study.getReleasedVersion() != null) {
                exportLogger.info("Begin export for study " + study.getGlobalId());
                if (exportFormat == null) {
                    studyService.exportStudy(studyId); //TODO check why do we pass the id and not the study
                } else {
                    studyService.exportStudyToFormat(studyId, exportFormat); //TODO check why do we pass the id and not the study
                }
                exportLogger.info("Complete export for study " + study.getGlobalId());
                exportCount++;
            } else {
                exportLogger
                        .info("No released version for study " + study.getGlobalId() + "; skipping export.");
            }
        }
        exportLogger.info("Completed exporting studies. Actual number of studies exported: " + exportCount);
    } catch (EJBException e) {
        logException(e, exportLogger);
        throw e;

    }

}

From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java

public void exportStudyFilesToLegacySystem(String lastUpdateTime, String authority) {
    // Get list of studies that have been updated yesterday,
    // and export them to legacy VDC system

    Logger logger = null;

    String exportLogDirStr = System.getProperty("vdc.export.log.dir");
    if (exportLogDirStr == null) {
        System.out.println("Missing system property: vdc.export.log.dir.  Please add to JVM options");
        return;
    }
    File exportLogDir = new File(exportLogDirStr);
    if (!exportLogDir.exists()) {
        exportLogDir.mkdir();
    }

    logger = Logger.getLogger("edu.harvard.iq.dvn.core.web.servlet.VDCExportServlet");

    // Every time export runs, we want to write to a separate log file (handler).
    // So if export has run previously, remove the previous handler.
    if (logger.getHandlers() != null && logger.getHandlers().length > 0) {
        int numHandlers = logger.getHandlers().length;
        for (int i = 0; i < numHandlers; i++) {
            logger.removeHandler(logger.getHandlers()[i]);
        }
    }

    SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd");
    FileHandler handler = null;
    try {
        handler = new FileHandler(
                exportLogDirStr + File.separator + "export_" + formatter.format(new Date()) + ".log");
    } catch (IOException e) {
        throw new EJBException(e);
    }

    // Add handler to the desired logger
    logger.addHandler(handler);

    logger.info("Begin Exporting Studies");
    int studyCount = 0;
    int deletedStudyCount = 0;
    try {

        /* THIS IS LEGACY CODE AND SHOULD BE DELETED
        // For all studies that have been deleted in the dataverse since last export, remove study directory in VDC
                
        String query = "SELECT s from DeletedStudy s where s.authority = '" + authority + "' ";
        List deletedStudies = em.createQuery(query).getResultList();
        for (Iterator it = deletedStudies.iterator(); it.hasNext();) {
        DeletedStudy deletedStudy = (DeletedStudy) it.next();
                
        logger.info("Deleting study " + deletedStudy.getGlobalId());
        Study study = em.find(Study.class, deletedStudy.getId());
        File legacyStudyDir = new File(FileUtil.getLegacyFileDir() + File.separatorChar + study.getAuthority() + File.separatorChar + study.getStudyId());
                
        // Remove files in the directory, then delete the directory.
        File[] studyFiles = legacyStudyDir.listFiles();
        if (studyFiles != null) {
            for (int i = 0; i < studyFiles.length; i++) {
                studyFiles[i].delete();
            }
        }
        legacyStudyDir.delete();
        deletedStudyCount++;
                
        em.remove(deletedStudy);
        }
        */

        // Do export of all studies updated at "lastUpdateTime"

        if (authority == null) {
            authority = vdcNetworkService.find().getAuthority();
        }
        String beginTime = null;
        String endTime = null;
        if (lastUpdateTime == null) {
            Calendar cal = Calendar.getInstance();
            cal.add(Calendar.DAY_OF_YEAR, -1);
            beginTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); // Use yesterday as default value
            cal.add(Calendar.DAY_OF_YEAR, 1);
            endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime());
        } else {
            beginTime = lastUpdateTime;
            Date date = new SimpleDateFormat("yyyy-MM-dd").parse(lastUpdateTime);
            Calendar cal = Calendar.getInstance();
            cal.setTime(date);
            cal.add(Calendar.DAY_OF_YEAR, 1);
            endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime());
        }
        String query = "SELECT s from Study s where s.authority = '" + authority + "' ";
        query += " and s.lastUpdateTime >'" + beginTime + "'";
        //    query+=" and s.lastUpdateTime <'" +endTime+"'";
        query += " order by s.studyId";
        List updatedStudies = em.createQuery(query).getResultList();

        for (Iterator it = updatedStudies.iterator(); it.hasNext();) {
            Study study = (Study) it.next();
            logger.info("Exporting study " + study.getStudyId());

            exportStudyToLegacySystem(study, authority);
            studyCount++;

        }
    } catch (Exception e) {
        logger.severe(e.getMessage());

        String stackTrace = "StackTrace: \n";
        logger.severe("Exception caused by: " + e + "\n");
        StackTraceElement[] ste = e.getStackTrace();
        for (int m = 0; m < ste.length; m++) {
            stackTrace += ste[m].toString() + "\n";
        }
        logger.severe(stackTrace);
    }

    logger.info("End export, " + studyCount + " studies successfully exported, " + deletedStudyCount
            + " studies deleted.");
}

From source file:org.torproject.ernie.db.ArchiveReader.java

public ArchiveReader(RelayDescriptorParser rdp, String archivesDir, boolean keepImportHistory) {
    int parsedFiles = 0, ignoredFiles = 0;
    Logger logger = Logger.getLogger(ArchiveReader.class.getName());
    SortedSet<String> archivesImportHistory = new TreeSet<String>();
    File archivesImportHistoryFile = new File("stats/archives-import-history");
    if (keepImportHistory && archivesImportHistoryFile.exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(archivesImportHistoryFile));
            String line = null;
            while ((line = br.readLine()) != null) {
                archivesImportHistory.add(line);
            }
            br.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not read in archives import " + "history file. Skipping.");
        }
    }
    if (new File(archivesDir).exists()) {
        logger.fine("Importing files in directory " + archivesDir + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(new File(archivesDir));
        List<File> problems = new ArrayList<File>();
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else {
                if (rdp != null) {
                    try {
                        BufferedInputStream bis = null;
                        if (keepImportHistory && archivesImportHistory.contains(pop.getName())) {
                            ignoredFiles++;
                            continue;
                        } else if (pop.getName().endsWith(".tar.bz2")) {
                            logger.warning(
                                    "Cannot parse compressed tarball " + pop.getAbsolutePath() + ". Skipping.");
                            continue;
                        } else if (pop.getName().endsWith(".bz2")) {
                            FileInputStream fis = new FileInputStream(pop);
                            BZip2CompressorInputStream bcis = new BZip2CompressorInputStream(fis);
                            bis = new BufferedInputStream(bcis);
                        } else {
                            FileInputStream fis = new FileInputStream(pop);
                            bis = new BufferedInputStream(fis);
                        }
                        if (keepImportHistory) {
                            archivesImportHistory.add(pop.getName());
                        }
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        int len;
                        byte[] data = new byte[1024];
                        while ((len = bis.read(data, 0, 1024)) >= 0) {
                            baos.write(data, 0, len);
                        }
                        bis.close();
                        byte[] allData = baos.toByteArray();
                        rdp.parse(allData);
                        parsedFiles++;
                    } catch (IOException e) {
                        problems.add(pop);
                        if (problems.size() > 3) {
                            break;
                        }
                    }
                }
            }
        }
        if (problems.isEmpty()) {
            logger.fine("Finished importing files in directory " + archivesDir + "/.");
        } else {
            StringBuilder sb = new StringBuilder(
                    "Failed importing files in " + "directory " + archivesDir + "/:");
            int printed = 0;
            for (File f : problems) {
                sb.append("\n  " + f.getAbsolutePath());
                if (++printed >= 3) {
                    sb.append("\n  ... more");
                    break;
                }
            }
            logger.warning(sb.toString()); // report the collected failures
        }
    }
    if (keepImportHistory) {
        try {
            archivesImportHistoryFile.getParentFile().mkdirs();
            BufferedWriter bw = new BufferedWriter(new FileWriter(archivesImportHistoryFile));
            for (String line : archivesImportHistory) {
                bw.write(line + "\n");
            }
            bw.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not write archives import " + "history file.");
        }
    }
    logger.info("Finished importing relay descriptors from local " + "directory:\nParsed " + parsedFiles
            + ", ignored " + ignoredFiles + " files.");
}

From source file:edu.harvard.iq.dvn.core.web.admin.OptionsPage.java

private void addFile(File file, String catName, Map<File, String> filesToUpload, Logger importLogger)
        throws Exception {
    if (file.getName() != null && file.getName().startsWith(".")) {
        // ignore hidden files (i.e., files that start with ".")
    } else if (file.isDirectory()) {
        String tempCatName = StringUtil.isEmpty(catName) ? file.getName() : catName + " - " + file.getName();
        for (int j = 0; j < file.listFiles().length; j++) {
            addFile(file.listFiles()[j], tempCatName, filesToUpload, importLogger);
        }
    } else {
        importLogger.info("Attempting to create temp file " + sessionId + "/" + file.getName());
        File tempFile = FileUtil.createTempFile(sessionId, file.getName());
        FileUtil.copyFile(file, tempFile);
        filesToUpload.put(tempFile, catName);
    }
}

From source file:edu.harvard.iq.dvn.core.web.admin.OptionsPage.java

public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;

    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId =  ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();
        sessionId = "batchimportsession";

        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {

            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger
                    .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            for (int i = 0; i < batchDir.listFiles().length; i++) {
                File studyDir = batchDir.listFiles()[i];
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (int j = 0; j < studyDir.listFiles().length; j++) {
                        File file = studyDir.listFiles()[j];
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload, importLogger);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {

                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }

                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }

                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }

                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);

        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        //   importBatchDir = "";
    }

    return null;
}