Example usage for java.util.logging Logger severe

Introduction

This page collects usage examples for java.util.logging Logger.severe from real-world source files.

Prototype

public void severe(Supplier<String> msgSupplier) 

Document

Log a SEVERE message, which is only to be constructed if the logging level is such that the message will actually be logged.
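
All of the project examples below use the String overload, severe(String msg). The prototype above is the Supplier overload added in Java 8, whose point is that the message is built only when SEVERE would actually be published. A minimal standalone sketch of the difference; expensiveDiagnostics() is a hypothetical stand-in for costly message construction:

import java.util.logging.Level;
import java.util.logging.Logger;

public class SevereSupplierDemo {
    private static final Logger LOGGER = Logger.getLogger(SevereSupplierDemo.class.getName());

    public static void main(String[] args) {
        LOGGER.setLevel(Level.OFF); // nothing below will be published

        // Eager: the argument is concatenated even though it is never logged.
        LOGGER.severe("disk failure: " + expensiveDiagnostics());

        // Lazy: the Supplier runs only if SEVERE would actually be published,
        // so expensiveDiagnostics() is skipped entirely here.
        LOGGER.severe(() -> "disk failure: " + expensiveDiagnostics());
    }

    private static String expensiveDiagnostics() {
        return "node-1"; // stand-in for expensive work
    }
}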

Usage

From source file:org.usrz.libs.logging.LevelInfoTest.java

@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));

}
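
The test above asserts that with an INFO-oriented configuration, CONFIG and above are published while FINE and below are filtered. The original project wires that threshold through its own test appender and logging bridge; a plain-JUL sketch reproducing the same pattern of assertions, using only standard APIs:

import java.util.logging.Level;
import java.util.logging.Logger;

public class LevelGateDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("demo");
        logger.setLevel(Level.CONFIG); // FINE and below are filtered out

        System.out.println(logger.isLoggable(Level.FINE));   // false
        System.out.println(logger.isLoggable(Level.CONFIG)); // true
        System.out.println(logger.isLoggable(Level.SEVERE)); // true

        // SEVERE is the highest standard level, so severe() passes every
        // threshold short of Level.OFF.
        logger.severe("always published unless logging is off");
    }
}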

From source file:org.usrz.libs.logging.LevelWarningTest.java

@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasNoLastEvent("at Config level");
    assertFalse(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasNoLastEvent("at Info level");
    assertFalse(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));

}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param folds
 * @param agents
 * @param originalDatasetPath
 * @param outputDir
 * @param scenario
 * @param logger
 */
private void createExperimentInfoFile(int folds, int agents, String originalDatasetPath, String outputDir,
        String scenario, Logger logger) {

    try {
        String fileName = outputDir + "/" + agents + "agents/experiment.info";
        File file = new File(fileName);
        File parent = file.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        if (!file.exists()) {
            FileWriter fw = new FileWriter(file);
            fw.write("Scenario: " + scenario + "\n");
            fw.write("Number of folds: " + Integer.toString(folds) + "\n");
            fw.write("Number of Agents: " + Integer.toString(agents) + "\n");
            fw.write("Original dataset: " + originalDatasetPath + "\n");
            fw.write("Experiment dataset folder: " + outputDir + "\n");
            fw.close();
        }

    } catch (Exception e) {
        logger.severe(e.getMessage());
        System.exit(1);
    }
}
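
A caveat about the logger.severe(e.getMessage()) pattern above: getMessage() may be null, and the stack trace is lost either way. JUL's log(Level.SEVERE, msg, thrown) overload keeps both; a minimal sketch (not from the project above):

import java.util.logging.Level;
import java.util.logging.Logger;

public class SevereWithThrowable {
    private static final Logger LOGGER = Logger.getLogger(SevereWithThrowable.class.getName());

    public static void main(String[] args) {
        try {
            throw new IllegalStateException("boom");
        } catch (Exception e) {
            // severe(e.getMessage()) would record only "boom"; this call
            // attaches the full stack trace to the log record as well.
            LOGGER.log(Level.SEVERE, "Failed to write experiment.info", e);
        }
    }
}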

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param ratio
 * @param agents
 * @param originalDatasetPath
 * @param outputDir
 * @param central
 * @param scenario
 * @param logger
 */
private void createExperimentInfoFile(double ratio, int agents, String originalDatasetPath, String outputDir,
        boolean central, String scenario, Logger logger) {

    try {
        String fileName = outputDir + "/" + agents + "agents/experiment.info";
        File file = new File(fileName);
        File parent = file.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        FileWriter fw = new FileWriter(file);
        fw.write("Scenario: " + scenario + "\n");
        fw.write("Test Cases Ratio: " + Double.toString(ratio) + "\n");
        fw.write("Number of Agents: " + Integer.toString(agents) + "\n");
        fw.write("Original dataset: " + originalDatasetPath + "\n");
        fw.write("Experiment dataset folder: " + outputDir + "\n");
        fw.write("Central approach: " + Boolean.toString(central) + "\n");
        fw.close();

    } catch (Exception e) {
        logger.severe(e.getMessage());
        System.exit(1);
    }
}

From source file:org.usrz.libs.logging.LevelErrorTest.java

@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasNoLastEvent("at Config level");
    assertFalse(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasNoLastEvent("at Info level");
    assertFalse(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasNoLastEvent("at Warning level");
    assertFalse(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));

}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param originalDatasetPath
 * @param logger
 * @return the list of essential records found in the original dataset
 */
private List<String[]> getEssentials(String originalDatasetPath, Logger logger) {
    // Find essentials
    List<String[]> essentials = new ArrayList<String[]>();
    HashMap<String, List<String>> nodesAndStates = new HashMap<String, List<String>>();
    try {
        // Look for all possible states
        Reader fr = new FileReader(originalDatasetPath);
        CsvReader reader = new CsvReader(fr);
        reader.readHeaders();
        String[] headers = reader.getHeaders();
        for (String header : headers) {
            nodesAndStates.put(header, new ArrayList<String>());
        }
        String[] values;
        while (reader.readRecord()) {
            values = reader.getValues();
            for (int i = 0; i < values.length; i++) {
                if (!nodesAndStates.get(headers[i]).contains(values[i])) {
                    nodesAndStates.get(headers[i]).add(values[i]);
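                    // contains() compares array references here, so this check
                    // only prevents re-adding the same row while scanning its columns.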
                    if (!essentials.contains(values)) {
                        essentials.add(values);
                    }
                }
            }
        }

        reader.close();

        logger.fine("Number of Essentials: " + essentials.size());
    } catch (Exception e) {
        logger.severe(e.getMessage());
        System.exit(1);
    }
    return essentials;
}

From source file:com.zacwolf.commons.crypto._CRYPTOfactory.java

public final _CRYPTOfactory test(Logger logger, String[] testargs) {
    try {
        final File testfile = createTempFile("_CRYPTOfactory" + this.getType() + "_Test", ".tmp");

        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.crypt(\"" + testargs[0]
                + "\"):RESULT:"
                + new String(this.decrypt(this.encrypt(testargs[0].getBytes(_CRYPTOfactory.ENCODING))),
                        _CRYPTOfactory.ENCODING));

        this.encryptToOutputStream(testargs[0].getBytes(_CRYPTOfactory.ENCODING),
                new FileOutputStream(testfile));
        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.cryptToOutputStream(\""
                + testargs[0] + "\"):RESULT:" + new String(
                        this.decryptFromInputStream(new FileInputStream(testfile)), _CRYPTOfactory.ENCODING));
        testfile.delete();

        if (!this.getType().equalsIgnoreCase("RSA")) {
            this.encryptObjToOutputStream(new TestSerObj(testargs), new FileOutputStream(testfile));
            logger.finer("_CRYPTOfactory:TEST:" + this.getType() + ":.cryptObjToOutputStream:RESULT:"
                    + this.decryptObjFromInputStream(new FileInputStream(testfile)));
            logger.finer(this.getClass().getName() + ":TEST:Fully initialized " + this.crypter.getType()
                    + " cipher\n");
            testfile.delete();
        }
    } catch (Exception e) {
        if (e instanceof InvalidKeyException && this.crypter instanceof Crypter_RSA) {
            logger.fine("Unable to test an RSACypter with only a public key");
        } else {
            logger.severe(this.getClass().getName() + ":TEST:" + this.crypter.getType() + ":ERROR:" + e + "\n");
            e.printStackTrace();
        }
    } finally {
        logger.exiting(this.getClass().getName(), "test()", JVM.getMemoryStats());
    }
    return this;//So that it can be chained with the constructor call
}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * This method splits the original dataset into many small datasets for a
 * given number of agents.
 * 
 * @param ratio
 *            0 < ratio < 1; typically 0.3 or 0.4, to build a test dataset
 *            with this percentage of the original data.
 * @param agents
 *            number of agents to split the original dataset
 * @param originalDatasetPath
 * @param outputDir
 * @param central
 *            true to create a bayes-central dataset that joins all agent
 *            data
 * @param scenario
 * @param logger
 * @param iteration
 */
public void splitDataset(double ratio, int agents, String originalDatasetPath, String outputDir,
        boolean central, String scenario, Logger logger, int iteration) {

    int ratioint = (int) (ratio * 100);
    double roundedratio = ((double) ratioint) / 100;
    String outputDirWithRatio = outputDir + "/" + roundedratio + "testRatio/iteration-" + iteration;
    File dir = new File(outputDirWithRatio);
    if (!dir.exists() || !dir.isDirectory()) {
        dir.mkdirs();
    }

    logger.finer("--> splitDataset()");
    logger.fine("Creating experiment.info...");
    this.createExperimentInfoFile(ratio, agents, originalDatasetPath, outputDirWithRatio, central, scenario,
            logger);

    try {
        // Look for essentials
        List<String[]> essentials = this.getEssentials(originalDatasetPath, logger);

        HashMap<String, CsvWriter> writers = new HashMap<String, CsvWriter>();
        CsvReader csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        String[] headers = csvreader.getHeaders();
        int originalDatasetRowsCounter = 0;
        while (csvreader.readRecord()) {
            originalDatasetRowsCounter++;
        }
        csvreader.close();

        // Create datasets files

        // Central dataset
        if (central) {
            String fileName = outputDirWithRatio + File.separator + "bayes-central-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            writers.put("CENTRAL", writer);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            logger.fine("Bayes central dataset created.");
        }

        // Agent datasets
        String agentsDatasetsDir = outputDirWithRatio + File.separator + agents + "agents";
        File f = new File(agentsDatasetsDir);
        if (!f.isDirectory()) {
            f.mkdirs();
        }
        for (int i = 0; i < agents; i++) {
            String fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            writers.put("AGENT" + i, writer);
            logger.fine("AGENT" + i + " dataset created.");
        }

        // Test dataset
        String fileName = outputDirWithRatio + File.separator + "test-dataset.csv";
        CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
        writer.writeRecord(headers);
        writers.put("TEST", writer);
        logger.fine("Test dataset created.");

        // Create an ordering queue
        int testCases = (int) (ratio * originalDatasetRowsCounter);
        int testStep = originalDatasetRowsCounter / testCases;

        csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        int stepCounter = 0 - (iteration % testStep);
        int agentCounter = 0;
        while (csvreader.readRecord()) {
            String[] row = csvreader.getValues();
            if (stepCounter % testStep == 0) {
                writer = writers.get("TEST");
                writer.writeRecord(row);
            } else {
                writer = writers.get("AGENT" + agentCounter);
                writer.writeRecord(row);
                writer = writers.get("CENTRAL");
                writer.writeRecord(row);
                agentCounter++;
                if (agentCounter == agents) {
                    agentCounter = 0;
                }
            }
            stepCounter++;
        }

        csvreader.close();
        for (CsvWriter w : writers.values()) {
            w.close();
        }

    } catch (Exception e) {
        logger.severe("Exception while splitting dataset. ->");
        logger.severe(e.getMessage());
        System.exit(1);
    }

    logger.finer("<-- splitDataset()");
}
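
A quick check of the test-split arithmetic above, with hypothetical numbers: for 100 rows and ratio = 0.3, testCases = 30 and testStep = 100 / 30 = 3 by integer division, so every third row goes to the test set, roughly 33 rows rather than exactly 30. The integer rounding makes the split approximate, and a ratio small enough to make testCases zero would cause a division by zero.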

From source file:org.blazr.extrastorage.ExtraStorage.java

@SuppressWarnings("deprecation")
public void onEnable() {
    compilationSuccess();
    Logger log = getLogger();
    try {
        plugin = this;
        e_file = getFile();
        PluginManager pm = getServer().getPluginManager();
        EventHandlers eh = new EventHandlers(this);
        pm.registerEvents(eh, this);
        File defaultDir = getDataFolder().getCanonicalFile();
        if (!defaultDir.exists()) {
            defaultDir.mkdir();
            File newDataLoc = new File(defaultDir.getCanonicalPath() + File.separator + "data");
            newDataLoc.mkdir();
            saveResource("LICENSE.txt", true);
        } else {
            File newDataLoc = new File(defaultDir.getCanonicalPath() + File.separator + "data");
            if (!newDataLoc.exists()) {
                newDataLoc.mkdir();
                saveResource("LICENSE.txt", true);
            }
        }
        File oldFile1 = new File(defaultDir.getCanonicalPath() + File.separator + "data" + File.separator
                + "LastUpdateCheckTime");
        File oldFile2 = new File(
                defaultDir.getCanonicalPath() + File.separator + "data" + File.separator + "LatestVersion");
        if (oldFile1.exists()) {
            oldFile1.delete();
        }
        if (oldFile2.exists()) {
            oldFile2.delete();
        }
        for (Player player : getServer().getOnlinePlayers()) {
            if (!getConfig().getList("world-blacklist.worlds").contains(player.getWorld().getName())) {
                IO.loadBackpackFromDiskOnLogin(player, this);
            }
        }
        log.info("Enabled successfully.");
        FileConfiguration conf = getConfig();
        conf.options().copyDefaults(true);
        if (conf.get("Comaptibility-Settings.Vanish-No-Packet.no-item-pickup-when-vanished") != null) {
            conf.set("Comaptibility-Settings", null);
        }
        if (!conf.isSet("display-prefix")) {
            conf.set("display-prefix", true);
        }
        if (conf.getBoolean("display-prefix")) {
            PNC = ChatColor.YELLOW + "[ExtraStorage]";
        } else {
            PNC = "";
        }
        List<String> blacklist = conf.getStringList("blacklisted-items");
        boolean isOldStyle = false;
        for (String item : blacklist) {
            if (isNumeric(item)) {
                isOldStyle = true;
            }
        }
        if (isOldStyle) {
            List<String> newList = new ArrayList<String>();
            for (String item : blacklist) {
                if (isNumeric(item)) {
                    int itemCode = Integer.parseInt(item);

                    ItemStack tempIS = new ItemStack(itemCode);
                    newList.add(tempIS.getType().toString());
                } else {
                    newList.add(item);
                }
            }
            conf.set("blacklisted-items", newList);
        }
        if (!conf.isSet("update-check"))
            conf.set("update-check", true);
        if (!conf.isSet("use-Minecraft-UUID"))
            conf.set("use-Minecraft-UUID", true);
        boolean update_check = conf.getBoolean("update-check");
        mojangUUID = conf.getBoolean("use-Minecraft-UUID");
        loadUUID(this);
        saveConfig();

        try {
            Metrics metrics = new Metrics(this);
            metrics.start();
        } catch (IOException e) {
            // Failed to submit the stats :-(
        }

        if (update_check) {
            Updater up = new Updater(this, 56836, getFile(), Updater.UpdateType.NO_DOWNLOAD, false);
            if (up.getResult() == UpdateResult.UPDATE_AVAILABLE) {
                getLogger().info("A new version of the plugin is available !");
                updatenotice = true;
                updatenoticemessage = up.getLatestName().toLowerCase().replace("extrastorage", "");
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
        log.severe("Error in onEnable! Plugin not enabled properly!");
    }

}

From source file:edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java

public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;

    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId =  ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();

        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {

            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger
                    .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            for (int i = 0; i < batchDir.listFiles().length; i++) {
                File studyDir = batchDir.listFiles()[i];
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (int j = 0; j < studyDir.listFiles().length; j++) {
                        File file = studyDir.listFiles()[j];
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {

                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }

                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }

                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }

                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);

        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        //   importBatchDir = "";
    }

    return null;
}
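
The batch importer above builds a dedicated logger and attaches a FileHandler so each run gets its own log file. A minimal standalone sketch of that pattern (the file name and logger name are illustrative):

import java.util.logging.FileHandler;
import java.util.logging.Logger;

public class FileHandlerSevereDemo {
    public static void main(String[] args) throws Exception {
        Logger logger = Logger.getLogger("batch.import.demo");
        FileHandler handler = new FileHandler("batch.log"); // XML-formatted records by default
        logger.addHandler(handler);
        try {
            logger.severe("Import failed: exception message = ...");
        } finally {
            handler.close();              // flush and release the log file
            logger.removeHandler(handler);
        }
    }
}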