Example usage for java.util.logging Logger setLevel

Introduction

This page lists example usages of the java.util.logging Logger.setLevel method.

Prototype

public void setLevel(Level newLevel) throws SecurityException 

Document

Set the log level specifying which message levels will be logged by this logger.
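
Before the real-world usages, here is a minimal, self-contained sketch of the call in isolation (the logger name com.example.demo and the class name SetLevelDemo are illustrative, not taken from the examples below). It raises a logger's level temporarily and restores the previous level afterwards, the same save-and-restore pattern several of the usages below follow.

import java.util.logging.Level;
import java.util.logging.Logger;

public class SetLevelDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("com.example.demo");

        // getLevel() may return null, meaning the effective level is
        // inherited from the parent logger; keep it so it can be restored.
        Level oldLevel = logger.getLevel();
        try {
            // From here on, only WARNING and above are logged by this logger.
            // setLevel can throw SecurityException if a security manager
            // denies LoggingPermission("control").
            logger.setLevel(Level.WARNING);
            logger.info("not logged (below WARNING)");
            logger.warning("logged");
        } finally {
            // Restore the previous level (possibly null, i.e. inherit again).
            logger.setLevel(oldLevel);
        }
    }
}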

Usage

From source file:org.archive.io.Warc2Arc.java

protected void transform(final WARCReader reader, final ARCWriter writer)
        throws IOException, java.text.ParseException {
    // No point digesting. Digest is available after reading of ARC which
    // is too late for inclusion in WARC.
    reader.setDigest(false);
    // I don't want the close being logged -- least, not w/o log of
    // an opening (and that'd be a little silly for simple script
    // like this). Currently, it logs at level INFO so that close
    // of files gets written to log files.  Up the log level just
    // for the close.
    Logger l = Logger.getLogger(writer.getClass().getName());
    Level oldLevel = l.getLevel();
    try {
        l.setLevel(Level.WARNING);
        for (final Iterator<ArchiveRecord> i = reader.iterator(); i.hasNext();) {
            WARCRecord r = (WARCRecord) i.next();
            if (!isARCType(r.getHeader().getMimetype())) {
                continue;
            }
            if (r.getHeader().getContentBegin() <= 0) {
                // Otherwise, because the length includes the Header-Line and
                // Named Fields, these will end up in the ARC unless there
                // is a non-zero content begin.
                continue;
            }
            String ip = (String) r.getHeader().getHeaderValue((WARCConstants.HEADER_KEY_IP));
            long length = r.getHeader().getLength();
            int offset = r.getHeader().getContentBegin();
            // This mimetype is not exactly what you'd expect to find in
            // an ARC, though technically it's 'correct'. To get the right
            // one, we would need to parse the HTTP headers. That's messy;
            // not doing it for now.
            String mimetype = r.getHeader().getMimetype();
            // Clean out ISO time string '-', 'T', ':', and 'Z' characters.
            String t = r.getHeader().getDate().replaceAll("[-T:Z]", "");
            long time = ArchiveUtils.getSecondsSinceEpoch(t).getTime();
            writer.write(r.getHeader().getUrl(), mimetype, ip, time, (int) (length - offset), r);
        }
    } finally {
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            try {
                writer.close();
            } finally {
                l.setLevel(oldLevel);
            }
        }
    }
}

From source file:com.cyberway.issue.io.Warc2Arc.java

protected void transform(final WARCReader reader, final ARCWriter writer)
        throws IOException, java.text.ParseException {
    // No point digesting. Digest is available after reading of ARC which
    // is too late for inclusion in WARC.
    reader.setDigest(false);
    // I don't want the close being logged -- least, not w/o log of
    // an opening (and that'd be a little silly for simple script
    // like this). Currently, it logs at level INFO so that close
    // of files gets written to log files.  Up the log level just
    // for the close.
    Logger l = Logger.getLogger(writer.getClass().getName());
    Level oldLevel = l.getLevel();
    try {
        l.setLevel(Level.WARNING);
        for (final Iterator i = reader.iterator(); i.hasNext();) {
            WARCRecord r = (WARCRecord) i.next();
            if (!isARCType(r.getHeader().getMimetype())) {
                continue;
            }
            if (r.getHeader().getContentBegin() <= 0) {
                // Otherwise, because the length includes the Header-Line and
                // Named Fields, these will end up in the ARC unless there
                // is a non-zero content begin.
                continue;
            }
            String ip = (String) r.getHeader().getHeaderValue((WARCConstants.HEADER_KEY_IP));
            long length = r.getHeader().getLength();
            int offset = r.getHeader().getContentBegin();
            // This mimetype is not exactly what you'd expect to find in
            // an ARC, though technically it's 'correct'. To get the right
            // one, we would need to parse the HTTP headers. That's messy;
            // not doing it for now.
            String mimetype = r.getHeader().getMimetype();
            // Clean out ISO time string '-', 'T', ':', and 'Z' characters.
            String t = r.getHeader().getDate().replaceAll("[-T:Z]", "");
            long time = ArchiveUtils.getSecondsSinceEpoch(t).getTime();
            writer.write(r.getHeader().getUrl(), mimetype, ip, time, (int) (length - offset), r);
        }
    } finally {
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            try {
                writer.close();
            } finally {
                l.setLevel(oldLevel);
            }
        }
    }
}

From source file:org.archive.io.Arc2Warc.java

protected void transform(final ARCReader reader, final File warc) throws IOException {
    WARCWriter writer = null;
    // No point digesting. Digest is available after reading of ARC which
    // is too late for inclusion in WARC.
    reader.setDigest(false);
    try {
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(warc));
        // Get the body of the first ARC record as a String so can dump it
        // into first record of WARC.
        final Iterator<ArchiveRecord> i = reader.iterator();
        ARCRecord firstRecord = (ARCRecord) i.next();
        ByteArrayOutputStream baos = new ByteArrayOutputStream((int) firstRecord.getHeader().getLength());
        firstRecord.dump(baos);
        // Add ARC first record content as an ANVLRecord.
        ANVLRecord ar = new ANVLRecord();
        ar.addLabelValue("Filedesc", baos.toString());
        List<String> metadata = new ArrayList<String>(1);
        metadata.add(ar.toString());
        // Now create the writer.  If reader was compressed, lets write
        // a compressed WARC.
        writer = new WARCWriter(new AtomicInteger(), bos, warc,
                new WARCWriterPoolSettingsData("", "", -1, reader.isCompressed(), null, metadata, generator));
        // Write a warcinfo record with description about how this WARC
        // was made.
        writer.writeWarcinfoRecord(warc.getName(), "Made from " + reader.getReaderIdentifier() + " by "
                + this.getClass().getName() + "/" + getRevision());
        for (; i.hasNext();) {
            write(writer, (ARCRecord) i.next());
        }
    } finally {
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            // I don't want the close being logged -- least, not w/o log of
            // an opening (and that'd be a little silly for simple script
            // like this). Currently, it logs at level INFO so that close
            // of files gets written to log files.  Up the log level just
            // for the close.
            Logger l = Logger.getLogger(writer.getClass().getName());
            Level oldLevel = l.getLevel();
            l.setLevel(Level.WARNING);
            try {
                writer.close();
            } finally {
                l.setLevel(oldLevel);
            }
        }
    }
}

From source file:org.kalypso.ui.KalypsoGisPlugin.java

private void configureLogger() {
    // TODO:REMOVE THIS: we should always use the eclipse logging mechanisms
    final Logger logger = Logger.getLogger("org.kalypso"); //$NON-NLS-1$
    logger.setLevel(Level.INFO);

    final Handler[] handlers = logger.getHandlers();
    for (final Handler handler : handlers) {
        handler.setLevel(Level.FINER);
    }
}

From source file:org.cloudifysource.esc.shell.commands.TeardownCloud.java

private void limitLoggingLevel() {

    if (!this.verbose) {
        loggerStates.clear();
        for (String loggerName : NON_VERBOSE_LOGGERS) {
            Logger provisioningLogger = Logger.getLogger(loggerName);
            Level logLevelBefore = provisioningLogger.getLevel();
            provisioningLogger.setLevel(Level.WARNING);
            loggerStates.put(loggerName, logLevelBefore);
        }
    }
}

From source file:com.speed.ob.Obfuscator.java

public Obfuscator(final Config config) {
    transforms = new LinkedList<>();
    store = new ClassStore();
    this.config = config;
    //set up logging
    this.LOGGER = Logger.getLogger(this.getClass().getName());
    LOGGER.info("Ob2 is starting");
    String logLvl = config.get("Obfuscator.logging");
    String logDir = config.get("Obfuscator.log_dir");
    level = parseLevel(logLvl);
    LOGGER.info("Logger level set to " + level.getName());
    Logger topLevel = Logger.getLogger("");
    topLevel.setLevel(level);
    File logs = new File(logDir);
    if (!logs.exists()) {
        if (!logs.mkdir())
            Logger.getLogger(this.getClass().getName()).warning("Could not create logging directory");
    }
    try {
        if (logs.exists()) {
            fHandler = new FileHandler(logs.getAbsolutePath() + File.separator + "ob%g.log");
            topLevel.addHandler(fHandler);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
    for (Handler handler : topLevel.getHandlers()) {
        handler.setLevel(level);
    }
    //populate transforms
    LOGGER.info("Configuring Ob");
    LOGGER.fine("Parsing config");
    if (config.getBoolean("Obfuscator.all_transforms")) {
        LOGGER.fine("Adding all transforms");
        transforms.add(ClassNameTransform.class);
    } else {
        if (config.getBoolean("Obfuscator.classname_obfuscation")) {
            LOGGER.fine("Adding class name transform");
            transforms.add(ClassNameTransform.class);
        }
        if (config.getBoolean("Obfuscator.controlflow_obfuscation")) {
            LOGGER.fine("Control flow obfuscation not added, transform does not exist");
        }
        if (config.getBoolean("Obfuscator.string_obfuscation")) {
            LOGGER.fine("String obfuscation not added, transform does not exist");

        }
        if (config.getBoolean("Obfuscator.fieldname_transforms")) {
            LOGGER.fine("Field name obfuscation not added, transform does not exist");

        }
        if (config.getBoolean("Obfuscator.methodname_transforms")) {
            LOGGER.fine("Method name obfuscation not added, transform does not exist");

        }
    }
    LOGGER.info("Loaded " + transforms.size() + " transforms");
    String inputFile = config.get("Obfuscator.input");
    LOGGER.fine("Checking input file(s) and output directory");
    String outFile = config.get("Obfuscator.out_dir");
    out = new File(outFile);
    if (inputFile == null || inputFile.isEmpty()) {
        LOGGER.severe("Input file not specified in config");
        throw new RuntimeException("Input file not specified");
    } else {
        in = new File(inputFile);
        if (!in.exists()) {
            LOGGER.severe("Input file not found");
            throw new RuntimeException("Input file not found");
        }
        LOGGER.fine("Attempting to initialise classes");
        if (in.isDirectory()) {
            try {
                store.init(in.listFiles(), false);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (in.getName().endsWith(".class")) {
            try {
                store.init(new File[] { in }, false);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (in.getName().endsWith(".jar")) {
            try {
                JarInputStream in = new JarInputStream(new FileInputStream(this.in));
                store.init(in, out, this.in);
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        LOGGER.info("Loaded " + store.nodes().size() + " classes");
    }
    if (!out.exists()) {
        LOGGER.fine("Attempting to make output directory");
        if (!out.mkdir()) {
            LOGGER.severe("Could not make output directory");
            throw new RuntimeException("Could not create output dir: " + out.getAbsolutePath());
        }
    } else if (!out.isDirectory()) {
        LOGGER.severe("Output directory is a file");
        throw new RuntimeException(out.getName() + " is not a directory, cannot output there");
    } else {
        if (!out.canWrite()) {
            LOGGER.severe("Cannot write to output directory");
            throw new RuntimeException("Cannot write to output dir: " + out.getAbsolutePath());
        }
    }

}

From source file:hudson.plugins.active_directory.docker.TheFlintstonesTest.java

private List<String> captureLogMessages(int size) {
    final List<String> logMessages = new ArrayList<>(size);
    Logger logger = Logger.getLogger("");
    logger.setLevel(Level.ALL);

    RingBufferLogHandler ringHandler = new RingBufferLogHandler(size) {

        final Formatter f = new SimpleFormatter(); // placeholder instance for what should have been a static method perhaps

        @Override
        public synchronized void publish(LogRecord record) {
            super.publish(record);
            String message = f.formatMessage(record);
            Throwable x = record.getThrown();
            logMessages.add(message == null && x != null ? x.toString() : message);
        }
    };
    logger.addHandler(ringHandler);

    return logMessages;
}

From source file:org.opencastproject.rest.RestServiceTestEnv.java

/**
 * Create an environment for <code>baseUrl</code>.
 * The base URL should be the URL where the service to test is mounted, e.g. http://localhost:8090/test
 */
private RestServiceTestEnv(URL baseUrl, Option<? extends ResourceConfig> cfg) {
    this.baseUrl = baseUrl;
    this.cfg = cfg;
    // configure jersey logger to get some output in case of an error
    final Logger jerseyLogger = Logger.getLogger(com.sun.jersey.spi.inject.Errors.class.getName());
    jerseyLogger.addHandler(new ConsoleHandler());
    jerseyLogger.setLevel(Level.WARNING);
}

From source file:org.javaan.JavaanCli.java

private void setLoggerLevel(Level level) {
    Logger logger = LogManager.getLogManager().getLogger("");
    Handler[] handlers = logger.getHandlers();
    for (Handler handler : handlers) {
        handler.setLevel(level);
    }
    logger.setLevel(level);
}

From source file:pe.chalk.telegram.TelegramBot.java

public Logger initLogger(final Level level) {
    final Logger logger = this.getLogger();
    for (Handler handler : logger.getHandlers())
        logger.removeHandler(handler);

    logger.setUseParentHandlers(false);
    logger.addHandler(new StandardHandler(level));
    logger.setLevel(level);

    return logger;
}