Example usage for java.util.logging Logger setUseParentHandlers

Introduction

This page collects usage examples for java.util.logging.Logger.setUseParentHandlers from open-source projects.

Prototype

public void setUseParentHandlers(boolean useParentHandlers) 

Document

Specify whether or not this logger should send its output to its parent Logger.
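
Below is a minimal, self-contained sketch of the effect; the logger name "com.example.demo" and the class name are illustrative. Once useParentHandlers is false, records published to the logger stop propagating to the root logger's console handler, so only handlers attached directly to the logger produce output.

import java.util.logging.ConsoleHandler;
import java.util.logging.Logger;

public class UseParentHandlersDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("com.example.demo");

        // Detach from the parent (root) logger: its console handler is bypassed
        logger.setUseParentHandlers(false);

        // With no handler of its own, this record is now discarded
        logger.info("not printed anywhere");

        // Attach a dedicated handler; only it receives this logger's records
        logger.addHandler(new ConsoleHandler());
        logger.info("printed once, by the logger's own handler");
    }
}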

Usage

From source file:org.kalypso.model.hydrology.internal.test.NAPostprocessingTest.java

private File doPostprocessing(final String baseResourceLocation, final File outputDir, final File asciiBaseDir)
        throws Exception {
    final File resultsDir = new File(outputDir, "results"); //$NON-NLS-1$

    final URL gmlInputZipLocation = getClass().getResource(baseResourceLocation + "/gmlInput.zip"); //$NON-NLS-1$
    final URL baseURL = new URL(String.format("jar:%s!/", gmlInputZipLocation.toExternalForm())); //$NON-NLS-1$

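    // Anonymous logger with parent handlers disabled and its own handlers removed: records are effectively discarded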
    final Logger logger = Logger.getAnonymousLogger();
    logger.setUseParentHandlers(false);
    final Handler[] handlers = logger.getHandlers();
    for (final Handler handler : handlers)
        logger.removeHandler(handler);

    final URL modelResource = new URL(baseURL, "modell.gml"); //$NON-NLS-1$
    final GMLWorkspace modelWorkspace = GmlSerializer.createGMLWorkspace(modelResource, null);

    final URL parameterResource = new URL(baseURL, "parameter.gml"); //$NON-NLS-1$
    final GMLWorkspace parameterWorkspace = GmlSerializer.createGMLWorkspace(parameterResource, null);
    final Parameter parameter = (Parameter) parameterWorkspace.getRootFeature();

    final URL controlResource = new URL(baseURL, "expertControl.gml"); //$NON-NLS-1$
    final GMLWorkspace controlWorkspace = GmlSerializer.createGMLWorkspace(controlResource, null);
    final NAModellControl naControl = (NAModellControl) controlWorkspace.getRootFeature();

    final NaAsciiDirs naAsciiDirs = new NaAsciiDirs(asciiBaseDir);
    final NaSimulationDirs naSimulationDirs = new NaSimulationDirs(resultsDir);

    final URL hydrotopResource = new URL(baseURL, "hydrotop.gml"); //$NON-NLS-1$
    final GMLWorkspace hydrotopWorkspace = GmlSerializer.createGMLWorkspace(hydrotopResource, null);
    final HydrotopeCollection naHydrotop = (HydrotopeCollection) hydrotopWorkspace.getRootFeature();

    final NaModell model = (NaModell) modelWorkspace.getRootFeature();
    final IFeatureBindingCollection<Catchment> catchmentList = model.getCatchments();
    final Catchment[] catchments = catchmentList.toArray(new Catchment[catchmentList.size()]);

    final ParameterHash landuseHash = new ParameterHash(parameter, logger);

    final IDManager idManager = new IDManager();

    final HydroHash hydroHash = new HydroHash(landuseHash, catchments, idManager, false);
    hydroHash.initHydrotopes(naHydrotop);

    final NaPostProcessor postProcessor = new NaPostProcessor(idManager, logger, modelWorkspace, naControl,
            hydroHash);
    postProcessor.process(naAsciiDirs, naSimulationDirs);

    return resultsDir;
}

From source file:pe.chalk.telegram.TelegramBot.java

public Logger initLogger(final Level level) {
    final Logger logger = this.getLogger();
    for (Handler handler : logger.getHandlers())
        logger.removeHandler(handler);

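    // Bypass the parent logger's handlers so the custom StandardHandler is the only output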
    logger.setUseParentHandlers(false);
    logger.addHandler(new StandardHandler(level));
    logger.setLevel(level);

    return logger;
}

From source file:bookkeepr.BookKeepr.java

/**
 * This loads the configuration file and sets the initial settings.
 */
public BookKeepr(File configFile) {

    try {
        this.configFile = configFile;
        if (!configFile.exists()) {
            config = new BookkeeprConfig();
            config.setOriginId(0);
            saveConfig();
        }
        config = (BookkeeprConfig) XMLReader.read(new FileInputStream(configFile));
        if (config.getOriginId() < 0 || config.getOriginId() > 255) {
            config.setOriginId(0);

        }
        if (config.getOriginId() == 0) {
            Logger.getLogger(BookKeepr.class.getName()).log(Level.INFO,
                    "Client mode active, creation or modification disabled");
        }

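        // Route all "bookkeepr" records to the status monitor handler only, at every level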
        statusMon = new BookKeeprStatusMonitor();
        Logger logger = Logger.getLogger("bookkeepr");
        logger.setLevel(Level.ALL);
        logger.setUseParentHandlers(false);
        logger.addHandler(statusMon);
    } catch (SAXException ex) {
        Logger.getLogger(BookKeepr.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(BookKeepr.class.getName()).log(Level.SEVERE, null, ex);
    }
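    // Pre-create a pool of 20 HTTP clients with redirects disabled and a 10-second connect timeout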
    for (int i = 0; i < 20; i++) {
        HttpClient httpclient = new DefaultHttpClient();
        HttpClientParams.setRedirecting(httpclient.getParams(), false);
        HttpConnectionParams.setConnectionTimeout(httpclient.getParams(), 10000);
        if (config.getProxyUrl() != null) {
            final HttpHost proxy = new HttpHost(config.getProxyUrl(), config.getProxyPort(), "http");
            httpclient.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
        }
        httpClients.add(httpclient);
    }

}

From source file:org.mp4parser.tools.Main.java

public Logger setupLogger() {
    Logger logger = Logger.getLogger("tools");
    InputStream stream;
    if (verbose) {
        stream = Main.class.getResourceAsStream("/log-verbose.properties");
    } else {
        stream = Main.class.getResourceAsStream("/log.properties");
    }
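    // Reload the global LogManager configuration from the selected properties file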
    try {
        LogManager.getLogManager().readConfiguration(stream);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    logger.setLevel(Level.FINE);
    logger.addHandler(new java.util.logging.ConsoleHandler());
    logger.setUseParentHandlers(false);

    return logger;
}

From source file:com.ebixio.virtmus.stats.StatsLogger.java

private StatsLogger() {
    try {

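        // Capture everything on the stats logger, but keep it out of the default (parent) log output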
        statsLog.setUseParentHandlers(false);
        statsLog.setLevel(Level.ALL);

        Logger uiLogger = Logger.getLogger("org.netbeans.ui");
        // org.netbeans.ui.focus = maybe too much info

        uiLogger.setUseParentHandlers(false);
        uiLogger.setLevel(Level.ALL);

        logSet = pref.get(Options.OptLogSet, "A");
        if (!changeHandler(makeLogHandler(logSet))) {
            Log.log("Stats logging init failed.");
            statsLog.setLevel(Level.OFF);
            uiLogger.setLevel(Level.OFF);
        }

    } catch (SecurityException ex) {
        Log.log(ex);
    }
}

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java

/**
 * Creates a new logger
 */
private Logger init(final Level level, final String file) {
    final LogSettings logSettings = settingsService.getLogSettings();
    final Logger logger = Logger.getAnonymousLogger();
    logger.setLevel(level);
    logger.setUseParentHandlers(false);
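    // Prefer a size-limited, rotating FileHandler; fall back to the console if the log file cannot be opened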
    try {
        final FileUnits units = logSettings.getMaxLengthPerFileUnits();
        final FileHandler fileHandler = new FileHandler(file,
                units.calculate(logSettings.getMaxLengthPerFile()), logSettings.getMaxFilesPerLog(), true);
        fileHandler.setFormatter(logFormatter);
        fileHandler.setEncoding(settingsService.getLocalSettings().getCharset());
        logger.addHandler(fileHandler);
    } catch (final Exception e) {
        final ConsoleHandler consoleHandler = new ConsoleHandler();
        consoleHandler.setFormatter(logFormatter);
        try {
            consoleHandler.setEncoding(settingsService.getLocalSettings().getCharset());
        } catch (final Exception e1) {
            // Just ignore
        }
        logger.addHandler(consoleHandler);
        logger.log(Level.WARNING, "Unable to create logger for file " + file);
    }
    return logger;
}

From source file:org.archive.crawler.reporting.CrawlerLoggerModule.java

private void setupLogFile(Logger logger, String filename, Formatter f, boolean shouldManifest)
        throws IOException, SecurityException {
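    // Send this logger's records only to its dedicated file handler, not to the parent handlers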
    logger.setLevel(Level.INFO); // set all standard loggers to INFO
    GenerationFileHandler fh = GenerationFileHandler.makeNew(filename, false, shouldManifest);
    fh.setFormatter(f);
    logger.addHandler(fh);
    addToManifest(filename, MANIFEST_LOG_FILE, shouldManifest);
    logger.setUseParentHandlers(false);
    this.fileHandlers.put(logger, fh);
}

From source file:org.archive.crawler.reporting.CrawlerLoggerModule.java

private void setupAlertLog(String logsPath) throws IOException {
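    // Alert records go to their own log file rather than to the parent handlers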
    Logger logger = Logger.getLogger(LOGNAME_ALERTS + "." + logsPath);
    String filename = getAlertsLogPath().getFile().getAbsolutePath();
    GenerationFileHandler fh = GenerationFileHandler.makeNew(filename, false, true);
    fh.setFormatter(new SimpleFormatter());
    AlertThreadGroup.current().addLogger(logger);
    AlertHandler.ensureStaticInitialization();
    logger.addHandler(fh);
    addToManifest(filename, MANIFEST_LOG_FILE, true);
    logger.setUseParentHandlers(false);
    this.fileHandlers.put(logger, fh);
}

From source file:edu.uiuc.ncsa.myproxy.MyProxyLogon.java

/**
 * Constructs a MyProxyLogon object. This turns off all logging, so use this
 * constructor only if that is what you need. Otherwise, inject your favorite
 * logger wrapped in a {@link MyLoggingFacade} object.
 */
public MyProxyLogon() {
    super();
    Logger logger = Logger.getLogger(MyProxyLogon.class.getName());
    logger.setUseParentHandlers(false);
    MyLoggingFacade facade = new MyLoggingFacade(logger);
    this.mlf = facade;

    host = System.getenv("MYPROXY_SERVER");
    if (host == null) {
        host = "localhost";
    }
    String portString = System.getenv("MYPROXY_SERVER_PORT");
    if (portString != null) {
        port = Integer.parseInt(portString);
    }
    username = System.getProperty("user.name");
}

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

/**
 * Run a harvest for an individual harvesting Dataverse
 * @param dataverseRequest
 * @param harvestingClientId
 * @throws IOException
 */
public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId) throws IOException {
    HarvestingClient harvestingClientConfig = harvestingClientService.find(harvestingClientId);

    if (harvestingClientConfig == null) {
        throw new IOException("No such harvesting client: id=" + harvestingClientId);
    }

    Dataverse harvestingDataverse = harvestingClientConfig.getDataverse();

    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean."
            + harvestingDataverse.getAlias() + logTimestamp);
    String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_"
            + logTimestamp + ".log";
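    // Each harvest run gets its own timestamped log file, detached from the server-wide logging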
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.setUseParentHandlers(false);
    hdLogger.addHandler(fileHandler);

    PrintWriter importCleanupLog = new PrintWriter(new FileWriter(
            "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));

    List<Long> harvestedDatasetIds = null;

    List<Long> harvestedDatasetIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    List<String> deletedIdentifiers = new ArrayList<String>();

    Date harvestStartTime = new Date();

    try {
        boolean harvestingNow = harvestingClientConfig.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName()
                    + " is currently being harvested.");

        } else {
            harvestingClientService.resetHarvestInProgress(harvestingClientId);
            harvestingClientService.setHarvestInProgress(harvestingClientId, harvestStartTime);

            if (harvestingClientConfig.isOai()) {
                harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger,
                        importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers,
                        harvestedDatasetIdsThisBatch);

            } else {
                throw new IOException("Unsupported harvest type");
            }
            harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(),
                    harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size());
            hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl()
                    + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix());
            hdLogger.log(Level.INFO,
                    "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: "
                            + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size());

            // now index all the datasets we have harvested - created, modified or deleted:
            /* (TODO: may not be needed at all. In Dataverse4, we may be able to get away with the normal 
            reindexing after every import. See the rest of the comments about batch indexing throughout 
            this service bean)
            if (this.processedSizeThisBatch > 0) {
                hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                if (this.harvestedDatasetIdsThisBatch != null) {
                    hdLogger.log(Level.INFO, this.harvestedDatasetIdsThisBatch.size()+" studies in the batch");
                }
                hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
            } else {
                hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
            }
             */
        }
        //mailService.sendHarvestNotification(...getSystemEmail(), harvestingDataverse.getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(), harvestedDatasetIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl()
                + ",format=" + harvestingClientConfig.getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // TODO: 
        // even though this harvesting run failed, we may have had successfully 
        // processed some number of datasets, by the time the exception was thrown. 
        // We should record that number too. And the number of the datasets that
        // had failed, that we may have counted.  -- L.A. 4.4
        harvestingClientService.setHarvestFailure(harvestingClientId, new Date());

    } finally {
        harvestingClientService.resetHarvestInProgress(harvestingClientId);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
        importCleanupLog.close();
    }
}