Example usage for java.util.logging Level FINE

List of usage examples for java.util.logging Level FINE

Introduction

On this page you can find usage examples for java.util.logging Level FINE.

Prototype

public static final Level FINE

Documentation

FINE is a message level providing tracing information.
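
As a quick orientation before the project examples, here is a minimal sketch of typical FINE usage with java.util.logging (class name and messages are illustrative, not taken from the examples below):

import java.util.logging.Level;
import java.util.logging.Logger;

public class FineExample {
    private static final Logger LOGGER = Logger.getLogger(FineExample.class.getName());

    void process(String item) {
        // Guard string concatenation behind an isLoggable check so the message
        // is only built when FINE is actually enabled.
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("processing item " + item);
        }
        // Parameterized form: the {0} placeholder is only formatted when FINE is enabled.
        LOGGER.log(Level.FINE, "finished processing {0}", item);
    }
}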

Usage

From source file:org.archive.checkpointing.Checkpoint.java

public JSONObject loadJson(String beanName) {
    File sourceFile = new File(getCheckpointDir().getFile(), beanName);
    try {
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("reading json from " + sourceFile);
        }
        return new JSONObject(FileUtils.readFileToString(sourceFile));
    } catch (JSONException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.stratuscom.harvester.deployer.FolderBasedAppRunner.java

private void tryInitialize() throws IOException, ParseException {
    log.log(Level.FINE, MessageNames.STARTER_SERVICE_DEPLOYER_STARTING, myName);
    /*
    If the deployerName is supplied, look it up and override the injected deployer.
    */
    if (deployerName != null) {
        deployer = (StarterServiceDeployer) context.get(deployerName);
    }
    /*
     Establish the deployment directory.
     */
    deploymentDirectoryFile = fileUtility.getProfileDirectory().resolveFile(deployDirectory);
    if (deploymentDirectoryFile == null || deploymentDirectoryFile.getType() != FileType.FOLDER) {
        log.log(Level.WARNING, MessageNames.NO_DEPLOYMENT_DIRECTORY,
                new Object[] { deployDirectory, fileUtility.getProfileDirectory() });
    }
    /*
     Do the scan task once - this will launch all the services currently in 
     deploy dir.
     */
    new ScanTask().runOnce();

    if (autoDeploy) {
        /* Now schedule a scan in the required scan time. */
        deployer.workManager.schedule(null, new ScanTask(), getScanInterval(), TimeUnit.SECONDS);
    }
}

From source file:maltcms.ui.nb.pipelineRunner.actions.RunMaltcmsPipelinesAction.java

private Action[] buildActions(Lookup lookup) {
    final IChromAUIProject project = LookupUtils.ensureSingle(lookup, IChromAUIProject.class);
    Collection<Action> topLevelActions = new ArrayList<>();
    File projectPipelinesPath = new File(FileUtil.toFile(project.getLocation()), "pipelines");
    File[] maltcmsVersions = projectPipelinesPath.listFiles(new FileFilter() {
        @Override
        public boolean accept(File f) {
            return f.isDirectory();
        }
    });
    if (maltcmsVersions == null) {
        return new Action[0];
    }
    Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE, "Found maltcms versions: {0}",
            Arrays.deepToString(maltcmsVersions));
    for (File maltcmsVersion : maltcmsVersions) {
        Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                "Checking pipelines below {0}", maltcmsVersion);
        List<File> c = new ArrayList<>(FileUtils.listFiles(maltcmsVersion, new String[] { "mpl" }, true));
        Collections.sort(c, new Comparator<File>() {

            @Override
            public int compare(File o1, File o2) {
                return o1.getName().compareTo(o2.getName());
            }
        });
        Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                "Found {0} pipeline definitions!", c.size());
        Collection<Action> actions = new ArrayList<>();
        for (File pipelineFile : c) {
            FileObject fo = FileUtil.toFileObject(pipelineFile);
            Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE, "Adding pipeline {0}",
                    pipelineFile.getName());
            DataObject dobj;
            try {
                dobj = DataObject.find(fo);
                if (dobj instanceof MaltcmsPipelineFormatDataObject) {
                    final MaltcmsPipelineFormatDataObject mpfdo = (MaltcmsPipelineFormatDataObject) dobj;
                    AbstractAction pipelineRunAction = new AbstractAction(fo.getName()) {
                        @Override
                        public void actionPerformed(ActionEvent e) {
                            SwingUtilities.invokeLater(new Runnable() {

                                @Override
                                public void run() {
                                    Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                                            "Creating PipelineRunOpenSupport");
                                    PipelineRunOpenSupport pos = new PipelineRunOpenSupport(
                                            mpfdo.getPrimaryEntry());
                                    Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                                            "Calling pos.open()!");
                                    pos.open();
                                    Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                                            "Done!");
                                }
                            });
                        }
                    };
                    Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                            "Adding dataobject action");
                    actions.add(pipelineRunAction);
                    //                  subMenu.add(new JMenuItem(pipelineRunAction));
                }
            } catch (DataObjectNotFoundException ex) {
                Exceptions.printStackTrace(ex);
            }

        }
        Logger.getLogger(RunMaltcmsPipelinesAction.class.getName()).log(Level.FINE,
                "Adding {0} Pipeline specific actions!", actions.size());
        topLevelActions.add(Lookup.getDefault().lookup(INodeFactory.class)
                .createMenuItem(maltcmsVersion.getName(), actions.toArray(new Action[actions.size()])));
    }
    return topLevelActions.toArray(new Action[topLevelActions.size()]);
}

From source file:hudson.os.solaris.ZFSInstaller.java

private boolean shouldBeActive() {
    if (!System.getProperty("os.name").equals("SunOS") || disabled)
        // on systems that don't have ZFS, we don't need this monitor
        return false;

    try {
        LibZFS zfs = new LibZFS();
        List<ZFSFileSystem> roots = zfs.roots();
        if (roots.isEmpty())
            return false; // no active ZFS pool

        // if we don't run on a ZFS file system, activate
        ZFSFileSystem hudsonZfs = zfs.getFileSystemByMountPoint(Hudson.getInstance().getRootDir());
        if (hudsonZfs != null)
            return false; // already on ZFS

        // decide what file system we'll create
        ZFSFileSystem pool = roots.get(0);
        prospectiveZfsFileSystemName = computeHudsonFileSystemName(zfs, pool);

        return true;
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failed to detect whether Hudson is on ZFS", e);
        return false;
    } catch (LinkageError e) {
        LOGGER.info(
                "No ZFS available. If you believe this is an error, increase the logging level to get the stack trace");
        LOGGER.log(Level.FINE, "Stack trace of failed ZFS load", e);
        return false;
    }
}

From source file:es.csic.iiia.planes.util.InverseWishartDistribution.java

/**
 * Returns a sample matrix from this distribution.
 * @return sampled matrix.
 */
public RealMatrix sample() {
    for (int i = 0; i < 100; i++) {
        try {
            RealMatrix A = sampleWishart();
            RealMatrix result = new LUDecomposition(A).getSolver().getInverse();
            LOG.log(Level.FINE, "Cov = {0}", result);
            return result;
        } catch (SingularMatrixException ex) {
            LOG.finer("Discarding singular matrix generated by the wishart distribution.");
        }
    }
    throw new RuntimeException("Unable to generate inverse wishart samples!");
}

From source file:com.ibm.liberty.starter.api.v1.LibertyFileUploader.java

protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    String tech = request.getParameter(PARAMETER_TECH);
    String workspaceId = request.getParameter(PARAMETER_WORKSPACE); //specify the unique workspace directory to upload the file(s) to.   
    Collection<Part> filePartCollection = request.getParts();

    String serverHostPort = request.getRequestURL().toString().replace(request.getRequestURI(), "");
    int schemeLength = request.getScheme().toString().length();
    String internalHostPort = "http" + serverHostPort.substring(schemeLength);
    log.log(Level.FINER, "serverHostPort : " + serverHostPort);
    final ServiceConnector serviceConnector = new ServiceConnector(serverHostPort, internalHostPort);
    HashMap<Part, String> fileNames = new HashMap<Part, String>();
    if (!isValidRequest(request, response, tech, workspaceId, filePartCollection, serviceConnector,
            fileNames)) {
        return;
    }

    Service techService = serviceConnector.getServiceObjectFromId(tech);
    String techDirPath = StarterUtil.getWorkspaceDir(workspaceId) + "/" + techService.getId();
    File techDir = new File(techDirPath);
    if (techDir.exists() && techDir.isDirectory()
            && "true".equalsIgnoreCase(request.getParameter(PARAMETER_CLEANUP))) {
        FileUtils.cleanDirectory(techDir);
        log.log(Level.FINER, "Cleaned up tech workspace directory : " + techDirPath);
    }

    for (Part filePart : filePartCollection) {
        if (!techDir.exists()) {
            FileUtils.forceMkdir(techDir);
            log.log(Level.FINER, "Created tech directory :" + techDirPath);
        }

        String filePath = techDirPath + "/" + fileNames.get(filePart);
        log.log(Level.FINER, "File path : " + filePath);
        File uploadedFile = new File(filePath);

        Files.copy(filePart.getInputStream(), uploadedFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        log.log(Level.FINE, "Copied file to " + filePath);
    }

    if ("true".equalsIgnoreCase(request.getParameter(PARAMETER_PROCESS))) {
        // Process uploaded file(s)
        String processResult = serviceConnector.processUploadedFiles(techService, techDirPath);
        if (!processResult.equalsIgnoreCase("success")) {
            log.log(Level.INFO,
                    "Error processing the files uploaded to " + techDirPath + " : Result=" + processResult);
            response.sendError(500, processResult);
            return;
        }
        log.log(Level.FINE, "Processed the files uploaded to " + techDirPath);
    }

    response.setContentType("text/html");
    PrintWriter out = response.getWriter();
    out.println("success");
    out.close();
}

From source file:com.vmware.admiral.adapter.docker.service.DockerNetworkAdapterService.java

/**
 * Start processing the request. First fetches the {@link ContainerNetworkState}, filling the
 * {@link RequestContext#networkState} property.
 */
private void processNetworkRequest(RequestContext context) {
    Operation getNetworkState = Operation.createGet(context.request.getNetworkStateReference())
            .setCompletion((o, ex) -> {
                if (ex != null) {
                    fail(context.request, ex);
                } else {
                    handleExceptions(context.request, context.operation, () -> {
                        context.networkState = o.getBody(ContainerNetworkState.class);
                        processNetworkState(context);
                    });
                }
            });
    handleExceptions(context.request, context.operation, () -> {
        getHost().log(Level.FINE, "Fetching NetworkState: %s %s", context.request.getRequestTrackingLog(),
                context.request.getNetworkStateReference());
        sendRequest(getNetworkState);
    });
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testPostAssets(@Mocked final Logger logger, @Mocked SecurityContext context) {

    final String json = "{\"name\":\"myname\"}";

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;

            logger.fine("postAssets called with json content:\n" + json);
        }
    };

    getRestResource().postAssets(json, context);
}

From source file:de.fosd.jdime.Main.java

/**
 * Perform a merge operation on the input files or directories.
 *
 * @param args
 *         command line arguments
 */
public static void run(String[] args) {
    MergeContext context = new MergeContext();

    if (!parseCommandLineArgs(context, args)) {
        return;
    }

    List<FileArtifact> inputFiles = context.getInputFiles();

    if (context.isInspect()) {
        inspectElement(inputFiles.get(0), context.getInspectArtifact(), context.getInspectionScope());
        return;
    }

    if (context.getDumpMode() != DumpMode.NONE) {
        inputFiles.forEach(artifact -> dump(artifact, context.getDumpMode()));
        return;
    }

    try {
        merge(context);
        output(context);
    } finally {
        outputStatistics(context);
    }

    if (LOG.isLoggable(Level.FINE)) {
        Map<MergeScenario<?>, Throwable> crashes = context.getCrashes();

        if (crashes.isEmpty()) {
            LOG.fine("No crashes occurred while merging.");
        } else {
            String ls = System.lineSeparator();
            StringBuilder sb = new StringBuilder();

            sb.append(String.format("%d crashes occurred while merging:%n", crashes.size()));

            for (Map.Entry<MergeScenario<?>, Throwable> entry : crashes.entrySet()) {
                sb.append("* ").append(entry.getValue().toString()).append(ls);
                sb.append("    ").append(entry.getKey().toString().replace(" ", ls + "    ")).append(ls);
            }

            LOG.fine(sb.toString());
        }
    }
}

From source file:com.cyberway.issue.io.WriterPool.java

/**
 * Check out a {@link WriterPoolMember}.
 *
 * This method must be matched by a call to
 * {@link #returnFile(WriterPoolMember)}, else the pool starts leaking.
 * 
 * @return Writer checked out of a pool of files.
 * @throws IOException Problem getting Writer from pool (Converted
 * from Exception to IOException so this pool can live as a good citizen
 * down in depths of ARCSocketFactory).
 * @throws NoSuchElementException If we time out waiting on a pool member.
 */
public WriterPoolMember borrowFile() throws IOException {
    WriterPoolMember f = null;
    for (int i = 0; f == null; i++) {
        long waitStart = System.currentTimeMillis();
        try {
            f = (WriterPoolMember) this.pool.borrowObject();
            if (logger.getLevel() == Level.FINE) {
                logger.fine("Borrowed " + f + " (Pool State: " + getPoolState(waitStart) + ").");
            }
        } catch (NoSuchElementException e) {
            // Let this exception out. Unit test at least depends on it.
            // Log current state of the pool.
            logger.warning(e.getMessage() + ": Retry #" + i + " of max " + arbitraryRetryMax
                    + ": NSEE Pool State: " + getPoolState(waitStart));
            if (i >= arbitraryRetryMax) {
                logger.log(Level.SEVERE, "maximum retries exceeded; rethrowing", e);
                throw e;
            }
        } catch (Exception e) {
            // Convert.
            logger.log(Level.SEVERE, "E Pool State: " + getPoolState(waitStart), e);
            throw new IOException("Failed getting writer from pool: " + e.getMessage());
        }
    }
    return f;
}
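
The javadoc above describes a check-out/return contract: every successful borrowFile() call must eventually be balanced by returnFile(WriterPoolMember), or the pool leaks members. A minimal sketch of that pattern, assuming a WriterPool instance named pool and that returnFile may throw IOException (hypothetical caller, not part of the project above):

public void writeRecord(WriterPool pool) throws IOException {
    WriterPoolMember member = pool.borrowFile();
    try {
        // ... use the checked-out member here, e.g. write a record ...
    } finally {
        // Always return the member, otherwise the pool starts leaking.
        pool.returnFile(member);
    }
}

Note also that this example guards its FINE message with logger.getLevel() == Level.FINE, which only passes when the logger itself is configured to exactly FINE; the more common guard is logger.isLoggable(Level.FINE), which also passes for inherited or finer effective levels such as FINEST.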