Example usage for java.io PrintStream close

Introduction

This page lists usage examples for java.io.PrintStream.close().

Prototype

public void close() 

Document

Closes the stream.
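
close() flushes the stream and then closes the underlying output stream. Note that PrintStream does not throw IOException from close(); any write or close failure is recorded internally and can be queried with checkError(). Below is a minimal sketch of the classic explicit-close pattern (the file name report.txt is just a placeholder for this sketch):

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws FileNotFoundException {
        PrintStream out = new PrintStream(new FileOutputStream("report.txt"));
        try {
            out.println("hello");
        } finally {
            out.close();            // flushes, then closes the underlying FileOutputStream
            if (out.checkError()) { // errors are reported via checkError(), not exceptions
                System.err.println("writing or closing report.txt failed");
            }
        }
    }
}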

Usage

From source file:es.cnio.bioinfo.bicycle.gatk.MethylationFilePair.java

private GlobalMethylationStatistics writeMethylCytosines() throws FileNotFoundException {

    PrintStream out = new PrintStream(new BufferedOutputStream(new FileOutputStream(getMethylcytosinesfile())));
    PrintStream outvcf = new PrintStream(
            new BufferedOutputStream(new FileOutputStream(getMethylcytosinesVCFfile())));

    try {
        BufferedReader wReader = new BufferedReader(new FileReader(this.methylationFiles.get(Strand.WATSON)));
        BufferedReader cReader = new BufferedReader(new FileReader(this.methylationFiles.get(Strand.CRICK)));

        List<String> sortedSequenceNames = toSequenceNames(super.getMasterSequenceDictionary());
        GPFilesReader reader = new GPFilesReader(sortedSequenceNames, wReader, cReader);

        String line = null;

        GlobalMethylationStatistics stats = new GlobalMethylationStatistics();

        writeMethylcytosinesHeader(out);
        writeVCFHeader(outvcf);
        while ((line = reader.readLine()) != null) {
            MethylationCall call = MethylationCall.unmarshall(line);

            writeMehylcytosinesRecord(out, call, stats);
            writeVCFRecord(outvcf, call);
        }
        out.close();
        outvcf.close();
        return stats;

    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

}
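
In the example above the two streams are closed only after every record has been written; if readLine() or unmarshall() throws, neither close() call runs. Because PrintStream implements AutoCloseable, the same shape can also be written with try-with-resources so that close() always executes. This is a sketch under that assumption, with placeholder file names standing in for getMethylcytosinesfile() and getMethylcytosinesVCFfile():

import java.io.BufferedOutputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;

public class TryWithResourcesClose {
    public static void main(String[] args) throws FileNotFoundException {
        // Placeholder output files; the real code would use the accessors shown above.
        try (PrintStream out = new PrintStream(
                new BufferedOutputStream(new FileOutputStream("methylcytosines.txt")));
                PrintStream outvcf = new PrintStream(
                        new BufferedOutputStream(new FileOutputStream("methylcytosines.vcf")))) {
            out.println("# methylcytosines header");
            outvcf.println("##fileformat=VCFv4.2");
            // ... write one record per methylation call ...
        } // both streams are flushed and closed here, even if writing throws
    }
}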

From source file:org.apache.hadoop.hive.ql.QTestUtil.java

private static QTestProcessExecResult executeCmd(String[] args, String outFile, String errFile)
        throws Exception {
    System.out.println("Running: " + org.apache.commons.lang.StringUtils.join(args, ' '));

    PrintStream out = outFile == null ? SessionState.getConsole().getChildOutStream()
            : new PrintStream(new FileOutputStream(outFile), true, "UTF-8");
    PrintStream err = errFile == null ? SessionState.getConsole().getChildErrStream()
            : new PrintStream(new FileOutputStream(errFile), true, "UTF-8");

    Process executor = Runtime.getRuntime().exec(args);

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    PrintStream str = new PrintStream(bos, true, "UTF-8");

    StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, err);
    StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, out, str);

    outPrinter.start();
    errPrinter.start();

    int result = executor.waitFor();

    outPrinter.join();
    errPrinter.join();

    if (outFile != null) {
        out.close();
    }

    if (errFile != null) {
        err.close();
    }

    return QTestProcessExecResult.create(result, new String(bos.toByteArray(), StandardCharsets.UTF_8));
}

From source file:hudson.scm.AbstractCvs.java

/**
 * Gets the output for the CVS <tt>rlog</tt> command for the given module
 * between the specified dates.
 *
 * @param repository
 *            the repository to connect to for running rlog against
 * @param module
 *            the module to check for changes against
 * @param listener
 *            where to log any error messages to
 * @param startTime
 *            don't list any changes before this time
 * @param endTime
 *            don't list any changes after this time
 * @return the output of rlog with no modifications
 * @throws IOException
 *             on underlying communication failure
 */
private CvsLog getRemoteLogForModule(final CvsRepository repository, final CvsRepositoryItem item,
        final CvsModule module, final Date startTime, final Date endTime, final EnvVars envVars,
        final TaskListener listener) throws IOException {
    final Client cvsClient = getCvsClient(repository, envVars, listener);

    RlogCommand rlogCommand = new RlogCommand();

    // we have to synchronize since we're dealing with DateFormat.format()
    synchronized (DATE_FORMATTER) {
        final String lastBuildDate = DATE_FORMATTER.format(startTime);
        final String endDate = DATE_FORMATTER.format(endTime);

        rlogCommand.setDateFilter(lastBuildDate + "<" + endDate);
    }

    // tell CVS which module we're logging
    rlogCommand.setModule(envVars.expand(module.getRemoteName()));

    // ignore headers for files that aren't in the current change-set
    rlogCommand.setSuppressHeader(true);

    // create an output stream to send the output from the CVS command to - we
    // can then parse it from here
    final File tmpRlogSpill = File.createTempFile("cvs", "rlog");
    final DeferredFileOutputStream outputStream = new DeferredFileOutputStream(100 * 1024, tmpRlogSpill);
    final PrintStream logStream = new PrintStream(outputStream, true, getDescriptor().getChangelogEncoding());

    // set a listener with our output stream that we parse the log from
    final CVSListener basicListener = new BasicListener(logStream, listener.getLogger());
    cvsClient.getEventManager().addCVSListener(basicListener);

    // log the command to the current run/polling log
    listener.getLogger().println("cvs " + rlogCommand.getCVSCommand());

    // send the command to be run; we can't continue if the task fails
    try {
        if (!cvsClient.executeCommand(rlogCommand, getGlobalOptions(repository, envVars))) {
            throw new RuntimeException("Error while trying to run CVS rlog");
        }
    } catch (CommandAbortedException e) {
        throw new RuntimeException("CVS rlog command aborted", e);
    } catch (CommandException e) {
        throw new RuntimeException("CVS rlog command failed", e);
    } catch (AuthenticationException e) {
        throw new RuntimeException("CVS authentication failure while running rlog command", e);
    } finally {
        try {
            cvsClient.getConnection().close();
        } catch (IOException ex) {
            listener.getLogger().println("Could not close client connection: " + ex.getMessage());
        }
    }

    // flush the output so we have it all available for parsing
    logStream.close();

    // return the contents of the stream as the output of the command
    return new CvsLog() {
        @Override
        public Reader read() throws IOException {
            // note that master and slave can have different platform encoding
            if (outputStream.isInMemory())
                return new InputStreamReader(new ByteArrayInputStream(outputStream.getData()),
                        getDescriptor().getChangelogEncoding());
            else
                return new InputStreamReader(new FileInputStream(outputStream.getFile()),
                        getDescriptor().getChangelogEncoding());
        }

        @Override
        public void dispose() {
            tmpRlogSpill.delete();
        }
    };
}

From source file:net.rim.ejde.internal.ui.views.profiler.ProfilerView.java

/**
 * Writes the profile data to <code>file</code>.
 *
 * @param file
 *            Destination file.
 * @throws IDEError
 */
private void saveContents(File file) throws IDEError {
    if (file == null) {
        return;
    }
    RIA ria = RIA.getCurrentDebugger();
    if (ria == null) {
        return;
    }
    String debugAttachedTo = ria.getDebugAttachTo();
    if (debugAttachedTo == null || debugAttachedTo.isEmpty()) {
        return;
    }

    PrintStream out = null;
    try {
        out = new PrintStream(new FileOutputStream(file));
        out.print(RIA.getString("ProfileCSVFileHeader1")); //$NON-NLS-1$
        out.print(ria.profileGetTypes()[_whatToProfile].getDescription());
        out.print(RIA.getString("ProfileCSVFileHeader2")); //$NON-NLS-1$
        out.println();

        ProfileItem[] modules = sortedElements(_pd, null);
        for (int i = 0; i < modules.length; i++) {
            ProfileItem module = modules[i];
            Object moduleName = module;

            ProfileItem[] methods = sortedElements(module, null);
            for (int j = 0; j < methods.length; j++) {
                ProfileItem method = methods[j];

                out.print(moduleName);
                out.print(", "); //$NON-NLS-1$
                String methodStr = method.toString();
                Object handle = method.getMethodHandle();
                if (handle != null && handle instanceof DebugMethod) {
                    methodStr = ((DebugMethod) handle).getFullName();
                }
                out.print(Util.replace(methodStr, ",", "")); //$NON-NLS-1$ //$NON-NLS-2$
                out.print(", "); //$NON-NLS-1$
                out.print(method.getTicks());
                out.print(", "); //$NON-NLS-1$
                out.print(method.getCount());
                out.println();
            }
        }
        out.close();
    } catch (IOException e) {
        log.error("", e);
    }
}

From source file:org.apache.hadoop.hbase.PerformanceEvaluation.java

private Path writeInputFile(final Configuration c, final TestOptions opts) throws IOException {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
    Path jobdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
    Path inputDir = new Path(jobdir, "inputs");

    FileSystem fs = FileSystem.get(c);
    fs.mkdirs(inputDir);

    Path inputFile = new Path(inputDir, "input.txt");
    PrintStream out = new PrintStream(fs.create(inputFile));
    // Make input random.
    Map<Integer, String> m = new TreeMap<Integer, String>();
    Hash h = MurmurHash.getInstance();
    int perClientRows = (opts.totalRows / opts.numClientThreads);
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SORT_PROPERTIES_ALPHABETICALLY, true);
    try {
        for (int i = 0; i < 10; i++) {
            for (int j = 0; j < opts.numClientThreads; j++) {
                TestOptions next = new TestOptions(opts);
                next.startRow = (j * perClientRows) + (i * (perClientRows / 10));
                next.perClientRunRows = perClientRows / 10;
                String s = mapper.writeValueAsString(next);
                int hash = h.hash(Bytes.toBytes(s));
                m.put(hash, s);
            }
        }
        for (Map.Entry<Integer, String> e : m.entrySet()) {
            out.println(e.getValue());
        }
    } finally {
        out.close();
    }
    return inputDir;
}

From source file:hudson.tools.JDKInstaller.java

@SuppressWarnings("unchecked") // dom4j doesn't do generics, apparently... should probably switch to XOM
private HttpURLConnection locateStage1(Platform platform, CPU cpu) throws IOException {
    URL url = new URL(
            "https://cds.sun.com/is-bin/INTERSHOP.enfinity/WFS/CDS-CDS_Developer-Site/en_US/-/USD/ViewProductDetail-Start?ProductRef="
                    + id);
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    String cookie = con.getHeaderField("Set-Cookie");
    LOGGER.fine("Cookie=" + cookie);

    Tidy tidy = new Tidy();
    tidy.setErrout(new PrintWriter(new NullWriter()));
    DOMReader domReader = new DOMReader();
    Document dom = domReader.read(tidy.parseDOM(con.getInputStream(), null));

    Element form = null;
    for (Element e : (List<Element>) dom.selectNodes("//form")) {
        String action = e.attributeValue("action");
        LOGGER.fine("Found form:" + action);
        if (action.contains("ViewFilteredProducts")) {
            form = e;
            break;
        }
    }

    con = (HttpURLConnection) new URL(form.attributeValue("action")).openConnection();
    con.setRequestMethod("POST");
    con.setDoOutput(true);
    con.setRequestProperty("Cookie", cookie);
    con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
    PrintStream os = new PrintStream(con.getOutputStream());

    // select platform
    String primary = null, secondary = null;
    Element p = (Element) form.selectSingleNode(".//select[@id='dnld_platform']");
    for (Element opt : (List<Element>) p.elements("option")) {
        String value = opt.attributeValue("value");
        String vcap = value.toUpperCase(Locale.ENGLISH);
        if (!platform.is(vcap))
            continue;
        switch (cpu.accept(vcap)) {
        case PRIMARY:
            primary = value;
            break;
        case SECONDARY:
            secondary = value;
            break;
        case UNACCEPTABLE:
            break;
        }
    }
    if (primary == null)
        primary = secondary;
    if (primary == null)
        throw new AbortException(
                "Couldn't find the right download for " + platform + " and " + cpu + " combination");
    os.print(p.attributeValue("name") + '=' + primary);
    LOGGER.fine("Platform choice:" + primary);

    // select language
    Element l = (Element) form.selectSingleNode(".//select[@id='dnld_language']");
    if (l != null) {
        os.print("&" + l.attributeValue("name") + "=" + l.element("option").attributeValue("value"));
    }

    // the rest
    for (Element e : (List<Element>) form.selectNodes(".//input")) {
        os.print('&');
        os.print(e.attributeValue("name"));
        os.print('=');
        String value = e.attributeValue("value");
        if (value == null)
            os.print("on"); // assume this is a checkbox
        else
            os.print(URLEncoder.encode(value, "UTF-8"));
    }
    os.close();
    return con;
}

From source file:com.tremolosecurity.provisioning.core.ProvisioningEngineImpl.java

@Override
public void initScheduler() throws ProvisioningException {
    if (this.cfgMgr.getCfg().getProvisioning() == null
            || this.cfgMgr.getCfg().getProvisioning().getScheduler() == null) {
        logger.warn("Scheduler not defined");
        return;
    }

    SchedulingType sct = this.cfgMgr.getCfg().getProvisioning().getScheduler();

    Properties scheduleProps = new Properties();

    scheduleProps.setProperty("org.quartz.scheduler.instanceName", sct.getInstanceLabel());

    String instanceLabel = null;
    try {
        Enumeration<NetworkInterface> enumer = NetworkInterface.getNetworkInterfaces();
        while (enumer.hasMoreElements()) {
            NetworkInterface ni = enumer.nextElement();
            Enumeration<InetAddress> enumeri = ni.getInetAddresses();
            while (enumeri.hasMoreElements()) {
                InetAddress addr = enumeri.nextElement();
                if (addr.getHostAddress().startsWith(sct.getInstanceIPMask())) {
                    instanceLabel = addr.getHostAddress();
                }
            }
        }
    } catch (SocketException e) {
        throw new ProvisioningException("Could not read network addresses", e);
    }

    if (instanceLabel == null) {
        logger.warn("No IP starts with '" + sct.getInstanceIPMask() + "'");
        instanceLabel = "AUTO";
    }

    scheduleProps.setProperty("org.quartz.scheduler.instanceId", instanceLabel);
    scheduleProps.setProperty("org.quartz.threadPool.threadCount", Integer.toString(sct.getThreadCount()));

    if (sct.isUseDB()) {
        scheduleProps.setProperty("org.quartz.jobStore.class", "org.quartz.impl.jdbcjobstore.JobStoreTX");
        scheduleProps.setProperty("org.quartz.jobStore.driverDelegateClass",
                sct.getScheduleDB().getDelegateClassName());
        scheduleProps.setProperty("org.quartz.jobStore.dataSource", "scheduleDB");
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.driver", sct.getScheduleDB().getDriver());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.URL", sct.getScheduleDB().getUrl());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.user", sct.getScheduleDB().getUser());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.password",
                sct.getScheduleDB().getPassword());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.maxConnections",
                Integer.toString(sct.getScheduleDB().getMaxConnections()));
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.validationQuery",
                sct.getScheduleDB().getValidationQuery());
        scheduleProps.setProperty("org.quartz.jobStore.useProperties", "true");
        scheduleProps.setProperty("org.quartz.jobStore.isClustered", "true");
    } else {
        scheduleProps.setProperty("org.quartz.jobStore.class", "org.quartz.simpl.RAMJobStore");
    }

    try {

        /*String classpath = System.getProperty("java.class.path");
        String[] classpathEntries = classpath.split(File.pathSeparator);
        for (String cp : classpathEntries) {
           System.out.println(cp);
        }*/

        PrintStream out = new PrintStream(new FileOutputStream(
                System.getProperty(OpenUnisonConstants.UNISON_CONFIG_QUARTZDIR) + "/quartz.properties"));
        scheduleProps.store(out, "Unison internal scheduler properties");
        out.flush();
        out.close();
    } catch (IOException e) {
        throw new ProvisioningException("Could not write to quartz.properties", e);
    }

    try {
        this.scheduler = StdSchedulerFactory.getDefaultScheduler();
        this.scheduler.start();
        this.cfgMgr.addThread(new StopScheduler(this.scheduler));
        HashSet<String> jobKeys = new HashSet<String>();

        for (JobType jobType : sct.getJob()) {
            jobKeys.add(jobType.getName() + "-" + jobType.getGroup());
            JobKey jk = new JobKey(jobType.getName(), jobType.getGroup());
            JobDetail jd = this.scheduler.getJobDetail(jk);
            if (jd == null) {
                logger.info("Adding new job '" + jobType.getName() + "' / '" + jobType.getGroup() + "'");
                try {
                    addJob(jobType, jk);

                } catch (ClassNotFoundException e) {
                    throw new ProvisioningException("Could not initialize job", e);
                }

            } else {
                //check to see if we need to modify
                StringBuffer cron = new StringBuffer();
                cron.append(jobType.getCronSchedule().getSeconds()).append(' ')
                        .append(jobType.getCronSchedule().getMinutes()).append(' ')
                        .append(jobType.getCronSchedule().getHours()).append(' ')
                        .append(jobType.getCronSchedule().getDayOfMonth()).append(' ')
                        .append(jobType.getCronSchedule().getMonth()).append(' ')
                        .append(jobType.getCronSchedule().getDayOfWeek()).append(' ')
                        .append(jobType.getCronSchedule().getYear());

                Properties configProps = new Properties();
                for (ParamType pt : jobType.getParam()) {
                    configProps.setProperty(pt.getName(), pt.getValue());
                }

                Properties jobProps = new Properties();
                for (String key : jd.getJobDataMap().getKeys()) {
                    jobProps.setProperty(key, (String) jd.getJobDataMap().getString(key));
                }

                List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jd.getKey());
                CronTrigger trigger = (CronTrigger) triggers.get(0);

                if (!jobType.getClassName().equals(jd.getJobClass().getName())) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in class name");
                    reloadJob(jobType, jd);
                } else if (!cron.toString().equalsIgnoreCase(trigger.getCronExpression())) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in schedule");
                    reloadJob(jobType, jd);
                } else if (!configProps.equals(jobProps)) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in properties");
                    reloadJob(jobType, jd);
                }
            }
        }

        for (String groupName : scheduler.getJobGroupNames()) {

            for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {

                String jobName = jobKey.getName();
                String jobGroup = jobKey.getGroup();

                //get job's trigger
                List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jobKey);

                if (!jobKeys.contains(jobName + "-" + jobGroup)) {
                    logger.info("Removing jab '" + jobName + "' / '" + jobGroup + "'");
                    scheduler.deleteJob(jobKey);
                }

            }

        }

    } catch (SchedulerException e) {
        throw new ProvisioningException("Could not initialize scheduler", e);
    } catch (ClassNotFoundException e) {
        throw new ProvisioningException("Could not initialize scheduler", e);
    }

}

From source file:edu.msu.cme.rdp.alignment.errorcheck.RmPartialSeqs.java

public HashSet<Sequence> checkPartial(PrintStream seqOutstream, PrintStream alignOutstream)
        throws OverlapCheckFailedException, IOException {
    HashSet<Sequence> partialSeqs = new HashSet<Sequence>();
    for (int i = 0; i < seqList.size(); i++) {
        Sequence seqx = seqList.get(i);
        PairwiseAlignment bestResult = null;
        int bestScore = Integer.MIN_VALUE;
        Sequence bestSeqy = null;

        ArrayList<NuclSeqMatch.BestMatch> matchResults = sabCalculator.findTopKMatch(seqx, knn);
        for (NuclSeqMatch.BestMatch match : matchResults) {

            Sequence seqy = refSeqMap.get(match.getBestMatch().getSeqName());
            PairwiseAlignment result = PairwiseAligner.align(seqx.getSeqString().replaceAll("U", "T"),
                    seqy.getSeqString().replaceAll("U", "T"), scoringMatrix, mode);

            if (bestResult == null || result.getScore() >= bestScore) {
                bestResult = result;
                bestScore = result.getScore();
                bestSeqy = seqy;
            }

        }
        double distance = dist.getDistance(bestResult.getAlignedSeqj().getBytes(),
                bestResult.getAlignedSeqi().getBytes(), 0);

        int beginGaps = getBeginGapLength(bestResult.getAlignedSeqi());
        int endGaps = getEndGapLength(bestResult.getAlignedSeqi());
        if ((beginGaps >= this.min_begin_gaps) || (endGaps >= this.min_end_gaps)) {
            partialSeqs.add(seqx);
        } else {
            seqOutstream.println(">" + seqx.getSeqName() + "\t" + seqx.getDesc() + "\n" + seqx.getSeqString());
        }
        if (alignOutstream != null) {
            alignOutstream.println(">\t" + seqx.getSeqName() + "\t" + bestSeqy.getSeqName() + "\t"
                    + String.format("%.3f", distance) + "\tmissingBegin=" + (beginGaps >= this.min_begin_gaps)
                    + "\tmissingEnd=" + (endGaps >= this.min_end_gaps) + "\tbeginGaps=" + beginGaps
                    + "\tendGaps=" + endGaps);
            alignOutstream.print(bestResult.getAlignedSeqi() + "\n");
            alignOutstream.print(bestResult.getAlignedSeqj() + "\n");
        }
    }
    seqOutstream.close();
    if (alignOutstream != null)
        alignOutstream.close();

    return partialSeqs;
}

From source file:com.moscona.dataSpace.ExportHelper.java

public void csvExport(DataFrame df, String fileName, boolean includeMetaData)
        throws FileNotFoundException, DataSpaceException {
    // FIXME exports sorted and label columns twice - once populated and once not - the populated ones are the wrong ones
    PrintStream out = new PrintStream(new File(fileName));
    try {
        ArrayList<String> labels = new ArrayList<String>();
        ArrayList<String> sorted = new ArrayList<String>();

        for (String col : df.getColumnNames()) {
            if (df.isLabel(col)) {
                labels.add(col);
            }
            if (df.get(col).isSorted()) {
                sorted.add(col);
            }
        }

        if (includeMetaData) {
            csvOut(out, "name", df.getName());
            csvOut(out, "description", df.getDescription());
            csvOut(out, "row ID", df.getRowId());
            csvOut(out, "sort column", df.getSortColumn());

            Collections.sort(labels);
            Collections.sort(sorted);

            out.println(excelQuote("label columns") + "," + StringUtils.join(labels, ","));
            out.println(excelQuote("sorted columns") + "," + StringUtils.join(sorted, ","));
            out.println();
        }

        ArrayList<String> columns = new ArrayList<String>();
        ArrayList<String> remaining = new ArrayList<String>(df.getColumnNames());
        if (df.getRowId() != null) {
            // make first column the row ID
            String rowId = df.getRowId();
            columns.add(rowId);
            remaining.remove(rowId);
        }
        // add all the sorted columns
        columns.addAll(sorted);
        remaining.removeAll(sorted);
        remaining.removeAll(labels); // those will come in last
        Collections.sort(remaining);
        columns.addAll(remaining);
        columns.addAll(labels);

        out.println(StringUtils.join(columns, ","));
        IVectorIterator<Map<String, IScalar>> iterator = df.iterator();
        while (iterator.hasNext()) {
            Map<String, IScalar> row = iterator.next();
            ArrayList<String> values = new ArrayList<String>();
            for (String col : columns) {
                values.add(toCsvString(row.get(col)));
            }
            out.println(StringUtils.join(values, ","));
        }
    } finally {
        out.close();
    }
}

From source file:com.orange.atk.graphAnalyser.LectureJATKResult.java

/**
 * Save config file.
 */
private void jMenuItemSaveConfigFileActionPerformed(ActionEvent evt) {

    String JATKpath = Platform.getInstance().getJATKPath();
    String pathihmconfig = JATKpath + Platform.FILE_SEPARATOR + "log" + Platform.FILE_SEPARATOR
            + "ConfigIHM.cfg";
    //get a value from confile
    String Scriptpath = getvalueconfigfile(pathihmconfig, "path_READGRAPH");

    PrintStream ps = null;
    try {
        ps = new PrintStream(new FileOutputStream(Scriptpath + Platform.FILE_SEPARATOR + "Confile2.xml"));
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        Logger.getLogger(this.getClass()).warn("Can't Create config file");
        return;
    }
    ps.println("<confile>");
    ps.println("<graphlist>");

    Set<String> cles = mapPerfGraph.keySet();
    Iterator<String> it = cles.iterator();
    while (it.hasNext()) {
        String cle = (String) it.next();
        PerformanceGraph graph = (PerformanceGraph) mapPerfGraph.get(cle);

        ps.println("<graph  name=\"" + cle + ".csv" + "\"" + " color=\"" + getcolor(graph.getColor()) + "\""
                + "/>");
    }
    ps.println("</graphlist>");
    ps.println("<markerlist>");
    ps.println("<marker  name=\"keyPress\" position=\"0.2\"  color=\"gray\"/>");
    ps.println("<marker  name=\"log\" position=\"0.4\"  color=\"gray\"/>");
    ps.println("<marker  name=\"Action\" position=\"0.5\"  color=\"gray\"/>");
    ps.println("<marker  name=\"Standard Out/Err\" position=\"0.7\"  color=\"gray\"/>");
    ps.println("<marker  name=\"ScreenShot\" position=\"0.9\"  color=\"gray\"/>");
    ps.println("<marker  name=\"Error JATK\" position=\"0.9\"  color=\"gray\"/>");
    ps.println("</markerlist>");

    ps.println("</confile>");
    ps.flush();
    ps.close();

}