Example usage for java.io BufferedWriter append

Introduction

This page lists usage examples for java.io BufferedWriter append.

Prototype

public Writer append(CharSequence csq) throws IOException 

Document

Appends the specified character sequence to this writer.
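
Since append returns the writer, calls can be chained. As a quick orientation before the full examples, here is a minimal, self-contained sketch (the class name AppendDemo and the file name demo.txt are illustrative only):

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class AppendDemo {
    public static void main(String[] args) {
        // try-with-resources flushes and closes the writer automatically
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("demo.txt", true))) {
            writer.append("first line");
            writer.newLine();
            writer.append("second").append(" line"); // append returns the Writer, so calls chain
            writer.newLine();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}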

Usage

From source file:com.ideateam.plugin.Version.java

public void writeLocaleToFile(String fileName, String msg) {

    try {
        String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/TSVB/" + fileName;

        File file = new File(path);

        if (!file.exists()) {
            File f = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/TSVB/");
            f.mkdirs();
            file.createNewFile();

        }

        BufferedWriter buf = new BufferedWriter(new FileWriter(file, true)); // 'true' opens the file in append mode
        buf.append(msg);
        buf.newLine();
        buf.close();
        // callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, "writed to file"));
        Log.d(TAG, "..callBackPlugin");
        activity.sendJavascript("TSVB.system.Helper.callBackPlugin('ok')");

    } catch (IOException e) {
        Log.d(TAG, e.getMessage());
    }

}

From source file:usemp.certh.visualizations.Visualizations.java

public void copyVisualizationFiles(String targetDir) {
    URL url = this.getClass().getClassLoader().getResource("");
    File resourcesFile = null;
    String resourcesDir = null;
    try {
        resourcesFile = new File(url.toURI());
    } catch (URISyntaxException e) {
        resourcesFile = new File(url.getPath());
    } finally {
        resourcesDir = resourcesFile.getAbsolutePath();
        if ((!resourcesDir.endsWith("/")) && (!resourcesDir.endsWith("\\")))
            resourcesDir = resourcesDir + "/";
    }

    try {
        FileUtils.copyDirectoryToDirectory(new File(resourcesDir + "visualizations/"), new File(targetDir));
    } catch (IOException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    //First prepare the files for the 'advanced' scoring visualization
    String fileOut = targetDir + "visualizations/js/advanced/plot.js";
    String fileHead = targetDir + "visualizations/js/advanced/plot_head";
    String fileJson = targetDir + "myScores.json";
    String fileTail = targetDir + "visualizations/js/advanced/plot_tail";

    try {
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileOut), "UTF8"));

        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(fileHead), "UTF8"));
        String line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileJson), "UTF8"));
        line = br.readLine();
        br.close();
        bw.append(line); // the JSON payload goes in as a single line between head and tail
        bw.newLine();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileTail), "UTF8"));
        line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();
        bw.close();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //Then prepare the files for the 'plain control' visualization
    fileOut = targetDir + "visualizations/js/wall_plain/main.js";
    fileHead = targetDir + "visualizations/js/wall_plain/main_head";
    fileJson = targetDir + "myControlSuggestionSetPlain.json";
    fileTail = targetDir + "visualizations/js/wall_plain/main_tail";

    try {
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileOut), "UTF8"));

        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(fileHead), "UTF8"));
        String line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileJson), "UTF8"));
        line = br.readLine();
        br.close();
        bw.append(line);
        bw.newLine();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileTail), "UTF8"));
        line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();
        bw.close();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //Then prepare the files for the 'extended control' visualization
    fileOut = targetDir + "visualizations/js/wall_extended/main.js";
    fileHead = targetDir + "visualizations/js/wall_extended/main_head";
    fileJson = targetDir + "myControlSuggestionSetExtended.json";
    fileTail = targetDir + "visualizations/js/wall_extended/main_tail";

    try {
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileOut), "UTF8"));

        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(fileHead), "UTF8"));
        String line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileJson), "UTF8"));
        line = br.readLine();
        br.close();
        bw.append(line);
        bw.newLine();

        br = new BufferedReader(new InputStreamReader(new FileInputStream(fileTail), "UTF8"));
        line = null;
        while ((line = br.readLine()) != null) {
            bw.append(line);
            bw.newLine();
        }
        br.close();
        bw.close();

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:de.dfki.km.perspecting.obie.experiments.PhraseExperiment.java

@Test
public void analysePhraseLength() throws Exception {
    final BufferedWriter bw = new BufferedWriter(
            new FileWriter($SCOOBIE_HOME + "results/token_length_histogram.csv"));
    Connection conn = pool.getConnection();
    ResultSet rs = conn.createStatement().executeQuery(
            "SELECT length(literal), count(*)  FROM index_literals GROUP BY length(literal) ORDER BY length(literal)");
    while (rs.next()) {
        bw.append(rs.getInt(1) + "\t" + rs.getInt(2));
        bw.newLine();
    }
    bw.close();
}

From source file:org.forgerock.openidm.audit.impl.CSVAuditLogger.java

private void writeHeaders(Collection<String> fieldOrder, BufferedWriter fileWriter) throws IOException {
    Iterator<String> iter = fieldOrder.iterator();
    while (iter.hasNext()) {
        String key = iter.next();
        fileWriter.append("\"");
        String escapedStr = key.replaceAll("\"", "\"\""); // double embedded quotes, per CSV quoting convention
        fileWriter.append(escapedStr);
        fileWriter.append("\"");
        if (iter.hasNext()) {
            fileWriter.append(",");
        }
    }
    fileWriter.append(recordDelim);
}

From source file:diffhunter.Indexer.java

public void Make_Index(Database hashdb, String file_name, String read_gene_location)
        throws FileNotFoundException, IOException {
    Set_Parameters();
    ConcurrentHashMap<String, Map<Integer, Integer>> dic_gene_loc_count = new ConcurrentHashMap<>();
    ArrayList<String> lines_from_bed_file = new ArrayList<>();
    BufferedReader br = new BufferedReader(new FileReader(file_name));

    String line = br.readLine();
    List<String> toks = Arrays.asList(line.split("\t"));
    lines_from_bed_file.add(line);
    String last_Seen_chromosome = toks.get(0).replace("chr", "");
    line = br.readLine();
    lines_from_bed_file.add(line);
    toks = Arrays.asList(line.split("\t"));
    String new_chromosome = toks.get(0).replace("chr", "");

    while (((line = br.readLine()) != null) || lines_from_bed_file.size() > 0) {
        if (line != null) {
            toks = Arrays.asList(line.split("\t"));
            new_chromosome = toks.get(0).replace("chr", "");
        }
        // process the line.
        if (line == null || !new_chromosome.equals(last_Seen_chromosome)) {
            System.out.println("Processing chromosome" + "\t" + last_Seen_chromosome);
            last_Seen_chromosome = new_chromosome;
            lines_from_bed_file.parallelStream().forEach(content -> {

                List<String> inner_toks = Arrays.asList(content.split("\t"));
                // WARNING: the STRAND column index may need to be changed.
                String strand = inner_toks.get(5);
                String chromosome_ = inner_toks.get(0).replace("chr", "");
                if (!dic_Loc_gene.get(strand).containsKey(chromosome_)) {
                    return;
                }
                Integer start_loc = Integer.parseInt(inner_toks.get(1));
                Integer end_loc = Integer.parseInt(inner_toks.get(2));
                List<Interval<String>> res__ = dic_Loc_gene.get(strand).get(chromosome_).getIntervals(start_loc,
                        end_loc);
                for (int z = 0; z < res__.size(); z++) {

                    dic_gene_loc_count.putIfAbsent(res__.get(z).getData(), new HashMap<>());
                    String gene_symbol = res__.get(z).getData();
                    Integer temp_gene_start_loc = dic_genes.get(gene_symbol).start_loc;
                    Integer temp_gene_end_loc = dic_genes.get(gene_symbol).end_loc;
                    if (start_loc < temp_gene_start_loc) {
                        start_loc = temp_gene_start_loc;
                    }
                    if (end_loc > temp_gene_end_loc) {
                        end_loc = temp_gene_end_loc;
                    }
                    synchronized (dic_synchrinzer_genes.get(gene_symbol)) {
                        for (int k = start_loc; k <= end_loc; k++) {
                            Integer value_inside = 0;
                            value_inside = dic_gene_loc_count.get(gene_symbol).get(k);
                            dic_gene_loc_count.get(gene_symbol).put(k,
                                    value_inside == null ? 1 : (value_inside + 1));
                        }
                    }
                }
            });
            ArrayList<String> keys_ = new ArrayList<>(dic_gene_loc_count.keySet());
            ArrayList<String> alt_keys = new ArrayList<>();
            for (int i = 0; i < keys_.size(); i++) {
                // keep only positions covered at least twice, ordered by position
                LinkedHashMap<Integer, Integer> tempMap = new LinkedHashMap<>();
                dic_gene_loc_count.get(keys_.get(i)).entrySet().stream().filter(p -> p.getValue() >= 2)
                        .sorted(Comparator.comparing(E -> E.getKey()))
                        .forEach((entry) -> tempMap.put(entry.getKey(), entry.getValue()));
                if (tempMap.isEmpty()) {
                    dic_gene_loc_count.remove(keys_.get(i));
                    continue;
                }
                hashdb.put(null, BerkeleyDB_Box.Get_BDB(keys_.get(i)),
                        BerkeleyDB_Box.Get_BDB_Dictionary(tempMap));
                alt_keys.add(keys_.get(i));
                dic_gene_loc_count.remove(keys_.get(i));
            }

            hashdb.sync();
            lines_from_bed_file.clear();
            if (line != null) {
                lines_from_bed_file.add(line);
            }
            Path p = Paths.get(file_name);
            file_name = p.getFileName().toString();

            BufferedWriter output = new BufferedWriter(new FileWriter((Paths
                    .get(read_gene_location, FilenameUtils.removeExtension(file_name) + ".txt").toString()),
                    true));
            for (String alt_key : alt_keys) {
                output.append(alt_key);
                output.newLine();
            }
            output.close();
        } else if (new_chromosome.equals(last_Seen_chromosome)) {
            lines_from_bed_file.add(line);
        }

    }
    br.close();
    hashdb.sync();
    hashdb.close();

}

From source file:edu.utah.bmi.biosio.rdf.DbToRdfExporter.java

/**
 * Export ontology concepts from the database to a single Turtle/RDF file
 * @param filePath Path to the output file
 * @throws Exception if the export fails
 */
public void exportToFile(String filePath) throws Exception {
    BufferedWriter bw = null;
    File file = new File(filePath);
    try {
        bw = new BufferedWriter(new FileWriter(file));

        DBQueryUtils queryService = new DBQueryUtils(this.sessionFactory);

        //ontology prefixes
        bw.append("@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n");
        bw.append("@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n");
        bw.append("@prefix owl: <http://www.w3.org/2002/07/owl#> .\n");
        bw.append("@prefix dc: <http://purl.org/dc/elements/1.1/> .\n");
        bw.append("@prefix dct: <http://purl.org/dc/terms/> .\n");
        bw.append("@prefix " + rdfOntoPrefix + ": <" + rdfOntoUri + "#> .\n");
        List<ExternalOntology> ontologies = queryService.getExternalOntologies();
        if (ontologies != null) {
            for (ExternalOntology ontology : ontologies) {
                bw.append("@prefix " + ontology.getId().toLowerCase() + ": <" + ontology.getPrefix() + "> .\n");
            }
        }
        bw.append("@base <" + rdfOntoUri + "> .\n");

        //ontology information
        bw.append("\n<" + rdfOntoUri + ">\n");
        bw.append("  rdf:type owl:Ontology ;\n");
        /*//ontology imports
          if (ontologies!=null){
             for (ExternalOntology ontology : ontologies){
         bw.append("   owl:imports <"+ontology.getDocumentURL()+"> ;\n");
             }
          }*/
        bw.append("  dc:title \"" + rdfOntoDescription + "\" ;\n");
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); // lowercase 'yyyy': uppercase 'YYYY' is the week-based year
        bw.append("  dc:date \"" + formatter.format(new Date()) + "\" ;\n");
        bw.append("  dc:creator \"" + rdfOntoAuthor + "\" .\n");

        System.out.println("Retrieving concepts from database...");
        List<Concept> concepts = queryService.getConcepts();
        System.out.println(" -> " + concepts.size() + " concepts and relationships found.\nExporting...");
        int c = 0;
        int r = 0;

        //add citation as possible annotation property
        String citationPropertyStr = "\n" + rdfOntoPrefix + ":citation rdf:type owl:AnnotationProperty ;\n";
        citationPropertyStr += "  rdfs:label \"citation\"@en ;\n";
        citationPropertyStr += "  rdfs:comment \"Reference to scientific literature or online resource\"@en ;\n";
        citationPropertyStr += "  rdfs:subClassOf <http://purl.obolibrary.org/obo/IAO_0000115> .\n";

        bw.append(citationPropertyStr);

        //separate concepts (classes) and relationship definitions (properties)
        List<Concept> conceptDefinitions = new ArrayList<Concept>();
        for (Concept concept : concepts) {
            if (concept.getIsRelationship()) {
                //relationshipDefinitions.put(getNormalizedConceptTerm(concept.getTerm()), concept);
                String relTerm = getNormalizedConceptTerm(concept.getTerm());
                if (!relTerm.equals("is_a")) {
                    String relationshipStr = "\n" + rdfOntoPrefix + ":" + concept.getCui()
                            + " rdf:type owl:ObjectProperty ;\n";
                    //relationship terms
                    List<Synonym> synonyms = queryService.getConceptTerms(concept.getCui());
                    for (Synonym synonym : synonyms) {
                        relationshipStr += "  rdfs:label \"" + synonym.getTerm() + "\"@"
                                + synonym.getLanguage().toLowerCase() + " ;\n";
                    }
                    //relationship definitions
                    Set<Description> descriptions = concept.getDescriptions();
                    for (Description desc : descriptions) {
                        relationshipStr += "  rdfs:comment \"" + desc.getDescription() + "\"@"
                                + desc.getLanguage().toLowerCase() + " ;\n";
                    }
                    bw.append(relationshipStr.substring(0, relationshipStr.length() - 2) + ".\n");
                    r++;
                }
            } else {
                conceptDefinitions.add(concept);
                c++;
            }
        }
        //get concepts (classes)
        for (Concept concept : conceptDefinitions) {

            StringBuilder conceptStringBuilder = new StringBuilder();

            //concept
            conceptStringBuilder
                    .append("\n" + rdfOntoPrefix + ":" + concept.getCui() + " rdf:type owl:Class ;\n");
            //conceptStringBuilder.append("\n"+rdfOntoPrefix+":"+getNormalizedConceptTerm(concept.getTerm()) + " rdf:type rdfs:Class;\n");

            //concept terms
            List<Synonym> synonyms = queryService.getConceptTerms(concept.getCui());
            for (Synonym synonym : synonyms) {
                conceptStringBuilder.append("  rdfs:label \"" + synonym.getTerm() + "\"@"
                        + synonym.getLanguage().toLowerCase() + " ;\n");
            }
            //definitions
            Set<Description> descriptions = concept.getDescriptions();
            for (Description desc : descriptions) {
                conceptStringBuilder.append("  rdfs:comment \"" + desc.getDescription() + "\"@"
                        + desc.getLanguage().toLowerCase() + " ;\n");
            }

            //citations
            List<Citation> citations = queryService.getCitationsByCUI(concept.getCui());
            if (citations != null) {
                for (Citation cit : citations) {
                    conceptStringBuilder
                            .append("  " + rdfOntoPrefix + ":citation \"" + cit.getText() + "\" ;\n");
                }
            }

            //relationships
            List<Relationship> relationships = queryService.getConceptRelationships(concept.getCui());
            for (Relationship rel : relationships) {
                Concept conceptTo = rel.getConceptTo();
                Concept conceptRel = rel.getRelationshipDefinition();
                //String relTerm = conceptRel.getCui();
                String concept2 = conceptTo.getCui();
                String relTerm = getNormalizedConceptTerm(conceptRel.getTerm());
                //String concept2 = getNormalizedConceptTerm(conceptTo.getTerm());
                if (relTerm.equals("is_a")) {
                    conceptStringBuilder.append("  rdfs:subClassOf " + rdfOntoPrefix + ":" + concept2 + " ;\n");
                } else if (relTerm.equals("has_part")) {
                    conceptStringBuilder.append("  <http://purl.obolibrary.org/obo/BFO_0000051> "
                            + rdfOntoPrefix + ":" + concept2 + " ;\n");
                } else {
                    conceptStringBuilder.append("  " + rdfOntoPrefix + ":" + conceptRel.getCui() + " "
                            + rdfOntoPrefix + ":" + concept2 + ";\n");
                }
            }

            String conceptString = conceptStringBuilder.toString();
            bw.append(conceptString.substring(0, conceptString.length() - 2) + ".\n");

            // add mappings ('subclassOf' relationships with other ontology concepts)
            List<Mapping> ontologyMappings = queryService.getOntologyMappingsByCUI(concept.getCui());
            if (ontologyMappings != null) {
                for (Mapping ontologyMapping : ontologyMappings) {
                    String concept2 = ontologyMapping.getOntologyDefinition().getId().toLowerCase() + ":"
                            + ontologyMapping.getOntologyTerm();
                    bw.append("\n" + rdfOntoPrefix + ":" + concept.getCui() + " rdf:type owl:Class ; \n");
                    bw.append("  rdfs:subClassOf " + concept2 + " .\n");
                }
            }
        }
        System.out.println(" -> " + c + " concepts (RDF classes) and " + r
                + " relationship definitions (RDF properties) exported!");
        bw.close();
    } catch (Exception e) {
        if (bw != null) {
            try {
                bw.close();
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
        throw e;
    }
}

From source file:com.termmed.statistics.Processor.java

/**
 * Adds the report header row.
 *
 * @param bw the writer the header is appended to
 * @param detail the output detail file whose report header is written
 * @param listDescriptors additional list descriptors whose titles become extra columns (may be null)
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void addHeader(BufferedWriter bw, OutputDetailFile detail,
        List<HierarchicalConfiguration> listDescriptors) throws IOException {
    bw.append(detail.getReportHeader());
    if (listDescriptors != null) {
        for (HierarchicalConfiguration additionalList : listDescriptors) {
            bw.append(",");
            bw.append(additionalList.getString("listTitle"));
        }
    }
    bw.append("\r\n");

}

From source file:socialtrade1.Engine1.java

void tree() {

    url = "https://www.socialtrade.biz/User/MyConnections.aspx/GetUserDetails";
    int n = 10000000;
    for (int i = 0; i < 10000000; i = i + 1000) {
        System.out.println(n);
        body = "{ 'lblId':'" + n + "'}";
        n++;
        PostThread2();
        try {
            Thread.sleep(5);
        } catch (InterruptedException ex) {
            Logger.getLogger(Engine1.class.getName()).log(Level.SEVERE, null, ex);
        }
        if (i % 5000 == 0) {
            FileWriter fw = null;
            try {

                fw = new FileWriter("C:\\Users\\Harry\\Desktop\\test" + "\\st2.csv", true);
                BufferedWriter bw = new BufferedWriter(fw);
                bw.append(Utilities.res);
                Utilities.res = "";
                bw.close();
            } catch (IOException ex) {
                Logger.getLogger(Engine1.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                try {
                    fw.close();
                } catch (IOException ex) {
                    Logger.getLogger(Engine1.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }

    FileWriter fw = null;
    try {

        fw = new FileWriter("C:\\Users\\Harry\\Desktop\\test" + "\\st2.csv", true);
        BufferedWriter bw = new BufferedWriter(fw);
        bw.append(Utilities.res);
        Utilities.res = "";
        bw.close();
    } catch (IOException ex) {
        Logger.getLogger(Engine1.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            fw.close();
        } catch (IOException ex) {
            Logger.getLogger(Engine1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    state = 700;
}

From source file:tagtime.beeminder.BeeminderGraph.java

public void writeToBeeFile(String id, long timestamp, double hours, String comment) {
    if (id == null) {
        throw new IllegalArgumentException("id cannot be null!");
    }

    BufferedWriter fileWriter;

    try {
        fileWriter = new BufferedWriter(new FileWriter(
                Main.getDataDirectory().getPath() + "/" + tagTimeInstance.username + "_" + graphName + ".bee"));
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }

    try {
        fileWriter.append(id + " " + timestamp + " " + hourFormatter.format(hours) + " " + comment);
        fileWriter.flush();
    } catch (IOException e) {
        e.printStackTrace();
    }

    try {
        fileWriter.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.itemanalysis.jmetrik.gui.JmetrikPreferencesManager.java

private void createLogProperties(String logHome) {
    //directory should already exist
    //create log4j properties file if it does not exist
    String header = "#DO NOT EDIT - JMETRIK LOG PROPERTIES FILE - DO NOT EDIT";
    String fullPropertiesName = (logHome + "/" + DEFAULT_LOG_PROPERTY_FILE);
    String fullLogFileName = (logHome + "/" + DEFAULT_LOG_NAME);
    String fullScriptLogFileName = (logHome + "/" + DEFAULT_SCRIPT_LOG_NAME);

    File f = new File(fullPropertiesName);
    if (!f.exists()) {
        try {
            createLogHome(logHome);
            f.createNewFile();
            BufferedWriter bw = new BufferedWriter(new FileWriter(f));
            bw.append(header);
            bw.newLine();
            bw.append("log4j.logger.jmetrik-logger=ALL, adminAppender");
            bw.newLine();
            bw.append("log4j.logger.jmetrik-script-logger=INFO, scriptAppender");
            bw.newLine();
            bw.append("log4j.additivity.jmetrik-logger=false");
            bw.newLine();
            bw.append("log4j.additivity.jmetrik-script-logger=false");
            bw.newLine();

            //Main appender processes all levels
            bw.append("log4j.appender.adminAppender=org.apache.log4j.FileAppender");
            bw.newLine();
            bw.append("log4j.appender.adminAppender.layout=org.apache.log4j.PatternLayout");
            bw.newLine();
            bw.append("log4j.appender.adminAppender.File=" + fullLogFileName);
            bw.newLine();
            bw.append("log4j.appender.adminAppender.Append=false");
            bw.newLine();
            bw.append("log4j.appender.adminAppender.layout.ConversionPattern=[%p] %d{DATE} %n%m%n%n");
            bw.newLine();

            //Script appender processes scripts only
            bw.append("log4j.appender.scriptAppender=org.apache.log4j.FileAppender");
            bw.newLine();
            bw.append("log4j.appender.scriptAppender.layout=org.apache.log4j.PatternLayout");
            bw.newLine();
            bw.append("log4j.appender.scriptAppender.File=" + fullScriptLogFileName);
            bw.newLine();
            bw.append("log4j.appender.scriptAppender.Append=false");
            bw.newLine();
            bw.append("log4j.appender.scriptAppender.layout.ConversionPattern=%m%n%n");
            bw.newLine();

            bw.close();
        } catch (IOException ex) {
            firePropertyChange("error", "", "Error - Log properties file could not be created.");
        }
    }
}