Example usage for java.io FileWriter FileWriter

Introduction

This page collects example usages of the java.io.FileWriter constructor, drawn from the source files listed below.

Prototype

public FileWriter(File file, boolean append) throws IOException

Document

Constructs a FileWriter given the File to write and a boolean indicating whether to append the data written.
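
A minimal, self-contained sketch of this constructor in use (the file name and message below are illustrative only, not taken from the examples that follow):

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class FileWriterAppendExample {
    public static void main(String[] args) throws IOException {
        File log = new File("example.log"); // hypothetical file name
        // Passing true opens the file in append mode; passing false truncates it.
        FileWriter writer = new FileWriter(log, true);
        try {
            writer.write("one line of output\n");
        } finally {
            writer.close(); // always release the underlying file handle
        }
    }
}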

Usage

From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java

public static void main(String[] args) throws IOException, java.text.ParseException {
    Options opts = new Options(); // Commons CLI options container (declaration missing from the original snippet)
    opts.addOption("algorithm", true,
            "Used algorithm for compression. Possible values are 'brute-force', "
                    + "'brute-force-edges','brute-force-merges','randomized','randomized-merges',"
                    + "'randomized-edges'," + "'fast-brute-force',"
                    + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'.");
    opts.addOption("query", true, "Query nodes ids, separated by comma.");
    opts.addOption("queryfile", true, "Read query nodes from file.");
    opts.addOption("ratio", true, "Goal ratio");
    opts.addOption("importancefile", true, "Read importances straight from file");
    opts.addOption("keepedges", false, "Don't remove edges during merges");
    opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case");
    opts.addOption("paths", false, "Do path oriented compression");
    opts.addOption("edges", false, "Do edge oriented compression");
    // opts.addOption( "a",

    double sigma = 1.0;
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(1); // a parse failure is an error, so exit with a non-zero status
    }

    String queryStr = cmd.getOptionValue("query");
    String[] queryNodeIDs = {};
    double[] queryNodeIMP = {};
    if (queryStr != null) {
        queryNodeIDs = queryStr.split(",");
        queryNodeIMP = new double[queryNodeIDs.length];
        for (int i = 0; i < queryNodeIDs.length; i++) {
            String s = queryNodeIDs[i];
            String[] es = s.split("=");
            queryNodeIMP[i] = 1;
            if (es.length == 2) {
                queryNodeIDs[i] = es[0];
                queryNodeIMP[i] = Double.parseDouble(es[1]);
            } else if (es.length > 2) {
                System.out.println("Too many '=' in querynode specification: " + s);
            }
        }
    }

    String queryFile = cmd.getOptionValue("queryfile");
    Map<String, Double> queryNodes = Collections.emptyMap();
    if (queryFile != null) {
        File in = new File(queryFile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        queryNodes = readMap(read);
        read.close();
    }

    String impfile = cmd.getOptionValue("importancefile");
    Map<String, Double> importances = null;
    if (impfile != null) {
        File in = new File(impfile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        importances = readMap(read);
        read.close();
    }

    String algoStr = cmd.getOptionValue("algorithm");
    CompressionAlgorithm algo = null;

    if (algoStr == null || algoStr.equals("brute-force")) {
        algo = new BruteForceCompression();
    } else if (algoStr.equals("brute-force-edges")) {
        algo = new BruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("brute-force-merges")) {
        algo = new BruteForceCompressionOnlyMerges();
    } else if (algoStr.equals("fast-brute-force-merges")) {
        //algo = new FastBruteForceCompressionOnlyMerges();
        algo = new FastBruteForceCompression(true, false);
    } else if (algoStr.equals("fast-brute-force-edges")) {
        algo = new FastBruteForceCompression(false, true);
        //algo = new FastBruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("fast-brute-force")) {
        algo = new FastBruteForceCompression(true, true);
    } else if (algoStr.equals("randomized-edges")) {
        algo = new RandomizedCompressionOnlyEdges(); //modified
    } else if (algoStr.equals("randomized")) {
        algo = new RandomizedCompression();
    } else if (algoStr.equals("randomized-merges")) {
        algo = new RandomizedCompressionOnlyMerges();
    } else {
        System.out.println("Unsupported algorithm: " + algoStr);
        printHelp();
        return; // do not continue with a null algorithm
    }

    String ratioStr = cmd.getOptionValue("ratio");
    double ratio = 0;
    if (ratioStr != null) {
        ratio = Double.parseDouble(ratioStr);
    } else {
        System.out.println("Goal ratio not specified");
        printHelp();
        return; // a goal ratio is required
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
        return; // an input graph file is required
    }

    BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile));
    HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>();
    for (String id : queryNodes.keySet()) {
        queryBMNodes.put(bmg.getNode(id), queryNodes.get(id));
    }

    long startMillis = System.currentTimeMillis();
    ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes);

    if (importances != null) {
        for (String id : importances.keySet()) {
            wrap.setImportance(bmg.getNode(id), importances.get(id));
        }
    }

    ImportanceMerger merger = null;
    if (cmd.hasOption("edges")) {
        merger = new ImportanceMergerEdges(wrap.getImportanceGraph());
    } else if (cmd.hasOption("paths")) {
        merger = new ImportanceMergerPaths(wrap.getImportanceGraph());
    } else {
        System.out.println("Specify either 'paths' or 'edges'.");
        System.exit(1);
    }

    if (cmd.hasOption("keepedges")) {
        merger.setKeepEdges(true);
    }

    algo.compress(merger, ratio);
    long endMillis = System.currentTimeMillis();

    // write importance

    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false));
        for (BMNode nod : bmg.getNodes()) {
            wr.write(nod + " " + wrap.getImportance(nod) + "\n");
        }
        wr.close();
    }

    // write sum of all pairs of node importance    added by Fang
    /*   {
    BufferedWriter wr = new BufferedWriter(new FileWriter("sum_of_all_pairs_importance.txt", true));
    ImportanceGraph orig = wrap.getImportanceGraph();
    double sum = 0;
            
    for (int i = 0; i <= orig.getMaxNodeId(); i++) {
        for (int j = i+1; j <= orig.getMaxNodeId(); j++) {
            sum = sum+ wrap.getImportance(i)* wrap.getImportance(j);
        }
    }
            
    wr.write(""+sum);
    wr.write("\n");
    wr.close();
       }
            
    */

    // write uncompressed edges
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            String iname = wrap.intToNode(i).toString();
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i < j)
                    continue;
                String jname = wrap.intToNode(j).toString();
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b));
                wr.write("\n");
            }
        }
        wr.close();
    }
    // write distance
    {
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("distance.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true)); //modified by Fang

        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        double error = 0;
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i <= j)
                    continue;
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j);
                // modify by Fang: multiply imp(u)imp(v)

            }
        }
        error = Math.sqrt(error);
        // error = Math.sqrt(error / 2); // modified by Fang: the error of each edge is counted twice
        wr.write("" + error);
        wr.write("\n");
        wr.close();
    }
    // write sizes
    {
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph comp = merger.getCurrentGraph();
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("sizes.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("sizes.txt", true)); //modified by Fang

        wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " "
                + comp.getEdgeCount());
        wr.write("\n");
        wr.close();
    }
    //write time
    {
        System.out.println("writing time");
        BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true)); //modified by Fang
        double secs = (endMillis - startMillis) * 0.001;
        wr.write("" + secs + "\n");
        wr.close();
    }

    //write change of connectivity for edge-oriented case       // added by Fang
    {
        if (cmd.hasOption("connectivity")) {

            BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true));
            ImportanceGraph orig = wrap.getImportanceGraph();
            ImportanceGraph ucom = merger.getUncompressedGraph();

            double diff = 0;

            for (int i = 0; i <= orig.getMaxNodeId(); i++) {
                ProbDijkstra pdori = new ProbDijkstra(orig, i);
                ProbDijkstra pducom = new ProbDijkstra(ucom, i);

                for (int j = i + 1; j <= orig.getMaxNodeId(); j++) {
                    double oriconn = pdori.getProbTo(j);
                    double ucomconn = pducom.getProbTo(j);

                    diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i)
                            * wrap.getImportance(j);

                }
            }

            diff = Math.sqrt(diff);
            wr.write("" + diff);
            wr.write("\n");
            wr.close();

        }
    }

    //write output graph
    {
        BMGraph output = bmg;//new BMGraph(bmg);

        int no = 0;
        BMNode[] nodes = new BMNode[merger.getGroups().size()];
        for (ArrayList<Integer> gr : merger.getGroups()) {
            BMNode bmgroup = new BMNode("Group", "" + (no + 1));
            bmgroup.setAttributes(new HashMap<String, String>());
            bmgroup.put("autoedges", "0");
            nodes[no] = bmgroup;
            no++;
            if (gr.size() == 0)
                continue;
            for (int x : gr) {
                BMNode nod = output.getNode(wrap.intToNode(x).toString());
                BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to");
                output.ensureHasEdge(belongs);
            }
            output.ensureHasNode(bmgroup);
        }
        for (int i = 0; i < nodes.length; i++) {
            for (int x : merger.getCurrentGraph().getNeighbors(i)) {
                if (x == i) {
                    nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                    //ge.put("goodness", ""+merger.getCurrentGraph().getEdgeWeight(i, x));
                    continue;
                }
                BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge");
                ge.setAttributes(new HashMap<String, String>());
                ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                output.ensureHasEdge(ge);
            }
        }
        System.out.println(output.getGroupNodes());

        BMGraphUtils.writeBMGraph(output, "output.bmg");
    }
}

From source file:eu.smartfp7.foursquare.AttendanceCrawler.java

/**
 * The main method takes an arbitrary number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 * 
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 * 
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call has been made for each 
    // venue. Ensures that we get data only once each hour. 
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                // Scan to the end of the file, keeping the last line read in 'mem'.
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too many venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use these credentials.\nExiting now.");
        return;
    }

    while (true) {

        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there are no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the beginning
                         *  of the specific crawl. We might want to change this and update this file once
                         *  in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    // Remove the venue if Foursquare reports it as deleted; otherwise
                    // re-queue it so that it gets reevaluated later.
                    if (e instanceof FoursquareAPIException
                            && ((FoursquareAPIException) e).getHttp_code().equals("400")
                            && ((FoursquareAPIException) e).getError_detail()
                                    .equals("Venue " + venue_id + " has been deleted")) {
                        city_venues.get(c).remove(venue_id);
                        removeVenue(venue_id, c);
                    } else {
                        city_venues_buffer.add(venue_id);
                    }

                    continue;
                }
            } // while

            // Every day between midnight and 2 AM, we repair all the broken time
            // series (if there is anything to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}

From source file:Main.java

public static boolean writeLineToFile(File file, String line, boolean append) {
    try {
        FileWriter fileWriter = new FileWriter(file, append);
        fileWriter.append(line + "\r\n");
        fileWriter.flush();
        fileWriter.close();
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}

From source file:Main.java

public static void writeToFile(String fileName, String xml) throws IOException {
    PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, true), 1024));
    out.println(xml);
    out.flush();
    out.close();
}

From source file:Main.java

public static void writeFile(String writeStr, boolean add) {
    try {
        FileWriter fout = new FileWriter(mFilePath, add);

        fout.write(writeStr);
        if (add) {
            fout.write("\r\n");
        }

        fout.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:Main.java

public static void appendOneline(String fileName, String oneline) {
    PrintWriter out = null;
    try {
        out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, true)));
        out.print(oneline + "\n");
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (out != null) {
            out.close();
        }
    }

}

From source file:Main.java

public static boolean writeXMLFile(String outFileName, String xmlFile) {
    BufferedWriter writer;
    try {
        writer = new BufferedWriter(new FileWriter(outFileName, false));
        writer.write(xmlFile);
        writer.close();
        return true;
    } catch (IOException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    }
}

From source file:Main.java

public static void writeToFile(String fileName, String toWrite) throws IOException {
    File dir = new File(PHONE_BASE_PATH);
    if (!dir.exists()) {
        dir.mkdirs();
    }
    File f = new File(PHONE_BASE_PATH, fileName);
    FileWriter fw = new FileWriter(f, true);
    fw.write(toWrite + '\n');
    fw.flush();
    fw.close();
}

From source file:Main.java

public static boolean writeFile(String filePath, String content, boolean append) {
    FileWriter fileWriter = null;
    try {
        fileWriter = new FileWriter(filePath, append);
        fileWriter.write(content);
        return true;
    } catch (IOException e) {
        throw new RuntimeException("IOException occurred. ", e);
    } finally {
        if (fileWriter != null) {
            try {
                fileWriter.close();
            } catch (IOException e) {
                throw new RuntimeException("IOException occurred. ", e);
            }
        }
    }
}

From source file:Main.java

static void writeToFollower(String name) {
    try {
        File root = new File(Environment.getExternalStorageDirectory().toString(), ".Instagram");
        if (!root.exists()) {
            root.mkdirs();

        }
        File file = new File(root, "Following.txt");
        BufferedWriter buf = new BufferedWriter(new FileWriter(file, true));
        buf.newLine();
        buf.append(name);
        buf.close();
    } catch (Throwable t) {
        // Errors while writing are ignored; the write is best-effort.
    }
}