Example usage for java.io BufferedWriter close

List of usage examples for java.io BufferedWriter close

Introduction

On this page you can find usage examples for java.io.BufferedWriter.close().

Prototype

@SuppressWarnings("try")
    public void close() throws IOException 
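
Note that close() first flushes the buffered output, then releases the underlying stream; writing to the stream after close() throws an IOException, while closing an already-closed writer has no effect. Because BufferedWriter implements AutoCloseable, close() is usually invoked implicitly through try-with-resources rather than called by hand as in the examples below. A minimal sketch (the file name out.txt is illustrative):

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically, which flushes
        // the buffer and then releases the underlying file handle
        try (BufferedWriter writer = Files.newBufferedWriter(Paths.get("out.txt"))) {
            writer.write("hello");
            writer.newLine();
        } // close() runs here even if write() throws
    }
}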

Usage

From source file:net.semanticmetadata.lire.solr.AddImages.java

public static void main(String[] args) throws IOException, InterruptedException {
    BitSampling.readHashFunctions();
    LinkedList<Thread> threads = new LinkedList<Thread>();
    for (int j = 10; j < 21; j++) {
        final int tz = j;
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    List<File> files = FileUtils
                            .getAllImageFiles(new File("D:\\DataSets\\WIPO-US\\jpg_us_trim\\" + tz), true);
                    int count = 0;
                    BufferedWriter br = new BufferedWriter(new FileWriter("add-us-" + tz + ".xml", false));
                    br.write("<add>\n");
                    for (Iterator<File> iterator = files.iterator(); iterator.hasNext();) {
                        File file = iterator.next();
                        br.write(createAddDoc(file).toString());
                        count++;
                        //                            if (count % 1000 == 0) System.out.print('.');
                    }
                    br.write("</add>\n");
                    br.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        };
        t.start();
        threads.add(t);
    }
    for (Iterator<Thread> iterator = threads.iterator(); iterator.hasNext();) {
        Thread next = iterator.next();
        next.join();
    }
}
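
In the example above, br.close() is skipped whenever a write throws, leaking the file handle until the writer is garbage-collected. Code that cannot use try-with-resources typically closes in a finally block instead; a minimal sketch of that pattern (the file name is illustrative):

BufferedWriter br = new BufferedWriter(new FileWriter("add-us-10.xml", false));
try {
    br.write("<add>\n");
    // ... write one <doc> element per image file ...
    br.write("</add>\n");
} finally {
    br.close(); // runs on both the normal and the exception path
}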

From source file:es.upm.dit.xsdinferencer.XSDInferencer.java

/**
 * Main method, executed when the tool is invoked as a standalone application
 * @param args an array with all the arguments passed to the application
 * @throws XSDConfigurationException if there is a problem regarding the configuration
 * @throws IOException if there is an I/O problem while reading the input XML files or writing the output files
 * @throws JDOMException if there is any problem while parsing the input XML files
 */
public static void main(String[] args) throws Exception {
    if (Arrays.asList(args).contains("--help")) {
        printHelp();
        System.exit(0);
    }
    try {
        XSDInferencer inferencer = new XSDInferencer();

        Results results = inferencer.inferSchema(args);

        Map<String, String> xsdsAsXMLStrings = results.getXSDsAsStrings();
        Map<String, String> jsonsAsStrings = results.getJsonSchemasAsStrings();
        Map<String, String> schemasAsStrings = xsdsAsXMLStrings != null ? xsdsAsXMLStrings : jsonsAsStrings;
        Map<String, String> statisticsDocumentsAsXMLStrings = results.getStatisticsAsStrings();
        File outputDirectory = null;
        for (int i = 0; i < args.length; i++) {
            if (!args[i].equalsIgnoreCase("--" + KEY_OUTPUT_DIRECTORY))
                continue;
            if (args[i + 1].startsWith("--") || i == args.length - 1)
                throw new IllegalArgumentException("Output directory parameter bad specified");
            outputDirectory = new File(args[i + 1]);
            if (!outputDirectory.exists())
                throw new FileNotFoundException("Output directory not found.");
            if (!outputDirectory.isDirectory())
                throw new NotDirectoryException(outputDirectory.getPath());
        }
        if (outputDirectory != null) {
            System.out.println("Writing results to " + outputDirectory.getAbsolutePath());

            for (String name : schemasAsStrings.keySet()) {
                File currentOutputFile = new File(outputDirectory, name);
                FileOutputStream fOs = new FileOutputStream(currentOutputFile);
                BufferedWriter bWriter = new BufferedWriter(new OutputStreamWriter(fOs, Charsets.UTF_8));
                bWriter.write(schemasAsStrings.get(name));
                bWriter.flush();
                bWriter.close();
            }
            if (statisticsDocumentsAsXMLStrings != null) {
                for (String name : statisticsDocumentsAsXMLStrings.keySet()) {
                    File currentOutputFile = new File(outputDirectory, name);
                    FileWriter fWriter = new FileWriter(currentOutputFile);
                    BufferedWriter bWriter = new BufferedWriter(fWriter);
                    bWriter.write(statisticsDocumentsAsXMLStrings.get(name));
                    bWriter.flush();
                    bWriter.close();
                }
            }
            System.out.println("Results written");
        } else {
            for (String name : schemasAsStrings.keySet()) {
                System.out.println(name + ":");
                System.out.println(schemasAsStrings.get(name));
                System.out.println();
            }
            if (statisticsDocumentsAsXMLStrings != null) {
                for (String name : statisticsDocumentsAsXMLStrings.keySet()) {
                    System.out.println(name + ":");
                    System.out.println(statisticsDocumentsAsXMLStrings.get(name));
                    System.out.println();
                }
            }
        }
    } catch (XSDInferencerException e) {
        System.err.println();
        System.err.println("Error at inference proccess: " + e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:edu.internet2.middleware.psp.names.CreateLdapNames.java

/**
 * @param args
 */

public static void main(String[] args) {

    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter("/tmp/people.ldif"));

        RandomCollection<String> givenNames = readGivenNames();

        RandomCollection<String> surnames = readSurnames();

        for (int i = 0; i < 1000; i++) {
            String surname = surnames.next();
            String givenName = givenNames.next();
            String ldif = getLdif(i, givenName, surname);
            // System.out.println(ldif);
            writer.write(ldif);
            writer.write("\n");
        }

        writer.close();

    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:discovery.compression.kdd2011.ratio.RatioCompressionReport.java

public static void main(String[] args) throws GraphReadingException, IOException, java.text.ParseException {
    opts.addOption("r", true, "Goal compression ratio");

    //      opts.addOption( "a",
    //       true,
    //       "Algorithm used for compression. The default and only currently available option is \"greedy\"");
    //opts.addOption("cost-output",true,"Output file for costs, default is costs.txt");
    //opts.addOption("cost-format",true,"Output format for ");

    opts.addOption("ctype", true, "Connectivity type: global or local, default is global.");
    opts.addOption("connectivity", false,
            "enables output for connectivity. Connectivity info will be written to connectivity.txt");
    opts.addOption("output_bmg", true, "Write bmg file with groups to given file.");
    opts.addOption("algorithm", true, "Algorithm to use, one of: greedy random1 random2 bruteforce slowgreedy");
    opts.addOption("hop2", false, "Only try to merge nodes that have common neighbors");
    opts.addOption("kmedoids", false, "Enables output for kmedoids clustering");
    opts.addOption("kmedoids_k", true, "Number of clusters to be used in kmedoids. Default is 3");
    opts.addOption("kmedoids_output", true,
            "Output file for kmedoid clusters. Default is clusters.txt. This file will be overwritten.");
    opts.addOption("norefresh", false,
            "Use old style merging: all connectivities are not refreshed when merging");
    opts.addOption("edge_attribute", true, "Attribute from bmgraph used as edge weight");
    opts.addOption("only_times", false, "Only write times.txt");
    //opts.addOption("no_metrics",false,"Exit after compression, don't calculate any metrics or produce output bmg for the compression.");
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    boolean connectivity = false;
    double ratio = 0;

    boolean hop2 = cmd.hasOption("hop2");

    RatioCompression compression = new GreedyRatioCompression(hop2);

    if (cmd.hasOption("connectivity"))
        connectivity = true;

    ConnectivityType ctype = ConnectivityType.GLOBAL;
    CompressionMergeModel mergeModel = new PathAverageMergeModel();
    if (cmd.hasOption("ctype")) {
        String ctypeStr = cmd.getOptionValue("ctype");
        if (ctypeStr.equals("local")) {
            ctype = ConnectivityType.LOCAL;
            mergeModel = new EdgeAverageMergeModel();
        } else if (ctypeStr.equals("global")) {
            ctype = ConnectivityType.GLOBAL;
            mergeModel = new PathAverageMergeModel();
        } else {
            System.out.println(PROGRAM_NAME + ": unknown connectivity type " + ctypeStr);
            printHelp();
        }
    }

    if (cmd.hasOption("norefresh"))
        mergeModel = new PathAverageMergeModelNorefresh();
    if (cmd.hasOption("algorithm")) {
        String alg = cmd.getOptionValue("algorithm");
        if (alg.equals("greedy")) {
            compression = new GreedyRatioCompression(hop2);
        } else if (alg.equals("random1")) {
            compression = new RandomRatioCompression(hop2);
        } else if (alg.equals("random2")) {
            compression = new SmartRandomRatioCompression(hop2);
        } else if (alg.equals("bruteforce")) {
            compression = new BruteForceCompression(hop2, ctype == ConnectivityType.LOCAL);
        } else if (alg.equals("slowgreedy")) {
            compression = new SlowGreedyRatioCompression(hop2);
        } else {
            System.out.println("algorithm must be one of: greedy random1 random2 bruteforce slowgreedy");
            printHelp();
        }
    }

    compression.setMergeModel(mergeModel);

    if (cmd.hasOption("r")) {
        ratio = Double.parseDouble(cmd.getOptionValue("r"));
    } else {
        System.out.println(PROGRAM_NAME + ": compression ratio not defined");
        printHelp();
    }

    if (cmd.hasOption("help")) {
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    boolean kmedoids = false;
    int kmedoidsK = 3;
    String kmedoidsOutput = "clusters.txt";
    if (cmd.hasOption("kmedoids"))
        kmedoids = true;
    if (cmd.hasOption("kmedoids_k"))
        kmedoidsK = Integer.parseInt(cmd.getOptionValue("kmedoids_k"));
    if (cmd.hasOption("kmedoids_output"))
        kmedoidsOutput = cmd.getOptionValue("kmedoids_output");

    String edgeAttrib = "goodness";
    if (cmd.hasOption("edge_attribute"))
        edgeAttrib = cmd.getOptionValue("edge_attribute");

    // This program should directly use bmgraph-java to read and
    // DefaultGraph should have a constructor that takes a BMGraph as an
    // argument.

    //VisualGraph vg = new VisualGraph(infile, edgeAttrib, false);
    //System.out.println("vg read");
    //SimpleVisualGraph origSG = new SimpleVisualGraph(vg);
    BMGraph bmg = BMGraphUtils.readBMGraph(infile);

    int origN = bmg.getNodes().size();

    //for(int i=0;i<origN;i++)
    //System.out.println(i+"="+origSG.getVisualNode(i));
    System.out.println("bmgraph read");

    BMNode[] i2n = new BMNode[origN];
    HashMap<BMNode, Integer> n2i = new HashMap<BMNode, Integer>();
    {
        int pi = 0;
        for (BMNode nod : bmg.getNodes()) {
            n2i.put(nod, pi);
            i2n[pi++] = nod;
        }
    }

    DefaultGraph dg = new DefaultGraph();
    for (BMEdge e : bmg.getEdges()) {
        dg.addEdge(n2i.get(e.getSource()), n2i.get(e.getTarget()), Double.parseDouble(e.get(edgeAttrib)));
    }

    DefaultGraph origDG = dg.copy();

    System.out.println("inputs read");
    RatioCompression nopCompressor = new RatioCompression.DefaultRatioCompression();
    ResultGraph nopResult = nopCompressor.compressGraph(dg, 1);

    long start = System.currentTimeMillis();
    ResultGraph result = compression.compressGraph(dg, ratio);
    long timeSpent = System.currentTimeMillis() - start;
    double seconds = timeSpent * 0.001;

    BufferedWriter timesWriter = new BufferedWriter(new FileWriter("times.txt", true));
    timesWriter.append("" + seconds + "\n");
    timesWriter.close();

    if (cmd.hasOption("only_times")) {
        System.out.println("Compression done, exiting.");
        System.exit(0);
    }

    BufferedWriter costsWriter = new BufferedWriter(new FileWriter("costs.txt", true));
    costsWriter.append("" + nopResult.getCompressorCosts() + " " + result.getCompressorCosts() + "\n");
    costsWriter.close();

    double[][] origProb;
    double[][] compProb;
    int[] group = new int[origN];

    for (int i = 0; i < result.partition.size(); i++)
        for (int x : result.partition.get(i))
            group[x] = i;

    if (ctype == ConnectivityType.LOCAL) {
        origProb = new double[origN][origN];
        compProb = new double[origN][origN];
        DefaultGraph g = result.uncompressedGraph();
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                origProb[i][j] = dg.getEdgeWeight(i, j);
                compProb[i][j] = g.getEdgeWeight(i, j);
            }
        }
        System.out.println("Writing edge-dissimilarity");
    } else {

        origProb = ProbDijkstra.getProbMatrix(origDG);

        compProb = new double[origN][origN];

        System.out.println("nodeCount = " + result.graph.getNodeCount());
        double[][] ccProb = ProbDijkstra.getProbMatrix(result.graph);
        System.out.println("ccProb.length = " + ccProb.length);

        System.out.println("ccProb[0].length = " + ccProb[0].length);

        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                if (group[i] == group[j])
                    compProb[i][j] = result.graph.getEdgeWeight(group[i], group[j]);
                else
                    compProb[i][j] = ccProb[group[i]][group[j]];
            }
        }

        System.out.println("Writing best-path-dissimilarity");
        //compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());

    }

    {
        BufferedWriter connWr = null;

        if (connectivity) {
            connWr = new BufferedWriter(new FileWriter("connectivity.txt", true));
        }
        double totalDiff = 0;

        for (int i = 0; i < origN; i++) {
            for (int j = i + 1; j < origN; j++) {

                double diff = Math.abs(origProb[i][j] - compProb[i][j]);
                //VisualNode ni = origSG.getVisualNode(i);
                //VisualNode nj = origSG.getVisualNode(j);
                BMNode ni = i2n[i];
                BMNode nj = i2n[j];
                if (connectivity)
                    connWr.append(ni + "\t" + nj + "\t" + origProb[i][j] + "\t" + compProb[i][j] + "\t" + diff
                            + "\n");
                totalDiff += diff * diff;
            }
        }

        if (connectivity) {
            connWr.append("\n");
            connWr.close();
        }

        totalDiff = Math.sqrt(totalDiff);
        BufferedWriter dissWr = new BufferedWriter(new FileWriter("dissimilarity.txt", true));
        dissWr.append("" + totalDiff + "\n");
        dissWr.close();
    }

    if (cmd.hasOption("output_bmg")) {
        BMGraph outgraph = new BMGraph();

        String outputfile = cmd.getOptionValue("output_bmg");
        HashMap<Integer, BMNode> nodes = new HashMap<Integer, BMNode>();

        for (int i = 0; i < result.partition.size(); i++) {
            ArrayList<Integer> g = result.partition.get(i);
            if (g.size() == 0)
                continue;
            BMNode node = new BMNode("Supernode_" + i);
            HashMap<String, String> attributes = new HashMap<String, String>();
            StringBuffer contents = new StringBuffer();
            for (int x : g)
                contents.append(i2n[x] + ",");
            contents.delete(contents.length() - 1, contents.length());

            attributes.put("nodes", contents.toString());
            attributes.put("self-edge", "" + result.graph.getEdgeWeight(i, i));
            node.setAttributes(attributes);
            nodes.put(i, node);
            outgraph.ensureHasNode(node);
        }

        for (int i = 0; i < result.partition.size(); i++) {
            if (result.partition.get(i).size() == 0)
                continue;
            for (int x : result.graph.getNeighbors(i)) {
                if (x < i)
                    continue;
                BMNode from = nodes.get(i);
                BMNode to = nodes.get(x);
                if (from == null || to == null) {
                    System.out.println(from + "->" + to);
                    System.out.println(i + "->" + x);
                    System.out.println("");
                }
                BMEdge e = new BMEdge(nodes.get(i), nodes.get(x), "notype");

                e.setAttributes(new HashMap<String, String>());
                e.put("goodness", "" + result.graph.getEdgeWeight(i, x));
                outgraph.ensureHasEdge(e);
            }
        }
        BMGraphUtils.writeBMGraph(outgraph, outputfile);
    }

    // k medoids!
    if (kmedoids) {
        //KMedoidsResult clustersOrig=KMedoids.runKMedoids(origProb,kmedoidsK);

        if (ctype == ConnectivityType.LOCAL) {
            compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
        }

        //KMedoidsResult compClusters = KMedoids.runKMedoids(ProbDijkstra.getProbMatrix(result.graph),kmedoidsK);
        KMedoidsResult clustersComp = KMedoids.runKMedoids(compProb, kmedoidsK);

        BufferedWriter bw = new BufferedWriter(new FileWriter(kmedoidsOutput));

        for (int i = 0; i < origN; i++) {
            int g = group[i];
            //bw.append(origSG.getVisualNode(i).getBMNode()+" "+compClusters.clusters[g]+"\n");
            bw.append(i2n[i] + " " + clustersComp.clusters[i] + "\n");
        }
        bw.close();
    }

    System.exit(0);
}

From source file:com.idega.util.FileLocalizer.java

public static void main(String[] args) {
    if (args.length != 2) {
        System.err.println("Wimp. I need two parameters, input file og directory and output file");
        System.err.println("Usage java FileLocalizer input output");

        return;
    }

    File in = null;
    BufferedWriter out = null;

    Properties props = new Properties();

    try {
        in = new File(args[0]);
    } catch (Exception e) {
        System.err.println("Auli. Error : " + e.toString());

        return;
    }

    try {
        out = new BufferedWriter(new FileWriter(args[1]));
    } catch (java.io.IOException e) {
        System.err.println("Auli. Error : " + e.toString());

        return;
    }

    try {
        findRecursive(in, props);
        props.list(new PrintWriter(out));
    } catch (Exception e) {
        System.err.println("Error reading or writing file : " + e.toString());
    }

    try {
        out.close();
    } catch (java.io.IOException e) {
        System.err.println("Error closing files : " + e.toString());
    }
}

From source file:org.jfree.chart.demo.Display.java

/**
 * Launch the application.
 */
public static void main(String[] args) throws IOException {

    EventQueue.invokeLater(new Runnable() {
        public void run() {
            try {
                Display window = new Display();
                window.frame.setVisible(true);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

    try {

        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        String s;
        File dataFile = new File("data.txt");
        FileWriter fw = new FileWriter(dataFile);
        BufferedWriter bw = new BufferedWriter(fw);
        DataParser datIn = new DataParser(effectiveX, effectiveY);

        //while(!enabled){}//spin until enabled

        while ((s = in.readLine()) != null && s.length() != 0 && enabled) {
            // System.out.println(s);
            if (datIn.parseString(s)) {
                bw.write(s);
                bw.write('\n');
                bw.flush();
                panel_1.plotCoords(datIn.getX(), datIn.getFlippedY());
                TimeElapsed.setText(Double.toString(datIn.getTime()));
            }
        }

        bw.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java

public static void main(String[] args) throws IOException, java.text.ParseException {
    opts.addOption("algorithm", true,
            "Used algorithm for compression. Possible values are 'brute-force', "
                    + "'brute-force-edges','brute-force-merges','randomized','randomized-merges',"
                    + "'randomized-edges'," + "'fast-brute-force',"
                    + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'.");
    opts.addOption("query", true, "Query nodes ids, separated by comma.");
    opts.addOption("queryfile", true, "Read query nodes from file.");
    opts.addOption("ratio", true, "Goal ratio");
    opts.addOption("importancefile", true, "Read importances straight from file");
    opts.addOption("keepedges", false, "Don't remove edges during merges");
    opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case");
    opts.addOption("paths", false, "Do path oriented compression");
    opts.addOption("edges", false, "Do edge oriented compression");
    // opts.addOption( "a",

    double sigma = 1.0;
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    String queryStr = cmd.getOptionValue("query");
    String[] queryNodeIDs = {};
    double[] queryNodeIMP = {};
    if (queryStr != null) {
        queryNodeIDs = queryStr.split(",");
        queryNodeIMP = new double[queryNodeIDs.length];
        for (int i = 0; i < queryNodeIDs.length; i++) {
            String s = queryNodeIDs[i];
            String[] es = s.split("=");
            queryNodeIMP[i] = 1;
            if (es.length == 2) {
                queryNodeIDs[i] = es[0];
                queryNodeIMP[i] = Double.parseDouble(es[1]);
            } else if (es.length > 2) {
                System.out.println("Too many '=' in querynode specification: " + s);
            }
        }
    }

    String queryFile = cmd.getOptionValue("queryfile");
    Map<String, Double> queryNodes = Collections.emptyMap();
    if (queryFile != null) {
        File in = new File(queryFile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        queryNodes = readMap(read);
        read.close();
    }

    String impfile = cmd.getOptionValue("importancefile");
    Map<String, Double> importances = null;
    if (impfile != null) {
        File in = new File(impfile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        importances = readMap(read);
        read.close();
    }

    String algoStr = cmd.getOptionValue("algorithm");
    CompressionAlgorithm algo = null;

    if (algoStr == null || algoStr.equals("brute-force")) {
        algo = new BruteForceCompression();
    } else if (algoStr.equals("brute-force-edges")) {
        algo = new BruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("brute-force-merges")) {
        algo = new BruteForceCompressionOnlyMerges();
    } else if (algoStr.equals("fast-brute-force-merges")) {
        //algo = new FastBruteForceCompressionOnlyMerges();
        algo = new FastBruteForceCompression(true, false);
    } else if (algoStr.equals("fast-brute-force-edges")) {
        algo = new FastBruteForceCompression(false, true);
        //algo = new FastBruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("fast-brute-force")) {
        algo = new FastBruteForceCompression(true, true);
    } else if (algoStr.equals("randomized-edges")) {
        algo = new RandomizedCompressionOnlyEdges(); //modified
    } else if (algoStr.equals("randomized")) {
        algo = new RandomizedCompression();
    } else if (algoStr.equals("randomized-merges")) {
        algo = new RandomizedCompressionOnlyMerges();
    } else {
        System.out.println("Unsupported algorithm: " + algoStr);
        printHelp();
    }

    String ratioStr = cmd.getOptionValue("ratio");
    double ratio = 0;
    if (ratioStr != null) {
        ratio = Double.parseDouble(ratioStr);
    } else {
        System.out.println("Goal ratio not specified");
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile));
    HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>();
    for (String id : queryNodes.keySet()) {
        queryBMNodes.put(bmg.getNode(id), queryNodes.get(id));
    }

    long startMillis = System.currentTimeMillis();
    ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes);

    if (importances != null) {
        for (String id : importances.keySet()) {
            wrap.setImportance(bmg.getNode(id), importances.get(id));
        }
    }

    ImportanceMerger merger = null;
    if (cmd.hasOption("edges")) {
        merger = new ImportanceMergerEdges(wrap.getImportanceGraph());
    } else if (cmd.hasOption("paths")) {
        merger = new ImportanceMergerPaths(wrap.getImportanceGraph());
    } else {
        System.out.println("Specify either 'paths' or 'edges'.");
        System.exit(1);
    }

    if (cmd.hasOption("keepedges")) {
        merger.setKeepEdges(true);
    }

    algo.compress(merger, ratio);
    long endMillis = System.currentTimeMillis();

    // write importance

    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false));
        for (BMNode nod : bmg.getNodes()) {
            wr.write(nod + " " + wrap.getImportance(nod) + "\n");
        }
        wr.close();
    }

    // write sum of all pairs of node importance    added by Fang
    /*   {
    BufferedWriter wr = new BufferedWriter(new FileWriter("sum_of_all_pairs_importance.txt", true));
    ImportanceGraph orig = wrap.getImportanceGraph();
    double sum = 0;
            
    for (int i = 0; i <= orig.getMaxNodeId(); i++) {
        for (int j = i+1; j <= orig.getMaxNodeId(); j++) {
            sum = sum+ wrap.getImportance(i)* wrap.getImportance(j);
        }
    }
            
    wr.write(""+sum);
    wr.write("\n");
    wr.close();
       }
            
    */

    // write uncompressed edges
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            String iname = wrap.intToNode(i).toString();
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i < j)
                    continue;
                String jname = wrap.intToNode(j).toString();
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b));
                wr.write("\n");
            }
        }
        wr.close();
    }
    // write distance
    {
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("distance.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true)); //modified by Fang

        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        double error = 0;
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i <= j)
                    continue;
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j);
                // modify by Fang: multiply imp(u)imp(v)

            }
        }
        error = Math.sqrt(error);
        //////////error = Math.sqrt(error / 2); // modified by Fang: the error of each
        // edge is counted twice
        wr.write("" + error);
        wr.write("\n");
        wr.close();
    }
    // write sizes
    {
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph comp = merger.getCurrentGraph();
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("sizes.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("sizes.txt", true)); //modified by Fang

        wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " "
                + comp.getEdgeCount());
        wr.write("\n");
        wr.close();
    }
    //write time
    {
        System.out.println("writing time");
        BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true)); //modified by Fang
        double secs = (endMillis - startMillis) * 0.001;
        wr.write("" + secs + "\n");
        wr.close();
    }

    //write change of connectivity for edge-oriented case       // added by Fang
    {
        if (cmd.hasOption("connectivity")) {

            BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true));
            ImportanceGraph orig = wrap.getImportanceGraph();
            ImportanceGraph ucom = merger.getUncompressedGraph();

            double diff = 0;

            for (int i = 0; i <= orig.getMaxNodeId(); i++) {
                ProbDijkstra pdori = new ProbDijkstra(orig, i);
                ProbDijkstra pducom = new ProbDijkstra(ucom, i);

                for (int j = i + 1; j <= orig.getMaxNodeId(); j++) {
                    double oriconn = pdori.getProbTo(j);
                    double ucomconn = pducom.getProbTo(j);

                    diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i)
                            * wrap.getImportance(j);

                }
            }

            diff = Math.sqrt(diff);
            wr.write("" + diff);
            wr.write("\n");
            wr.close();

        }
    }

    //write output graph
    {
        BMGraph output = bmg;//new BMGraph(bmg);

        int no = 0;
        BMNode[] nodes = new BMNode[merger.getGroups().size()];
        for (ArrayList<Integer> gr : merger.getGroups()) {
            BMNode bmgroup = new BMNode("Group", "" + (no + 1));
            bmgroup.setAttributes(new HashMap<String, String>());
            bmgroup.put("autoedges", "0");
            nodes[no] = bmgroup;
            no++;
            if (gr.size() == 0)
                continue;
            for (int x : gr) {
                BMNode nod = output.getNode(wrap.intToNode(x).toString());
                BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to");
                output.ensureHasEdge(belongs);
            }
            output.ensureHasNode(bmgroup);
        }
        for (int i = 0; i < nodes.length; i++) {
            for (int x : merger.getCurrentGraph().getNeighbors(i)) {
                if (x == i) {
                    nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                    //ge.put("goodness", ""+merger.getCurrentGraph().getEdgeWeight(i, x));
                    continue;
                }
                BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge");
                ge.setAttributes(new HashMap<String, String>());
                ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                output.ensureHasEdge(ge);
            }
        }
        System.out.println(output.getGroupNodes());

        BMGraphUtils.writeBMGraph(output, "output.bmg");
    }
}

From source file:com.vmware.photon.controller.core.Main.java

public static void main(String[] args) throws Throwable {
    try {
        LoggingFactory.bootstrap();

        logger.info("args: " + Arrays.toString(args));

        ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true)
                .description("Photon Controller Core");
        parser.addArgument("config-file").help("photon controller configuration file");
        parser.addArgument("--manual").type(Boolean.class).setDefault(false)
                .help("If true, create default deployment.");

        Namespace namespace = parser.parseArgsOrFail(args);

        PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace);
        DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig();

        new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure();

        SSLContext sslContext;
        if (deployerConfig.getDeployerContext().isAuthEnabled()) {
            sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL);
            TrustManagerFactory tmf = TrustManagerFactory
                    .getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore keyStore = KeyStore.getInstance("JKS");
            InputStream in = FileUtils
                    .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath()));
            keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray());
            tmf.init(keyStore);
            sslContext.init(null, tmf.getTrustManagers(), null);
        } else {
            KeyStoreUtils.generateKeys("/thrift/");
            sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        ThriftModule thriftModule = new ThriftModule(sslContext);
        PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule,
                deployerConfig, sslContext);

        if ((Boolean) namespace.get("manual")) {
            DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(),
                    deployerConfig, xenonHost);
        }

        // Creating a temp configuration file for apife with modification to some named sections in photon-controller-config
        // so that it can match the Configuration class of dropwizard.
        File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp");
        File source = new File(args[0]);
        FileInputStream fis = new FileInputStream(source);
        BufferedReader in = new BufferedReader(new InputStreamReader(fis));

        FileWriter fstream = new FileWriter(apiFeTempConfig, true);
        BufferedWriter out = new BufferedWriter(fstream);

        String aLine = null;
        while ((aLine = in.readLine()) != null) {
            if (aLine.equals("apife:")) {
                aLine = aLine.replace("apife:", "server:");
            }
            out.write(aLine);
            out.newLine();
        }
        in.close();
        out.close();

        // This approach can be simplified once the apife container is gone, but for the time being
        // it expects the first arg to be the string "server".
        String[] apiFeArgs = new String[2];
        apiFeArgs[0] = "server";
        apiFeArgs[1] = apiFeTempConfig.getAbsolutePath();
        ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs);
        ApiFeService.addServiceHost(xenonHost);
        ApiFeService.setSSLContext(sslContext);

        ApiFeService apiFeService = new ApiFeService();
        apiFeService.run(apiFeArgs);
        apiFeTempConfig.deleteOnExit();

        LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class);
        xenonHost.setApiClient(localApiClient);

        // in the non-auth enabled scenario we need to be able to accept any self-signed certificate
        if (!deployerConfig.getDeployerContext().isAuthEnabled()) {
            KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("Shutting down");
                xenonHost.stop();
                logger.info("Done");
                LoggingFactory.detachAndStop();
            }
        });
    } catch (Exception e) {
        logger.error("Failed to start photon controller ", e);
        throw e;
    }
}

From source file:org.lieuofs.extraction.etatpays.ExtractionPays.java

/**
 * @param args
 */
public static void main(String[] args) throws IOException {
    ApplicationContext context = new ClassPathXmlApplicationContext(new String[] { "beans_lieuofs.xml" });
    EtatTerritoireCritere critere = new EtatTerritoireCritere();
    //critere.setEstEtat(Boolean.FALSE);

    EtatTerritoireDao dao = (EtatTerritoireDao) context.getBean("etatTerritoireDao");
    Set<EtatTerritoirePersistant> etats = dao.rechercher(critere);

    EtatWriter etatWriter = new NumOFSEtatWriter();
    BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream("ExtractionPaysOFSReconnuRecemment.txt"), "UTF-8"));

    List<EtatTerritoirePersistant> listeTriee = new ArrayList<EtatTerritoirePersistant>(etats);
    Collections.sort(listeTriee, new Comparator<EtatTerritoirePersistant>() {

        @Override
        public int compare(EtatTerritoirePersistant o1, EtatTerritoirePersistant o2) {
            //return o1.getFormeCourte("fr").compareTo(o2.getFormeCourte("fr"));
            return o1.getNumeroOFS() - o2.getNumeroOFS();
        }

    });

    for (EtatTerritoirePersistant etat : filtre(listeTriee)) {
        String etatStr = etatWriter.ecrireEtat(etat);
        if (null != etatStr) {
            writer.append(etatStr);
        }
    }
    writer.close();
}

From source file:org.lieuofs.extraction.etatpays.ExtractionEtat.java

/**
 * @param args
 */
public static void main(String[] args) throws IOException {
    ApplicationContext context = new ClassPathXmlApplicationContext(new String[] { "beans_lieuofs.xml" });
    EtatTerritoireCritere critere = new EtatTerritoireCritere();
    critere.setEstEtat(Boolean.TRUE);
    // critere.setValide(Boolean.TRUE);

    EtatTerritoireDao dao = (EtatTerritoireDao) context.getBean("etatTerritoireDao");
    Set<EtatTerritoirePersistant> etats = dao.rechercher(critere);

    EtatWriter etatWriter = new CsvPlatEtatWriter();
    BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream("Etat.txt"), "UTF-8"));

    List<EtatTerritoirePersistant> listeTriee = new ArrayList<EtatTerritoirePersistant>(etats);
    Collections.sort(listeTriee, new Comparator<EtatTerritoirePersistant>() {

        @Override
        public int compare(EtatTerritoirePersistant o1, EtatTerritoirePersistant o2) {
            //return o1.getFormeCourte("fr").compareTo(o2.getFormeCourte("fr"));
            return o1.getNumeroOFS() - o2.getNumeroOFS();
        }

    });

    for (EtatTerritoirePersistant etat : listeTriee) {
        String etatStr = etatWriter.ecrireEtat(etat);
        if (null != etatStr) {
            writer.append(etatStr);
        }
    }
    writer.close();
}