Example usage for java.io FileWriter close

List of usage examples for java.io FileWriter close

Introduction

On this page you can find usage examples for java.io.FileWriter.close().

Prototype

public void close() throws IOException 
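
close() flushes any buffered characters to the file and releases the underlying file handle; writing to the stream after it has been closed throws an IOException. Below is a minimal sketch (the file name is a placeholder) showing an explicit close() in a finally block, followed by the try-with-resources form, which calls close() automatically:

public static void writeGreeting() throws IOException {
    // Explicit close() in a finally block releases the file handle even if the write fails
    FileWriter writer = null;
    try {
        writer = new FileWriter("greeting.txt");
        writer.write("Hello, world\n");
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // Equivalent using try-with-resources: close() is invoked automatically
    try (FileWriter writer2 = new FileWriter("greeting.txt")) {
        writer2.write("Hello, world\n");
    }
}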

Usage

From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroupsAndBuffering.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>",
                GradleDependenciesWithGroupsAndBuffering.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);

        final com.github.fritaly.graphml4j.datastructure.Graph graph = new Graph();

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroupsAndBuffering.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the nodes per depth inside the dependency graph
        // (the topmost dependency is the first one in the stack)
        final Stack<Node> parentNodes = new Stack<Node>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Pop the nodes that are no longer parents at the current depth
            while (depth <= parentNodes.size()) {
                parentNodes.pop();
            }

            final Artifact artifact = createArtifact(line);

            Node node = graph.getNodeByData(artifact);

            // Has this dependency already been added to the graph ?
            if (node == null) {
                // No, add the node
                node = graph.addNode(artifact);
            }

            parentNodes.push(node);

            if (parentNodes.size() > 1) {
                // Generate an edge between the current node and its parent
                graph.addEdge("Depends on", parentNodes.get(parentNodes.size() - 2), node);
            }
        }

        // Create the groups after creating the nodes & edges
        for (Node node : graph.getNodes()) {
            final Artifact artifact = (Artifact) node.getData();

            final String groupId = artifact.group;

            Node groupNode = graph.getNodeByData(groupId);

            if (groupNode == null) {
                groupNode = graph.addNode(groupId);
            }

            // add the node to the group
            node.setParent(groupNode);
        }

        graph.toGraphML(fileWriter, new Renderer() {

            @Override
            public String getNodeLabel(Node node) {
                return node.isGroup() ? node.getData().toString() : ((Artifact) node.getData()).getLabel();
            }

            @Override
            public boolean isGroupOpen(Node node) {
                return true;
            }

            @Override
            public NodeStyle getNodeStyle(Node node) {
                // Customize the rendering of nodes
                final NodeStyle nodeStyle = new NodeStyle();
                nodeStyle.setWidth(250.0f);

                return nodeStyle;
            }

            @Override
            public GroupStyles getGroupStyles(Node node) {
                return new GroupStyles();
            }

            @Override
            public EdgeStyle getEdgeStyle(Edge edge) {
                return new EdgeStyle();
            }
        });

        System.out.println("Done");
    } finally {
        // Close the writer and readers (if they were opened) to release the underlying resources
        if (fileWriter != null) fileWriter.close();
        if (lineReader != null) lineReader.close();
        if (reader != null) reader.close();
    }
}

From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(
                String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the artifacts per depth inside the dependency
        // graph (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();

        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();

        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Pop the artifacts that are no longer parents at the current depth
            while (depth <= stack.size()) {
                stack.pop();
            }

            // Create an artifact from the dependency (group, artifact,
            // version) tuple
            final Artifact artifact = createArtifact(line);

            stack.push(artifact);

            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }

            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }

            artifactsByGroup.get(artifact.group).add(artifact);
        }

        // Open the graph
        graphWriter.graph();

        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();

        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);

            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());

                nodeIdsByArtifact.put(artifact, nodeId);
            }

            graphWriter.closeGroup();
        }

        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);

            graphWriter.edge(parentId, childId);
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        if (graphWriter != null) graphWriter.close();
        if (fileWriter != null) fileWriter.close();
        if (lineReader != null) lineReader.close();
        if (reader != null) reader.close();
    }
}

From source file:enrichment.Disambiguate.java

/**prerequisites:
 * cd silk_2.5.3/*_links
 * cat *.nt|sort  -t' ' -k3   > $filename
 * 
 * @param args $filename
 * @throws IOException
 * @throws URISyntaxException
 */
public static void main(String[] args) {
    File file = new File(args[0]);
    if (file.isDirectory()) {
        args = file.list(new OnlyExtFilenameFilter("nt"));
    }

    BufferedReader in;
    for (int q = 0; q < args.length; q++) {
        String filename = null;
        if (file.isDirectory()) {
            filename = file.getPath() + File.separator + args[q];
        } else {
            filename = args[q];
        }
        try {
            FileWriter output = new FileWriter(filename + "_disambiguated.nt");
            String prefix = "@prefix rdrel: <http://rdvocab.info/RDARelationshipsWEMI/> .\n"
                    + "@prefix dbpedia:    <http://de.dbpedia.org/resource/> .\n"
                    + "@prefix frbr:   <http://purl.org/vocab/frbr/core#> .\n"
                    + "@prefix lobid: <http://lobid.org/resource/> .\n"
                    + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
                    + "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
                    + "@prefix mo: <http://purl.org/ontology/mo/> .\n"
                    + "@prefix wikipedia: <https://de.wikipedia.org/wiki/> .";
            output.append(prefix + "\n\n");
            in = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));

            HashMap<String, HashMap<String, ArrayList<String>>> hm = new HashMap<String, HashMap<String, ArrayList<String>>>();
            String s;
            HashMap<String, ArrayList<String>> hmLobid = new HashMap<String, ArrayList<String>>();
            Stack<String> old_object = new Stack<String>();

            while ((s = in.readLine()) != null) {
                String[] triples = s.split(" ");
                String object = triples[2].substring(1, triples[2].length() - 1);
                if (old_object.size() > 0 && !old_object.firstElement().equals(object)) {
                    hmLobid = new HashMap<String, ArrayList<String>>();
                    old_object = new Stack<String>();
                }
                old_object.push(object);
                String subject = triples[0].substring(1, triples[0].length() - 1);
                System.out.print("\nSubject=" + object);
                System.out.print("\ntriples[2]=" + triples[2]);
                hmLobid.put(subject, getAllCreators(new URI(subject)));
                hm.put(object, hmLobid);

            }
            // get all dbpedia resources
            for (String key_one : hm.keySet()) {
                System.out.print("\n==============\n==== " + key_one + "\n===============");
                int resources_cnt = hm.get(key_one).keySet().size();
                ArrayList<String>[] creators = new ArrayList[resources_cnt];
                HashMap<String, Integer> creators_backed = new HashMap<String, Integer>();
                int x = 0;
                // get all lobid_resources subsumed under the dbpedia resource
                for (String subject_uri : hm.get(key_one).keySet()) {
                    creators[x] = new ArrayList<String>();
                    System.out.print("\n     subject_uri=" + subject_uri);
                    Iterator<String> ite = hm.get(key_one).get(subject_uri).iterator();
                    int y = 0;
                    // get all creators of the lobid resource
                    while (ite.hasNext()) {
                        String creator = ite.next();
                        System.out.print("\n          " + creator);
                        if (creators_backed.containsKey(creator)) {
                            y = creators_backed.get(creator);
                        } else {
                            y = creators_backed.size();
                            creators_backed.put(creator, y);
                        }
                        while (creators[x].size() <= y) {
                            creators[x].add("-");
                        }
                        creators[x].set(y, creator);
                        y++;
                    }
                    x++;
                }
                if (creators_backed.size() == 1) {
                    System.out
                            .println("\n" + "Every resource pointing to " + key_one + " has the same creator!");
                    for (String key_two : hm.get(key_one).keySet()) {
                        output.append("<" + key_two + "> rdrel:workManifested <" + key_one + "> .\n");
                        output.append("<" + key_two + ">  mo:wikipedia <"
                                + key_one.replaceAll("dbpedia\\.org/resource", "wikipedia\\.org/wiki")
                                + "> .\n");
                    }
                } /*else  {
                    for (int a = 0; a < creators.length; a++) {
                       System.out.print(creators[a].toString()+",");
                    }
                  }*/
            }

            output.flush();
            if (output != null) {
                output.close();
            }
        } catch (Exception e) {
            System.out.print("Exception while working on " + filename + ": \n");
            e.printStackTrace(System.out);
        }
    }
}

From source file:com.twentyn.patentScorer.ScoreMerger.java

public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());

    opts.addOption(Option.builder("r").longOpt("results").required().hasArg()
            .desc("A directory of search results to read").build());
    opts.addOption(Option.builder("s").longOpt("scores").required().hasArg()
            .desc("A directory of patent classification scores to read").build());
    opts.addOption(Option.builder("o").longOpt("output").required().hasArg()
            .desc("The output file where results will be written.").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }

    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }
    File scoresDirectory = new File(cmdLine.getOptionValue("scores"));
    if (cmdLine.getOptionValue("scores") == null || !scoresDirectory.isDirectory()) {
        LOGGER.error("Not a directory of score files: " + cmdLine.getOptionValue("scores"));
    }

    File resultsDirectory = new File(cmdLine.getOptionValue("results"));
    if (cmdLine.getOptionValue("results") == null || !resultsDirectory.isDirectory()) {
        LOGGER.error("Not a directory of results files: " + cmdLine.getOptionValue("results"));
    }

    FileWriter outputWriter = new FileWriter(cmdLine.getOptionValue("output"));

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

    FilenameFilter jsonFilter = new FilenameFilter() {
        public final Pattern JSON_PATTERN = Pattern.compile("\\.json$");

        public boolean accept(File dir, String name) {
            return JSON_PATTERN.matcher(name).find();
        }
    };

    Map<String, PatentScorer.ClassificationResult> scores = new HashMap<>();
    LOGGER.info("Reading scores from directory at " + scoresDirectory.getAbsolutePath());
    for (File scoreFile : scoresDirectory.listFiles(jsonFilter)) {
        BufferedReader reader = new BufferedReader(new FileReader(scoreFile));
        int count = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            PatentScorer.ClassificationResult res = objectMapper.readValue(line,
                    PatentScorer.ClassificationResult.class);
            scores.put(res.docId, res);
            count++;
        }
        LOGGER.info("Read " + count + " scores from " + scoreFile.getAbsolutePath());
    }

    Map<String, List<DocumentSearch.SearchResult>> synonymsToResults = new HashMap<>();
    Map<String, List<DocumentSearch.SearchResult>> inchisToResults = new HashMap<>();
    LOGGER.info("Reading results from directory at " + resultsDirectory);
    // With help from http://stackoverflow.com/questions/6846244/jackson-and-generic-type-reference.
    JavaType resultsType = objectMapper.getTypeFactory().constructCollectionType(List.class,
            DocumentSearch.SearchResult.class);

    List<File> resultsFiles = Arrays.asList(resultsDirectory.listFiles(jsonFilter));
    Collections.sort(resultsFiles, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });
    for (File resultsFile : resultsFiles) {
        BufferedReader reader = new BufferedReader(new FileReader(resultsFile));
        CharBuffer buffer = CharBuffer.allocate(Long.valueOf(resultsFile.length()).intValue());
        int bytesRead = reader.read(buffer);
        LOGGER.info("Read " + bytesRead + " bytes from " + resultsFile.getName() + " (length is "
                + resultsFile.length() + ")");
        List<DocumentSearch.SearchResult> results = objectMapper.readValue(new CharArrayReader(buffer.array()),
                resultsType);

        LOGGER.info("Read " + results.size() + " results from " + resultsFile.getAbsolutePath());

        int count = 0;
        for (DocumentSearch.SearchResult sres : results) {
            for (DocumentSearch.ResultDocument resDoc : sres.getResults()) {
                String docId = resDoc.getDocId();
                PatentScorer.ClassificationResult classificationResult = scores.get(docId);
                if (classificationResult == null) {
                    LOGGER.warn("No classification result found for " + docId);
                } else {
                    resDoc.setClassifierScore(classificationResult.getScore());
                }
            }
            if (!synonymsToResults.containsKey(sres.getSynonym())) {
                synonymsToResults.put(sres.getSynonym(), new ArrayList<DocumentSearch.SearchResult>());
            }
            synonymsToResults.get(sres.getSynonym()).add(sres);
            count++;
            if (count % 1000 == 0) {
                LOGGER.info("Processed " + count + " search result documents");
            }
        }
    }

    Comparator<DocumentSearch.ResultDocument> resultDocumentComparator = new Comparator<DocumentSearch.ResultDocument>() {
        @Override
        public int compare(DocumentSearch.ResultDocument o1, DocumentSearch.ResultDocument o2) {
            int cmp = o2.getClassifierScore().compareTo(o1.getClassifierScore());
            if (cmp != 0) {
                return cmp;
            }
            cmp = o2.getScore().compareTo(o1.getScore());
            return cmp;
        }
    };

    for (Map.Entry<String, List<DocumentSearch.SearchResult>> entry : synonymsToResults.entrySet()) {
        DocumentSearch.SearchResult newSearchRes = null;
        // Merge all result documents into a single search result.
        for (DocumentSearch.SearchResult sr : entry.getValue()) {
            if (newSearchRes == null) {
                newSearchRes = sr;
            } else {
                newSearchRes.getResults().addAll(sr.getResults());
            }
        }
        if (newSearchRes == null || newSearchRes.getResults() == null) {
            LOGGER.error("Search results for " + entry.getKey() + " are null.");
            continue;
        }
        Collections.sort(newSearchRes.getResults(), resultDocumentComparator);
        if (!inchisToResults.containsKey(newSearchRes.getInchi())) {
            inchisToResults.put(newSearchRes.getInchi(), new ArrayList<DocumentSearch.SearchResult>());
        }
        inchisToResults.get(newSearchRes.getInchi()).add(newSearchRes);
    }

    List<String> sortedKeys = new ArrayList<String>(inchisToResults.keySet());
    Collections.sort(sortedKeys);
    List<GroupedInchiResults> orderedResults = new ArrayList<>(sortedKeys.size());
    Comparator<DocumentSearch.SearchResult> synonymSorter = new Comparator<DocumentSearch.SearchResult>() {
        @Override
        public int compare(DocumentSearch.SearchResult o1, DocumentSearch.SearchResult o2) {
            return o1.getSynonym().compareTo(o2.getSynonym());
        }
    };
    for (String inchi : sortedKeys) {
        List<DocumentSearch.SearchResult> res = inchisToResults.get(inchi);
        Collections.sort(res, synonymSorter);
        orderedResults.add(new GroupedInchiResults(inchi, res));
    }

    objectMapper.writerWithView(Object.class).writeValue(outputWriter, orderedResults);
    outputWriter.close();
}

From source file:com.iciql.test.IciqlSuite.java

/**
 * Main entry point for the test suite. Executing this method will run the
 * test suite on all registered databases.
 *
 * @param args
 * @throws Exception
 */
public static void main(String... args) throws Exception {
    Params params = new Params();
    JCommander jc = new JCommander(params);
    try {
        jc.parse(args);
    } catch (ParameterException t) {
        usage(jc, t);
    }

    // Replace System.out with a file
    if (!StringUtils.isNullOrEmpty(params.dbPerformanceFile)) {
        out = new PrintStream(params.dbPerformanceFile);
        System.setErr(out);
    }

    deleteRecursively(new File("testdbs"));

    // Start the HSQL and H2 servers in-process
    org.hsqldb.Server hsql = startHSQL();
    org.h2.tools.Server h2 = startH2();

    // Statement logging
    final FileWriter statementWriter;
    if (StringUtils.isNullOrEmpty(params.sqlStatementsFile)) {
        statementWriter = null;
    } else {
        statementWriter = new FileWriter(params.sqlStatementsFile);
    }
    IciqlListener statementListener = new IciqlListener() {
        @Override
        public void logIciql(StatementType type, String statement) {
            if (statementWriter == null) {
                return;
            }
            try {
                statementWriter.append(statement);
                statementWriter.append('\n');
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    };
    IciqlLogger.registerListener(statementListener);

    SuiteClasses suiteClasses = IciqlSuite.class.getAnnotation(SuiteClasses.class);
    long quickestDatabase = Long.MAX_VALUE;
    String dividerMajor = buildDivider('*', 79);
    String dividerMinor = buildDivider('-', 79);

    // Header
    out.println(dividerMajor);
    out.println(MessageFormat.format("{0} {1} ({2}) testing {3} database configurations", Constants.NAME,
            Constants.VERSION, Constants.VERSION_DATE, TEST_DBS.length));
    out.println(dividerMajor);
    out.println();

    showProperty("java.vendor");
    showProperty("java.runtime.version");
    showProperty("java.vm.name");
    showProperty("os.name");
    showProperty("os.version");
    showProperty("os.arch");
    showProperty("available processors", "" + Runtime.getRuntime().availableProcessors());
    showProperty("available memory", MessageFormat.format("{0,number,0.0} GB",
            ((double) Runtime.getRuntime().maxMemory()) / (1024 * 1024)));
    out.println();

    // Test a database
    long lastCount = 0;
    for (TestDb testDb : TEST_DBS) {
        out.println(dividerMinor);
        out.println("Testing " + testDb.describeDatabase());
        out.println("        " + testDb.url);
        out.println(dividerMinor);

        // inject a database section delimiter in the statement log
        if (statementWriter != null) {
            statementWriter.append("\n\n");
            statementWriter.append("# ").append(dividerMinor).append('\n');
            statementWriter.append("# ").append("Testing " + testDb.describeDatabase()).append('\n');
            statementWriter.append("# ").append(dividerMinor).append('\n');
            statementWriter.append("\n\n");
        }

        if (testDb.getVersion().equals("OFFLINE")) {
            // Database not available
            out.println("Skipping.  Could not find " + testDb.url);
            out.println();
        } else {
            // Setup system properties
            System.setProperty("iciql.url", testDb.url);
            System.setProperty("iciql.user", testDb.username);
            System.setProperty("iciql.password", testDb.password);

            // Test database
            Result result = JUnitCore.runClasses(suiteClasses.value());

            // Report results
            testDb.runtime = result.getRunTime();
            if (testDb.runtime < quickestDatabase) {
                quickestDatabase = testDb.runtime;
            }
            testDb.statements = IciqlLogger.getTotalCount() - lastCount;
            // reset total count for next database
            lastCount = IciqlLogger.getTotalCount();

            out.println(MessageFormat.format(
                    "{0} tests ({1} failures, {2} ignores)  {3} statements in {4,number,0.000} secs",
                    result.getRunCount(), result.getFailureCount(), result.getIgnoreCount(), testDb.statements,
                    result.getRunTime() / 1000f));

            if (result.getFailureCount() == 0) {
                out.println();
                out.println("  100% successful test suite run.");
                out.println();
            } else {
                for (Failure failure : result.getFailures()) {
                    out.println(MessageFormat.format("\n  + {0}\n    {1}", failure.getTestHeader(),
                            failure.getMessage()));
                }
                out.println();
            }
        }
    }

    // Display runtime results sorted by performance leader
    out.println();
    out.println(dividerMajor);
    out.println(MessageFormat.format("{0} {1} ({2}) test suite performance results", Constants.NAME,
            Constants.VERSION, Constants.VERSION_DATE));
    out.println(dividerMajor);
    List<TestDb> dbs = Arrays.asList(TEST_DBS);
    Collections.sort(dbs);

    out.println(MessageFormat.format("{0} {1} {2} {3} {4}", StringUtils.pad("Name", 11, " ", true),
            StringUtils.pad("Type", 5, " ", true), StringUtils.pad("Version", 23, " ", true),
            StringUtils.pad("Stats/Sec", 10, " ", true), "Runtime"));
    out.println(dividerMinor);
    for (TestDb testDb : dbs) {
        DecimalFormat df = new DecimalFormat("0.0");
        out.println(MessageFormat.format("{0} {1} {2}   {3} {4} {5}s  ({6,number,0.0}x)",
                StringUtils.pad(testDb.name, 11, " ", true), testDb.isEmbedded ? "E" : "T",
                testDb.isMemory ? "M" : "F", StringUtils.pad(testDb.getVersion(), 21, " ", true),
                StringUtils.pad("" + testDb.getStatementRate(), 10, " ", false),
                StringUtils.pad(df.format(testDb.getRuntime()), 8, " ", false),
                ((double) testDb.runtime) / quickestDatabase));
    }
    out.println(dividerMinor);
    out.println("  E = embedded connection");
    out.println("  T = tcp/ip connection");
    out.println("  M = memory database");
    out.println("  F = file/persistent database");

    // cleanup
    for (PoolableConnectionFactory factory : connectionFactories.values()) {
        factory.getPool().close();
    }
    IciqlLogger.unregisterListener(statementListener);
    out.close();
    System.setErr(ERR);
    if (statementWriter != null) {
        statementWriter.close();
    }
    hsql.stop();
    h2.stop();
    System.exit(0);
}

From source file:com.github.fritaly.svngraph.SvnGraph.java

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName()));
        System.exit(1);
    }

    final File input = new File(args[0]);

    if (!input.exists()) {
        throw new IllegalArgumentException(
                String.format("The given file '%s' doesn't exist", input.getAbsolutePath()));
    }

    final File output = new File(args[1]);

    final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);

    final History history = new History(document);

    final Set<String> rootPaths = history.getRootPaths();

    System.out.println(rootPaths);

    for (String path : rootPaths) {
        System.out.println(path);
        System.out.println(history.getHistory(path).getRevisions());
        System.out.println();
    }

    int count = 0;

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;

    try {
        fileWriter = new FileWriter(output);

        graphWriter = new GraphMLWriter(fileWriter);

        final NodeStyle tagStyle = graphWriter.getNodeStyle();
        tagStyle.setFillColor(Color.WHITE);

        graphWriter.graph();

        // map associating node labels to their corresponding node id in the graph
        final Map<String, String> nodeIdsPerLabel = new TreeMap<>();

        // the node style associated to each branch
        final Map<String, NodeStyle> nodeStyles = new TreeMap<>();

        for (Revision revision : history.getSignificantRevisions()) {
            System.out.println(revision.getNumber() + " - " + revision.getMessage());

            // TODO Render also the deletion of branches
            // there should be only 1 significant update per revision (the one with action ADD)
            for (Update update : revision.getSignificantUpdates()) {
                if (update.isCopy()) {
                    // a merge is also considered a copy
                    final RevisionPath source = update.getCopySource();

                    System.out.println(String.format("  > %s %s from %s@%d", update.getAction(),
                            update.getPath(), source.getPath(), source.getRevision()));

                    final String sourceRoot = Utils.getRootName(source.getPath());

                    if (sourceRoot == null) {
                        // skip the revisions whose associated root is
                        // null (happens whether a branch was created
                        // outside the 'branches' directory for
                        // instance)
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                source.getRevision()));
                        continue;
                    }

                    final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision());

                    // create a node for the source (path, revision)
                    final String sourceId;

                    if (nodeIdsPerLabel.containsKey(sourceLabel)) {
                        // retrieve the id of the existing node
                        sourceId = nodeIdsPerLabel.get(sourceLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(source.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(sourceRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(sourceRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(sourceRoot));
                        }

                        sourceId = graphWriter.node(sourceLabel);

                        nodeIdsPerLabel.put(sourceLabel, sourceId);
                    }

                    // and another for the newly created directory
                    final String targetRoot = Utils.getRootName(update.getPath());

                    if (targetRoot == null) {
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                revision.getNumber()));
                        continue;
                    }

                    final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber());

                    if (Utils.isTagPath(update.getPath())) {
                        graphWriter.setNodeStyle(tagStyle);
                    } else {
                        if (!nodeStyles.containsKey(targetRoot)) {
                            final NodeStyle style = new NodeStyle();
                            style.setFillColor(randomColor());

                            nodeStyles.put(targetRoot, style);
                        }

                        graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                    }

                    final String targetId;

                    if (nodeIdsPerLabel.containsKey(targetLabel)) {
                        // retrieve the id of the existing node
                        targetId = nodeIdsPerLabel.get(targetLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(update.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(targetRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(targetRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                        }

                        targetId = graphWriter.node(targetLabel);

                        nodeIdsPerLabel.put(targetLabel, targetId);
                    }

                    // create an edge between the 2 nodes
                    graphWriter.edge(sourceId, targetId);
                } else {
                    System.out.println(String.format("  > %s %s", update.getAction(), update.getPath()));
                }
            }

            System.out.println();

            count++;
        }

        // Dispatch the revisions per corresponding branch
        final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>();

        for (String nodeLabel : nodeIdsPerLabel.keySet()) {
            if (nodeLabel.contains("@")) {
                final String branchName = StringUtils.substringBefore(nodeLabel, "@");
                final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@"));

                if (!revisionsPerBranch.containsKey(branchName)) {
                    revisionsPerBranch.put(branchName, new TreeSet<Long>());
                }

                revisionsPerBranch.get(branchName).add(revision);
            } else {
                throw new IllegalStateException(nodeLabel);
            }
        }

        // Recreate the missing edges between revisions from a same branch
        for (String branchName : revisionsPerBranch.keySet()) {
            final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName));

            for (int i = 0; i < branchRevisions.size() - 1; i++) {
                final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i));
                final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1));

                graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2));
            }
        }

        graphWriter.closeGraph();

        System.out.println(String.format("Found %d significant revisions", count));
    } finally {
        if (graphWriter != null) {
            graphWriter.close();
        }
        if (fileWriter != null) {
            fileWriter.close();
        }
    }

    System.out.println("Done");
}

From source file:Main.java

private static void writeFileAsString(File file, String str) throws IOException {
    FileWriter fw = new FileWriter(file);
    fw.write(str);
    fw.close();
}

From source file:Main.java

public static void close(FileWriter fileWriter) {
    try {
        fileWriter.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
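
A quiet-close helper like this is typically invoked from a finally block so that an exception thrown while closing does not mask the original failure. A hypothetical caller (the file name is illustrative, not part of the original source) might look like this:

public static void writeQuietly(String text) throws IOException {
    FileWriter writer = null;
    try {
        writer = new FileWriter("output.txt");
        writer.write(text);
    } finally {
        if (writer != null) {
            close(writer); // an IOException from close() is printed, not rethrown
        }
    }
}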

From source file:Main.java

public static void saveToFile(String fileName, String str) throws IOException {
    File f = new File(fileName);
    if (!f.exists()) {
        if (f.getParent() != null) {
            new File(f.getParent()).mkdirs();
        }
        f.createNewFile();
    }
    FileWriter fw = new FileWriter(f);
    fw.write(str);
    fw.close();
}

From source file:Main.java

/**
 * Create a script file to run one command
 * @param cmmd Command for the script to run
 * @param fileName The file name
 * @return The file
 * @throws IOException
 */
public static File createScriptFile(String cmmd, String fileName) throws IOException {
    String f = fileName == null ? "script_" + System.currentTimeMillis() : fileName;
    FileWriter w = new FileWriter(f);
    w.write(cmmd + "\n");
    w.close();
    File file = new File(f);
    file.setExecutable(true, false);
    return file;
}
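
For illustration, a hypothetical caller could create such a script and run it (the command, file name, and use of ProcessBuilder are assumptions for this sketch, not part of the original source):

public static void runDateScript() throws IOException, InterruptedException {
    // Write a one-line script, mark it executable, then execute it
    File script = createScriptFile("date", "print-date.sh");
    Process process = new ProcessBuilder(script.getAbsolutePath()).inheritIO().start();
    System.out.println("Script exited with code " + process.waitFor());
}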