Example usage for java.io.File.getAbsolutePath()

List of usage examples for java.io.File.getAbsolutePath()

Introduction

On this page you can find usage examples for java.io.File.getAbsolutePath().

Prototype

public String getAbsolutePath() 

Document

Returns the absolute pathname string of this abstract pathname.
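
Before the longer examples below, here is a minimal, self-contained sketch (the file names are hypothetical) illustrating the behavior: for a relative File, getAbsolutePath() resolves the path against the current working directory (the user.dir system property); an already absolute pathname is returned unchanged.

import java.io.File;

public class GetAbsolutePathDemo {
    public static void main(String[] args) {
        File relative = new File("data/report.txt");
        System.out.println(relative.getPath());         // data/report.txt
        System.out.println(relative.getAbsolutePath()); // e.g. /home/user/data/report.txt (resolved against user.dir)

        File absolute = new File("/tmp/report.txt");
        System.out.println(absolute.getAbsolutePath()); // /tmp/report.txt (already absolute, returned as-is)
    }
}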

Usage

From source file:de.tudarmstadt.ukp.dkpro.core.mallet.lda.util.PrintTopicWordWeights.java

public static void main(String[] args) throws IOException {
    if (args.length < 2 || args.length % 2 != 0) {
        printHelp();
        System.exit(1);
    }

    /* iterate over input files */
    for (int i = 0; i < args.length; i += 2) {
        File modelFile = new File(args[i]);
        int nWords = Integer.parseInt(args[i + 1]);

        PrintTopicWordWeights writer = new PrintTopicWordWeights(modelFile, nWords);

        File targetFile = new File(modelFile.getAbsolutePath() + OUTPUTFILE_SUFFIX + nWords);
        writer.writeWords(targetFile);
    }
}

From source file:airnowgrib2tojson.AirNowGRIB2toJSON.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {

    SimpleDateFormat GMT = new SimpleDateFormat("yyMMddHH");
    GMT.setTimeZone(TimeZone.getTimeZone("GMT-2"));

    System.out.println(GMT.format(new Date()));

    FTPClient ftpClient = new FTPClient();

    try {
        //Connecting to AirNow FTP server to get the fresh AQI data  
        ftpClient.connect("ftp.airnowapi.org");
        ftpClient.login("pixelshade", "GZDN8uqduwvk");
        ftpClient.enterLocalPassiveMode();
        ftpClient.setFileType(FTP.BINARY_FILE_TYPE);

        //downloading .grib2 file
        File of = new File("US-" + GMT.format(new Date()) + "_combined.grib2");
        OutputStream outstr = new BufferedOutputStream(new FileOutputStream(of));
        InputStream instr = ftpClient
                .retrieveFileStream("GRIB2/US-" + GMT.format(new Date()) + "_combined.grib2");
        byte[] bytesArray = new byte[4096];
        int bytesRead = -1;
        while ((bytesRead = instr.read(bytesArray)) != -1) {
            outstr.write(bytesArray, 0, bytesRead);
        }

        //Close used resources
        ftpClient.completePendingCommand();
        outstr.close();
        instr.close();

        // logout the user 
        ftpClient.logout();

    } catch (SocketException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            //disconnect from AirNow server
            ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    try {
        //Open .grib2 file
        final File AQIfile = new File("US-" + GMT.format(new Date()) + "_combined.grib2");
        final GridDataset gridDS = GridDataset.open(AQIfile.getAbsolutePath());

        //The data type needed - AQI; since it isn't defined in GRIB2 standard,
        //Aerosol type is used instead; look AirNow API documentation for details.
        GridDatatype AQI = gridDS.findGridDatatype("Aerosol_type_msl");

        //Get the coordinate system for selected data type;
        //cut the rectangle to work with - time and height axes aren't present in these files
        //and latitude/longitude go "-1", which means all the data provided.
        GridCoordSystem AQIGCS = AQI.getCoordinateSystem();
        List<CoordinateAxis> AQI_XY = AQIGCS.getCoordinateAxes();
        Array AQIslice = AQI.readDataSlice(0, 0, -1, -1);

        //Variables for iterating through coordinates
        VariableDS var = AQI.getVariable();
        Index index = AQIslice.getIndex();

        //Variables for counting lat/long from the indices provided
        double stepX = (AQI_XY.get(2).getMaxValue() - AQI_XY.get(2).getMinValue()) / index.getShape(1);
        double stepY = (AQI_XY.get(1).getMaxValue() - AQI_XY.get(1).getMinValue()) / index.getShape(0);
        double curX = AQI_XY.get(2).getMinValue();
        double curY = AQI_XY.get(1).getMinValue();

        //Output details
        OutputStream ValLog = new FileOutputStream("USA_AQI.json");
        Writer ValWriter = new OutputStreamWriter(ValLog);

        for (int j = 0; j < index.getShape(0); j++) {
            for (int i = 0; i < index.getShape(1); i++) {
                float val = AQIslice.getFloat(index.set(j, i));

                //Write the AQI value and its coordinates if it's present by i/j indices
                if (!Float.isNaN(val))
                    ValWriter.write("{\r\n\"lat\":" + curX + ",\r\n\"lng\":" + curY + ",\r\n\"AQI\":" + val
                            + ",\r\n},\r\n");

                curX += stepX;
            }
            curY += stepY;
            curX = AQI_XY.get(2).getMinValue();
        }

        // Close the writer so the buffered JSON output is flushed to USA_AQI.json
        ValWriter.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.github.fritaly.graphml4j.samples.GradleDependencies.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependencies.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(GradleDependencies.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the node identifiers per depth inside the
        // dependency graph (the topmost dependency is the first one in the
        // stack)
        final Stack<String> parentIds = new Stack<String>();

        // Open the graph
        graphWriter.graph();

        // Map storing the node identifiers per label
        final Map<String, String> nodeIdsByLabel = new TreeMap<String, String>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentIds.size()) {
                parentIds.pop();
            }

            // Compute a nice label from the dependency (group, artifact,
            // version) tuple
            final String label = computeLabel(line);

            // Has this dependency already been added to the graph ?
            if (!nodeIdsByLabel.containsKey(label)) {
                // No, add the node
                nodeIdsByLabel.put(label, graphWriter.node(label));
            }

            final String nodeId = nodeIdsByLabel.get(label);

            parentIds.push(nodeId);

            if (parentIds.size() > 1) {
                // Generate an edge between the current node and its parent
                graphWriter.edge(parentIds.get(parentIds.size() - 2), nodeId);
            }
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources.
        // Null checks guard against failures that occurred before a resource was assigned.
        if (graphWriter != null) {
            graphWriter.close();
        }
        if (fileWriter != null) {
            fileWriter.close();
        }
        if (lineReader != null) {
            lineReader.close();
        }
        if (reader != null) {
            reader.close();
        }
    }
}

From source file:com.kylinolap.query.QueryCli.java

public static void main(String[] args) throws Exception {

    Options options = new Options();
    options.addOption(OPTION_METADATA);
    options.addOption(OPTION_SQL);

    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = parser.parse(options, args);
    KylinConfig config = KylinConfig
            .createInstanceFromUri(commandLine.getOptionValue(OPTION_METADATA.getOpt()));
    String sql = commandLine.getOptionValue(OPTION_SQL.getOpt());

    Class.forName("net.hydromatic.optiq.jdbc.Driver");
    File olapTmp = OLAPSchemaFactory.createTempOLAPJson(null, config);

    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        conn = DriverManager.getConnection("jdbc:optiq:model=" + olapTmp.getAbsolutePath());

        stmt = conn.createStatement();
        rs = stmt.executeQuery(sql);
        int n = 0;
        ResultSetMetaData meta = rs.getMetaData();
        while (rs.next()) {
            n++;
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                System.out.println(n + " - " + meta.getColumnLabel(i) + ":\t" + rs.getObject(i));
            }
        }
    } finally {
        if (rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }

}

From source file:org.n52.iceland.statistics.api.utils.KibanaExporter.java

public static void main(String args[]) throws Exception {
    if (args.length != 2) {
        System.out.printf("Usage: java KibanaExporter.jar %s %s\n", "localhost:9300", "my-cluster-name");
        System.exit(0);
    }
    if (!args[0].contains(":")) {
        throw new IllegalArgumentException(
                String.format("%s not a valid format. Expected <hostname>:<port>.", args[0]));
    }

    // set ES address
    String split[] = args[0].split(":");
    InetSocketTransportAddress address = new InetSocketTransportAddress(InetAddress.getByName(split[0]),
            Integer.parseInt(split[1], 10));

    // set cluster name
    Builder tcSettings = Settings.settingsBuilder();
    tcSettings.put("cluster.name", args[1]);
    System.out.println("Connection to " + args[1]);

    client = TransportClient.builder().settings(tcSettings).build();
    client.addTransportAddress(address);

    // search index pattern for needle
    searchIndexPattern();

    KibanaConfigHolderDto holder = new KibanaConfigHolderDto();
    System.out.println("Reading .kibana index");

    SearchResponse resp = client.prepareSearch(".kibana").setSize(1000).get();
    Arrays.asList(resp.getHits().getHits()).stream().map(KibanaExporter::parseSearchHit).forEach(holder::add);
    System.out.println("Reading finished");

    ObjectMapper mapper = new ObjectMapper();
    // we love pretty things
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    File f = new File("kibana_config.json");

    try (FileOutputStream out = new FileOutputStream(f, false)) {
        mapper.writeValue(out, holder);
    }

    System.out.println("File outputted to: " + f.getAbsolutePath());

    client.close();

}

From source file:mitm.common.tools.CreateCA.java

public static void main(String[] args) throws Exception {
    PropertyConfigurator.configure("conf/log4j.properties");

    if (args.length != 1) {
        System.err.println("p12 file expected.");

        return;
    }

    System.out.println("Please enter your password: ");

    ConsoleReader consoleReader = new ConsoleReader(new FileInputStream(FileDescriptor.in),
            new PrintWriter(System.err));

    String password = consoleReader.readLine(new Character('*'));

    CreateCA createCA = new CreateCA();

    File p12File = new File(args[0]);

    createCA.generateCA(password, p12File);

    System.out.println("CA generated and written to " + p12File.getAbsolutePath());
}

From source file:net.itransformers.idiscover.v2.core.Main.java

public static void main(String[] args) throws MalformedURLException {
    logger.debug("iDiscover v2. gearing up");
    Map<String, String> params = CmdLineParser.parseCmdLine(args);
    //        String connectionDetailsFileName = params.get("-f");
    //        if (connectionDetailsFileName == null) {
    //            printUsage("fileName"); return;
    //            }
    String depthCmdArg = params.get("-d");
    //        if (depthCmdArg == null) {
    //            printUsage("depth"); return;
    //        }
    String projectPath = params.get("-p");

    if (projectPath == null) {
        File cwd = new File(".");
        System.out.println("Project path is not specified. Will use current dir: " + cwd.getAbsolutePath());
        projectPath = cwd.getAbsolutePath();
    }

    File workingDir = new File(projectPath);
    if (!workingDir.exists()) {
        System.out.println("Invalid project path!");
        return;
    }
    System.out.println("Loading beans!!");

    File conDetails = new File(projectPath, "iDiscover/conf/txt/connection-details.txt");

    File generic = new File(projectPath, "iDiscover/conf/xml/generic.xml");
    String genericContextPath = generic.toURI().toURL().toString();

    File snmpDiscovery = new File(projectPath, "iDiscover/conf/xml/snmpNetworkDiscovery.xml");
    String snmpDiscoveryContextPath = snmpDiscovery.toURI().toURL().toString();

    File connectionsDetails = new File(projectPath, "iDiscover/conf/xml/connectionsDetails.xml");
    String connectionsDetailsContextPath = connectionsDetails.toURI().toURL().toString();

    DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
    BeanDefinition beanDefinition = BeanDefinitionBuilder.rootBeanDefinition(String.class)
            .addConstructorArgValue(projectPath).getBeanDefinition();

    String labelDirName = autolabel(projectPath);

    BeanDefinition beanDefinition2 = BeanDefinitionBuilder.rootBeanDefinition(String.class)
            .addConstructorArgValue(labelDirName).getBeanDefinition();

    beanFactory.registerBeanDefinition("projectPath", beanDefinition);

    beanFactory.registerBeanDefinition("labelDirName", beanDefinition2);

    GenericApplicationContext cmdArgCxt = new GenericApplicationContext(beanFactory);
    // Must call refresh to initialize context
    cmdArgCxt.refresh();

    String[] paths = new String[] { genericContextPath, snmpDiscoveryContextPath,
            connectionsDetailsContextPath };
    //        ,project.getAbsolutePath()+project.getAbsolutePath()+File.separator+"iDiscover/conf/xml/snmpNetworkDiscovery.xml", project.getAbsolutePath()+File.separator+"iDiscover/src/main/resources/connectionsDetails.xml"
    FileSystemXmlApplicationContext applicationContext = new FileSystemXmlApplicationContext(paths, cmdArgCxt);
    //        ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext(workingDir+File.separator+"iDiscover/conf/xml/generic.xml",workingDir+File.separator+"/iDiscover/conf/xml/snmpNetworkDiscovery.xml","connectionsDetails.xml");
    // NetworkDiscoverer discoverer = fileApplicationContext.getBean("bgpPeeringMapDiscovery", NetworkDiscoverer.class);
    //NetworkDiscoverer discoverer = fileApplicationContext.getBean("floodLightNodeDiscoverer", NetworkDiscoverer.class);
    NetworkDiscoverer discoverer = applicationContext.getBean("snmpDiscovery", NetworkDiscoverer.class);
    LinkedHashMap<String, ConnectionDetails> connectionList = (LinkedHashMap) applicationContext
            .getBean("connectionList", conDetails);
    int depth = (Integer) applicationContext.getBean("discoveryDepth",
            depthCmdArg == null ? "-1" : depthCmdArg);
    NetworkDiscoveryResult result = discoverer
            .discoverNetwork(new ArrayList<ConnectionDetails>(connectionList.values()), depth);
    if (result != null) {
        for (String s : result.getNodes().keySet()) {
            System.out.println("\nNode: " + s);

        }
    }

    //
}

From source file:fm.last.peyote.cacti.PeyoteCactiLauncher.java

public static void main(String[] args) throws JAXBException, IOException {
    if (args.length < 2 || args.length > 3) {
        printUsage();
    }
    String name = args[0];
    String url = args[1];
    ConfigurableApplicationContext applicationContext = new ClassPathXmlApplicationContext("spring/peyote.xml");
    applicationContext.registerShutdownHook();
    try {
        InputData inputData = createInputData(args, applicationContext, name, url);
        PeyoteMarshaller marshaller = applicationContext.getBean(PeyoteMarshaller.class);
        marshaller.setInputData(inputData);
        log.info("Starting Peyote");
        File file = new File("datatemplate.xml");
        Writer outWriter = new FileWriter(file);
        marshaller.generateCactiDataTemplate(outWriter);
        outWriter.close();
        log.info("generated data template for '" + name + "' in " + file.getAbsolutePath());

        // file = new File("graphtemplate.xml");
        // outWriter = new FileWriter(file);
        // marshaller.generateCactiGraphTemplate(outWriter);
        // outWriter.close();
        // log.info("generated data template for '" + name + "' in " +
        // file.getAbsolutePath());

        log.info("Peyote finished.");
    } finally {
        applicationContext.close();
    }
}

From source file:com.jkoolcloud.tnt4j.streams.utils.ZorkaAttach.java

/**
 * Main entry point for attaching Zorka agent to running JVM.
 *
 * @param args
 *            command-line arguments. Supported arguments:
 *            <table summary="TNT4J-Streams agent command line arguments">
 *            <tr>
 *            <td>&nbsp;&nbsp;</td>
 *            <td>&nbsp;zorkaAgentPath</td>
 *            <td>(required) Zorka agent path</td>
 *            </tr>
 *            <tr>
 *            <td>&nbsp;&nbsp;</td>
 *            <td>&nbsp;VMNameToAttachTo</td>
 *            <td>(required) Java VM name to attach to</td>
 *            </tr>
 *            </table>
 */
public static void main(String... args) {
    LOGGER.log(OpLevel.DEBUG,
            StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME, "ZorkaAttach.starting.main"));
    if (args.length != 2) {
        System.out.println(
                StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME, "ZorkaAttach.main.usage"));
        return;
    }
    List<VirtualMachineDescriptor> runningVMsList = VirtualMachine.list();
    boolean found = false;
    // args[0] - agent path to attach
    // args[1] - VM name to attach to

    for (VirtualMachineDescriptor rVM : runningVMsList) {
        if (rVM.displayName().contains(args[1])
                && !rVM.displayName().contains(ZorkaAttach.class.getSimpleName())) {
            try {
                VirtualMachine vm = VirtualMachine.attach(rVM.id());
                File pathFile = new File(
                        ZorkaAttach.class.getProtectionDomain().getCodeSource().getLocation().getPath());
                String agentPath = pathFile.getAbsolutePath();
                LOGGER.log(OpLevel.INFO, StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME,
                        "ZorkaAttach.attaching.agent"), agentPath, rVM.displayName());
                vm.loadAgent(agentPath, args[0]);
                vm.detach();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            found = true;
        }
    }

    if (!found) {
        System.out.println(StreamsResources.getStringFormatted(ZorkaConstants.RESOURCE_BUNDLE_NAME,
                "ZorkaAttach.no.jvm", args[0]));
        System.out.println(
                StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME, "ZorkaAttach.available.jvms"));
        System.out.println(
                StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME, "ZorkaAttach.list.begin"));
        for (VirtualMachineDescriptor vmD : runningVMsList) {
            System.out.println(vmD.displayName());
        }
        System.out.println(
                StreamsResources.getString(ZorkaConstants.RESOURCE_BUNDLE_NAME, "ZorkaAttach.list.end"));
    }
}

From source file:org.omg.bpmn.miwg.TestRunner.java

/**
 * The first argument is the path to the folder containing the reference BPMN XML files,
 * the second argument is the path to the folder containing the BPMN files to compare with.
 *
 * @param args
 * @throws ParserConfigurationException
 * @throws IOException
 * @throws SAXException
 *
 */
public static void main(String[] args) throws SAXException, IOException, ParserConfigurationException {

    System.out.println("Running BPMN 2.0 XML Compare Test...");
    String result = runXmlCompareTest(args[0], args[1], Variant.valueOf(args[2]));

    if (args.length > 3) {
        File outputFile = new File(args[3]);
        FileUtils.writeStringToFile(outputFile, result);
        System.out.println("Output printed to: \n" + outputFile.getAbsolutePath());
    } else {
        System.out.println(result);
    }

    System.out.println("Finished BPMN 2.0 XML Compare Test");
}