Example usage for java.lang.String.split

List of usage examples for java.lang.String.split

Introduction

This page collects usage examples for java.lang.String.split.

Prototype

public String[] split(String regex) 

Document

Splits this string around matches of the given regular expression.
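
For reference, here is a minimal, self-contained sketch of the method's behavior (the class name and sample strings are illustrative): the argument is a regular expression, not a literal separator, and trailing empty strings are removed from the result array.

public class SplitDemo {
    public static void main(String[] args) {
        // trailing empty strings are removed: ["a", "b", "", "c"]
        String[] parts = "a,b,,c,,".split(",");
        System.out.println(parts.length); // prints 4

        // regex metacharacters such as '.' must be escaped
        String[] fields = "1.2.3".split("\\.");
        System.out.println(fields[1]); // prints 2
    }
}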

Usage

From source file:edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation.java

/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the aggregate functions " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option gOption = new Option("g", "group", true, "set group of datasets for which the aggregate functions"
            + " will be computed, followed by their temporal and spatial attribute indices");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";
    String preProcessingDatasets = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetAggregation = new ArrayList<String>();
    HashMap<String, String> datasetTempAtt = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialAtt = new HashMap<String, String>();
    HashMap<String, String> preProcessingDataset = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");
    String[] datasetArgs = cmd.getOptionValues("g");

    for (int i = 0; i < datasetArgs.length; i += 3) {
        String dataset = datasetArgs[i];

        // getting pre-processing
        String tempPreProcessing = FrameworkUtils.searchPreProcessing(dataset, s3conf, s3);
        if (tempPreProcessing == null) {
            System.out.println("No pre-processing available for " + dataset);
            continue;
        }
        preProcessingDataset.put(dataset, tempPreProcessing);

        shortDataset.add(dataset);
        // compare string contents with equals(), not reference equality (==)
        datasetTempAtt.put(dataset, ("null".equals(datasetArgs[i + 1]) ? null : datasetArgs[i + 1]));
        datasetSpatialAtt.put(dataset, ("null".equals(datasetArgs[i + 2]) ? null : datasetArgs[i + 2]));

        datasetId.put(dataset, null);
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id

    Path path = null;
    FileSystem fs = null;

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        fs = FileSystem.get(new Configuration());
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.aggregatesDir, s3conf, s3);

    // getting smallest resolution

    HashMap<String, String> tempResMap = new HashMap<String, String>();
    HashMap<String, String> spatialResMap = new HashMap<String, String>();

    HashMap<String, String> datasetTemporalStrMap = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialStrMap = new HashMap<String, String>();

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String[] datasetArray = preProcessingDataset.get(dataset).split("-");

        String datasetTemporalStr = datasetArray[datasetArray.length - 2];
        int datasetTemporal = utils.temporalResolution(datasetTemporalStr);

        String datasetSpatialStr = datasetArray[datasetArray.length - 1];
        int datasetSpatial = utils.spatialResolution(datasetSpatialStr);

        // finding all possible resolutions

        String[] temporalResolutions = FrameworkUtils.getAggTempResolutions(datasetTemporal);
        String[] spatialResolutions = FrameworkUtils.getAggSpatialResolutions(datasetSpatial);

        String temporalResolution = "";
        String spatialResolution = "";

        String tempRes = "";
        String spatialRes = "";

        boolean dataAdded = false;

        for (int i = 0; i < temporalResolutions.length; i++) {
            for (int j = 0; j < spatialResolutions.length; j++) {

                temporalResolution = temporalResolutions[i];
                spatialResolution = spatialResolutions[j];

                String aggregatesOutputFileName = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";

                if (removeExistingFiles) {
                    FrameworkUtils.removeFile(aggregatesOutputFileName, s3conf, s3);
                }

                if (!FrameworkUtils.fileExists(aggregatesOutputFileName, s3conf, s3)) {

                    dataAdded = true;

                    tempRes += temporalResolution + "-";
                    spatialRes += spatialResolution + "-";
                }
            }
        }

        if (dataAdded) {
            input.add(s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
            shortDatasetAggregation.add(dataset);

            tempResMap.put(dataset, tempRes.substring(0, tempRes.length() - 1));
            spatialResMap.put(dataset, spatialRes.substring(0, spatialRes.length() - 1));

            datasetTemporalStrMap.put(dataset, datasetTemporalStr);
            datasetSpatialStrMap.put(dataset, datasetSpatialStr);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have aggregates.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    it = input.iterator();
    while (it.hasNext()) {
        preProcessingDatasets += it.next() + ",";
    }

    Job aggJob = null;
    String aggregatesOutputDir = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/";
    String jobName = "aggregates";

    FrameworkUtils.removeFile(aggregatesOutputDir, s3conf, s3);

    Configuration aggConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    aggConf.set("dataset-name", datasetNames);
    aggConf.set("dataset-id", datasetIds);

    for (int i = 0; i < shortDatasetAggregation.size(); i++) {
        String dataset = shortDatasetAggregation.get(i);
        String id = datasetId.get(dataset);
        aggConf.set("dataset-" + id + "-temporal-resolutions", tempResMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-resolutions", spatialResMap.get(dataset));
        aggConf.set("dataset-" + id + "-temporal-att", datasetTempAtt.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-att", datasetSpatialAtt.get(dataset));
        aggConf.set("dataset-" + id + "-temporal", datasetTemporalStrMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial", datasetSpatialStrMap.get(dataset));

        if (s3)
            aggConf.set("dataset-" + id,
                    s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
        else
            aggConf.set("dataset-" + id, FileSystem.get(new Configuration()).getHomeDirectory() + "/"
                    + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
    }

    aggConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    aggConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    aggConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    aggConf.set("mapreduce.task.io.sort.mb", "200");
    aggConf.set("mapreduce.task.io.sort.factor", "100");
    machineConf.setMachineConfiguration(aggConf);

    if (s3) {
        aggConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        aggConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    if (snappyCompression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    aggJob = new Job(aggConf);
    aggJob.setJobName(jobName);

    aggJob.setMapOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setMapOutputValueClass(AggregationArrayWritable.class);
    aggJob.setOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setOutputValueClass(FloatArrayWritable.class);
    //aggJob.setOutputKeyClass(Text.class);
    //aggJob.setOutputValueClass(Text.class);

    aggJob.setMapperClass(AggregationMapper.class);
    aggJob.setCombinerClass(AggregationCombiner.class);
    aggJob.setReducerClass(AggregationReducer.class);
    aggJob.setNumReduceTasks(machineConf.getNumberReduces());

    aggJob.setInputFormatClass(SequenceFileInputFormat.class);
    //aggJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(aggJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(aggJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(aggJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(aggJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(aggJob, true);
    FileInputFormat.setInputPaths(aggJob,
            preProcessingDatasets.substring(0, preProcessingDatasets.length() - 1));
    FileOutputFormat.setOutputPath(aggJob, new Path(aggregatesOutputDir));

    aggJob.setJarByClass(Aggregation.class);

    long start = System.currentTimeMillis();
    aggJob.submit();
    aggJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetAggregation) {
        String from = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }

}
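
The split call in this example is line.split("\t"), used to parse the tab-separated dataset index. A minimal sketch of that pattern, with a made-up record for illustration:

String indexLine = "taxi\t42"; // hypothetical "name<TAB>id" index record
String[] dt = indexLine.split("\t"); // a tab is not a regex metacharacter, so no escaping is needed
String name = dt[0]; // "taxi"
String id = dt[1];   // "42"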

From source file:CTmousetrack.java

public static void main(String[] args) {

    String outLoc = "." + File.separator + "CTdata"; // Location of the base output data folder; only used when writing out CT data to a local folder
    String srcName = "CTmousetrack"; // name of the output CT source
    long blockPts = 10; // points per block flush
    long sampInterval = 10; // time between sampling updates, msec
    double trimTime = 0.0; // amount of data to keep (trim time), sec
    boolean debug = false; // turn on debug?

    // Specify the CT output connection
    CTWriteMode writeMode = CTWriteMode.LOCAL; // The selected mode for writing out CT data
    String serverHost = ""; // Server (FTP or HTTP/S) host:port
    String serverUser = ""; // Server (FTP or HTTPS) username
    String serverPassword = ""; // Server (FTP or HTTPS) password

    // For UDP output mode
    DatagramSocket udpServerSocket = null;
    InetAddress udpServerAddress = null;
    String udpHost = "";
    int udpPort = -1;

    // Concatenate all of the CTWriteMode types
    String possibleWriteModes = "";
    for (CTWriteMode wm : CTWriteMode.values()) {
        possibleWriteModes = possibleWriteModes + ", " + wm.name();
    }
    // Remove ", " from start of string
    possibleWriteModes = possibleWriteModes.substring(2);

    //
    // Argument processing using Apache Commons CLI
    //
    // 1. Setup command line options
    Options options = new Options();
    options.addOption("h", "help", false, "Print this message.");
    options.addOption(Option.builder("o").argName("base output dir").hasArg().desc(
            "Base output directory when writing data to local folder (i.e., this is the location of CTdata folder); default = \""
                    + outLoc + "\".")
            .build());
    options.addOption(Option.builder("s").argName("source name").hasArg()
            .desc("Name of source to write data to; default = \"" + srcName + "\".").build());
    options.addOption(Option.builder("b").argName("points per block").hasArg()
            .desc("Number of points per block; UDP output mode will use 1 point/block; default = "
                    + Long.toString(blockPts) + ".")
            .build());
    options.addOption(Option.builder("dt").argName("samp interval msec").hasArg()
            .desc("Sampling period in msec; default = " + Long.toString(sampInterval) + ".").build());
    options.addOption(Option.builder("t").argName("trim time sec").hasArg().desc(
            "Trim (ring-buffer loop) time (sec); this is only used when writing data to local folder; specify 0 for indefinite; default = "
                    + Double.toString(trimTime) + ".")
            .build());
    options.addOption(
            Option.builder("w").argName("write mode").hasArg()
                    .desc("Type of write connection; one of " + possibleWriteModes
                            + "; all but UDP mode write out to CT; default = " + writeMode.name() + ".")
                    .build());
    options.addOption(Option.builder("host").argName("host[:port]").hasArg()
            .desc("Host:port when writing via FTP, HTTP, HTTPS, UDP.").build());
    options.addOption(Option.builder("u").argName("username,password").hasArg()
            .desc("Comma-delimited username and password when writing to CT via FTP or HTTPS.").build());
    options.addOption("x", "debug", false, "Enable CloudTurbine debug output.");

    // 2. Parse command line options
    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (ParseException exp) { // oops, something went wrong
        System.err.println("Command line argument parsing failed: " + exp.getMessage());
        return;
    }

    // 3. Retrieve the command line values
    if (line.hasOption("help")) { // Display help message and quit
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp("CTmousetrack", "", options,
                "NOTE: UDP output is a special non-CT output mode where single x,y points are sent via UDP to the specified host:port.");
        return;
    }

    outLoc = line.getOptionValue("o", outLoc);
    if (!outLoc.endsWith("\\") && !outLoc.endsWith("/")) {
        outLoc = outLoc + File.separator;
    }
    // Make sure the base output folder location ends in "CTdata"
    if (!outLoc.endsWith("CTdata\\") && !outLoc.endsWith("CTdata/")) {
        outLoc = outLoc + "CTdata" + File.separator;
    }

    srcName = line.getOptionValue("s", srcName);

    blockPts = Long.parseLong(line.getOptionValue("b", Long.toString(blockPts)));

    sampInterval = Long.parseLong(line.getOptionValue("dt", Long.toString(sampInterval)));

    trimTime = Double.parseDouble(line.getOptionValue("t", Double.toString(trimTime)));

    // Type of output connection
    String writeModeStr = line.getOptionValue("w", writeMode.name());
    boolean bMatch = false;
    for (CTWriteMode wm : CTWriteMode.values()) {
        if (wm.name().equalsIgnoreCase(writeModeStr)) {
            writeMode = wm;
            bMatch = true;
        }
    }
    if (!bMatch) {
        System.err.println("Unrecognized write mode, \"" + writeModeStr + "\"; write mode must be one of "
                + possibleWriteModes);
        System.exit(0);
    }
    if (writeMode != CTWriteMode.LOCAL) {
        // User must have specified the host
        // If FTP or HTTPS, they may also specify username/password
        serverHost = line.getOptionValue("host", serverHost);
        if (serverHost.isEmpty()) {
            System.err.println(
                    "When using write mode \"" + writeModeStr + "\", you must specify the server host.");
            System.exit(0);
        }
        if (writeMode == CTWriteMode.UDP) {
            // Force blockPts to be 1
            blockPts = 1;
            // User must have specified both host and port
            int colonIdx = serverHost.indexOf(':');
            if ((colonIdx == -1) || (colonIdx >= serverHost.length() - 1)) {
                System.err.println(
                        "For UDP output mode, both the host and port (<host>:<port>)) must be specified.");
                System.exit(0);
            }
            udpHost = serverHost.substring(0, colonIdx);
            String udpPortStr = serverHost.substring(colonIdx + 1);
            try {
                udpPort = Integer.parseInt(udpPortStr);
            } catch (NumberFormatException nfe) {
                System.err.println("The UDP port must be a positive integer.");
                System.exit(0);
            }
        }
        if ((writeMode == CTWriteMode.FTP) || (writeMode == CTWriteMode.HTTPS)) {
            String userpassStr = line.getOptionValue("u", "");
            if (!userpassStr.isEmpty()) {
                // This string should be comma-delimited username and password
                String[] userpassCSV = userpassStr.split(",");
                if (userpassCSV.length != 2) {
                    System.err.println("When specifying a username and password for write mode \""
                            + writeModeStr + "\", separate the username and password by a comma.");
                    System.exit(0);
                }
                serverUser = userpassCSV[0];
                serverPassword = userpassCSV[1];
            }
        }
    }

    debug = line.hasOption("debug");

    System.err.println("CTmousetrack parameters:");
    System.err.println("\toutput mode = " + writeMode.name());
    if (writeMode == CTWriteMode.UDP) {
        System.err.println("\twrite to " + udpHost + ":" + udpPort);
    } else {
        System.err.println("\tsource = " + srcName);
        System.err.println("\ttrim time = " + trimTime + " sec");
    }
    System.err.println("\tpoints per block = " + blockPts);
    System.err.println("\tsample interval = " + sampInterval + " msec");

    try {
        //
        // Setup CTwriter or UDP output
        //
        CTwriter ctw = null;
        CTinfo.setDebug(debug);
        if (writeMode == CTWriteMode.LOCAL) {
            ctw = new CTwriter(outLoc + srcName, trimTime);
            System.err.println("\tdata will be written to local folder \"" + outLoc + "\"");
        } else if (writeMode == CTWriteMode.FTP) {
            CTftp ctftp = new CTftp(srcName);
            try {
                ctftp.login(serverHost, serverUser, serverPassword);
            } catch (Exception e) {
                throw new IOException(
                        "Error logging into FTP server \"" + serverHost + "\":\n" + e.getMessage());
            }
            ctw = ctftp; // upcast to CTWriter
            System.err.println("\tdata will be written to FTP server at " + serverHost);
        } else if (writeMode == CTWriteMode.HTTP) {
            // Don't send username/pw in HTTP mode since they will be unencrypted
            CThttp cthttp = new CThttp(srcName, "http://" + serverHost);
            ctw = cthttp; // upcast to CTWriter
            System.err.println("\tdata will be written to HTTP server at " + serverHost);
        } else if (writeMode == CTWriteMode.HTTPS) {
            CThttp cthttp = new CThttp(srcName, "https://" + serverHost);
            // Username/pw are optional for HTTPS mode; only use them if username is not empty
            if (!serverUser.isEmpty()) {
                try {
                    cthttp.login(serverUser, serverPassword);
                } catch (Exception e) {
                    throw new IOException(
                            "Error logging into HTTPS server \"" + serverHost + "\":\n" + e.getMessage());
                }
            }
            ctw = cthttp; // upcast to CTWriter
            System.err.println("\tdata will be written to HTTPS server at " + serverHost);
        } else if (writeMode == CTWriteMode.UDP) {
            try {
                udpServerSocket = new DatagramSocket();
            } catch (SocketException se) {
                System.err.println("Error creating socket for UDP:\n" + se);
                System.exit(0);
            }
            try {
                udpServerAddress = InetAddress.getByName(udpHost);
            } catch (UnknownHostException uhe) {
                System.err.println("Error getting UDP server host address:\n" + uhe);
                System.exit(0);
            }
        }
        if (writeMode != CTWriteMode.UDP) {
            ctw.setBlockMode(blockPts > 1, blockPts > 1);
            ctw.autoFlush(0); // no autoflush
            ctw.autoSegment(1000);
        }

        // screen dims
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        double width = screenSize.getWidth();
        double height = screenSize.getHeight();

        // use Map for consolidated putData
        Map<String, Object> cmap = new LinkedHashMap<String, Object>();

        // loop and write some output
        for (int i = 0; i < 1000000; i++) { // go until killed
            long currentTime = System.currentTimeMillis();
            Point mousePos = MouseInfo.getPointerInfo().getLocation();
            float x_pt = (float) (mousePos.getX() / width); // normalize
            float y_pt = (float) ((height - mousePos.getY()) / height); // flip Y (so bottom=0)
            if (writeMode != CTWriteMode.UDP) {
                // CT output mode
                ctw.setTime(currentTime);
                cmap.clear();
                cmap.put("x", x_pt);
                cmap.put("y", y_pt);
                ctw.putData(cmap);
                if (((i + 1) % blockPts) == 0) {
                    ctw.flush();
                    System.err.print(".");
                }
            } else {
                // UDP output mode
                // We force blockPts to be 1 for UDP output mode, i.e. we "flush" the data every time
                // Write the following data (21 bytes total):
                //     header = "MOUSE", 5 bytes
                //     current time, long, 8 bytes
                //     2 floats (x,y) 4 bytes each, 8 bytes
                int len = 21;
                ByteBuffer bb = ByteBuffer.allocate(len);
                String headerStr = "MOUSE";
                bb.put(headerStr.getBytes("UTF-8"));
                bb.putLong(currentTime);
                bb.putFloat(x_pt);
                bb.putFloat(y_pt);
                // bb.array() works here as well: a heap buffer allocated with
                // the exact length is backed by an array of exactly that size
                byte[] sendData = new byte[len];
                bb.position(0);
                bb.get(sendData, 0, len);
                DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, udpServerAddress,
                        udpPort);
                try {
                    udpServerSocket.send(sendPacket);
                } catch (IOException e) {
                    System.err.println("Test server caught exception trying to send data to UDP client:\n" + e);
                }
                System.err.print(".");
            }
            try {
                Thread.sleep(sampInterval);
            } catch (Exception e) {
            }
        }
        if (writeMode != CTWriteMode.UDP) {
            ctw.flush(); // wrap up
        }
    } catch (Exception e) {
        System.err.println("CTmousetrack exception: " + e);
        e.printStackTrace();
    }
}
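
The split call in this example is userpassStr.split(","), which separates the -u username,password argument. One caveat worth noting: a comma inside the password would fail the length check above; passing a limit of 2 splits only at the first comma. A sketch under that assumption:

String userpassStr = "alice,s3cr,et"; // hypothetical credentials with a comma in the password
String[] userpassCSV = userpassStr.split(",", 2); // split at the first comma only
// userpassCSV[0] == "alice", userpassCSV[1] == "s3cr,et"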

From source file:edu.nyu.vida.data_polygamy.pre_processing.PreProcessing.java

/**
 * @param args
 * @throws IOException 
 * @throws ClassNotFoundException 
 * @throws InterruptedException 
 */
@SuppressWarnings("deprecation")
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option nameOption = new Option("dn", "name", true, "the name of the dataset");
    nameOption.setRequired(true);
    nameOption.setArgName("DATASET NAME");
    options.addOption(nameOption);

    Option headerOption = new Option("dh", "header", true, "the file that contains the header of the dataset");
    headerOption.setRequired(true);
    headerOption.setArgName("DATASET HEADER FILE");
    options.addOption(headerOption);

    Option defaultsOption = new Option("dd", "defaults", true,
            "the file that contains the default values of the dataset");
    defaultsOption.setRequired(true);
    defaultsOption.setArgName("DATASET DEFAULTS FILE");
    options.addOption(defaultsOption);

    Option tempResOption = new Option("t", "temporal", true,
            "desired temporal resolution (hour, day, week, or month)");
    tempResOption.setRequired(true);
    tempResOption.setArgName("TEMPORAL RESOLUTION");
    options.addOption(tempResOption);

    Option spatialResOption = new Option("s", "spatial", true,
            "desired spatial resolution (points, nbhd, zip, grid, or city)");
    spatialResOption.setRequired(true);
    spatialResOption.setArgName("SPATIAL RESOLUTION");
    options.addOption(spatialResOption);

    Option currentSpatialResOption = new Option("cs", "current-spatial", true,
            "current spatial resolution (points, nbhd, zip, grid, or city)");
    currentSpatialResOption.setRequired(true);
    currentSpatialResOption.setArgName("CURRENT SPATIAL RESOLUTION");
    options.addOption(currentSpatialResOption);

    Option indexResOption = new Option("i", "index", true, "indexes of the temporal and spatial attributes");
    indexResOption.setRequired(true);
    indexResOption.setArgName("INDEX OF SPATIO-TEMPORAL RESOLUTIONS");
    indexResOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(indexResOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);
    String dataset = cmd.getOptionValue("dn");
    String header = cmd.getOptionValue("dh");
    String defaults = cmd.getOptionValue("dd");
    String temporalResolution = cmd.getOptionValue("t");
    String spatialResolution = cmd.getOptionValue("s");
    String gridResolution = "";
    String currentSpatialResolution = cmd.getOptionValue("cs");

    if (spatialResolution.contains("grid")) {
        String[] res = spatialResolution.split("-");
        spatialResolution = res[0];
        gridResolution = res[1];
    }

    conf.set("header", s3bucket + FrameworkUtils.dataDir + "/" + header);
    conf.set("defaults", s3bucket + FrameworkUtils.dataDir + "/" + defaults);
    conf.set("temporal-resolution", temporalResolution);
    conf.set("spatial-resolution", spatialResolution);
    conf.set("grid-resolution", gridResolution);
    conf.set("current-spatial-resolution", currentSpatialResolution);

    String[] indexes = cmd.getOptionValues("i");
    String temporalPos = "";
    Integer sizeSpatioTemp = 0;
    if (!(currentSpatialResolution.equals("points"))) {
        String spatialPos = "";
        // indexes arrive as (temporal, spatial) attribute pairs
        for (int i = 0; i < indexes.length; i++) {
            temporalPos += indexes[i] + ",";
            spatialPos += indexes[++i] + ",";
            sizeSpatioTemp++;
        }
        conf.set("spatial-pos", spatialPos);
    } else {
        String xPositions = "", yPositions = "";
        // indexes arrive as (temporal, x, y) attribute triples
        for (int i = 0; i < indexes.length; i++) {
            temporalPos += indexes[i] + ",";
            xPositions += indexes[++i] + ",";
            yPositions += indexes[++i] + ",";
            sizeSpatioTemp++;
        }
        conf.set("xPositions", xPositions);
        conf.set("yPositions", yPositions);
    }
    conf.set("temporal-pos", temporalPos);

    conf.set("size-spatio-temporal", sizeSpatioTemp.toString());

    // checking resolutions

    if (utils.spatialResolution(spatialResolution) < 0) {
        System.out.println("Invalid spatial resolution: " + spatialResolution);
        System.exit(-1);
    }

    if (utils.spatialResolution(spatialResolution) == FrameworkUtils.POINTS) {
        System.out.println("The data needs to be reduced at least to neighborhoods or grid.");
        System.exit(-1);
    }

    if (utils.spatialResolution(currentSpatialResolution) < 0) {
        System.out.println("Invalid spatial resolution: " + currentSpatialResolution);
        System.exit(-1);
    }

    if (utils.spatialResolution(currentSpatialResolution) > utils.spatialResolution(spatialResolution)) {
        System.out.println("The current spatial resolution is coarser than "
                + "the desired one. You can only navigate from a fine resolution" + " to a coarser one.");
        System.exit(-1);
    }

    if (utils.temporalResolution(temporalResolution) < 0) {
        System.out.println("Invalid temporal resolution: " + temporalResolution);
        System.exit(-1);
    }

    String fileName = s3bucket + FrameworkUtils.preProcessingDir + "/" + dataset + "-" + temporalResolution
            + "-" + spatialResolution + gridResolution;
    conf.set("aggregates", fileName + ".aggregates");

    // making sure both files are removed, if they exist
    FrameworkUtils.removeFile(fileName, s3conf, s3);
    FrameworkUtils.removeFile(fileName + ".aggregates", s3conf, s3);

    /**
     * Hadoop Parameters
     * sources: http://www.slideshare.net/ImpetusInfo/ppt-on-advanced-hadoop-tuning-n-optimisation
     *          https://cloudcelebrity.wordpress.com/2013/08/14/12-key-steps-to-keep-your-hadoop-cluster-running-strong-and-performing-optimum/
     */

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");

    // using SnappyCodec for intermediate and output data ?
    // TODO: for now, using SnappyCodec -- what about LZO + Protocol Buffer serialization?
    //   LZO - http://www.oberhumer.com/opensource/lzo/#download
    //   Hadoop-LZO - https://github.com/twitter/hadoop-lzo
    //   Protocol Buffer - https://github.com/twitter/elephant-bird
    //   General Info - http://www.devx.com/Java/Article/47913
    //   Compression - http://comphadoop.weebly.com/index.html
    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
                "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
                "org.apache.hadoop.io.compress.BZip2Codec");
    }

    // TODO: this is dangerous!
    if (s3) {
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    Job job = new Job(conf);
    job.setJobName(dataset + "-" + temporalResolution + "-" + spatialResolution);

    job.setMapOutputKeyClass(MultipleSpatioTemporalWritable.class);
    job.setMapOutputValueClass(AggregationArrayWritable.class);

    job.setOutputKeyClass(MultipleSpatioTemporalWritable.class);
    job.setOutputValueClass(AggregationArrayWritable.class);

    job.setMapperClass(PreProcessingMapper.class);
    job.setCombinerClass(PreProcessingCombiner.class);
    job.setReducerClass(PreProcessingReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());
    //job.setNumReduceTasks(1);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(job, true);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    FileInputFormat.setInputPaths(job, new Path(s3bucket + FrameworkUtils.dataDir + "/" + dataset));
    FileOutputFormat.setOutputPath(job, new Path(fileName));

    job.setJarByClass(PreProcessing.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(fileName + "\t" + (System.currentTimeMillis() - start));

}
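
The split call in this example is spatialResolution.split("-"), which separates a combined spatial-resolution value such as "grid-500" into the resolution name and the grid size (the exact value format is an assumption for illustration):

String spatialResolution = "grid-500"; // hypothetical combined value
String[] res = spatialResolution.split("-");
String resolution = res[0];     // "grid"
String gridResolution = res[1]; // "500"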

From source file:com.moss.schematrax.SchemaUpdater.java

public static void main(String[] args) throws Exception {

    boolean configureLogging = Boolean.parseBoolean(System.getProperty("configureLogging", "true"));
    if (configureLogging) {
        BasicConfigurator.configure();
        Logger.getRootLogger().setLevel(Level.INFO);
    }

    Map<String, String> argsMap = new HashMap<String, String>();
    for (int x = 0; x < args.length; x++) {
        String arg = args[x];
        String[] pair = arg.split("=");
        if (pair.length != 2) {
            System.out.println("Error parsing command line arguments in value pair:" + arg);
            System.out.println(usage);
            System.exit(1);
        }
        String name = pair[0];
        String value = pair[1];
        argsMap.put(name, value);
    }

    String mode = argsMap.get("mode");
    String databaseTypeName = argsMap.get("dbtype");
    String databaseName = argsMap.get("catalog");
    String logon = argsMap.get("logon");
    String password = argsMap.get("password");
    String host = argsMap.get("host");
    if (args.length < 5 || databaseTypeName == null || databaseName == null || logon == null || password == null
            || host == null || mode == null) {
        System.out.println(usage);
        System.exit(1); // bail out: a missing mode would otherwise cause a NullPointerException below
    }

    String schematraxFileLocation = argsMap.get("schematraxFile");

    if (schematraxFileLocation == null)
        schematraxFileLocation = "";

    String schemaName = argsMap.get("schema");
    if (schemaName == null)
        schemaName = logon;

    DatabaseType dbType = DatabaseType.getDatabaseType(databaseTypeName);

    String schemaVersion = argsMap.get("version");

    JdbcConnectionConfig connectionConfig = new JdbcConnectionConfig();
    connectionConfig.setJdbcDriverClassName(dbType.getJdbcDriver().getClassName());
    connectionConfig
            .setJdbcUrl(dbType.getJdbcDriver().createJdbcUrl(host, null, databaseName, logon, password));
    connectionConfig.setLogon(logon);
    connectionConfig.setPassword(password);

    SchemaUpdater updater = new SchemaUpdater(new File(schematraxFileLocation).toURI().toURL());

    if (mode.equals("update")) {
        if (schemaVersion == null) {
            System.err.println("You must specify a schema version");
            System.err.println(usage);
            System.exit(1); // abort rather than attempt an update with a null version
        }
        updater.updateSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else if (mode.equals("create")) {
        updater.createSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else {
        System.err.println("Invalid mode:" + mode);
        System.err.println(usage);
    }
}
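
The split call in this example is arg.split("="), which parses name=value command-line pairs. As written, a value containing '=' fails the pair.length check; if such values should be accepted, a limit of 2 splits only at the first '='. A sketch of that variant:

String arg = "password=p=w"; // hypothetical argument whose value contains '='
String[] pair = arg.split("=", 2); // at most two parts: the name and the rest
// pair[0] == "password", pair[1] == "p=w"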

From source file:com.msd.gin.halyard.tools.HalyardExport.java

/**
 * Main of the HalyardExport
 * @param args String command line arguments
 * @throws Exception throws Exception in case of any problem
 */
public static void main(final String args[]) throws Exception {
    if (conf == null)
        conf = new Configuration();
    Options options = new Options();
    options.addOption(newOption("h", null, "Prints this help"));
    options.addOption(newOption("v", null, "Prints version"));
    options.addOption(newOption("s", "source_htable", "Source HBase table with Halyard RDF store"));
    options.addOption(
            newOption("q", "sparql_query", "SPARQL tuple or graph query executed to export the data"));
    options.addOption(newOption("t", "target_url",
            "file://<path>/<file_name>.<ext> or hdfs://<path>/<file_name>.<ext> or jdbc:<jdbc_connection>/<table_name>"));
    options.addOption(newOption("p", "property=value", "JDBC connection properties"));
    options.addOption(newOption("l", "driver_classpath", "JDBC driver classpath delimited by ':'"));
    options.addOption(newOption("c", "driver_class", "JDBC driver class name"));
    options.addOption(newOption("r", null, "Trim target table before export (apply for JDBC only)"));
    try {
        CommandLine cmd = new PosixParser().parse(options, args);
        if (args.length == 0 || cmd.hasOption('h')) {
            printHelp(options);
            return;
        }
        if (cmd.hasOption('v')) {
            Properties p = new Properties();
            try (InputStream in = HalyardExport.class
                    .getResourceAsStream("/META-INF/maven/com.msd.gin.halyard/hbasesail/pom.properties")) {
                if (in != null)
                    p.load(in);
            }
            System.out.println("Halyard Export version " + p.getProperty("version", "unknown"));
            return;
        }
        if (!cmd.getArgList().isEmpty())
            throw new ExportException("Unknown arguments: " + cmd.getArgList().toString());
        for (char c : "sqt".toCharArray()) {
            if (!cmd.hasOption(c))
                throw new ExportException("Missing mandatory option: " + c);
        }
        for (char c : "sqtlc".toCharArray()) {
            String s[] = cmd.getOptionValues(c);
            if (s != null && s.length > 1)
                throw new ExportException("Multiple values for option: " + c);
        }
        StatusLog log = new StatusLog() {
            private final Logger l = Logger.getLogger(HalyardExport.class.getName());

            @Override
            public void tick() {
            }

            @Override
            public void logStatus(String status) {
                l.info(status);
            }
        };
        String driverClasspath = cmd.getOptionValue('l');
        URL driverCP[] = null;
        if (driverClasspath != null) {
            String jars[] = driverClasspath.split(":");
            driverCP = new URL[jars.length];
            for (int j = 0; j < jars.length; j++) {
                File f = new File(jars[j]);
                if (!f.isFile())
                    throw new ExportException("Invalid JDBC driver classpath element: " + jars[j]);
                driverCP[j] = f.toURI().toURL();
            }
        }
        export(conf, log, cmd.getOptionValue('s'), cmd.getOptionValue('q'), cmd.getOptionValue('t'),
                cmd.getOptionValue('c'), driverCP, cmd.getOptionValues('p'), cmd.hasOption('r'));
    } catch (RuntimeException exp) {
        System.out.println(exp.getMessage());
        printHelp(options);
        throw exp;
    }
}
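
The split call in this example is driverClasspath.split(":"), which breaks the -l option value into individual JAR paths (the paths below are made up for illustration):

String driverClasspath = "/opt/jdbc/driver.jar:/opt/jdbc/deps.jar"; // hypothetical -l value
String[] jars = driverClasspath.split(":");
// jars[0] == "/opt/jdbc/driver.jar", jars[1] == "/opt/jdbc/deps.jar"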

From source file:com.hpe.nv.samples.advanced.AdvMultipleTestsSequential.java

public static void main(String[] args) throws Exception {
    try {//from w  w w. ja  v  a  2  s  .co  m
        // program arguments
        Options options = new Options();
        options.addOption("i", "server-ip", true, "[mandatory] NV Test Manager IP");
        options.addOption("o", "server-port", true, "[mandatory] NV Test Manager port");
        options.addOption("u", "username", true, "[mandatory] NV username");
        options.addOption("w", "password", true, "[mandatory] NV password");
        options.addOption("e", "ssl", true, "[optional] Pass true to use SSL. Default: false");
        options.addOption("y", "proxy", true, "[optional] Proxy server host:port");
        options.addOption("t", "site-url", true,
                "[optional] Site under test URL. Default: HPE Network Virtualization site URL. If you change this value, make sure to change the --xpath argument too");
        options.addOption("x", "xpath", true,
                "[optional] Parameter for ExpectedConditions.visibilityOfElementLocated(By.xpath(...)) method. Use an xpath expression of some element in the site. Default: //div[@id='content']");
        options.addOption("a", "active-adapter-ip", true,
                "[optional] Active adapter IP. Default: --server-ip argument");
        options.addOption("f", "first-zip-result-file-path", true,
                "[optional] File path to store the first test analysis results as a .zip file");
        options.addOption("s", "second-zip-result-file-path", true,
                "[optional] File path to store the second test analysis results as a .zip file");
        options.addOption("k", "analysis-ports", true,
                "[optional] A comma-separated list of ports for test analysis");
        options.addOption("b", "browser", true,
                "[optional] The browser for which the Selenium WebDriver is built. Possible values: Chrome and Firefox. Default: Firefox");
        options.addOption("d", "debug", true,
                "[optional] Pass true to view console debug messages during execution. Default: false");
        options.addOption("h", "help", false, "[optional] Generates and prints help information");

        // parse and validate the command line arguments
        CommandLineParser parser = new DefaultParser();
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("help")) {
            // print help if help argument is passed
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("AdvMultipleTestsSequential.java", options);
            return;
        }

        if (line.hasOption("server-ip")) {
            serverIp = line.getOptionValue("server-ip");
            if (serverIp.equals("0.0.0.0")) {
                throw new Exception(
                        "Please replace the server IP argument value (0.0.0.0) with your NV Test Manager IP");
            }
        } else {
            throw new Exception("Missing argument -i/--server-ip <serverIp>");
        }

        if (line.hasOption("server-port")) {
            serverPort = Integer.parseInt(line.getOptionValue("server-port"));
        } else {
            throw new Exception("Missing argument -o/--server-port <serverPort>");
        }

        if (line.hasOption("username")) {
            username = line.getOptionValue("username");
        } else {
            throw new Exception("Missing argument -u/--username <username>");
        }

        if (line.hasOption("password")) {
            password = line.getOptionValue("password");
        } else {
            throw new Exception("Missing argument -w/--password <password>");
        }

        if (line.hasOption("ssl")) {
            ssl = Boolean.parseBoolean(line.getOptionValue("ssl"));
        }

        if (line.hasOption("site-url")) {
            siteUrl = line.getOptionValue("site-url");
        } else {
            siteUrl = "http://www8.hp.com/us/en/software-solutions/network-virtualization/index.html";
        }

        if (line.hasOption("xpath")) {
            xpath = line.getOptionValue("xpath");
        } else {
            xpath = "//div[@id='content']";
        }

        if (line.hasOption("first-zip-result-file-path")) {
            firstZipResultFilePath = line.getOptionValue("first-zip-result-file-path");
        }

        if (line.hasOption("second-zip-result-file-path")) {
            secondZipResultFilePath = line.getOptionValue("second-zip-result-file-path");
        }

        if (line.hasOption("proxy")) {
            proxySetting = line.getOptionValue("proxy");
        }

        if (line.hasOption("active-adapter-ip")) {
            activeAdapterIp = line.getOptionValue("active-adapter-ip");
        } else {
            activeAdapterIp = serverIp;
        }

        if (line.hasOption("analysis-ports")) {
            String analysisPortsStr = line.getOptionValue("analysis-ports");
            analysisPorts = analysisPortsStr.split(",");
        } else {
            analysisPorts = new String[] { "80", "8080" };
        }

        if (line.hasOption("browser")) {
            browser = line.getOptionValue("browser");
        } else {
            browser = "Firefox";
        }

        if (line.hasOption("debug")) {
            debug = Boolean.parseBoolean(line.getOptionValue("debug"));
        }

        String newLine = System.getProperty("line.separator");
        String testDescription = "***   This sample shows how to run several tests sequentially with different network scenarios.       ***"
                + newLine
                + "***                                                                                                   ***"
                + newLine
                + "***   You can view the actual steps of this sample in the AdvMultipleTestsSequential.java file.       ***"
                + newLine;

        // print the sample's description
        System.out.println(testDescription);

        // start console spinner
        if (!debug) {
            spinner = new Thread(new Spinner());
            spinner.start();
        }

        // sample execution steps
        /*****    Part 1 - Initialize the TestManager object and set the active adapter                                                 *****/
        printPartDescription(
                "\b------    Part 1 - Initialize the TestManager object and set the active adapter");
        initTestManager();
        setActiveAdapter();
        printPartSeparator();
        /*****    Part 2 - Start the first NV test with the "3G Busy" network scenario and run the "Home Page" transaction              *****/
        printPartDescription(
                "------    Part 2 - Start the first NV test with the \"3G Busy\" network scenario and run the \"Home Page\" transaction");
        startTest("3G Busy");
        testRunning = true;
        connectToTransactionManager();
        startTransaction();
        transactionInProgress = true;
        buildSeleniumWebDriver();
        seleniumNavigateToPage();
        stopTransaction();
        transactionInProgress = false;
        driverCloseAndQuit();
        printPartSeparator();
        /*****    Part 3 - Stop the first NV test, analyze it and print the results to the console                                      *****/
        printPartDescription(
                "------    Part 3 - Stop the first NV test, analyze it and print the results to the console");
        stopTest();
        testRunning = false;
        analyzeTest();
        printPartSeparator();
        /*****    Part 4 - Start the second NV test with the "3G Good" network scenario and run the "Home Page" transaction                                                                                                                 *****/
        printPartDescription(
                "------    Part 4 - Start the second NV test with the \"3G Good\" network scenario and run the \"Home Page\" transaction");
        startTest("3G Good");
        testRunning = true;
        connectToTransactionManager();
        startTransaction();
        transactionInProgress = true;
        buildSeleniumWebDriver();
        seleniumNavigateToPage();
        stopTransaction();
        transactionInProgress = false;
        driverCloseAndQuit();
        printPartSeparator();
        /*****    Part 5 - Stop the second NV test, analyze it and print the results to the console                                      *****/
        printPartDescription(
                "------    Part 3 - Stop the second NV test, analyze it and print the results to the console");
        stopTest();
        testRunning = false;
        analyzeTest();
        printPartSeparator();
        doneCallback();

    } catch (Exception e) {
        try {
            handleError(e.getMessage());
        } catch (Exception e2) {
            System.out.println("Error occurred: " + e2.getMessage());
        }
    }
}

From source file:edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true,
            "aws secrect access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : new String[0];
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("First group of datasets (G1) is empty. " + "Doing G1 = G2.");
        firstGroup.addAll(secondGroup);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("Second group of datasets (G2) is empty. " + "Doing G2 = G1.");
        secondGroup.addAll(firstGroup);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.correlationTechniquesDir, s3conf, s3);

    String dataAttributesInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + FrameworkUtils.correlationTechniquesDir + "/"
                    + dataset1 + "-" + dataset2 + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        dataAttributesInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "correlation";
    String correlationOutputDir = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/";

    FrameworkUtils.removeFile(correlationOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
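    // each dataset's aggregate list is comma-separated, so the number of
    // aggregate functions is simply the length of the split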
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(SpatioTemporalValueWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationTechniquesMapper.class);
    job.setReducerClass(CorrelationTechniquesReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job,
            dataAttributesInputDirs.substring(0, dataAttributesInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(correlationOutputDir));

    job.setJarByClass(CorrelationTechniques.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/" + dataset1 + "-"
                    + dataset2 + "/";
            String to = s3bucket + FrameworkUtils.correlationTechniquesDir + "/" + dataset1 + "-" + dataset2
                    + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}
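
The dataset-index lookup above is a typical use of split: each line of the index file carries a dataset name and a numeric id separated by a tab, and split("\t") recovers the two fields. Below is a minimal, self-contained sketch of that pattern; the dataset names and ids are made up, and a StringReader stands in for the HDFS index file.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

public class TabIndexSketch {
    public static void main(String[] args) throws IOException {
        // stand-in for the datasets index: one "name<TAB>id" pair per line
        String index = "taxi\t1\nweather\t2\ncrime\t3\n";

        Map<String, String> datasetId = new HashMap<String, String>();
        BufferedReader br = new BufferedReader(new StringReader(index));
        String line;
        while ((line = br.readLine()) != null) {
            // dt[0] is the dataset name, dt[1] its id
            String[] dt = line.split("\t");
            datasetId.put(dt[0], dt[1]);
        }
        br.close();

        System.out.println(datasetId.get("weather")); // prints 2
    }
}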

From source file:eu.smartfp7.foursquare.AttendanceCrawler.java

/**
 * The main method takes an arbitrary number of cities as arguments, then
 * initializes the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the
 * `DownloadPages` program.
 * 
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 * 
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of inconsistencies in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call was made for each
    // venue. Ensures that we fetch data only once each hour.
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                // read to the end of the file, keeping the last line in 'mem'
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too many venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use its credentials.\nExiting now.");
        return;
    }

    while (true) {

        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there are no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the beginning
                         *  of the specific crawl. We might want to change this and update this file once
                         *  in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    // A deleted venue is dropped for good; any other failure re-queues
                    // the venue so that it gets reevaluated later.
                    if (e instanceof FoursquareAPIException
                            && ((FoursquareAPIException) e).getHttp_code().equals("400")
                            && ((FoursquareAPIException) e).getError_detail()
                                    .equals("Venue " + venue_id + " has been deleted")) {
                        city_venues.get(c).remove(venue_id);
                        removeVenue(venue_id, c);
                    } else {
                        city_venues_buffer.add(venue_id);
                    }

                    continue;
                }
            } // while

            // Every day between midnight and 2am, we repair all the broken time
            // series (if there is anything to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}
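
The restart logic above recovers the crawler's in-memory state by reading the last line of each venue's time-series file and splitting it on commas. A minimal sketch of that step, assuming the Date,here_now,hour_checkins,total_checkins layout written by the crawler (the sample rows are made up):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.text.ParseException;
import java.text.SimpleDateFormat;

public class LastLineRecoverySketch {
    public static void main(String[] args) throws IOException, ParseException {
        // stand-in for a venue's .ts file
        String ts = "Date,here_now,hour_checkins,total_checkins\n"
                + "2013-06-01 14:00:00,4,7,1520\n"
                + "2013-06-01 15:00:00,2,3,1523\n";

        BufferedReader buffer = new BufferedReader(new StringReader(ts));
        String mem = null, line;
        // read to the end, keeping only the last line
        while ((line = buffer.readLine()) != null)
            mem = line;
        buffer.close();

        // same fields the crawler restores: timestamp of the last call
        // (field 0) and the venue's total checkin count (field 3)
        String[] tmp = mem.split(",");
        long lastCall = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(tmp[0]).getTime();
        int lastCheckins = Integer.parseInt(tmp[3]);
        System.out.println(lastCall + " / " + lastCheckins);
    }
}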

From source file:com.mycompany.mavenproject4.NewMain.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.out.println("Enter your choice do you want to work with 1.postgre 2.Redis 3.Mongodb");
    InputStreamReader IORdatabase = new InputStreamReader(System.in);
    BufferedReader BIOdatabase = new BufferedReader(IORdatabase);
    String databasechoice = null;
    try {
        databasechoice = BIOdatabase.readLine();
    } catch (Exception E7) {
        System.out.println(E7.getMessage());
    }

    // loading data from the CSV file 

    CsvReader Employees = null;

    try {
        Employees = new CsvReader("CSVFile\\Employees.csv");
    } catch (FileNotFoundException EB) {
        System.out.println(EB.getMessage());
    }

    int ColumnCount = 3;
    int RowCount = 100;
    int iloop = 0;

    String[][] Dataarray = new String[RowCount][ColumnCount];
    try {
        while (Employees.readRecord()) {
            // naive CSV parsing: split the raw record on commas
            String[] x = Employees.getRawRecord().split(",");
            for (int j = 0; j < ColumnCount; j++) {
                try {
                    Dataarray[iloop][j] = x[j];
                } catch (Exception E) {
                    System.out.println(E.getMessage());
                }
                // System.out.println(Dataarray[iloop][j]);
            }
            iloop = iloop + 1;
        }
    } catch (IOException Em) {
        System.out.println(Em.getMessage());
    }

    Employees.close();

    // connection to Database 
    switch (databasechoice) {
    // PostgreSQL code goes here 
    case "1":

        Connection Conn = null;
        Statement Stmt = null;
        URI dbUri = null;
        String choice = null;
        InputStreamReader objin = new InputStreamReader(System.in);
        BufferedReader objbuf = new BufferedReader(objin);
        try {
            Class.forName("org.postgresql.Driver");
        } catch (Exception E1) {
            System.out.println(E1.getMessage());
        }
        String username = "ldoiarosfrzrua";
        String password = "HBkE_pJpK5mMIg3p2q1_odmEFX";
        String dbUrl = "jdbc:postgresql://ec2-54-83-53-120.compute-1.amazonaws.com:5432/d6693oljh5cco4?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory";

        // now update data in the postgress Database 

        // for(int i=0;i<RowCount;i++)
        // {
        //       try 
        //          {

        //         Conn= DriverManager.getConnection(dbUrl, username, password);    
        //           Stmt = Conn.createStatement();
        //           String Connection_String = "insert into EmployeeDistinct (name,jobtitle,department) values (' "+ Dataarray[i][0]+" ',' "+ Dataarray[i][1]+" ',' "+ Dataarray[i][2]+" ')";
        //         Stmt.executeUpdate(Connection_String);
        //       
        //        }
        //        catch(SQLException E4)
        //         {
        //             System.out.println(E4.getMessage());
        //          }
        // }

        // Quering with the Database 

        System.out.println("1. Display Data ");
        System.out.println("2. Select data based on primary key");
        System.out.println("3. Select data based on Other attributes e.g Name");
        System.out.println("Enter your Choice ");
        try {
            choice = objbuf.readLine();
        } catch (IOException E) {
            System.out.println(E.getMessage());
        }
        switch (choice) {
        case "1":
            try {
                Conn = DriverManager.getConnection(dbUrl, username, password);
                Stmt = Conn.createStatement();
                String Connection_String = " Select * from EmployeeDistinct;";
                ResultSet objRS = Stmt.executeQuery(Connection_String);
                while (objRS.next()) {
                    System.out.println("Employee ID: " + objRS.getInt("EmployeeID"));
                    System.out.println("Employee Name: " + objRS.getString("Name"));
                    System.out.println("Employee City: " + objRS.getString("Jobtitle"));
                    System.out.println("Employee City: " + objRS.getString("Department"));
                }

            } catch (Exception E2) {
                System.out.println(E2.getMessage());
            }

            break;

        case "2":

            System.out.println("Enter the Primary Key");
            InputStreamReader objkey = new InputStreamReader(System.in);
            BufferedReader objbufkey = new BufferedReader(objkey);
            int key = 0;
            try {
                key = Integer.parseInt(objbufkey.readLine());
            } catch (IOException E) {
                System.out.println(E.getMessage());
            }
            try {
                Conn = DriverManager.getConnection(dbUrl, username, password);
                Stmt = Conn.createStatement();
                String Connection_String = " Select * from EmployeeDistinct where EmployeeID=" + key + ";";
                ResultSet objRS = Stmt.executeQuery(Connection_String);
                while (objRS.next()) {
                    System.out.println("Employee Name: " + objRS.getString("Name"));
                    System.out.println("Employee City: " + objRS.getString("Jobtitle"));
                    System.out.println("Employee City: " + objRS.getString("Department"));
                }

            } catch (Exception E2) {
                System.out.println(E2.getMessage());
            }
            break;

        case "3":
            String Name = null;
            System.out.println("Enter Name to find the record");
            InputStreamReader objname = new InputStreamReader(System.in);
            BufferedReader objbufname = new BufferedReader(objname);

            try {
                Name = (objbufname.readLine()).toString();
            } catch (IOException E) {
                System.out.println(E.getMessage());
            }
            try {
                Conn = DriverManager.getConnection(dbUrl, username, password);
                Stmt = Conn.createStatement();
                String Connection_String = " Select * from EmployeeDistinct where Name=" + "'" + Name + "'"
                        + ";";

                ResultSet objRS = Stmt.executeQuery(Connection_String);
                while (objRS.next()) {
                    System.out.println("Employee ID: " + objRS.getInt("EmployeeID"));
                    System.out.println("Employee City: " + objRS.getString("Jobtitle"));
                    System.out.println("Employee City: " + objRS.getString("Department"));
                }

            } catch (Exception E2) {
                System.out.println(E2.getMessage());
            }
            break;
        }
        try {
            Conn.close();
        } catch (SQLException E6) {
            System.out.println(E6.getMessage());
        }
        break;

    // Redis code goes here 

    case "2":
        int Length = 0;
        String ID = null;
        Length = 100;

        // Connection to Redis 
        Jedis jedis = new Jedis("pub-redis-18017.us-east-1-4.6.ec2.redislabs.com", 18017);
        jedis.auth("7EFOisRwMu8zvhin");
        System.out.println("Connected to Redis");

        // Storing values in the database 

        //  System.out.println("Storing values in the Database might take a minute ");

        // for(int i=0;i<Length;i++)
        // { 
        // Store data in redis 
        //     int j=i+1;
        //  jedis.hset("Employe:" + j , "Name", Dataarray[i][0]);
        //   jedis.hset("Employe:" + j , "Jobtitle ", Dataarray[i][1]);
        //  jedis.hset("Employe:" + j , "Department", Dataarray[i][2]);

        //  }

        System.out.println("Search by 1.primary key,2.Name 3.display first 20");
        InputStreamReader objkey = new InputStreamReader(System.in);
        BufferedReader objbufkey = new BufferedReader(objkey);
        String interimChoice1 = null;
        try {
            interimChoice1 = objbufkey.readLine();
        } catch (IOException E) {
            System.out.println(E.getMessage());
        }
        switch (interimChoice1) {
        case "1":
            System.out.print("Get details using the primary Key");
            InputStreamReader IORKey = new InputStreamReader(System.in);
            BufferedReader BIORKey = new BufferedReader(IORKey);
            String ID1 = null;
            try {
                ID1 = BIORKey.readLine();
            } catch (IOException E3) {
                System.out.println(E3.getMessage());
            }

            Map<String, String> properties = jedis.hgetAll("Employe:" + Integer.parseInt(ID1));
            for (Map.Entry<String, String> entry : properties.entrySet()) {
                System.out.println("Name:" + jedis.hget("Employee:" + Integer.parseInt(ID1), "Name"));
                System.out.println("Jobtitle:" + jedis.hget("Employee:" + Integer.parseInt(ID1), "Jobtitle"));
                System.out
                        .println("Department:" + jedis.hget("Employee:" + Integer.parseInt(ID1), "Department"));
            }
            break;

        case "2":
            System.out.print("Get details using Department");
            InputStreamReader IORName1 = new InputStreamReader(System.in);
            BufferedReader BIORName1 = new BufferedReader(IORName1);

            String ID2 = null;
            try {
                ID2 = BIORName1.readLine();
            } catch (IOException E3) {
                System.out.println(E3.getMessage());
            }
            for (int i = 0; i < 100; i++) {
                String key = "Employe:" + i;
                Map<String, String> properties3 = jedis.hgetAll(key);
                // print the record once if any of its fields matches the input
                for (Map.Entry<String, String> entry : properties3.entrySet()) {
                    if (entry.getValue().equals(ID2)) {
                        System.out.println("Name:" + properties3.get("Name"));
                        System.out.println("Jobtitle:" + properties3.get("Jobtitle"));
                        System.out.println("Department:" + properties3.get("Department"));
                        break;
                    }
                }
            }

            //for (Map.Entry<String, String> entry : properties1.entrySet())
            //{
            //System.out.println(entry.getKey() + "---" + entry.getValue());
            // }
            break;

        case "3":
            for (int i = 1; i < 21; i++) {
                Map<String, String> properties3 = jedis.hgetAll("Employe:" + i);
                if (properties3.isEmpty())
                    continue;
                System.out.println("Name:" + properties3.get("Name"));
                System.out.println("Jobtitle:" + properties3.get("Jobtitle"));
                System.out.println("Department:" + properties3.get("Department"));
            }
            break;
        }

        break;

    // MongoDB code goes here 

    case "3":
        MongoClient mongo = new MongoClient(
                new MongoClientURI("mongodb://root2:12345@ds055564.mongolab.com:55564/heroku_766dnj8c"));
        DB db;
        db = mongo.getDB("heroku_766dnj8c");
        // storing values in the database 
        //  for(int i=0;i<100;i++)
        // {
        //     BasicDBObject document = new BasicDBObject();
        //    document.put("_id", i+1);
        //    document.put("Name", Dataarray[i][0]);
        //    document.put("Jobtitle", Dataarray[i][1]);    
        //    document.put("Department", Dataarray[i][2]);    
        //    db.getCollection("EmployeeRaw").insert(document);

        // }
        System.out.println("Search by 1.primary key,2.Name 3.display first 20");
        InputStreamReader objkey6 = new InputStreamReader(System.in);
        BufferedReader objbufkey6 = new BufferedReader(objkey6);
        String interimChoice = null;
        try {
            interimChoice = objbufkey6.readLine();
        } catch (IOException E) {
            System.out.println(E.getMessage());
        }
        switch (interimChoice) {
        case "1":
            System.out.println("Enter the Primary Key");
            InputStreamReader IORPkey = new InputStreamReader(System.in);
            BufferedReader BIORPkey = new BufferedReader(IORPkey);
            int Pkey = 0;
            try {
                Pkey = Integer.parseInt(BIORPkey.readLine());
            } catch (IOException E) {
                System.out.println(E.getMessage());
            }
            BasicDBObject inQuery = new BasicDBObject();
            inQuery.put("_id", Pkey);
            DBCursor cursor = db.getCollection("EmployeeRaw").find(inQuery);
            while (cursor.hasNext()) {
                // System.out.println(cursor.next());
                System.out.println(cursor.next());
            }
            break;
        case "2":
            System.out.println("Enter the Name to Search");
            InputStreamReader objName = new InputStreamReader(System.in);
            BufferedReader objbufName = new BufferedReader(objName);
            String Name = null;
            try {
                Name = objbufName.readLine();
            } catch (IOException E) {
                System.out.println(E.getMessage());
            }
            BasicDBObject inQuery1 = new BasicDBObject();
            inQuery1.put("Name", Name);
            DBCursor cursor1 = db.getCollection("EmployeeRaw").find(inQuery1);
            while (cursor1.hasNext()) {
                // System.out.println(cursor.next());
                System.out.println(cursor1.next());
            }
            break;

        case "3":
            for (int i = 1; i < 21; i++) {
                BasicDBObject inQuerya = new BasicDBObject();
                inQuerya.put("_id", i);
                DBCursor cursora = db.getCollection("EmployeeRaw").find(inQuerya);
                while (cursora.hasNext()) {
                    // System.out.println(cursor.next());
                    System.out.println(cursora.next());
                }
            }
            break;
        }
        break;

    }

}
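
The loader above parses each raw CSV record with a bare split(","), which silently discards trailing empty fields and cannot handle quoted values containing commas. The trailing-field caveat is easy to demonstrate (the record below is made up):

import java.util.Arrays;

public class CsvSplitCaveat {
    public static void main(String[] args) {
        // a record whose last column (department) is empty
        String record = "Alice,Engineer,";

        // split(",") drops trailing empty strings: only 2 fields survive
        System.out.println(Arrays.toString(record.split(",")));     // [Alice, Engineer]

        // a negative limit keeps the trailing empty field: 3 fields
        System.out.println(Arrays.toString(record.split(",", -1))); // [Alice, Engineer, ]
    }
}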

From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java

public static void main(String[] args) throws IOException, java.text.ParseException {
    opts.addOption("algorithm", true,
            "Used algorithm for compression. Possible values are 'brute-force', "
                    + "'brute-force-edges','brute-force-merges','randomized','randomized-merges',"
                    + "'randomized-edges'," + "'fast-brute-force',"
                    + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'.");
    opts.addOption("query", true, "Query nodes ids, separated by comma.");
    opts.addOption("queryfile", true, "Read query nodes from file.");
    opts.addOption("ratio", true, "Goal ratio");
    opts.addOption("importancefile", true, "Read importances straight from file");
    opts.addOption("keepedges", false, "Don't remove edges during merges");
    opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case");
    opts.addOption("paths", false, "Do path oriented compression");
    opts.addOption("edges", false, "Do edge oriented compression");
    // opts.addOption( "a",

    double sigma = 1.0;
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    String queryStr = cmd.getOptionValue("query");
    String[] queryNodeIDs = {};
    double[] queryNodeIMP = {};
    if (queryStr != null) {
        queryNodeIDs = queryStr.split(",");
        queryNodeIMP = new double[queryNodeIDs.length];
        for (int i = 0; i < queryNodeIDs.length; i++) {
            String s = queryNodeIDs[i];
            String[] es = s.split("=");
            queryNodeIMP[i] = 1;
            if (es.length == 2) {
                queryNodeIDs[i] = es[0];
                queryNodeIMP[i] = Double.parseDouble(es[1]);
            } else if (es.length > 2) {
                System.out.println("Too many '=' in querynode specification: " + s);
            }
        }
    }

    String queryFile = cmd.getOptionValue("queryfile");
    Map<String, Double> queryNodes = Collections.EMPTY_MAP;
    if (queryFile != null) {
        File in = new File(queryFile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        queryNodes = readMap(read);
        read.close();
    }

    String impfile = cmd.getOptionValue("importancefile");
    Map<String, Double> importances = null;
    if (impfile != null) {
        File in = new File(impfile);
        BufferedReader read = new BufferedReader(new FileReader(in));

        importances = readMap(read);
        read.close();
    }

    String algoStr = cmd.getOptionValue("algorithm");
    CompressionAlgorithm algo = null;

    if (algoStr == null || algoStr.equals("brute-force")) {
        algo = new BruteForceCompression();
    } else if (algoStr.equals("brute-force-edges")) {
        algo = new BruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("brute-force-merges")) {
        algo = new BruteForceCompressionOnlyMerges();
    } else if (algoStr.equals("fast-brute-force-merges")) {
        //algo = new FastBruteForceCompressionOnlyMerges();
        algo = new FastBruteForceCompression(true, false);
    } else if (algoStr.equals("fast-brute-force-edges")) {
        algo = new FastBruteForceCompression(false, true);
        //algo = new FastBruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("fast-brute-force")) {
        algo = new FastBruteForceCompression(true, true);
    } else if (algoStr.equals("randomized-edges")) {
        algo = new RandomizedCompressionOnlyEdges(); //modified
    } else if (algoStr.equals("randomized")) {
        algo = new RandomizedCompression();
    } else if (algoStr.equals("randomized-merges")) {
        algo = new RandomizedCompressionOnlyMerges();
    } else {
        System.out.println("Unsupported algorithm: " + algoStr);
        printHelp();
    }

    String ratioStr = cmd.getOptionValue("ratio");
    double ratio = 0;
    if (ratioStr != null) {
        ratio = Double.parseDouble(ratioStr);
    } else {
        System.out.println("Goal ratio not specified");
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile));
    HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>();
    for (String id : queryNodes.keySet()) {
        queryBMNodes.put(bmg.getNode(id), queryNodes.get(id));
    }

    long startMillis = System.currentTimeMillis();
    ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes);

    if (importances != null) {
        for (String id : importances.keySet()) {
            wrap.setImportance(bmg.getNode(id), importances.get(id));
        }
    }

    ImportanceMerger merger = null;
    if (cmd.hasOption("edges")) {
        merger = new ImportanceMergerEdges(wrap.getImportanceGraph());
    } else if (cmd.hasOption("paths")) {
        merger = new ImportanceMergerPaths(wrap.getImportanceGraph());
    } else {
        System.out.println("Specify either 'paths' or 'edges'.");
        System.exit(1);
    }

    if (cmd.hasOption("keepedges")) {
        merger.setKeepEdges(true);
    }

    algo.compress(merger, ratio);
    long endMillis = System.currentTimeMillis();

    // write importance

    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false));
        for (BMNode nod : bmg.getNodes()) {
            wr.write(nod + " " + wrap.getImportance(nod) + "\n");
        }
        wr.close();
    }

    // write sum of all pairs of node importance    added by Fang
    /*   {
    BufferedWriter wr = new BufferedWriter(new FileWriter("sum_of_all_pairs_importance.txt", true));
    ImportanceGraph orig = wrap.getImportanceGraph();
    double sum = 0;
            
    for (int i = 0; i <= orig.getMaxNodeId(); i++) {
        for (int j = i+1; j <= orig.getMaxNodeId(); j++) {
            sum = sum+ wrap.getImportance(i)* wrap.getImportance(j);
        }
    }
            
    wr.write(""+sum);
    wr.write("\n");
    wr.close();
       }
            
    */

    // write uncompressed edges
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            String iname = wrap.intToNode(i).toString();
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i < j)
                    continue;
                String jname = wrap.intToNode(j).toString();
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b));
                wr.write("\n");
            }
        }
        wr.close();
    }
    // write distance
    {
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("distance.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true)); //modified by Fang

        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        double error = 0;
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i <= j)
                    continue;
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j);
                // modify by Fang: multiply imp(u)imp(v)

            }
        }
        error = Math.sqrt(error);
        //////////error = Math.sqrt(error / 2); // modified by Fang: the error of each
        // edge is counted twice
        wr.write("" + error);
        wr.write("\n");
        wr.close();
    }
    // write sizes
    {
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph comp = merger.getCurrentGraph();
        // BufferedWriter wr = new BufferedWriter(new
        // FileWriter("sizes.txt",false));
        BufferedWriter wr = new BufferedWriter(new FileWriter("sizes.txt", true)); //modified by Fang

        wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " "
                + comp.getEdgeCount());
        wr.write("\n");
        wr.close();
    }
    //write time
    {
        System.out.println("writing time");
        BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true)); //modified by Fang
        double secs = (endMillis - startMillis) * 0.001;
        wr.write("" + secs + "\n");
        wr.close();
    }

    //write change of connectivity for edge-oriented case       // added by Fang
    {
        if (cmd.hasOption("connectivity")) {

            BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true));
            ImportanceGraph orig = wrap.getImportanceGraph();
            ImportanceGraph ucom = merger.getUncompressedGraph();

            double diff = 0;

            for (int i = 0; i <= orig.getMaxNodeId(); i++) {
                ProbDijkstra pdori = new ProbDijkstra(orig, i);
                ProbDijkstra pducom = new ProbDijkstra(ucom, i);

                for (int j = i + 1; j <= orig.getMaxNodeId(); j++) {
                    double oriconn = pdori.getProbTo(j);
                    double ucomconn = pducom.getProbTo(j);

                    diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i)
                            * wrap.getImportance(j);

                }
            }

            diff = Math.sqrt(diff);
            wr.write("" + diff);
            wr.write("\n");
            wr.close();

        }
    }

    //write output graph
    {
        BMGraph output = bmg;//new BMGraph(bmg);

        int no = 0;
        BMNode[] nodes = new BMNode[merger.getGroups().size()];
        for (ArrayList<Integer> gr : merger.getGroups()) {
            BMNode bmgroup = new BMNode("Group", "" + (no + 1));
            bmgroup.setAttributes(new HashMap<String, String>());
            bmgroup.put("autoedges", "0");
            nodes[no] = bmgroup;
            no++;
            if (gr.size() == 0)
                continue;
            for (int x : gr) {
                BMNode nod = output.getNode(wrap.intToNode(x).toString());
                BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to");
                output.ensureHasEdge(belongs);
            }
            output.ensureHasNode(bmgroup);
        }
        for (int i = 0; i < nodes.length; i++) {
            for (int x : merger.getCurrentGraph().getNeighbors(i)) {
                if (x == i) {
                    nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                    //ge.put("goodness", ""+merger.getCurrentGraph().getEdgeWeight(i, x));
                    continue;
                }
                BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge");
                ge.setAttributes(new HashMap<String, String>());
                ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                output.ensureHasEdge(ge);
            }
        }
        System.out.println(output.getGroupNodes());

        BMGraphUtils.writeBMGraph(output, "output.bmg");
    }
}
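
The -query option above is parsed with two levels of split: the comma separates node specifications, and an optional "=" attaches an importance to a node id (defaulting to 1). A compact sketch of the same grammar, using a LinkedHashMap so insertion order is preserved (the node ids are made up):

import java.util.LinkedHashMap;
import java.util.Map;

public class QuerySpecSketch {
    public static void main(String[] args) {
        String queryStr = "node12,node7=0.5,node9=2.0";

        Map<String, Double> query = new LinkedHashMap<String, Double>();
        for (String s : queryStr.split(",")) {
            // es[0] is the node id; es[1], when present, its importance
            String[] es = s.split("=");
            double imp = (es.length == 2) ? Double.parseDouble(es[1]) : 1.0;
            query.put(es[0], imp);
        }
        System.out.println(query); // {node12=1.0, node7=0.5, node9=2.0}
    }
}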