Example usage for java.lang System currentTimeMillis

List of usage examples for java.lang System currentTimeMillis

Introduction

On this page you can find example usages of java.lang.System.currentTimeMillis.

Prototype

@HotSpotIntrinsicCandidate
public static native long currentTimeMillis();

Source Link

Document

Returns the current time in milliseconds.

Usage

From source file:com.thed.zapi.cloud.sample.CreateCycleAndAddTests.java

/**
 * Creates a test cycle in ZAPI Cloud and adds two test issues to it.
 *
 * <p>Flow: build a REST client from the (hard-coded) connection settings,
 * POST a cycle-creation payload, then POST an "add tests" payload for the
 * newly created cycle id.
 *
 * @param args unused
 */
public static void main(String[] args)
        throws URISyntaxException, JobProgressException, JSONException, IllegalStateException, IOException {
    // Replace zephyr baseurl <ZAPI_Cloud_URL> shared with the user for ZAPI Cloud
    String zephyrBaseUrl = "<ZAPI_Cloud_URL>";
    // NOTE(review): access/secret keys are hard-coded sample credentials
    // (obtained from Addons >> zapi section); move to config/env for real use.
    String accessKey = "YjE2MjdjMGEtNzExNy0zYjY1LWFkMzQtNjcwMDM3OTljOGkbWluIGFkbWlu";
    String secretKey = "qufnbimi96Ob2hq3ISF08yZ8HmQw4c1eHGeGlk";
    String userName = "admin";
    ZFJCloudRestClient client = ZFJCloudRestClient.restBuilder(zephyrBaseUrl, accessKey, secretKey, userName)
            .build();

    /** Declare the Variables here **/
    Long projectId = 10100l;
    Long versionId = -1l; // -1 denotes the "unscheduled" version
    String cycleName = "Test Cycle -- API DEMO";
    String cycleDescription = "Created by ZAPI CLOUD API";

    String createCycleUri = zephyrBaseUrl + "/public/rest/api/1.0/cycle?expand=&clonedCycleId=";

    /** Cycle Object created - DO NOT EDIT **/
    JSONObject createCycleObj = new JSONObject();
    createCycleObj.put("name", cycleName);
    createCycleObj.put("description", cycleDescription);
    createCycleObj.put("startDate", System.currentTimeMillis());
    createCycleObj.put("projectId", projectId);
    createCycleObj.put("versionId", versionId);

    // Use the charset-taking constructor instead of catching (and previously
    // swallowing) UnsupportedEncodingException — the old code printed the
    // trace and proceeded with a null entity, which would NPE in createCycle.
    StringEntity cycleJSON = new StringEntity(createCycleObj.toString(), "UTF-8");

    CreateCycleAndAddTests cc = new CreateCycleAndAddTests();
    String cycleID = cc.createCycle(createCycleUri, client, accessKey, cycleJSON);
    System.out.println("Cycle Created with Cycle Id :" + cycleID);

    /**
     * Add tests to Cycle IssueId's
     * 
     */

    String addTestsUri = zephyrBaseUrl + "/public/rest/api/1.0/executions/add/cycle/" + cycleID;
    String[] issueIds = { "SUP-1", "TUR-1" }; //Issue Id's to be added to Test Cycle, add more issueKeys separated by comma

    JSONObject addTestsObj = new JSONObject();
    addTestsObj.put("issues", issueIds);
    addTestsObj.put("method", "1");
    addTestsObj.put("projectId", projectId);
    addTestsObj.put("versionId", versionId);

    StringEntity addTestsJSON = new StringEntity(addTestsObj.toString(), "UTF-8");

    String ID = cc.addTestsToCycle(addTestsUri, client, accessKey, addTestsJSON);
    System.out.println("Tests added successfully  ");
}

From source file:com.appeligo.ccdataindexer.SpellIndexer.java

/**
 * Builds a "did you mean" spell index from an existing Lucene index.
 *
 * <p>Expects two or three arguments: the source index directory and the
 * destination spell-index path (the optional third argument is validated
 * for count only).
 */
public static void main(String[] args) throws Exception {
    // Bootstrap configuration from the "config" directory, "live" environment.
    ConfigurationService.setRootDir(new File("config"));
    ConfigurationService.setEnvName("live");
    ConfigurationService.init();

    // Require two or three arguments.
    if (args.length < 2 || args.length > 3) {
        usage();
        System.exit(-1);
    }
    File sourceIndexDir = new File(args[0]);
    if (!sourceIndexDir.isDirectory()) {
        usage();
        System.exit(-1);
    }

    long startMillis = System.currentTimeMillis();
    try {
        DidYouMeanIndexer.createDefaultSpellIndex(args[0], args[1]);
    } catch (IOException e) {
        log.error("Can't create spell index", e);
    }
    long elapsedMillis = System.currentTimeMillis() - startMillis;
    log.info("Processing took " + (elapsedMillis / (60 * 1000)) + " minutes to index the programs.");
}

From source file:PinotThroughput.java

@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) throws Exception {
    final int numQueries = QUERIES.length;
    final Random random = new Random(RANDOM_SEED);
    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(NUM_CLIENTS);

    for (int i = 0; i < NUM_CLIENTS; i++) {
        executorService.submit(new Runnable() {
            @Override//from w  w  w  . ja  va2s.  co  m
            public void run() {
                try (CloseableHttpClient client = HttpClients.createDefault()) {
                    HttpPost post = new HttpPost("http://localhost:8099/query");
                    CloseableHttpResponse res;
                    while (true) {
                        String query = QUERIES[random.nextInt(numQueries)];
                        post.setEntity(new StringEntity("{\"pql\":\"" + query + "\"}"));
                        long start = System.currentTimeMillis();
                        res = client.execute(post);
                        res.close();
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(System.currentTimeMillis() - start);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    long startTime = System.currentTimeMillis();
    while (true) {
        Thread.sleep(REPORT_INTERVAL_MILLIS);
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.get();
        double avgResponseTime = ((double) totalResponseTime.get()) / count;
        System.out.println("Time Passed: " + timePassedSeconds + "s, Query Executed: " + count + ", QPS: "
                + count / timePassedSeconds + ", Avg Response Time: " + avgResponseTime + "ms");
    }
}

From source file:kr.ac.kaist.wala.hybridroid.shell.Shell.java

/**
 * HybriDroid main function. Now, There is CFG-building option only in
 * HybriDroid./*from   w  ww.java2 s . c  om*/
 * 
 * @param args
 * @throws IOException
 * @throws IllegalArgumentException
 * @throws CancelException
 * @throws ParseException
 * @throws WalaException 
 */
public static void main(String[] args)
        throws IOException, IllegalArgumentException, CancelException, ParseException, WalaException {
    Shell.args = new CommandArguments(args);
    // Load wala property. Now, 'PROP_ARG' is essential option, so else
    // branch cannot be reached.
    if (Shell.args.has(CommandArguments.PROP_ARG)) {
        String propertyfile = Shell.args.get(CommandArguments.PROP_ARG);
        File propFile = new File(propertyfile);
        walaProperties = new Properties();
        // try-with-resources: the previous code leaked this FileInputStream.
        try (FileInputStream propStream = new FileInputStream(propFile)) {
            walaProperties.load(propStream);
        }
    } else {
        // main() already declares 'throws WalaException', so let a load failure
        // propagate instead of printing the trace and continuing with a null
        // walaProperties (which would NPE later in LocalFileReader.androidJar).
        walaProperties = WalaProperties.loadProperties();
    }

    // Load target file for analysis.
    String targetPath = Shell.args.get(CommandArguments.TARGET_ARG);
    long startTime = System.currentTimeMillis();
    START = startTime;
    /**
     * Below is the switch case for HybriDroid functions. One function of
     * the CommandLineOptionGroup must be one case in below.
     */
    // Build Control-flow Graph.
    if (Shell.args.has(CommandArguments.CFG_ARG)) {
        if (Shell.args.has(CommandArguments.ONLY_JS_ARG)) {
            // TODO: JS-only call-graph construction was prototyped here but is
            // currently disabled; this branch intentionally does nothing.
        } else {
            // Run the hybrid CFG analysis for its side effects and timing; the
            // returned call graph / pointer analysis pair is not consumed here.
            HybridCFGAnalysis cfgAnalysis = new HybridCFGAnalysis();
            cfgAnalysis.main(targetPath, LocalFileReader.androidJar(Shell.walaProperties).getPath());
        }
    } else {
        // TODO: support several functions
    }
    long endTime = System.currentTimeMillis();
    System.out.println("#Time: " + (endTime - startTime));
}

From source file:experimentos.LevenshteinExperimentCDQuID.java

/**
 * Sorted-neighborhood duplicate detection over a CD dataset using Levenshtein
 * similarity on the "artist" attribute; detected duplicate pairs are grouped
 * into a Double-Metaphone-keyed block index that is then queried.
 *
 * @param args unused
 */
public static void main(String[] args) throws Exception {

    // enables dynamic data-loading for file-based sorting
    GlobalConfig.getInstance().setInMemoryObjectThreshold(10000);

    // sets the CSV data source ("pk" is the record identifier column)
    CSVSource dataSource = new CSVSource("cd", new File("cd.csv"));
    dataSource.enableHeader();
    dataSource.addIdAttributes("pk");

    // Sub-key used to generate the sorting key: the "artist" value with
    // vowels stripped out.
    TextBasedSubkey artistSubkey = new TextBasedSubkey("artist");
    artistSubkey.setIgnoredCharactersRegEx(TextBasedSubkey.NO_VOWELS_REGEX);

    DocumentFrequencyPreprocessor dfPreprocessor = new DocumentFrequencyPreprocessor("artist");

    // the key generator uses sub-key selectors to generate a key for each object
    SortingKey sortingKey = new SortingKey();
    sortingKey.addSubkey(artistSubkey);

    // Sorted-neighborhood method with a window of 30 records.
    Algorithm algorithm = new SortedNeighborhoodMethod(sortingKey, 30);
    algorithm.addPreprocessor(dfPreprocessor);

    // enable in-memory storing
    algorithm.enableInMemoryProcessing();

    // adds the "data" to the algorithm
    algorithm.addDataSource(dataSource);

    // Levenshtein similarity on "artist"; pairs above 0.8 count as duplicates.
    // (Gold-standard evaluation and TF-IDF similarity were previously
    // prototyped here and have been removed as dead code.)
    SimilarityFunction similarityFunction = new LevenshteinDistanceFunction("artist");

    long start = System.currentTimeMillis();

    // counts the generated object pairs
    int cnt = 0;

    int dupCnt = 0;
    int nondupCnt = 0;

    BlockIndex bi = new BlockIndex();
    DoubleMetaphone db = new DoubleMetaphone();
    for (DuDeObjectPair pair : algorithm) {
        if (similarityFunction.getSimilarity(pair) > 0.8) {
            ++dupCnt;
            String pk1 = pair.getFirstElement().getAttributeValue("pk").toString();
            String pk2 = pair.getSecondElement().getAttributeValue("pk").toString();

            // Renamed from title1/title2: these hold the "artist" attribute.
            String artist1 = pair.getFirstElement().getAttributeValue("artist").toString();
            String artist2 = pair.getSecondElement().getAttributeValue("artist").toString();

            // Double-Metaphone phonetic encodings serve as block keys.
            String keyBlockpair1 = db.encode(artist1);
            String keyBlockpair2 = db.encode(artist2);

            // Both members of a duplicate pair are inserted under their own
            // phonetic key, with cluster id 0.
            Vertice v1 = new Vertice(pk1, "cd", 0);
            Vertice v2 = new Vertice(pk2, "cd", 0);

            bi.insertVertice(keyBlockpair1, v1);
            bi.insertVertice(keyBlockpair2, v2);
        } else {
            ++nondupCnt;
        }
        ++cnt;
    }
    algorithm.cleanUp();

    // print statistics
    System.err.println(
            dupCnt + " duplicates out of " + cnt + " pairs detected in " + (System.currentTimeMillis() - start)
                    + " ms  " + bi.getNumeroElementos() + "  " + bi.getNumeroBlocos());
    QueryExperimento query = new QueryExperimento(bi);
    query.query();

}

From source file:edu.umn.cs.spatialHadoop.operations.Sampler2.java

/**
 * @param args//w  w  w. ja v  a  2  s.  c o  m
 * @throws ClassNotFoundException 
 * @throws InterruptedException 
 * @throws IOException 
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    OperationsParams params = new OperationsParams(new GenericOptionsParser(args), false);

    if (!params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }

    Path[] input = params.getInputPaths();
    Path output = params.getOutputPath();

    long t1 = System.currentTimeMillis();
    sampleMapReduce(input, output, params);
    long t2 = System.currentTimeMillis();

    System.out.println("Total time for sampling " + (t2 - t1) + " millis");
}

From source file:it.tizianofagni.sparkboost.BoostClassifierExe.java

/**
 * Command-line entry point: loads a boosting classifier model and classifies
 * the documents in a LibSVM input file using Spark.
 *
 * <p>Usage: {@code [OPTIONS] <inputFile> <inputModel> <outputFile>}.
 */
public static void main(String[] args) {

    Options options = new Options();
    options.addOption("b", "binaryProblem", false,
            "Indicate if the input dataset contains a binary problem and not a multilabel one");
    options.addOption("z", "labels0based", false,
            "Indicate if the labels IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] included");
    options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark");
    options.addOption("w", "windowsLocalModeFix", true,
            "Set the directory containing the winutils.exe command");
    options.addOption("p", "parallelismDegree", true,
            "Set the parallelism degree (default: number of available cores in the Spark runtime");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    String[] remainingArgs = null;
    try {
        cmd = parser.parse(options, args);
        remainingArgs = cmd.getArgs();
        if (remainingArgs.length != 3)
            throw new ParseException("You need to specify all mandatory parameters");
    } catch (ParseException e) {
        System.out.println("Parsing failed.  Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                BoostClassifierExe.class.getSimpleName() + " [OPTIONS] <inputFile> <inputModel> <outputFile>",
                options);
        System.exit(-1);
    }

    // Direct boolean assignment instead of if-guarded flag flips.
    boolean binaryProblem = cmd.hasOption("b");
    boolean labels0Based = cmd.hasOption("z");
    boolean enablingSparkLogging = cmd.hasOption("l");

    // Windows local-mode fix: point Hadoop at the winutils.exe directory.
    if (cmd.hasOption("w")) {
        System.setProperty("hadoop.home.dir", cmd.getOptionValue("w"));
    }

    String inputFile = remainingArgs[0];
    String inputModel = remainingArgs[1];
    String outputFile = remainingArgs[2];

    long startTime = System.currentTimeMillis();

    // Disable Spark logging.
    if (!enablingSparkLogging) {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
    }

    // Create and configure Spark context; close it even on failure
    // (the previous code never closed the context).
    SparkConf conf = new SparkConf().setAppName("Spark MPBoost classifier");
    JavaSparkContext sc = new JavaSparkContext(conf);
    try {
        // Load boosting classifier from disk.
        BoostClassifier classifier = DataUtils.loadModel(sc, inputModel);

        // Get the parallelism degree.
        int parallelismDegree = sc.defaultParallelism();
        if (cmd.hasOption("p")) {
            parallelismDegree = Integer.parseInt(cmd.getOptionValue("p"));
        }

        // Classify documents available on specified input file.
        classifier.classifyLibSvm(sc, inputFile, parallelismDegree, labels0Based, binaryProblem, outputFile);
        long endTime = System.currentTimeMillis();
        System.out.println("Execution time: " + (endTime - startTime) + " milliseconds.");
    } finally {
        sc.close();
    }
}

From source file:com.datastax.brisk.demo.pricer.Pricer.java

public static void main(String[] arguments) throws Exception {
    // Aggregated stats; "old*" variables hold the previous sample so deltas
    // can be computed per reporting interval.
    long latency, oldLatency;
    int epoch, total, oldTotal, keyCount, oldKeyCount;

    try {
        session = new Session(arguments);
    } catch (IllegalArgumentException e) {
        // Bad command-line arguments: show help and quit.
        printHelpMessage();
        return;
    }

    session.createKeySpaces();

    int threadCount = session.getThreads();
    Thread[] consumers = new Thread[threadCount];
    PrintStream out = session.getOutputStream();

    // CSV header for the periodic progress report printed below.
    out.println("total,interval_op_rate,interval_key_rate,avg_latency,elapsed_time");

    int itemsPerThread = session.getKeysPerThread();
    int modulo = session.getNumKeys() % threadCount;

    // creating required type of the threads for the test
    for (int i = 0; i < threadCount; i++) {
        if (i == threadCount - 1)
            itemsPerThread += modulo; // last one is going to handle N + modulo items

        consumers[i] = new Consumer(itemsPerThread);
    }

    new Producer().start();

    // starting worker threads
    for (int i = 0; i < threadCount; i++) {
        consumers[i].start();
    }

    // initialization of the values
    boolean terminate = false;
    latency = 0;
    epoch = total = keyCount = 0;

    // The loop samples every 100 ms; a report is emitted once per
    // 'epochIntervals' samples (i.e. every progressInterval seconds).
    int interval = session.getProgressInterval();
    int epochIntervals = session.getProgressInterval() * 10;
    long testStartTime = System.currentTimeMillis();

    while (!terminate) {
        Thread.sleep(100);

        // Terminate once every consumer thread has finished.
        int alive = 0;
        for (Thread thread : consumers)
            if (thread.isAlive())
                alive++;

        if (alive == 0)
            terminate = true;

        epoch++;

        // Emit a report on the final pass or when the epoch window elapses.
        if (terminate || epoch > epochIntervals) {
            epoch = 0;

            // Snapshot previous totals before refreshing from the session.
            oldTotal = total;
            oldLatency = latency;
            oldKeyCount = keyCount;

            total = session.operations.get();
            keyCount = session.keys.get();
            latency = session.latency.get();

            // Per-interval deltas for rate and latency calculations.
            int opDelta = total - oldTotal;
            int keyDelta = keyCount - oldKeyCount;
            double latencyDelta = latency - oldLatency;

            long currentTimeInSeconds = (System.currentTimeMillis() - testStartTime) / 1000;
            // Avoid division by zero when no operations completed this interval.
            String formattedDelta = (opDelta > 0) ? Double.toString(latencyDelta / (opDelta * 1000)) : "NaN";

            out.println(String.format("%d,%d,%d,%s,%d", total, opDelta / interval, keyDelta / interval,
                    formattedDelta, currentTimeInSeconds));
        }
    }
}

From source file:UtilTimerDemo.java

/**
 * Demonstrates the difference between fixed-delay ({@code schedule}) and
 * fixed-rate ({@code scheduleAtFixedRate}) java.util.Timer scheduling.
 *
 * @param args unused
 */
public static void main(String[] args) {
    // Start and run a fixed-delay timer
    timer = new Timer();
    startTime = prevTime = System.currentTimeMillis();
    System.out.println("Fixed Delay Times");
    timer.schedule(new UtilTimerDemo(), DELAY, DELAY);

    // Sleep long enough to let the first timer finish
    try {
        Thread.sleep(DURATION * 2);
    } catch (InterruptedException e) {
        // Previously an empty catch(Exception): restore the interrupt status
        // instead of silently swallowing it.
        Thread.currentThread().interrupt();
    }

    // Start and run a fixed-rate timer
    timer = new Timer();
    startTime = prevTime = System.currentTimeMillis();
    System.out.println("Fixed Rate Times");
    timer.scheduleAtFixedRate(new UtilTimerDemo(), DELAY, DELAY);
}

From source file:eu.learnpad.simulator.mon.example.MyGlimpseProbe_BPMN_LearnPAd.java

/**
 * Starts a GLIMPSE probe that sends one simulated BPMN event per interval.
 * An optional first argument overrides the default interval (milliseconds).
 *
 * @param args optional: {@code args[0]} = sending interval in milliseconds
 * @throws UnknownHostException if the ActiveMQ broker host cannot be resolved
 */
public static void main(String[] args) throws UnknownHostException {

    DebugMessages.line();
    DebugMessages.println(TimeStamp.getCurrentTime(), MyGlimpseProbe_BPMN_LearnPAd.class.getName(),
            "\nONE EVENT CONTAINING BPMN SIMULATED PARAMETER WILL BE SENT EACH 10 SECONDS\n"
                    + "TO SPECIFY A DIFFERENT RATE, PROVIDE AN ARG IN MILLISECONDS\n"
                    + "USAGE: java -jar MyGlimpseProbe_BPMN.jar [amountOfMilliseconds]");
    DebugMessages.line();

    // Parse the optional interval argument once. The original code caught
    // IndexOutOfBoundsException (impossible here: the length is checked) while
    // the NumberFormatException that parseInt can actually throw went uncaught.
    if (args.length > 0) {
        try {
            int requestedInterval = Integer.parseInt(args[0]);
            if (requestedInterval > 0) {
                sendingInterval = requestedInterval;
            }
        } catch (NumberFormatException e) {
            // Non-numeric argument: keep the default sending interval.
        }
    }

    MyGlimpseProbe_BPMN_LearnPAd aGenericProbe = new MyGlimpseProbe_BPMN_LearnPAd(Manager
            .createProbeSettingsPropertiesObject("org.apache.activemq.jndi.ActiveMQInitialContextFactory",
                    "tcp://atlantis.isti.cnr.it:61616", "system", "manager", "TopicCF", "jms.probeTopic", false,
                    "probeName", "probeTopic"));

    DebugMessages.println(TimeStamp.getCurrentTime(), MyGlimpseProbe_BPMN_LearnPAd.class.getName(),
            "Starting infinite loop");

    // Emit one simulated BPMN event per interval, forever.
    aGenericProbe.generateAndSendExample_GlimpseBaseEvents_StringPayload(
            "Activity_" + System.currentTimeMillis(), sendingInterval);
}