Example usage for java.lang Math ceil

List of usage examples for java.lang Math ceil

Introduction

On this page you can find example usages of java.lang.Math.ceil.

Prototype

public static double ceil(double a) 

Source Link

Document

Returns the smallest (closest to negative infinity) double value that is greater than or equal to the argument and is equal to a mathematical integer.

Usage

From source file:lineage2.gameserver.model.reward.RewardData.java

/**
 * Method roll./* w ww .  j av a 2  s . c o m*/
 * @param rate double
 * @return List<RewardItem>
 */
public List<RewardItem> roll(double rate) {
    double mult = Math.ceil(rate);
    List<RewardItem> ret = new ArrayList<>(1);
    RewardItem t = null;
    long count;
    for (int n = 0; n < mult; n++) {
        if (Rnd.get(RewardList.MAX_CHANCE) <= (_chance * Math.min(rate - n, 1.0))) {
            if (getMinDrop() >= getMaxDrop()) {
                count = getMinDrop();
            } else {
                count = Rnd.get(getMinDrop(), getMaxDrop());
            }
            if (t == null) {
                ret.add(t = new RewardItem(_item.getItemId()));
                t.count = count;
            } else {
                t.count = SafeMath.addAndLimit(t.count, count);
            }
        }
    }
    return ret;
}

From source file:com.myJava.util.Util.java

/**
 * Splits {@code data} into consecutive chunks of at most {@code chunkSize}
 * characters; the final chunk may be shorter.
 *
 * @param data the string to split (must not be {@code null})
 * @param chunkSize maximum length of each chunk; must be strictly positive
 * @return array of chunks, empty when {@code data} is empty
 * @throws IllegalArgumentException if {@code chunkSize <= 0} (the original
 *         silently produced a division-by-zero-derived bogus size)
 */
public static String[] split(String data, int chunkSize) {
    if (chunkSize <= 0) {
        throw new IllegalArgumentException("chunkSize must be > 0: " + chunkSize);
    }
    // ceil(data.length() / chunkSize) using integer arithmetic only.
    int size = (data.length() + chunkSize - 1) / chunkSize;
    String[] ret = new String[size];

    for (int i = 0; i < size; i++) {
        int start = chunkSize * i;
        ret[i] = data.substring(start, Math.min(start + chunkSize, data.length()));
    }

    return ret;
}

From source file:statistic.ca.gui.DiagramContainer.java

/**
 * Adds a diagram panel under the given title and rebuilds the grid layout
 * so that diagrams are displayed two per row.
 *
 * @param title display title, used as the key into the diagram mapping
 * @param d the diagram panel to add
 */
public void addDiagram(String title, JPanel d) {
    diagramList.add(new DiagramTupel(title, d));

    // Rebuild the title -> position mapping in a single pass; the previous
    // indexOf-per-element rebuild was O(n^2).
    diagramMapping.clear();
    for (int i = 0; i < diagramList.size(); i++) {
        diagramMapping.put(diagramList.get(i).getTitle(), i);
    }

    super.removeAll();

    // Two columns; rows = ceil(n / 2) via integer arithmetic.
    int rows = (diagramList.size() + 1) / 2;
    setLayout(new GridLayout(rows, 2));
    setMinimumSize(new Dimension(parent.getWidth() - 20, rows * 300));

    for (DiagramTupel dt : diagramList) {
        add(dt.getDiagram());
        dt.getDiagram().addMouseListener(parent.new diagramClick());
    }

    parent.getContentD().validate();
    repaint();
    validate();
}

From source file:com.ibm.bi.dml.runtime.matrix.MMCJMR.java

/**
 * Configures and submits the MMCJ (aggregate-binary matrix multiplication)
 * MapReduce job, sizes its reducer-side block cache, and returns the job
 * outcome together with the statistics of the single output matrix.
 *
 * @param inst the MR job instruction (used for trace logging only)
 * @param inputs input file paths
 * @param inputInfos input format descriptors, one per input
 * @param rlens row counts of the inputs
 * @param clens column counts of the inputs
 * @param brlens rows per block, per input
 * @param bclens columns per block, per input
 * @param instructionsInMapper instructions executed map-side
 * @param aggInstructionsInReducer aggregation instructions executed reduce-side
 * @param aggBinInstrction the single aggregate-binary (matrix multiply) instruction
 * @param numReducers number of reducers
 * @param replication replication factor for the output
 * @param output output file path
 * @param outputinfo output format descriptor
 * @return job status plus the characteristics of the single output matrix
 * @throws Exception if job configuration or execution fails
 */
public static JobReturn runJob(MRJobInstruction inst, String[] inputs, InputInfo[] inputInfos, long[] rlens,
        long[] clens, int[] brlens, int[] bclens, String instructionsInMapper, String aggInstructionsInReducer,
        String aggBinInstrction, int numReducers, int replication, String output, OutputInfo outputinfo)
        throws Exception {
    JobConf job = new JobConf(MMCJMR.class);

    // TODO: check w/ yuanyuan. This job always runs in blocked mode, and hence derivation is not necessary.
    boolean inBlockRepresentation = MRJobConfiguration.deriveRepresentation(inputInfos);

    // by default, assume that dimensions of MMCJ's output are known at compile time
    byte resultDimsUnknown = (byte) 0;
    MatrixCharacteristics[] stats = commonSetup(job, inBlockRepresentation, inputs, inputInfos, rlens, clens,
            brlens, bclens, instructionsInMapper, aggInstructionsInReducer, aggBinInstrction, numReducers,
            replication, resultDimsUnknown, output, outputinfo);

    // Print the complete instruction
    if (LOG.isTraceEnabled())
        inst.printCompleteMRJobInstruction(stats);

    // Update resultDimsUnknown based on computed "stats"
    // There is always a single output
    if (stats[0].getRows() == -1 || stats[0].getCols() == -1) {
        resultDimsUnknown = (byte) 1;

        // if the dimensions are unknown, then setup done in commonSetup() must be updated
        byte[] resultIndexes = new byte[] {
                MRInstructionParser.parseSingleInstruction(aggBinInstrction).output };
        byte[] resultDimsUnknown_Array = new byte[] { resultDimsUnknown };
        //set up the multiple output files, and their format information
        MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, resultDimsUnknown_Array,
                new String[] { output }, new OutputInfo[] { outputinfo }, inBlockRepresentation);
    }

    AggregateBinaryInstruction ins = (AggregateBinaryInstruction) MRInstructionParser
            .parseSingleInstruction(aggBinInstrction);
    MatrixCharacteristics dim1 = MRJobConfiguration.getMatrixCharactristicsForBinAgg(job, ins.input1);
    MatrixCharacteristics dim2 = MRJobConfiguration.getMatrixCharactristicsForBinAgg(job, ins.input2);

    // clamp block sizes to the actual matrix dimensions so that tiny
    // matrices do not over-estimate the per-block memory footprint below
    if (dim1.getRowsPerBlock() > dim1.getRows())
        dim1.setRowsPerBlock((int) dim1.getRows());
    if (dim1.getColsPerBlock() > dim1.getCols())
        dim1.setColsPerBlock((int) dim1.getCols());
    if (dim2.getRowsPerBlock() > dim2.getRows())
        dim2.setRowsPerBlock((int) dim2.getRows());
    if (dim2.getColsPerBlock() > dim2.getCols())
        dim2.setColsPerBlock((int) dim2.getCols());

    // estimated serialized block sizes: fixed overhead plus 8 bytes per cell
    // (NOTE(review): the 77/20/32-byte constants look like serialization
    // header sizes — confirm against the block serializer before changing)
    long blockSize1 = 77 + 8 * dim1.getRowsPerBlock() * dim1.getColsPerBlock();
    long blockSize2 = 77 + 8 * dim2.getRowsPerBlock() * dim2.getColsPerBlock();
    long blockSizeResult = 77 + 8 * dim1.getRowsPerBlock() * dim2.getColsPerBlock();

    long cacheSize = -1;
    //cache the first result
    if (dim1.getRows() < dim2.getCols()) {
        long numBlocks = (long) Math.ceil((double) dim1.getRows() / (double) dim1.getRowsPerBlock());
        cacheSize = numBlocks * (20 + blockSize1) + 32;
    } else //cache the second result
    {
        long numBlocks = (long) Math.ceil((double) dim2.getCols() / (double) dim2.getColsPerBlock());
        cacheSize = numBlocks * (20 + blockSize2) + 32;
    }
    //add known memory consumption (will be substracted from output buffer)
    cacheSize += 2 * Math.max(blockSize1, blockSize2) //the cached key-value pair  (plus input instance)
            + blockSizeResult //the cached single result
            + MRJobConfiguration.getMiscMemRequired(job); //misc memory requirement by hadoop
    MRJobConfiguration.setMMCJCacheSize(job, (int) cacheSize);

    //set unique working dir
    MRJobConfiguration.setUniqueWorkingDir(job);

    //run mmcj job
    RunningJob runjob = JobClient.runJob(job);

    /* Process different counters */

    // NOTE: MMCJ job always has only a single output. 
    // Hence, no need to scan resultIndexes[] like other jobs

    int outputIndex = 0;
    Byte outputMatrixID = MRInstructionParser.parseSingleInstruction(aggBinInstrction).output;

    Group group = runjob.getCounters().getGroup(MRJobConfiguration.NUM_NONZERO_CELLS);

    // number of non-zeros
    stats[outputIndex].setNonZeros(group.getCounter(Byte.toString(outputMatrixID)));

    return new JobReturn(stats[outputIndex], outputinfo, runjob.isSuccessful());
}

From source file:com.insthub.O2OMobile.Model.PublishedOrderListModel.java

/**
 * Requests the next page of published-but-unfinished orders from the server
 * and appends the results to {@code publicUnfinishedOrderList}.
 * No-op if a request for this endpoint is already in flight.
 */
public void fetchNextUnfinished() {
    orderlistpublishedRequest request = new orderlistpublishedRequest();

    request.sid = SESSION.getInstance().sid;
    request.uid = SESSION.getInstance().uid;
    request.count = NUMPERPAGE;
    request.ver = O2OMobileAppConst.VERSION_CODE;
    // Next page number: pages already fetched (ceil of items / page size) plus one.
    // (Fixed: the original had a stray empty statement ";" after this line.)
    request.by_no = (int) Math.ceil(publicUnfinishedOrderList.size() * 1.0 / NUMPERPAGE) + 1;

    request.published_order = ENUM_PUBLISHED_ORDER_STATE.PUBLISHED_ORDER_UNDONE.value();

    BeeCallback<JSONObject> cb = new BeeCallback<JSONObject>() {

        @Override
        public void callback(String url, JSONObject jo, AjaxStatus status) {
            try {
                PublishedOrderListModel.this.callback(this, url, jo, status);
                if (null != jo) {
                    orderlistpublishedResponse response = new orderlistpublishedResponse();
                    response.fromJson(jo);

                    if (response.succeed == 1) {
                        publicUnfinishedOrderList.addAll(response.orders);
                        PublishedOrderListModel.this.OnMessageResponse(url, jo, status);
                    } else {
                        PublishedOrderListModel.this.callback(url, response.error_code, response.error_desc);
                    }
                } else {
                    PublishedOrderListModel.this.OnMessageResponse(url, jo, status);
                }

            } catch (JSONException ignored) {
                // Best-effort: a malformed server response drops this page silently.
            }
        }
    };

    Map<String, Object> params = new HashMap<String, Object>();
    try {
        JSONObject requestJson = request.toJson();
        // Strip by_id since this request paginates by page number (by_no)
        // — NOTE(review): confirm against the server API contract.
        requestJson.remove("by_id");
        params.put("json", requestJson.toString());

    } catch (JSONException ignored) {
        // Serialization failure: the request is simply sent without a json param.
    }
    if (isSendingMessage(ApiInterface.ORDERLIST_PUBLISHED)) {
        return;
    }
    cb.url(ApiInterface.ORDERLIST_PUBLISHED).type(JSONObject.class).params(params);
    ajax(cb);
}

From source file:com.github.pitzcarraldo.dissimilar.Dissimilar.java

/**
 * Calculate the SSIM; see http://en.wikipedia.org/wiki/Structural_similarity
 * @param pOne array of integer pixel values for first image
 * @param pTwo array of integer pixel values for second image
 * @param pWidth width of the two images
 * @param pHeight height of the two images
 * @param pGreyscale if the images are greyscale
 * @param pHeatMapFilename filename for the ssim heatmap image to be saved to (png)
 * @param pMin list to hold return value for ssim-minimum
 * @param pVariance list to hold return value for ssim-variance
 * @return mean SSIM over all windows, or -1 if the images are not comparable
 */
public static double calcSSIM(final int[] pOne, final int[] pTwo, final int pWidth, final int pHeight,
        final boolean pGreyscale, final String pHeatMapFilename, List<Double> pMin, List<Double> pVariance) {

    if (!checkPair(pOne, pTwo))
        return -1;

    double[] lumaOne = null;
    double[] lumaTwo = null;

    //if the image is greyscale then don't extract the luma
    if (pGreyscale) {
        System.out.println("=> Greyscale");
        lumaOne = new double[pOne.length];
        lumaTwo = new double[pTwo.length];
        for (int i = 0; i < lumaOne.length; i++) {
            // all rgb channels hold the same value, so the low byte suffices
            // (removed the no-op ">> 0" shifts from the original)
            lumaOne[i] = pOne[i] & 0xFF;
            lumaTwo[i] = pTwo[i] & 0xFF;
        }
    } else {
        lumaOne = calcLuma(pOne);
        lumaTwo = calcLuma(pTwo);
    }

    final int windowSize = SSIMWINDOWSIZE;

    // number of windows in each dimension, rounding up for partial windows
    final int windowsH = (int) Math.ceil((double) pHeight / windowSize);
    final int windowsW = (int) Math.ceil((double) pWidth / windowSize);

    double[] mssim = new double[windowsH * windowsW];

    double mean = 0;
    double min = 1;

    for (int height = 0; height < windowsH; height++) {
        for (int width = 0; width < windowsW; width++) {
            final int window = (height * windowsW) + width;
            mssim[window] = calcSSIMOnWindow(lumaOne, lumaTwo, pWidth, pHeight, windowSize, width * windowSize,
                    height * windowSize);
            mean += mssim[window];
            if (mssim[window] < min) {
                min = mssim[window];
            }
        }
    }

    final double variance = new Variance().evaluate(mssim);

    mean /= (windowsH * windowsW);

    //if(variance>0.001) System.out.println("warning: high variance");

    if (null != pHeatMapFilename) {
        dumpSSIMHeatMap(mssim, windowsH, windowsW, pHeatMapFilename);
    }

    // Double.valueOf instead of the deprecated "new Double(...)" constructor
    if (null != pMin) {
        pMin.add(0, Double.valueOf(min));
    }
    if (null != pVariance) {
        pVariance.add(0, Double.valueOf(variance));
    }

    return mean;
}

From source file:gov.utah.dts.det.ccl.model.view.BasicFacilityInformation.java

/**
 * Returns the number of whole-or-partial days until the license expires,
 * counted from the start of today; 0 if there is no expiration date or it
 * has already passed.
 *
 * @return days remaining, rounded up; never negative
 */
public int getDaysToExpiration() {
    if (licenseExpirationDate == null) {
        return 0;
    }
    Date now = DateUtils.truncate(new Date(), Calendar.DATE);
    if (licenseExpirationDate.compareTo(now) <= 0) {
        return 0;
    }
    // BUG FIX: the original divided two longs (integer division) and THEN
    // applied Math.ceil, so the ceil was a no-op and partial days were lost.
    // Dividing in floating point first makes the round-up effective.
    long diffMillis = licenseExpirationDate.getTime() - now.getTime();
    return (int) Math.ceil(diffMillis / (1000.0 * 60 * 60 * 24));
}

From source file:amie.keys.CSAKey.java

/**
 * Parses the command line arguments and then returns an object that maps each argument
 * to its value.
 *
 * @param args raw command-line arguments; trailing non-option arguments are TSV KB files
 * @return a triple of (mining assistant over the loaded KB, resolved minimum support,
 *         path to the non-keys file)
 * @throws IOException if the KB files cannot be read
 */
public static Triple<MiningAssistant, Float, String> parseArguments(String[] args) throws IOException {
    HelpFormatter formatter = new HelpFormatter();
    float inputSupport = defaultMinSupport;

    // create the command line parser
    CommandLineParser parser = new PosixParser();
    // create the Options
    Options options = new Options();
    CommandLine cli = null;

    Option supportOpt = OptionBuilder.withArgName("min-support").hasArg()
            .withDescription(
                    "Minimum support. Default: 5 positive examples. If the option percentage is enabled, "
                            + "the value is considered as the percentage of entities covered "
                            + "by the a conditional key.")
            .create("mins");

    Option ratioOpt = OptionBuilder.withArgName("percentage")
            .withDescription("Interpret the support as a percentage " + "Default: false").create("p");

    Option nonKeysOpt = OptionBuilder.withArgName("non-keys").withDescription("Path the to the non-keys file.")
            .hasArg().isRequired().create("nk");

    Option minLoadOpt = OptionBuilder.withArgName("").withDescription("Mininum load").hasArg().create("minl");

    Option maxLoadOpt = OptionBuilder.withArgName("").withDescription("Maximum load").hasArg().create("maxl");

    options.addOption(supportOpt);
    options.addOption(ratioOpt);
    options.addOption(nonKeysOpt);
    options.addOption(minLoadOpt);
    options.addOption(maxLoadOpt);

    // NOTE: parse failures print usage and terminate the whole JVM
    try {
        cli = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println("Unexpected exception: " + e.getMessage());
        formatter.printHelp("CombinationsExploration [OPTIONS] <TSV FILES>", options);
        System.exit(1);
    }

    String[] fileNameArgs = cli.getArgs();
    // Use kb = amie.data.U.loadFiles(args, 1) to ignore the first
    // argument. This first argument could be the list of non-keys
    //kb = amie.data.U.loadFiles(fileNameArgs);        
    KB kb = AlignKBs.loadFiles(fileNameArgs, 0);
    MiningAssistant miningHelper = new DefaultMiningAssistant(kb);

    if (cli.hasOption("mins")) {
        try {
            inputSupport = Float.parseFloat(cli.getOptionValue("mins"));
        } catch (NumberFormatException e) {
            System.out.println("Unexpected exception: " + e.getMessage());
            formatter.printHelp("CombinationsExploration [OPTIONS] <TSV FILES>", options);
            System.exit(1);
        }
    }

    // with -p, the support value is reinterpreted as a percentage of the
    // number of subject entities, rounded up to a whole count
    if (cli.hasOption("p")) {
        System.out.println("Support interpreted as a " + inputSupport + "% of the number of entities.");
        long numberOfInstances = kb.size(Column.Subject);
        System.out.println(numberOfInstances + " instances found as subjects in the KB.");
        inputSupport = (int) Math.ceil(numberOfInstances * inputSupport / 100.0);
    }

    if (cli.hasOption("minl")) {
        minLoad = Integer.parseInt(cli.getOptionValue("minl"));
        System.out.println("minLoad=" + minLoad);
    }

    if (cli.hasOption("maxl")) {
        maxLoad = Integer.parseInt(cli.getOptionValue("maxl"));
        System.out.println("maxLoad=" + maxLoad);
    }

    System.out.println("Using minimum support " + inputSupport);

    return new Triple<>(miningHelper, inputSupport, cli.getOptionValue("nk"));
}

From source file:com.wandisco.s3hdfs.rewrite.filter.S3HdfsTestUtil.java

/**
 * Verifies byte-for-byte that the content served for an S3 object matches
 * the backing HDFS file over the range [rangeStart, rangeEnd).
 *
 * @param objectStream stream of the S3 object content, positioned at rangeStart
 * @param path HDFS path of the original file
 * @param rangeStart inclusive start offset of the compared range
 * @param rangeEnd exclusive end offset of the compared range
 * @throws IOException on HDFS access failure
 * @throws ServiceException on S3 service failure
 */
void compareS3ObjectWithHdfsFile(InputStream objectStream, Path path, long rangeStart, long rangeEnd)
        throws IOException, ServiceException {
    FileStatus fsStat = hdfs.listStatus(path)[0];
    int expectedSize = (int) (rangeEnd - rangeStart);
    int blockSize = (int) fsStat.getBlockSize();
    int blocks = (int) Math.ceil((double) expectedSize / (double) blockSize);

    int size = 0;
    // try-with-resources: the original leaked the HDFS stream whenever an
    // assertion failed mid-comparison.
    try (DataInputStream origStream = hdfs.open(path)) {
        // JUnit convention: assertEquals(expected, actual).
        assertEquals(rangeStart, origStream.skip(rangeStart));

        for (int i = 0; i < expectedSize; i++) {
            int byteA = origStream.read();
            int byteB = objectStream.read();
            if (byteA == -1 || byteB == -1)
                fail("Premature end of stream."); // fixed typo: "steam"
            if (byteA != byteB) {
                fail("ERROR: Byte A: " + byteA + " Byte B: " + byteB + ", at offset: " + size);
            }
            size++;
        }
    }
    if (size != expectedSize) {
        fail("Incorrect size: " + size + ", expected: " + expectedSize);
    }

    System.out.println("File: " + path + " has " + blocks + " blocks.");
    System.out.println("File: " + path + " has " + blockSize + " blockSize.");
    System.out.println("File: " + path + " has " + expectedSize + " length.");

    System.out.println("SUCCESS! The files match up!");
}