Example usage for org.apache.commons.io FileUtils ONE_MB

List of usage examples for org.apache.commons.io FileUtils ONE_MB

Introduction

This page shows example usage of org.apache.commons.io.FileUtils.ONE_MB, collected from the open-source projects listed below.

Prototype

public static final long ONE_MB


Documentation

The number of bytes in a megabyte (1,048,576 bytes).
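
The constant is most often used as a divisor or multiplier when converting between raw byte counts and megabytes, as the examples under Usage show. The following minimal, self-contained sketch illustrates this; the class name OneMbExample and the 5.5 MB sample value are illustrative only and do not come from any of the projects quoted below.

import org.apache.commons.io.FileUtils;

public class OneMbExample {
    public static void main(String[] args) {
        long bytes = 5 * FileUtils.ONE_MB + 512 * FileUtils.ONE_KB; // 5.5 MB expressed in bytes

        // Integer division yields whole megabytes; cast to double for a fractional value.
        long wholeMb = bytes / FileUtils.ONE_MB;
        double exactMb = (double) bytes / FileUtils.ONE_MB;

        System.out.println(wholeMb + " MB");                   // prints "5 MB"
        System.out.println(String.format("%.1f MB", exactMb)); // prints "5.5 MB"

        // Commons IO also provides a ready-made formatter built on the same constants.
        System.out.println(FileUtils.byteCountToDisplaySize(bytes)); // prints "5 MB"
    }
}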

Usage

From source file:kaljurand_at_gmail_dot_com.diktofon.MyFileUtils.java

public static String getSizeAsString(long size) {
    String sizeAsString;
    if (size > FileUtils.ONE_MB) {
        sizeAsString = (long) (size / FileUtils.ONE_MB) + "MB";
    } else if (size > FileUtils.ONE_KB) {
        sizeAsString = (long) (size / FileUtils.ONE_KB) + "kB";
    } else {
        sizeAsString = size + "b";
    }
    if (size > NetSpeechApiUtils.MAX_AUDIO_FILE_LENGTH) {
        sizeAsString += " !!!";
    }
    return sizeAsString;
}

From source file:kaljurand_at_gmail_dot_com.diktofon.MyFileUtils.java

public static String getSizeAsStringExact(long size) {
    String sizeAsString;
    if (size > FileUtils.ONE_MB) {
        sizeAsString = String.format("%.1fMB", (float) size / FileUtils.ONE_MB);
    } else if (size > FileUtils.ONE_KB) {
        sizeAsString = String.format("%.1fkB", (float) size / FileUtils.ONE_KB);
    } else {
        sizeAsString = size + "b";
    }
    if (size > NetSpeechApiUtils.MAX_AUDIO_FILE_LENGTH) {
        sizeAsString += " !!!";
    }
    return sizeAsString;
}

From source file:net.pickapack.util.StorageUnitHelper.java

/**
 * Convert the specified display size to the byte count.
 *
 * @param displaySize the display size
 * @return the byte count corresponding to the specified display size
 */
public static long displaySizeToByteCount(String displaySize) {
    String[] parts = displaySize.split(" ");
    if (parts.length == 2) {
        double scale = Double.parseDouble(parts[0]);
        String unit = parts[1];

        if (unit.equals("KB")) {
            return (long) (scale * FileUtils.ONE_KB);
        } else if (unit.equals("MB")) {
            return (long) (scale * FileUtils.ONE_MB);
        } else if (unit.equals("GB")) {
            return (long) (scale * FileUtils.ONE_GB);
        } else if (unit.equals("TB")) {
            return (long) (scale * FileUtils.ONE_TB);
        } else if (unit.equals("PB")) {
            return (long) (scale * FileUtils.ONE_PB);
        } else if (unit.equals("EB")) {
            return (long) (scale * FileUtils.ONE_EB);
        }
    }

    throw new IllegalArgumentException();
}
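
Given the parsing above, a call such as displaySizeToByteCount("2.5 MB") returns (long) (2.5 * FileUtils.ONE_MB) = 2,621,440 bytes, while an input without a space-separated unit (for example "2.5MB") falls through to the IllegalArgumentException.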

From source file:ee.ioc.phon.android.speak.Utils.java

/**
 * <p>Pretty-prints an integer value which expresses a size
 * of some data.</p>
 */
public static String getSizeAsString(int size) {
    if (size > FileUtils.ONE_MB) {
        return String.format("%.1fMB", (float) size / FileUtils.ONE_MB);
    }

    if (size > FileUtils.ONE_KB) {
        return String.format("%.1fkB", (float) size / FileUtils.ONE_KB);
    }
    return size + "b";
}
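
For example, with this method a 2,500,000-byte input is reported as "2.4MB" (2,500,000 / 1,048,576 ≈ 2.38, rendered with one decimal place by %.1f).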

From source file:com.github.shredder121.gh_event_api.handler.AbstractHandlerTest.java

private static GitHub getGitHub() throws IOException {
    OkHttpClient client = new OkHttpClient();
    client.setCache(new Cache(Paths.get(".", ".cache").toFile(), FileUtils.ONE_MB * 10));
    return new GitHubBuilder().withConnector(new RawGitOkHttpConnector(new OkUrlFactory(client))).build();
}

From source file:com.linkedin.drelephant.spark.TestSparkAggregatedMetrics.java

@Test
public void TestValidExecutorsWithNoEnvironmentData() {
    ApplicationType appType = new ApplicationType("SPARK");
    AggregatorConfigurationData conf = new AggregatorConfigurationData(
            "org.apache.spark.SparkMetricsAggregator", appType, null);
    SparkMetricsAggregator metrics = new SparkMetricsAggregator(conf);

    MockSparkApplicationData appData = new MockSparkApplicationData();
    appData.getExecutorData().setExecutorInfo("1",
            mockExecutorInfo(100 * FileUtils.ONE_MB, 60 * FileUtils.ONE_MB, 1000));
    appData.getExecutorData().setExecutorInfo("2",
            mockExecutorInfo(100 * FileUtils.ONE_MB, 60 * FileUtils.ONE_MB, 1000));

    metrics.aggregate(appData);

    Assert.assertEquals(0L, metrics.getResult().getResourceUsed());
    Assert.assertEquals(20L, metrics.getResult().getResourceWasted());
    Assert.assertEquals(0L, metrics.getResult().getTotalDelay());
}

From source file:fr.paris.lutece.plugins.plu.utils.PluUtils.java

/**
 * Transform a byte size to a readable size including units
 * @param size size in bytes
 * @return size in string
 */
public static String formatSize(Long size) {
    String displaySize;

    if (size / FileUtils.ONE_GB > 0) {
        displaySize = String.valueOf(new BigDecimal(size).divide(BD_ONE_GO, BigDecimal.ROUND_CEILING)) + " GO";
    } else if (size / FileUtils.ONE_MB > 0) {
        displaySize = String.valueOf(new BigDecimal(size).divide(BD_ONE_MO, BigDecimal.ROUND_CEILING)) + " MO";
    } else if (size / FileUtils.ONE_KB > 0) {
        displaySize = String.valueOf(new BigDecimal(size).divide(BD_ONE_KO, BigDecimal.ROUND_CEILING)) + " KO";
    } else {
        displaySize = String.valueOf(size) + " octets";
    }
    return displaySize;
}
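
The BD_ONE_GO, BD_ONE_MO and BD_ONE_KO divisors are BigDecimal constants defined elsewhere in the class, presumably BigDecimal counterparts of FileUtils.ONE_GB, ONE_MB and ONE_KB; the FileUtils constants themselves only select the unit via integer division. The labels GO, MO, KO and octets are the French abbreviations for GB, MB, KB and bytes.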

From source file:com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristicTest.java

private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
    MapReduceCounterData jobCounter = new MapReduceCounterData();
    MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS];

    MapReduceCounterData counter = new MapReduceCounterData();
    counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

    Properties p = new Properties();
    p.setProperty(MapperMemoryHeuristic.MAPPER_MEMORY_CONF, Long.toString(containerMemMB));

    int i = 0;
    for (; i < NUMTASKS; i++) {
        mappers[i] = new MapReduceTaskData(counter, new long[5]);
    }

    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter)
            .setMapperData(mappers);
    data.setJobConf(p);
    HeuristicResult result = _heuristic.apply(data);
    return result.getSeverity();
}
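
As in the reducer and Tez variants below, taskAvgMemMB * FileUtils.ONE_MB converts the test's megabyte argument into the byte value expected by the PHYSICAL_MEMORY_BYTES counter.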

From source file:com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristicTest.java

private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
    MapReduceCounterData jobCounter = new MapReduceCounterData();
    MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS];

    MapReduceCounterData counter = new MapReduceCounterData();
    counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

    Properties p = new Properties();
    p.setProperty(ReducerMemoryHeuristic.REDUCER_MEMORY_CONF, Long.toString(containerMemMB));

    int i = 0;
    for (; i < NUMTASKS; i++) {
        reducers[i] = new MapReduceTaskData(counter, new long[5]);
    }

    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter)
            .setReducerData(reducers);
    data.setJobConf(p);
    HeuristicResult result = _heuristic.apply(data);
    return result.getSeverity();
}

From source file:com.linkedin.drelephant.tez.heuristics.MapperMemoryHeuristicTest.java

private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
    TezCounterData jobCounter = new TezCounterData();
    TezTaskData[] mappers = new TezTaskData[NUMTASKS + 1];

    TezCounterData counter = new TezCounterData();
    counter.set(TezCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

    Properties p = new Properties();
    p.setProperty(MapperMemoryHeuristic.MAPPER_MEMORY_CONF, Long.toString(containerMemMB));

    int i = 0;
    for (; i < NUMTASKS; i++) {
        mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);
        mappers[i].setTime(new long[5]);
        mappers[i].setCounter(counter);
    }
    // Non-sampled task, which does not contain time and counter data
    mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);

    TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setMapTaskData(mappers);
    data.setConf(p);
    HeuristicResult result = _heuristic.apply(data);
    return result.getSeverity();
}