List of usage examples for com.google.common.primitives Ints max
public static int max(int... array)
From source file:com.metamx.druid.kv.VSizeIndexedInts.java
/**
 * Builds a {@link VSizeIndexedInts} from {@code array}, delegating to the two-argument
 * overload with the array's maximum value as the sizing hint.
 * NOTE(review): {@code Ints.max} throws {@code IllegalArgumentException} on an empty
 * array — presumably callers never pass one; confirm at call sites.
 */
public static VSizeIndexedInts fromArray(int[] array) {
    final int maxValue = Ints.max(array);
    return fromArray(array, maxValue);
}
From source file:org.apache.druid.segment.data.VSizeColumnarInts.java
public static VSizeColumnarInts fromArray(int[] array) { return fromArray(array, Ints.max(array)); }
From source file:org.apache.flink.api.java.io.CsvInputFormat.java
protected static boolean[] toBooleanMask(int[] sourceFieldIndices) { Preconditions.checkNotNull(sourceFieldIndices); for (int i : sourceFieldIndices) { if (i < 0) { throw new IllegalArgumentException("Field indices must not be smaller than zero."); }//from w w w .j a v a 2s . c om } boolean[] includedMask = new boolean[Ints.max(sourceFieldIndices) + 1]; // check if we support parsers for these types for (int i = 0; i < sourceFieldIndices.length; i++) { includedMask[sourceFieldIndices[i]] = true; } return includedMask; }
From source file:org.nla.tarotdroid.biz.MapPlayersScores.java
/** * Returns the maximum score./*from w ww. j av a 2 s .c o m*/ * @return the maximum score. */ public int getMaxScore() { return Ints.max(Ints.toArray(this.mapPlayersScores.values())); }
From source file:edu.mit.streamjit.impl.compiler2.CompositionAllocationStrategy.java
/**
 * Allocates this group's iterations across the given cores according to the
 * configuration's composition parameter. Stateful groups are pinned to a single
 * core instead of being split.
 * NOTE(review): assumes {@code iterations} is a closed-open range (asserted below).
 */
@Override
public void allocateGroup(ActorGroup group, Range<Integer> iterations, List<Core> cores, Configuration config) {
    // A stateful group cannot be split: place all iterations on one core, chosen by a
    // switch parameter keyed on the smallest stateful worker id in the group.
    if (group.isStateful()) {
        int minStatefulId = Integer.MAX_VALUE;
        for (Actor a : group.actors())
            if (a instanceof WorkerActor && ((WorkerActor) a).archetype().isStateful())
                minStatefulId = Math.min(minStatefulId, a.id());
        Configuration.SwitchParameter<Integer> param = config.getParameter("Group" + minStatefulId + "Core",
                Configuration.SwitchParameter.class, Integer.class);
        // Modulo keeps the configured core index valid for the current core count.
        cores.get(param.getValue() % cores.size()).allocate(group, iterations);
        return;
    }
    // Stateless group: split iterations by the per-core fractions in the composition parameter.
    Configuration.CompositionParameter param = config.getParameter("Group" + group.id() + "Cores",
            Configuration.CompositionParameter.class);
    assert iterations.lowerBoundType() == BoundType.CLOSED && iterations.upperBoundType() == BoundType.OPEN;
    int totalAvailable = iterations.upperEndpoint() - iterations.lowerEndpoint();
    int[] allocations = new int[cores.size()];
    int totalAllocated = 0;
    // Rounding each fraction independently may over- or under-shoot totalAvailable;
    // the two rebalancing passes below correct for that.
    for (int i = 0; i < param.getLength() && i < allocations.length; ++i) {
        int allocation = DoubleMath.roundToInt(param.getValue(i) * totalAvailable, RoundingMode.HALF_EVEN);
        allocations[i] = allocation;
        totalAllocated += allocation;
    }
    //If we allocated more than we have, remove from the cores with the least.
    //Need a loop here because we might not have enough on the least core.
    while (totalAllocated > totalAvailable) {
        // Seed with the index of the max (any valid index would do), then scan for the
        // smallest strictly-positive allocation.
        int least = Ints.indexOf(allocations, Ints.max(allocations));
        for (int i = 0; i < allocations.length; ++i)
            if (allocations[i] > 0 && allocations[i] < allocations[least]) least = i;
        int toRemove = Math.min(allocations[least], totalAllocated - totalAvailable);
        allocations[least] -= toRemove;
        totalAllocated -= toRemove;
    }
    //If we didn't allocate enough, allocate on the cores with the most.
    if (totalAllocated < totalAvailable) {
        // Seed with the index of the min, then scan for the largest allocation.
        int most = Ints.indexOf(allocations, Ints.min(allocations));
        for (int i = 0; i < allocations.length; ++i)
            if (allocations[i] > allocations[most]) most = i;
        allocations[most] += totalAvailable - totalAllocated;
        totalAllocated += totalAvailable - totalAllocated;
    }
    assert totalAllocated == totalAvailable : totalAllocated + " " + totalAvailable;
    // Hand out contiguous closed-open sub-ranges in core order.
    int lower = iterations.lowerEndpoint();
    for (int i = 0; i < allocations.length; ++i)
        if (allocations[i] > 0) {
            cores.get(i).allocate(group, Range.closedOpen(lower, lower + allocations[i]));
            lower += allocations[i];
        }
}
From source file:eu.stratosphere.api.common.io.GenericCsvInputFormat.java
protected void setFieldsGeneric(int[] sourceFieldIndices, Class<?>[] fieldTypes) { Preconditions.checkNotNull(sourceFieldIndices); Preconditions.checkNotNull(fieldTypes); Preconditions.checkArgument(sourceFieldIndices.length == fieldTypes.length, "Number of field indices and field types must match."); for (int i : sourceFieldIndices) { if (i < 0) { throw new IllegalArgumentException("Field indices must not be smaller than zero."); }/* w ww. jav a2 s .c o m*/ } int largestFieldIndex = Ints.max(sourceFieldIndices); this.fieldIncluded = new boolean[largestFieldIndex + 1]; ArrayList<Class<?>> types = new ArrayList<Class<?>>(); // check if we support parsers for these types for (int i = 0; i < fieldTypes.length; i++) { Class<?> type = fieldTypes[i]; if (type != null) { if (FieldParser.getParserForType(type) == null) { throw new IllegalArgumentException( "The type '" + type.getName() + "' is not supported for the CSV input format."); } types.add(type); fieldIncluded[sourceFieldIndices[i]] = true; } } Class<?>[] denseTypeArray = (Class<?>[]) types.toArray(new Class[types.size()]); this.fieldTypes = denseTypeArray; }
From source file:org.nla.tarotdroid.biz.GameSetScores.java
/** * @param player/*from w ww . ja v a2s .co m*/ * @return */ public int getMaxScoreEverForPlayer(final Player player) { if (player == null) { throw new IllegalArgumentException("player is null"); } if (this.gameScores.size() == 0) { return 0; } List<Integer> allPlayerScoreValues = newArrayList(); for (int i = 0; i < this.gameScores.size(); ++i) { allPlayerScoreValues.add(this.getResultsAtGameOfIndex(i).get(player)); } allPlayerScoreValues.add(0); return Ints.max(Ints.toArray(allPlayerScoreValues)); }
From source file:org.nla.tarotdroid.biz.GameSetScores.java
/** * Returns the maximum score of all games. * @return the maximum score of all games. *///from ww w.j a v a 2 s.c o m public int getMaxScoreEver() { if (this.gameScores.size() == 0) { return 0; } List<Integer> allScoreValues = newArrayList(); for (int i = 0; i < this.gameScores.size(); ++i) { allScoreValues.addAll(this.getResultsAtGameOfIndex(i).values()); } allScoreValues.add(0); return Ints.max(Ints.toArray(allScoreValues)); }
From source file:org.apache.flink.api.common.io.GenericCsvInputFormat.java
protected void setFieldsGeneric(int[] sourceFieldIndices, Class<?>[] fieldTypes) { Preconditions.checkNotNull(sourceFieldIndices); Preconditions.checkNotNull(fieldTypes); Preconditions.checkArgument(sourceFieldIndices.length == fieldTypes.length, "Number of field indices and field types must match."); for (int i : sourceFieldIndices) { if (i < 0) { throw new IllegalArgumentException("Field indices must not be smaller than zero."); }// w w w. j a v a 2s .c o m } int largestFieldIndex = Ints.max(sourceFieldIndices); this.fieldIncluded = new boolean[largestFieldIndex + 1]; ArrayList<Class<?>> types = new ArrayList<Class<?>>(); // check if we support parsers for these types for (int i = 0; i < fieldTypes.length; i++) { Class<?> type = fieldTypes[i]; if (type != null) { if (FieldParser.getParserForType(type) == null) { throw new IllegalArgumentException( "The type '" + type.getName() + "' is not supported for the CSV input format."); } types.add(type); fieldIncluded[sourceFieldIndices[i]] = true; } } this.fieldTypes = types.toArray(new Class<?>[types.size()]); }
From source file:org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.java
BucketAllocator(long availableSpace, int[] bucketSizes) throws BucketAllocatorException { this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes; Arrays.sort(this.bucketSizes); this.bigItemSize = Ints.max(this.bucketSizes); this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize; buckets = new Bucket[(int) (availableSpace / bucketCapacity)]; if (buckets.length < this.bucketSizes.length) throw new BucketAllocatorException("Bucket allocator size too small - must have room for at least " + this.bucketSizes.length + " buckets"); bucketSizeInfos = new BucketSizeInfo[this.bucketSizes.length]; for (int i = 0; i < this.bucketSizes.length; ++i) { bucketSizeInfos[i] = new BucketSizeInfo(i); }/*from ww w . ja va 2 s. c o m*/ for (int i = 0; i < buckets.length; ++i) { buckets[i] = new Bucket(bucketCapacity * i); bucketSizeInfos[i < this.bucketSizes.length ? i : this.bucketSizes.length - 1] .instantiateBucket(buckets[i]); } this.totalSize = ((long) buckets.length) * bucketCapacity; }