List of usage examples for `org.apache.hadoop.io.WritableComparator#get`:
public static WritableComparator get(Class<? extends WritableComparable> c)
From source file: com.explorys.apothecary.hadoop.mr.MapFileReader.java
License: Apache License
protected synchronized void open(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf) throws IOException { Path dir = new Path(dirName); Path dataFile = new Path(dir, MapFile.DATA_FILE_NAME); // open the data this.data = createDataFileReader(fs, dataFile, conf); this.firstPosition = data.getPosition(); if (comparator == null) this.comparator = WritableComparator.get(data.getKeyClass().asSubclass(WritableComparable.class)); else/*from w w w . jav a 2s . co m*/ this.comparator = comparator; }
From source file: com.foursquare.twofishes.io.MapFileConcurrentReader.java
License: Apache License
/**
 * Opens the map file for concurrent reading: a per-thread data-file reader
 * plus a single shared index reader.
 *
 * NOTE(review): the {@code comparator} parameter is never referenced in this
 * body — the key class's default comparator is always used; confirm whether
 * an explicit comparator was meant to take precedence.
 *
 * @param dir        map directory containing data and index files
 * @param comparator explicit key comparator (currently ignored — see note)
 * @param conf       configuration used to open the readers
 * @param options    extra reader options, also prepended to the index options
 * @throws IOException if the index reader cannot be opened
 */
protected synchronized void open(Path dir, WritableComparator comparator, final Configuration conf,
        final SequenceFile.Reader.Option... options) throws IOException {
    final Path dataFile = new Path(dir, MapFile.DATA_FILE_NAME);
    final Path indexFile = new Path(dir, MapFile.INDEX_FILE_NAME);
    // open the data: each thread lazily creates its own SequenceFile.Reader so
    // reads can proceed concurrently without sharing reader state
    this.data = new ThreadLocal<SequenceFile.Reader>() {
        protected SequenceFile.Reader initialValue() {
            try {
                SequenceFile.Reader r = createDataFileReader(dataFile, conf, options);
                LOG.info("opened new SequenceFile.Reader for " + dataFile);
                // track every per-thread reader so they can all be closed later
                synchronized (this) {
                    allDataFiles.add(r);
                }
                return r;
            } catch (IOException ioe) {
                // ThreadLocal.initialValue cannot throw a checked exception
                throw new RuntimeException(ioe);
            }
        }
    };
    this.firstPosition = data.get().getPosition();
    this.comparator = WritableComparator.get(data.get().getKeyClass().asSubclass(WritableComparable.class));
    // open the index (a single shared reader, unlike the per-thread data readers)
    SequenceFile.Reader.Option[] indexOptions = Options.prependOptions(options,
            SequenceFile.Reader.file(indexFile));
    this.index = new SequenceFile.Reader(conf, indexOptions);
}
From source file: crunch.MaxTemperature.java
License: Apache License
/**
 * Delegates to the comparator-taking overload, using IntPair's
 * registered default comparator.
 */
private void check(IntPair ip1, IntPair ip2, int c) throws IOException {
    WritableComparator cmp = WritableComparator.get(IntPair.class);
    check(cmp, ip1, ip2, c);
}
From source file: crunch.MaxTemperature.java
License: Apache License
@Test
public void comparator() throws IOException {
    // vv IntWritableTest-Comparator
    RawComparator<IntWritable> comparator = WritableComparator.get(IntWritable.class);
    // ^^ IntWritableTest-Comparator
    // vv IntWritableTest-ObjectComparison
    IntWritable w1 = new IntWritable(163);
    IntWritable w2 = new IntWritable(67);
    // 163 > 67, so comparing the deserialized objects yields a positive result.
    assertThat(comparator.compare(w1, w2), greaterThan(0));
    // ^^ IntWritableTest-ObjectComparison
    // vv IntWritableTest-BytesComparison
    // The same ordering must hold when comparing the raw serialized bytes.
    byte[] b1 = serialize(w1);
    byte[] b2 = serialize(w2);
    assertThat(comparator.compare(b1, 0, b1.length, b2, 0, b2.length), greaterThan(0));
    // ^^ IntWritableTest-BytesComparison
}
From source file: crunch.MaxTemperature.java
License: Apache License
/**
 * Delegates to the comparator-taking overload, using TextPair's
 * registered default comparator.
 */
private void check(TextPair tp1, TextPair tp2, int c) throws IOException {
    WritableComparator cmp = WritableComparator.get(TextPair.class);
    check(cmp, tp1, tp2, c);
}
From source file: eagle.log.entity.filter.TypedByteArrayComparator.java
License: Apache License
/** * <ol>/* w w w.j a v a 2 s .c o m*/ * <li>Try registered comparator</li> * <li>If not found, try all possible WritableComparator</li> * </ol> * * If not found finally, throw new IllegalArgumentException("unable to get comparator for class: "+type); * * @param type value type class * @return RawComparator */ public static RawComparator get(Class type) { RawComparator comparator = null; try { comparator = _typedClassComparator.get(type); } catch (ClassCastException ex) { // ignore } try { if (comparator == null) comparator = WritableComparator.get(type); } catch (ClassCastException ex) { // ignore } return comparator; }
From source file: edu.uci.ics.hyracks.dataflow.hadoop.HadoopReducerOperatorDescriptor.java
License: Apache License
/**
 * Builds the push runtime for the reducer: lazily resolves a key comparator
 * factory, then wires a pre-clustered group operator around the reducer.
 *
 * NOTE(review): {@code comparatorClassName} comes from
 * {@code getClass().getName()}, which never returns null — so the
 * {@code else} branch below appears unreachable, and {@code rawComparator}
 * is unused in the branch actually taken. Confirm whether the intent was to
 * check for an explicitly configured grouping comparator instead.
 */
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
    try {
        if (this.comparatorFactory == null) {
            String comparatorClassName = getJobConf().getOutputValueGroupingComparator().getClass().getName();
            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
            RawComparator rawComparator = null;
            if (comparatorClassName != null) {
                // Use the grouping comparator class configured on the job.
                Class comparatorClazz = getHadoopClassFactory().loadClass(comparatorClassName);
                this.comparatorFactory = new KeyComparatorFactory(comparatorClazz);
            } else {
                // Fall back to the map output key class's default WritableComparator.
                String mapOutputKeyClass = getJobConf().getMapOutputKeyClass().getName();
                if (getHadoopClassFactory() != null) {
                    rawComparator = WritableComparator
                            .get(getHadoopClassFactory().loadClass(mapOutputKeyClass));
                } else {
                    rawComparator = WritableComparator
                            .get((Class<? extends WritableComparable>) Class.forName(mapOutputKeyClass));
                }
                this.comparatorFactory = new RawComparingComparatorFactory(rawComparator.getClass());
            }
        }
        // Group on field 0 (the key) and feed each group to the reducer.
        IOpenableDataWriterOperator op = new DeserializedPreclusteredGroupOperator(new int[] { 0 },
                new IComparator[] { comparatorFactory.createComparator() },
                new ReducerAggregator(createReducer()));
        return new DeserializedOperatorNodePushable(ctx, op,
                recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
From source file: edu.uci.ics.hyracks.hadoop.compat.util.HadoopAdapter.java
License: Apache License
/**
 * Builds an in-memory sort operator that orders the job's map output
 * records by key.
 *
 * @param conf job configuration supplying the map output key/value classes
 * @param spec operator registry the new descriptor is registered with
 * @return an in-memory sort descriptor keyed on field 0 (the map output key)
 */
public static InMemorySortOperatorDescriptor getInMemorySorter(JobConf conf, IOperatorDescriptorRegistry spec) {
    RecordDescriptor recordDescriptor = getHadoopRecordDescriptor(conf.getMapOutputKeyClass().getName(),
            conf.getMapOutputValueClass().getName());
    // Sort on field 0 using the key class's default WritableComparator.
    WritableComparator writableComparator = WritableComparator
            .get(conf.getMapOutputKeyClass().asSubclass(WritableComparable.class));
    WritableComparingBinaryComparatorFactory comparatorFactory = new WritableComparingBinaryComparatorFactory(
            writableComparator.getClass());
    // (Removed an unused local `rawComparatorClass` and a dead null-initialized
    // holder variable from the original.)
    return new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
            new IBinaryComparatorFactory[] { comparatorFactory }, recordDescriptor);
}
From source file: edu.uci.ics.hyracks.hadoop.compat.util.HadoopAdapter.java
License: Apache License
/**
 * Builds an external (spill-to-disk) sort operator that orders the job's
 * map output records by key.
 *
 * @param conf job configuration supplying the key/value classes and the
 *             frame limit ({@code HYRACKS_EX_SORT_FRAME_LIMIT}, defaulting
 *             to {@code DEFAULT_EX_SORT_FRAME_LIMIT})
 * @param spec operator registry the new descriptor is registered with
 * @return an external sort descriptor keyed on field 0 (the map output key)
 */
public static ExternalSortOperatorDescriptor getExternalSorter(JobConf conf, IOperatorDescriptorRegistry spec) {
    RecordDescriptor recordDescriptor = getHadoopRecordDescriptor(conf.getMapOutputKeyClass().getName(),
            conf.getMapOutputValueClass().getName());
    // Sort on field 0 using the key class's default WritableComparator.
    WritableComparator writableComparator = WritableComparator
            .get(conf.getMapOutputKeyClass().asSubclass(WritableComparable.class));
    WritableComparingBinaryComparatorFactory comparatorFactory = new WritableComparingBinaryComparatorFactory(
            writableComparator.getClass());
    // (Removed an unused local `rawComparatorClass` and a dead null-initialized
    // holder variable from the original.)
    return new ExternalSortOperatorDescriptor(spec,
            conf.getInt(HYRACKS_EX_SORT_FRAME_LIMIT, DEFAULT_EX_SORT_FRAME_LIMIT), new int[] { 0 },
            new IBinaryComparatorFactory[] { comparatorFactory }, recordDescriptor);
}
From source file: edu.uci.ics.pregelix.core.jobgen.JobGen.java
License: Apache License
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public JobSpecification generateCreatingJob() throws HyracksException {
    final Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
    final JobSpecification jobSpec = new JobSpecification();

    // Two record fields; TypeTraits(false) — presumably variable-length, confirm.
    final ITypeTraits[] fieldTraits = new ITypeTraits[] { new TypeTraits(false), new TypeTraits(false) };

    // The index is keyed on the vertex id, compared with its default WritableComparator.
    final IBinaryComparatorFactory[] keyComparators = new IBinaryComparatorFactory[] {
            new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass).getClass()) };

    final IFileSplitProvider splitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
    final TreeIndexCreateOperatorDescriptor createOp = new TreeIndexCreateOperatorDescriptor(jobSpec,
            storageManagerInterface, lcManagerProvider, splitProvider, fieldTraits, keyComparators, null,
            new BTreeDataflowHelperFactory(), new TransientLocalResourceFactoryProvider(),
            NoOpOperationCallbackFactory.INSTANCE);
    ClusterConfig.setLocationConstraint(jobSpec, createOp);
    return jobSpec;
}