List of usage examples for org.apache.hadoop.io.LongWritable
public LongWritable(long value)
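Before the project-specific examples, a minimal self-contained sketch (not drawn from any of the projects below) of what the constructor gives you: a mutable, Hadoop-serializable box around a primitive long, with equality and ordering defined by the wrapped value.

import org.apache.hadoop.io.LongWritable;

public class LongWritableBasics {
    public static void main(String[] args) {
        // Construct with an initial value, or use the no-arg form and set() later.
        LongWritable a = new LongWritable(42L);
        LongWritable b = new LongWritable();
        b.set(42L);

        System.out.println(a.get());        // 42
        System.out.println(a.equals(b));    // true: equality is by wrapped value
        System.out.println(a.compareTo(b)); // 0: WritableComparable ordering
    }
}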
From source file:edu.ucsb.cs.knn.core.KnnMapper.java
License:Apache License
public void dumpNeighbours(OutputCollector output) throws IOException {
    Iterator<Long> itr = similarityNeighbourhood.keySet().iterator();
    while (itr.hasNext()) {
        long songId = itr.next();
        SortedArrayList<Neighbour> neighbourhood = similarityNeighbourhood.get(songId);
        Neighbour[] toArray = new Neighbour[neighbourhood.size()];
        neighbourhood.toArray(toArray); // debug this
        output.collect(new LongWritable(songId), new NeighboursArrayWritable(toArray));
    }
}
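The excerpt above allocates a fresh LongWritable for every emitted record. Because OutputCollector.collect() serializes the key and value at call time, a common Hadoop idiom is to allocate the writables once and reuse them via set(). A hypothetical standalone sketch of that pattern (emitAll and its arguments are illustrative, not part of KnnMapper):

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;

public class ReuseSketch {
    // One LongWritable and one Text reused across all emits; collect()
    // copies the serialized bytes, so mutating the objects afterwards is safe.
    static void emitAll(long[] ids, OutputCollector<LongWritable, Text> out) throws IOException {
        LongWritable key = new LongWritable();
        Text value = new Text();
        for (long id : ids) {
            key.set(id);
            value.set("payload-for-" + id);
            out.collect(key, value);
        }
    }
}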
From source file:edu.ucsb.cs.knn.query.QueryMain.java
License:Apache License
public static void main(String[] args) throws IOException {
    JobConf job = new JobConf();
    Path queryPath = new Path("query");
    FileSystem hdfs = queryPath.getFileSystem(job);
    if (!hdfs.exists(queryPath))
        throw new FileNotFoundException("Query is not set");
    FSDataInputStream in = hdfs.open(queryPath);
    String line;

    // Get the song id whose rating should be predicted.
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    System.out.print("Enter song id of music you want its predicted rate: ");
    String songId = br.readLine();

    // Look up the song's real neighbourhood in the MapFile produced by the kNN job.
    MapFile.Reader reader = new MapFile.Reader(hdfs, "knn-output/part-00000", new Configuration());
    LongWritable key = new LongWritable(Long.parseLong(songId));
    NeighboursArrayWritable neighborhood = new NeighboursArrayWritable();
    reader.get(key, neighborhood);
    System.out.println("Real neighbourhood of " + songId + " is " + neighborhood.toString());

    // Process each user.
    while ((line = in.readLine()) != null) {
        float predictedRateUp = 0f;
        float predictedRateDown = 0f;
        StringTokenizer str = new StringTokenizer(line, " |\t");
        long userId = Long.parseLong(str.nextToken());
        int nRatings = Integer.parseInt(str.nextToken());
        boolean rated = false;
        for (int lineNo = 0; lineNo < nRatings; lineNo++) {
            line = in.readLine(); // <songid rate>
            str = new StringTokenizer(line, " |\t");
            long currentUserSong = Long.parseLong(str.nextToken());
            int currentUserRate = Integer.parseInt(str.nextToken());
            if (currentUserSong == key.get()) {
                rated = true;
                System.out.println("User " + userId + " already rated this song to " + currentUserRate);
                // Skip this user's remaining rating lines (the current one is already consumed).
                while (++lineNo < nRatings) {
                    in.readLine();
                }
                break;
            }
            float wij = neighborhood.getWeight(currentUserSong);
            predictedRateUp += currentUserRate * wij;
            predictedRateDown += Math.abs(wij);
        }
        // Predicted rating for this user: similarity-weighted average.
        if (!rated)
            System.out.println("Predicted rating for user " + userId + " is "
                    + (predictedRateUp / predictedRateDown));
    }
}
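Stripped of the I/O, the prediction rule in the loop above is a similarity-weighted average: predicted = sum(rate_i * w_ij) / sum(|w_ij|). A minimal sketch of just that arithmetic (class and method names are illustrative, not from QueryMain):

public class WeightedPrediction {
    // predicted = sum(rate_i * w_i) / sum(|w_i|), as computed in the loop above.
    static float predict(int[] rates, float[] weights) {
        float up = 0f, down = 0f;
        for (int i = 0; i < rates.length; i++) {
            up += rates[i] * weights[i];
            down += Math.abs(weights[i]);
        }
        return up / down;
    }

    public static void main(String[] args) {
        // Ratings {5, 3} with similarities {0.8, 0.2} -> (4.0 + 0.6) / 1.0 = 4.6
        System.out.println(predict(new int[] {5, 3}, new float[] {0.8f, 0.2f}));
    }
}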
From source file:edu.ucsb.cs.lsh.types.CounterWritable.java
License:Apache License
public void incrementCount(int key) {
    featuresCount.put(new LongWritable(key), new IntWritable(getCount(key) + 1));
    totalCount++;
}
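Using LongWritable as a plain HashMap key, as above, works because LongWritable overrides equals() and hashCode() in terms of the wrapped value. A minimal demonstration (independent of CounterWritable); note the caveat that mutating a key with set() after insertion would strand the entry in the wrong hash bucket:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;

public class WritableAsMapKey {
    public static void main(String[] args) {
        // Two distinct instances with the same value hash and compare equal.
        Map<LongWritable, IntWritable> counts = new HashMap<LongWritable, IntWritable>();
        counts.put(new LongWritable(7), new IntWritable(1));
        counts.put(new LongWritable(7), new IntWritable(2)); // replaces the first entry
        System.out.println(counts.size());                   // 1
        System.out.println(counts.get(new LongWritable(7))); // 2
        // Do NOT call set() on a LongWritable that is already a map key.
    }
}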
From source file:edu.ucsb.cs.partitioning.cosine.Organizer.java
License:Apache License
public static void readCombineCopy(Path input, String output, JobConf job) throws IOException {
    boolean printDist = job.getBoolean(Config.PRINT_DISTRIBUTION_PROPERTY, Config.PRINT_DISTRIBUTION_VALUE);
    BufferedWriter distout = null;
    SequenceFile.Writer out = null;
    if (printDist)
        distout = new BufferedWriter(new FileWriter("p-norm-distribution" + output));
    int pc = 0, pr = 0;
    float pChoice = job.getFloat(NormSortMain.P_NORM_PROPERTY, NormSortMain.P_NORM_VALUE);
    FileSystem hdfs = input.getFileSystem(new JobConf());
    FileStatus[] files = Partitioner.setFiles(hdfs, input);
    ArrayList<String> partitions = arrangeNames(files);

    for (int i = 0; i < partitions.size(); i++) {
        Path inputPath = new Path(input.toString() + "/" + partitions.get(i));
        if (hdfs.isDirectory(inputPath))
            continue;

        SequenceFile.Reader in = new SequenceFile.Reader(hdfs, inputPath, job);
        if (!isCombined(pr, pc, getRow(inputPath.getName()), getCol(inputPath.getName()), partitions)) {
            if (out != null)
                out.close();
            pr = getRow(inputPath.getName());
            pc = getCol(inputPath.getName());
            out = SequenceFile.createWriter(hdfs, job, new Path(output + "/" + inputPath.getName()),
                    LongWritable.class, FeatureWeightArrayWritable.class, SequenceFile.CompressionType.NONE);
        }
        while (in.next(unused, document)) {
            out.append(new LongWritable(document.id),
                    new FeatureWeightArrayWritable(document.vectorSize, document.vector));
            if (printDist)
                distout.write(document.getPNorm(pChoice) + " \n");
        }
        in.close();
    }
    if (out != null)
        out.close();
    if (distout != null) // flush and release the distribution file as well
        distout.close();
}
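The example above both reads and writes SequenceFiles keyed by LongWritable. A minimal, self-contained round trip using the same Hadoop 1.x-style createWriter/Reader API, with a hypothetical local path and a Text value for brevity:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SeqFileSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/longwritable-demo.seq"); // hypothetical path

        // Write two records keyed by LongWritable.
        SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, path,
                LongWritable.class, Text.class, SequenceFile.CompressionType.NONE);
        out.append(new LongWritable(1L), new Text("first"));
        out.append(new LongWritable(2L), new Text("second"));
        out.close();

        // Read them back, reusing one key and one value instance.
        SequenceFile.Reader in = new SequenceFile.Reader(fs, path, conf);
        LongWritable key = new LongWritable();
        Text value = new Text();
        while (in.next(key, value)) {
            System.out.println(key.get() + "\t" + value);
        }
        in.close();
    }
}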
From source file:edu.umd.cloud9.io.HashMapWritableTest.java
License:Apache License
@Test
public void testSerialize2() throws IOException {
    HashMapWritable<Text, LongWritable> origMap = new HashMapWritable<Text, LongWritable>();
    origMap.put(new Text("hi"), new LongWritable(52));
    origMap.put(new Text("there"), new LongWritable(77));

    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(bytesOut);
    origMap.write(dataOut);

    HashMapWritable<Text, LongWritable> map = new HashMapWritable<Text, LongWritable>();
    map.readFields(new DataInputStream(new ByteArrayInputStream(bytesOut.toByteArray())));

    Text key;
    LongWritable value;
    assertEquals(2, map.size());

    key = new Text("hi");
    value = map.get(key);
    assertTrue(value != null);
    assertEquals(52, value.get());

    value = map.remove(key);
    assertEquals(1, map.size());

    key = new Text("there");
    value = map.get(key);
    assertTrue(value != null);
    assertEquals(77, value.get());
}
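The test above exercises serialization through HashMapWritable; the same write()/readFields() round trip works on a bare LongWritable, which serializes as exactly eight bytes. A minimal sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;

public class RoundTripSketch {
    public static void main(String[] args) throws IOException {
        LongWritable original = new LongWritable(52L);

        // Serialize: LongWritable writes exactly 8 bytes (a raw long).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance.
        LongWritable copy = new LongWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(bytes.size());          // 8
        System.out.println(copy.get());            // 52
        System.out.println(original.equals(copy)); // true
    }
}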
From source file:eu.stratosphere.hadoopcompatibility.datatypes.DefaultStratosphereTypeConverter.java
License:Apache License
@SuppressWarnings("unchecked")
private <T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
    if (hadoopType == LongWritable.class) {
        return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.Text.class) {
        return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
        return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
        return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
        return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
        return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
        return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
    }
    throw new RuntimeException("Unable to convert Stratosphere type ("
            + stratosphereType.getClass().getCanonicalName() + ") to Hadoop.");
}
From source file:example.TestMapReduce.java
License:Apache License
@Test
public void testMapper() throws IOException {
    DBInputWritable dbInputWritable = new DBInputWritable();
    dbInputWritable.setCustomer("ABC");
    dbInputWritable.setAmount(100);

    mapDriver.withInput(new LongWritable(1), dbInputWritable);
    mapDriver.withOutput(new Text("ABC"), new IntWritable(100));
    mapDriver.runTest();
}
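The LongWritable(1) key here stands in for whatever key the input format supplies; with the common TextInputFormat it is the byte offset of each line within the split. A hypothetical MRUnit test for such a mapper (LineMapper and its parsing are illustrative, not from the example project):

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.junit.Test;

public class OffsetKeyMapperTest {

    // Hypothetical mapper over TextInputFormat records: the LongWritable key
    // is the byte offset of the line, which this mapper simply ignores.
    public static class LineMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        protected void map(LongWritable offset, Text line, Context context)
                throws IOException, InterruptedException {
            String[] parts = line.toString().split("\\s+");
            context.write(new Text(parts[0]), new IntWritable(Integer.parseInt(parts[1])));
        }
    }

    @Test
    public void testLineMapper() throws IOException {
        MapDriver<LongWritable, Text, Text, IntWritable> driver =
                MapDriver.newMapDriver(new LineMapper());
        driver.withInput(new LongWritable(0), new Text("ABC 100"))
              .withOutput(new Text("ABC"), new IntWritable(100))
              .runTest();
    }
}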
From source file:fileformats.AuctionMessage.java
License:Apache License
public AuctionMessage() {
    sender = new LongWritable(0);
    bid = 0;
}
From source file:fileformats.AuctionMessage.java
License:Apache License
public AuctionMessage(long s, double b) {
    sender = new LongWritable(s);
    bid = b;
}
From source file:fileformats.AuctionVertexValue.java
License:Apache License
public AuctionVertexValue() {
    row = false;
    N = 0;
    benefit = new double[N];
    colOwned = new LongWritable(-1);
    rowOwnedBy = new LongWritable(-1);
    price = 0;
}
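The constructor above initializes colOwned and rowOwnedBy to new LongWritable(-1), evidently as a "no owner yet" sentinel. When LongWritable values like these end up as keys, Hadoop also provides registered raw comparators (LongWritable.Comparator and LongWritable.DecreasingComparator) that can compare serialized bytes without deserializing, which is what the shuffle uses. A brief sketch of the orderings:

import org.apache.hadoop.io.LongWritable;

public class ComparatorSketch {
    public static void main(String[] args) {
        LongWritable sentinel = new LongWritable(-1);
        LongWritable owner = new LongWritable(3);

        // Natural ordering via WritableComparable.
        System.out.println(sentinel.compareTo(owner)); // negative: -1 < 3

        // Registered raw comparator; the object-based compare overload
        // is inherited from WritableComparator.
        LongWritable.Comparator cmp = new LongWritable.Comparator();
        System.out.println(cmp.compare(sentinel, owner) < 0); // true

        // Reverse ordering, e.g. for descending sorts.
        LongWritable.DecreasingComparator dec = new LongWritable.DecreasingComparator();
        System.out.println(dec.compare(sentinel, owner) > 0); // true
    }
}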