Example usage for org.apache.hadoop.io IntWritable set

Introduction

On this page you can find example usage of org.apache.hadoop.io.IntWritable.set.

Prototype

public void set(int value) 

Document

Set the value of this IntWritable.
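
A minimal, self-contained sketch of the call (only hadoop-common is needed; the class name and main method are illustrative, not taken from the projects below):

import org.apache.hadoop.io.IntWritable;

public class IntWritableSetExample {
    public static void main(String[] args) {
        // Writables are mutable: a single instance can be reused across records.
        IntWritable writable = new IntWritable();
        writable.set(42); // store a value in place
        System.out.println(writable.get()); // prints 42
        writable.set(7); // overwrite without allocating a new object
        System.out.println(writable.get()); // prints 7
    }
}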

Usage

From source file: org.pentaho.hadoop.mapreduce.converter.converters.KettleTypeToIntWritableConverter.java

License: Apache License

@Override
public IntWritable convert(ValueMetaInterface meta, Object obj) throws TypeConversionException {
    try {
        IntWritable result = new IntWritable();
        result.set(meta.getInteger(obj).intValue());
        return result;
    } catch (KettleValueException ex) {
        throw new TypeConversionException(BaseMessages.getString(TypeConverterFactory.class, "ErrorConverting",
                IntWritable.class.getSimpleName(), obj), ex);
    }
}

From source file: org.plista.kornakapi.core.training.SemanticModel.java

License: Apache License

/**
 * Saves the model.
 * @param safeKey the model key the caller obtained when inference started
 * @throws IOException
 */
public void safe(String safeKey) throws IOException {
    /*
     * Training a new model changes the key. Inference may only save the
     * model while its key is still valid, i.e. no new model was computed
     * between the start and end of the inference job.
     */
    if (!this.key.equals(safeKey)) {
        if (log.isInfoEnabled()) {
            log.info("Storing model Failed. Modelkey Changed");
        }
        return;
    }

    if (itemFeatures != null) {
        Path model = path.suffix("/itemFeature.model");
        Writer w = SequenceFile.createWriter(fs, lconf, model, Text.class, VectorWritable.class);
        for (String itemid : itemFeatures.keySet()) {
            Text id = new Text();
            VectorWritable val = new VectorWritable();
            id.set(itemid);
            val.set(itemFeatures.get(itemid));
            w.append(id, val);
        }
        Closeables.close(w, false);
    }
    if (indexItem != null) {
        Path model = path.suffix("/indexItem.model");
        Writer w = SequenceFile.createWriter(fs, lconf, model, IntWritable.class, Text.class);
        for (Integer itemid : indexItem.keySet()) {
            IntWritable key = new IntWritable();
            Text val = new Text();
            key.set(itemid);
            val.set(indexItem.get(itemid));
            w.append(key, val);
        }
        Closeables.close(w, false);
    }
    if (itemIndex != null) {
        Path model = path.suffix("/itemIndex.model");
        Writer w = SequenceFile.createWriter(fs, lconf, model, Text.class, IntWritable.class);
        for (String itemid : itemIndex.keySet()) {
            IntWritable val = new IntWritable();
            Text key = new Text();
            key.set(itemid);
            val.set(itemIndex.get(itemid));
            w.append(key, val);
        }
        Closeables.close(w, false);
    }
    if (log.isInfoEnabled()) {
        log.info("LDA Model Safed");
    }
}
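
For orientation, a hedged sketch of how the itemFeature part could be read back (the method name and return map are assumptions, not part of the quoted class; fs, lconf and path are the fields used above):

private Map<String, Vector> loadItemFeatures() throws IOException {
    Map<String, Vector> features = new HashMap<String, Vector>();
    Path model = path.suffix("/itemFeature.model");
    // Reader counterpart of the writer above (older fs/path/conf overload).
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, model, lconf);
    Text id = new Text();
    VectorWritable val = new VectorWritable();
    while (reader.next(id, val)) {
        features.put(id.toString(), val.get());
    }
    Closeables.close(reader, true);
    return features;
}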

From source file: org.plista.kornakapi.core.training.SemanticModel.java

License: Apache License

/**
 * The key is written to coordinate concurrent writes from DocumentTopicInferenceTrainer and LDATrainer.
 * @throws IOException
 */
private void writeKey(String key) throws IOException {
    Path keyPath = path.suffix("/key.txt");
    Writer w = SequenceFile.createWriter(fs, lconf, keyPath, IntWritable.class, Text.class);
    IntWritable id = new IntWritable();
    Text val = new Text();
    id.set(1);
    val.set(key);
    w.append(id, val);
    Closeables.close(w, false);
}
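
The matching read side could look like this (a sketch under the same assumptions; readKey is not part of the quoted class):

private String readKey() throws IOException {
    Path keyPath = path.suffix("/key.txt");
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, keyPath, lconf);
    IntWritable id = new IntWritable();
    Text val = new Text();
    try {
        reader.next(id, val); // writeKey stores exactly one record
        return val.toString();
    } finally {
        Closeables.close(reader, true);
    }
}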

From source file: org.qcri.pca.FileFormat.java

public static void convertFromDenseToSeq(String inputPath, int cardinality, String outputFolderPath) {
    try {
        final Configuration conf = new Configuration();
        final FileSystem fs = FileSystem.get(conf);
        SequenceFile.Writer writer;

        final IntWritable key = new IntWritable();
        final VectorWritable value = new VectorWritable();

        int lineNumber = 0;
        String thisLine;
        File[] filePathList = null;
        File inputFile = new File(inputPath);
        if (inputFile.isFile()) // if it is a file
        {
            filePathList = new File[1];
            filePathList[0] = inputFile;
        } else {
            filePathList = inputFile.listFiles();
        }
        if (filePathList == null) {
            log.error("The path " + inputPath + " does not exist");
            return;
        }
        for (File file : filePathList) {
            BufferedReader br = new BufferedReader(new FileReader(file));
            Vector vector = null;
            String outputFileName = outputFolderPath + File.separator + file.getName() + ".seq";
            writer = SequenceFile.createWriter(fs, conf, new Path(outputFileName), IntWritable.class,
                    VectorWritable.class, CompressionType.BLOCK);
            while ((thisLine = br.readLine()) != null) { // while loop begins here
                if (thisLine.isEmpty())
                    continue;
                String[] splitted = thisLine.split("\\s+");
                vector = new SequentialAccessSparseVector(splitted.length);
                for (int i = 0; i < splitted.length; i++) {
                    vector.set(i, Double.parseDouble(splitted[i]));
                }
                key.set(lineNumber);
                value.set(vector);
                writer.append(key, value); // write this line as one matrix row
                lineNumber++;
            }
            br.close();
            writer.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}
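
Note the pattern used here and in the converters that follow: key and value are allocated once and mutated with set() for every row. SequenceFile.Writer.append serializes the current contents of the Writables at call time, so reusing a single instance per file is safe and avoids one allocation per record.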

From source file: org.qcri.pca.FileFormat.java

public static void convertFromCooToSeq(String inputPath, int cardinality, int base, String outputFolderPath) {
    try {
        final Configuration conf = new Configuration();
        final FileSystem fs = FileSystem.get(conf);
        SequenceFile.Writer writer = null;

        final IntWritable key = new IntWritable();
        final VectorWritable value = new VectorWritable();

        Vector vector = null;

        String thisLine;
        int prevRowID = -1;
        boolean first = true;
        File[] filePathList = null;
        File inputFile = new File(inputPath);
        if (inputFile.isFile()) // if it is a file
        {
            filePathList = new File[1];
            filePathList[0] = inputFile;
        } else {
            filePathList = inputFile.listFiles();
        }
        if (filePathList == null) {
            log.error("The path " + inputPath + " does not exist");
            return;
        }
        for (File file : filePathList) {
            BufferedReader br = new BufferedReader(new FileReader(file));
            String outputFileName = outputFolderPath + File.separator + file.getName() + ".seq";
            writer = SequenceFile.createWriter(fs, conf, new Path(outputFileName), IntWritable.class,
                    VectorWritable.class, CompressionType.BLOCK);
            while ((thisLine = br.readLine()) != null) { // while loop begins here            
                String[] splitted = thisLine.split(",");
                int rowID = Integer.parseInt(splitted[0]);
                int colID = Integer.parseInt(splitted[1]);
                double element = Double.parseDouble(splitted[2]);
                if (first) {
                    first = false;
                    vector = new SequentialAccessSparseVector(cardinality);
                } else if (rowID != prevRowID) {
                    key.set(prevRowID);
                    value.set(vector);
                    writer.append(key, value); // row id changed: write out the completed previous row
                    vector = new SequentialAccessSparseVector(cardinality);
                }
                prevRowID = rowID;
                vector.set(colID - base, element);
            }
            br.close();
            // If each input file started a new row, the last vector of the file
            // could be appended and the writer closed here, instead of once
            // after the loop as below.
        }
        if (writer != null) //append last vector in last file
        {
            key.set(prevRowID);
            value.set(vector);
            writer.append(key, value); // write the last row
            writer.close();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}
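
Reading such a file back uses the same API in reverse; a hedged sketch (printSeqFile and its path argument are illustrative, and VectorWritable is Mahout's, as in the converter above):

public static void printSeqFile(String seqPath) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(seqPath), conf);
    IntWritable rowId = new IntWritable();
    VectorWritable row = new VectorWritable();
    while (reader.next(rowId, row)) {
        // rowId holds the original row index, row the sparse vector for that row
        System.out.println(rowId.get() + " -> " + row.get());
    }
    reader.close();
}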

From source file: org.qcri.sparkpca.FileFormat.java

public static void convertFromCooToSeq(String inputPath, int cardinality, int base, String outputFolderPath) {
    try {
        final Configuration conf = new Configuration();
        final FileSystem fs = FileSystem.get(conf);
        SequenceFile.Writer writer = null;

        final IntWritable key = new IntWritable();
        final VectorWritable value = new VectorWritable();

        Vector vector = null;

        String thisLine;

        int prevRowID = -1;
        boolean first = true;
        File[] filePathList = null;
        File inputFile = new File(inputPath);
        if (inputFile.isFile()) // if it is a file
        {
            filePathList = new File[1];
            filePathList[0] = inputFile;
        } else {
            filePathList = inputFile.listFiles();
        }
        if (filePathList == null) {
            log.error("The path " + inputPath + " does not exist");
            return;
        }
        for (File file : filePathList) {
            BufferedReader br = new BufferedReader(new FileReader(file));
            String outputFileName = outputFolderPath + File.separator + file.getName() + ".seq";
            writer = SequenceFile.createWriter(fs, conf, new Path(outputFileName), IntWritable.class,
                    VectorWritable.class, CompressionType.BLOCK);
            while ((thisLine = br.readLine()) != null) { // while loop begins here            
                String[] splitted = thisLine.split(",");
                int rowID = Integer.parseInt(splitted[0]);
                int colID = Integer.parseInt(splitted[1]);
                double element = Double.parseDouble(splitted[2]);
                if (first) {
                    first = false;
                    vector = new SequentialAccessSparseVector(cardinality);
                } else if (rowID != prevRowID) {
                    key.set(prevRowID);
                    value.set(vector);
                    writer.append(key, value); // row id changed: write out the completed previous row
                    vector = new SequentialAccessSparseVector(cardinality);
                }
                prevRowID = rowID;
                vector.set(colID - base, element);
            }
            br.close();
        }
        if (writer != null) //append last vector in last file
        {
            key.set(prevRowID);
            value.set(vector);
            writer.append(key, value); // write the last row
            writer.close();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file: org.shadowmask.engine.hive.udf.UDFAgeTest.java

License: Apache License

@Test
public void testUDFAgeLong() {
    UDFAge udfAge = new UDFAge();
    LongWritable age = new LongWritable(45);
    IntWritable level = new IntWritable(0);
    IntWritable unit = new IntWritable(10);
    LongWritable result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(40, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    unit.set(3);
    level.set(0);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(3);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(27, result.get());
    level.set(4);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    age = null;
    result = udfAge.evaluate(age, level, unit);
    assertNull(result);
}
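
Judging from the assertions, UDFAge.evaluate appears to floor the age to a multiple of unit^level (unit 10, level 1 turns 45 into 40; unit 3, level 3 turns 45 into 27, since 27 = 45 - 45 mod 27) and to return null when the input age is null. The three tests below exercise the same behavior for int, short, and byte inputs.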

From source file: org.shadowmask.engine.hive.udf.UDFAgeTest.java

License: Apache License

@Test
public void testUDFAgeInt() {
    UDFAge udfAge = new UDFAge();
    IntWritable age = new IntWritable(45);
    IntWritable level = new IntWritable(0);
    IntWritable unit = new IntWritable(10);
    IntWritable result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(40, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    unit.set(3);
    level.set(0);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(3);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(27, result.get());
    level.set(4);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    age = null;
    result = udfAge.evaluate(age, level, unit);
    assertNull(result);
}

From source file: org.shadowmask.engine.hive.udf.UDFAgeTest.java

License: Apache License

@Test
public void testUDFAgeShort() {
    UDFAge udfAge = new UDFAge();
    ShortWritable age = new ShortWritable((short) 45);
    IntWritable level = new IntWritable(0);
    IntWritable unit = new IntWritable(10);
    ShortWritable result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(40, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    unit.set(3);
    level.set(0);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(3);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(27, result.get());
    level.set(4);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    age = null;
    result = udfAge.evaluate(age, level, unit);
    assertNull(result);
}

From source file: org.shadowmask.engine.hive.udf.UDFAgeTest.java

License: Apache License

@Test
public void testUDFAgeByte() {
    UDFAge udfAge = new UDFAge();
    ByteWritable age = new ByteWritable((byte) 45);
    IntWritable level = new IntWritable(0);
    IntWritable unit = new IntWritable(10);
    ByteWritable result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(40, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    unit.set(3);
    level.set(0);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(1);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(2);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(45, result.get());
    level.set(3);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(27, result.get());
    level.set(4);
    result = udfAge.evaluate(age, level, unit);
    assertEquals(0, result.get());

    age = null;
    result = udfAge.evaluate(age, level, unit);
    assertNull(result);
}