Example usage for org.apache.lucene.document StoredField StoredField

List of usage examples for org.apache.lucene.document StoredField StoredField

Introduction

In this page you can find the example usage for org.apache.lucene.document StoredField StoredField.

Prototype

public StoredField(String name, double value) 

Source Link

Document

Create a stored-only field with the given double value.

Usage

From source file:net.semanticmetadata.lire.impl.SimpleBuilder.java

License: Open Source License

/**
 * Detects OpenCV SURF key points in the image and extracts the configured
 * local feature at each key point. One stored-only field is created per key
 * point, holding the feature's byte-array representation.
 *
 * @param image the image to process.
 * @return one stored field per detected key point.
 */
private Field[] useCVSURF(BufferedImage image) {
    CvSurfExtractor extractor = new CvSurfExtractor();
    LinkedList<CvSurfFeature> keyPoints = extractor.computeSurfKeypoints(image);
    ArrayList<Field> result = new ArrayList<Field>(keyPoints.size());
    String fieldName = DocumentBuilder.FIELD_NAME_SIMPLE + lireFeature.getFieldName() + Detector_CVSURF;
    for (CvSurfFeature keyPoint : keyPoints) {
        // Crop a square patch centered on the key point, sized by the key point's scale.
        int patchSize = (int) keyPoint.size;
        int left = (int) (keyPoint.point[0] - patchSize / 2);
        int top = (int) (keyPoint.point[1] - patchSize / 2);
        lireFeature.extract(ImageUtils.cropImage(image, left, top, patchSize, patchSize));
        result.add(new StoredField(fieldName, lireFeature.getByteArrayRepresentation()));
    }
    return result.toArray(new Field[result.size()]);
}

From source file:net.semanticmetadata.lire.impl.SimpleBuilder.java

License: Open Source License

/**
 * Detects OpenCV SIFT key points in the image and extracts the configured
 * local feature at each key point. One stored-only field is created per key
 * point, holding the feature's byte-array representation.
 *
 * @param image the image to process.
 * @return one stored field per detected key point.
 */
private Field[] useCVSIFT(BufferedImage image) {
    CvSiftExtractor extractor = new CvSiftExtractor();
    LinkedList<CvSiftFeature> keyPoints = extractor.computeSiftKeypoints(image);
    ArrayList<Field> result = new ArrayList<Field>(keyPoints.size());
    String fieldName = DocumentBuilder.FIELD_NAME_SIMPLE + lireFeature.getFieldName() + Detector_CVSIFT;
    for (CvSiftFeature keyPoint : keyPoints) {
        // Crop a square patch centered on the key point, sized by the key point's scale.
        int patchSize = (int) keyPoint.size;
        int left = (int) (keyPoint.point[0] - patchSize / 2);
        int top = (int) (keyPoint.point[1] - patchSize / 2);
        lireFeature.extract(ImageUtils.cropImage(image, left, top, patchSize, patchSize));
        result.add(new StoredField(fieldName, lireFeature.getByteArrayRepresentation()));
    }
    return result.toArray(new Field[result.size()]);
}

From source file:net.semanticmetadata.lire.impl.SimpleBuilder.java

License: Open Source License

/**
 * Samples {@code samples} uniformly random square patches from the image and
 * extracts the configured local feature from each. One stored-only field is
 * created per sampled patch.
 *
 * @param image the image to process.
 * @return one stored field per random sample.
 */
private Field[] useRandom(BufferedImage image) {
    ArrayList<Field> fields = new ArrayList<Field>(samples);
    Random rng = new Random();
    // Reuse a single int[3] (x, y, size) per iteration instead of allocating
    // key point objects, and build the field name once outside the loop. (ml)
    int[] currentPoint = new int[3];
    String fieldName = DocumentBuilder.FIELD_NAME_SIMPLE + lireFeature.getFieldName() + Detector_RANDOM;
    for (int i = 0; i < samples; i++) {
        createNextRandomPoint(currentPoint, image.getWidth(), image.getHeight(), rng);
        lireFeature.extract(
                ImageUtils.cropImage(image, currentPoint[0], currentPoint[1], currentPoint[2], currentPoint[2]));
        fields.add(new StoredField(fieldName, lireFeature.getByteArrayRepresentation()));
    }
    return fields.toArray(new Field[fields.size()]);
}

From source file:net.semanticmetadata.lire.impl.SimpleBuilder.java

License: Open Source License

/**
 * Samples {@code samples} Gaussian-distributed key points from the image and
 * extracts the configured local feature from a square patch centered on each.
 * One stored-only field is created per key point.
 *
 * @param image the image to process.
 * @return one stored field per sampled key point.
 */
private Field[] useGaussRandom(BufferedImage image) {
    LinkedList<keypoint> keyPoints = createGaussRndPts(image.getWidth(), image.getHeight(), samples);
    ArrayList<Field> fields = new ArrayList<Field>(keyPoints.size());
    String fieldName = DocumentBuilder.FIELD_NAME_SIMPLE + lireFeature.getFieldName() + Detector_GAUSSRANDOM;
    for (keypoint kp : keyPoints) {
        // Center the crop on the key point; the patch side equals the key point size.
        int left = (int) (kp.X - (kp.Size / 2));
        int top = (int) (kp.Y - (kp.Size / 2));
        lireFeature.extract(ImageUtils.cropImage(image, left, top, (int) kp.Size, (int) kp.Size));
        fields.add(new StoredField(fieldName, lireFeature.getByteArrayRepresentation()));
    }
    return fields.toArray(new Field[fields.size()]);
}

From source file:net.semanticmetadata.lire.impl.SurfDocumentBuilder.java

License: Open Source License

/**
 * Creates a Lucene document from the image: one stored-only SURF feature
 * field per free-oriented interest point, plus an optional identifier field.
 *
 * @param image      the image to index.
 * @param identifier an optional identifier stored as a string field; may be null.
 * @return the populated document.
 */
public Document createDocument(BufferedImage image, String identifier) {
    Surf surf = new Surf(image);
    Document doc = new Document();
    for (SURFInterestPoint interestPoint : surf.getFreeOrientedInterestPoints()) {
        SurfFeature feature = new SurfFeature(interestPoint);
        doc.add(new StoredField(DocumentBuilder.FIELD_NAME_SURF, feature.getByteArrayRepresentation()));
    }
    if (identifier != null) {
        doc.add(new StringField(DocumentBuilder.FIELD_NAME_IDENTIFIER, identifier, Field.Store.YES));
    }
    return doc;
}

From source file:net.semanticmetadata.lire.indexers.tools.binary.HashingIndexor.java

License: Open Source License

/**
 * Adds the feature's bit-sampling hashes (as indexed, stored terms) and its
 * raw byte representation (stored-only) to the document — but only when the
 * feature is of the configured feature class; other features are ignored.
 *
 * @param feature          the current feature.
 * @param document         the document the fields are added to.
 * @param featureFieldName the field name of the feature.
 */
protected void addToDocument(GlobalFeature feature, Document document, String featureFieldName) {
    boolean matchesConfiguredClass = feature.getClass().getCanonicalName()
            .equals(featureClass.getCanonicalName());
    if (!matchesConfiguredClass) {
        return;
    }
    // Hashes are indexed as searchable terms under "<field>_hash".
    int[] hashes = BitSampling.generateHashes(feature.getFeatureVector());
    document.add(new TextField(featureFieldName + "_hash",
            SerializationUtils.arrayToString(hashes), Field.Store.YES));
    // The raw feature itself is stored-only, for later re-ranking/retrieval.
    document.add(new StoredField(featureFieldName, feature.getByteArrayRepresentation()));
}

From source file:net.semanticmetadata.lire.indexers.tools.binary.Indexor.java

License: Open Source License

/**
 * Adds the feature's byte-array representation to the document as a
 * stored-only field. Overwrite this method if you want to filter the input,
 * apply hashing, etc.
 *
 * @param feature          the current feature.
 * @param document         the current document.
 * @param featureFieldName the field name of the feature.
 */
protected void addToDocument(GlobalFeature feature, Document document, String featureFieldName) {
    byte[] representation = feature.getByteArrayRepresentation();
    document.add(new StoredField(featureFieldName, representation));
}

From source file:net.semanticmetadata.lire.indexers.tools.binary.ProximityHashingIndexor.java

License: Open Source License

/**
 * Adds the feature to the document depending on the current indexing pass
 * ({@code run}). Overwrite this method if you want to filter the input,
 * apply hashing, etc.
 * <p>
 * Pass 0 only counts documents, pass 1 collects the features chosen as
 * representatives, and pass 2 writes the proximity-hash terms and the
 * stored feature into the document.
 *
 * @param feature          the current feature.
 * @param document         the current document.
 * @param featureFieldName the field name of the feature.
 */
protected void addToDocument(GlobalFeature feature, Document document, String featureFieldName) {
    if (run == 0) {
    } // just count documents
    else if (run == 1) { // Select the representatives ...
        if (representativesID.contains(docCount)
                && feature.getClass().getCanonicalName().equals(featureClass.getCanonicalName())) { // it's a representative.
            // put it into a temporary data structure ...
            representatives.add(feature);
        }
    } else if (run == 2) { // actual hashing: find the nearest representatives and put those as a hash into a document.
        if (feature.getClass().getCanonicalName().equals(featureClass.getCanonicalName())) { // it's a feature to be hashed
            int[] hashes = getHashes(feature);
            // "_hash" indexes all hash terms; "_hash_q" only the first 10 for quicker queries.
            document.add(new TextField(featureFieldName + "_hash", createDocumentString(hashes, hashes.length),
                    Field.Store.YES));
            document.add(new TextField(featureFieldName + "_hash_q", createDocumentString(hashes, 10),
                    Field.Store.YES));
        }
        // NOTE(review): unlike HashingIndexor, the stored field is added for EVERY
        // feature in run 2, not only for the configured feature class — confirm
        // this asymmetry is intentional.
        document.add(new StoredField(featureFieldName, feature.getByteArrayRepresentation()));
    }
}

From source file:net.semanticmetadata.lire.indexing.tools.HashingIndexor.java

License: Open Source License

/**
 * Adds the feature's bit-sampling hashes (as indexed, stored terms) and its
 * raw byte representation (stored-only) to the document — but only when the
 * feature is of the configured feature class; other features are ignored.
 *
 * @param feature          the current feature.
 * @param document         the document the fields are added to.
 * @param featureFieldName the field name of the feature.
 */
protected void addToDocument(LireFeature feature, Document document, String featureFieldName) {
    boolean matchesConfiguredClass = feature.getClass().getCanonicalName()
            .equals(featureClass.getCanonicalName());
    if (!matchesConfiguredClass) {
        return;
    }
    // Hashes are indexed as searchable terms under "<field>_hash".
    int[] hashes = BitSampling.generateHashes(feature.getDoubleHistogram());
    document.add(new TextField(featureFieldName + "_hash",
            SerializationUtils.arrayToString(hashes), Field.Store.YES));
    // The raw feature itself is stored-only, for later re-ranking/retrieval.
    document.add(new StoredField(featureFieldName, feature.getByteArrayRepresentation()));
}

From source file:net.semanticmetadata.lire.indexing.tools.Indexor.java

License: Open Source License

/**
 * Adds the feature's byte-array representation to the document as a
 * stored-only field. Overwrite this method if you want to filter the input,
 * apply hashing, etc.
 *
 * @param feature          the current feature.
 * @param document         the current document.
 * @param featureFieldName the field name of the feature.
 */
protected void addToDocument(LireFeature feature, Document document, String featureFieldName) {
    byte[] representation = feature.getByteArrayRepresentation();
    document.add(new StoredField(featureFieldName, representation));
}