Example usage for org.apache.cassandra.dht IPartitioner decorateKey

List of usage examples for org.apache.cassandra.dht IPartitioner decorateKey

Introduction

On this page you can find an example usage of org.apache.cassandra.dht IPartitioner decorateKey.

Prototype

public DecoratedKey decorateKey(ByteBuffer key);

Source Link

Document

Transform key to object representation of the on-disk format.

Usage

From source file:com.cloudian.support.RowFinder.java

License:Apache License

/**
 * Scans every SSTable of the configured keyspace/column family for {@code rowKey}
 * and prints, per file containing the key, the runs of contiguous tombstoned
 * columns whose length is at least {@code threshold}, plus live/deleted/range
 * tombstone counts.
 *
 * @throws IOException on failure to read the SSTable data files
 */
private void find() throws IOException {

    // get ColumnFamilyStore instance
    System.out.println("Opening keyspace " + ksName + " ...");
    Keyspace keyspace = Keyspace.open(ksName);
    System.out.println("Opened keyspace " + ksName);

    System.out.println("Getting column family " + cfName + " ...");
    ColumnFamilyStore cfStore = keyspace.getColumnFamilyStore(cfName);
    Collection<SSTableReader> ssTables = cfStore.getSSTables();
    System.out.println("Got column family " + cfName);

    ByteBuffer buff = ByteBufferUtil.bytes(this.rowKey);
    IPartitioner<?> partitioner = cfStore.partitioner;

    System.out.println(this.rowKey + " is included in the following files");
    System.out.println("==============================================================================");
    // FIX: header previously printed "FINE_NAME"
    System.out.println("FILE_NAME, COLUMN_INFO, CONTIGUOUS_TOMBSTONED_COLUMNS(over " + threshold + ")");
    System.out.println("==============================================================================");
    for (SSTableReader reader : ssTables) {

        if (reader.getBloomFilter().isPresent(buff)) {

            // seek to row key
            // FIX: dfile was never closed (and leaked on the early `continue`)
            RandomAccessReader dfile = reader.openDataReader();
            try {
                RowIndexEntry entry = reader.getPosition(partitioner.decorateKey(buff),
                        SSTableReader.Operator.EQ);
                if (entry == null)
                    continue;
                dfile.seek(entry.position);

                // read the row header: key, optional row size, deletion time, optional column count
                ByteBufferUtil.readWithShortLength(dfile);
                if (reader.descriptor.version.hasRowSizeAndColumnCount)
                    dfile.readLong();
                // value unused, but the call must run to advance the file position
                DeletionTime.serializer.deserialize(dfile);
                int columnCount = reader.descriptor.version.hasRowSizeAndColumnCount ? dfile.readInt()
                        : Integer.MAX_VALUE;

                // get iterator over the row's on-disk atoms
                Iterator<OnDiskAtom> atomIterator = reader.metadata.getOnDiskIterator(dfile, columnCount,
                        reader.descriptor.version);

                // iterate, tracking runs of contiguous deleted columns
                System.out.print(new File(reader.getFilename()).getName());
                boolean isContiguous = false;
                int contiguousTombstonedColumns = 0;
                String contiguousTombstonedColumnsStart = null;
                int live = 0;
                int deleted = 0;
                int rangeTombstone = 0;
                StringBuilder sb = new StringBuilder();
                while (atomIterator.hasNext()) {

                    OnDiskAtom atom = atomIterator.next();

                    if (atom instanceof Column) {

                        if (atom instanceof DeletedColumn) {

                            deleted++;

                            if (!isContiguous) {
                                // start of a new tombstone run
                                isContiguous = true;
                                contiguousTombstonedColumnsStart = ByteBufferUtil.string(atom.name());
                            }

                            contiguousTombstonedColumns++;

                        } else {

                            live++;

                            if (isContiguous) {

                                // a live column ends the run: report it if long enough
                                if (contiguousTombstonedColumns >= this.threshold) {
                                    sb.append(", [" + contiguousTombstonedColumnsStart + "|"
                                            + contiguousTombstonedColumns + "]");
                                }

                                // reset
                                contiguousTombstonedColumns = 0;
                                contiguousTombstonedColumnsStart = null;
                                isContiguous = false;
                            }
                        }

                    } else if (atom instanceof RangeTombstone) {

                        rangeTombstone++;

                        int localDeletionTime = atom.getLocalDeletionTime();
                        ByteBuffer min = ((RangeTombstone) atom).min;
                        ByteBuffer max = ((RangeTombstone) atom).max;
                        String minString = ByteBufferUtil.string(min);
                        String maxString = ByteBufferUtil.string(max);

                        sb.append(", [" + minString + ", " + maxString + "(" + localDeletionTime + ")]");
                    }
                }

                // FIX: this check used to live inside the loop, appending a duplicate
                // entry on every iteration once the threshold was reached. Run it once,
                // after the loop, to report a run that extends to the end of the row.
                if (contiguousTombstonedColumns >= this.threshold) {
                    sb.append(
                            ", [" + contiguousTombstonedColumnsStart + "|" + contiguousTombstonedColumns + "]");
                }

                System.out.print(", (live, deleted, range tombstone)=(" + live + ", " + deleted + ", "
                        + rangeTombstone + ")");
                System.out.println(sb.toString());
            } finally {
                dfile.close();
            }
        }
    }
}

From source file:net.imagini.cassandra.DumpSSTables.SSTableExport.java

License:Apache License

/**
 * Export specific rows from an SSTable and write the resulting JSON to a
 * PrintStream.
 *
 * @param desc
 *            the descriptor of the sstable table to read from
 * @param outs
 *            PrintStream to write the output to
 * @param toExport
 *            the keys corresponding to the rows to export; must be in
 *            partitioner order (an out-of-order key raises IOException)
 * @param excludes
 *            keys to exclude from export (may be null)
 * @throws IOException
 *             on failure to read/write input/output
 */
public static void export(Descriptor desc, PrintStream outs, Collection<String> toExport, String[] excludes)
        throws IOException {
    SSTableReader reader = SSTableReader.open(desc);
    SSTableScanner scanner = reader.getDirectScanner();
    // FIX: scanner was leaked when the out-of-order check threw; close via finally
    try {
        IPartitioner<?> partitioner = reader.partitioner;

        if (excludes != null)
            toExport.removeAll(Arrays.asList(excludes));

        int i = 0;

        // last key seen, used to verify partitioner ordering
        DecoratedKey lastKey = null;

        for (String key : toExport) {
            DecoratedKey decoratedKey = partitioner.decorateKey(ByteBufferUtil.hexToBytes(key));

            if (lastKey != null && lastKey.compareTo(decoratedKey) > 0)
                throw new IOException("Key out of order! " + lastKey + " > " + decoratedKey);

            lastKey = decoratedKey;

            scanner.seekTo(decoratedKey);

            if (!scanner.hasNext())
                continue;

            SSTableIdentityIterator row = (SSTableIdentityIterator) scanner.next();
            if (!row.getKey().equals(decoratedKey))
                continue;

            // FIX: the separator used to be printed AFTER serializeRow, leaving a
            // trailing comma after each row past the first. Print it before the
            // row so commas separate rows, as JSON requires.
            if (i != 0)
                outs.println(",");

            serializeRow(row, decoratedKey, outs);

            i++;
        }

        outs.println("\n");
        outs.flush();
    } finally {
        scanner.close();
    }
}