Example usage for org.apache.cassandra.io.sstable SSTableLoader SSTableLoader

List of usage examples for org.apache.cassandra.io.sstable SSTableLoader SSTableLoader

Introduction

This page shows example usage of org.apache.cassandra.io.sstable SSTableLoader's SSTableLoader constructor.

Prototype

public SSTableLoader(File directory, Client client, OutputHandler outputHandler) 

Source Link

Usage

From source file:com.spotify.hdfs2cass.cassandra.cql.CrunchCqlBulkRecordWriter.java

License:Apache License

/**
 * Lazily initializes the CQL SSTable writer and the streaming loader.
 *
 * <p>The writer produces sorted SSTables for {@code schema} in {@code outputDir}
 * using {@code insertStatement}; the loader streams those SSTables via a
 * {@link CrunchExternalClient} that is pre-seeded with the target column family.
 * Any failure is rethrown as a {@code CrunchRuntimeException}.
 */
private void prepareWriter() {
    try {
        if (writer == null) {
            writer = CQLSSTableWriter.builder()
                    .forTable(schema)
                    .using(insertStatement)
                    .withPartitioner(ConfigHelper.getOutputPartitioner(conf))
                    .inDirectory(outputDir)
                    .sorted()
                    .build();
        }
        if (loader == null) {
            CrunchExternalClient client = new CrunchExternalClient(conf);
            // Register the schema up front so the loader knows the target CF.
            client.addKnownCfs(keyspace, schema);
            this.loader = new SSTableLoader(outputDir, client, new BulkRecordWriter.NullOutputHandler());
        }
    } catch (Exception e) {
        // Wrap everything in the pipeline's unchecked exception type.
        throw new CrunchRuntimeException(e);
    }
}

From source file:com.spotify.hdfs2cass.cassandra.thrift.CrunchBulkRecordWriter.java

License:Apache License

/**
 * Lazily prepares the SSTable output directory, the unsorted SSTable writer,
 * and the streaming loader that pushes the generated SSTables to the cluster.
 *
 * <p>The output directory must follow the {@code <location>/<keyspace>/<columnFamily>}
 * layout expected by {@link SSTableLoader}. Loader progress is echoed to stdout
 * via {@code OutputHandler.SystemOutput}.
 */
private void prepareWriter() {
    String columnFamily = CrunchConfigHelper.getOutputColumnFamily(conf);
    String keyspace = ConfigHelper.getOutputKeyspace(conf);

    if (outputdir == null) {
        // The loader requires the directory to be named <keyspace>/<columnFamily>.
        outputdir = Paths.get(getOutputLocation(), keyspace, columnFamily).toFile();
        outputdir.mkdirs();
    }

    if (writer == null) {
        // Super column families need an explicit subcomparator; standard CFs do not.
        AbstractType<?> subcomparator = (cfType == CFType.SUPER) ? BytesType.instance : null;

        int bufferSizeInMB = Integer.parseInt(conf.get(BUFFER_SIZE_IN_MB, "64"));
        this.writer = new SSTableSimpleUnsortedWriter(
                outputdir,
                ConfigHelper.getOutputPartitioner(conf),
                keyspace,
                columnFamily,
                BytesType.instance,
                subcomparator,
                bufferSizeInMB,
                ConfigHelper.getOutputCompressionParamaters(conf));

        ExternalSSTableLoaderClient client = new ExternalSSTableLoaderClient(
                ConfigHelper.getOutputInitialAddress(conf),
                ConfigHelper.getOutputRpcPort(conf),
                ConfigHelper.getOutputKeyspaceUserName(conf),
                ConfigHelper.getOutputKeyspacePassword(conf));

        this.loader = new SSTableLoader(outputdir, client, new OutputHandler.SystemOutput(true, true));
    }
}

From source file:com.tuplejump.calliope.hadoop.BulkRecordWriter.java

License:Apache License

/**
 * Lazily creates the SSTable output directory, the unsorted SSTable writer,
 * and the streaming loader used to send the generated SSTables to Cassandra.
 *
 * <p>The output directory must be laid out as
 * {@code <location>/<keyspace>/<columnFamily>}, which is the structure
 * {@link SSTableLoader} expects when discovering SSTables to stream.
 *
 * @throws IOException if the output directory cannot be created
 */
private void prepareWriter() throws IOException {
    if (outputdir == null) {
        String keyspace = ConfigHelper.getOutputKeyspace(conf);
        // dir must be named by ks/cf for the loader
        outputdir = new File(getOutputLocation() + File.separator + keyspace + File.separator
                + ConfigHelper.getOutputColumnFamily(conf));
        // FIX: the original ignored mkdirs()'s return value, deferring the failure
        // to an obscure error deep inside the writer. Fail fast instead; mkdirs()
        // also returns false when the directory already exists, so re-check that.
        if (!outputdir.mkdirs() && !outputdir.isDirectory()) {
            throw new IOException("Failed to create output directory: " + outputdir);
        }
    }

    if (writer == null) {
        AbstractType<?> subcomparator = null;
        String username = ConfigHelper.getOutputKeyspaceUserName(conf);
        String password = ConfigHelper.getOutputKeyspacePassword(conf);

        // Super column families need an explicit subcomparator; standard CFs do not.
        if (cfType == CFType.SUPER)
            subcomparator = BytesType.instance;

        this.writer = new SSTableSimpleUnsortedWriter(outputdir, ConfigHelper.getOutputPartitioner(conf),
                ConfigHelper.getOutputKeyspace(conf), ConfigHelper.getOutputColumnFamily(conf),
                BytesType.instance, subcomparator, Integer.parseInt(conf.get(BUFFER_SIZE_IN_MB, "64")),
                ConfigHelper.getOutputCompressionParamaters(conf));

        ExternalClient externalClient = new ExternalClient(ConfigHelper.getOutputInitialAddress(conf),
                ConfigHelper.getOutputRpcPort(conf), username, password);

        // NullOutputHandler suppresses loader progress output.
        this.loader = new SSTableLoader(outputdir, externalClient, new NullOutputHandler());
    }
}

From source file:de.hpi.isg.mdms.hadoop.cassandra.CqlBulkRecordWriter.java

License:Apache License

/**
 * Lazily initializes the CQL SSTable writer and the streaming loader.
 *
 * <p>The loader is an anonymous {@link SSTableLoader} subclass whose
 * {@code onSuccess} hook removes the local SSTable directory once streaming
 * completes, when {@code deleteSrc} is set. Any setup failure is rethrown
 * as an {@link IOException}.
 *
 * @throws IOException if writer or loader construction fails
 */
private void prepareWriter() throws IOException {
    try {
        if (writer == null) {
            int bufferMB = Integer.parseInt(conf.get(BUFFER_SIZE_IN_MB, "64"));
            writer = CQLSSTableWriter.builder()
                    .forTable(schema)
                    .using(insertStatement)
                    .withPartitioner(ConfigHelper.getOutputPartitioner(conf))
                    .inDirectory(outputDir)
                    .withBufferSizeInMB(bufferMB)
                    .build();
        }
        if (loader == null) {
            ExternalClient client = new ExternalClient(conf);

            this.loader = new SSTableLoader(outputDir, client, new BulkRecordWriter.NullOutputHandler()) {
                @Override
                public void onSuccess(StreamState finalState) {
                    // Clean up the local SSTables once they have been streamed.
                    if (deleteSrc)
                        FileUtils.deleteRecursive(outputDir);
                }
            };
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file:uk.co.pinpointlabs.io.CqlConcurrentBulkRecordWriter.java

License:Apache License

/**
 * Lazily initializes the CQL SSTable writer and the streaming loader.
 *
 * <p>The external client is pre-seeded with the target column family, and the
 * loader's {@code onSuccess} hook deletes the local SSTable directory after a
 * successful stream when {@code deleteSrc} is set. Any setup failure is
 * rethrown as an {@link IOException}.
 *
 * @throws IOException if writer or loader construction fails
 */
private void prepareWriter() throws IOException {
    try {
        if (writer == null) {
            int bufferMB = Integer.parseInt(conf.get(BUFFER_SIZE_IN_MB, "64"));
            writer = CQLSSTableWriter.builder()
                    .forTable(schema)
                    .using(insertStatement)
                    .withPartitioner(ConfigHelper.getOutputPartitioner(conf))
                    .inDirectory(outputDir)
                    .withBufferSizeInMB(bufferMB)
                    .build();
        }
        if (loader == null) {
            ExternalClient client = new ExternalClient(conf);

            // Register the schema up front so the loader knows the target CF.
            client.addKnownCfs(keyspace, schema);

            this.loader = new SSTableLoader(outputDir, client, new BulkRecordWriter.NullOutputHandler()) {
                @Override
                public void onSuccess(StreamState finalState) {
                    // Clean up the local SSTables once they have been streamed.
                    if (deleteSrc)
                        FileUtils.deleteRecursive(outputDir);
                }
            };
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
}