Usage examples for `org.apache.cassandra.hadoop.HadoopCompat#progress`.
Signature: `public static void progress(TaskAttemptContext context)`
From source file: com.spotify.hdfs2cass.cassandra.cql.CrunchCqlBulkRecordWriter.java
License: Apache License
@Override public void write(final ByteBuffer ignoredKey, final CQLRecord record) { prepareWriter();/*ww w . j a v a 2 s . co m*/ // To ensure Crunch doesn't reuse CQLSSTableWriter's objects List<ByteBuffer> bb = Lists.newArrayList(); for (ByteBuffer v : record.getValues()) { bb.add(ByteBufferUtil.clone(v)); } try { ((CQLSSTableWriter) writer).rawAddRow(bb); if (null != progress) progress.progress(); if (null != context) HadoopCompat.progress(context); } catch (InvalidRequestException | IOException e) { LOG.error(e.getMessage()); throw new CrunchRuntimeException("Error adding row : " + e.getMessage()); } }
From source file: de.hpi.isg.mdms.hadoop.cassandra.AbstractBulkRecordWriter.java
License: Apache License
/**
 * Closes the SSTable writer and streams the generated SSTables to the
 * cluster, blocking (with periodic progress reporting) until streaming
 * completes.
 *
 * @throws IOException if streaming fails, is interrupted, or more than
 *                     {@code maxFailures} hosts fail.
 */
private void close() throws IOException {
    if (writer != null) {
        writer.close();
        Future<StreamState> future = loader.stream();
        while (true) {
            try {
                future.get(1000, TimeUnit.MILLISECONDS);
                break;
            } catch (TimeoutException te) {
                // Still streaming: report liveness so the framework does not
                // kill the task for inactivity.
                if (null != progress)
                    progress.progress();
                if (null != context)
                    HadoopCompat.progress(context);
            } catch (ExecutionException e) {
                // A failed future rethrows ExecutionException on every get();
                // retrying would busy-loop forever. Fail fast instead.
                throw new IOException("SSTable streaming failed", e);
            } catch (InterruptedException e) {
                // Restore the interrupt flag before converting to IOException.
                Thread.currentThread().interrupt();
                throw new IOException(e);
            }
        }
        if (loader.getFailedHosts().size() > 0) {
            if (loader.getFailedHosts().size() > maxFailures)
                throw new IOException("Too many hosts failed: " + loader.getFailedHosts());
            else
                logger.warn("Some hosts failed: {}", loader.getFailedHosts());
        }
    }
}
From source file: de.hpi.isg.mdms.hadoop.cassandra.CqlBulkRecordWriter.java
License: Apache License
/**
 * Writes one row of column values through the underlying CQLSSTableWriter.
 * <p>
 * The values must appear in the same order as the columns of the configured
 * insert statement. The key is not used, so it can be null or any object.
 *
 * @param key    any object or null; ignored.
 * @param values the column values to write, in insert-statement order.
 * @throws IOException if the row cannot be added.
 */
@Override
public void write(Object key, List<ByteBuffer> values) throws IOException {
    prepareWriter();
    try {
        ((CQLSSTableWriter) writer).rawAddRow(values);
    } catch (InvalidRequestException e) {
        throw new IOException("Error adding row with key: " + key, e);
    }
    // Report liveness to both the legacy Progressable and the Hadoop context.
    if (progress != null) {
        progress.progress();
    }
    if (context != null) {
        HadoopCompat.progress(context);
    }
}