List of usage examples for org.apache.thrift TDeserializer deserialize
public void deserialize(TBase base, byte[] bytes) throws TException
From source file: io.warp10.standalone.StandaloneIngressHandler.java
License: Apache License
/**
 * Handles Metadata updating requests on the META endpoint.
 *
 * <p>Parses one metadata line per input line (format: {@code class{labels}{attributes}}),
 * forces producer/owner/application labels from the write token, registers each
 * Metadata with the directory client, and optionally records the request to a
 * datalog file for forwarding.
 *
 * @param target      the request target; only {@code Constants.API_ENDPOINT_META} is handled
 * @param baseRequest the Jetty base request, marked handled when the target matches
 * @param request     the servlet request (token header, optional datalog header, gzipped body)
 * @param response    the servlet response
 * @throws IOException      on invalid datalog header, token extraction failure, or I/O error
 * @throws ServletException declared by the handler contract
 */
public void handleMeta(String target, Request baseRequest, HttpServletRequest request,
        HttpServletResponse response) throws IOException, ServletException {
    if (target.equals(Constants.API_ENDPOINT_META)) {
        baseRequest.setHandled(true);
    } else {
        // Not our endpoint: leave the request for another handler.
        return;
    }
    try {
        //
        // CORS header
        //
        response.setHeader("Access-Control-Allow-Origin", "*");

        //
        // Extract DatalogRequest if specified
        //
        String datalogHeader = request.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_DATALOG));

        DatalogRequest dr = null;

        boolean forwarded = false;

        if (null != datalogHeader) {
            byte[] bytes = OrderPreservingBase64.decode(datalogHeader.getBytes(Charsets.US_ASCII));

            // When a pre-shared key is configured the header payload is wrapped with it.
            if (null != datalogPSK) {
                bytes = CryptoUtils.unwrap(datalogPSK, bytes);
            }

            if (null == bytes) {
                throw new IOException("Invalid Datalog header.");
            }

            TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory());

            try {
                dr = new DatalogRequest();
                deser.deserialize(dr, bytes);
            } catch (TException te) {
                // NOTE(review): the TException cause is dropped here, unlike the
                // serialization path below which wraps it — confirm intended.
                throw new IOException();
            }

            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_ID, new String(
                    OrderPreservingBase64.decode(dr.getId().getBytes(Charsets.US_ASCII)), Charsets.UTF_8));
            labels.put(SensisionConstants.SENSISION_LABEL_TYPE, dr.getType());
            Sensision.update(SensisionConstants.CLASS_WARP_DATALOG_REQUESTS_RECEIVED, labels, 1);

            forwarded = true;
        }

        //
        // Loop over the input lines.
        // Each has the following format:
        //
        // class{labels}{attributes}
        //

        // A forwarded datalog request carries its own token; otherwise use the header token.
        String token = null != dr ? dr.getToken()
                : request.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TOKENX));

        WriteToken wtoken;

        try {
            wtoken = Tokens.extractWriteToken(token);
        } catch (WarpScriptException ee) {
            throw new IOException(ee);
        }

        String application = wtoken.getAppName();
        String producer = Tokens.getUUID(wtoken.getProducerId());
        String owner = Tokens.getUUID(wtoken.getOwnerId());

        if (null == producer || null == owner) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN, "Invalid token.");
            return;
        }

        //
        // Determine if content is gzipped
        //
        boolean gzipped = false;

        if (null != request.getHeader("Content-Type")
                && "application/gzip".equals(request.getHeader("Content-Type"))) {
            gzipped = true;
        }

        BufferedReader br = null;

        if (gzipped) {
            GZIPInputStream is = new GZIPInputStream(request.getInputStream());
            br = new BufferedReader(new InputStreamReader(is));
        } else {
            br = request.getReader();
        }

        File loggingFile = null;
        PrintWriter loggingWriter = null;

        //
        // Open the logging file if logging is enabled
        //
        if (null != loggingDir) {
            // Reuse the forwarded request's timestamp so replays keep the original ordering.
            long nanos = null != dr ? dr.getTimestamp() : TimeSource.getNanoTime();
            StringBuilder sb = new StringBuilder();
            sb.append(Long.toHexString(nanos));
            // Left-pad the hex timestamp to 16 characters so file names sort chronologically.
            sb.insert(0, "0000000000000000", 0, 16 - sb.length());
            sb.append("-");
            if (null != dr) {
                sb.append(dr.getId());
            } else {
                sb.append(datalogId);
            }
            sb.append("-");
            sb.append(dtf.print(nanos / 1000000L));
            sb.append(Long.toString(1000000L + (nanos % 1000000L)).substring(1));
            sb.append("Z");

            // Locally-originated request: build a DatalogRequest describing it.
            if (null == dr) {
                dr = new DatalogRequest();
                dr.setTimestamp(nanos);
                dr.setType(Constants.DATALOG_META);
                dr.setId(datalogId);
                dr.setToken(token);
            }

            // Log unless this is a forwarded request and forwarded logging is disabled.
            if (null != dr && (!forwarded || (forwarded && this.logforwarded))) {
                //
                // Serialize the request
                //
                TSerializer ser = new TSerializer(new TCompactProtocol.Factory());
                byte[] encoded;

                try {
                    encoded = ser.serialize(dr);
                } catch (TException te) {
                    throw new IOException(te);
                }

                if (null != this.datalogPSK) {
                    encoded = CryptoUtils.wrap(this.datalogPSK, encoded);
                }

                encoded = OrderPreservingBase64.encode(encoded);

                loggingFile = new File(loggingDir, sb.toString());
                loggingWriter = new PrintWriter(new FileWriterWithEncoding(loggingFile, Charsets.UTF_8));

                //
                // Write request
                //
                loggingWriter.println(new String(encoded, Charsets.US_ASCII));
            }
        }

        try {
            //
            // Loop on all lines
            //
            while (true) {
                String line = br.readLine();

                if (null == line) {
                    break;
                }

                // Ignore blank lines
                if ("".equals(line)) {
                    continue;
                }

                // Ignore comments
                if ('#' == line.charAt(0)) {
                    continue;
                }

                Metadata metadata = MetadataUtils.parseMetadata(line);

                // Add labels from the WriteToken if they exist
                if (wtoken.getLabelsSize() > 0) {
                    metadata.getLabels().putAll(wtoken.getLabels());
                }

                //
                // Force owner/producer
                //
                metadata.getLabels().put(Constants.PRODUCER_LABEL, producer);
                metadata.getLabels().put(Constants.OWNER_LABEL, owner);

                if (null != application) {
                    metadata.getLabels().put(Constants.APPLICATION_LABEL, application);
                } else {
                    // remove application label
                    metadata.getLabels().remove(Constants.APPLICATION_LABEL);
                }

                if (!MetadataUtils.validateMetadata(metadata)) {
                    response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid metadata " + line);
                    return;
                }

                metadata.setSource(Configuration.INGRESS_METADATA_UPDATE_ENDPOINT);
                this.directoryClient.register(metadata);

                //
                // Write the line last, so we do not write lines which triggered exceptions
                //
                if (null != loggingWriter) {
                    loggingWriter.println(line);
                }
            }
        } finally {
            // Finalize the datalog file: record metrics, close it, and rename it so the
            // DatalogForwarder picks it up (the suffix marks the file as complete).
            if (null != loggingWriter) {
                Map<String, String> labels = new HashMap<String, String>();
                labels.put(SensisionConstants.SENSISION_LABEL_ID, new String(
                        OrderPreservingBase64.decode(dr.getId().getBytes(Charsets.US_ASCII)), Charsets.UTF_8));
                labels.put(SensisionConstants.SENSISION_LABEL_TYPE, dr.getType());
                Sensision.update(SensisionConstants.CLASS_WARP_DATALOG_REQUESTS_LOGGED, labels, 1);

                loggingWriter.close();
                loggingFile.renameTo(new File(loggingFile.getAbsolutePath() + DatalogForwarder.DATALOG_SUFFIX));
            }
        }

        response.setStatus(HttpServletResponse.SC_OK);
    } catch (Exception e) {
        if (!response.isCommitted()) {
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
            return;
        }
    }
}
From source file: io.warp10.standalone.StandaloneMemoryStore.java
License: Apache License
private void load(String path) throws IOException { long nano = System.nanoTime(); int gts = 0;/*from ww w.ja v a 2 s . c om*/ long bytes = 0L; Configuration conf = new Configuration(); conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()); conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName()); BytesWritable key = new BytesWritable(); BytesWritable value = new BytesWritable(); TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory()); SequenceFile.Reader.Option optPath = SequenceFile.Reader.file(new Path(path)); SequenceFile.Reader reader = null; try { reader = new SequenceFile.Reader(conf, optPath); System.out.println("Loading '" + path + "' back in memory."); while (reader.next(key, value)) { gts++; GTSWrapper wrapper = new GTSWrapper(); deserializer.deserialize(wrapper, key.copyBytes()); GTSEncoder encoder = new GTSEncoder(0L, null, value.copyBytes()); encoder.setCount(wrapper.getCount()); bytes += value.getLength() + key.getLength(); encoder.safeSetMetadata(wrapper.getMetadata()); store(encoder); if (null != this.directoryClient) { this.directoryClient.register(wrapper.getMetadata()); } } } catch (FileNotFoundException fnfe) { System.err.println("File '" + path + "' was not found, skipping."); return; } catch (IOException ioe) { throw ioe; } catch (Exception e) { throw new IOException(e); } reader.close(); nano = System.nanoTime() - nano; System.out.println("Loaded " + gts + " GTS (" + bytes + " bytes) in " + (nano / 1000000.0D) + " ms."); }
From source file: io.warp10.udf.TUNWRAP.java
License: Apache License
/**
 * Unwraps one or more serialized GTSWrappers from the stack into per-type GTS maps.
 *
 * <p>Stack inputs (top first): an optional collection of Long ticks to keep, then a
 * String (OPB64-encoded wrapper), a byte array (raw wrapper), or a list of those.
 * For each input, pushes a map with keys "LONG", "BOOLEAN", "STRING" and "DOUBLE",
 * each holding the GTS of the values of that type. A single input yields the map
 * itself; a list input yields a list of maps.
 *
 * @param stack the WarpScript stack to operate on
 * @return the stack
 * @throws WarpScriptException if the operand types are wrong or unwrapping fails
 */
@Override
public Object apply(WarpScriptStack stack) throws WarpScriptException {
    Object top = stack.pop();

    Set<Object> ticks = new HashSet<Object>();

    // If the top of the stack is a collection made solely of Longs, treat it as the
    // set of timestamps to retain and pop the actual wrapper operand beneath it.
    if (top instanceof Collection) {
        boolean noticks = false;
        for (Object elt : (Collection) top) {
            if (!(elt instanceof Long)) {
                noticks = true;
            }
        }
        if (!noticks) {
            ticks.addAll((Collection<Object>) top);
            top = stack.pop();
        }
    }

    if (!(top instanceof String) && !(top instanceof byte[]) && !(top instanceof List)) {
        throw new WarpScriptException(getName() + " operates on a string or byte array or a list thereof.");
    }

    // Normalize the operand into a list of String/byte[] inputs.
    List<Object> inputs = new ArrayList<Object>();

    if (top instanceof String || top instanceof byte[]) {
        inputs.add(top);
    } else {
        for (Object o : (List) top) {
            if (!(o instanceof String) && !(o instanceof byte[])) {
                throw new WarpScriptException(
                        getName() + " operates on a string or byte array or a list thereof.");
            }
            inputs.add(o);
        }
    }

    List<Object> outputs = new ArrayList<Object>();

    for (Object s : inputs) {
        // Strings are OrderPreservingBase64-encoded; byte arrays are used as-is.
        byte[] bytes = s instanceof String
                ? OrderPreservingBase64.decode(s.toString().getBytes(Charsets.US_ASCII))
                : (byte[]) s;

        TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory());

        try {
            GTSWrapper wrapper = new GTSWrapper();
            deser.deserialize(wrapper, bytes);

            GTSDecoder decoder = GTSWrapperHelper.fromGTSWrapperToGTSDecoder(wrapper);

            // One GTS per value type: [0]=LONG, [1]=BOOLEAN, [2]=STRING, [3]=DOUBLE.
            GeoTimeSerie[] series = new GeoTimeSerie[4];

            for (int i = 0; i < series.length; i++) {
                // Use a heuristic and consider a hint of 25% of the wrapper count
                series[i] = new GeoTimeSerie(wrapper.getLastbucket(), (int) wrapper.getBucketcount(),
                        wrapper.getBucketspan(), (int) wrapper.getCount());
                series[i].setMetadata(decoder.getMetadata());
            }

            GeoTimeSerie gts = null;

            while (decoder.next()) {
                long timestamp = decoder.getTimestamp();

                // Skip ticks not in the requested set (empty set means keep everything).
                if (!ticks.isEmpty() && !ticks.contains(timestamp)) {
                    continue;
                }

                long location = decoder.getLocation();
                long elevation = decoder.getElevation();
                Object value = decoder.getValue();

                // Dispatch the datapoint to the GTS matching its value type.
                if (value instanceof Long) {
                    gts = series[0];
                } else if (value instanceof Boolean) {
                    gts = series[1];
                } else if (value instanceof String) {
                    gts = series[2];
                } else {
                    gts = series[3];
                }

                GTSHelper.setValue(gts, timestamp, location, elevation, value, false);
            }

            Map<String, GeoTimeSerie> typedSeries = new HashMap<String, GeoTimeSerie>();

            //
            // Shrink the series
            //
            for (int i = 0; i < series.length; i++) {
                GTSHelper.shrink(series[i]);
            }

            typedSeries.put("LONG", series[0]);
            typedSeries.put("BOOLEAN", series[1]);
            typedSeries.put("STRING", series[2]);
            typedSeries.put("DOUBLE", series[3]);

            outputs.add(typedSeries);
        } catch (TException te) {
            throw new WarpScriptException(getName() + " failed to unwrap GTS.");
        }
    }

    // A list input yields a list of maps; a single input yields the map itself.
    if (!(top instanceof List)) {
        stack.push(outputs.get(0));
    } else {
        stack.push(outputs);
    }

    return stack;
}
From source file: it.polimi.hegira.adapters.cassandra.Cassandra.java
License: Apache License
@Override protected AbstractDatabase fromMyModel(Metamodel mm) { log.debug(Thread.currentThread().getName() + " Cassandra consumer started "); //Thrift Deserializer TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory()); //retrieve thread number int thread_id = 0; if (TWTs_NO != 0) { thread_id = (int) (Thread.currentThread().getId() % TWTs_NO); }//w w w . j a v a2 s. co m //instantiate the cassandra transformer //the consistency level is not needed. Entity inserted with eventual consistency CassandraTransformer transformer = new CassandraTransformer(); //instantiate the TableManager TablesManager tablesManager = TablesManager.getTablesManager(); while (true) { log.debug(Thread.currentThread().getName() + " Extracting from the taskQueue" + thread_id + " TWTs_NO: " + TWTs_NO); try { //extract from the task queue Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery(); if (delivery != null) { //deserialize and retrieve the metamodel Metamodel metaModel = new Metamodel(); deserializer.deserialize(metaModel, delivery.getBody()); //retrieve the Cassandra Model CassandraModel cassandraModel = transformer.fromMyModel(metaModel); //retrieve the table and tries perform the insert try { tablesManager.getTable(cassandraModel.getTable()).insert(cassandraModel); } catch (ConnectException ex) { log.error(Thread.currentThread().getName() + " - Not able to connect to Cassandra", ex); //nack taskQueues.get(thread_id).sendNack(delivery); log.info("Sending Nack!! for entity(/ies)"); } catch (ClassNotFoundException ex) { log.error(Thread.currentThread().getName() + " - Error in during the insertion -", ex); //nack taskQueues.get(thread_id).sendNack(delivery); log.info("Sending Nack!! 
for entity(/ies)"); } //send ack taskQueues.get(thread_id).sendAck(delivery); } else { log.debug(Thread.currentThread().getName() + " - The queue " + TaskQueue.getDefaultTaskQueueName() + " is empty"); return null; } } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException ex) { log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue " + TaskQueue.getDefaultTaskQueueName(), ex); } catch (TException ex) { log.error(Thread.currentThread().getName() + " - Error deserializing message ", ex); } catch (QueueException ex) { log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue " + TaskQueue.getDefaultTaskQueueName(), ex); } } }
From source file: it.polimi.hegira.adapters.cassandra.CassandraAdapterTest.java
License: Apache License
/**
 * This test assumes the read consistency is set to eventual.
 *
 * <p>Seeds two Cassandra tables (players, users) with two rows each, runs the
 * reader, captures the four serialized Metamodel messages published to the
 * mocked task queue, deserializes them, and checks keys, partition groups,
 * column names, values and value types. Both tables are dropped at the end.
 */
@Test
public void toMyModelTest() {
    TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());

    //
    // Connects to the db and creates two tables with some sample data for testing purposes
    // The tables will be dropped at the end of the test
    //
    String hostname = "127.0.0.1";
    Cluster.Builder clusterBuilder = Cluster.builder().addContactPoint(hostname);
    Cluster cluster = clusterBuilder.build();
    session = cluster.connect("test");

    String firstTableName = "players";
    String secondTableName = "users";

    session.execute("CREATE TABLE IF NOT EXISTS " + firstTableName + " ( " + primaryKeyName
            + " varchar PRIMARY KEY," + "goal int," + "teams list<varchar> );");
    session.execute("CREATE TABLE IF NOT EXISTS " + secondTableName + " ( " + primaryKeyName
            + " varchar PRIMARY KEY," + "age int," + "contacts map<varchar,varchar> );");

    List<String> fakeList1 = new ArrayList<String>();
    fakeList1.add("Real Madrid");
    fakeList1.add("Napoli");
    List<String> fakeList2 = new ArrayList<String>();
    fakeList2.add("Manchester United");
    fakeList2.add("Real Madrid");

    Map<String, String> fakeMap1 = new HashMap<String, String>();
    fakeMap1.put("Andrea", "andrea@gmail.com");
    fakeMap1.put("Andre", "andre@gmail.com");
    Map<String, String> fakeMap2 = new HashMap<String, String>();
    fakeMap2.put("Luca", "luca@gmail.com");
    fakeMap2.put("leo", "leo@gmail.com");

    Statement insert1 = QueryBuilder.insertInto(firstTableName).values(new String[] { "id", "goal", "teams" },
            new Object[] { "Callejon", 9, fakeList1 });
    session.execute(insert1);
    Statement insert2 = QueryBuilder.insertInto(firstTableName).values(new String[] { "id", "goal", "teams" },
            new Object[] { "Ronaldo", 30, fakeList2 });
    session.execute(insert2);
    Statement insert3 = QueryBuilder.insertInto(secondTableName)
            .values(new String[] { "id", "age", "contacts" }, new Object[] { "Andrea", 22, fakeMap1 });
    session.execute(insert3);
    Statement insert4 = QueryBuilder.insertInto(secondTableName)
            .values(new String[] { "id", "age", "contacts" }, new Object[] { "Leo", 1, fakeMap2 });
    session.execute(insert4);

    ArgumentCaptor<byte[]> serializedRow = ArgumentCaptor.forClass(byte[].class);

    reader.toMyModel(null);

    // NOTE(review): the assertions below assume a fixed publish order —
    // capture 0=Leo, 1=Andrea, 2=Ronaldo, 3=Callejon. Verify this ordering is
    // guaranteed by the reader and not coincidental.
    try {
        Mockito.verify(mockedTaskQueue, Mockito.times(4)).publish(serializedRow.capture());
    } catch (QueueException e) {
        e.printStackTrace();
    }

    //
    // check user 1
    //
    Metamodel resultUser1 = new Metamodel();
    try {
        deserializer.deserialize(resultUser1, serializedRow.getAllValues().get(0));
    } catch (TException e) {
        e.printStackTrace();
    }
    assertEquals("Leo", resultUser1.getRowKey());
    assertEquals("@users#Leo", resultUser1.getPartitionGroup());
    assertEquals(resultUser1.getColumns().get("users").size(), 2);
    assertEquals(1, resultUser1.getColumnFamilies().size());
    assertEquals("users", resultUser1.getColumnFamilies().get(0));
    Column userAge = resultUser1.getColumns().get("users").get(0);
    Column contactsUser = resultUser1.getColumns().get("users").get(1);
    assertEquals("age", userAge.getColumnName());
    assertEquals("contacts", contactsUser.getColumnName());
    try {
        assertEquals(1, DefaultSerializer.deserialize(userAge.getColumnValue()));
        assertEquals(fakeMap2, DefaultSerializer.deserialize(contactsUser.getColumnValue()));
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    assertEquals("Integer", userAge.getColumnValueType());
    assertEquals("Map<String,String>", contactsUser.getColumnValueType());
    assertEquals(false, userAge.isIndexable());
    assertEquals(false, contactsUser.isIndexable());

    //
    // check user 2
    //
    Metamodel resultUser2 = new Metamodel();
    try {
        deserializer.deserialize(resultUser2, serializedRow.getAllValues().get(1));
    } catch (TException e) {
        e.printStackTrace();
    }
    assertEquals("Andrea", resultUser2.getRowKey());
    assertEquals("@users#Andrea", resultUser2.getPartitionGroup());
    assertEquals(resultUser2.getColumns().get("users").size(), 2);
    Column userAge2 = resultUser2.getColumns().get("users").get(0);
    Column contactsUser2 = resultUser2.getColumns().get("users").get(1);
    assertEquals("age", userAge2.getColumnName());
    assertEquals("contacts", contactsUser2.getColumnName());
    try {
        assertEquals(22, DefaultSerializer.deserialize(userAge2.getColumnValue()));
        assertEquals(fakeMap1, DefaultSerializer.deserialize(contactsUser2.getColumnValue()));
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    assertEquals("Integer", userAge2.getColumnValueType());
    assertEquals("Map<String,String>", contactsUser2.getColumnValueType());
    assertEquals(false, userAge2.isIndexable());
    assertEquals(false, contactsUser2.isIndexable());

    //
    // check players row 1
    //
    Metamodel resultPlayer1 = new Metamodel();
    try {
        deserializer.deserialize(resultPlayer1, serializedRow.getAllValues().get(3));
    } catch (TException e) {
        e.printStackTrace();
    }
    assertEquals("Callejon", resultPlayer1.getRowKey());
    assertEquals("@players#Callejon", resultPlayer1.getPartitionGroup());
    assertEquals(resultPlayer1.getColumns().get("players").size(), 2);
    Column resultCol = resultPlayer1.getColumns().get("players").get(0);
    Column listResult = resultPlayer1.getColumns().get("players").get(1);
    assertEquals("goal", resultCol.getColumnName());
    assertEquals("teams", listResult.getColumnName());
    try {
        assertEquals(9, DefaultSerializer.deserialize(resultCol.getColumnValue()));
        assertEquals(fakeList1, DefaultSerializer.deserialize(listResult.getColumnValue()));
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    assertEquals("Integer", resultCol.getColumnValueType());
    assertEquals("List<String>", listResult.getColumnValueType());
    assertEquals(false, resultCol.isIndexable());
    assertEquals(false, listResult.isIndexable());

    //
    // check players row 2
    //
    Metamodel resultPlayer2 = new Metamodel();
    try {
        deserializer.deserialize(resultPlayer2, serializedRow.getAllValues().get(2));
    } catch (TException e) {
        e.printStackTrace();
    }
    assertEquals("Ronaldo", resultPlayer2.getRowKey());
    assertEquals("@players#Ronaldo", resultPlayer2.getPartitionGroup());
    assertEquals(resultPlayer2.getColumns().get("players").size(), 2);
    Column resultCol2 = resultPlayer2.getColumns().get("players").get(0);
    Column listResult2 = resultPlayer2.getColumns().get("players").get(1);
    assertEquals("goal", resultCol2.getColumnName());
    assertEquals("teams", listResult2.getColumnName());
    try {
        assertEquals(30, DefaultSerializer.deserialize(resultCol2.getColumnValue()));
        assertEquals(fakeList2, DefaultSerializer.deserialize(listResult2.getColumnValue()));
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    assertEquals("Integer", resultCol2.getColumnValueType());
    assertEquals("List<String>", listResult2.getColumnValueType());
    assertEquals(false, resultCol2.isIndexable());
    assertEquals(false, listResult2.isIndexable());

    //
    // drops the tables
    //
    session.execute("DROP TABLE " + firstTableName);
    session.execute("DROP TABLE " + secondTableName);
}
From source file: it.polimi.hegira.adapters.cassandra.CassandraAdapterTest.java
License: Apache License
@Test public void fromMyModel() { writer.fromMyModel(null);//from w w w . jav a 2 s. c o m ArgumentCaptor<byte[]> serializedRow = ArgumentCaptor.forClass(byte[].class); TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory()); reader.toMyModel(null); try { Mockito.verify(mockedTaskQueue, Mockito.times(5)).publish(serializedRow.capture()); } catch (QueueException e) { // TODO Auto-generated catch block e.printStackTrace(); } Metamodel result = new Metamodel(); try { deserializer.deserialize(result, serializedRow.getAllValues().get(4)); } catch (TException e) { // TODO Auto-generated catch block e.printStackTrace(); } assertEquals("ronaldo", result.getRowKey()); assertEquals("@players#ronaldo", result.getPartitionGroup()); assertEquals(result.getColumns().get("players").size(), 2); Column resultCol = result.getColumns().get("players").get(0); Column set = result.getColumns().get("players").get(1); assertEquals("goal", resultCol.getColumnName()); assertEquals("mates", set.getColumnName()); try { assertEquals(30, DefaultSerializer.deserialize(resultCol.getColumnValue())); assertEquals(fakeSet, DefaultSerializer.deserialize(set.getColumnValue())); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } assertEquals("Integer", resultCol.getColumnValueType()); assertEquals("Set<String>", set.getColumnValueType()); assertEquals(false, resultCol.isIndexable()); assertEquals(false, set.isIndexable()); session.execute("DROP TABLE players"); }
From source file: it.polimi.hegira.adapters.datastore.Datastore.java
License: Apache License
@Override protected AbstractDatabase fromMyModel(Metamodel mm) { // TWC/* ww w . jav a2s.c om*/ //log.debug(Thread.currentThread().getName()+" Hi I'm the GAE consumer!"); List<Entity> batch = new ArrayList<Entity>(); TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory()); long k = 0; int thread_id = 0; if (TWTs_NO != 0) thread_id = (int) (Thread.currentThread().getId() % TWTs_NO); while (true) { log.debug(Thread.currentThread().getName() + " Extracting from the taskQueue" + thread_id + " TWTs_NO: " + TWTs_NO); try { Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery(2000); if (delivery != null) { Metamodel myModel = new Metamodel(); deserializer.deserialize(myModel, delivery.getBody()); DatastoreTransformer dt = new DatastoreTransformer(connectionList.get(thread_id).ds); DatastoreModel fromMyModel = dt.fromMyModel(myModel); batch.add(fromMyModel.getEntity()); batch.add(fromMyModel.getFictitiousEntity()); taskQueues.get(thread_id).sendAck(delivery.getEnvelope().getDeliveryTag()); k++; if (k % 100 == 0) { putBatch(batch); log.debug(Thread.currentThread().getName() + " ===>100 entities. putting normal batch"); batch = new ArrayList<Entity>(); } else { if (k > 0) { //log.debug(Thread.currentThread().getName()+" ===>Nothing in the queue for me!"); putBatch(batch); log.debug(Thread.currentThread().getName() + " ===>less than 100 entities. putting short batch"); batch = new ArrayList<Entity>(); k = 0; } } } } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) { log.error("Error consuming from the queue " + TaskQueue.getDefaultTaskQueueName(), e); } catch (TException e) { log.error("Errore deserializing", e); } catch (QueueException e) { log.error("Couldn't send the ack to the queue " + TaskQueue.getDefaultTaskQueueName(), e); } } }
From source file: it.polimi.hegira.adapters.tables.Tables.java
License: Apache License
/**
 * Consumer loop translating Metamodel messages from the task queue into
 * Azure Table entities.
 *
 * <p>Each delivery is Thrift-deserialized, transformed into one or more
 * DynamicTableEntity objects, and inserted into its (created-on-demand) table.
 * Failures nack the delivery and terminate the loop; successes ack it.
 *
 * @param mm unused here; consumption is driven by the task queue
 * @return null when table creation or an entity insert fails; otherwise loops forever
 */
@Override
protected AbstractDatabase fromMyModel(Metamodel mm) {
    // TWC
    log.debug(Thread.currentThread().getName() + " Hi I'm the AZURE consumer!");

    // Instantiate the Thrift Deserializer
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    int thread_id = (int) (Thread.currentThread().getId() % TWTs_NO);

    while (true) {
        try {
            log.debug(Thread.currentThread().getName() + " - getting taskQueue with id: " + thread_id);
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery();
            if (delivery != null) {
                Metamodel myModel = new Metamodel();
                deserializer.deserialize(myModel, delivery.getBody());

                AzureTablesTransformer att = new AzureTablesTransformer();
                AzureTablesModel fromMyModel = att.fromMyModel(myModel);
                List<DynamicTableEntity> entities = fromMyModel.getEntities();
                String tableName = fromMyModel.getTableName();

                CloudTable tbl = createTable(tableName);
                if (tbl == null) {
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info("Sending Nack!! for entity(/ies)");
                    // NOTE(review): returning null here kills this consumer thread on the
                    // first failed table creation — confirm that is intended.
                    return null;
                }
                for (DynamicTableEntity entity : entities) {
                    TableResult ie = insertEntity(tableName, entity);
                    if (ie == null) {
                        taskQueues.get(thread_id).sendNack(delivery);
                        log.info("Sending Nack!! for entity(/ies)");
                        // NOTE(review): same as above — a single failed insert stops the loop.
                        return null;
                    }
                    count++;
                    if (count % 2000 == 0)
                        log.debug(Thread.currentThread().getName() + " Inserted " + count + " entities");
                }
                taskQueues.get(thread_id).sendAck(delivery);
            } else {
                log.debug(Thread.currentThread().getName() + " - The queue "
                        + TaskQueue.getDefaultTaskQueueName() + " is empty");
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) {
            log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (TException e) {
            log.error(Thread.currentThread().getName() + " - Error deserializing message ", e);
        } catch (QueueException e) {
            log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (URISyntaxException e) {
            log.error(Thread.currentThread().getName() + " - Error operating on Azure Tables ", e);
        } catch (StorageException e) {
            log.error(Thread.currentThread().getName() + " - Error storing data on Azure Tables ", e);
        }
    }
}
From source file: it.polimi.hegira.adapters.tables.Tables.java
License: Apache License
/**
 * Partitioned variant of the Azure Tables consumer loop.
 *
 * <p>Unlike {@code fromMyModel}, the thread id is taken from the thread name,
 * insert failures are deliberately tolerated (nack/return are commented out),
 * and VDP counters are updated after each acked delivery. Never returns normally.
 *
 * @param mm unused here; consumption is driven by the task queue
 * @return never returns normally (infinite consumer loop)
 */
@Override
protected AbstractDatabase fromMyModelPartitioned(Metamodel mm) {
    // TWC
    // Instantiate the Thrift Deserializer
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    // In partitioned mode threads are named with their numeric id.
    int thread_id = Integer.parseInt(Thread.currentThread().getName());

    while (true) {
        try {
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery();
            if (delivery != null) {
                Metamodel myModel = new Metamodel();
                deserializer.deserialize(myModel, delivery.getBody());

                AzureTablesTransformer att = new AzureTablesTransformer();
                AzureTablesModel fromMyModel = att.fromMyModel(myModel);
                List<DynamicTableEntity> entities = fromMyModel.getEntities();
                String tableName = fromMyModel.getTableName();

                CloudTable tbl = createTable(tableName);
                if (tbl == null) {
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info(Thread.currentThread().getName() + " - Sending Nack!! for entity(/ies)");
                    // Deliberately does NOT return: partitioned mode keeps consuming
                    // even after a failed table creation.
                }
                for (DynamicTableEntity entity : entities) {
                    TableResult ie = insertEntity(tableName, entity);
                    if (ie == null) {
                        // Insert failures are deliberately ignored here (nack disabled)
                        // so a single bad entity does not stall the partition.
                    }
                    count++;
                }
                taskQueues.get(thread_id).sendAck(delivery);

                // incrementing the VDPsCounters
                updateVDPsCounters(myModel);
            } else {
                log.debug(Thread.currentThread().getName() + " - The queue "
                        + TaskQueue.getDefaultTaskQueueName() + " is empty");
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) {
            log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (TException e) {
            log.error(Thread.currentThread().getName() + " - Error deserializing message ", e);
        } catch (QueueException e) {
            log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (URISyntaxException e) {
            log.error(Thread.currentThread().getName() + " - Error operating on Azure Tables ", e);
        } catch (StorageException e) {
            log.error(Thread.currentThread().getName() + " - Error storing data on Azure Tables ", e);
        }
    }
}
From source file: kr.co.vcnc.haeinsa.thrift.TRowLocks.java
License: Apache License
public static TRowLock deserialize(byte[] rowLockBytes) throws IOException { if (rowLockBytes == null) { return new TRowLock(ROW_LOCK_VERSION, TRowLockState.STABLE, Long.MIN_VALUE); }//from w w w . ja va2s . co m TRowLock rowLock = new TRowLock(); TDeserializer deserializer = createDeserializer(); try { deserializer.deserialize(rowLock, rowLockBytes); return rowLock; } catch (TException e) { throw new IOException(e.getMessage(), e); } }