List of usage examples for org.apache.thrift TDeserializer deserialize
public void deserialize(TBase base, byte[] bytes) throws TException
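This method fills a caller-allocated TBase instance in place from a byte array; the no-argument TDeserializer constructor defaults to the binary protocol, and a protocol factory such as TCompactProtocol.Factory (used by the Warp 10 examples below) can be supplied instead. A minimal sketch of the pattern, where MyStruct is a placeholder for any Thrift-generated class:

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TException;
    import org.apache.thrift.protocol.TCompactProtocol;

    // Minimal sketch; MyStruct stands in for any Thrift-generated class.
    public static MyStruct fromBytes(byte[] bytes) throws TException {
        MyStruct target = new MyStruct();                 // caller allocates the target
        TDeserializer deserializer = new TDeserializer(); // binary protocol by default
        // For compact-protocol payloads:
        // TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());
        deserializer.deserialize(target, bytes);          // populates 'target' in place
        return target;
    }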
From source file: ezbake.deployer.impl.HdfsArtifactWriter.java
License: Apache License

@Override
public DeploymentArtifact readArtifact(DeploymentMetadata metadata) throws DeploymentException {
    Path artifactPath = createPath(ArtifactHelpers.getFqAppId(metadata), metadata.getVersion());
    DeploymentArtifact artifact = new DeploymentArtifact();
    try {
        if (!fs.exists(artifactPath)) {
            throw new DeploymentException("Could not find artifact at " + artifactPath.toString());
        }
        FSDataInputStream input = fs.open(artifactPath);
        byte[] artifactBytes = ByteStreams.toByteArray(input);
        input.close();
        TDeserializer deserializer = new TDeserializer();
        deserializer.deserialize(artifact, artifactBytes);
    } catch (IOException e) {
        logger.error("Could not read data from : " + artifactPath.toString(), e);
        throw new DeploymentException(e.getMessage());
    } catch (TException e) {
        logger.error("Could not deserialize artifact!", e);
        throw new DeploymentException(e.getMessage());
    }
    return artifact;
}
From source file: ezbake.deployer.impl.LocalFileArtifactWriter.java
License: Apache License

@Override
public DeploymentArtifact readArtifact(DeploymentMetadata metadata) throws DeploymentException {
    File artifactFile = new File(buildFilePath(metadata));
    DeploymentArtifact artifact = new DeploymentArtifact();
    if (artifactFile.exists()) {
        TDeserializer deserializer = new TDeserializer();
        try {
            byte[] fileBytes = FileUtils.readFileToByteArray(artifactFile);
            deserializer.deserialize(artifact, fileBytes);
        } catch (Exception ex) {
            log.error("Failed reading artifact", ex);
            throw new DeploymentException("Failed to read artifact file from disk." + ex.getMessage());
        }
    } else {
        log.warn("The artifact {} {} could not be loaded from disk. Only metadata is available",
                ArtifactHelpers.getAppId(metadata), ArtifactHelpers.getServiceId(metadata));
        artifact.setMetadata(metadata);
    }
    return artifact;
}
From source file: ezbake.thrift.serializer.BinarySerializer.java
License: Apache License

@Override
public <U extends TBase<?, ?>> U deserialize(Class<U> thriftClass, byte[] serializedObject) throws TException {
    final TDeserializer deserializer = new TDeserializer();
    try {
        final U object = thriftClass.newInstance();
        deserializer.deserialize(object, serializedObject);
        return object;
    } catch (final TException e) {
        throw e;
    } catch (final Exception e) {
        throw new TException(e);
    }
}
From source file: ezbake.thrift.ThriftUtils.java
License: Apache License

/**
 * Deserialize a thrift object
 *
 * @param type The type of object
 * @param bytes The bytes of the object
 * @param <T> The type of object
 * @return The object
 */
public static <T extends TBase<?, ?>> T deserialize(Class<T> type, byte[] bytes) throws TException {
    final TDeserializer deserializer = new TDeserializer();
    try {
        final T object = type.newInstance();
        deserializer.deserialize(object, bytes);
        return object;
    } catch (final Exception ex) {
        throw new TException(ex);
    }
}
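A hedged sketch of a call site for this helper; MyStruct is a hypothetical Thrift-generated class, and the bytes are assumed to have been produced by a TSerializer using the same (binary) protocol:

    // Hypothetical round trip through the helper above.
    MyStruct original = new MyStruct();
    byte[] bytes = new TSerializer().serialize(original);           // binary protocol on both sides
    MyStruct copy = ThriftUtils.deserialize(MyStruct.class, bytes); // copy.equals(original) afterwards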
From source file: ezbake.warehaus.tools.WarehausImport.java
License: Apache License

public void importTestData(ByteBuffer tArchive, EzSecurityToken security) throws TException {
    TDeserializer deserializer = new TDeserializer();
    TarInputStream tar = new TarInputStream(new ByteArrayInputStream(tArchive.array()));
    TarEntry entry;
    List<String> uriList = Lists.newArrayList();
    Map<String, VersionControl> parsed = Maps.newHashMap();
    Map<String, VersionControl> raw = Maps.newHashMap();
    Map<String, ColumnVisibility> visibilityMap = Maps.newHashMap();
    try {
        entry = tar.getNextEntry();
        while (entry != null) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            int value = tar.read();
            while (value > -1) {
                baos.write(value);
                value = tar.read();
            }
            ExportFile file = new ExportFile();
            deserializer.deserialize(file, baos.toByteArray());
            String uri = file.getData().getUri();
            uriList.add(uri);
            raw.put(uri, new VersionControl(ByteBuffer.wrap(file.getData().getRawData()), file.getName()));
            parsed.put(uri, new VersionControl(ByteBuffer.wrap(file.getData().getParsedData()), file.getName()));
            visibilityMap.put(uri, new ColumnVisibility(file.getVisibility().getFormalVisibility()));
            entry = tar.getNextEntry();
        }
        insertData(uriList, parsed, raw, visibilityMap, security);
    } catch (IOException e) {
        throw new TException(e);
    } finally {
        try {
            tar.close();
        } catch (IOException e) {
            throw new TException(e);
        }
    }
}
From source file: io.warp10.continuum.egress.EgressFetchHandler.java
License: Apache License

@Override
public void handle(String target, Request baseRequest, HttpServletRequest req, HttpServletResponse resp)
        throws IOException, ServletException {
    boolean fromArchive = false;
    boolean splitFetch = false;
    boolean writeTimestamp = false;

    if (Constants.API_ENDPOINT_FETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = false;
    } else if (Constants.API_ENDPOINT_AFETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = true;
    } else if (Constants.API_ENDPOINT_SFETCH.equals(target)) {
        baseRequest.setHandled(true);
        splitFetch = true;
    } else if (Constants.API_ENDPOINT_CHECK.equals(target)) {
        baseRequest.setHandled(true);
        resp.setStatus(HttpServletResponse.SC_OK);
        return;
    } else {
        return;
    }

    try {
        // Labels for Sensision
        Map<String, String> labels = new HashMap<String, String>();
        labels.put(SensisionConstants.SENSISION_LABEL_TYPE, target);

        //
        // Add CORS header
        //

        resp.setHeader("Access-Control-Allow-Origin", "*");

        String start = null;
        String stop = null;

        long now = Long.MIN_VALUE;
        long timespan = 0L;

        String nowParam = null;
        String timespanParam = null;
        String dedupParam = null;
        String showErrorsParam = null;

        if (splitFetch) {
            nowParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_NOW_HEADERX));
            timespanParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TIMESPAN_HEADERX));
            showErrorsParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_SHOW_ERRORS_HEADERX));
        } else {
            start = req.getParameter(Constants.HTTP_PARAM_START);
            stop = req.getParameter(Constants.HTTP_PARAM_STOP);
            nowParam = req.getParameter(Constants.HTTP_PARAM_NOW);
            timespanParam = req.getParameter(Constants.HTTP_PARAM_TIMESPAN);
            dedupParam = req.getParameter(Constants.HTTP_PARAM_DEDUP);
            showErrorsParam = req.getParameter(Constants.HTTP_PARAM_SHOW_ERRORS);
        }

        String maxDecoderLenParam = req.getParameter(Constants.HTTP_PARAM_MAXSIZE);
        int maxDecoderLen = null != maxDecoderLenParam ? Integer.parseInt(maxDecoderLenParam)
                : Constants.DEFAULT_PACKED_MAXSIZE;

        String suffix = req.getParameter(Constants.HTTP_PARAM_SUFFIX);
        if (null == suffix) {
            suffix = Constants.DEFAULT_PACKED_CLASS_SUFFIX;
        }

        boolean unpack = null != req.getParameter(Constants.HTTP_PARAM_UNPACK);

        long chunksize = Long.MAX_VALUE;
        if (null != req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE)) {
            chunksize = Long.parseLong(req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE));
        }
        if (chunksize <= 0) {
            throw new IOException("Invalid chunksize.");
        }

        boolean showErrors = null != showErrorsParam;
        boolean dedup = null != dedupParam && "true".equals(dedupParam);

        if (null != start && null != stop) {
            long tsstart = fmt.parseDateTime(start).getMillis() * Constants.TIME_UNITS_PER_MS;
            long tsstop = fmt.parseDateTime(stop).getMillis() * Constants.TIME_UNITS_PER_MS;
            if (tsstart < tsstop) {
                now = tsstop;
                timespan = tsstop - tsstart;
            } else {
                now = tsstart;
                timespan = tsstart - tsstop;
            }
        } else if (null != nowParam && null != timespanParam) {
            if ("now".equals(nowParam)) {
                now = TimeSource.getTime();
            } else {
                try {
                    now = Long.parseLong(nowParam);
                } catch (Exception e) {
                    now = fmt.parseDateTime(nowParam).getMillis() * Constants.TIME_UNITS_PER_MS;
                }
            }
            timespan = Long.parseLong(timespanParam);
        }

        if (Long.MIN_VALUE == now) {
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing now/timespan or start/stop parameters.");
            return;
        }

        String selector = splitFetch ? null : req.getParameter(Constants.HTTP_PARAM_SELECTOR);

        //
        // Extract token from header
        //

        String token = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TOKENX));

        // If token was not found in header, extract it from the 'token' parameter
        if (null == token && !splitFetch) {
            token = req.getParameter(Constants.HTTP_PARAM_TOKEN);
        }

        String fetchSig = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_FETCH_SIGNATURE));

        //
        // Check token signature if it was provided
        //

        boolean signed = false;

        if (splitFetch) {
            // Force showErrors
            showErrors = true;
            signed = true;
        }

        if (null != fetchSig) {
            if (null != fetchPSK) {
                String[] subelts = fetchSig.split(":");
                if (2 != subelts.length) {
                    throw new IOException("Invalid fetch signature.");
                }
                long nowts = System.currentTimeMillis();
                long sigts = new BigInteger(subelts[0], 16).longValue();
                long sighash = new BigInteger(subelts[1], 16).longValue();
                if (nowts - sigts > 10000L) {
                    throw new IOException("Fetch signature has expired.");
                }
                // Recompute hash of ts:token
                String tstoken = Long.toString(sigts) + ":" + token;
                long checkedhash = SipHashInline.hash24(fetchPSK, tstoken.getBytes(Charsets.ISO_8859_1));
                if (checkedhash != sighash) {
                    throw new IOException("Corrupted fetch signature");
                }
                signed = true;
            } else {
                throw new IOException("Fetch PreSharedKey is not set.");
            }
        }

        ReadToken rtoken = null;

        String format = splitFetch ? "wrapper" : req.getParameter(Constants.HTTP_PARAM_FORMAT);

        if (!splitFetch) {
            try {
                rtoken = Tokens.extractReadToken(token);
                if (rtoken.getHooksSize() > 0) {
                    throw new IOException("Tokens with hooks cannot be used for fetching data.");
                }
            } catch (WarpScriptException ee) {
                throw new IOException(ee);
            }
            if (null == rtoken) {
                resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Missing token.");
                return;
            }
        }

        boolean showAttr = "true".equals(req.getParameter(Constants.HTTP_PARAM_SHOWATTR));
        boolean sortMeta = "true".equals(req.getParameter(Constants.HTTP_PARAM_SORTMETA));

        //
        // Extract the class and labels selectors
        // The class selector and label selectors are supposed to have
        // values which use percent encoding, i.e. explicit percent encoding which
        // might have been re-encoded using percent encoding when passed as parameter
        //

        Set<Metadata> metadatas = new HashSet<Metadata>();
        List<Iterator<Metadata>> iterators = new ArrayList<Iterator<Metadata>>();

        if (!splitFetch) {
            if (null == selector) {
                throw new IOException("Missing '" + Constants.HTTP_PARAM_SELECTOR + "' parameter.");
            }

            String[] selectors = selector.split("\\s+");

            for (String sel : selectors) {
                Matcher m = SELECTOR_RE.matcher(sel);
                if (!m.matches()) {
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
                    return;
                }

                String classSelector = URLDecoder.decode(m.group(1), "UTF-8");
                String labelsSelection = m.group(2);

                Map<String, String> labelsSelectors;
                try {
                    labelsSelectors = GTSHelper.parseLabelsSelectors(labelsSelection);
                } catch (ParseException pe) {
                    throw new IOException(pe);
                }

                //
                // Force 'producer'/'owner'/'app' from token
                //

                labelsSelectors.remove(Constants.PRODUCER_LABEL);
                labelsSelectors.remove(Constants.OWNER_LABEL);
                labelsSelectors.remove(Constants.APPLICATION_LABEL);
                labelsSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));

                List<Metadata> metas = null;
                List<String> clsSels = new ArrayList<String>();
                List<Map<String, String>> lblsSels = new ArrayList<Map<String, String>>();
                clsSels.add(classSelector);
                lblsSels.add(labelsSelectors);

                try {
                    metas = directoryClient.find(clsSels, lblsSels);
                    metadatas.addAll(metas);
                } catch (Exception e) {
                    //
                    // If metadatas is not empty, create an iterator for it, then clear it
                    //
                    if (!metadatas.isEmpty()) {
                        iterators.add(metadatas.iterator());
                        metadatas.clear();
                    }
                    iterators.add(directoryClient.iterator(clsSels, lblsSels));
                }
            }
        } else {
            //
            // Add an iterator which reads splits from the request body
            //

            boolean gzipped = false;
            if (null != req.getHeader("Content-Type")
                    && "application/gzip".equals(req.getHeader("Content-Type"))) {
                gzipped = true;
            }

            BufferedReader br = null;
            if (gzipped) {
                GZIPInputStream is = new GZIPInputStream(req.getInputStream());
                br = new BufferedReader(new InputStreamReader(is));
            } else {
                br = req.getReader();
            }

            final BufferedReader fbr = br;

            MetadataIterator iterator = new MetadataIterator() {
                private List<Metadata> metadatas = new ArrayList<Metadata>();
                private boolean done = false;
                private String lasttoken = "";

                @Override
                public void close() throws Exception {
                    fbr.close();
                }

                @Override
                public Metadata next() {
                    if (!metadatas.isEmpty()) {
                        Metadata meta = metadatas.get(metadatas.size() - 1);
                        metadatas.remove(metadatas.size() - 1);
                        return meta;
                    } else {
                        if (hasNext()) {
                            return next();
                        } else {
                            throw new NoSuchElementException();
                        }
                    }
                }

                @Override
                public boolean hasNext() {
                    if (!metadatas.isEmpty()) {
                        return true;
                    }
                    if (done) {
                        return false;
                    }

                    String line = null;
                    try {
                        line = fbr.readLine();
                    } catch (IOException ioe) {
                        throw new RuntimeException(ioe);
                    }
                    if (null == line) {
                        done = true;
                        return false;
                    }

                    //
                    // Decode/Unwrap/Deserialize the split
                    //

                    byte[] data = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    if (null != fetchAES) {
                        data = CryptoUtils.unwrap(fetchAES, data);
                    }
                    if (null == data) {
                        throw new RuntimeException("Invalid wrapped content.");
                    }

                    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());
                    GTSSplit split = new GTSSplit();
                    try {
                        deserializer.deserialize(split, data);
                    } catch (TException te) {
                        throw new RuntimeException(te);
                    }

                    //
                    // Check the expiry
                    //

                    long instant = System.currentTimeMillis();
                    if (instant - split.getTimestamp() > maxSplitAge || instant > split.getExpiry()) {
                        throw new RuntimeException("Split has expired.");
                    }

                    this.metadatas.addAll(split.getMetadatas());

                    // We assume there was at least one metadata instance in the split!!!
                    return true;
                }
            };

            iterators.add(iterator);
        }

        List<Metadata> metas = new ArrayList<Metadata>();
        metas.addAll(metadatas);
        if (!metas.isEmpty()) {
            iterators.add(metas.iterator());
        }

        //
        // Loop over the iterators, storing the read metadata to a temporary file encrypted on disk
        // Data is encrypted using a onetime pad
        //

        final byte[] onetimepad = new byte[(int) Math.min(65537, System.currentTimeMillis() % 100000)];
        new Random().nextBytes(onetimepad);

        final File cache = File.createTempFile(
                Long.toHexString(System.currentTimeMillis()) + "-" + Long.toHexString(System.nanoTime()),
                ".dircache");
        cache.deleteOnExit();

        FileWriter writer = new FileWriter(cache);
        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

        int padidx = 0;

        for (Iterator<Metadata> itermeta : iterators) {
            try {
                while (itermeta.hasNext()) {
                    Metadata metadata = itermeta.next();
                    try {
                        byte[] bytes = serializer.serialize(metadata);
                        // Apply onetimepad
                        for (int i = 0; i < bytes.length; i++) {
                            bytes[i] = (byte) (bytes[i] ^ onetimepad[padidx++]);
                            if (padidx >= onetimepad.length) {
                                padidx = 0;
                            }
                        }
                        OrderPreservingBase64.encodeToWriter(bytes, writer);
                        writer.write('\n');
                    } catch (TException te) {
                    }
                }
                if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            } catch (Throwable t) {
                throw t;
            } finally {
                if (itermeta instanceof MetadataIterator) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            }
        }

        writer.close();

        //
        // Create an iterator based on the cache
        //

        MetadataIterator cacheiterator = new MetadataIterator() {
            BufferedReader reader = new BufferedReader(new FileReader(cache));
            private Metadata current = null;
            private boolean done = false;
            private TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());
            int padidx = 0;

            @Override
            public boolean hasNext() {
                if (done) {
                    return false;
                }
                if (null != current) {
                    return true;
                }
                try {
                    String line = reader.readLine();
                    if (null == line) {
                        done = true;
                        return false;
                    }
                    byte[] raw = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    // Apply one time pad
                    for (int i = 0; i < raw.length; i++) {
                        raw[i] = (byte) (raw[i] ^ onetimepad[padidx++]);
                        if (padidx >= onetimepad.length) {
                            padidx = 0;
                        }
                    }
                    Metadata metadata = new Metadata();
                    try {
                        deserializer.deserialize(metadata, raw);
                        this.current = metadata;
                        return true;
                    } catch (TException te) {
                        LOG.error("", te);
                    }
                } catch (IOException ioe) {
                    LOG.error("", ioe);
                }
                return false;
            }

            @Override
            public Metadata next() {
                if (null != this.current) {
                    Metadata metadata = this.current;
                    this.current = null;
                    return metadata;
                } else {
                    throw new NoSuchElementException();
                }
            }

            @Override
            public void close() throws Exception {
                this.reader.close();
                cache.delete();
            }
        };

        iterators.clear();
        iterators.add(cacheiterator);

        metas = new ArrayList<Metadata>();

        PrintWriter pw = resp.getWriter();

        AtomicReference<Metadata> lastMeta = new AtomicReference<Metadata>(null);
        AtomicLong lastCount = new AtomicLong(0L);

        long fetchtimespan = timespan;

        for (Iterator<Metadata> itermeta : iterators) {
            while (itermeta.hasNext()) {
                metas.add(itermeta.next());

                //
                // Access the data store every 'FETCH_BATCHSIZE' GTS or at the end of each iterator
                //

                if (metas.size() > FETCH_BATCHSIZE || !itermeta.hasNext()) {
                    try (GTSDecoderIterator iterrsc = storeClient.fetch(rtoken, metas, now, fetchtimespan,
                            fromArchive, writeTimestamp)) {
                        GTSDecoderIterator iter = iterrsc;

                        if (unpack) {
                            iter = new UnpackingGTSDecoderIterator(iter, suffix);
                            timespan = Long.MIN_VALUE + 1;
                        }

                        if ("text".equals(format)) {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("fulltext".equals(format)) {
                            textDump(pw, iter, now, timespan, true, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("raw".equals(format)) {
                            rawDump(pw, iter, dedup, signed, timespan, lastMeta, lastCount, sortMeta);
                        } else if ("wrapper".equals(format)) {
                            wrapperDump(pw, iter, dedup, signed, fetchPSK, timespan, lastMeta, lastCount);
                        } else if ("json".equals(format)) {
                            jsonDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount);
                        } else if ("tsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, false, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("fulltsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, true, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("pack".equals(format)) {
                            packedDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount,
                                    maxDecoderLen, suffix, chunksize, sortMeta);
                        } else if ("null".equals(format)) {
                            nullDump(iter);
                        } else {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        }
                    } catch (Throwable t) {
                        LOG.error("", t);
                        Sensision.update(SensisionConstants.CLASS_WARP_FETCH_ERRORS, Sensision.EMPTY_LABELS, 1);
                        if (showErrors) {
                            pw.println();
                            StringWriter sw = new StringWriter();
                            PrintWriter pw2 = new PrintWriter(sw);
                            t.printStackTrace(pw2);
                            pw2.close();
                            sw.flush();
                            String error = URLEncoder.encode(sw.toString(), "UTF-8");
                            pw.println(Constants.EGRESS_FETCH_ERROR_PREFIX + error);
                        }
                        throw new IOException(t);
                    } finally {
                        if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                            try {
                                ((MetadataIterator) itermeta).close();
                            } catch (Exception e) {
                            }
                        }
                    }

                    //
                    // Reset 'metas'
                    //

                    metas.clear();
                }
            }

            if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                try {
                    ((MetadataIterator) itermeta).close();
                } catch (Exception e) {
                }
            }
        }

        Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_FETCH_REQUESTS, labels, 1);
    } catch (Exception e) {
        if (!resp.isCommitted()) {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
            return;
        }
    }
}
From source file: io.warp10.continuum.egress.StreamingMetadataIterator.java
License: Apache License

private synchronized boolean hasNextInternal() throws Exception {
    //
    // If there is a pending Metadata, return true
    //

    if (null != metadata) {
        return true;
    }

    //
    // If we ran out of selectors, return false
    //
    // TODO(hbs): swap idx and urlidx. Add support for multiple selectors in query string

    if (idx >= classSelectors.size()) {
        return false;
    }

    if (null == reader) {
        if (urlidx >= urls.size()) {
            urlidx = 0;
            idx++;
            // Call us recursively
            return hasNext();
        }

        // Compute request signature
        long now = System.currentTimeMillis();

        // Rebuild selector
        StringBuilder selector = new StringBuilder();
        selector.append(WarpURLEncoder.encode(classSelectors.get(idx), "UTF-8"));
        selector.append("{");
        boolean first = true;
        for (Entry<String, String> entry : labelsSelectors.get(idx).entrySet()) {
            if (!first) {
                selector.append(",");
            }
            selector.append(entry.getKey());
            if (entry.getValue().startsWith("=")) {
                selector.append("=");
                selector.append(WarpURLEncoder.encode(entry.getValue().substring(1), "UTF-8"));
            } else if (entry.getValue().startsWith("~")) {
                selector.append("~");
                selector.append(WarpURLEncoder.encode(entry.getValue().substring(1), "UTF-8"));
            } else {
                selector.append("=");
                selector.append(WarpURLEncoder.encode(entry.getValue(), "UTF-8"));
            }
            first = false;
        }
        selector.append("}");

        String tssel = now + ":" + selector.toString();
        byte[] data = tssel.getBytes(Charsets.UTF_8);
        long hash = SipHashInline.hash24(SIPHASH_PSK[0], SIPHASH_PSK[1], data, 0, data.length);

        String signature = Long.toHexString(now) + ":" + Long.toHexString(hash);

        // Open connection
        String qs = Constants.HTTP_PARAM_SELECTOR + "=" + new String(
                OrderPreservingBase64.encode(selector.toString().getBytes(Charsets.UTF_8)), Charsets.US_ASCII);

        //URL url = new URL(urls.get(urlidx) + "?" + qs);
        URL url = urls.get(urlidx);

        conn = (HttpURLConnection) (this.noProxy ? url.openConnection(Proxy.NO_PROXY) : url.openConnection());
        conn.setRequestMethod("POST");
        conn.setChunkedStreamingMode(8192);
        conn.setRequestProperty(Constants.getHeader(Configuration.HTTP_HEADER_DIRECTORY_SIGNATURE), signature);
        conn.setDoInput(true);
        conn.setDoOutput(true);

        OutputStream out = conn.getOutputStream();
        out.write(qs.getBytes(Charsets.US_ASCII));
        out.flush();

        reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
    }

    //
    // Attempt to read the next line
    //

    String line = reader.readLine();

    if (null == line) {
        reader.close();
        conn.disconnect();
        reader = null;
        metadata = null;
        urlidx++;
        return hasNext();
    }

    //
    // Decode Metadata
    //

    byte[] bytes = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));

    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());
    Metadata meta = new Metadata();
    deserializer.deserialize(meta, bytes);

    metadata = meta;
    return true;
}
From source file: io.warp10.continuum.egress.ThriftDirectoryClient.java
License: Apache License

public Map<String, Object> statsHttp(List<String> classSelector, List<Map<String, String>> labelsSelectors)
        throws IOException {
    //
    // Extract the URLs we will use to retrieve the Metadata
    //

    // Set of already called remainders for the selected modulus
    Set<Integer> called = new HashSet<Integer>();

    long selectedmodulus = -1L;

    final List<URL> urls = new ArrayList<URL>();

    List<Entry<String, DirectoryService.Client>> servers = new ArrayList<Entry<String, DirectoryService.Client>>();

    synchronized (clientCacheMutex) {
        servers.addAll(clientCache.entrySet());
    }

    // Shuffle the list
    Collections.shuffle(servers);

    for (Entry<String, DirectoryService.Client> entry : servers) {
        //
        // Make sure the current entry has a streaming port defined
        //

        if (!streamingPorts.containsKey(entry.getKey())) {
            continue;
        }

        if (-1L == selectedmodulus) {
            selectedmodulus = modulus.get(entry.getKey());
        }

        // Make sure we use a common modulus
        if (modulus.get(entry.getKey()) != selectedmodulus) {
            continue;
        }

        // Skip client if we already called one with this remainder
        if (called.contains(remainder.get(entry.getKey()))) {
            continue;
        }

        //
        // Extract host and port
        //

        String host = hosts.get(entry.getKey());
        int port = streamingPorts.get(entry.getKey());

        URL url = new URL("http://" + host + ":" + port + Constants.API_ENDPOINT_DIRECTORY_STATS_INTERNAL);
        urls.add(url);

        // Track which remainders we already selected
        called.add(remainder.get(entry.getKey()));
    }

    final DirectoryStatsRequest request = new DirectoryStatsRequest();
    request.setTimestamp(System.currentTimeMillis());
    request.setClassSelector(classSelector);
    request.setLabelsSelectors(labelsSelectors);

    long hash = DirectoryUtil.computeHash(this.SIPHASH_PSK[0], this.SIPHASH_PSK[1], request);
    request.setHash(hash);

    List<Future<DirectoryStatsResponse>> responses = new ArrayList<Future<DirectoryStatsResponse>>();

    final AtomicBoolean transportException = new AtomicBoolean(false);

    TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

    byte[] bytes = null;
    try {
        bytes = OrderPreservingBase64.encode(serializer.serialize(request));
    } catch (TException te) {
        throw new IOException(te);
    }

    final byte[] encodedReq = bytes;

    synchronized (executorMutex) {
        for (URL urlx : urls) {
            final URL url = urlx;
            responses.add(executor.submit(new Callable<DirectoryStatsResponse>() {
                @Override
                public DirectoryStatsResponse call() throws Exception {
                    HttpURLConnection conn = null;
                    try {
                        conn = (HttpURLConnection) (noProxy ? url.openConnection(Proxy.NO_PROXY)
                                : url.openConnection());
                        conn.setDoOutput(true);
                        conn.setDoInput(true);
                        conn.setRequestMethod("POST");
                        conn.setChunkedStreamingMode(2048);
                        conn.connect();

                        OutputStream out = conn.getOutputStream();
                        out.write(encodedReq);
                        out.write('\r');
                        out.write('\n');
                        out.close();

                        BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));

                        DirectoryStatsResponse resp = new DirectoryStatsResponse();

                        try {
                            while (true) {
                                String line = reader.readLine();
                                if (null == line) {
                                    break;
                                }
                                byte[] data = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                                TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory());
                                deser.deserialize(resp, data);
                            }
                            reader.close();
                            reader = null;
                        } catch (IOException ioe) {
                            if (null != reader) {
                                try {
                                    reader.close();
                                } catch (Exception e) {
                                }
                            }
                            throw ioe;
                        }

                        return resp;
                    } finally {
                        if (null != conn) {
                            try {
                                conn.disconnect();
                            } catch (Exception e) {
                            }
                        }
                    }
                }
            }));
        }
    }

    //
    // Await for all requests to have completed, either successfully or not
    //

    int count = 0;

    while (count != responses.size()) {
        LockSupport.parkNanos(1000L);
        count = 0;
        for (Future<DirectoryStatsResponse> response : responses) {
            if (response.isDone()) {
                count++;
            }
        }
    }

    return mergeStatsResponses(responses);
}
From source file: io.warp10.continuum.geo.GeoDirectory.java
License: Apache License

/**
 * Force indexing of some data by fetching them and forwarding them onto the data topic.
 * This enables someone with a ReadToken to re-index historical data.
 */
private void doIndex(Request baseRequest, HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
    baseRequest.setHandled(true);

    //
    // Extract parameters
    //

    String token = request.getParameter(Constants.HTTP_PARAM_TOKEN);
    String[] selectors = request.getParameterValues(Constants.HTTP_PARAM_SELECTOR);

    if (null == selectors) {
        throw new IOException("Missing selector.");
    }
    if (selectors.length != 1) {
        throw new IOException("Can only specify a single selector per request.");
    }
    if (null == token) {
        throw new IOException("Missing token.");
    }

    //
    // A token can only be used if it has subscribed to GTS in this index
    //

    if (!this.subscriptions.containsKey(token)) {
        throw new IOException("The provided token does not have any current subscriptions in this index.");
    }

    //
    // INFO(hbs): this will trigger billing for everyone subscribing to GTS in this index, we consider this a marginal case
    //

    //
    // TODO(hbs): Issue a signed fetch request which will retrieve GTSWrappers (to be implemented in Fetch)
    //

    URL url = new URL(this.fetchEndpoint);

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    // doOutput must be enabled since the query string is written to the connection's output stream below
    conn.setDoOutput(true);
    conn.setDoInput(true);
    conn.setChunkedStreamingMode(8192);
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");

    long now = System.currentTimeMillis();

    StringBuilder sb = new StringBuilder(Long.toHexString(now));
    sb.append(":");
    byte[] content = (Long.toString(now) + ":" + token).getBytes(Charsets.ISO_8859_1);
    long hash = SipHashInline.hash24(this.SIPHASH_FETCH_PSK[0], this.SIPHASH_FETCH_PSK[1], content, 0,
            content.length);
    sb.append(Long.toHexString(hash));

    conn.setRequestProperty(Constants.getHeader(Configuration.HTTP_HEADER_FETCH_SIGNATURE), sb.toString());

    //
    // Build query string
    //
    // token
    // selector
    // format=wrapper
    // now + timespan, or start/stop
    //

    sb = new StringBuilder();
    sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_TOKEN, "UTF-8"));
    sb.append("=");
    sb.append(WarpURLEncoder.encode(token, "UTF-8"));
    sb.append("&");
    sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_SELECTOR, "UTF-8"));
    sb.append("=");
    sb.append(WarpURLEncoder.encode(selectors[0], "UTF-8"));
    sb.append("&");
    sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_FORMAT, "UTF-8"));
    sb.append("=");
    sb.append(WarpURLEncoder.encode("wrapper", "UTF-8"));

    if (null != request.getParameter(Constants.HTTP_PARAM_NOW)
            && null != request.getParameter(Constants.HTTP_PARAM_TIMESPAN)) {
        sb.append("&");
        sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_NOW, "UTF-8"));
        sb.append("=");
        sb.append(WarpURLEncoder.encode(request.getParameter(Constants.HTTP_PARAM_NOW), "UTF-8"));
        sb.append("&");
        sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_TIMESPAN, "UTF-8"));
        sb.append("=");
        sb.append(WarpURLEncoder.encode(request.getParameter(Constants.HTTP_PARAM_TIMESPAN), "UTF-8"));
    } else if (null != request.getParameter(Constants.HTTP_PARAM_START)
            && null != request.getParameter(Constants.HTTP_PARAM_STOP)) {
        sb.append("&");
        sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_START, "UTF-8"));
        sb.append("=");
        sb.append(WarpURLEncoder.encode(request.getParameter(Constants.HTTP_PARAM_START), "UTF-8"));
        sb.append("&");
        sb.append(WarpURLEncoder.encode(Constants.HTTP_PARAM_STOP, "UTF-8"));
        sb.append("=");
        sb.append(WarpURLEncoder.encode(request.getParameter(Constants.HTTP_PARAM_STOP), "UTF-8"));
    } else {
        throw new IOException("Missing parameters " + Constants.HTTP_PARAM_START + "/" + Constants.HTTP_PARAM_STOP
                + " or " + Constants.HTTP_PARAM_NOW + "/" + Constants.HTTP_PARAM_TIMESPAN);
    }

    byte[] postDataBytes = sb.toString().getBytes(Charsets.UTF_8);

    conn.setRequestProperty("Content-Length", Long.toString(postDataBytes.length));
    conn.getOutputStream().write(postDataBytes);

    InputStream in = conn.getInputStream();
    BufferedReader br = new BufferedReader(new InputStreamReader(in));

    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

    long total = 0L;

    while (true) {
        String line = br.readLine();
        if (null == line) {
            break;
        }

        //
        // Extract MAC
        //

        byte[] data = line.getBytes(Charsets.US_ASCII);
        long mac = Longs.fromByteArray(Hex.decode(new String(data, 0, 16, Charsets.US_ASCII)));

        //
        // Extract content and decode it
        //

        data = OrderPreservingBase64.decode(data, 16, data.length - 16);

        //
        // Compute hash
        //

        hash = SipHashInline.hash24(this.SIPHASH_FETCH_PSK[0], this.SIPHASH_FETCH_PSK[1], data, 0, data.length);

        //
        // Ignore invalid GTSWrapper
        //

        if (hash != mac) {
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_GEODIR, this.name);
            Sensision.update(SensisionConstants.SENSISION_CLASS_GEODIR_FETCH_INVALIDMACS, labels, 1);
            continue;
        }

        //
        // Extract GTSWrapper
        //

        GTSWrapper wrapper = new GTSWrapper();
        try {
            deserializer.deserialize(wrapper, Arrays.copyOfRange(data, 16, data.length));
            total += wrapper.getCount();
        } catch (TException te) {
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_GEODIR, this.name);
            Sensision.update(SensisionConstants.SENSISION_CLASS_GEODIR_FETCH_FAILEDDESER, labels, 1);
            continue;
        }

        //
        // Check encoder base
        //

        if (0L != wrapper.getBase()) {
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_GEODIR, this.name);
            Sensision.update(SensisionConstants.SENSISION_CLASS_GEODIR_FETCH_INVALIDBASE, labels, 1);
            continue;
        }

        //
        // Now push the encoder to Kafka
        //

        pushData(wrapper);
    }

    br.close();
    conn.disconnect();

    //
    // Flush Kafka
    //

    pushData(null);

    response.setContentType("text/plain");
    response.setStatus(HttpServletResponse.SC_OK);
    response.getWriter().println(total);
}
From source file: io.warp10.continuum.geo.GeoDirectory.java
License: Apache License

/**
 * Load initial subscriptions from ZooKeeper
 */
private void zkLoad() {
    List<String> znodes = null;

    try {
        znodes = this.subsCurator.getChildren().forPath(this.subsZnodeRoot);

        for (String znode : znodes) {
            byte[] data = this.subsCurator.getData().forPath(znode);

            //
            // Unwrap
            //

            if (null != AES_ZK_SUBS) {
                data = CryptoUtils.unwrap(AES_ZK_SUBS, data);
            }

            if (null == data) {
                LOG.error("Ignoring invalid znode data for " + znode);
                continue;
            }

            TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory());
            GeoDirectorySubscriptions gds = new GeoDirectorySubscriptions();
            deser.deserialize(gds, data);

            //
            // Only consider the subscription if it is for our GeoDirectory
            //

            if (!this.name.equals(gds.getName())) {
                continue;
            }

            if (0 == gds.getSubscriptionsSize()) {
                continue;
            }

            for (Entry<String, Set<String>> entry : gds.getSubscriptions().entrySet()) {
                if (!this.selectors.containsKey(entry.getKey())) {
                    this.selectors.put(entry.getKey(), entry.getValue());
                } else {
                    this.selectors.get(entry.getKey()).addAll(entry.getValue());
                }
            }

            //
            // Record the znode if it was produced by us (at least by a process using the same id)
            //

            if (znode.contains(this.name + "-" + this.id + "-")) {
                this.currentSubsZnodes.add(znode);
            }
        }
    } catch (Exception e) {
        LOG.error("Error while loading subscriptions", e);
        throw new RuntimeException(e);
    }
}