List of usage examples for the org.apache.thrift `TException(String message, Throwable cause)` constructor
public TException(String message, Throwable cause)
From source file:com.cloudera.flume.handlers.thrift.ThriftFlumeEventServerImpl.java
License:Apache License
@Override public void close() throws TException { try {//from w w w .ja v a2 s .co m sink.close(); } catch (Exception e) { // TODO figure out how to deal with different exns throw new TException("Caught exception " + e, e); } }
From source file:com.linecorp.armeria.common.thrift.text.TTextProtocol.java
License:Apache License
/**
 * Reads the start of an rpc message from the JSON input.
 *
 * Parses the root JSON object and validates that it carries the fields this
 * protocol requires: 'method' (rpc method name), 'type' (message type), and
 * 'args' (a JSON object with the rpc arguments). 'seqid' is optional and
 * defaults to 0.
 *
 * @return a {@link TMessage} describing the method name, message type and
 *         sequence id of the incoming call
 * @throws TException if the input is not valid JSON or a required field is
 *         missing or malformed
 */
@Override
public TMessage readMessageBegin() throws TException {
    try {
        readRoot();
    } catch (IOException e) {
        throw new TException("Could not parse input, is it valid json?", e);
    }
    if (!root.isObject()) {
        throw new TException("The top level of the input must be a json object with method and args!");
    }
    if (!root.has("method")) {
        throw new TException("Object must have field 'method' with the rpc method name!");
    }
    String methodName = root.get("method").asText();
    if (!root.has("type")) {
        throw new TException(
                "Object must have field 'type' with the message type (CALL, REPLY, EXCEPTION, ONEWAY)!");
    }
    // NOTE(review): boxed Byte — if readFromJsonElement can return null this
    // unboxes to an NPE in the TMessage constructor; confirm its contract.
    Byte messageType = TypedParser.TMESSAGE_TYPE.readFromJsonElement(root.get("type"));
    if (!root.has("args") || !root.get("args").isObject()) {
        throw new TException("Object must have field 'args' with the rpc method args!");
    }
    int sequenceId = root.has("seqid") ? root.get("seqid").asInt() : 0;
    // Override the root with the content of args - thrift's rpc reading will
    // proceed to read it as a message object.
    root = root.get("args");
    return new TMessage(methodName, messageType, sequenceId);
}
From source file:com.linecorp.armeria.common.thrift.text.TTextProtocol.java
License:Apache License
/**
 * Begins reading a thrift struct from the JSON input.
 *
 * If the context stack holds only the base context, this is a new top-level
 * struct and the root of the JSON document is (re)parsed; otherwise the
 * struct is the current child of the enclosing context. Struct-typed map
 * keys arrive as JSON text and are re-parsed into a tree before use. A new
 * {@code StructContext} is pushed for the struct's fields; when the field's
 * declared class is known to be a {@code TBase}, it is passed along so field
 * metadata can be resolved.
 *
 * @return {@code ANONYMOUS_STRUCT} (this protocol does not use struct names)
 * @throws TException if the input (or an embedded map key) is not valid
 *         JSON, there is nothing to parse, or the element is not an object
 */
@Override
public TStruct readStructBegin() throws TException {
    getCurrentContext().read();
    JsonNode structElem;
    // Reading a new top level struct if the only item on the stack
    // is the BaseContext
    if (1 == contextStack.size()) {
        try {
            readRoot();
        } catch (IOException e) {
            throw new TException("Could not parse input, is it valid json?", e);
        }
        if (root == null) {
            throw new TException("parser.next() has nothing to parse!");
        }
        structElem = root;
    } else {
        structElem = getCurrentContext().getCurrentChild();
    }
    // Struct-valued map keys are serialized as JSON strings; parse the text
    // back into a tree before reading it as a struct.
    if (getCurrentContext().isMapKey()) {
        try {
            structElem = OBJECT_MAPPER.readTree(structElem.asText());
        } catch (IOException e) {
            throw new TException("Could not parse map key, is it valid json?", e);
        }
    }
    if (!structElem.isObject()) {
        throw new TException("Expected Json Object!");
    }
    Class<?> fieldClass = getCurrentFieldClassIfIs(TBase.class);
    if (fieldClass != null) {
        pushContext(new StructContext(structElem, fieldClass));
    } else {
        pushContext(new StructContext(structElem));
    }
    return ANONYMOUS_STRUCT;
}
From source file:com.linecorp.armeria.common.thrift.text.TTextProtocol.java
License:Apache License
/**
 * Begins reading a JSON object as a thrift map.
 *
 * Advances the current context, re-parses the element if it is itself a map
 * key serialized as JSON text, then pushes a {@code MapContext} for the
 * entries.
 *
 * @return a {@link TMap} sized to the object's entry count; key/value types
 *         are unused by this protocol
 * @throws TException if the element is not a JSON object or a map key
 *         cannot be parsed
 */
@Override
public TMap readMapBegin() throws TException {
    getCurrentContext().read();

    JsonNode mapNode = getCurrentContext().getCurrentChild();

    // Map keys of nested maps arrive as JSON text; parse them into a tree.
    if (getCurrentContext().isMapKey()) {
        try {
            mapNode = OBJECT_MAPPER.readTree(mapNode.asText());
        } catch (IOException e) {
            throw new TException("Could not parse map key, is it valid json?", e);
        }
    }

    if (!mapNode.isObject()) {
        throw new TException("Expected JSON Object!");
    }

    pushContext(new MapContext(mapNode));
    return new TMap(UNUSED_TYPE, UNUSED_TYPE, mapNode.size());
}
From source file:com.netflix.metacat.connector.hive.client.embedded.EmbeddedHiveClient.java
License:Apache License
private <R> R callWrap(final String requestName, final Callable<R> supplier) throws TException { final long start = registry.clock().wallTime(); final Map<String, String> tags = new HashMap<String, String>(); tags.put("request", requestName); try {//from w w w. ja v a 2s. c o m return supplier.call(); } catch (TException e) { handleSqlException(e); throw e; } catch (Exception e) { throw new TException(e.getMessage(), e.getCause()); } finally { final long duration = registry.clock().wallTime() - start; log.debug("### Time taken to complete {} is {} ms", requestName, duration); this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS); } }
From source file:com.netflix.metacat.connector.hive.client.thrift.MetacatHiveClient.java
License:Apache License
/**
 * Atomically-ish drops the named partitions and then adds the given
 * partitions on the specified table, using a fresh metastore client that is
 * closed when done.
 *
 * @param dbName database name
 * @param tableName table name
 * @param partitions partitions to add after the drop
 * @param delPartitionNames names of partitions to drop first
 * @throws InvalidMetaException if the metastore rejects the partitions as
 *         invalid
 * @throws TException wrapping any other thrift failure, with table context
 *         in the message
 */
@Override
public void addDropPartitions(final String dbName, final String tableName,
        final List<Partition> partitions, final List<String> delPartitionNames) throws TException {
    // try-with-resources guarantees the client is closed on every path
    try (HiveMetastoreClient client = createMetastoreClient()) {
        try {
            // Drops happen first so re-added partitions don't collide.
            dropHivePartitions(client, dbName, tableName, delPartitionNames);
            client.add_partitions(partitions);
        } catch (MetaException | InvalidObjectException e) {
            throw new InvalidMetaException("One or more partitions are invalid.", e);
        } catch (TException e) {
            throw new TException(String.format(
                    "Internal server error adding/dropping partitions for table %s.%s",
                    dbName, tableName), e);
        }
    }
}
From source file:com.uber.jaeger.senders.HttpSender.java
License:Apache License
@Override public void send(Process process, List<Span> spans) throws TException { Batch batch = new Batch(process, spans); byte[] bytes = serializer.serialize(batch); RequestBody body = RequestBody.create(MEDIA_TYPE_THRIFT, bytes); Request request = requestBuilder.post(body).build(); Response response;//from w w w .ja v a 2s. c om try { response = httpClient.newCall(request).execute(); } catch (IOException e) { throw new TException(String.format("Could not send %d spans", spans.size()), e); } if (!response.isSuccessful()) { String responseBody; try { responseBody = response.body() != null ? response.body().string() : "null"; } catch (IOException e) { responseBody = "unable to read response"; } String exceptionMessage = String.format("Could not send %d spans, response %d: %s", spans.size(), response.code(), responseBody); throw new TException(exceptionMessage); } }
From source file:ezbake.data.elastic.ElasticClient.java
License:Apache License
private void ensureVisible(DocumentIdentifier id, EzSecurityToken token) throws TException { Document foundDoc;// w w w . j a v a 2 s . c o m try { foundDoc = get(id.getId(), id.getType(), null, token); } catch (final TException e) { final String errMsg = "There was an error finding the given id " + id; logger.error(errMsg, e); throw new TException(errMsg, e); } if (EzElasticHandler.BLANK_DOCUMENT.equals(foundDoc)) { final String errMsg = "There was an attempt to get a document by ID that was not present: " + id; logger.error(errMsg); throw new TException(errMsg); } }
From source file:ezbake.frack.submitter.SubmitterService.java
License:Apache License
/**
 * Unpacks a pipeline build pack, repackages its jar with configuration and
 * key material, and submits it to the Storm pipeline submitter.
 *
 * Steps: unzip to /tmp; locate the jar under bin/; find the PipelineBuilder
 * implementation inside the jar; stage frack properties, SSL material, and
 * keys into the jar; submit the repackaged jar. The unzipped work directory
 * is deleted in all cases.
 *
 * @param zip the zipped build pack binary
 * @param pipelineId identifier for the pipeline being submitted
 * @return a {@link SubmitResult} describing success or the reason for
 *         failure (missing jar / missing builder class)
 * @throws TException if unzipping, jar inspection, submission, or cleanup
 *         fails
 */
@Override
public SubmitResult submit(ByteBuffer zip, String pipelineId) throws TException {
    File unzippedPack = null;
    try {
        // Unzip the provided pack
        unzippedPack = UnzipUtil.unzip(new File("/tmp"), zip);
        // Find the jar path
        Optional<String> jarPath = UnzipUtil.getJarPath(unzippedPack);
        if (!jarPath.isPresent()) {
            return new SubmitResult(false,
                    "Could not find jar file. Make sure to place jar file in zip within bin/ directory");
        }
        final File jarFile = new File(jarPath.get());
        // Get the builder class
        Optional<String> builderClass = ClasspathUtil.findClassInJar(jarFile);
        if (!builderClass.isPresent()) {
            log.error("Could not find PipelineBuilder implementation in {}", jarFile.getName());
            return new SubmitResult(false,
                    "Could not find PipelineBuilder implementation in jar: " + jarFile.getName());
        }
        log.debug("Building pipeline with builder class {}", builderClass.get());
        SubmitterConfiguration config = new SubmitterConfiguration().setIngestSystem("Storm")
                .setBuilderClass(builderClass.get()).setPipelineId(pipelineId).setProduction(!insecure);
        Optional<String> confPath = UnzipUtil.getConfDirectory(unzippedPack);
        List<File> newFiles = Lists.newArrayList();
        if (confPath.isPresent()) {
            config.setUserPropertiesFolder(confPath.get());
            File confDir = new File(confPath.get());
            // Stage configuration into a directory that will be merged into the jar.
            File stagingDir = new File(unzippedPack, "staging");
            stagingDir.mkdir();
            File newPropertiesDir = new File(stagingDir, "frack_properties");
            FileUtils.copyDirectory(confDir, newPropertiesDir);
            newFiles.add(newPropertiesDir);
            Optional<String> sslPath = UnzipUtil.getSSLPath(confDir);
            if (sslPath.isPresent()) {
                File sslDir = new File(sslPath.get());
                config.setSslDir(sslDir.getAbsoluteFile().getAbsolutePath());
                File newSSLDir = new File(stagingDir, "ssl");
                FileUtils.copyDirectory(sslDir, newSSLDir);
                newFiles.add(newSSLDir);
            } else {
                log.warn("No SSL directory found for {}, needed for using Thrift services", jarFile.getName());
            }
            File keyDir = UnzipUtil.findSubDir(unzippedPack, "keys");
            if (keyDir != null && keyDir.exists()) {
                File newKeyDir = new File(stagingDir, "keys");
                FileUtils.copyDirectory(keyDir, newKeyDir);
                newFiles.add(newKeyDir);
            } else {
                log.warn("No Keys directory found for {}, needed for broadcasting", jarFile.getName());
            }
        } else {
            log.warn("No configuration directory found for {}", jarFile.getName());
        }
        // Create the repackaged jar
        log.info("Repackaging jar with configuration information");
        File newJar = JarUtil.addFilesToJar(jarFile, newFiles);
        config.setPathToJar(newJar.getAbsolutePath());
        log.debug("Sending information to PipelineSubmitter");
        SubmitResult result = PipelineSubmitter.submit(config);
        log.info("Submission result: {}", result.getMessage());
        return result;
    } catch (IOException e) {
        String message = "Could not unzip provided build pack binary";
        log.error(message, e);
        throw new TException(message, e);
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt flag is not restored here
        // (Thread.currentThread().interrupt()) — confirm whether that is intended.
        String message = "Interrupted exception occurred when submitting pipeline";
        log.error(message, e);
        throw new TException(message, e);
    } catch (ClassNotFoundException e) {
        String message = "PipelineBuilder class not found when interrogating jar file";
        log.error(message, e);
        throw new TException(message, e);
    } finally {
        if (unzippedPack != null && unzippedPack.exists()) {
            try {
                FileUtils.deleteDirectory(unzippedPack);
            } catch (IOException e) {
                // NOTE(review): throwing from finally masks any exception already in
                // flight from the try/catch blocks above — confirm this is intended.
                throw new TException("Could not delete unzipped buildpack directory", e);
            }
        }
    }
}
From source file:ezbake.frack.submitter.SubmitterService.java
License:Apache License
@Override public void shutdown(String pipelineId) throws PipelineNotRunningException, TException { try {//from www . j a v a 2s . c o m PipelineSubmitter.shutdownPipeline("Storm", pipelineId); log.info("{} successfully shut down", pipelineId); } catch (IOException e) { log.error("An exception occurred when shutting down {}", pipelineId); throw new TException("Exception occurred when shutting down pipeline '" + pipelineId + "'. It may not have shut down properly", e); } }