List of usage examples for com.google.common.base Preconditions checkNotNull
public static <T> T checkNotNull(T reference)
From source file:cz.cuni.mff.ms.brodecva.botnicek.ide.utils.data.graphs.Direction.java
/**
 * Returns the opposite orientation.
 *
 * @param direction orientation (must not be {@code null})
 * @return the opposite orientation
 */
public static Direction getOpposite(final Direction direction) {
    Preconditions.checkNotNull(direction);

    final Direction[] values = values();
    assert values.length == 2;

    // With exactly two constants, the opposite is simply the one that is not
    // the argument -- no temporary HashSet allocation needed.
    for (final Direction value : values) {
        if (value != direction) {
            return value;
        }
    }
    // Unreachable while the enum has exactly two constants (asserted above).
    throw new AssertionError("Direction must have exactly two values");
}
From source file:org.apache.aurora.common.zookeeper.Credentials.java
/** * Creates a set of credentials for the ZooKeeper digest authentication mechanism. * * @param username the username to authenticate with * @param password the password to authenticate with * @return a set of credentials that can be used to authenticate the zoo keeper client *///from w w w . j av a2s . c om public static Credentials digestCredentials(String username, String password) { MorePreconditions.checkNotBlank(username); Preconditions.checkNotNull(password); // TODO(John Sirois): DigestAuthenticationProvider is broken - uses platform default charset // (on server) and so we just have to hope here that clients are deployed in compatible jvms. // Consider writing and installing a version of DigestAuthenticationProvider that controls its // Charset explicitly. return new Credentials("digest", (username + ":" + password).getBytes()); }
From source file:net.sf.lucis.core.impl.SingleSearcherProvider.java
public static SingleSearcherProvider of(final DirectoryProvider provider) { Preconditions.checkNotNull(provider); return new SingleSearcherProvider() { @Override// ww w . j a v a2 s. co m Directory directory() { return provider.getDirectory(); } }; }
From source file:ivory.smrf.model.importance.ConceptImportanceModel.java
@SuppressWarnings("unchecked") public static ConceptImportanceModel get(Node model) throws ConfigurationException { Preconditions.checkNotNull(model); // Get model type. String modelType = XMLTools.getAttributeValue(model, "type", null); if (modelType == null) { throw new ConfigurationException("Model type must be specified!"); }//w w w .j ava 2s. c o m // Dynamically construct importance model. ConceptImportanceModel importanceModel = null; try { Class<? extends ConceptImportanceModel> clz = (Class<? extends ConceptImportanceModel>) Class .forName(modelType); importanceModel = clz.newInstance(); importanceModel.configure(model); } catch (Exception e) { throw new ConfigurationException("Error instantiating ConceptImportanceModel! " + e); } return importanceModel; }
From source file:edu.byu.nlp.stats.SymmetricDirichletMLEFPOptimizable.java
/**
 * Builds a symmetric Dirichlet MLE fixed-point optimizable from per-sample
 * log-theta rows.
 *
 * @param data non-empty matrix whose rows are observations (must not be {@code null})
 * @return an optimizable seeded with the column-wise mean of {@code data}
 */
public static SymmetricDirichletMLEFPOptimizable newOptimizable(double[][] data) {
    Preconditions.checkNotNull(data);
    Preconditions.checkArgument(data.length > 0);

    final int numRows = data.length;
    // Column-wise mean: sum over rows, then scale by the row count in place.
    final double[] meanLogTheta = Matrices.sumOverFirst(data);
    DoubleArrays.divideToSelf(meanLogTheta, numRows);

    return new SymmetricDirichletMLEFPOptimizable(meanLogTheta, numRows, data[0].length);
}
From source file:org.apache.aurora.scheduler.app.Log4jConfigurator.java
/**
 * Configures log4j to log to stderr with a glog format.
 *
 * @param glogConfig The glog configuration in effect.
 */
static void configureConsole(Configuration glogConfig) {
    Preconditions.checkNotNull(glogConfig);

    // Route root-logger output to stderr using the glog layout, at the level
    // derived from the supplied configuration.
    ConsoleAppender stderrAppender =
            new ConsoleAppender(new GlogLayout(), ConsoleAppender.SYSTEM_ERR);
    BasicConfigurator.configure(stderrAppender);
    Logger.getRootLogger().setLevel(getLevel(glogConfig));
}
From source file:org.apache.impala.catalog.HiveStorageDescriptorFactory.java
/** * Creates and returns a Hive StoreDescriptor for the given FileFormat and RowFormat. * Currently supports creating StorageDescriptors for Parquet, Text, Sequence, Avro and * RC file.//w ww .j ava 2 s . c om * TODO: Add support for HBase */ public static StorageDescriptor createSd(THdfsFileFormat fileFormat, RowFormat rowFormat) { Preconditions.checkNotNull(fileFormat); Preconditions.checkNotNull(rowFormat); StorageDescriptor sd = new StorageDescriptor(); sd.setSerdeInfo(new org.apache.hadoop.hive.metastore.api.SerDeInfo()); sd.getSerdeInfo().setParameters(new HashMap<String, String>()); // The compressed flag is not used to determine whether the table is compressed or // not. Instead, we use the input format or the filename. sd.setCompressed(false); HdfsFileFormat hdfsFileFormat = HdfsFileFormat.fromThrift(fileFormat); sd.setInputFormat(hdfsFileFormat.inputFormat()); sd.setOutputFormat(hdfsFileFormat.outputFormat()); sd.getSerdeInfo().setSerializationLib(hdfsFileFormat.serializationLib()); if (rowFormat.getFieldDelimiter() != null) { sd.getSerdeInfo().putToParameters("serialization.format", rowFormat.getFieldDelimiter()); sd.getSerdeInfo().putToParameters("field.delim", rowFormat.getFieldDelimiter()); } if (rowFormat.getEscapeChar() != null) { sd.getSerdeInfo().putToParameters("escape.delim", rowFormat.getEscapeChar()); } if (rowFormat.getLineDelimiter() != null) { sd.getSerdeInfo().putToParameters("line.delim", rowFormat.getLineDelimiter()); } return sd; }
From source file:com.youtube.serializer.YoutubeEventClassifier.java
public static Class detectClass(String json) { Preconditions.checkNotNull(json); Preconditions.checkArgument(StringUtils.isNotEmpty(json)); ObjectNode objectNode;//from w w w .ja v a 2s.c om try { objectNode = (ObjectNode) mapper.readTree(json); } catch (IOException e) { e.printStackTrace(); return null; } if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(VIDEO_IDENTIFIER)) { return Video.class; } else if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().contains(CHANNEL_IDENTIFIER)) { return com.google.api.services.youtube.model.Channel.class; } else { return ObjectNode.class; } }
From source file:com.davidbracewell.io.CharsetDetector.java
/**
 * Detects the character set for the buffer.
 *
 * @param buffer The buffer
 * @param offset where to start
 * @param length the length to read
 * @return The detected charset or null
 */
public static Charset detect(byte[] buffer, int offset, int length) {
    Preconditions.checkNotNull(buffer);
    Preconditions.checkArgument(length > 0);
    Preconditions.checkArgument(offset >= 0);

    final com.ibm.icu.text.CharsetDetector icuDetector = new com.ibm.icu.text.CharsetDetector();
    try {
        icuDetector.setText(new ByteArrayInputStream(buffer, offset, length));
        final String charsetName = icuDetector.detect().getName();
        return Charset.forName(charsetName);
    } catch (Exception ignored) {
        // Detection is best-effort: any failure maps to "unknown" (null).
        return null;
    }
}
From source file:org.apache.aurora.common.net.InetSocketAddressHelper.java
/**
 * Attempts to parse an endpoint spec into an InetSocketAddress.
 *
 * @param value the endpoint spec
 * @return a parsed InetSocketAddress
 * @throws NullPointerException if {@code value} is {@code null}
 * @throws IllegalArgumentException if {@code value} cannot be parsed
 */
public static InetSocketAddress parse(String value) {
    Preconditions.checkNotNull(value);

    // Expect exactly "host:port"; limit 2 keeps any further colons in the port part.
    String[] parts = value.split(":", 2);
    if (parts.length != 2) {
        throw new IllegalArgumentException("Invalid socket address spec: " + value);
    }

    String host = parts[0];
    int port = asPort(parts[1]);
    if (StringUtils.isEmpty(host)) {
        // No host given: bind to the wildcard address on the parsed port.
        return new InetSocketAddress(port);
    }
    return InetSocketAddress.createUnresolved(host, port);
}