List of usage examples for com.google.common.base.MoreObjects.firstNonNull
@CheckReturnValue public static <T> T firstNonNull(@Nullable T first, @Nullable T second)
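firstNonNull returns the first argument if it is non-null and otherwise returns the second; if both arguments are null it throws a NullPointerException. A minimal sketch of the common default-value pattern (the DEFAULT_NAME constant and displayName helper below are made up for illustration):

import com.google.common.base.MoreObjects;

public class FirstNonNullDemo {
    // Hypothetical default used only for this sketch
    private static final String DEFAULT_NAME = "anonymous";

    static String displayName(String configuredName) {
        // Returns configuredName when it is non-null, otherwise DEFAULT_NAME.
        // Throws NullPointerException if both arguments are null.
        return MoreObjects.firstNonNull(configuredName, DEFAULT_NAME);
    }

    public static void main(String[] args) {
        System.out.println(displayName("alice")); // alice
        System.out.println(displayName(null));    // anonymous
    }
}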
From source file:org.glowroot.container.TempDirs.java
public static File createTempDir(String prefix) throws IOException {
    final int tempDirAttempts = 10000;
    // Fall back to the current directory if java.io.tmpdir is not set
    String javaTempDir = MoreObjects.firstNonNull(StandardSystemProperty.JAVA_IO_TMPDIR.value(), ".");
    File baseDir = new File(javaTempDir);
    String baseName = prefix + "-" + System.currentTimeMillis() + "-";
    for (int counter = 0; counter < tempDirAttempts; counter++) {
        File tempDir = new File(baseDir, baseName + counter);
        if (tempDir.mkdir()) {
            return tempDir;
        }
    }
    throw new IOException("Failed to create directory within " + tempDirAttempts + " attempts (tried "
            + baseName + "0 to " + baseName + (tempDirAttempts - 1) + ')');
}
From source file:org.killbill.billing.payment.core.PaymentTransactionInfoPluginConverter.java
public static TransactionStatus toTransactionStatus(final PaymentTransactionInfoPlugin paymentTransactionInfoPlugin) {
    // Treat a null plugin status as UNDEFINED
    final PaymentPluginStatus status = MoreObjects.firstNonNull(paymentTransactionInfoPlugin.getStatus(),
            PaymentPluginStatus.UNDEFINED);
    switch (status) {
        case PROCESSED:
            return TransactionStatus.SUCCESS;
        case PENDING:
            return TransactionStatus.PENDING;
        // The naming is a bit inconsistent, but ERROR on the plugin side means PAYMENT_FAILURE (that is, a case
        // where the transaction went through but did not return successfully, e.g. CC denied, ...)
        case ERROR:
            return TransactionStatus.PAYMENT_FAILURE;
        // The plugin is trying to tell us that it knows for sure that the payment transaction did not happen
        // (connection failure, ...)
        case CANCELED:
            return TransactionStatus.PLUGIN_FAILURE;
        // This will be picked up by the Janitor to figure out what really happened and correct the state if needed.
        // Note that the default case includes the null status.
        case UNDEFINED:
        default:
            return TransactionStatus.UNKNOWN;
    }
}
From source file:com.esofthead.mycollab.core.utils.FileUtils.java
public static File getUserFolder() {
    // Prefer the MYCOLLAB_APP_HOME property, falling back to the current working directory
    String userDir = MoreObjects.firstNonNull(System.getProperty("MYCOLLAB_APP_HOME"),
            System.getProperty("user.dir"));
    return new File(userDir);
}
From source file:com.amazonaws.services.cognito.streams.connector.AmazonCognitoStreamsEnvironmentOptions.java
static String getKinesisInputStream() {
    String variable = MoreObjects.firstNonNull(System.getProperty("KINESIS_INPUT_STREAM"),
            System.getProperty("PARAM1"));
    return variable;
}
From source file:org.opendaylight.openflowplugin.applications.frsync.util.FxChainUtil.java
public static FutureCallback<RpcResult<Void>> logResultCallback(final NodeId nodeId, final String prefix) {
    return new FutureCallback<RpcResult<Void>>() {
        @Override
        public void onSuccess(@Nullable final RpcResult<Void> result) {
            if (result != null) {
                if (result.isSuccessful()) {
                    LOG.debug(prefix + " finished successfully: {}", nodeId.getValue());
                } else {
                    // Guard against a null error collection by substituting an empty list
                    final Collection<RpcError> errors = MoreObjects.firstNonNull(result.getErrors(),
                            ImmutableList.<RpcError>of());
                    LOG.debug(prefix + " failed: {} -> {}", nodeId.getValue(), Arrays.toString(errors.toArray()));
                }
            } else {
                LOG.debug(prefix + " reconciliation failed: {} -> null result", nodeId.getValue());
            }
        }

        @Override
        public void onFailure(final Throwable t) {
            LOG.debug(prefix + " reconciliation failed seriously: {}", nodeId.getValue(), t);
        }
    };
}
From source file:com.googlesource.gerrit.plugins.lfs.s3.S3LargeFileRepository.java
private static S3Config getS3Config(LfsGlobalConfig config, LfsBackend backendConfig) {
    String section = backendConfig.type.name();
    String region = config.getString(section, backendConfig.name, "region");
    String bucket = config.getString(section, backendConfig.name, "bucket");
    // Default to REDUCED_REDUNDANCY when no storage class is configured
    String storageClass = MoreObjects.firstNonNull(
            config.getString(section, backendConfig.name, "storageClass"), "REDUCED_REDUNDANCY");
    int expirationSeconds = config.getInt(section, backendConfig.name, "expirationSeconds", 60);
    boolean disableSslVerify = config.getBoolean(section, backendConfig.name, "disableSslVerify", false);
    String accessKey = config.getString(section, backendConfig.name, "accessKey");
    String secretKey = config.getString(section, backendConfig.name, "secretKey");
    return new S3Config(region, bucket, storageClass, accessKey, secretKey, expirationSeconds, disableSslVerify);
}
From source file:com.google.gitiles.MimeTypes.java
public static String getMimeType(String path) {
    int d = path.lastIndexOf('.');
    if (d == -1) {
        return ANY;
    }
    String ext = path.substring(d + 1);
    String type = TYPES.get(ext.toLowerCase());
    // Fall back to the generic type when the extension is not in the map
    return MoreObjects.firstNonNull(type, ANY);
}
From source file:com.google.cloud.dataflow.examples.complete.AutoComplete.java
public static void main(String[] args) throws IOException {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);

    if (options.isStreaming()) {
        // In order to cancel the pipelines automatically,
        // {@literal DataflowPipelineRunner} is forced to be used.
        options.setRunner(DataflowPipelineRunner.class);
    }

    options.setBigQuerySchema(FormatForBigquery.getSchema());
    DataflowExampleUtils dataflowUtils = new DataflowExampleUtils(options);

    // We support running the same pipeline in either
    // batch or windowed streaming mode.
    PTransform<? super PBegin, PCollection<String>> readSource;
    WindowFn<Object, ?> windowFn;
    if (options.isStreaming()) {
        checkArgument(!options.getOutputToDatastore(), "DatastoreIO is not supported in streaming.");
        dataflowUtils.setupPubsub();
        readSource = PubsubIO.Read.topic(options.getPubsubTopic());
        windowFn = SlidingWindows.of(Duration.standardMinutes(30)).every(Duration.standardSeconds(5));
    } else {
        readSource = TextIO.Read.from(options.getInputFile());
        windowFn = new GlobalWindows();
    }

    // Create the pipeline.
    Pipeline p = Pipeline.create(options);
    PCollection<KV<String, Iterable<String>>> toWrite = p.apply(readSource)
            .apply(ParDo.of(new ExtractPrefixes(options.getMinPrefix(), options.getMaxPrefix())))
            .apply(GroupByKey.<String, String>create());

    if (options.getOutputToDatastore()) {
        toWrite.apply(ParDo.named("FormatForDatastore")
                .of(new FormatForDatastore(options.getKind(), options.getDatastoreAncestorKey())))
                .apply(DatastoreIO.v1().write().withProjectId(
                        // Use the configured output dataset if present, otherwise fall back to the project id
                        MoreObjects.firstNonNull(options.getOutputDataset(), options.getProject())));
    }
    if (options.getOutputToBigQuery()) {
        dataflowUtils.setupBigQueryTable();

        TableReference tableRef = new TableReference();
        tableRef.setProjectId(options.getProject());
        tableRef.setDatasetId(options.getBigQueryDataset());
        tableRef.setTableId(options.getBigQueryTable());

        toWrite.apply(ParDo.of(new FormatForBigquery()))
                .apply(BigQueryIO.Write.to(tableRef).withSchema(FormatForBigquery.getSchema())
                        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                        .withWriteDisposition(options.isStreaming()
                                ? BigQueryIO.Write.WriteDisposition.WRITE_APPEND
                                : BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    }

    // Run the pipeline.
    PipelineResult result = p.run();

    if (options.isStreaming() && !options.getInputFile().isEmpty()) {
        // Inject the data into the Pub/Sub topic with a Dataflow batch pipeline.
        dataflowUtils.runInjectorPipeline(options.getInputFile(), options.getPubsubTopic());
    }

    // dataflowUtils will try to cancel the pipeline and the injector before the program exits.
    dataflowUtils.waitToFinish(result);
}
From source file:com.amazonaws.services.cognito.streams.connector.AmazonCognitoStreamsEnvironmentOptions.java
static String getRedshiftUserName() {
    String variable = MoreObjects.firstNonNull(System.getProperty("REDSHIFT_USER_NAME"),
            System.getProperty("PARAM2"));
    return variable;
}
From source file:com.google.errorprone.refaster.testdata.TryTemplateExample.java
int foo(String str) {
    int result;
    // Ints.tryParse returns null for unparseable input, so 0 is substituted as the default
    result = MoreObjects.firstNonNull(Ints.tryParse(str), 0);
    return result;
}