Example usage for org.springframework.util Assert notEmpty

List of usage examples for org.springframework.util Assert notEmpty

Introduction

In this page you can find the example usage for org.springframework.util Assert notEmpty.

Prototype

public static void notEmpty(@Nullable Map<?, ?> map, Supplier<String> messageSupplier)

Note: this is one of several overloads of Assert.notEmpty; the examples below also use the Collection, array, and plain String-message variants.

Source Link

Document

Assert that a Map contains entries; that is, it must not be null and must contain at least one entry.

Usage

From source file:org.springframework.data.hadoop.fs.DistributedCacheFactoryBean.java

@Override
public void afterPropertiesSet() throws Exception {
    // Fail fast: a Hadoop Configuration and at least one cache entry are mandatory.
    Assert.notNull(conf, "A Hadoop configuration is required");
    Assert.notEmpty(entries, "No entries specified");

    // fall back to system discovery
    if (fs == null) {
        fs = FileSystem.get(conf);
    }

    ds = new DistributedCache();

    if (createSymlink) {
        DistributedCache.createSymlink(conf);
    }

    // Resolver used to expand each entry's (possibly wildcarded) path into concrete HDFS resources.
    HdfsResourceLoader loader = new HdfsResourceLoader(conf);

    // On platforms where the path separator is not ':' (e.g. Windows, where it is ';'),
    // classpath entries added to the DistributedCache are likely to be malformed (HADOOP-9123).
    boolean warnCpEntry = !":".equals(System.getProperty("path.separator"));

    try {
        for (CacheEntry entry : entries) {
            Resource[] resources = loader.getResources(entry.value);
            if (!ObjectUtils.isEmpty(resources)) {
                for (Resource resource : resources) {
                    HdfsResource res = (HdfsResource) resource;

                    URI uri = res.getURI();
                    String path = getPathWithFragment(uri);

                    // Default symlink name is the file name itself; archives are detected by extension.
                    String defaultLink = resource.getFilename();
                    boolean isArchive = (defaultLink.endsWith(".tgz") || defaultLink.endsWith(".tar")
                            || defaultLink.endsWith(".tar.gz") || defaultLink.endsWith(".zip"));

                    switch (entry.type) {
                    case CP:
                        // Classpath entry: add the resource to the job classpath.
                        // Path does not handle fragments so use the URI instead
                        Path p = new Path(URI.create(path));

                        if (FILE_SEPARATOR_WARNING && warnCpEntry) {
                            LogFactory.getLog(DistributedCacheFactoryBean.class).warn(
                                    "System path separator is not ':' - this will likely cause invalid classpath entries within the DistributedCache. See the docs and HADOOP-9123 for more information.");
                            // show the warning once per CL
                            FILE_SEPARATOR_WARNING = false;
                        }

                        if (isArchive) {
                            DistributedCache.addArchiveToClassPath(p, conf, fs);
                        } else {
                            DistributedCache.addFileToClassPath(p, conf, fs);
                        }

                        break;

                    case LOCAL:

                        // Local entries: addLocalArchives/addLocalFiles are invoked reflectively,
                        // presumably because they are not available on every supported Hadoop
                        // version — TODO confirm. NOTE(review): findMethod may return null on
                        // Hadoop builds lacking these methods, which would raise an NPE here.
                        if (isArchive) {
                            if (VersionUtils.isHadoop2X()) {
                                // TODO - Need to figure out how to add local archive
                            } else {
                                Method addLocalArchives = ReflectionUtils.findMethod(DistributedCache.class,
                                        "addLocalArchives", Configuration.class, String.class);
                                addLocalArchives.invoke(null, conf, path);
                            }
                        } else {
                            if (VersionUtils.isHadoop2X()) {
                                // TODO - Need to figure out how to add local files
                            } else {
                                Method addLocalFiles = ReflectionUtils.findMethod(DistributedCache.class,
                                        "addLocalFiles", Configuration.class, String.class);
                                addLocalFiles.invoke(null, conf, path);
                            }
                        }

                        break;

                    case CACHE:

                        // Plain cache entry: ensure a '#symlink' fragment is present so the file
                        // is exposed under a predictable local name.
                        if (!path.contains("#")) {
                            // use the path to avoid adding the host:port into the uri
                            uri = URI.create(path + "#" + defaultLink);
                        }

                        if (isArchive) {
                            DistributedCache.addCacheArchive(uri, conf);
                        } else {
                            DistributedCache.addCacheFile(uri, conf);
                        }

                        break;
                    }
                }
            }
        }
    } finally {
        // Always release the resource loader, even if resolution/registration failed.
        loader.close();
    }
}

From source file:org.springframework.data.hadoop.hbase.HbaseInterceptor.java

// Validates configuration after property injection: the superclass initialization
// must run first, then at least one HBase table name must have been configured.
public void afterPropertiesSet() {
    super.afterPropertiesSet();
    // Assert.notEmpty rejects both null and empty, so no separate null check is needed.
    Assert.notEmpty(tableNames, "at least one table needs to be specified");
}

From source file:org.springframework.data.hadoop.mapreduce.JobRunner.java

@Override
public void afterPropertiesSet() throws Exception {
    // At least one Hadoop job must be configured; Assert.notEmpty also rejects null.
    Assert.notEmpty(jobs, "at least one job needs to be specified");

    // CopyOnWriteArrayList: safe to iterate while jobs are added/removed concurrently.
    runningJobs = new CopyOnWriteArrayList<RunningJob>();
    if (runAtStartup) {
        // getObject() is defined elsewhere in this class; presumably it triggers
        // execution of the configured jobs at startup — TODO confirm.
        getObject();
    }
}

From source file:org.springframework.data.redis.listener.RedisMessageListenerContainer.java

// Registers a listener for the given topics, updating the listener->topics and
// topic->listeners maps, and (if the container is already listening) subscribing
// the underlying connection to the new channels/patterns.
private void addListener(MessageListener listener, Collection<? extends Topic> topics) {
    Assert.notNull(listener, "a valid listener is required");
    Assert.notEmpty(topics, "at least one topic is required");

    // Serialized topic names collected separately for channel vs. pattern subscription.
    List<byte[]> channels = new ArrayList<byte[]>(topics.size());
    List<byte[]> patterns = new ArrayList<byte[]>(topics.size());

    boolean trace = logger.isTraceEnabled();

    // add listener mapping
    // NOTE(review): the check-then-put below is not atomic; presumably callers
    // serialize access to listenerTopics — TODO confirm.
    Set<Topic> set = listenerTopics.get(listener);
    if (set == null) {
        set = new CopyOnWriteArraySet<Topic>();
        listenerTopics.put(listener, set);
    }
    set.addAll(topics);

    for (Topic topic : topics) {

        // Key both mappings by the serialized topic bytes (wrapped for equals/hashCode).
        ByteArrayWrapper holder = new ByteArrayWrapper(serializer.serialize(topic.getTopic()));

        if (topic instanceof ChannelTopic) {
            Collection<MessageListener> collection = channelMapping.get(holder);
            if (collection == null) {
                collection = new CopyOnWriteArraySet<MessageListener>();
                channelMapping.put(holder, collection);
            }
            collection.add(listener);
            channels.add(holder.getArray());

            if (trace)
                logger.trace("Adding listener '" + listener + "' on channel '" + topic.getTopic() + "'");
        }

        else if (topic instanceof PatternTopic) {
            Collection<MessageListener> collection = patternMapping.get(holder);
            if (collection == null) {
                collection = new CopyOnWriteArraySet<MessageListener>();
                patternMapping.put(holder, collection);
            }
            collection.add(listener);
            patterns.add(holder.getArray());

            if (trace)
                logger.trace("Adding listener '" + listener + "' for pattern '" + topic.getTopic() + "'");
        }

        else {
            // Only ChannelTopic and PatternTopic are supported.
            throw new IllegalArgumentException("Unknown topic type '" + topic.getClass() + "'");
        }
    }

    // check the current listening state
    // If already listening, push the new subscriptions to the live connection.
    if (listening) {
        subscriptionTask.subscribeChannel(channels.toArray(new byte[channels.size()][]));
        subscriptionTask.subscribePattern(patterns.toArray(new byte[patterns.size()][]));
    }
}

From source file:org.springframework.data.rest.webmvc.config.PersistentEntityResourceHandlerMethodArgumentResolver.java

/**
 * Creates a new {@link PersistentEntityResourceHandlerMethodArgumentResolver} for the given
 * {@link HttpMessageConverter}s and {@link RootResourceInformationHandlerMethodArgumentResolver}.
 *
 * @param messageConverters must not be {@literal null} or empty.
 * @param resourceInformationResolver must not be {@literal null}.
 * @param idResolver must not be {@literal null}.
 * @param reader must not be {@literal null}.
 */
public PersistentEntityResourceHandlerMethodArgumentResolver(List<HttpMessageConverter<?>> messageConverters,
        RootResourceInformationHandlerMethodArgumentResolver resourceInformationResolver,
        BackendIdHandlerMethodArgumentResolver idResolver, DomainObjectReader reader) {

    Assert.notEmpty(messageConverters, "MessageConverters must not be null or empty!");
    // Fix: this is a null check, but the message previously said "must not be empty!"
    // (copy-paste from the notEmpty assertion above).
    Assert.notNull(resourceInformationResolver,
            "RootResourceInformationHandlerMethodArgumentResolver must not be null!");
    Assert.notNull(idResolver, "BackendIdHandlerMethodArgumentResolver must not be null!");
    Assert.notNull(reader, "DomainObjectReader must not be null!");

    this.messageConverters = messageConverters;
    this.resourceInformationResolver = resourceInformationResolver;
    this.idResolver = idResolver;
    this.reader = reader;
}

From source file:org.springframework.hateoas.client.Traverson.java

/**
 * Creates a new {@link Traverson} interacting with the given base URI and using the given {@link MediaType}s to
 * interact with the service./*from   w w w  .j  av  a 2s  .  co m*/
 * 
 * @param baseUri must not be {@literal null}.
 * @param mediaType must not be {@literal null} or empty.
 */
public Traverson(URI baseUri, MediaType... mediaTypes) {

    Assert.notNull(baseUri, "Base URI must not be null!");
    Assert.notEmpty(mediaTypes, "At least one media type must be given!");

    this.mediaTypes = Arrays.asList(mediaTypes);
    this.baseUri = baseUri;
    this.discoverers = DEFAULT_LINK_DISCOVERERS;

    setRestOperations(createDefaultTemplate(this.mediaTypes));
}

From source file:org.springframework.http.converter.AbstractHttpMessageConverter.java

/**
 * Set the list of {@link MediaType} objects supported by this converter.
 * The list must contain at least one entry; a defensive copy is stored.
 */
public void setSupportedMediaTypes(List<MediaType> supportedMediaTypes) {
    Assert.notEmpty(supportedMediaTypes, "MediaType List must not be empty");
    // Copy to decouple from the caller's list.
    List<MediaType> copy = new ArrayList<>(supportedMediaTypes);
    this.supportedMediaTypes = copy;
}

From source file:org.springframework.integration.aws.s3.InboundLocalFileOperationsImpl.java

@Override
public void setEventListeners(List<FileEventHandler> handlers) {
    // Assert.notEmpty already rejects null (throwing the same IllegalArgumentException
    // with the same message), so the previous separate Assert.notNull was redundant.
    Assert.notEmpty(handlers, "Handlers must be non null and non empty");
    // Replace, rather than append to, the current set of handlers.
    this.handlers.clear();
    this.handlers.addAll(handlers);
}

From source file:org.springframework.integration.config.IntegrationManagementConfigurer.java

/**
 * Set the array of simple patterns for component names for which message statistics
 * will be enabled (response times, rates etc), as well as counts (a positive match
 * here overrides {@link #setEnabledCountsPatterns(String[]) enabledCountsPatterns},
 * you can't have statistics without counts). (defaults to '*').
 * Enables statistics for those components that support statistics
 * (channels - when sending, message handlers, etc). This is the initial setting only,
 * individual components can have stats enabled/disabled at runtime. If a pattern
 * starts with `!`, stats (and counts) are disabled for matches. Note: this means that
 * '!foo' here will disable stats and counts for 'foo' even if counts are enabled for
 * 'foo' in {@link #setEnabledCountsPatterns(String[]) enabledCountsPatterns}. For
 * components that match multiple patterns, the first pattern wins. Enabling stats at
 * runtime also enables counts.
 * @param enabledStatsPatterns the patterns.
 */
public void setEnabledStatsPatterns(String[] enabledStatsPatterns) {
    Assert.notEmpty(enabledStatsPatterns, "enabledStatsPatterns must not be empty");
    // Store a defensive copy so later mutation of the caller's array has no effect.
    this.enabledStatsPatterns = enabledStatsPatterns.clone();
}

From source file:org.springframework.integration.gemfire.inbound.CacheListeningMessageProducer.java

// Configures which cache event types this producer reacts to; at least one is required.
public void setSupportedEventTypes(EventType... eventTypes) {
    Assert.notEmpty(eventTypes, "eventTypes must not be empty");
    // Snapshot into a set so later changes to the varargs array have no effect.
    HashSet<EventType> types = new HashSet<EventType>(Arrays.asList(eventTypes));
    this.supportedEventTypes = types;
}