Usage examples for org.apache.commons.lang.ArrayUtils.contains
public static boolean contains(boolean[] array, boolean valueToFind)
Checks whether the value is in the given array; a null array is treated as not containing anything and returns false. ArrayUtils.contains is also overloaded for Object[] and the other primitive array types, and several of the examples below use the Object[] variant.
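Before the real-world examples, here is a minimal, self-contained sketch of the call in isolation (the array contents are invented for illustration and are not taken from the projects below):

import org.apache.commons.lang.ArrayUtils;

public class ContainsExample {
    public static void main(String[] args) {
        boolean[] flags = { true, true, true };
        String[] languages = { "fa", "ar", "ku" };

        System.out.println(ArrayUtils.contains(flags, false));           // false
        System.out.println(ArrayUtils.contains(languages, "ar"));        // true (Object[] overload)
        System.out.println(ArrayUtils.contains((boolean[]) null, true)); // false: null array never contains a value
    }
}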
From source file:net.sf.zekr.engine.search.lucene.ZekrLuceneAnalyzer.java
public TokenStream tokenStream(String fieldName, Reader reader) {
    TokenStream resultTokenStream = null;
    if (ArrayUtils.contains(new String[] { "fa", "pk", "ar", "ps", "ku", "hw" }, id)) {
        // non-European languages
        resultTokenStream = new WhitespaceTokenizer(reader);
    } else {
        resultTokenStream = new StandardTokenizer(Version.LUCENE_CURRENT, reader);
    }
    resultTokenStream = new StandardFilter(resultTokenStream);
    resultTokenStream = new LowerCaseFilter(resultTokenStream);
    SearchInfo searchInfo = conf.getSearchInfo();
    if (searchInfo.getStopWord(id) != null && searchInfo.getStopWord(id).size() > 0) {
        resultTokenStream = new StopFilter(false, resultTokenStream, searchInfo.getStopWord(id));
    }
    Map<Pattern, String> replacePattern = new LinkedHashMap<Pattern, String>(searchInfo.getReplacePattern(id));
    if (searchInfo.getDiacritic(id) != null) {
        replacePattern.put(searchInfo.getDiacritic(id), "");
    }
    if (searchInfo.getPunctuation(id) != null) {
        replacePattern.put(searchInfo.getPunctuation(id), "");
    }
    resultTokenStream = new RegexReplaceFilter(resultTokenStream, replacePattern);
    if (name != null) {
        resultTokenStream = new SnowballFilter(resultTokenStream, name);
    }
    return resultTokenStream;
}
From source file:com.uber.hoodie.hive.client.SchemaUtil.java
/**
 * Get the schema difference between the storage schema and hive table schema
 *
 * @param storageSchema
 * @param tableSchema
 * @param partitionKeys
 * @return
 */
public static SchemaDifference getSchemaDifference(MessageType storageSchema, Map<String, String> tableSchema,
        String[] partitionKeys) {
    Map<String, String> newTableSchema;
    try {
        newTableSchema = convertParquetSchemaToHiveSchema(storageSchema);
    } catch (IOException e) {
        throw new HoodieHiveDatasetException("Failed to convert parquet schema to hive schema", e);
    }
    LOG.info("Getting schema difference for " + tableSchema + "\r\n\r\n" + newTableSchema);
    SchemaDifference.Builder schemaDiffBuilder = SchemaDifference.newBuilder(storageSchema, tableSchema);
    Set<String> tableColumns = Sets.newHashSet();
    for (Map.Entry<String, String> field : tableSchema.entrySet()) {
        String fieldName = field.getKey().toLowerCase();
        String tickSurroundedFieldName = tickSurround(fieldName);
        if (!isFieldExistsInSchema(newTableSchema, tickSurroundedFieldName)
                && !ArrayUtils.contains(partitionKeys, fieldName)) {
            schemaDiffBuilder.deleteTableColumn(fieldName);
        } else {
            // check type
            String tableColumnType = field.getValue();
            if (!isFieldExistsInSchema(newTableSchema, tickSurroundedFieldName)) {
                if (ArrayUtils.contains(partitionKeys, fieldName)) {
                    // Partition key does not have to be part of the storage schema
                    continue;
                }
                // We will log this and continue. Hive schema is a superset of all parquet schemas
                LOG.warn("Ignoring table column " + fieldName + " as its not present in the parquet schema");
                continue;
            }
            tableColumnType = tableColumnType.replaceAll("\\s+", "");
            String expectedType = getExpectedType(newTableSchema, tickSurroundedFieldName);
            expectedType = expectedType.replaceAll("\\s+", "");
            expectedType = expectedType.replaceAll("`", "");
            if (!tableColumnType.equalsIgnoreCase(expectedType)) {
                // check for incremental datasets, the schema type change is allowed as per evolution rules
                if (!isSchemaTypeUpdateAllowed(tableColumnType, expectedType)) {
                    throw new HoodieHiveDatasetException("Could not convert field Type from " + tableColumnType
                            + " to " + expectedType + " for field " + fieldName);
                }
                schemaDiffBuilder.updateTableColumn(fieldName,
                        getExpectedType(newTableSchema, tickSurroundedFieldName));
            }
        }
        tableColumns.add(tickSurroundedFieldName);
    }
    for (Map.Entry<String, String> entry : newTableSchema.entrySet()) {
        if (!tableColumns.contains(entry.getKey().toLowerCase())) {
            schemaDiffBuilder.addTableColumn(entry.getKey(), entry.getValue());
        }
    }
    LOG.info("Difference between schemas: " + schemaDiffBuilder.build().toString());
    return schemaDiffBuilder.build();
}
From source file:com.manydesigns.elements.reflection.FilteredClassAccessor.java
protected FilteredClassAccessor(ClassAccessor delegate, boolean whitelist, String... properties) {
    this.delegate = delegate;
    List<PropertyAccessor> propertiesList = new ArrayList<PropertyAccessor>();
    List<PropertyAccessor> keyPropertiesList = new ArrayList<PropertyAccessor>();
    for (PropertyAccessor p : delegate.getProperties()) {
        if (whitelist == ArrayUtils.contains(properties, p.getName())) {
            propertiesList.add(p);
            if (ArrayUtils.contains(delegate.getKeyProperties(), p)) {
                keyPropertiesList.add(p);
            }
        }
    }
    if (whitelist && propertiesList.size() != properties.length) {
        for (String property : properties) {
            try {
                delegate.getProperty(property); // Cause exception to be thrown
            } catch (NoSuchFieldException e) {
                throw new RuntimeException(e);
            }
        }
    }
    this.properties = propertiesList.toArray(new PropertyAccessor[propertiesList.size()]);
    this.keyProperties = keyPropertiesList.toArray(new PropertyAccessor[keyPropertiesList.size()]);
}
From source file:eu.sofia.adk.common.xsd.JavaDatatype.java
/**
 * Determines if the datatype is a long datatype
 * @param datatype a string representation of the datatype
 * @return <code>true</code> if the datatype is a long type
 */
private static boolean isLongDatatype(String datatype) {
    return ArrayUtils.contains(longDatatype, datatype);
}
From source file:edu.illinois.cs.cogcomp.edison.features.lrec.TestCorlex.java
public final void test() throws EdisonException {
    log.debug("Corlex Feature Extractor");
    // Using the first TA and a constituent between span of 30-40 as a test
    TextAnnotation ta = tas.get(1);
    View TOKENS = ta.getView("TOKENS");
    log.debug("Got tokens FROM TextAnnotation");
    CorelexFeatureExtractor testInstance = new CorelexFeatureExtractor(true);
    Set<Feature> feats = testInstance.getWordFeatures(ta, 1);
    String[] expected_outputs = { "atr" };
    if (feats == null) {
        log.debug("Feats are returning NULL.");
    }
    log.debug("Printing Set of Features");
    for (Feature f : feats) {
        log.debug(f.getName());
        assertTrue(ArrayUtils.contains(expected_outputs, f.getName()));
    }
}
From source file:com.adobe.acs.commons.workflow.bulk.impl.ResumableResourceVisitor.java
@Override
public final void accept(final Resource resource) {
    // Only accept the Root folder and cq:Page and cq:PageContent nodes; all other structures are
    // uninteresting to this functionality and may be very large
    final ValueMap properties = resource.adaptTo(ValueMap.class);
    final String primaryType = properties.get(JcrConstants.JCR_PRIMARYTYPE, String.class);
    if (BULK_WORKFLOW_MANAGER_PAGE_FOLDER_PATH.equals(resource.getPath())) {
        super.accept(resource);
    } else if (ArrayUtils.contains(ACCEPTED_PRIMARY_TYPES, primaryType)) {
        super.accept(resource);
    }
}
From source file:com.hubspot.utils.circuitbreaker.CircuitBreakerWrapper.java
/**
 * Wraps the supplied object toWrap in a CircuitBreaker conforming to the supplied CircuitBreakerPolicy.
 */
public <T, W extends T> T wrap(W toWrap, Class<T> interfaceToProxy, CircuitBreakerPolicy policy)
        throws CircuitBreakerWrappingException {
    sanityCheck(toWrap, interfaceToProxy, policy);
    // walk the chain of interfaces implemented by T and check for their blacklisted methods
    Stack<Class<?>> implementedInterfaces = new Stack<Class<?>>();
    implementedInterfaces.addAll(Arrays.asList(interfaceToProxy.getInterfaces()));
    implementedInterfaces.add(interfaceToProxy);
    Map<Method, Class[]> blacklist = new HashMap();
    while (!implementedInterfaces.isEmpty()) {
        Class<?> implementedInterface = implementedInterfaces.pop();
        for (Method m : implementedInterface.getDeclaredMethods()) {
            // check that the blacklisted method throws CircuitBreakerException
            if (m.isAnnotationPresent(CircuitBreakerExceptionBlacklist.class)) {
                if (!ArrayUtils.contains(m.getExceptionTypes(), CircuitBreakerException.class)) {
                    throw new CircuitBreakerWrappingException(
                            "Wrapped methods must throw CircuitBreakerException");
                }
                CircuitBreakerExceptionBlacklist a = (CircuitBreakerExceptionBlacklist) m
                        .getAnnotation(CircuitBreakerExceptionBlacklist.class);
                blacklist.put(m, a.blacklist());
            }
        }
        implementedInterfaces.addAll(Arrays.asList(implementedInterface.getInterfaces()));
    }
    Class<?>[] interfaces = new Class<?>[] { interfaceToProxy };
    InvocationHandler handler = new CircuitBreakerInvocationHandler(toWrap, blacklist, policy);
    T newProxyInstance = (T) Proxy.newProxyInstance(getClass().getClassLoader(), interfaces, handler);
    return newProxyInstance;
}
From source file:hudson.plugins.clearcase.history.OperationFilter.java
@Override
public boolean accept(HistoryEntry entry) {
    if (!ArrayUtils.contains(getApplicableOperations(), entry.getOperation())) {
        // Operation not applicable.
        return getAllowOtherOperations();
    }
    if (namePatterns == null) {
        // No name filtering requested, accept operation.
        return true;
    }
    String objectName = getObjectName(entry);
    for (Pattern pattern : namePatterns) {
        if (pattern.matcher(objectName).matches()) {
            return true;
        }
    }
    return false;
}
From source file:de.codesourcery.eve.skills.ui.utils.PersistentDialogManager.java
public PersistentDialogManager() {
    SpringBeanInjector.getInstance().injectDependencies(this);
    configProvider.addChangeListener(new IAppConfigChangeListener() {
        @Override
        public void appConfigChanged(AppConfig config, String... properties) {
            if (ArrayUtils.contains(properties, AppConfig.PROP_ALL_DIALOGS_REENABLED)) {
                reenableAllDialogs();
            }
        }
    });
}
From source file:com.github.ithildir.liferay.mobile.go.GoUtil.java
public String getParameterName(String parameterName) {
    if (ArrayUtils.contains(_RESERVED_KEYWORDS, parameterName)) {
        return CharPool.UNDERLINE + parameterName;
    }
    return parameterName;
}