List of usage examples for org.springframework.util.StringUtils.tokenizeToStringArray
public static String[] tokenizeToStringArray(@Nullable String str, String delimiters, boolean trimTokens, boolean ignoreEmptyTokens)
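Internally the method delegates to a java.util.StringTokenizer, so each character of the delimiters string acts as an individual delimiter rather than the whole string acting as one. Below is a minimal, self-contained sketch of the four-argument variant; the TokenizeDemo class is illustrative and not taken from any of the source files that follow:

import java.util.Arrays;

import org.springframework.util.StringUtils;

public class TokenizeDemo {
    public static void main(String[] args) {
        // trimTokens = true, ignoreEmptyTokens = true: whitespace-only tokens are dropped
        String[] compact = StringUtils.tokenizeToStringArray(" a , , b ", ",", true, true);
        System.out.println(Arrays.toString(compact)); // [a, b]

        // trimTokens = true, ignoreEmptyTokens = false: the whitespace-only middle token is kept as ""
        String[] full = StringUtils.tokenizeToStringArray(" a , , b ", ",", true, false);
        System.out.println(Arrays.toString(full)); // [a, , b]

        // A null input yields an empty array in Spring 5+ (earlier versions returned null)
        String[] none = StringUtils.tokenizeToStringArray(null, ",", true, true);
        System.out.println(none.length); // 0
    }
}

Most of the examples below follow the same pattern: tokenize a comma-separated configuration value with trimming enabled, then wrap the resulting array with Arrays.asList for storage in a List field.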
From source file:com.seajas.search.codex.service.codex.CodexService.java
/**
 * Default constructor.
 *
 * @param availableApplicationLanguages
 */
@Autowired
public CodexService(@Value("${codex.project.languages.available}") final String availableApplicationLanguages) {
    this.availableApplicationLanguages = Arrays
            .asList(StringUtils.tokenizeToStringArray(availableApplicationLanguages, ",", true, true));
}
From source file:com.ryantenney.metrics.spring.reporter.DatadogReporterFactoryBean.java
@SuppressWarnings("resource") @Override//from www.ja v a2 s .c o m protected DatadogReporter createInstance() { final DatadogReporter.Builder reporter = DatadogReporter.forRegistry(getMetricRegistry()); final Transport transport; String transportName = getProperty(TRANSPORT); if ("http".equalsIgnoreCase(transportName)) { HttpTransport.Builder builder = new HttpTransport.Builder(); builder.withApiKey(getProperty(API_KEY)); if (hasProperty(CONNECT_TIMEOUT)) { builder.withConnectTimeout(getProperty(CONNECT_TIMEOUT, Integer.class)); } if (hasProperty(SOCKET_TIMEOUT)) { builder.withSocketTimeout(getProperty(SOCKET_TIMEOUT, Integer.class)); } transport = builder.build(); } else if ("udp".equalsIgnoreCase(transportName) || "statsd".equalsIgnoreCase(transportName)) { UdpTransport.Builder builder = new UdpTransport.Builder(); if (hasProperty(STATSD_HOST)) { builder.withStatsdHost(getProperty(STATSD_HOST)); } if (hasProperty(STATSD_PORT)) { builder.withPort(getProperty(STATSD_PORT, Integer.class)); } if (hasProperty(STATSD_PREFIX)) { builder.withPrefix(getProperty(STATSD_PREFIX)); } transport = builder.build(); } else { throw new IllegalArgumentException("Invalid Datadog Transport: " + transportName); } reporter.withTransport(transport); if (hasProperty(TAGS)) { reporter.withTags(asList(StringUtils.tokenizeToStringArray(getProperty(TAGS), ",", true, true))); } if (StringUtils.hasText(getProperty(HOST))) { reporter.withHost(getProperty(HOST)); } else if ("true".equalsIgnoreCase(getProperty(EC2_HOST))) { try { reporter.withEC2Host(); } catch (IOException e) { throw new IllegalStateException("DatadogReporter.Builder.withEC2Host threw an exception", e); } } if (hasProperty(EXPANSION)) { String configString = getProperty(EXPANSION).trim().toUpperCase(Locale.ENGLISH); final EnumSet<Expansion> expansions; if ("ALL".equals(configString)) { expansions = Expansion.ALL; } else { expansions = EnumSet.noneOf(Expansion.class); for (String expandedMetricStr : StringUtils.tokenizeToStringArray(configString, ",", true, true)) { expansions.add(Expansion.valueOf(expandedMetricStr.replace(' ', '_'))); } } reporter.withExpansions(expansions); } if (hasProperty(DYNAMIC_TAG_CALLBACK_REF)) { reporter.withDynamicTagCallback(getPropertyRef(DYNAMIC_TAG_CALLBACK_REF, DynamicTagsCallback.class)); } if (hasProperty(METRIC_NAME_FORMATTER_REF)) { reporter.withMetricNameFormatter(getPropertyRef(METRIC_NAME_FORMATTER_REF, MetricNameFormatter.class)); } if (hasProperty(PREFIX)) { reporter.withPrefix(getProperty(PREFIX)); } if (hasProperty(DURATION_UNIT)) { reporter.convertDurationsTo(getProperty(DURATION_UNIT, TimeUnit.class)); } if (hasProperty(RATE_UNIT)) { reporter.convertRatesTo(getProperty(RATE_UNIT, TimeUnit.class)); } if (hasProperty(CLOCK_REF)) { reporter.withClock(getPropertyRef(CLOCK_REF, Clock.class)); } reporter.filter(getMetricFilter()); return reporter.build(); }
From source file:nl.minbzk.dwr.zoeken.enricher.uploader.ElasticSearchResultUploader.java
/**
 * Default constructor.
 *
 * @param settings
 * @throws java.net.MalformedURLException
 */
@Autowired
public ElasticSearchResultUploader(final EnricherSettings settings) throws MalformedURLException {
    if (!StringUtils.hasText(settings.getElasticSearchUri()))
        return;

    this.elasticSearchUris = Arrays
            .asList(StringUtils.tokenizeToStringArray(settings.getElasticSearchUri(), ",", true, true));
    this.elasticSearchClusterName = StringUtils.hasText(settings.getElasticSearchClusterName())
            ? settings.getElasticSearchClusterName()
            : null;
}
From source file:org.web4thejob.web.panel.DefaultTabbedEntityViewPanel.java
private String getSettingPart(SettingEnum id, int index, String defaultValue) {
    String value = getSettingValue(id, defaultValue);

    if (value != null && StringUtils.hasText(value)) {
        String[] parts = StringUtils.tokenizeToStringArray(value, ",", true, false);

        if (parts.length > index) {
            return parts[index];
        }
    }

    return defaultValue;
}
From source file:com.seajas.search.contender.service.enricher.EnricherService.java
/**
 * Default constructor.
 *
 * @param thumbnailEnclosures
 * @param geoRssInclusions
 */
@Autowired
public EnricherService(
        @Value("${contender.project.rss.reader.thumbnail.enclosures}") final String thumbnailEnclosures,
        @Value("${contender.project.search.enricher.georss.included.hosts}") final String geoRssInclusions) {
    this.thumbnailEnclosures = Arrays
            .asList(StringUtils.tokenizeToStringArray(thumbnailEnclosures, ",", true, true));
    this.geoRssInclusions = Arrays.asList(StringUtils.tokenizeToStringArray(geoRssInclusions, ",", true, true));
}
From source file:com.seajas.search.profiler.service.task.TaskService.java
/**
 * Inject the triggers for the given group - optionally distributing those that require it.
 *
 * @param groupName
 * @param triggers
 * @param triggersDistributed
 * @param taskScheduler
 */
private void injectTriggers(final String groupName, final String triggers,
        final List<String> triggersDistributed, final Scheduler taskScheduler) {
    String injectionJob;

    if (groupName.equals(GROUP_FEED))
        injectionJob = JOB_FEED_INJECTION;
    else if (groupName.equals(GROUP_ARCHIVE))
        injectionJob = JOB_ARCHIVE_INJECTION;
    else
        throw new IllegalStateException("Unknown group '" + groupName + "'");

    for (String injectionTrigger : StringUtils.tokenizeToStringArray(triggers, ",", true, true)) {
        String[] keyValue = injectionTrigger.split(":");

        if (keyValue.length != 2 || !StringUtils.hasText(keyValue[0]) || !StringUtils.hasText(keyValue[1]))
            throw new IllegalArgumentException("Invalid " + groupName + " injection trigger '"
                    + injectionTrigger + "' - should be of type <key>:<cronExpression>");

        if (staticLogger.isInfoEnabled())
            staticLogger.info("Creating " + groupName + " injection trigger '" + keyValue[0].trim()
                    + "' using cron-pattern '" + keyValue[1].trim() + "'");

        if (groupName.equals(GROUP_FEED))
            this.feedInjectionTriggers.put(keyValue[0].trim(), keyValue[1].trim());
        else if (groupName.equals(GROUP_ARCHIVE))
            this.archiveInjectionTriggers.put(keyValue[0].trim(), keyValue[1].trim());
        else
            throw new IllegalStateException("Unknown group '" + groupName + "'");

        // Determine if this trigger should be distributed
        if (triggersDistributed.contains(keyValue[0])) {
            CronTriggerImpl trigger = new CronTriggerImpl();
            List<Date> dates;

            try {
                trigger.setCronExpression(keyValue[1]);

                dates = TriggerUtils.computeFireTimes(trigger, new BaseCalendar(), 2);
            } catch (ParseException e) {
                throw new IllegalStateException(e);
            }

            if (dates.size() != 2)
                throw new IllegalStateException("The trigger '" + keyValue[1]
                        + "' does not evaluate to at least two future fire times");

            Long interval = dates.get(1).getTime() - dates.get(0).getTime();

            if (staticLogger.isInfoEnabled())
                staticLogger.info("Distributed " + groupName
                        + " injection trigger evaluated to a total interval of " + interval
                        + " milliseconds - will fire every second instead");

            SimpleTrigger perSecondTrigger = TriggerBuilder.newTrigger()
                    .withIdentity(keyValue[0].trim(), groupName)
                    .withSchedule(SimpleScheduleBuilder.repeatSecondlyForever())
                    .forJob(new JobKey(injectionJob, groupName)).build();

            perSecondTrigger.getJobDataMap().put("intervalTotal", interval / 1000);

            try {
                taskScheduler.scheduleJob(perSecondTrigger);
            } catch (SchedulerException e) {
                staticLogger.error("Unable to schedule distributed feedInjection job with cron-trigger '"
                        + keyValue[0].trim() + "' / '" + keyValue[1].trim() + "'", e);
            }
        } else {
            // Now create a cron trigger and attach it to the appropriate job
            CronTrigger trigger = TriggerBuilder.newTrigger().withIdentity(keyValue[0].trim(), groupName)
                    .withSchedule(CronScheduleBuilder.cronSchedule(keyValue[1].trim()))
                    .forJob(new JobKey(injectionJob, groupName)).build();

            try {
                taskScheduler.scheduleJob(trigger);
            } catch (SchedulerException e) {
                staticLogger.error("Unable to schedule feedInjection job with cron-trigger '"
                        + keyValue[0].trim() + "' / '" + keyValue[1].trim() + "'", e);
            }
        }
    }
}
From source file:com.seajas.search.attender.service.attender.AttenderService.java
/**
 * Default constructor.
 *
 * @param availableApplicationLanguages
 * @param availableSearchLanguages
 */
@Autowired
public AttenderService(
        @Value("${attender.project.languages.available}") final String availableApplicationLanguages,
        @Value("${attender.project.search.service.languages}") final String availableSearchLanguages) {
    this.availableApplicationLanguages = Arrays
            .asList(StringUtils.tokenizeToStringArray(availableApplicationLanguages, ",", true, true));
    this.availableSearchLanguages = Arrays
            .asList(StringUtils.tokenizeToStringArray(availableSearchLanguages, ",", true, true));
}
From source file:com.seajas.search.profiler.service.profiler.ProfilerService.java
/**
 * Default constructor.
 *
 * @param jobNames
 * @param jobDescriptions
 * @param availableApplicationLanguages
 * @param availableSearchLanguages
 * @param jmsRequestQueue
 * @param jmsConnectionFactory
 * @throws Exception
 */
@Autowired
public ProfilerService(@Value("${profiler.project.search.enricher.jobs}") final String jobNames,
        @Value("${profiler.project.search.enricher.jobs.descriptions}") final String jobDescriptions,
        @Value("${profiler.project.languages.available}") final String availableApplicationLanguages,
        @Value("${profiler.project.search.languages}") final String availableSearchLanguages,
        @Qualifier("jmsPrimaryRequestQueue") final ActiveMQQueue jmsRequestQueue,
        @Qualifier("connectionFactory") final ConnectionFactory jmsConnectionFactory) throws Exception {
    /*
     * InputStream caCertificate = getClass().getClassLoader().getResourceAsStream("ca.crt");
     *
     * LicenseValidator.validateLicenseFile(caCertificate, licenseFile);
     *
     * try { caCertificate.close(); } catch (IOException e) { logger.error("Could not close the CA certificate stream."); }
     */
    String[] names = jobNames.split(",");
    String[] descriptions = jobDescriptions.split(",");

    this.jobNames = new LinkedHashMap<String, String>();

    for (int i = 0; i < names.length; i++)
        this.jobNames.put(names[i].trim(), descriptions[i].trim());

    this.availableApplicationLanguages = Arrays
            .asList(StringUtils.tokenizeToStringArray(availableApplicationLanguages, ",", true, true));
    this.availableSearchLanguages = Arrays
            .asList(StringUtils.tokenizeToStringArray(availableSearchLanguages, ",", true, true));

    // Keep track of the active consumers on the request channel
    Connection connection = jmsConnectionFactory.createConnection();
    Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);

    connection.start();

    Destination destinationAdvisory = AdvisorySupport.getConsumerAdvisoryTopic(jmsRequestQueue);
    MessageConsumer consumerAdvisory = session.createConsumer(destinationAdvisory);

    consumerAdvisory.setMessageListener(new MessageListener() {
        @Override
        public void onMessage(final Message message) {
            try {
                Object consumerCount = ((ActiveMQMessage) message).getProperty("consumerCount");

                if (consumerCount != null) {
                    String clientId = ((ActiveMQMessage) message).getConnection().getConnectionInfo()
                            .getClientId();

                    if (activeContenderClients.contains(clientId) && ((Integer) consumerCount == 0)) {
                        if (staticLogger.isInfoEnabled())
                            staticLogger.info("Client with ID " + clientId
                                    + " was dropped from the current consumer-clients");

                        activeContenderClients.remove(clientId);
                    } else if (!activeContenderClients.contains(clientId) && ((Integer) consumerCount > 0)) {
                        if (staticLogger.isInfoEnabled())
                            staticLogger.info("Client with ID " + clientId
                                    + " was added to the current consumer-clients");

                        activeContenderClients.add(clientId);
                    }
                }
            } catch (IOException e) {
                staticLogger.error("Could not retrieve consumer count from connection message", e);
            }
        }
    });
}
From source file:nl.minbzk.dwr.zoeken.enricher.processor.UIMAInjector.java
/**
 * Process the given content through the UIMA pipeline.
 *
 * @param documentOutput
 * @param documentId
 * @param detectedLanguage
 * @param mediaType
 * @param processorOutput
 * @throws AnalysisEngineProcessException
 */
public void inject(final String documentOutput, final String documentId, final String detectedLanguage,
        final MediaType mediaType, final ProcessorContent processorOutput)
        throws AnalysisEngineProcessException {
    if (logger.isTraceEnabled())
        logger.trace(format("Feeding document %s (%s) as text: %s", documentId, mediaType, documentOutput));

    try {
        Feature alternativeFeature = getFeature("alternative");
        Feature locationFeature = getFeature("location");

        cas.setDocumentLanguage(uimaUnit.getLanguage());
        cas.setDocumentText(cleanOutput(documentOutput, mediaType));

        // Now add in document metadata and set the document ID
        Type metadataType = cas.getTypeSystem().getType("enricher.uima.DocumentMetadata");
        FeatureStructure metadataFS = cas.createFS(metadataType);

        metadataFS.setStringValue(metadataType.getFeatureByBaseName("id"), documentId);

        cas.addFsToIndexes(metadataFS);

        // And continue on with processing
        if (logger.isTraceEnabled())
            logger.trace("Start processing the relevant CAS");

        uimaUnit.getAnalysisEngine().process(cas);

        if (logger.isTraceEnabled())
            logger.trace("Finished processing the relevant CAS - now cycling through results");

        long sessionProcessingTime = uimaUnit.getSessionProcessingTime();

        if (sessionProcessingTime > 0) {
            if (logger.isInfoEnabled())
                logger.info(format("Reporting the session processing time as %d", sessionProcessingTime));

            ((AnalysisEngineManagementImpl) uimaUnit.getInternalManagementInterface())
                    .reportServiceCallTime(sessionProcessingTime);
            ((AnalysisEngineManagementImpl) uimaUnit.getInternalManagementInterface())
                    .incrementCASesProcessed();
        }

        // Filter down to the relevant data sets
        Map<String, Map<String, Integer>> annotationsWithCounts = new HashMap<String, Map<String, Integer>>();
        Map<String, List<String>> annotationsWithDuplicates = new HashMap<String, List<String>>();
        Map<String, List<String>> annotationsWithLocations = new HashMap<String, List<String>>();

        if (logger.isTraceEnabled())
            logger.trace("Filtering types");

        filterTypes(documentId, alternativeFeature, locationFeature, annotationsWithCounts,
                annotationsWithDuplicates, annotationsWithLocations);

        if (logger.isTraceEnabled())
            logger.trace("Ordering types");

        orderTypes(detectedLanguage, processorOutput, annotationsWithCounts);

        if (logger.isTraceEnabled())
            logger.trace("Deduplicating types");

        deduplicateTypes(processorOutput, annotationsWithDuplicates);

        // And the relevant geo-locations (also including duplicates)
        if (shouldPerformGeoSpatialAnalysis()) {
            if (logger.isTraceEnabled())
                logger.trace("Geo-filtering types");

            filterGeo(processorOutput, annotationsWithLocations);
        }

        // And extract the sentiment value, if present
        String sentimentValue = metadataFS.getStringValue(metadataType.getFeatureByBaseName("sentiment"));

        if (StringUtils.hasText(sentimentValue)) {
            if (logger.isInfoEnabled())
                logger.info(format("Sentiment value determined to be %s", sentimentValue));

            List<String> sentimentValues = new ArrayList<String>(1);

            sentimentValues.add(sentimentValue);

            processorOutput.getMetadata().put(SENTIMENT_FIELD, sentimentValues);
        } else {
            if (logger.isTraceEnabled())
                logger.trace("No sentiment value was extracted or could be provided");
        }

        // And extract the classifications value, if present
        String classificationsValue = metadataFS
                .getStringValue(metadataType.getFeatureByBaseName("classifications"));

        if (StringUtils.hasText(classificationsValue)) {
            if (logger.isInfoEnabled())
                logger.info(format("Classification value determined to be %s", classificationsValue));

            List<String> classificationValues = new ArrayList<String>();

            classificationValues.addAll(
                    Arrays.asList(StringUtils.tokenizeToStringArray(classificationsValue, ",", true, false)));

            processorOutput.getMetadata().put(CLASSIFICATIONS_FIELD, classificationValues);
        } else {
            if (logger.isTraceEnabled())
                logger.trace("No classifications value was extracted or could be provided");
        }

        if (logger.isTraceEnabled())
            logger.trace("Finished injecting UIMA-generated metadata");
    } catch (RuntimeException e) {
        logger.error("UIMA processing failed", e);
    } catch (Exception e) {
        logger.error("UIMA processing failed", e);
    } finally {
        cas.reset();
    }
}
From source file:nl.minbzk.dwr.zoeken.enricher.settings.EnricherSettings.java
/**
 * Default constructor.
 *
 * @param extraProperties
 * @throws Exception
 */
public EnricherSettings(final Properties extraProperties) throws Exception {
    if (StringUtils.hasText(System.getProperty(PROPERTY_LOGLEVEL)))
        ((Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME))
                .setLevel(Level.toLevel(System.getProperty(PROPERTY_LOGLEVEL)));

    // Fetch the initial properties
    try {
        InputStream internalStream = getClass().getClassLoader().getResourceAsStream("enricher.properties");

        properties.load(internalStream);

        internalStream.close();

        // Load the external properties as an addition to the internal ones
        if (extraProperties != null)
            for (Entry<Object, Object> extraProperty : extraProperties.entrySet())
                properties.setProperty(extraProperty.getKey().toString(), extraProperty.getValue().toString());
    } catch (IOException e) {
        logger.error("Could not initialize settings from the given properties files", e);
    }

    databaseMatch = properties.getProperty(KEY_ENVELOPE_MATCH_DB).trim();
    encodingMatch = properties.getProperty(KEY_ENVELOPE_MATCH_ENCODING).trim();

    if (properties.containsKey(KEY_PROCESSOR_TIKA_DETECTORS))
        for (String detector : properties.getProperty(KEY_PROCESSOR_TIKA_DETECTORS).split(","))
            tikaDetectors.add(detector.trim());

    if (properties.containsKey(KEY_PROCESSOR_TIKA_PARSERS))
        for (String parser : properties.getProperty(KEY_PROCESSOR_TIKA_PARSERS).split(","))
            tikaParsers.add(parser.trim());

    aciAddUri = properties.getProperty(KEY_GENERATOR_ACI_ADD_URI).trim();
    aciCommitUri = properties.getProperty(KEY_GENERATOR_ACI_COMMIT_URI).trim();
    aciDeleteReferenceUri = properties.getProperty(KEY_GENERATOR_ACI_DELETE_REFERENCE_URI).trim();
    aciDeleteDocIdUri = properties.getProperty(KEY_GENERATOR_ACI_DELETE_DOCID_URI).trim();
    aciUserAgent = properties.getProperty(KEY_GENERATOR_ACI_USER_AGENT).trim();

    solrUri = properties.getProperty(KEY_GENERATOR_SOLR_URI).trim();
    solrCloudUri = properties.getProperty(KEY_GENERATOR_SOLR_CLOUD_URI).trim();
    solrUniqueKeyComposition = properties.getProperty(KEY_GENERATOR_SOLR_UNIQUE_KEY_COMPOSITION).trim();

    elasticSearchUri = properties.getProperty(KEY_GENERATOR_ELASTIC_SEARCH_URI).trim();
    elasticSearchClusterName = properties.getProperty(KEY_GENERATOR_ELASTIC_SEARCH_CLUSTER_NAME).trim();
    elasticSearchUniqueKeyComposition = properties
            .getProperty(KEY_GENERATOR_ELASTIC_SEARCH_UNIQUE_KEY_COMPOSITION).trim();

    if (properties.containsKey(KEY_GENERATOR_DOCUMENTS_PER_UPLOAD))
        documentsPerUpload = new Integer(properties.getProperty(KEY_GENERATOR_DOCUMENTS_PER_UPLOAD));

    if (properties.containsKey(KEY_LANGUAGE_ANALYSIS_THRESHOLD))
        languageAnalysisThreshold = new Integer(properties.getProperty(KEY_LANGUAGE_ANALYSIS_THRESHOLD));

    if (properties.containsKey(KEY_LANGUAGE_ANALYSIS_MAXIMUM_INSTANCES))
        languageAnalysisMaximumInstances = new Integer(
                properties.getProperty(KEY_LANGUAGE_ANALYSIS_MAXIMUM_INSTANCES));

    if (properties.containsKey(KEY_LANGUAGE_ANALYSIS_WAITING_TIMEOUT))
        languageAnalysisWaitingTimeout = new Integer(
                properties.getProperty(KEY_LANGUAGE_ANALYSIS_WAITING_TIMEOUT));

    if (properties.containsKey(KEY_LANGUAGE_DETECTION_PROFILES))
        languageDetectionProfiles = properties.getProperty(KEY_LANGUAGE_DETECTION_PROFILES);

    if (properties.containsKey(KEY_ATTENDER_MEMORY_INDEX_CONFIGURATION))
        attenderMemoryIndexConfiguration = properties.getProperty(KEY_ATTENDER_MEMORY_INDEX_CONFIGURATION);

    if (properties.containsKey(KEY_ENVELOPE_WORD_BREAK_MIN)
            && properties.containsKey(KEY_ENVELOPE_WORD_BREAK_MAX)) {
        wordBreakMin = new Integer(properties.getProperty(KEY_ENVELOPE_WORD_BREAK_MIN));
        wordBreakMax = new Integer(properties.getProperty(KEY_ENVELOPE_WORD_BREAK_MAX));
    }

    for (Entry<Object, Object> property : properties.entrySet()) {
        String name = (String) property.getKey();

        if (name.startsWith(KEY_ENVELOPE_JOB)) {
            String jobName = name.substring(KEY_ENVELOPE_JOB.length() + 1,
                    name.indexOf('.', KEY_ENVELOPE_JOB.length() + 1));

            if (!jobs.containsKey(jobName)) {
                EnricherJob job = new EnricherJob();

                job.setName(jobName);
                job.setProcessedPath(retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_RESULT));
                job.setDatabaseName(retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_DB_NAME));
                job.setDatabaseNamePrerequisitesExpression(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_DB_NAME_PREREQUISITES));
                job.setDatabaseNameComposition(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_DB_NAME_COMPOSITION));
                job.setDatabaseType(retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_DB_TYPE));
                job.setGeneratorTypes(StringUtils.tokenizeToStringArray(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_GENERATOR_TYPE), ",", true, true));
                job.setGeneratorMapping(retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_GENERATOR_MAPPING));
                job.setBoilerpipeExtractor(retrieveJobProperty(jobName, KEY_ENVELOPE_BOILERPIPE_EXTRACTOR));
                job.setBoilerpipeExclusionField(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_BOILERPIPE_EXCLUSION_FIELD));
                job.setBoilerpipeExclusionValues(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_BOILERPIPE_EXCLUSION_VALUES));

                String[] stripTags = StringUtils.tokenizeToStringArray(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_STRIP_TAGS), ",", true, true);

                job.setStripTags(stripTags != null ? Arrays.asList(stripTags) : new ArrayList<String>());
                job.setFormatDetectionParameter(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_FORMAT_DETECTION_PARAMETER));
                job.setFormatDetectionDefault(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_FORMAT_DETECTION_DEFAULT));
                job.setLanguageDetectionParameter(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_LANGUAGE_DETECTION_PARAMETER));
                job.setLanguageDetectionSupported(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_LANGUAGE_DETECTION_SUPPORTED));
                job.setLanguageDetectionDefault(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_LANGUAGE_DETECTION_DEFAULT));
                job.setEntityDetectionLanguages(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_ENTITY_DETECTION_LANGUAGES));
                job.setEntityDetectionDescriptors(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_ENTITY_DETECTION_DESCRIPTORS));
                job.setEntityDetectionScanTypes(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_ENTITY_DETECTION_SCAN_TYPES));
                job.setEntityDetectionFieldPrefix(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_ENTITY_DETECTION_FIELD_PREFIX));
                job.setGeoSpatialFieldPrefix(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_GEO_SPATIAL_FIELD_PREFIX));
                job.setTikaResourceKeyPriority(
                        retrieveJobProperty(jobName, KEY_ENVELOPE_JOB_TIKA_RESOURCE_KEY_PRIORITY));
                job.setFieldConverters(deriveFieldConverters(jobName));

                jobs.put(jobName, job);
            }
        }
    }
}