List of usage examples for the org.apache.commons.configuration.PropertiesConfiguration no-argument constructor
public PropertiesConfiguration()
From source file:edu.cmu.lti.oaqa.bioasq.concept.retrieval.GoPubMedSeparateConceptRetrievalExecutor.java
@Override public void initialize(UimaContext context) throws ResourceInitializationException { super.initialize(context); String conf = UimaContextHelper.getConfigParameterStringValue(context, "conf"); PropertiesConfiguration gopubmedProperties = new PropertiesConfiguration(); try {//from w w w. j a v a 2 s. c o m gopubmedProperties.load(getClass().getResourceAsStream(conf)); } catch (ConfigurationException e) { throw new ResourceInitializationException(e); } service = new GoPubMedService(gopubmedProperties); pages = UimaContextHelper.getConfigParameterIntValue(context, "pages", 1); hits = UimaContextHelper.getConfigParameterIntValue(context, "hits", 1); bopQueryStringConstructor = new BagOfPhraseQueryStringConstructor(); timeout = UimaContextHelper.getConfigParameterIntValue(context, "timeout", 4); limit = UimaContextHelper.getConfigParameterIntValue(context, "limit", Integer.MAX_VALUE); }
From source file:com.pinterest.secor.io.FileReaderWriterFactoryTest.java
/**
 * Points {@code mConfig} at the delimited-text reader/writer factory
 * implementation used by the tests in this class.
 */
private void setupDelimitedTextFileWriterConfig() {
    final PropertiesConfiguration factoryProps = new PropertiesConfiguration();
    factoryProps.addProperty(
            "secor.file.reader.writer.factory",
            "com.pinterest.secor.io.impl.DelimitedTextFileReaderWriterFactory");
    mConfig = new SecorConfig(factoryProps);
}
From source file:com.linkedin.pinot.server.integration.IntegrationTest.java
/**
 * Builds a full single-node server fixture: loads the Pinot server
 * configuration from the test resources, starts a {@code ServerInstance},
 * and registers the pre-built test segments with the instance data manager.
 *
 * @throws Exception if the configuration cannot be loaded or the server
 *         fails to start
 */
@BeforeTest
public void setUp() throws Exception {
    // Process Command Line to get config and port
    // Start from a clean working directory so stale segments from a previous
    // run cannot interfere.
    FileUtils.deleteDirectory(new File("/tmp/pinot/test1"));
    setupSegmentList();
    File confFile = new File(TestUtils.getFileFromResourceUrl(
            InstanceServerStarter.class.getClassLoader().getResource("conf/" + PINOT_PROPERTIES)));
    // build _serverConf
    PropertiesConfiguration serverConf = new PropertiesConfiguration();
    // Keep comma-delimited values parsed as lists (explicitly the library default).
    serverConf.setDelimiterParsingDisabled(false);
    serverConf.load(confFile);
    _serverConf = new ServerConf(serverConf);
    LOGGER.info("Trying to create a new ServerInstance!");
    _serverInstance = new ServerInstance();
    LOGGER.info("Trying to initial ServerInstance!");
    _serverInstance.init(_serverConf, new MetricsRegistry());
    LOGGER.info("Trying to start ServerInstance!");
    _serverInstance.start();
    _queryExecutor = _serverInstance.getQueryExecutor();
    FileBasedInstanceDataManager instanceDataManager = (FileBasedInstanceDataManager) _serverInstance
            .getInstanceDataManager();
    for (int i = 0; i < 2; ++i) {
        // NOTE(review): the first getTableDataManager call discards its result —
        // presumably it lazily creates the table data manager before the second
        // call adds a segment; confirm whether it is actually required.
        instanceDataManager.getTableDataManager("testTable");
        instanceDataManager.getTableDataManager("testTable").addSegment(_indexSegmentList.get(i));
    }
}
From source file:com.pinterest.secor.io.FileReaderWriterTest.java
/**
 * Points {@code mConfig} at the delimited-text reader/writer implementation
 * used by the tests in this class.
 */
private void setupDelimitedTextFileReaderWriterConfig() {
    final PropertiesConfiguration readerWriterProps = new PropertiesConfiguration();
    readerWriterProps.addProperty(
            "secor.file.reader.writer",
            "com.pinterest.secor.io.impl.DelimitedTextFileReaderWriter");
    mConfig = new SecorConfig(readerWriterProps);
}
From source file:com.linkedin.pinot.core.segment.index.converter.SegmentV1V2ToV3FormatConverterTest.java
/**
 * Generates a v1/v2 test segment from the bundled Avro data into a fresh
 * temporary directory, plants a fake star-tree index file inside it, and
 * prepares the v1 and v3 index-loading configurations used by the
 * format-converter tests.
 *
 * @throws Exception if segment generation fails
 */
@BeforeMethod
public void setUp() throws Exception {
    INDEX_DIR = Files.createTempDirectory(SegmentV1V2ToV3FormatConverter.class.getName() + "_segmentDir")
            .toFile();
    final String filePath = TestUtils.getFileFromResourceUrl(
            SegmentV1V2ToV3FormatConverter.class.getClassLoader().getResource(AVRO_DATA));
    // intentionally changed this to TimeUnit.Hours to make it non-default for testing
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
    // Plant a dummy star-tree index file so the converter has one to carry over.
    File starTreeFile = new File(segmentDirectory, V1Constants.STAR_TREE_INDEX_FILE);
    FileUtils.touch(starTreeFile);
    FileUtils.writeStringToFile(starTreeFile, "This is a star tree index");
    // The same Configuration object is reused for both loading configs.
    // NOTE(review): this assumes IndexLoadingConfigMetadata copies the values it
    // needs at construction time — otherwise clear() below would wipe
    // v1LoadingConfig's settings. Confirm against IndexLoadingConfigMetadata.
    Configuration tableConfig = new PropertiesConfiguration();
    tableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v1");
    v1LoadingConfig = new IndexLoadingConfigMetadata(tableConfig);
    tableConfig.clear();
    tableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v3");
    v3LoadingConfig = new IndexLoadingConfigMetadata(tableConfig);
}
From source file:com.mirth.connect.manager.ManagerController.java
private PropertiesConfiguration initializeProperties(String path, boolean alert) { PropertiesConfiguration properties = new PropertiesConfiguration(); // Auto reload changes FileChangedReloadingStrategy fileChangedReloadingStrategy = new FileChangedReloadingStrategy(); fileChangedReloadingStrategy.setRefreshDelay(1000); properties.setReloadingStrategy(fileChangedReloadingStrategy); properties.setFile(new File(path)); if (properties.isEmpty() && alert) { alertErrorDialog("Could not load properties from file: " + path); }/*w w w .ja v a 2s. c om*/ return properties; }
From source file:fr.jetoile.hadoopunit.HadoopBootstrapRemoteStarter.java
/**
 * Rewrites {@code conf/hadoop.properties} under the Hadoop Unit installation:
 * backs up the current file to {@code hadoop.properties.old}, then writes one
 * {@code <value>=true} property per entry of {@code values} (keys lowercased).
 * Failures are logged, not thrown, so the build can continue best-effort.
 */
private void editHadoopUnitConfFile() {
    Path hadoopPropertiesPath = Paths.get(hadoopUnitPath, "conf", "hadoop.properties");
    Path hadoopPropertiesBackupPath = Paths.get(hadoopUnitPath, "conf", "hadoop.properties.old");

    // Remove any previous backup so the rename below cannot fail just because
    // the target already exists.
    if (hadoopPropertiesBackupPath.toFile().exists() && hadoopPropertiesBackupPath.toFile().canWrite()) {
        hadoopPropertiesBackupPath.toFile().delete();
    }
    // The original silently ignored a failed rename; surface it so a missing
    // backup is at least visible in the build log.
    if (!hadoopPropertiesPath.toFile().renameTo(hadoopPropertiesBackupPath.toFile())) {
        getLog().warn("unable to back up " + hadoopPropertiesPath + "; it will be overwritten");
    }

    PropertiesConfiguration configuration = new PropertiesConfiguration();
    values.forEach(v -> configuration.addProperty(v.toLowerCase(), "true"));

    // try-with-resources: PropertiesConfiguration.save(Writer) does not close
    // the writer it is handed, so the original leaked the FileWriter.
    try (FileWriter writer = new FileWriter(hadoopPropertiesPath.toFile())) {
        configuration.save(writer);
    } catch (ConfigurationException | IOException e) {
        getLog().error("unable to find or modifying hadoop.properties. Check user rights", e);
    }
}
From source file:com.linkedin.pinot.query.aggregation.AggregationMVGroupByMVQueriesTest.java
@BeforeClass public void setup() throws Exception { INSTANCE_PLAN_MAKER = new InstancePlanMakerImplV2(new QueryExecutorConfig(new PropertiesConfiguration())); setupSegmentList(NUM_SEGMENTS);//from ww w . ja v a2s . c om }
From source file:com.linkedin.pinot.query.executor.QueryExecutorTest.java
/**
 * Builds the query-executor fixture: loads the server configuration from the
 * test "conf" resource directory, initializes and starts a file-based instance
 * data manager with two pre-built segments of table "midas", and creates the
 * {@code ServerQueryExecutorV1Impl} under test.
 *
 * @throws Exception if configuration loading or data-manager startup fails
 */
@BeforeClass
public void setup() throws Exception {
    TableDataManagerProvider.setServerMetrics(new ServerMetrics(new MetricsRegistry()));
    File confDir = new File(QueryExecutorTest.class.getClassLoader().getResource("conf").toURI());
    setupSegmentList(2);
    // ServerBuilder serverBuilder = new ServerBuilder(confDir.getAbsolutePath());
    String configFilePath = confDir.getAbsolutePath();
    // build _serverConf
    PropertiesConfiguration serverConf = new PropertiesConfiguration();
    // Keep comma-delimited values parsed as lists (explicitly the library default).
    serverConf.setDelimiterParsingDisabled(false);
    serverConf.load(new File(configFilePath, PINOT_PROPERTIES));
    FileBasedInstanceDataManager instanceDataManager = FileBasedInstanceDataManager.getInstanceDataManager();
    instanceDataManager
            .init(new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
    instanceDataManager.start();
    for (int i = 0; i < 2; ++i) {
        // NOTE(review): the first getTableDataManager call discards its result —
        // presumably it lazily creates the table data manager before the second
        // call adds a segment; confirm whether it is actually required.
        instanceDataManager.getTableDataManager("midas");
        instanceDataManager.getTableDataManager("midas").addSegment(_indexSegmentList.get(i));
    }
    _queryExecutor = new ServerQueryExecutorV1Impl();
    _queryExecutor.init(serverConf.subset("pinot.server.query.executor"), instanceDataManager,
            new ServerMetrics(new MetricsRegistry()));
}
From source file:maltcms.ui.fileHandles.properties.tools.PropertyLoader.java
private static Tuple2D<Configuration, Configuration> loadPropertiesFromClass(Class<?> c) { PropertiesConfiguration ret = new PropertiesConfiguration(); PropertiesConfiguration var = new PropertiesConfiguration(); String requiredVariables = ""; String optionalVariables = ""; String providedVariables = ""; Collection<String> reqVars = AnnotationInspector.getRequiredVariables(c); for (String rv : reqVars) { requiredVariables += rv + ","; }//ww w . java 2s . com if (requiredVariables.length() > 0) { requiredVariables = requiredVariables.substring(0, requiredVariables.length() - 1); } var.setProperty(REQUIRED_VARS, requiredVariables); Collection<String> optVars = AnnotationInspector.getOptionalRequiredVariables(c); for (String rv : optVars) { optionalVariables += rv + ","; } if (optionalVariables.length() > 0) { optionalVariables = optionalVariables.substring(0, optionalVariables.length() - 1); } var.setProperty(OPTIONAL_VARS, optionalVariables); Collection<String> provVars = AnnotationInspector.getProvidedVariables(c); for (String rv : provVars) { providedVariables += rv + ","; } if (providedVariables.length() > 0) { providedVariables = providedVariables.substring(0, providedVariables.length() - 1); } var.setProperty(PROVIDED_VARS, providedVariables); Collection<String> keys = AnnotationInspector.getRequiredConfigKeys(c); if (!keys.isEmpty()) { for (String key : keys) { ret.setProperty(key, AnnotationInspector.getDefaultValueFor(c, key)); } } return new Tuple2D<Configuration, Configuration>(ret, var); }