Usage examples for org.apache.commons.io.FilenameUtils.concat(String basePath, String fullFilenameToAdd)
public static String concat(String basePath, String fullFilenameToAdd)
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/**
 * Reads the configuration information from the shared configuration directory and returns a
 * {@link Configuration} object.
 *
 * @param configName name of the configuration (e.g. cluster or group name)
 * @param configDirectory directory containing the cache.xml, properties and jar files
 * @return the populated {@link Configuration}
 * @throws TransformerException
 * @throws TransformerFactoryConfigurationError
 * @throws ParserConfigurationException
 * @throws SAXException
 */
private Configuration readConfiguration(final String configName, final String configDirectory)
    throws SAXException, ParserConfigurationException, TransformerFactoryConfigurationError,
    TransformerException {
  Configuration configuration = new Configuration(configName);
  String cacheXmlPath = FilenameUtils.concat(configDirectory, configuration.getCacheXmlFileName());
  String propertiesPath =
      FilenameUtils.concat(configDirectory, configuration.getPropertiesFileName());

  // Record the names of any jar files deployed into this configuration directory.
  String[] jarNames = new File(configDirectory).list(jarFileFilter);
  if (jarNames != null && jarNames.length != 0) {
    configuration.addJarNames(jarNames);
  }

  try {
    configuration.setCacheXmlContent(XmlUtils.readXmlAsStringFromFile(cacheXmlPath));
    configuration.setGemfireProperties(readProperties(propertiesPath));
  } catch (IOException e) {
    // Best-effort: a missing or unreadable file leaves the corresponding field unset.
    logger.info(e);
  }
  return configuration;
}
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/** * Reads the "shared_config" directory and loads all the cache.xml, gemfire.properties and deployed jars information * @return {@link Map}/*from w w w .ja v a 2 s . c om*/ * @throws TransformerException * @throws TransformerFactoryConfigurationError * @throws ParserConfigurationException * @throws SAXException */ private Map<String, Configuration> readSharedConfigurationFromDisk() throws SAXException, ParserConfigurationException, TransformerFactoryConfigurationError, TransformerException { String[] subdirectoryNames = getSubdirectories(configDirPath); Map<String, Configuration> sharedConfiguration = new HashMap<String, Configuration>(); if (subdirectoryNames != null) { for (String subdirectoryName : subdirectoryNames) { String fullpath = FilenameUtils.concat(configDirPath, subdirectoryName); Configuration configuration = readConfiguration(subdirectoryName, fullpath); sharedConfiguration.put(subdirectoryName, configuration); } } return sharedConfiguration; }
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/** * Removes the jar files from the given directory * @param dirPath Path of the configuration directory * @param jarNames Names of the jar files * @throws IOException/*from w ww .java2 s . co m*/ */ private void removeJarFiles(final String dirPath, final String[] jarNames) throws IOException { if (jarNames != null) { for (int i = 0; i < jarNames.length; i++) { File jarFile = new File(FilenameUtils.concat(dirPath, jarNames[i])); if (jarFile.exists()) { FileUtils.forceDelete(jarFile); } } } else { File dir = new File(dirPath); String[] jarFileNames = dir.list(jarFileFilter); if (jarFileNames.length != 0) { File jarFileToBeDeleted; for (String jarFileName : jarFileNames) { String fullPath = FilenameUtils.concat(dirPath, jarFileName); jarFileToBeDeleted = new File(fullPath); FileUtils.forceDelete(jarFileToBeDeleted); } } } }
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/**
 * Writes the cache.xml content of the given {@link Configuration} into the target directory,
 * UTF-8 encoded.
 *
 * @param dirPath target directory for the cache.xml file
 * @param configuration source of the file name and XML content
 * @throws IOException if the file cannot be written
 */
private void writeCacheXml(final String dirPath, final Configuration configuration)
    throws IOException {
  File cacheXmlFile = new File(FilenameUtils.concat(dirPath, configuration.getCacheXmlFileName()));
  FileUtils.writeStringToFile(cacheXmlFile, configuration.getCacheXmlContent(), "UTF-8");
}
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/**
 * Writes the given jar files into the target directory.
 *
 * @param dirPath target directory where the jar files are to be written
 * @param jarNames names of the jar files; parallel to {@code jarBytes}
 * @param jarBytes byte contents of the jar files, one array per name
 */
private void writeJarFiles(final String dirPath, final String[] jarNames, final byte[][] jarBytes) {
  for (int i = 0; i < jarNames.length; i++) {
    File jarFile = new File(FilenameUtils.concat(dirPath, jarNames[i]));
    try {
      FileUtils.writeByteArrayToFile(jarFile, jarBytes[i]);
    } catch (IOException e) {
      // Best-effort per file: a failed write is logged and the remaining jars are still written.
      logger.info(e);
    }
  }
}
From source file:org.apache.geode.distributed.internal.SharedConfiguration.java
/**
 * Writes the gemfire properties of the given {@link Configuration} to its properties file in
 * the target directory.
 *
 * @param dirPath target directory for the properties file
 * @param configuration source of the file name and properties
 * @throws IOException if the file cannot be written
 */
private void writeProperties(final String dirPath, final Configuration configuration)
    throws IOException {
  String fullPath = FilenameUtils.concat(dirPath, configuration.getPropertiesFileName());
  // try-with-resources: the original never closed the writer when store() threw, leaking it.
  // NOTE(review): FileWriter uses the platform default charset — presumably acceptable for
  // properties content; confirm if non-ASCII values are possible.
  try (BufferedWriter bw = new BufferedWriter(new FileWriter(fullPath))) {
    configuration.getGemfireProperties().store(bw, "");
  }
}
From source file:org.apache.geode.management.internal.cli.functions.ExportSharedConfigurationFunction.java
/**
 * Zips the shared configuration directory and sends the zip bytes back to the caller as a
 * {@link CliFunctionResult}; replies with a failure result if shared configuration is not
 * running, or with the exception if zipping/reading fails.
 */
@Override
public void execute(FunctionContext context) {
  InternalLocator locator = InternalLocator.getLocator();
  String memberName = locator.getDistributedSystem().getName();

  if (!locator.isSharedConfigurationRunning()) {
    context.getResultSender().lastResult(
        new CliFunctionResult(memberName, false, CliStrings.SHARED_CONFIGURATION_NOT_STARTED));
    return;
  }

  SharedConfiguration sc = locator.getSharedConfiguration();
  String zipFileName =
      CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__FILE__NAME, UUID.randomUUID());
  String targetFilePath = FilenameUtils.concat(sc.getSharedConfigurationDirPath(), zipFileName);
  try {
    ZipUtils.zip(sc.getSharedConfigurationDirPath(), targetFilePath);
    // Read the zip into memory, then remove the temporary file before replying.
    File zippedSharedConfig = new File(targetFilePath);
    byte[] zippedConfigData = FileUtils.readFileToByteArray(zippedSharedConfig);
    FileUtils.forceDelete(zippedSharedConfig);
    context.getResultSender().lastResult(new CliFunctionResult(
        locator.getDistributedSystem().getName(), zippedConfigData, new String[] {zipFileName}));
  } catch (Exception e) {
    context.getResultSender().lastResult(new CliFunctionResult(memberName, e, e.getMessage()));
  }
}
From source file:org.apache.geode.management.internal.configuration.ZipUtilsJUnitTest.java
@Before public void setUp() throws Exception { sourceFolder = temporaryFolder.newFolder("sourceFolder"); File clusterFolder = new File(sourceFolder.getCanonicalPath(), clusterFolderName); assertTrue(clusterFolder.mkdir());//from ww w. ja v a 2s . co m File groupFolder = new File(sourceFolder.getCanonicalPath(), groupFolderName); assertTrue(groupFolder.mkdir()); zipFolder = temporaryFolder.newFolder("zipFolder"); FileUtils.writeStringToFile( new File(FilenameUtils.concat(clusterFolder.getCanonicalPath(), clusterTextFileName)), clusterText); FileUtils.writeStringToFile( new File(FilenameUtils.concat(groupFolder.getCanonicalPath(), groupTextFileName)), groupText); }
From source file:org.apache.geode.management.internal.configuration.ZipUtilsJUnitTest.java
@Test public void testZipUtils() throws Exception { File zipFile = new File(zipFolder, "target.zip"); assertFalse(zipFile.exists());//w w w . ja va 2s. co m assertFalse(zipFile.isFile()); ZipUtils.zipDirectory(sourceFolder.getCanonicalPath(), zipFile.getCanonicalPath()); assertTrue(zipFile.exists()); assertTrue(zipFile.isFile()); File destinationFolder = new File( FilenameUtils.concat(temporaryFolder.getRoot().getCanonicalPath(), destinationFolderName)); assertFalse(destinationFolder.exists()); assertFalse(destinationFolder.isFile()); ZipUtils.unzip(zipFile.getCanonicalPath(), destinationFolder.getCanonicalPath()); assertTrue(destinationFolder.exists()); assertTrue(destinationFolder.isDirectory()); File[] destinationSubDirs = destinationFolder.listFiles(); assertNotNull(destinationSubDirs); assertEquals(2, destinationSubDirs.length); File destinationClusterTextFile = new File(FilenameUtils.concat(destinationFolder.getCanonicalPath(), clusterFolderName + File.separator + clusterTextFileName)); assertTrue(destinationClusterTextFile.exists()); assertTrue(destinationClusterTextFile.isFile()); File destinationGroupTextFile = new File(FilenameUtils.concat(destinationFolder.getCanonicalPath(), groupFolderName + File.separator + groupTextFileName)); assertTrue(destinationGroupTextFile.exists()); assertTrue(destinationGroupTextFile.isFile()); assertTrue(clusterText.equals(FileUtils.readFileToString(destinationClusterTextFile))); assertTrue(groupText.equals(FileUtils.readFileToString(destinationGroupTextFile))); }
From source file:org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory.java
/** * Minute based aggregation for hosts.//from ww w. j a va 2s. c o m * Interval : 5 mins */ public static TimelineMetricAggregator createTimelineMetricAggregatorMinute(PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf) { String checkpointDir = metricsConf.get(TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION); String checkpointLocation = FilenameUtils.concat(checkpointDir, HOST_AGGREGATE_MINUTE_CHECKPOINT_FILE); long sleepIntervalMillis = SECONDS .toMillis(metricsConf.getLong(HOST_AGGREGATOR_MINUTE_SLEEP_INTERVAL, 300l)); // 5 mins int checkpointCutOffMultiplier = metricsConf.getInt(HOST_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER, 3); String hostAggregatorDisabledParam = HOST_AGGREGATOR_MINUTE_DISABLED; String inputTableName = METRICS_RECORD_TABLE_NAME; String outputTableName = METRICS_AGGREGATE_MINUTE_TABLE_NAME; if (useGroupByAggregator(metricsConf)) { return new org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.v2.TimelineMetricHostAggregator( "TimelineMetricHostAggregatorMinute", hBaseAccessor, metricsConf, checkpointLocation, sleepIntervalMillis, checkpointCutOffMultiplier, hostAggregatorDisabledParam, inputTableName, outputTableName, 120000l); } return new TimelineMetricHostAggregator("TimelineMetricHostAggregatorMinute", hBaseAccessor, metricsConf, checkpointLocation, sleepIntervalMillis, checkpointCutOffMultiplier, hostAggregatorDisabledParam, inputTableName, outputTableName, 120000l); }